blob_id stringlengths 40 40 | directory_id stringlengths 40 40 | path stringlengths 3 288 | content_id stringlengths 40 40 | detected_licenses listlengths 0 112 | license_type stringclasses 2 values | repo_name stringlengths 5 115 | snapshot_id stringlengths 40 40 | revision_id stringlengths 40 40 | branch_name stringclasses 684 values | visit_date timestamp[us]date 2015-08-06 10:31:46 2023-09-06 10:44:38 | revision_date timestamp[us]date 1970-01-01 02:38:32 2037-05-03 13:00:00 | committer_date timestamp[us]date 1970-01-01 02:38:32 2023-09-06 01:08:06 | github_id int64 4.92k 681M ⌀ | star_events_count int64 0 209k | fork_events_count int64 0 110k | gha_license_id stringclasses 22 values | gha_event_created_at timestamp[us]date 2012-06-04 01:52:49 2023-09-14 21:59:50 ⌀ | gha_created_at timestamp[us]date 2008-05-22 07:58:19 2023-08-21 12:35:19 ⌀ | gha_language stringclasses 147 values | src_encoding stringclasses 25 values | language stringclasses 1 value | is_vendor bool 2 classes | is_generated bool 2 classes | length_bytes int64 128 12.7k | extension stringclasses 142 values | content stringlengths 128 8.19k | authors listlengths 1 1 | author_id stringlengths 1 132 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
9f9f80ab179a523cd94e5105dec350aa9f2db895 | 84e4149b3571ff4abe5c27a66ecbde03c5afec3c | /chapter_10/section_4_3/remember_me.py | 290e2cdaddd45c4dd93827b728d06f21b960e63d | [] | no_license | zhanlu-wm/Python-Crash-Course | 6efa04bd5c03e37394b3602d20e7ae57688836e7 | 043fe97b4acdf0008351fd0fdb045888e9bdd44d | refs/heads/master | 2021-07-18T18:34:32.435763 | 2017-10-23T15:27:17 | 2017-10-23T15:27:17 | 103,259,810 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 505 | py | import json
def greet_user():
"""问候用户,并指出其名字"""
filename = 'username.json'
try:
with open(filename) as f_obj:
username = json.load(f_obj)
except FileNotFoundError:
username = input("What is your name? ")
with open(filename, 'w') as f_obj:
json.dump(username, f_obj)
print("We'll remember you when you come back, " + username + "!")
else:
print("Welcome back, " + username + "!")
greet_user() | [
"ncu09wangming@163.com"
] | ncu09wangming@163.com |
b54e8f18efa6c1389182d0d9d0d9ed00020a5ac5 | 4d5e6e0a7057123ddd7cb97027e667117e1be143 | /data_structure/python_dictionary.py | 7c2cdca3a3a25e5200e605b252fd542c38fde9b4 | [] | no_license | shubhomedia/Learn_Python | cee48990c04521fcbb7dbf5ad120c69170dcd1be | 01e0a8e3dc2de87b09c963e7cb9fc5e246831ddb | refs/heads/master | 2021-07-01T08:53:51.151326 | 2021-01-02T17:31:36 | 2021-01-02T17:31:36 | 204,191,119 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 363 | py | #python Dictionary
my_dictionary = {'key':'value',('K','E','Y'):5}
my_dictionary1 = {x:x+1 for x in range(10)}
print(my_dictionary['key'])
print(my_dictionary1)
try:
print(my_dictionary[1])
except Exception as e:
print(e)
print(my_dictionary.keys()) # print keys
print(my_dictionary.values()) # print values
my_dictionary1.clear()
print(my_dictionary1) | [
"shubhomedia@gmail.com"
] | shubhomedia@gmail.com |
227f5a29b56728a8daf1b78dbeac24d393ae2c6d | 8a47ab47a101d4b44dd056c92a1763d5fac94f75 | /力扣/简单练习/344-双指针实现反转字符串.py | ed42ba2e0f43f9e67fb638bb7438fb76bc5b6fbc | [] | no_license | Clint-cc/Leecode | d5528aa7550a13a5bcf2f3913be2d5db2b5299f3 | 8befe73ab3eca636944800e0be27c179c45e1dbf | refs/heads/master | 2020-09-14T07:35:41.382377 | 2020-07-01T01:27:18 | 2020-07-01T01:27:18 | 223,066,742 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 266 | py | # !D:/Code/python
# -*- coding:utf-8 -*-
# @Author : Clint
# 双指针
def reverseString(s):
i = 0
j = len(s) - 1
while i < j:
s[i], s[j] = s[j], s[i]
i += 1
j -= 1
return s
print(reverseString(['w', 'e', 'a', 'r', 'y']))
| [
"clint1801@163.com"
] | clint1801@163.com |
1773f62bd5e54835d7f80a13b980ba3bec26d85b | b771dbc3dc2dc330cf67ff5d030c3bbd474b5a86 | /setup.py | d302b0f496404be0fb05a6368e9df706b82bb04a | [] | no_license | RedTurtle/pyramid_alfresco | 8ddd273604edfdf36eabf11205c38d7d140d8312 | 63129943f52839956e3a39244c1f547ebe5a342f | refs/heads/master | 2021-01-10T16:10:22.817580 | 2013-04-29T08:10:06 | 2013-04-29T08:10:06 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,382 | py | import os
from setuptools import setup, find_packages
here = os.path.abspath(os.path.dirname(__file__))
README = open(os.path.join(here, 'README.md')).read()
CHANGES = open(os.path.join(here, 'CHANGES.txt')).read()
requires = [
'pyramid',
'SQLAlchemy',
'transaction',
'pyramid_tm',
'pyramid_debugtoolbar',
'zope.sqlalchemy',
'waitress',
'pyramid_fanstatic',
'js.bootstrap==2.2.2',
'js.jqueryui',
'js.tinymce',
'velruse',
'pyramid_beaker',
'cmislib'
]
setup(name='pyramid_alfresco',
version='0.0',
description='pyramid_alfresco',
long_description=README + '\n\n' + CHANGES,
classifiers=[
"Programming Language :: Python",
"Framework :: Pyramid",
"Topic :: Internet :: WWW/HTTP",
"Topic :: Internet :: WWW/HTTP :: WSGI :: Application",
],
author='',
author_email='',
url='',
keywords='web wsgi bfg pylons pyramid',
packages=find_packages(),
include_package_data=True,
zip_safe=False,
test_suite='pyramid_alfresco',
install_requires=requires,
entry_points="""\
[paste.app_factory]
main = pyramid_alfresco:main
[console_scripts]
initdb = pyramid_alfresco.initdb:main
[fanstatic.libraries]
pyramid_alfresco = pyramid_alfresco.resources:library
""",
)
| [
"andrew@mleczko.net"
] | andrew@mleczko.net |
1aa621a701a09656aeb71c5930bc6daca9a9e26d | d7949f5b2075384075fa066d571144bbbe02ffd8 | /supervised/utils/subsample.py | cfd80b0b6af309dc95d037293f409a4fbad068f9 | [
"MIT"
] | permissive | mljar/mljar-supervised | 57fb56b05b1a53ea979bf9cb9b127f314853bdbd | 6722eb1e6441c11990f2aed01a444ddcae478c09 | refs/heads/master | 2023-08-30T23:48:28.692945 | 2023-08-28T15:09:39 | 2023-08-28T15:09:39 | 156,218,203 | 2,759 | 388 | MIT | 2023-08-28T10:24:12 | 2018-11-05T12:58:04 | Python | UTF-8 | Python | false | false | 463 | py | import numpy as np
import pandas as pd
from sklearn.model_selection import train_test_split
from supervised.algorithms.registry import REGRESSION
def subsample(X, y, ml_task, train_size):
shuffle = True
stratify = None
if ml_task != REGRESSION:
stratify = y
X_train, X_test, y_train, y_test = train_test_split(
X, y, train_size=train_size, shuffle=shuffle, stratify=stratify
)
return X_train, X_test, y_train, y_test
| [
"pplonski86@gmail.com"
] | pplonski86@gmail.com |
36b154704e3c1462f72659cfa2cda1ae2a75c817 | 78520f19165b33909364299aaaea2283b8aa2367 | /keywords/elif_kwd.py | 9560d07296baca68083aaac8fc0abe480d2b3939 | [
"BSD-2-Clause"
] | permissive | s3n0/Python-Course | 3d3618b97c7d4d9bbe7c3987d2c329203251029b | d48568d096e9a78e397eefd83b2588ddd27aa481 | refs/heads/master | 2020-09-09T00:51:28.874296 | 2019-09-29T17:50:48 | 2019-09-29T17:50:48 | 221,294,533 | 0 | 1 | null | 2019-11-12T19:23:11 | 2019-11-12T19:23:10 | null | UTF-8 | Python | false | false | 218 | py | #!/usr/bin/python3
# elif_kwd.py
name = "Luke"
if name == "Jack":
print ("Hello Jack!")
elif name == "John":
print ("Hello John!")
elif name == "Luke":
print ("Hello Luke!")
else:
print ("Hello there!")
| [
"noreply@github.com"
] | s3n0.noreply@github.com |
c9a06a45ccc50918208dc3b38d5f8f81ece849f5 | 2bb90b620f86d0d49f19f01593e1a4cc3c2e7ba8 | /pardus/playground/fatih/xorg/xorg-video-imstt/actions.py | 77b159e19b6e28768b169754190e63b78bbf5007 | [] | no_license | aligulle1/kuller | bda0d59ce8400aa3c7ba9c7e19589f27313492f7 | 7f98de19be27d7a517fe19a37c814748f7e18ba6 | refs/heads/master | 2021-01-20T02:22:09.451356 | 2013-07-23T17:57:58 | 2013-07-23T17:57:58 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 394 | py | #!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Licensed under the GNU General Public License, version 2.
# See the file http://www.gnu.org/copyleft/gpl.txt.
from pisi.actionsapi import autotools
from pisi.actionsapi import get
WorkDir = "xf86-video-imstt-%s" % get.srcVERSION()
def setup():
autotools.configure()
def build():
autotools.make()
def install():
autotools.install()
| [
"yusuf.aydemir@istanbul.com"
] | yusuf.aydemir@istanbul.com |
919333992dfe9e94cf1dc40447cfe0e90db3d328 | bd696223aaf5404987df11832b4c17c916b9690f | /rec_sample/gaussian_process_regressor_numeric_rating/gaussian_process_regressor_numeric_rating/main.py | 2d0d88d6ab609827bf79b3e34be5853efebdce0f | [] | no_license | wararaki718/scrapbox3 | 000a285477f25c1e8a4b6017b6ad06c76f173342 | 9be5dc879a33a1988d9f6611307c499eec125dc2 | refs/heads/master | 2023-06-16T08:46:32.879231 | 2021-07-17T14:12:54 | 2021-07-17T14:12:54 | 280,590,887 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 435 | py | import numpy as np
from sklearn.datasets import load_wine
from sklearn.gaussian_process import GaussianProcessRegressor
def main():
wine = load_wine()
X = wine.data
y = wine.target
gpr = GaussianProcessRegressor()
gpr.fit(X, y)
for i in np.random.choice(X.shape[0], 10):
x = X[i, :]
print(f'real:{y[i]}, predict: {gpr.predict([x])}')
print('DONE')
if __name__ == '__main__':
main()
| [
"ky7.ott.w@gmail.com"
] | ky7.ott.w@gmail.com |
eff38dcf60b38fcc1037d870b04197c61add0189 | 971e0efcc68b8f7cfb1040c38008426f7bcf9d2e | /tests/model_control/detailed/transf_Fisher/model_control_one_enabled_Fisher_Lag1Trend_Seasonal_MonthOfYear_SVR.py | bf6738ef2f81d7475f6a83e95b8def2231eab79b | [
"BSD-3-Clause",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | antoinecarme/pyaf | a105d172c2e7544f8d580d75f28b751351dd83b6 | b12db77cb3fa9292e774b2b33db8ce732647c35e | refs/heads/master | 2023-09-01T09:30:59.967219 | 2023-07-28T20:15:53 | 2023-07-28T20:15:53 | 70,790,978 | 457 | 77 | BSD-3-Clause | 2023-03-08T21:45:40 | 2016-10-13T09:30:30 | Python | UTF-8 | Python | false | false | 160 | py | import tests.model_control.test_ozone_custom_models_enabled as testmod
testmod.build_model( ['Fisher'] , ['Lag1Trend'] , ['Seasonal_MonthOfYear'] , ['SVR'] ); | [
"antoine.carme@laposte.net"
] | antoine.carme@laposte.net |
b512a1038df77f42373157c2e38de6ded09715d8 | d52522a713d4e0522c22692e05948be897c4339b | /constants/i18n/greetings.py | 3cbd99d5b4f8e25b26351d8b69f240a5710c19dc | [
"MIT"
] | permissive | frankwrk/django-htk | de52a9132f494845ed9c3cb19a9e81e22f9a57a3 | fa9c6fe18d8651e4b96f036429169d741a1f2fe0 | refs/heads/master | 2023-04-21T10:37:57.563298 | 2019-03-04T10:47:49 | 2019-03-04T10:47:49 | 173,731,278 | 0 | 0 | MIT | 2023-04-03T23:23:47 | 2019-03-04T11:21:42 | Python | UTF-8 | Python | false | false | 844 | py | #! /usr/bin/env python
# -*- coding: utf-8 -*-
I18N_GREETINGS = {
'ar' : [
u'مرحبا',
],
'el' : [
u'Χαίρετε',
],
'en' : [
u'Greetings',
u'Hello',
],
'eo' : [
u'saluton',
],
'es' : [
u'Hola',
u'Como estas',
],
'fr' : [
u'Salut',
],
'haw' : [
u'Aloha',
],
'he' : [
u'שלום',
],
'hi' : [
u'नमस्ते'
],
'it' : [
u'Ciao',
],
'ja' : [
u'こんにちは',
],
'ko' : [
u'안녕하세요',
],
'mn' : [
u'Сайн уу',
],
'nl' : [
u'Hallo',
],
'ru' : [
u'Здравствуйте',
],
'vi' : [
u'chào bạn',
],
'zh' : [
u'你好',
],
}
| [
"hello@jontsai.com"
] | hello@jontsai.com |
67a9ab339d0c77fe6b902946ddb037814635bb58 | d43c1974de5ef60a85d0e8af648f7d1546c1b5c3 | /exceptions.py | 8525e10d81204d53fb7030613c82e3949c24330c | [] | no_license | Hermotimos/Learning | 7168146b1ba80827997a895716c645dda57a47d7 | 7c5453279a43e9a15c66a1cf925aa9c05c820224 | refs/heads/master | 2023-08-12T15:42:09.043657 | 2023-04-01T09:36:21 | 2023-04-01T09:36:21 | 180,561,558 | 0 | 0 | null | 2023-07-25T21:26:23 | 2019-04-10T10:50:22 | Python | UTF-8 | Python | false | false | 3,366 | py | """
This file is for learning and exercise purposes.
Topics:
- exceptions: syntax and catching
- examples: ZeroDivisionError, AssertionError
Sources:
https://ocw.mit.edu/courses/electrical-engineering-and-computer-science/6-0001-introduction-to-computer-science-and-programming-in-python-fall-2016/lecture-videos/lecture-7-testing-debugging-exceptions-and-assertions/
"""
############################################################################################
# 1)
def get_ratios(list1, list2):
ratios = []
for index in range(len(list1)):
try:
ratios.append(list1[index] / list2[index])
except ZeroDivisionError:
ratios.append(float('nan'))
except:
raise ValueError('get_ratios called with wrong argument(s)')
return ratios
lista1 = [0, 1, 2, 3, 4, 5]
lista2 = [0, 4, 4, 4, 4, 4]
print(get_ratios(lista1, lista2))
# special float value 'nan' - to secure coherence in the list (only floats)
print(type(float('nan')))
print('#'*30)
############################################################################################
# 2)
def yearly_scores_with_avg(scores):
list_with_scores = []
for elem in scores:
list_with_scores.append([elem[0], elem[1], round(avg(elem[1]), 2)])
return list_with_scores
def avg(list):
try:
return sum(list)/len(list)
except ZeroDivisionError:
print("Warning: some students have no results data (indicated by 0 as average score)")
return 0.0
test_grades = [[['Peter', 'Parker'], [80.0, 70.0, 85.0]],
[['Bruce', 'Wayne'], [100.0, 80.0, 74.0]],
[['Clint', 'Eastwood'], [25.0, 80.0, 85.0]],
[['Mr.', 'Nobody'], []],
[['Clint', 'Westwood'], [25.0, 82.0, 85.0]]]
print(test_grades)
print(yearly_scores_with_avg(test_grades))
print('#'*30)
############################################################################################
# 3) ASSERTIONS:
def yearly_scores_with_avg(scores):
list_with_scores = []
for elem in scores:
list_with_scores.append([elem[0], elem[1], round(avg(elem[1]), 2)])
return list_with_scores
def avg(list):
try:
assert len(list) > 0 # here comes assertion
return sum(list)/len(list)
except AssertionError: # here comes assertion handling
print("Warning: some students have no results data (indicated by 0 as average score)")
return 0.0
except Exception as exception1: # handling of other exceptions
print(f"An error occured: {exception1}")
test_grades = [[['Peter', 'Parker'], [80.0, 70.0, 85.0]],
[['Bruce', 'Wayne'], [100.0, 80.0, 74.0]],
[['Clint', 'Eastwood'], [25.0, 80.0, 85.0]],
[['Mr.', 'Nobody'], []],
[['Clint', 'Westwood'], [25.0, 82.0, 85.0]]]
print(test_grades)
print(yearly_scores_with_avg(test_grades))
print()
# ZADANIE [my version]
def celcius_to_kelvin(temp):
try:
assert temp >= -273.15
temp += 273.15
return temp
except AssertionError:
return "Wrong temperature given: lower than absolute zero !"
print(celcius_to_kelvin(20))
print(celcius_to_kelvin(-400))
| [
"lukas.kozicki@gmail.com"
] | lukas.kozicki@gmail.com |
a50c477311cf2c7d396443b3959bdca2fd9644de | 0feb9799532328d2eb5c9673751bf44a06652375 | /ethics/russellnorvig.py | 1391b1d8e488edda475086428c8fd190f4e9c81b | [] | no_license | krishnakatyal/philosophy | ebc78947508f12a9d06356d2cc8d38f6afb0510a | f7735e9adc9ba609894d89384562dbda2f794548 | refs/heads/master | 2022-03-28T14:00:52.460599 | 2020-01-25T00:28:55 | 2020-01-25T00:28:55 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 843 | py |
"""
An artificial intelligence (AI) approach to ethics can embrace many different
ideas from philosophy. In "Artificial Intelligence: A Modern Approach," (known as AIMA)
computer scientists Peter Norvig and Stuart Russell believe we can create goals for AI
to act rationally. Russell asks "What is AI?" as "What is intelligence?" to identify intelligence
closely tied with rationality. Intelligent agents can take percepts as input and act
based upon them. We can create performance measures by calculating V the expected utility
according to the performance measure U of the agent function f that operates on E:
f_opt = max V(f, E, U)
"""
def fopt(V, E, U):
"""
Maximize our function fopt by maximizing expected utility V in the corresponding environment E
with some performance measure U.
"""
return max(V(f0, E, U))
| [
"shussainather@gmail.com"
] | shussainather@gmail.com |
2de8fd5dc49632454858701d0eb25dff5269111b | 163bbb4e0920dedd5941e3edfb2d8706ba75627d | /Code/CodeRecords/2595/49687/317028.py | 83ccacc93c888f73a52a2032b57d71c47b87f5fa | [] | no_license | AdamZhouSE/pythonHomework | a25c120b03a158d60aaa9fdc5fb203b1bb377a19 | ffc5606817a666aa6241cfab27364326f5c066ff | refs/heads/master | 2022-11-24T08:05:22.122011 | 2020-07-28T16:21:24 | 2020-07-28T16:21:24 | 259,576,640 | 2 | 1 | null | null | null | null | UTF-8 | Python | false | false | 217 | py | listN = []
listK = []
n = int(input())
for i in range(n):
inp = input().split()
listN.append(int(inp[0]))
listK.append(int(inp[1]))
for i in range(n):
N = listN[i]
K = listK[i]
print(K**(N-1))
| [
"1069583789@qq.com"
] | 1069583789@qq.com |
28eaa1e1e8d6cfee324f75c131cf883a1ef3d182 | 99c4d4a6592fded0e8e59652484ab226ac0bd38c | /code/batch-2/vse-naloge-brez-testov/DN12-M-044.py | b8cbd3d76ee516ba832cb8dba72e35c8674aa25d | [] | no_license | benquick123/code-profiling | 23e9aa5aecb91753e2f1fecdc3f6d62049a990d5 | 0d496d649247776d121683d10019ec2a7cba574c | refs/heads/master | 2021-10-08T02:53:50.107036 | 2018-12-06T22:56:38 | 2018-12-06T22:56:38 | 126,011,752 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,268 | py | def preberi(ime_datoteke):
a = dict ()
b = dict ()
with open (ime_datoteke) as g:
vrstica = g.readline ()
count = 1
while vrstica:
a[count] = []
b[count] = []
for e in vrstica.strip ().split():
e = int ( e )
a[count].append ( e )
for d in a.items ():
e, k = d
s = []
x = 1000000
for i in k:
if i < x:
x = i
index = k.index ( x )
for element in k[index:]:
s.append ( element )
for e in k[0:index]:
s.append ( e )
b[count] = s
vrstica = g.readline ()
count += 1
return b
def mozna_pot(pot, zemljevid):
zk = []
for d in zemljevid.items ():
e, k = d
if len ( k ) == 1:
zk.append ( e )
i = 0
if pot[i] in zk and pot[-1] in zk:
i += 1
while (i < len ( pot ) - 1):
if pot[i] not in zk:
if pot[i] != pot[i + 1]:
i += 1
else:
return False
break
else:
return False
break
for i in range ( 1, len ( pot ) ):
if pot[i] not in zemljevid[pot[i - 1]]:
return False
return True
else:
return False
def hamiltonova(pot, zemljevid):
zk = []
for i in zemljevid:
if len(zemljevid[i]) == 1:
zk.append(i)
if len(pot) > 1:
if pot[0] not in zk:
return False
if pot[-1] not in zk:
return False
if len(zemljevid)-len(zk)+2 != len(pot):
return False
for i in range(0, len(pot)):
for j in range(i, len(pot)):
if i != j:
if pot[i] == pot[j]:
return False
for i in range(1, len(pot)):
if pot[i] not in zemljevid[pot[i-1]]:
return False
return True
| [
"benjamin.fele@gmail.com"
] | benjamin.fele@gmail.com |
633ef5cfdfe205fc64d22d17d19aa76fd7270d9e | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p03910/s208268903.py | 9205fa08e81866830a16082ced5475d645ca65f1 | [] | no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 168 | py | N=int(input())
l=0
r=0
for i in range(1,N+1):
s=i*(i+1)//2
if s>=N:
l=i
r=s-N
break
ans=list(range(1,l+1))
if r!=0:
ans.remove(r)
print(*ans,sep='\n') | [
"66529651+Aastha2104@users.noreply.github.com"
] | 66529651+Aastha2104@users.noreply.github.com |
1786fbc05af84e705fd1009dcb2cfb8ad667abcb | c8ef568cd88a82459fca9d7ef2ca298763ef75e1 | /04. picamera/01. Basic/ex01.py | 2a3f840788fd062d2b7c58d3ee373655be8da213 | [] | no_license | caniro/multicampus-iot-raspberrypi | 8017711ebe4f9e9a7954649333c8106727b4ff86 | b870b25b6386c5e7954b0cdb1f966a6db89e61fd | refs/heads/main | 2023-08-12T21:36:28.621846 | 2021-09-27T08:56:40 | 2021-09-27T08:56:40 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 301 | py | # 파이 카메라 인식 테스트 및 미리보기
from picamera import PiCamera
from time import sleep
camera = PiCamera()
#camera.rotation = 180 # 180도 회전
print(camera.resolution)
camera.start_preview()
# camera.start_preview(alpha=200) # 투명도 : 0 ~ 255
sleep(1)
camera.stop_preview()
| [
"caniro@naver.com"
] | caniro@naver.com |
a4908a97af26ea234e6156e003de1a6e3a6b89a8 | b1bc2e54f8cd35c9abb6fc4adb35b386c12fe6b4 | /toontown/src/quest/BlinkingArrows.py | 534b1179b0efd1a72384597e1a99a5cfb1466f4b | [] | no_license | satire6/Anesidora | da3a44e2a49b85252b87b612b435fb4970469583 | 0e7bfc1fe29fd595df0b982e40f94c30befb1ec7 | refs/heads/master | 2022-12-16T20:05:13.167119 | 2020-09-11T16:58:04 | 2020-09-11T17:02:06 | 294,751,966 | 89 | 32 | null | null | null | null | UTF-8 | Python | false | false | 2,996 | py | from direct.interval.IntervalGlobal import *
from pandac.PandaModules import *
class BlinkingArrows:
def __init__(self, parent=aspect2d, otherNode=None):
self.arrow1 = loader.loadModel('phase_3/models/props/arrow')
self.arrow2 = loader.loadModel('phase_3/models/props/arrow')
self.arrowTrack = None
self.parent = parent
# The otherNode is an optional node that can flash along with the
# arrows.
self.otherNode = otherNode
def delete(self):
self.arrowsOff()
self.arrow1.removeNode()
self.arrow2.removeNode()
del self.arrow1
del self.arrow2
def arrowsOn(self, x1, y1, h1, x2, y2, h2, onTime=0.75, offTime=0.75):
self.stopArrowsFlashing()
self.arrow1.setBin('gui-popup', 0)
self.arrow2.setBin('gui-popup', 0)
self.arrow1.reparentTo(self.parent)
self.arrow2.reparentTo(self.parent)
self.arrow1.setScale(0.2)
self.arrow2.setScale(0.2)
self.arrow1.setPos(x1, 0, y1)
self.arrow2.setPos(x2, 0, y2)
self.arrow1.setR(h1)
self.arrow2.setR(h2)
self.onTime = onTime
self.offTime = offTime
self.startArrowsFlashing()
def arrowsOff(self):
self.stopArrowsFlashing()
self.arrow1.reparentTo(hidden)
self.arrow2.reparentTo(hidden)
def startArrowsFlashing(self):
onColor = Vec4(1,1,1,1)
offColor = Vec4(1,1,1,0.25)
self.arrow1.show()
self.arrow2.show()
if self.otherNode:
self.otherNode.show()
self.arrowTrack = Sequence(
Parallel(
self.arrow1.colorScaleInterval(self.onTime, onColor, offColor),
self.arrow2.colorScaleInterval(self.onTime, onColor, offColor),
self.otherNode.colorScaleInterval(self.onTime, onColor, offColor),
),
Parallel(
self.arrow1.colorScaleInterval(self.offTime, offColor, onColor),
self.arrow2.colorScaleInterval(self.offTime, offColor, onColor),
self.otherNode.colorScaleInterval(self.offTime, offColor, onColor),
),
)
else:
self.arrowTrack = Sequence(
Parallel(
self.arrow1.colorScaleInterval(self.onTime, onColor, offColor),
self.arrow2.colorScaleInterval(self.onTime, onColor, offColor),
),
Parallel(
self.arrow1.colorScaleInterval(self.offTime, offColor, onColor),
self.arrow2.colorScaleInterval(self.offTime, offColor, onColor),
),
)
self.arrowTrack.loop()
def stopArrowsFlashing(self):
if self.arrowTrack:
self.arrowTrack.finish()
self.arrowTrack = None
self.arrow1.hide()
self.arrow2.hide()
if self.otherNode:
self.otherNode.hide()
| [
"66761962+satire6@users.noreply.github.com"
] | 66761962+satire6@users.noreply.github.com |
e02026690c4a2ea039bfc824c17165f8b40c88c6 | e5a52968a86946c4839b64d218cb25f4a91e5ee4 | /ml_project/enities/__init__.py | 692cab33e78b4a63c6e7f83b77aa4ca9d7921440 | [] | no_license | made-ml-in-prod-2021/MaksM89 | 1a6f40c66de671dca2345e1b44051c01d166e2d8 | c00a04b6f77f682e5ff419c0afc4c1ea4669deed | refs/heads/main | 2023-06-07T13:27:14.532934 | 2021-06-24T08:33:08 | 2021-06-24T08:33:08 | 354,295,345 | 1 | 0 | null | 2021-06-24T08:33:09 | 2021-04-03T13:10:38 | Jupyter Notebook | UTF-8 | Python | false | false | 442 | py | from .data_params import Features, InputDataset, SplittingParams
from .train_params import TrainingParams
from .train_pipeline_params import (
read_training_pipeline_params,
TrainingPipelineParamsSchema,
TrainingPipelineParams,
)
__all__ = [
"Features",
"InputDataset",
"SplittingParams",
"TrainingPipelineParams",
"TrainingPipelineParamsSchema",
"TrainingParams",
"read_training_pipeline_params",
]
| [
"noreply@github.com"
] | made-ml-in-prod-2021.noreply@github.com |
c10fa354b6592ecbb7c64daa0fb6e6f00b1a9cc6 | ac5e52a3fc52dde58d208746cddabef2e378119e | /exps-mrsp.0/mrsp_ut=3.5_rd=0.65_rw=0.06_rn=4_u=0.075-0.325_p=harmonic-2/sched=RUN_trial=18/sched.py | cccef8ab09306debebd37b70d423b713008846dd | [] | no_license | ricardobtxr/experiment-scripts | 1e2abfcd94fb0ef5a56c5d7dffddfe814752eef1 | 7bcebff7ac2f2822423f211f1162cd017a18babb | refs/heads/master | 2023-04-09T02:37:41.466794 | 2021-04-25T03:27:16 | 2021-04-25T03:27:16 | 358,926,457 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 487 | py | -S 0 -X RUN -Q 0 -L 3 102 400
-S 0 -X RUN -Q 0 -L 3 64 300
-S 0 -X RUN -Q 0 -L 3 61 200
-S 1 -X RUN -Q 1 -L 2 57 300
-S 1 -X RUN -Q 1 -L 2 52 175
-S 1 -X RUN -Q 1 -L 2 42 200
-S 3 -X RUN -Q 2 -L 1 41 200
-S 3 -X RUN -Q 2 -L 1 37 175
-S 3 -X RUN -Q 2 -L 1 33 200
-S 2 -X RUN -Q 3 -L 1 32 100
-S 2 -X RUN -Q 3 -L 1 32 200
-S 2 -X RUN -Q 3 -L 1 30 125
-S 4 25 300
-S 4 25 300
-S 4 25 175
-S 4 23 125
-S 4 18 150
-S 4 11 100
-S 4 1 300
| [
"ricardo.btxr@gmail.com"
] | ricardo.btxr@gmail.com |
8c647b3ff310fdfcadf078532d7ada9211a25871 | f338eb32c45d8d5d002a84798a7df7bb0403b3c4 | /Calibration/EcalCalibAlgos/python/electronRecalibSCAssociator_cfi.py | a80848d2096bba57fd6a045c88013391ba722a56 | [] | permissive | wouf/cmssw | 0a8a8016e6bebc611f1277379e12bef130464afb | 60da16aec83a0fc016cca9e2a5ed0768ba3b161c | refs/heads/CMSSW_7_3_X | 2022-06-30T04:35:45.380754 | 2015-05-08T17:40:17 | 2015-05-08T17:40:17 | 463,028,972 | 0 | 0 | Apache-2.0 | 2022-02-24T06:05:30 | 2022-02-24T06:05:26 | null | UTF-8 | Python | false | false | 465 | py | import FWCore.ParameterSet.Config as cms
electronRecalibSCAssociator = cms.EDProducer("ElectronRecalibSuperClusterAssociator",
electronCollection = cms.string(''),
scIslandCollection = cms.string('IslandEndcapRecalibSC'),
scIslandProducer = cms.string('correctedIslandEndcapSuperClusters'),
scProducer = cms.string('correctedHybridSuperClusters'),
electronProducer = cms.string('electronFilter'),
scCollection = cms.string('recalibSC')
)
| [
"giulio.eulisse@gmail.com"
] | giulio.eulisse@gmail.com |
14d500c1c4aae9c78ce481e73ff595c4ecac06f5 | d424bb5aef62c9bf07319a26cebc4f14433f927d | /ganji/GraphMaker/sample_bar_chart.py | 501426d9d6cf28e2227047ea2bc5304f53b4fc51 | [] | no_license | adorn331/CrawlerToy | d840104610ae3f8b51ddf5e8cb604573c626cc3b | e51ffd2785c3c22e934390a555257314ae6ef858 | refs/heads/master | 2021-07-05T18:15:29.892398 | 2017-09-24T14:12:08 | 2017-09-24T14:12:08 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 936 | py | #统计北京某个区各种类目前三名的柱状图
import pymongo
import charts
conn = pymongo.MongoClient('localhost', 27017)
ganji_db = conn['ganji']
data_collection = ganji_db['sample']
areas = list(set(i['area'][0] for i in data_collection.find() if i['area']))
print(areas) #查看所有区,选一个柱状图
area = '朝阳' #选中了朝阳区,画图
pipeline = [
{'$match':{ 'area' :area}}, #area只要在'area'这个list数据项里面就会被match
{'$group':{'_id':'$cates', 'counts':{'$sum':1}}},
#'avg_price':{'$avg':'$price'} 除了实现统计个数还可以取它另一个字段的平均值
{'$sort':{'counts':-1}},
{'$limit':3}
]
# for i in data_collection.aggregate(pipeline):
# print(i)
series = [{
'name': i['_id'],
'data':[i['counts']],
'type':'column'
} for i in data_collection.aggregate(pipeline)]
charts.plot(series, show='inline') | [
"="
] | = |
0042178b6dbbb24ce80e628b46cffd655a787f57 | 76718066cbc971b83d1d633cad9daac52ad3ec50 | /src/truverifi/_compat.py | 4e02e40734d0f0f75a0c14495eeea428a0df1a6e | [
"MIT"
] | permissive | achillesrasquinha/truverifi | 594366622c535faefdc25b8ef2dabdbe3523c733 | 4e1b3760a9744b44a86ec2dfaff6714680c8b78c | refs/heads/master | 2020-05-18T05:13:06.665240 | 2019-04-30T05:57:10 | 2019-04-30T05:57:10 | 184,199,229 | 0 | 2 | null | null | null | null | UTF-8 | Python | false | false | 694 | py | # imports - standard imports
import sys
PYTHON_VERSION = sys.version_info
def _is_python_version(*args, **kwargs):
major = kwargs.get("major", None)
minor = kwargs.get("minor", None)
patch = kwargs.get("patch", None)
result = True
if major:
result = result and major == PYTHON_VERSION.major
if minor:
result = result and minor == PYTHON_VERSION.minor
if patch:
result = result and patch == PYTHON_VERSION.micro
return result
PY2 = _is_python_version(major = 2)
if PY2:
# Add your Python 2 imports here.
from urlparse import urljoin
else:
# Add your Python 3 imports here.
from urllib.parse import urljoin | [
"achillesrasquinha@gmail.com"
] | achillesrasquinha@gmail.com |
7c738fca51ec5ae686b8427e6283007ca86b8fe3 | 9743d5fd24822f79c156ad112229e25adb9ed6f6 | /xai/brain/wordbase/otherforms/_reconsidered.py | c4e21782a16ada2154592bfc1620661ed878cbaa | [
"MIT"
] | permissive | cash2one/xai | de7adad1758f50dd6786bf0111e71a903f039b64 | e76f12c9f4dcf3ac1c7c08b0cc8844c0b0a104b6 | refs/heads/master | 2021-01-19T12:33:54.964379 | 2017-01-28T02:00:50 | 2017-01-28T02:00:50 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 240 | py |
#calss header
class _RECONSIDERED():
def __init__(self,):
self.name = "RECONSIDERED"
self.definitions = reconsider
self.parents = []
self.childen = []
self.properties = []
self.jsondata = {}
self.basic = ['reconsider']
| [
"xingwang1991@gmail.com"
] | xingwang1991@gmail.com |
b042807be384a59317acdd5cc345a25db9bcb91d | 779c7d032eb8d5a4421b8b236c9004559b70756d | /apps/guide/views.py | d0c5fdb435ccd749ccf739f768d9dd442ab5c698 | [] | no_license | corincerami/opus | 727e91a461a6488f2bc263ca6c98a27a93424228 | 281f246ff5bd703a009ab3bad6271249e0e00bff | refs/heads/master | 2022-11-11T13:46:06.317320 | 2018-01-11T00:33:54 | 2018-01-11T00:33:54 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 465 | py | from guide.models import *
from django.shortcuts import render
from django.http import HttpResponse,Http404
from metrics.views import update_metrics
def guide(request):
update_metrics(request)
base_url = 'http://' +request.META['HTTP_HOST'] + '/opus/'
groups = Group.objects.all()
resources = Resource.objects.filter(display=True).select_related().order_by('disp_order')
return render(request, 'guide.html', locals())
# def update(request)
| [
"lballard.cat@gmail.com"
] | lballard.cat@gmail.com |
949c746f1ce29096b7f31f94da55866632df6c4d | f7ac9ae8835b243a6ddbf4a1e8230883266186b9 | /Maximum-AND/code.py | 80d481203b5b01876dc761171b693523328122c4 | [] | no_license | sohailshaukat/HackerEarth-competition-solutions | b992d8bef8bd9f0806e2fa1cb2870500647942ee | 6e969aec4b8e224a2c0f1b18bddde250d2bcced6 | refs/heads/master | 2020-07-04T21:23:57.339578 | 2019-08-15T20:04:23 | 2019-08-15T20:04:23 | 202,422,662 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 438 | py | '''
-sohailshaukat ( https://github.com/sohailshaukat )
-sohail47k@gmail.com
'''
times = int(input())
for _ in range(times):
inp = input().split()
a = int(inp[0])
b = int(inp[1])
maximum_and = 0
maximum = (2 ** len(bin(a).replace('0b','')))-1
for i in range(b+1,a,-1):
if i &(i-1) > maximum_and:
maximum_and = i&(i-1)
if maximum_and == maximum:
break
print(maximum_and)
| [
"sohail47k@gmail.com"
] | sohail47k@gmail.com |
eff291c741a23fff2801d4e5b8d88673b9c4de5e | 3a7412502b89b917f23cda9a3318d2dc4d02185b | /panoptes/accounts/fields.py | 0d6c7b6a50e7bf1df9d2d8df027f17552bc77e87 | [
"BSD-2-Clause"
] | permissive | cilcoberlin/panoptes | 5f0b19d872993bc5c7f51a44c9ccc596fe0a8ab5 | 67d451ea4ffc58c23b5f347bfa5609fa7f853b45 | refs/heads/master | 2021-01-21T00:17:42.038637 | 2012-07-10T03:20:47 | 2012-07-10T03:20:47 | 1,660,305 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 471 | py |
from django import forms
from django.contrib.auth.models import User
class UserField(forms.ModelChoiceField):
"""A select field for a user that uses their full name for sorting and display."""
def __init__(self, *args, **kwargs):
kwargs['queryset'] = User.objects.all().order_by('last_name', 'first_name')
super(UserField, self).__init__(*args, **kwargs)
def label_from_instance(self, user):
"""Return the user's full name."""
return user.get_full_name()
| [
"justin.locsei@oberlin.edu"
] | justin.locsei@oberlin.edu |
f94cf11d95f5bda38870aba5378e6b1b03e8652a | 742c5bfcff91a454dfe6df0be6d98408fa990569 | /bnum/tests/implement.py | 61f86421302cdcd18793cc18df15bc5d8d1f9db3 | [] | no_license | andrewcooke/bnum | ea9179c1379a1ea92d68dc361a44414dc7582379 | 7f93379cff5c4605195fdfb3868ba0185f66b20c | refs/heads/master | 2020-05-24T14:48:24.189229 | 2013-06-02T15:34:44 | 2013-06-02T15:34:44 | 10,178,884 | 4 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,656 | py |
from unittest import TestCase
from bnum import ImplicitBnum, ExplicitBnum, from_one
'''
Test various implementation details.
'''
class NoExplicitTest(TestCase):
def test_no_explicit_in_implicit(self):
with self.assertRaises(TypeError):
class Colour(ImplicitBnum):
red = 1
with self.assertRaises(TypeError):
class Colour(ImplicitBnum):
def foo(self): pass
with self.assertRaises(TypeError):
class Number(int, ExplicitBnum, values=from_one):
with implicit:
one = 1
class ImplicitTest(TestCase):
def test_implicit(self):
class Foo(ImplicitBnum):
implicit
explicit
assert Foo.implicit in Foo
assert repr(Foo.implicit) == "Foo('implicit')", repr(Foo.implicit)
def test_explicit(self):
with self.assertRaises(AttributeError):
# this one has the initial implicit shadowing the context
class Bar(ExplicitBnum):
implicit = 1
with implicit:
explicit
class Baz(ExplicitBnum):
explicit = 1
with implicit:
implicit
assert Baz.implicit in Baz
assert Baz.explicit in Baz
assert repr(Baz.implicit) == "Baz('implicit')", repr(Baz.implicit)
class Baf(ExplicitBnum):
with implicit:
explicit
implicit = 1
assert Baf.implicit in Baf
assert Baf.explicit in Baf
assert repr(Baf.implicit) == "Baf(value=1, name='implicit')", repr(Baf.implicit)
| [
"andrew@acooke.org"
] | andrew@acooke.org |
bb40962baa7b16fd1d7cade0ce12acb734b3138e | 236a8988e513bfa286298d426e705f92099dc25a | /examples/torch_tensor_io.py | 2621176e34ced5edf3515316d487af041334fc8a | [
"MIT"
] | permissive | hzy5000/taichi | 99a468ad3efe31e57a0bb60f7321f55dd1537f65 | 73dfd36fa190b0ff39a962e18d2c8cd3b41b32ce | refs/heads/master | 2020-08-28T02:49:58.455700 | 2019-10-25T13:29:31 | 2019-10-25T13:29:31 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,216 | py | import taichi as ti
import numpy as np
import torch
# ti.set_gdb_trigger(True)
ti.cfg.arch = ti.cuda
# n = 1024 * 1024
n = 32
y = ti.var(ti.f32)
# https://pytorch.org/tutorials/beginner/examples_autograd/two_layer_net_custom_function.html
@ti.layout
def values():
# actually useless in thie example
ti.root.dense(ti.i, n).place(y)
ti.root.lazy_grad()
@ti.kernel
def torch_kernel(t: np.ndarray, o: np.ndarray):
for i in range(n):
o[i] = t[i] * t[i]
@ti.kernel
def torch_kernel_2(t_grad: np.ndarray, t:np.ndarray, o_grad: np.ndarray):
for i in range(n):
ti.print(o_grad[i])
t_grad[i] = 2 * t[i] * o_grad[i]
class Sqr(torch.autograd.Function):
@staticmethod
def forward(ctx, inp):
outp = torch.zeros_like(inp)
ctx.save_for_backward(inp)
torch_kernel(inp, outp)
return outp
@staticmethod
def backward(ctx, outp_grad):
outp_grad = outp_grad.contiguous()
inp_grad = torch.zeros_like(outp_grad)
inp, = ctx.saved_tensors
torch_kernel_2(inp_grad, inp, outp_grad)
return inp_grad
sqr = Sqr.apply
X = torch.tensor(2 * np.ones((n, ), dtype=np.float32), device=torch.device('cuda:0'), requires_grad=True)
sqr(X).sum().backward()
print(X.grad.cpu())
| [
"yuanmhu@gmail.com"
] | yuanmhu@gmail.com |
7e6ae7bf76d99e46c491659b5c586f3f419bf314 | 2ff556bb90d2a004e92c07bf4101325f492825b5 | /bot/plugins/lists.py | 12a316e74e0d9146d2daefccde275dcbfdf52a7a | [] | no_license | qinyuhang/qbittorrent-bot | df471f1ef46904dcb0a71291da08a941dc57f6f5 | e7ff17a4513d08e55cfbe1d27afa2729e013933c | refs/heads/master | 2023-09-02T18:25:15.933139 | 2021-11-22T22:20:13 | 2021-11-24T01:20:05 | 411,872,254 | 0 | 0 | null | 2021-11-24T01:20:06 | 2021-09-30T00:38:07 | Python | UTF-8 | Python | false | false | 2,897 | py | import logging
import re
# noinspection PyPackageRequirements
from telegram.ext import CallbackQueryHandler, CallbackContext, MessageHandler, Filters
# noinspection PyPackageRequirements
from telegram import ParseMode, Update, BotCommand
from bot.qbtinstance import qb
from bot.updater import updater
from utils import u
from utils import Permissions
logger = logging.getLogger(__name__)
TORRENT_STRING_COMPACT = """• <code>{short_name}</code> ({progress_pretty}% of {size_pretty}, {state_pretty}, <b>{dl_speed_pretty}/s</b>) \
[<a href="{info_deeplink}">info</a>]"""
TORRENT_STRING_COMPLETED = '• <code>{name}</code> ({size_pretty})'
TORRENTS_CATEGORIES = [r'\/?all', r'\/?completed', r'\/?downloading', r'\/?paused', r'\/?inactive', r'\/?active', r'\/?tostart']
TORRENT_CATEG_REGEX_PATTERN = r'^({})'.format('|'.join(TORRENTS_CATEGORIES))
TORRENT_CATEG_REGEX = re.compile(TORRENT_CATEG_REGEX_PATTERN, re.I)
@u.check_permissions(required_permission=Permissions.READ)
@u.failwithmessage
def on_torrents_list_selection(update: Update, context: CallbackContext):
logger.info('torrents list menu button from %s: %s', update.message.from_user.first_name, context.match[0])
qbfilter = context.match[0]
if qbfilter.startswith('/'):
# remove the "/" if the category has been used as command
qbfilter = qbfilter.replace('/', '')
logger.info('torrents status: %s', qbfilter)
torrents = qb.torrents(filter=qbfilter, sort='dlspeed', reverse=False) or []
if qbfilter == 'tostart':
all_torrents = qb.torrents(filter='all')
completed_torrents = [t.hash for t in qb.torrents(filter='completed')]
active_torrents = [t.hash for t in qb.torrents(filter='active')]
torrents = [t for t in all_torrents if t.hash not in completed_torrents and t.hash not in active_torrents]
logger.info('qbittirrent request returned %d torrents', len(torrents))
if not torrents:
update.message.reply_html('There is no torrent to be listed for <i>{}</i>'.format(qbfilter))
return
if qbfilter == 'completed':
base_string = TORRENT_STRING_COMPLETED # use a shorter string with less info for completed torrents
else:
base_string = TORRENT_STRING_COMPACT
strings_list = [base_string.format(**torrent.dict()) for torrent in torrents]
for strings_chunk in u.split_text(strings_list):
update.message.reply_html('\n'.join(strings_chunk))
updater.add_handler(MessageHandler(Filters.regex(TORRENT_CATEG_REGEX), on_torrents_list_selection), bot_command=[
BotCommand("all", "show all torrents"),
BotCommand("completed", "show completed torrents"),
BotCommand("downloading", "show downloading torrents"),
BotCommand("paused", "show paused torrents"),
BotCommand("inactive", "show inactive torrents"),
BotCommand("tostart", "show torrents that can be started")
])
| [
"numeralzeroone@gmail.com"
] | numeralzeroone@gmail.com |
31e5bdfe0d035cdb5f07f4feb56b9aa681368837 | b6aa9768dbac327943e0220df1c56ce38adc6de1 | /775_n-ary-tree-preorder-traversal.py | 108edc9956ffc6834b3b3b87214f7572d22da8ec | [] | no_license | Khrystynka/LeetCodeProblems | f86e4c1e46f70f874924de137ec5efb2f2518766 | 917bd000c2a055dfa2633440a61ca4ae2b665fe3 | refs/heads/master | 2021-03-17T00:51:10.102494 | 2020-09-28T06:31:03 | 2020-09-28T06:31:03 | 246,954,162 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 569 | py | # Problem Title: N-ary Tree Preorder Traversal
"""
# Definition for a Node.
class Node(object):
def __init__(self, val, children):
self.val = val
self.children = children
"""
class Solution(object):
def preorder(self, root):
"""
:type root: Node
:rtype: List[int]
"""
self.lst = []
def preorder(node):
if node:
self.lst.append(node.val)
for child in node.children:
preorder(child)
preorder(root)
return self.lst
| [
"khrystyna@Khrystynas-MacBook-Pro.local"
] | khrystyna@Khrystynas-MacBook-Pro.local |
747ee9a7651abc0c1c4d1f012b95d88e8a937ccc | baf3996414315ffb60470c40c7ad797bf4e6897f | /02_ai/1_ml/9_xgboost/code/chapter_15/plot_performance.py | 10e2225a0794e6e34bca57b257321b24d317a4ea | [
"MIT"
] | permissive | thiago-allue/portfolio | 8fbbecca7ce232567aebe97c19944f444508b7f4 | 0acd8253dc7c5150fef9b2d46eead3db83ca42de | refs/heads/main | 2023-03-15T22:10:21.109707 | 2022-09-14T17:04:35 | 2022-09-14T17:04:35 | 207,919,073 | 0 | 0 | null | 2019-11-13T18:18:23 | 2019-09-11T22:40:46 | Python | UTF-8 | Python | false | false | 343 | py | # Plot performance for learning_rate=0.1
from matplotlib import pyplot
n_estimators = [100, 200, 300, 400, 500]
loss = [-0.001239, -0.001153, -0.001152, -0.001153, -0.001153]
pyplot.plot(n_estimators, loss)
pyplot.xlabel('n_estimators')
pyplot.ylabel('Log Loss')
pyplot.title('XGBoost learning_rate=0.1 n_estimators vs Log Loss')
pyplot.show() | [
"thiago.allue@yahoo.com"
] | thiago.allue@yahoo.com |
62d9537af18fc3c30d69a9a1d9bf0cc5f02f761c | d94b6845aeeb412aac6850b70e22628bc84d1d6d | /better_storylines/src/evaluate_story_cloze_test.py | ea25a83e6fd346a0dd0ad056702fa1e1c874bb62 | [
"CC-BY-4.0",
"Apache-2.0"
] | permissive | ishine/google-research | 541aea114a68ced68736340e037fc0f8257d1ea2 | c1ae273841592fce4c993bf35cdd0a6424e73da4 | refs/heads/master | 2023-06-08T23:02:25.502203 | 2023-05-31T01:00:56 | 2023-05-31T01:06:45 | 242,478,569 | 0 | 0 | Apache-2.0 | 2020-06-23T01:55:11 | 2020-02-23T07:59:42 | Jupyter Notebook | UTF-8 | Python | false | false | 4,258 | py | # coding=utf-8
# Copyright 2023 The Google Research Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Output the overall test accuracy on the 2016 test set.
"""
import os
from absl import app
from absl import flags
from absl import logging
import gin
import gin.tf
import models
import rocstories_sentence_embeddings
import tensorflow.compat.v2 as tf
import tensorflow_datasets.public_api as tfds
import utils
gfile = tf.io.gfile
FLAGS = flags.FLAGS
flags.DEFINE_string('base_dir', '/tmp/model',
'Base directory containing checkpoints and .gin config.')
flags.DEFINE_string('data_dir', 'tfds_datasets',
'Where to look for TFDS datasets.')
flags.DEFINE_multi_string('gin_bindings', [], 'Not used.')
tf.enable_v2_behavior()
@gin.configurable('dataset')
def prepare_dataset(dataset_name=gin.REQUIRED,
shuffle_input_sentences=False,
num_eval_examples=2000,
batch_size=32):
"""Create batched, properly-formatted datasets from the TFDS datasets.
Args:
dataset_name: Name of TFDS dataset.
shuffle_input_sentences: Not used during evaluation, but arg still needed
for gin compatibility.
num_eval_examples: Number of examples to use during evaluation. For the
nolabel evaluation, this is also the number of distractors we choose
between.
batch_size: Batch size.
Returns:
A dictionary mapping from the dataset split to a Dataset object.
"""
del batch_size
del num_eval_examples
del shuffle_input_sentences
dataset = tfds.load(
dataset_name,
data_dir=FLAGS.data_dir,
split=rocstories_sentence_embeddings.TEST_2016,
download=False)
dataset = utils.build_validation_dataset(dataset)
return dataset
def eval_single_checkpoint(model, dataset):
"""Runs quantitative evaluation on a single checkpoint."""
test_2016_accuracy = tf.keras.metrics.Accuracy(name='test_spring2016_acc')
for x, fifth_embedding_1, fifth_embedding_2, label in dataset:
correct = utils.eval_step(
model, x, fifth_embedding_1, fifth_embedding_2, label)
test_2016_accuracy(1, correct)
logging.warning('Test accuracy: %f', test_2016_accuracy.result())
return test_2016_accuracy.result().numpy().tolist()
def run_eval(base_dir):
"""Writes model's predictions in proper format to [base_dir]/answer.txt."""
best_checkpoint_name = utils.pick_best_checkpoint(base_dir)
dataset = prepare_dataset()
checkpoint_path = os.path.join(base_dir, best_checkpoint_name)
embedding_dim = tf.compat.v1.data.get_output_shapes(dataset)[0][-1]
num_input_sentences = tf.compat.v1.data.get_output_shapes(dataset)[0][1]
model = models.build_model(
num_input_sentences=num_input_sentences, embedding_dim=embedding_dim)
checkpoint = tf.train.Checkpoint(model=model)
checkpoint.restore(checkpoint_path).expect_partial()
logging.info('Evaluating with checkpoint: "%s"', checkpoint_path)
test_accuracy = eval_single_checkpoint(model, dataset)
with gfile.GFile(os.path.join(base_dir, 'test_spring2016_acc.txt'), 'w') as f:
f.write(str(test_accuracy))
def main(argv):
del argv
base_dir = FLAGS.base_dir
# Load gin.config settings stored in model directory. It might take some time
# for the train script to start up and actually write out a gin config file.
# Wait 10 minutes (periodically checking for file existence) before giving up.
gin_config_path = os.path.join(base_dir, 'config.gin')
if not gfile.exists(gin_config_path):
raise ValueError('Could not find config.gin in "%s"' % base_dir)
gin.parse_config_file(gin_config_path, skip_unknown=True)
gin.finalize()
run_eval(base_dir)
if __name__ == '__main__':
app.run(main)
| [
"copybara-worker@google.com"
] | copybara-worker@google.com |
80c993714af78f5cb4e4762af61c743c91c48e70 | 379f10752e962b0695bdedcde5d55584b50cb0c0 | /setup.py | 2b616540f4bcfdb409eaba1bf1883e228b215dc0 | [
"MIT"
] | permissive | mahlettaye/Lidar_3DEM | e6a9c875c2900a1d7c9e3c490d4625a11a6b7a29 | af0a10afb7a6acd3e7eb601cb2152015458ed52e | refs/heads/master | 2023-08-13T10:35:13.895085 | 2021-10-20T04:18:32 | 2021-10-20T04:18:32 | 400,816,265 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 650 | py | from setuptools import setup, find_packages
classifiers = [
'Development Status :: 2 - Pre-Alpha',
'Intended Audience :: Education',
'Programming Language :: Python :: 3'
]
setup(
author='Mahlet Taye',
author_email='formahlet@gmail.com',
name='LIDAR_3DEM',
version='0.1.0',
description='A python package used to featch and visuaize raster data',
long_description_content_type='text/markdown',
long_description=open('README.md').read(),
url='',
classifiers=classifiers,
keywords='LIDAR',
packages=find_packages(),
install_requires=['georasters','gdal','pdal','geopandas', 'matplotlib']) | [
"you@example.com"
] | you@example.com |
6480af91ae31cc3d289d896f4297cc1af6742c2f | 00a9295409b78a53ce790f7ab44931939f42c0e0 | /FPGA/apio/iCEBreaker/FIR_Filter/sympy/venv/lib/python3.8/site-packages/sympy/matrices/tests/test_normalforms.py | 5be332446dac4ca65b0ac9c87b3e1e1666f6869d | [
"Apache-2.0"
] | permissive | klei22/Tech-OnBoarding-Class | c21f0762d2d640d5e9cb124659cded5c865b32d4 | 960e962322c37be9117e0523641f8b582a2beceb | refs/heads/master | 2022-11-10T13:17:39.128342 | 2022-10-25T08:59:48 | 2022-10-25T08:59:48 | 172,292,871 | 2 | 3 | Apache-2.0 | 2019-05-19T00:26:32 | 2019-02-24T03:50:35 | C | UTF-8 | Python | false | false | 882 | py | from sympy import Symbol, Poly
from sympy.polys.solvers import RawMatrix as Matrix
from sympy.matrices.normalforms import invariant_factors, smith_normal_form
from sympy.polys.domains import ZZ, QQ
def test_smith_normal():
m = Matrix([[12, 6, 4,8],[3,9,6,12],[2,16,14,28],[20,10,10,20]])
setattr(m, 'ring', ZZ)
smf = Matrix([[1, 0, 0, 0], [0, 10, 0, 0], [0, 0, -30, 0], [0, 0, 0, 0]])
assert smith_normal_form(m) == smf
x = Symbol('x')
m = Matrix([[Poly(x-1), Poly(1, x),Poly(-1,x)],
[0, Poly(x), Poly(-1,x)],
[Poly(0,x),Poly(-1,x),Poly(x)]])
setattr(m, 'ring', QQ[x])
invs = (Poly(1, x, domain='QQ'), Poly(x - 1, domain='QQ'), Poly(x**2 - 1, domain='QQ'))
assert invariant_factors(m) == invs
m = Matrix([[2, 4]])
setattr(m, 'ring', ZZ)
smf = Matrix([[2, 0]])
assert smith_normal_form(m) == smf
| [
"kaunalei@gmail.com"
] | kaunalei@gmail.com |
1dd04629d4fe0a0932f648ef6f07170eb03fe9e0 | 46ac0965941d06fde419a6f216db2a653a245dbd | /sdks/python/test/test_AppDistributionGroupUsersRequest.py | 93310f6c325a96c20600d8c0e085dcb9a65832aa | [
"MIT",
"Unlicense"
] | permissive | b3nab/appcenter-sdks | 11f0bab00d020abb30ee951f7656a3d7ed783eac | bcc19c998b5f648a147f0d6a593dd0324e2ab1ea | refs/heads/master | 2022-01-27T15:06:07.202852 | 2019-05-19T00:12:43 | 2019-05-19T00:12:43 | 187,386,747 | 0 | 3 | MIT | 2022-01-22T07:57:59 | 2019-05-18T17:29:21 | Python | UTF-8 | Python | false | false | 1,066 | py | # coding: utf-8
"""
App Center Client
Microsoft Visual Studio App Center API # noqa: E501
OpenAPI spec version: preview
Contact: benedetto.abbenanti@gmail.com
Project Repository: https://github.com/b3nab/appcenter-sdks
"""
from __future__ import absolute_import
import unittest
import appcenter_sdk
from AppDistributionGroupUsersRequest.clsAppDistributionGroupUsersRequest import AppDistributionGroupUsersRequest # noqa: E501
from appcenter_sdk.rest import ApiException
class TestAppDistributionGroupUsersRequest(unittest.TestCase):
"""AppDistributionGroupUsersRequest unit test stubs"""
def setUp(self):
pass
def tearDown(self):
pass
def testAppDistributionGroupUsersRequest(self):
"""Test AppDistributionGroupUsersRequest"""
# FIXME: construct object with mandatory attributes with example values
# model = appcenter_sdk.models.clsAppDistributionGroupUsersRequest.AppDistributionGroupUsersRequest() # noqa: E501
pass
if __name__ == '__main__':
unittest.main()
| [
"b3nab@users.noreply.github.com"
] | b3nab@users.noreply.github.com |
d56787796d37eb49a9a784dcd66499f4274899f0 | 20387589a922dcfdfb47c23c692318b9cc4f7515 | /listings/views.py | a7b9bef8da7811f2fc3be8f3a7228589097c1cc8 | [] | no_license | shahjalalh/btre | c95a951eea32bde64d8cd01a73771efed0b99125 | 56069a03b8fc35febdb864312aefb368404d3090 | refs/heads/master | 2020-05-22T03:27:19.401585 | 2019-10-21T10:44:40 | 2019-10-21T10:44:40 | 186,212,698 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,050 | py | from django.shortcuts import render, get_object_or_404
from django.core.paginator import EmptyPage, PageNotAnInteger, Paginator
from listings.models import Listing
from listings.choices import price_choices, bedroom_choices, state_choices
# Create your views here.
def index(request):
listings = Listing.objects.order_by('-list_date').filter(is_published=True)
paginator = Paginator(listings, 3)
page = request.GET.get('page')
paged_listings = paginator.get_page(page)
context = {
'listings': paged_listings
}
return render(request, 'listings/listings.html', context)
def listing(request, listing_id):
listing = get_object_or_404(Listing, pk=listing_id)
context = {
'listing': listing
}
return render(request, 'listings/listing.html', context)
def search(request):
queryset_list = Listing.objects.order_by('-list_date')
# Keywords
if 'keywords' in request.GET:
keywords = request.GET['keywords']
if keywords:
queryset_list = queryset_list.filter(description__icontains=keywords)
# City
if 'city' in request.GET:
city = request.GET['city']
if city:
queryset_list = queryset_list.filter(city__iexact=city)
# State
if 'state' in request.GET:
state = request.GET['state']
if state:
queryset_list = queryset_list.filter(state__iexact=state)
# Bedrooms
if 'bedrooms' in request.GET:
bedrooms = request.GET['bedrooms']
if bedrooms:
queryset_list = queryset_list.filter(bedrooms__lte=bedrooms)
# Price
if 'price' in request.GET:
price = request.GET['price']
if price:
queryset_list = queryset_list.filter(price__lte=price)
context = {
'state_choices': state_choices,
'bedroom_choices': bedroom_choices,
'price_choices': price_choices,
'listings': queryset_list,
'values': request.GET
}
return render(request, 'listings/search.html', context)
| [
"shahjalal.tipu@gmail.com"
] | shahjalal.tipu@gmail.com |
a50a3721da05a0a27cecaa6aa1c042b2b6af8159 | 117626e3c32dc848519d319635cb995bbe78dd43 | /examples/imdb.py | 017f8bf4266a595c1d70cff9213516ae760ed660 | [
"MIT"
] | permissive | amcs1729/Keras-IndRNN | 412a183a0f1149ce905ebef6748330079ae0ad8d | e5f1da3c4d191bd528491f11ae7bdf0fdb54df21 | refs/heads/master | 2022-11-15T03:24:41.366368 | 2020-07-10T15:34:29 | 2020-07-10T15:34:29 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,254 | py | '''Trains a Minimal RNN on the IMDB sentiment classification task.
The dataset is actually too small for Minimal RNN to be of any advantage
compared to simpler, much faster methods such as TF-IDF + LogReg.
'''
from __future__ import print_function
import os
from keras.preprocessing import sequence
from keras.models import Sequential
from keras.layers import Dense, Embedding, BatchNormalization
from keras.callbacks import ModelCheckpoint
from keras.datasets import imdb
from ind_rnn import IndRNN
if not os.path.exists('weights'):
os.makedirs('weights/')
max_features = 20000
maxlen = 500 # cut texts after this number of words (among top max_features most common words)
batch_size = 128
print('Loading data...')
(x_train, y_train), (x_test, y_test) = imdb.load_data(num_words=max_features)
print(len(x_train), 'train sequences')
print(len(x_test), 'test sequences')
print('Pad sequences (samples x time)')
x_train = sequence.pad_sequences(x_train, maxlen=maxlen)
x_test = sequence.pad_sequences(x_test, maxlen=maxlen)
print('x_train shape:', x_train.shape)
print('x_test shape:', x_test.shape)
print('Build model...')
model = Sequential()
model.add(Embedding(max_features, 128, input_shape=(maxlen,)))
model.add(IndRNN(128, recurrent_clip_min=-1, recurrent_clip_max=-1, dropout=0.0, recurrent_dropout=0.0,
return_sequences=True))
model.add(IndRNN(128, recurrent_clip_min=-1, recurrent_clip_max=-1, dropout=0.0, recurrent_dropout=0.0,
return_sequences=False))
model.add(Dense(1, activation='sigmoid'))
# try using different optimizers and different optimizer configs
model.compile(loss='binary_crossentropy',
optimizer='adam',
metrics=['accuracy'])
model.summary()
print('Train...')
model.fit(x_train, y_train,
batch_size=batch_size,
epochs=15,
validation_data=(x_test, y_test),
callbacks=[ModelCheckpoint('weights/imdb_indrnn.h5', monitor='val_acc',
save_best_only=True, save_weights_only=True)])
model.load_weights('weights/imdb_indrnn.h5')
score, acc = model.evaluate(x_test, y_test,
batch_size=batch_size)
print('Test score:', score)
print('Test accuracy:', acc)
| [
"titu1994@gmail.com"
] | titu1994@gmail.com |
f89f783a1d90fd93f69cab3aa560869306aa5aad | 24fe1f54fee3a3df952ca26cce839cc18124357a | /servicegraph/lib/python2.7/site-packages/acimodel-4.0_3d-py2.7.egg/cobra/modelimpl/vz/rtintraepg.py | 60a181a70e329f73daad40d7b7e163a4d27998c4 | [] | no_license | aperiyed/servicegraph-cloudcenter | 4b8dc9e776f6814cf07fe966fbd4a3481d0f45ff | 9eb7975f2f6835e1c0528563a771526896306392 | refs/heads/master | 2023-05-10T17:27:18.022381 | 2020-01-20T09:18:28 | 2020-01-20T09:18:28 | 235,065,676 | 0 | 0 | null | 2023-05-01T21:19:14 | 2020-01-20T09:36:37 | Python | UTF-8 | Python | false | false | 6,137 | py | # coding=UTF-8
# **********************************************************************
# Copyright (c) 2013-2019 Cisco Systems, Inc. All rights reserved
# written by zen warriors, do not modify!
# **********************************************************************
from cobra.mit.meta import ClassMeta
from cobra.mit.meta import StatsClassMeta
from cobra.mit.meta import CounterMeta
from cobra.mit.meta import PropMeta
from cobra.mit.meta import Category
from cobra.mit.meta import SourceRelationMeta
from cobra.mit.meta import NamedSourceRelationMeta
from cobra.mit.meta import TargetRelationMeta
from cobra.mit.meta import DeploymentPathMeta, DeploymentCategory
from cobra.model.category import MoCategory, PropCategory, CounterCategory
from cobra.mit.mo import Mo
# ##################################################
class RtIntraEpg(Mo):
"""
Mo doc not defined in techpub!!!
"""
meta = TargetRelationMeta("cobra.model.vz.RtIntraEpg", "cobra.model.fv.EPg")
meta.moClassName = "vzRtIntraEpg"
meta.rnFormat = "rtfvIntraEpg-[%(tDn)s]"
meta.category = MoCategory.RELATIONSHIP_FROM_LOCAL
meta.label = "End Point Group"
meta.writeAccessMask = 0x101
meta.readAccessMask = 0x2701
meta.isDomainable = False
meta.isReadOnly = True
meta.isConfigurable = False
meta.isDeletable = False
meta.isContextRoot = False
meta.parentClasses.add("cobra.model.vz.BrCP")
meta.superClasses.add("cobra.model.reln.From")
meta.superClasses.add("cobra.model.reln.Inst")
meta.superClasses.add("cobra.model.pol.NFromRef")
meta.rnPrefixes = [
('rtfvIntraEpg-', True),
]
prop = PropMeta("str", "childAction", "childAction", 4, PropCategory.CHILD_ACTION)
prop.label = "None"
prop.isImplicit = True
prop.isAdmin = True
prop._addConstant("deleteAll", "deleteall", 16384)
prop._addConstant("deleteNonPresent", "deletenonpresent", 8192)
prop._addConstant("ignore", "ignore", 4096)
meta.props.add("childAction", prop)
prop = PropMeta("str", "dn", "dn", 1, PropCategory.DN)
prop.label = "None"
prop.isDn = True
prop.isImplicit = True
prop.isAdmin = True
prop.isCreateOnly = True
meta.props.add("dn", prop)
prop = PropMeta("str", "lcOwn", "lcOwn", 9, PropCategory.REGULAR)
prop.label = "None"
prop.isImplicit = True
prop.isAdmin = True
prop.defaultValue = 0
prop.defaultValueStr = "local"
prop._addConstant("implicit", "implicit", 4)
prop._addConstant("local", "local", 0)
prop._addConstant("policy", "policy", 1)
prop._addConstant("replica", "replica", 2)
prop._addConstant("resolveOnBehalf", "resolvedonbehalf", 3)
meta.props.add("lcOwn", prop)
prop = PropMeta("str", "modTs", "modTs", 7, PropCategory.REGULAR)
prop.label = "None"
prop.isImplicit = True
prop.isAdmin = True
prop.defaultValue = 0
prop.defaultValueStr = "never"
prop._addConstant("never", "never", 0)
meta.props.add("modTs", prop)
prop = PropMeta("str", "rn", "rn", 2, PropCategory.RN)
prop.label = "None"
prop.isRn = True
prop.isImplicit = True
prop.isAdmin = True
prop.isCreateOnly = True
meta.props.add("rn", prop)
prop = PropMeta("str", "status", "status", 3, PropCategory.STATUS)
prop.label = "None"
prop.isImplicit = True
prop.isAdmin = True
prop._addConstant("created", "created", 2)
prop._addConstant("deleted", "deleted", 8)
prop._addConstant("modified", "modified", 4)
meta.props.add("status", prop)
prop = PropMeta("str", "tCl", "tCl", 33322, PropCategory.REGULAR)
prop.label = "Target-class"
prop.isImplicit = True
prop.isAdmin = True
prop.defaultValue = 1899
prop.defaultValueStr = "fvEPg"
prop._addConstant("dhcpCRelPg", None, 1467)
prop._addConstant("dhcpPRelPg", None, 1466)
prop._addConstant("fvAEPg", None, 1981)
prop._addConstant("fvEPg", None, 1899)
prop._addConstant("fvTnlEPg", None, 9196)
prop._addConstant("infraCEPg", None, 4326)
prop._addConstant("infraPEPg", None, 4325)
prop._addConstant("l2extInstP", None, 1746)
prop._addConstant("l3extInstP", None, 1775)
prop._addConstant("l3extInstPDef", None, 5987)
prop._addConstant("mgmtInB", None, 2194)
prop._addConstant("unspecified", "unspecified", 0)
prop._addConstant("vnsEPpInfo", None, 4694)
prop._addConstant("vnsREPpInfo", None, 5959)
prop._addConstant("vnsSDEPpInfo", None, 5958)
prop._addConstant("vnsSHEPpInfo", None, 6131)
meta.props.add("tCl", prop)
prop = PropMeta("str", "tDn", "tDn", 33321, PropCategory.REGULAR)
prop.label = "Target-dn"
prop.isConfig = True
prop.isAdmin = True
prop.isCreateOnly = True
prop.isNaming = True
meta.props.add("tDn", prop)
meta.namingProps.append(getattr(meta.props, "tDn"))
getattr(meta.props, "tDn").needDelimiter = True
# Deployment Meta
meta.deploymentQuery = True
meta.deploymentType = "Ancestor"
meta.deploymentQueryPaths.append(DeploymentPathMeta("CtrctIfToEPgCons", "Contract Interface EPG Consumer", "cobra.model.fv.EPg"))
meta.deploymentQueryPaths.append(DeploymentPathMeta("CtrctIfToEPgConsNwIf", "Contract Interface EPG Consumer Interface", "cobra.model.nw.If"))
meta.deploymentQueryPaths.append(DeploymentPathMeta("ABrCPToAnyProv", "Any To Provider", "cobra.model.nw.If"))
meta.deploymentQueryPaths.append(DeploymentPathMeta("ABrCPToAnyCons", "Any To Consumer", "cobra.model.nw.If"))
meta.deploymentQueryPaths.append(DeploymentPathMeta("ABrCPToEPgProv", "EPG Provider", "cobra.model.nw.If"))
meta.deploymentQueryPaths.append(DeploymentPathMeta("ABrCPToEPgCons", "EPG Consumer", "cobra.model.nw.If"))
meta.deploymentQueryPaths.append(DeploymentPathMeta("GraphInstancesinacontract", "Graph Instances", "cobra.model.vns.GraphInst"))
def __init__(self, parentMoOrDn, tDn, markDirty=True, **creationProps):
namingVals = [tDn]
Mo.__init__(self, parentMoOrDn, markDirty, *namingVals, **creationProps)
# End of package file
# ##################################################
| [
"rrishike@cisco.com"
] | rrishike@cisco.com |
3922819099c6ac8d461fdda7a281e41f2ade3c9c | 03dfcd4bd41ff9ba76e67895e96a9794ad003a31 | /tutorial/9-classes/object.py | e2a14344132cd7641300d8541f7d7f4f772ff7bf | [] | no_license | gittygitgit/python-sandbox | 71ca68fcc90745931737f7aeb61306ac3417ce60 | 3b3e0eaf4edad13aabe51eb3258ebe9e6b951c67 | refs/heads/master | 2021-01-19T02:41:17.047711 | 2018-11-22T18:07:15 | 2018-11-22T18:07:15 | 39,742,770 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,007 | py | #!/usr/bin/python
# is there an Object class that everything extends from?
# yes
# how do you get a handle to the Object class?
print object
# object has no baseclasses
print object.__bases__ # returns empty tuple ()
# difference between a type and a base...
# each object has a single type...it's type
print type("test") # type string
# a base is like a superclass
print type("sdsdf").__bases__
print type(type("sdfsdf")).__bases__
# type returns an instance's __class__ attribute
print isinstance("sdfsdf",str)
# what's the difference between type and object?
# both are primitive objects
# every object has a __class__ attribute
# every type object has a __bases__ attribute
# test if str is a subclass of object
issubclass(str,object) # true if str extends object directly or one of it's baseclasses extends object
# identity operator
a="one"
b=a
print "id(a): " + id(a) + ", id(b):" + id(b)
print a is b # prints True
c="two"
print a is c # prints False
#
# stuff provided by object
| [
"grudkowm@Michaels-Air-2.fios-router.home"
] | grudkowm@Michaels-Air-2.fios-router.home |
9f55979e50154c96648c73a1506a7753eef4cfda | 0c469c4100fe9d352e83731688e388062a3c55c7 | /bactracking/37. Sudoku Solver.py | 96b9e17b6889f4bd966b4d102c71d7b9b3372080 | [] | no_license | asperaa/back_to_grind | 9e055c7e6561384e5b7ae52f01063e4beb34a298 | 5ea1976b9d5c6d04800e296e45e8ff90fdde5001 | refs/heads/master | 2022-12-16T18:32:01.443743 | 2020-09-05T13:29:39 | 2020-09-05T13:29:39 | 254,910,528 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,020 | py | """We are the captains of our ships, and we stay 'till the end. We see our stories through.
"""
"""37. Sudoku Solver
"""
class Solution:
def solveSudoku(self, board):
self.solve(board)
def solve(self, board):
for i in range(9):
for j in range(9):
if board[i][j] == ".":
for ch in "123456789":
if self.is_valid(board, i, j, ch):
board[i][j] = ch
if self.solve(board):
return True
board[i][j] = "."
return False
return True
def is_valid(self, board, row, col, ch):
square_row = 3 * (row//3)
square_col = 3 * (col//3)
for i in range(9):
if board[i][col] == ch or board[row][i] == ch:
return False
if board[square_row + i // 3][square_col + i % 3] == ch:
return False
return True | [
"adityaankur44@gmail.com"
] | adityaankur44@gmail.com |
fd33f9e1e4befb522cdb051178add1f66fc9e2ad | c071eb46184635818e8349ce9c2a78d6c6e460fc | /system/python_stubs/-745935208/PyQt5/QtWidgets/QListWidgetItem.py | 0787a284749b0903ac1dee930f442569072e297e | [] | no_license | sidbmw/PyCharm-Settings | a71bc594c83829a1522e215155686381b8ac5c6e | 083f9fe945ee5358346e5d86b17130d521d1b954 | refs/heads/master | 2020-04-05T14:24:03.216082 | 2018-12-28T02:29:29 | 2018-12-28T02:29:29 | 156,927,399 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,602 | py | # encoding: utf-8
# module PyQt5.QtWidgets
# from C:\Users\siddh\AppData\Local\Programs\Python\Python37\lib\site-packages\PyQt5\QtWidgets.pyd
# by generator 1.146
# no doc
# imports
import PyQt5.QtCore as __PyQt5_QtCore
import PyQt5.QtGui as __PyQt5_QtGui
import sip as __sip
class QListWidgetItem(__sip.wrapper):
"""
QListWidgetItem(parent: QListWidget = None, type: int = QListWidgetItem.Type)
QListWidgetItem(str, parent: QListWidget = None, type: int = QListWidgetItem.Type)
QListWidgetItem(QIcon, str, parent: QListWidget = None, type: int = QListWidgetItem.Type)
QListWidgetItem(QListWidgetItem)
"""
def background(self): # real signature unknown; restored from __doc__
""" background(self) -> QBrush """
pass
def checkState(self): # real signature unknown; restored from __doc__
""" checkState(self) -> Qt.CheckState """
pass
def clone(self): # real signature unknown; restored from __doc__
""" clone(self) -> QListWidgetItem """
return QListWidgetItem
def data(self, p_int): # real signature unknown; restored from __doc__
""" data(self, int) -> Any """
pass
def flags(self): # real signature unknown; restored from __doc__
""" flags(self) -> Qt.ItemFlags """
pass
def font(self): # real signature unknown; restored from __doc__
""" font(self) -> QFont """
pass
def foreground(self): # real signature unknown; restored from __doc__
""" foreground(self) -> QBrush """
pass
def icon(self): # real signature unknown; restored from __doc__
""" icon(self) -> QIcon """
pass
def isHidden(self): # real signature unknown; restored from __doc__
""" isHidden(self) -> bool """
return False
def isSelected(self): # real signature unknown; restored from __doc__
""" isSelected(self) -> bool """
return False
def listWidget(self): # real signature unknown; restored from __doc__
""" listWidget(self) -> QListWidget """
return QListWidget
def read(self, QDataStream): # real signature unknown; restored from __doc__
""" read(self, QDataStream) """
pass
def setBackground(self, Union, QBrush=None, QColor=None, Qt_GlobalColor=None, QGradient=None): # real signature unknown; restored from __doc__
""" setBackground(self, Union[QBrush, QColor, Qt.GlobalColor, QGradient]) """
pass
def setCheckState(self, Qt_CheckState): # real signature unknown; restored from __doc__
""" setCheckState(self, Qt.CheckState) """
pass
def setData(self, p_int, Any): # real signature unknown; restored from __doc__
""" setData(self, int, Any) """
pass
def setFlags(self, Union, Qt_ItemFlags=None, Qt_ItemFlag=None): # real signature unknown; restored from __doc__
""" setFlags(self, Union[Qt.ItemFlags, Qt.ItemFlag]) """
pass
def setFont(self, QFont): # real signature unknown; restored from __doc__
""" setFont(self, QFont) """
pass
def setForeground(self, Union, QBrush=None, QColor=None, Qt_GlobalColor=None, QGradient=None): # real signature unknown; restored from __doc__
""" setForeground(self, Union[QBrush, QColor, Qt.GlobalColor, QGradient]) """
pass
def setHidden(self, bool): # real signature unknown; restored from __doc__
""" setHidden(self, bool) """
pass
def setIcon(self, QIcon): # real signature unknown; restored from __doc__
""" setIcon(self, QIcon) """
pass
def setSelected(self, bool): # real signature unknown; restored from __doc__
""" setSelected(self, bool) """
pass
def setSizeHint(self, QSize): # real signature unknown; restored from __doc__
""" setSizeHint(self, QSize) """
pass
def setStatusTip(self, p_str): # real signature unknown; restored from __doc__
""" setStatusTip(self, str) """
pass
def setText(self, p_str): # real signature unknown; restored from __doc__
""" setText(self, str) """
pass
def setTextAlignment(self, p_int): # real signature unknown; restored from __doc__
""" setTextAlignment(self, int) """
pass
def setToolTip(self, p_str): # real signature unknown; restored from __doc__
""" setToolTip(self, str) """
pass
def setWhatsThis(self, p_str): # real signature unknown; restored from __doc__
""" setWhatsThis(self, str) """
pass
def sizeHint(self): # real signature unknown; restored from __doc__
""" sizeHint(self) -> QSize """
pass
def statusTip(self): # real signature unknown; restored from __doc__
""" statusTip(self) -> str """
return ""
def text(self): # real signature unknown; restored from __doc__
""" text(self) -> str """
return ""
def textAlignment(self): # real signature unknown; restored from __doc__
""" textAlignment(self) -> int """
return 0
def toolTip(self): # real signature unknown; restored from __doc__
""" toolTip(self) -> str """
return ""
def type(self): # real signature unknown; restored from __doc__
""" type(self) -> int """
return 0
def whatsThis(self): # real signature unknown; restored from __doc__
""" whatsThis(self) -> str """
return ""
def write(self, QDataStream): # real signature unknown; restored from __doc__
""" write(self, QDataStream) """
pass
def __eq__(self, *args, **kwargs): # real signature unknown
""" Return self==value. """
pass
def __ge__(self, *args, **kwargs): # real signature unknown
""" Return self>=value. """
pass
def __gt__(self, *args, **kwargs): # real signature unknown
""" Return self>value. """
pass
def __init__(self, *__args): # real signature unknown; restored from __doc__ with multiple overloads
pass
def __le__(self, *args, **kwargs): # real signature unknown
""" Return self<=value. """
pass
def __lt__(self, *args, **kwargs): # real signature unknown
""" Return self<value. """
pass
def __ne__(self, *args, **kwargs): # real signature unknown
""" Return self!=value. """
pass
__weakref__ = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""list of weak references to the object (if defined)"""
Type = 0
UserType = 1000
__hash__ = None
| [
"siddharthnatamai@gmail.com"
] | siddharthnatamai@gmail.com |
c285438f7e8e453c14624038b1b409d0666a8568 | 33ff050337ba4575042032d9602bf84dcf81435e | /test/functional/test_framework/authproxy.py | 270128a77fe0e70ba6108139c410b737c1c6b010 | [
"MIT"
] | permissive | robinadaptor/chronon | 5256b33fbe797bbdeb9c9a3c2091f0592afe6614 | 630b3945824c1b1cd2ea67ca80835a9f669b9124 | refs/heads/master | 2020-07-11T06:27:01.758237 | 2019-12-17T20:53:48 | 2019-12-17T20:53:48 | 145,383,639 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,620 | py | # Copyright (c) 2011 Jeff Garzik
#
# Previous copyright, from python-jsonrpc/jsonrpc/proxy.py:
#
# Copyright (c) 2007 Jan-Klaas Kollhof
#
# This file is part of jsonrpc.
#
# jsonrpc is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation; either version 2.1 of the License, or
# (at your option) any later version.
#
# This software is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this software; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
"""HTTP proxy for opening RPC connection to chronond.
AuthServiceProxy has the following improvements over python-jsonrpc's
ServiceProxy class:
- HTTP connections persist for the life of the AuthServiceProxy object
(if server supports HTTP/1.1)
- sends protocol 'version', per JSON-RPC 1.1
- sends proper, incrementing 'id'
- sends Basic HTTP authentication headers
- parses all JSON numbers that look like floats as Decimal
- uses standard Python json lib
"""
import base64
import decimal
import http.client
import json
import logging
import socket
import time
import urllib.parse
HTTP_TIMEOUT = 300
USER_AGENT = "AuthServiceProxy/0.1"
log = logging.getLogger("BitcoinRPC")
class JSONRPCException(Exception):
def __init__(self, rpc_error):
try:
errmsg = '%(message)s (%(code)i)' % rpc_error
except (KeyError, TypeError):
errmsg = ''
super().__init__(errmsg)
self.error = rpc_error
def EncodeDecimal(o):
if isinstance(o, decimal.Decimal):
return str(o)
raise TypeError(repr(o) + " is not JSON serializable")
class AuthServiceProxy():
__id_count = 0
# ensure_ascii: escape unicode as \uXXXX, passed to json.dumps
def __init__(self, service_url, service_name=None, timeout=HTTP_TIMEOUT, connection=None, ensure_ascii=True):
self.__service_url = service_url
self._service_name = service_name
self.ensure_ascii = ensure_ascii # can be toggled on the fly by tests
self.__url = urllib.parse.urlparse(service_url)
port = 80 if self.__url.port is None else self.__url.port
user = None if self.__url.username is None else self.__url.username.encode('utf8')
passwd = None if self.__url.password is None else self.__url.password.encode('utf8')
authpair = user + b':' + passwd
self.__auth_header = b'Basic ' + base64.b64encode(authpair)
if connection:
# Callables re-use the connection of the original proxy
self.__conn = connection
elif self.__url.scheme == 'https':
self.__conn = http.client.HTTPSConnection(self.__url.hostname, port, timeout=timeout)
else:
self.__conn = http.client.HTTPConnection(self.__url.hostname, port, timeout=timeout)
def __getattr__(self, name):
if name.startswith('__') and name.endswith('__'):
# Python internal stuff
raise AttributeError
if self._service_name is not None:
name = "%s.%s" % (self._service_name, name)
return AuthServiceProxy(self.__service_url, name, connection=self.__conn)
def _request(self, method, path, postdata):
'''
Do a HTTP request, with retry if we get disconnected (e.g. due to a timeout).
This is a workaround for https://bugs.python.org/issue3566 which is fixed in Python 3.5.
'''
headers = {'Host': self.__url.hostname,
'User-Agent': USER_AGENT,
'Authorization': self.__auth_header,
'Content-type': 'application/json'}
try:
self.__conn.request(method, path, postdata, headers)
return self._get_response()
except http.client.BadStatusLine as e:
if e.line == "''": # if connection was closed, try again
self.__conn.close()
self.__conn.request(method, path, postdata, headers)
return self._get_response()
else:
raise
except (BrokenPipeError, ConnectionResetError):
# Python 3.5+ raises BrokenPipeError instead of BadStatusLine when the connection was reset
# ConnectionResetError happens on FreeBSD with Python 3.4
self.__conn.close()
self.__conn.request(method, path, postdata, headers)
return self._get_response()
def get_request(self, *args):
AuthServiceProxy.__id_count += 1
log.debug("-%s-> %s %s" % (AuthServiceProxy.__id_count, self._service_name,
json.dumps(args, default=EncodeDecimal, ensure_ascii=self.ensure_ascii)))
return {'version': '1.1',
'method': self._service_name,
'params': args,
'id': AuthServiceProxy.__id_count}
def __call__(self, *args, **argsn):
postdata = json.dumps(self.get_request(*args), default=EncodeDecimal, ensure_ascii=self.ensure_ascii)
response = self._request('POST', self.__url.path, postdata.encode('utf-8'))
if response['error'] is not None:
raise JSONRPCException(response['error'])
elif 'result' not in response:
raise JSONRPCException({
'code': -343, 'message': 'missing JSON-RPC result'})
else:
return response['result']
def batch(self, rpc_call_list):
postdata = json.dumps(list(rpc_call_list), default=EncodeDecimal, ensure_ascii=self.ensure_ascii)
log.debug("--> " + postdata)
return self._request('POST', self.__url.path, postdata.encode('utf-8'))
def _get_response(self):
req_start_time = time.time()
try:
http_response = self.__conn.getresponse()
except socket.timeout as e:
raise JSONRPCException({
'code': -344,
'message': '%r RPC took longer than %f seconds. Consider '
'using larger timeout for calls that take '
'longer to return.' % (self._service_name,
self.__conn.timeout)})
if http_response is None:
raise JSONRPCException({
'code': -342, 'message': 'missing HTTP response from server'})
content_type = http_response.getheader('Content-Type')
if content_type != 'application/json':
raise JSONRPCException({
'code': -342, 'message': 'non-JSON HTTP response with \'%i %s\' from server' % (http_response.status, http_response.reason)})
responsedata = http_response.read().decode('utf8')
response = json.loads(responsedata, parse_float=decimal.Decimal)
elapsed = time.time() - req_start_time
if "error" in response and response["error"] is None:
log.debug("<-%s- [%.6f] %s" % (response["id"], elapsed, json.dumps(response["result"], default=EncodeDecimal, ensure_ascii=self.ensure_ascii)))
else:
log.debug("<-- [%.6f] %s" % (elapsed, responsedata))
return response
def __truediv__(self, relative_uri):
return AuthServiceProxy("{}/{}".format(self.__service_url, relative_uri), self._service_name, connection=self.__conn)
| [
"robin.adaptor@gmail.com"
] | robin.adaptor@gmail.com |
287919d9d917521c060f36cad6e6a8d764a13d3b | b5a9d42f7ea5e26cd82b3be2b26c324d5da79ba1 | /tensorflow/python/util/dispatch_test.py | d57a980d9a3c0a102a073a315e770b888eb16b5b | [
"Apache-2.0"
] | permissive | uve/tensorflow | e48cb29f39ed24ee27e81afd1687960682e1fbef | e08079463bf43e5963acc41da1f57e95603f8080 | refs/heads/master | 2020-11-29T11:30:40.391232 | 2020-01-11T13:43:10 | 2020-01-11T13:43:10 | 230,088,347 | 0 | 0 | Apache-2.0 | 2019-12-25T10:49:15 | 2019-12-25T10:49:14 | null | UTF-8 | Python | false | false | 5,285 | py | # Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for operator dispatch."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.python.framework import ops
from tensorflow.python.framework import test_util
from tensorflow.python.ops import gen_math_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.platform import googletest
from tensorflow.python.platform import test
from tensorflow.python.platform import tf_logging
from tensorflow.python.util import deprecation
from tensorflow.python.util import dispatch
from tensorflow.python.util.tf_export import tf_export
class CustomTensor(object):
"""A fake composite tensor class, for testing type-based dispatching."""
def __init__(self, tensor, score):
self.tensor = ops.convert_to_tensor(tensor)
self.score = score
@tf_export("test_op")
@dispatch.add_dispatch_support
def test_op(x, y, z):
"""A fake op for testing dispatch of Python ops."""
return x + (2 * y) + (3 * z)
@test_util.run_all_in_graph_and_eager_modes
class DispatchTest(test_util.TensorFlowTestCase):
def testAddDispatchForTypes_With_CppOp(self):
original_handlers = gen_math_ops.add._tf_dispatchers[:]
# Override the behavior of gen_math_ops.add.
@dispatch.dispatch_for_types(gen_math_ops.add, CustomTensor)
def custom_add(x, y, name=None): # pylint: disable=unused-variable
return CustomTensor(gen_math_ops.add(x.tensor, y.tensor, name),
(x.score+y.score) / 2.0)
self.assertEqual(len(math_ops.add._tf_dispatchers),
len(original_handlers) + 1)
# Test that we see the overridden behavior when using CustomTensors.
x = CustomTensor([1, 2, 3], 2.0)
y = CustomTensor([7, 8, 2], 0.0)
x_plus_y = gen_math_ops.add(x, y)
self.assertAllEqual(self.evaluate(x_plus_y.tensor), [8, 10, 5])
self.assertNear(x_plus_y.score, 1.0, 0.001)
# Test that we still get the right behavior when using normal Tensors.
a = [1, 2, 3]
b = [4, 5, 6]
a_plus_b = gen_math_ops.add(a, b)
self.assertAllEqual(a_plus_b, [5, 7, 9])
# Test that we still get a TypeError or ValueError if we pass some
# type that's not supported by any dispatcher.
with self.assertRaises((TypeError, ValueError)):
gen_math_ops.add(a, None)
# Clean up
gen_math_ops.add._tf_dispatchers = original_handlers
def testAddDispatchForTypes_With_PythonOp(self):
original_handlers = test_op._tf_dispatchers[:]
@dispatch.dispatch_for_types(test_op, CustomTensor)
def override_for_test_op(x, y, z): # pylint: disable=unused-variable
return CustomTensor(test_op(x.tensor, y.tensor, z.tensor),
(x.score + y.score + z.score) / 3.0)
x = CustomTensor([1, 2, 3], 0.2)
y = CustomTensor([7, 8, 2], 0.4)
z = CustomTensor([0, 1, 2], 0.6)
result = test_op(x, y, z)
self.assertAllEqual(self.evaluate(result.tensor), [15, 21, 13])
self.assertNear(result.score, 0.4, 0.001)
# Clean up
test_op._tf_dispatchers = original_handlers
def testDispatchForTypes_SignatureMismatch(self):
with self.assertRaisesRegexp(AssertionError, "The decorated function's "
"signature must exactly match.*"):
@dispatch.dispatch_for_types(test_op, CustomTensor)
def override_for_test_op(a, b, c): # pylint: disable=unused-variable
return CustomTensor(test_op(a.tensor, b.tensor, c.tensor),
(a.score + b.score + c.score) / 3.0)
def testDispatchForTypes_OpDoesNotSupportDispatch(self):
def some_op(x, y):
return x + y
with self.assertRaisesRegexp(AssertionError, "Dispatching not enabled for"):
@dispatch.dispatch_for_types(some_op, CustomTensor)
def override_for_some_op(x, y): # pylint: disable=unused-variable
return x if x.score > 0 else y
@test.mock.patch.object(tf_logging, "warning", autospec=True)
def testInteractionWithDeprecationWarning(self, mock_warning):
@deprecation.deprecated(date=None, instructions="Instructions")
@dispatch.add_dispatch_support
def some_op(x):
return x
some_op(5)
message = mock_warning.call_args[0][0] % mock_warning.call_args[0][1:]
self.assertRegexpMatches(
message,
r".*some_op \(from __main__\) is deprecated and will be "
"removed in a future version.*")
if __name__ == "__main__":
googletest.main()
| [
"v-grniki@microsoft.com"
] | v-grniki@microsoft.com |
b4cfd589f9a33ea94d548b290b5ad92cab41c430 | 9dc1c85e7d86d29400af79125e9cd89a82a9b8ab | /su_django/su_django/settings.py | 70e3f66d9e3bf1dadbe6287aa4005594eb96c938 | [
"MIT"
] | permissive | borko81/simple_django | e284ff8f79b3e708b4903ba0b774e3a480de9190 | 9dbd2d848cbf0ff0c58e93471853c5b21c769758 | refs/heads/master | 2023-07-14T01:25:13.294095 | 2021-08-16T15:48:00 | 2021-08-16T15:48:00 | 349,369,208 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,242 | py | import os
from pathlib import Path
# Build paths inside the project like this: BASE_DIR / 'subdir'.
BASE_DIR = Path(__file__).resolve().parent.parent
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/3.2/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'django-insecure-nl!+e0+xy%-*ipck7p6(9&jxfh3)pmwp0anrzcb)7@s#_bzamb'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'su_django',
'app01',
'cityes',
'secondary_app',
'main_app',
'boards',
'filemanager',
'posts',
'forms_lab',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'su_django.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [os.path.join(BASE_DIR, 'templates')],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'su_django.wsgi.application'
# Database
# https://docs.djangoproject.com/en/3.2/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': BASE_DIR / 'db.sqlite3',
}
}
# Password validation
# https://docs.djangoproject.com/en/3.2/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/3.2/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/3.2/howto/static-files/
STATIC_URL = '/static/'
STATIC_ROOT = os.path.join(BASE_DIR, STATIC_URL)
MEDIA_URL = 'media/'
MEDIA_ROOT = os.path.join(BASE_DIR, 'media')
# Default primary key field type
# https://docs.djangoproject.com/en/3.2/ref/settings/#default-auto-field
DEFAULT_AUTO_FIELD = 'django.db.models.BigAutoField'
| [
"bstoilov81@gmail.com"
] | bstoilov81@gmail.com |
fe3c7db78c54e5352b73bf103a3df636a50cdaaa | 483f45b1d241d318c06842f250719e73b8c4dfe7 | /Ex084.py | 71a391cde9885a5f79bb5b240a9d890998173737 | [] | no_license | andersondev96/Curso-em-Video-Python | 510a82bfa65830449374eb5e2b81af404120689e | 76449e6a0ba3624d2c5643268499dea3fccfa5d1 | refs/heads/master | 2022-10-19T02:07:10.967713 | 2020-06-14T23:57:02 | 2020-06-14T23:57:02 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 989 | py | """
Faça um programa que leia nome e peso de várias pessoas,
guardando tudo em uma lista. No final, mostre:
A) Quantas pessoas foram cadastradas.
B) Uma listagem com as pessoas mais pesadas.
C) Uma listagem com as pessoas mais leves.
"""
lista = list()
dados = list()
tot = 0
mai = men = 0
while True:
dados.append(str(input('Nome: ')))
dados.append(float(input('peso: ')))
if len(lista) == 0:
mai = men = dados[1]
else:
if dados[1] > mai:
mai = dados[1]
if dados[1] < men:
men = dados[1]
lista.append(dados[:])
dados.clear()
tot = tot + 1
cont = str(input('Deseja continuar [s/n]: '))
if cont in 'Nn':
break
print("-="*30)
print(f'Foram cadastradas {tot} pessoas na lista.')
print(f'O maior peso foi de {mai}Kg')
for p in lista:
if p[1] == mai:
print(f'[{p[0]}]')
print()
print(f'O menor peso foi de {men}Kg')
for p in lista:
if p[1] == men:
print(f'[{p[0]}]')
print()
| [
"andersonfferreira13@hotmail.com"
] | andersonfferreira13@hotmail.com |
8e62cae88c31bff477b2aa066abce0303c029d80 | 3eb99709809a493c46a79171ef9774aa4261b59d | /脚本/llianli/cf_app_flottery_client_data.py | 62454ee49a8036d8b5a611fcd32f37f9b13c3625 | [] | no_license | bingwin/tencent | c831a5b344f597a06c7a7b179d4f67d668198c90 | ea5dc5ff398d85cfdf4df056dc8b4064e66fb5fb | refs/heads/master | 2020-07-28T21:44:00.281933 | 2016-05-28T03:21:31 | 2016-05-28T03:21:31 | 209,548,176 | 1 | 0 | null | 2019-09-19T12:29:21 | 2019-09-19T12:29:21 | null | UTF-8 | Python | false | false | 3,904 | py | #!/usr/bin/env python
#-*- coding: utf-8 -*-
# ******************************************************************************
# 程序名称: cf_app_flottery_client_data.py
# 功能描述: 掌上穿越火线抽奖功能客户端相关事件统计
# 输入参数: yyyymmdd 例如:20140113
# 目标表名: ieg_qt_community_app.tb_cf_app_flottery_client_click
# 数据源表: teg_mta_intf.ieg_lol
# 创建人名: llianli
# 创建日期: 2016-02-01
# 版本说明: v1.0
# 公司名称: tencent
# 修改人名:
# 修改日期:
# 修改原因:
# ******************************************************************************
#import system module
# main entry
def TDW_PL(tdw, argv=[]):
tdw.WriteLog("== begin ==")
tdw.WriteLog("== argv[0] = " + argv[0] + " ==")
sDate = argv[0];
##sDate = '20150111'
tdw.WriteLog("== sDate = " + sDate + " ==")
tdw.WriteLog("== connect tdw ==")
sql = """use ieg_qt_community_app"""
res = tdw.execute(sql)
sql = """set hive.inputfiles.splitbylinenum=true"""
res = tdw.execute(sql)
sql = """set hive.inputfiles.line_num_per_split=1000000"""
res = tdw.execute(sql)
sql = """
CREATE TABLE IF NOT EXISTS tb_cf_app_flottery_client_click
(
sdate int,
id bigint,
ei string,
pv bigint,
total_uin bigint,
total_mac bigint
) """
res = tdw.execute(sql)
sql="""delete from tb_cf_app_flottery_client_click where sdate=%s """ % (sDate)
res = tdw.execute(sql)
sql = """
insert table tb_cf_app_flottery_client_click
select
%s as sdate,
case when grouping(id) = 1 then -100 else id end as id,
ei,
count(*) as pv,
count(distinct uin) as total_uin,
count(distinct ui_mc) as total_mac
from
(
select
id,
concat(ui,mc) as ui_mc,
get_json_object(kv,'$.uin') as uin ,
case
when ( id = 1100679031 and ei = '王者宝藏点击次数') or
( id = 1200679031 and ei = '抽奖_模块点击')
then '王者宝藏模块'
when ( id = 1100679031 and ei = '抽奖页面点击量') or
( id = 1200679031 and ei = '抽奖_TAB展示次数' and get_json_object(kv,'$.type') = '宝藏')
then '抽奖页面'
when ( id = 1100679031 and ei = '分享点击次数') or
( id = 1200679031 and ei = '抽奖_结果界面分享次数' )
then '分享点击次数'
when ( id = 1100679031 and ei = '排行榜页面点击量') or
( id = 1200679031 and ei = '抽奖_TAB展示次数' and get_json_object(kv,'$.type') = '排行')
then '排行页面'
when ( id = 1100679031 and ei = '兑换页面点击量') or
( id = 1200679031 and ei = '抽奖_TAB展示次数' and get_json_object(kv,'$.type') = '兑换')
then '兑换页面'
when ( id = 1100679031 and ei = '记录页面点击量') or
( id = 1200679031 and ei = '抽奖_TAB展示次数' and get_json_object(kv,'$.type') = '记录')
then '记录页面'
else 'other'
end as ei
from teg_mta_intf::ieg_lol where sdate = %s and id in (1100679031,1200679031)
)t
where ei != 'other'
group by cube(id),ei
""" % (sDate,sDate)
tdw.WriteLog(sql)
res = tdw.execute(sql)
tdw.WriteLog("== end OK ==")
| [
"996346098@qq.com"
] | 996346098@qq.com |
9f25304223889a65bb1ac1016b8110a748efbb9d | 4438b60b7095d56c7fc2ee8396ea98ec620f7f51 | /etro.py | 9f2571163bab0f826fcb1b045c10e654888bf408 | [] | no_license | haizi-zh/firenze | 8a53e49a55f0827f8b0179164ed6c1ea9a3b005d | 1129bfd0df6f9d661b4f01ce514595f1eba784c3 | refs/heads/master | 2021-01-18T15:24:33.495830 | 2013-07-13T15:57:10 | 2013-07-13T15:57:10 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,946 | py | # coding=utf-8
import json
import string
import re
import common as cm
import geosense as gs
__author__ = 'Zephyre'
db = None
def fetch_continents(data):
result = []
for i in xrange(1, 5):
d = data.copy()
d['url'] = '%s%d/' % (data['url'], i)
result.append(d)
return result
def fetch_countries(data):
url = data['url']
try:
body = cm.get_data(url)
except Exception:
print 'Error occured: %s' % url
dump_data = {'level': 0, 'time': cm.format_time(), 'data': {'url': url}, 'brand_id': data['brand_id']}
cm.dump(dump_data)
return []
m = re.search(ur'<\s*nav\s+class\s*=\s*"country-list"\s*>', body)
if m is None:
return []
sub, start, end = cm.extract_closure(body[m.start():], ur'<nav\b', ur'</nav>')
result = []
for m in re.findall(ur'<\s*li\s*>\s*<a\s+href\s*=\s*"(.+?)"\s+title=.*?>\s*(.+?)\s*<\s*/\s*a\s*>', sub):
d = data.copy()
d['url'] = m[0].strip()
d['country'] = m[1].strip().upper()
result.append(d)
return result
def fetch_stores(data):
url = data['url']
try:
body = cm.get_data(url)
except Exception:
print 'Error occured: %s' % url
dump_data = {'level': 0, 'time': cm.format_time(), 'data': {'url': url}, 'brand_id': data['brand_id']}
cm.dump(dump_data)
return []
result = []
for m in re.findall(ur'<li class="store">.+?<a href="(.+?)".+?</li>', body, re.S):
d = data.copy()
d['url'] = m.strip()
result.append(d)
return result
def fetch_store_details(data):
url = data['url']
try:
body = cm.get_data(url)
except Exception:
print 'Error occured: %s' % url
dump_data = {'level': 0, 'time': cm.format_time(), 'data': {'url': url}, 'brand_id': data['brand_id']}
cm.dump(dump_data)
return []
entry = cm.init_store_entry(data['brand_id'], data['brandname_e'], data['brandname_c'])
ret = gs.look_up(data['country'], 1)
if ret is not None:
entry[cm.country_e] = ret['name_e']
m = re.search(ur'<span class="type">Address</span>\s*<p>(.+?)</p>', body, re.S)
if m is not None:
addr = cm.reformat_addr(m.group(1))
country, province, city = gs.addr_sense(addr)
if country is not None and entry[cm.country_e] == '':
entry[cm.country_e] = country
if province is not None:
entry[cm.province_e] = province
if city is not None:
entry[cm.city_e] = city
entry[cm.addr_e] = addr
m = re.search(ur'<span class="type">Phone</span>\s*<p>(.+?)</p>', body, re.S)
if m is not None:
entry[cm.tel] = m.group(1)
m = re.search(ur'<span class="type">Opening hours</span>\s*<p>(.+?)</p>', body, re.S)
if m is not None:
entry[cm.hours] = cm.reformat_addr(m.group(1))
m = re.search(ur'<span class="type">You can find</span>\s*<p>(.+?)</p>', body, re.S)
if m is not None:
entry[cm.store_type] = cm.reformat_addr(m.group(1))
m = re.search(ur'google.maps.LatLng\(\s*(-?\d+\.\d+)\s*,\s*(-?\d+\.\d+)', body, re.S)
entry[cm.lat]=string.atof(m.group(1))
entry[cm.lng]=string.atof(m.group(2))
gs.field_sense(entry)
print '(%s / %d) Found store: %s, %s (%s, %s)' % (data['brandname_e'], data['brand_id'],
entry[cm.name_e], entry[cm.addr_e], entry[cm.country_e],
entry[cm.continent_e])
db.insert_record(entry, 'stores')
return [entry]
def fetch(level=1, data=None, user='root', passwd=''):
def func(data, level):
"""
:param data:
:param level: 0:国家;1:城市;2:商店列表
"""
if level == 0:
# 洲列表
return [{'func': lambda data: func(data, 1), 'data': s} for s in fetch_continents(data)]
if level == 1:
# 国家列表
return [{'func': lambda data: func(data, 2), 'data': s} for s in fetch_countries(data)]
if level == 2:
# 商店列表
return [{'func': lambda data: func(data, 3), 'data': s} for s in fetch_stores(data)]
if level == 3:
# 商店详情
return [{'func': None, 'data': s} for s in fetch_store_details(data)]
else:
return []
# Walk from the root node, where level == 1.
if data is None:
data = {'url': 'http://www.etro.com/en_wr/storefinder/get/list/continent/',
'brand_id': 10127, 'brandname_e': u'Etro', 'brandname_c': u'艾特罗'}
global db
db = cm.StoresDb()
db.connect_db(user=user, passwd=passwd)
db.execute(u'DELETE FROM %s WHERE brand_id=%d' % ('stores', data['brand_id']))
results = cm.walk_tree({'func': lambda data: func(data, 0), 'data': data})
db.disconnect_db()
return results | [
"haizi.zh@gmail.com"
] | haizi.zh@gmail.com |
b2851bd4f588b8c3675082d85433d7b02b70f58a | 82dca8287b9cc32599404c402240a177f7dfb9f7 | /venv/bin/sqlformat | b9ae5435c634511caba14ee0aff440ee9ac61801 | [] | no_license | kairat3/homework-tutorial | 8abaf2710a76c042701882cb904ea5cbac75f600 | 2babb6ddbd04fd596b0a5fe83e9b7eb1969c24d9 | refs/heads/master | 2023-05-05T14:34:25.121718 | 2021-06-01T09:40:51 | 2021-06-01T09:40:51 | 372,775,199 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 242 | #!/home/ka/projects/tutorial/venv/bin/python
# -*- coding: utf-8 -*-
import re
import sys
from sqlparse.__main__ import main
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
sys.exit(main())
| [
"jvckmiller@gmail.com"
] | jvckmiller@gmail.com | |
1faf061beb78dad3524eae193c52e3e541f3652e | 28dbe47aba287ed94ef7bba734203736bcc06249 | /.history/dmac_20200624221838.py | 1dd8a319dcdd7456874878e6ce201b25c7df7cbf | [] | no_license | ntung88/Trading_Algorithms | 242fd816b19df95e02e9fcd8c5c91c862d2ede40 | d96488b1754e3751f739d9c3f094a8f8dc54a0a9 | refs/heads/master | 2022-11-19T16:04:07.800344 | 2020-07-17T21:14:10 | 2020-07-17T21:14:10 | 276,239,640 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,599 | py | import yfinance as yf
import numpy as np
from scipy.stats import norm
import pandas as pd
from pandasgui import show
from scipy.optimize import minimize, LinearConstraint
import matplotlib.pyplot as plt
def clean_data(data):
incomplete_idxs = False
for col in data.columns:
incomplete_idxs |= np.isnan(data[col])
return data[~incomplete_idxs]
def calc_crossovers(sma, lma):
num_points = len(clean_data(lma))
#Currently using only closing prices
sma = sma['Close']
lma = lma['Close']
high = (sma > lma)[-num_points:]
crossovers = high.astype(int).diff()[1:]
trimmed = crossovers[crossovers != 0]
return trimmed
def profit(data, crossovers):
if len(crossovers) == 0:
return 0
total = 0
if crossovers.iloc[0] == -1:
total += data.loc[crossovers.index[0]] - data.iloc[0]
for i in range(1,len(crossovers)):
left_bound = crossovers.index[i-1]
if crossovers.loc[left_bound] == 1:
right_bound = crossovers.index[i]
total += data.loc[right_bound] - data.loc[left_bound]
if crossovers.iloc[-1] == 1:
total += data.iloc[-1] - data.loc[crossovers.index[-1]]
return total
def optimize(data):
cons = ({'type': 'ineq', 'fun': lambda x: x[1] - x[0]},
{'type': 'ineq', 'fun': lambda x: x[0] - 5})
# 'type':'eq', 'fun': lambda x : max([x[i]-int(x[i]) for i in range(len(x))]),
short_seeds = range(5, 300, 30)
long_seeds = range(20, 800, 40)
# short_seeds = [100]
# long_seeds = [750]
minimum = float('inf')
best_short = 0
best_long = 0
for short_seed in short_seeds:
for long_seed in long_seeds:
if long_seed > short_seed:
res = minimize(run_analysis, [short_seed, long_seed], args=(data,), method='COBYLA', constraints=cons, options={'rhobeg': 10.0, 'catol': 0.0})
if res.fun < minimum:
best_short = res.x[0]
best_long = res.x[1]
minimum = res.fun
return (int(round(best_short)), int(round(best_long)), minimum)
def run_analysis(periods, data):
# print(periods)
short_period = int(round(periods[0]))
long_period = int(round(periods[1]))
# print(short_period, long_period)
sma = data.rolling(short_period).mean()
lma = data.rolling(long_period).mean()
crossovers = calc_crossovers(sma, lma)
result = -1 * profit(data['Close'], crossovers)
# print(short_period, long_period, result)
return result
def main():
tickers = 'SPY AAPL MRNA TSLA MMM APA'
data = yf.download(tickers, period='max', group_by='ticker')
dirty = pd.DataFrame(data['APA'])
frame = clean_data(dirty)
# periods = optimize(frame)
# visualize(data, periods[0], periods[1])
visualize(frame, 50, 200)
def visualize(data, short_period, long_period):
sma = data.rolling(short_period).mean()
lma = data.rolling(long_period).mean()
crossovers = calc_crossovers(sma, lma)
buys = pd.DataFrame(crossovers[crossovers == 1.0])
sells = pd.DataFrame(crossovers[crossovers == -1.0])
plot_sells = sells * data['Close']
# plot_sells[np.isnan(plot_sells)] = 0
plot_buys = buys * data['Close']
# print(len(plot_sells.index), len(plot_sells['Close']))
# plot_buys[np.isnan(plot_buys)] = 0
data.plot(color='black')
plot_sells.plot(kind='scatter', x=plot_sells.index, y=plot_sells['Close'], color='red')
plot_buys.plot(kind='scatter', x=plot_buys.index, y=plot_buys['Close'], color='green')
plt.show()
if __name__ == "__main__":
main() | [
"nathantung@Nathans-MacBook-Pro.local"
] | nathantung@Nathans-MacBook-Pro.local |
e7b577c747abdc08c9955fd13104ca6a9f5c9d3c | 15f321878face2af9317363c5f6de1e5ddd9b749 | /solutions_python/Problem_199/1725.py | c78439354107fb7183c695dfd3799280485c6f7d | [] | no_license | dr-dos-ok/Code_Jam_Webscraper | c06fd59870842664cd79c41eb460a09553e1c80a | 26a35bf114a3aa30fc4c677ef069d95f41665cc0 | refs/heads/master | 2020-04-06T08:17:40.938460 | 2018-10-14T10:12:47 | 2018-10-14T10:12:47 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 845 | py | INPUT_FILE = 'A-large.in'
OUTPUT_FILE = 'A-large_out.txt'
def solve(f_in):
l = f_in.readline().strip()
lst = l.split(' ')
k = int(lst[1])
pancakesStr = lst[0]
pancakes = 0
nPancakes = len(pancakesStr)
for i in range(nPancakes):
if pancakesStr[i] == '-':
pancakes = pancakes | (1 << (nPancakes - i - 1))
countFlips = 0
flip = 0
for i in range(k):
flip = flip | (1 << i)
for i in range(nPancakes - k + 1):
if pancakes & (1 << (nPancakes - i - 1)):
countFlips = countFlips + 1
pancakes = pancakes ^ (flip << nPancakes - k - i)
if pancakes == 0:
result = countFlips
else:
result = "IMPOSSIBLE"
return result
with open(INPUT_FILE, 'r') as f:
with open(OUTPUT_FILE, 'w') as f_out:
T = int(f.readline())
for i in range(T):
f_out.write('Case #%d: %s\n' % (i + 1, solve(f)))
| [
"miliar1732@gmail.com"
] | miliar1732@gmail.com |
9c9a840d4942d09bbb6213039764fca57b919091 | ca507259c36a4299666f4c064f25832f5c3f45c1 | /test/test_mosaic_dto.py | f7959f7a562463df95275868b61ee3b8768dc54e | [] | no_license | fullcircle23/symbol-openapi-python-client | ae38a2537d1f2eebca115733119c444b79ec4962 | 3728d30eb1b5085a5a5e991402d180fac8ead68b | refs/heads/master | 2022-04-15T06:20:47.821281 | 2020-04-16T02:39:14 | 2020-04-16T02:39:14 | 254,701,919 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,273 | py | # coding: utf-8
"""
****************************************************************************
Copyright (c) 2016-present,
Jaguar0625, gimre, BloodyRookie, Tech Bureau, Corp. All rights reserved.
This file is part of Catapult.
Catapult is free software: you can redistribute it and/or modify
it under the terms of the GNU Lesser General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
Catapult is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public License
along with Catapult. If not, see <http://www.gnu.org/licenses/>.
****************************************************************************
Catapult REST Endpoints
OpenAPI Specification of catapult-rest 1.0.20.22 # noqa: E501
The version of the OpenAPI document: 0.8.9
Contact: ravi@nem.foundation
NOTE: This file is auto generated by Symbol OpenAPI Generator:
https://github.com/nemtech/symbol-openapi-generator
Do not edit this file manually.
"""
from __future__ import absolute_import
import unittest
import datetime
import symbol_openapi_client
from symbol_openapi_client.models.mosaic_dto import MosaicDTO # noqa: E501
from symbol_openapi_client.rest import ApiException
class TestMosaicDTO(unittest.TestCase):
"""MosaicDTO unit test stubs"""
def setUp(self):
pass
def tearDown(self):
pass
def make_instance(self, include_optional):
"""Test MosaicDTO
include_option is a boolean, when False only required
params are included, when True both required and
optional params are included """
# model = symbol_openapi_client.models.mosaic_dto.MosaicDTO() # noqa: E501
if include_optional :
return MosaicDTO(
id = '0DC67FBE1CAD29E3',
supply = 123456,
start_height = 1,
owner_public_key = 'AC1A6E1D8DE5B17D2C6B1293F1CAD3829EEACF38D09311BB3C8E5A880092DE26',
owner_address = '9081FCCB41F8C8409A9B99E485E0E28D23BD6304EF7215E01A',
revision = 1,
flags = 56,
divisibility = 56,
duration = 200
)
else :
return MosaicDTO(
id = '0DC67FBE1CAD29E3',
supply = 123456,
start_height = 1,
owner_public_key = 'AC1A6E1D8DE5B17D2C6B1293F1CAD3829EEACF38D09311BB3C8E5A880092DE26',
owner_address = '9081FCCB41F8C8409A9B99E485E0E28D23BD6304EF7215E01A',
revision = 1,
flags = 56,
divisibility = 56,
duration = 200,
)
def testMosaicDTO(self):
"""Test MosaicDTO"""
inst_req_only = self.make_instance(include_optional=False)
inst_req_and_optional = self.make_instance(include_optional=True)
if __name__ == '__main__':
unittest.main()
| [
"fullcircle2324@gmail.com"
] | fullcircle2324@gmail.com |
c5f19c607da75bed9b9a36a1a5f11b71b04c0f5e | 637bb3f080ff18001a732d9bf607ef962b09c5dd | /AtividadeURI_01/uri_1012_area.py | 8d96cd42ca3bd54dc511638969355f63eba86ad3 | [] | no_license | magnoazneto/IFPI_Algoritmos | 995296fa22445c57981a1fad43e1ef7a8da83e5e | 3b5e79c79b7a1fb7a08206719fd418fba1b39691 | refs/heads/master | 2022-02-27T10:59:17.123895 | 2019-11-17T13:51:35 | 2019-11-17T13:51:35 | 186,868,451 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 469 | py | def main():
entradas = input().split()
A = float(entradas[0])
B = float(entradas[1])
C = float(entradas[2])
tria_retangulo = (A*C) / 2
circulo = 3.14159 * (C**2)
trapezio = ((A+B) * C) / 2
quadrado = B ** 2
retangulo = A * B
print('TRIANGULO: {:.3f}'.format(tria_retangulo))
print('CIRCULO: {:.3f}'.format(circulo))
print('TRAPEZIO: {:.3f}'.format(trapezio))
print('QUADRADO: {:.3f}'.format(quadrado))
print('RETANGULO: {:.3f}'.format(retangulo))
main() | [
"magnoazneto@gmail.com"
] | magnoazneto@gmail.com |
cd01a0671a16b17c6f88a80526d827fcc05fd55a | 52a15d4fabf68bf23a23799312ae40465764908c | /src/changeset/load.py | 56b0f7312eaf95e688e194baaccf8d0d1988df1e | [
"MIT",
"Apache-2.0"
] | permissive | jensl/critic | 2071a1b0600051967323df48f4d3a5656a5d2bb8 | c2d962b909ff7ef2f09bccbeb636333920b3659e | refs/heads/stable/1 | 2022-05-28T03:51:15.108944 | 2018-03-27T18:47:46 | 2018-03-29T15:08:30 | 6,430,552 | 224 | 36 | NOASSERTION | 2023-05-29T15:38:00 | 2012-10-28T18:26:04 | Python | UTF-8 | Python | false | false | 4,926 | py | # -*- mode: python; encoding: utf-8 -*-
#
# Copyright 2012 Jens Lindström, Opera Software ASA
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy of
# the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.
import diff
import dbutils
import gitutils
def loadChangeset(db, repository, changeset_id, filtered_file_ids=None, load_chunks=True):
return loadChangesets(db, repository,
changesets=[diff.Changeset.fromId(db, repository, changeset_id)],
filtered_file_ids=filtered_file_ids,
load_chunks=load_chunks)[0]
def loadChangesetsForCommits(db, repository, commits, filtered_file_ids=None, load_chunks=True):
commit_ids = dict([(commit.getId(db), commit) for commit in commits])
def getCommit(commit_id):
return commit_ids.get(commit_id) or gitutils.Commit.fromId(db, repository, commit_id)
cursor = db.cursor()
cursor.execute("SELECT id, parent, child FROM changesets WHERE child=ANY (%s) AND type='direct'", (commit_ids.keys(),))
changesets = []
for changeset_id, parent_id, child_id in cursor:
changesets.append(diff.Changeset(changeset_id, getCommit(parent_id), getCommit(child_id), "direct"))
return loadChangesets(db, repository, changesets, filtered_file_ids=filtered_file_ids, load_chunks=load_chunks)
def loadChangesets(db, repository, changesets, filtered_file_ids=None, load_chunks=True):
cursor = db.cursor()
changeset_ids = [changeset.id for changeset in changesets]
filtered_file_ids = list(filtered_file_ids) if filtered_file_ids else None
if filtered_file_ids is None:
cursor.execute("""SELECT changeset, file, path, old_sha1, new_sha1, old_mode, new_mode
FROM fileversions
JOIN files ON (files.id=fileversions.file)
WHERE changeset=ANY (%s)""",
(changeset_ids,))
else:
cursor.execute("""SELECT changeset, file, path, old_sha1, new_sha1, old_mode, new_mode
FROM fileversions
JOIN files ON (files.id=fileversions.file)
WHERE changeset=ANY (%s)
AND file=ANY (%s)""",
(changeset_ids, filtered_file_ids))
files = dict([(changeset.id, {}) for changeset in changesets])
for changeset_id, file_id, file_path, file_old_sha1, file_new_sha1, file_old_mode, file_new_mode in cursor.fetchall():
files[changeset_id][file_id] = diff.File(file_id, file_path,
file_old_sha1, file_new_sha1,
repository,
old_mode=file_old_mode,
new_mode=file_new_mode,
chunks=[])
if load_chunks:
if filtered_file_ids is None:
cursor.execute("""SELECT id, changeset, file, deleteOffset, deleteCount, insertOffset, insertCount, analysis, whitespace
FROM chunks
WHERE changeset=ANY (%s)
ORDER BY file, deleteOffset ASC""",
(changeset_ids,))
else:
cursor.execute("""SELECT id, changeset, file, deleteOffset, deleteCount, insertOffset, insertCount, analysis, whitespace
FROM chunks
WHERE changeset=ANY (%s)
AND file=ANY (%s)
ORDER BY file, deleteOffset ASC""",
(changeset_ids, filtered_file_ids))
for chunk_id, changeset_id, file_id, delete_offset, delete_count, insert_offset, insert_count, analysis, is_whitespace in cursor:
files[changeset_id][file_id].chunks.append(diff.Chunk(delete_offset, delete_count,
insert_offset, insert_count,
id=chunk_id,
is_whitespace=is_whitespace,
analysis=analysis))
for changeset in changesets:
changeset.files = diff.File.sorted(files[changeset.id].values())
return changesets
| [
"jl@opera.com"
] | jl@opera.com |
b9a24f96a2369aaff0a8e0ac012240974a522eba | 9a4bf3864d9e66ba285e2d28d570b920b47d1ae9 | /0886_Possible_Bipartition.py | 855be9f8c40a2796f8b15cc637eb0b4fa00bf602 | [] | no_license | actcheng/leetcode-solutions | bf32044bcd916912c86294d83638be08a27b9d8f | 4bf1a7814b5c76e11242e7933e09c59ede3284a3 | refs/heads/master | 2021-06-28T19:15:30.089116 | 2020-08-25T08:17:48 | 2020-08-25T08:17:48 | 139,708,553 | 2 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,317 | py | # Problem 886
# Date completed: 2020/05/27
# 1084 ms (18%)
class Solution:
def possibleBipartition(self, N: int, dislikes: List[List[int]]) -> bool:
rec = collections.defaultdict(set)
for a,b in dislikes:
rec[a].add(b)
rec[b].add(a)
wait = set(rec.keys())
queue = []
groups = [set([]), set([])]
group_dislike = [set([]),set([])]
while queue or wait:
key = queue.pop(0) if queue else wait.pop()
for i in [0,1]:
if key not in group_dislike[i]:
groups[i].add(key)
group_dislike[i].update(rec[key])
if key in wait: wait.remove(key)
for b in rec[key].intersection(wait):
if b in group_dislike[1-i]:
print(b,groups)
return False
groups[1-i].add(b)
group_dislike[1-i].update(rec[b])
queue.extend([val for val in rec[b] if val in wait])
wait.remove(b)
break
if len(group_dislike[0].intersection(group_dislike[1])) > 0: return False
return True
| [
"noreply@github.com"
] | actcheng.noreply@github.com |
cc06a1afd3cf990aab9e8017188708c6770ddf82 | 16b389c8dcace7f7d010c1fcf57ae0b3f10f88d3 | /docs/jnpr_healthbot_swagger/test/test_devicegroup_schema_publish.py | bd0b683c3c81f49498cdd4654c35eff56b402752 | [
"Apache-2.0"
] | permissive | Juniper/healthbot-py-client | e4e376b074920d745f68f19e9309ede0a4173064 | 0390dc5d194df19c5845b73cb1d6a54441a263bc | refs/heads/master | 2023-08-22T03:48:10.506847 | 2022-02-16T12:21:04 | 2022-02-16T12:21:04 | 210,760,509 | 10 | 5 | Apache-2.0 | 2022-05-25T05:48:55 | 2019-09-25T05:12:35 | Python | UTF-8 | Python | false | false | 994 | py | # coding: utf-8
"""
Healthbot APIs
API interface for Healthbot application # noqa: E501
OpenAPI spec version: 1.0.0
Contact: healthbot-hackers@juniper.net
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import unittest
import swagger_client
from swagger_client.models.devicegroup_schema_publish import DevicegroupSchemaPublish # noqa: E501
from swagger_client.rest import ApiException
class TestDevicegroupSchemaPublish(unittest.TestCase):
"""DevicegroupSchemaPublish unit test stubs"""
def setUp(self):
pass
def tearDown(self):
pass
def testDevicegroupSchemaPublish(self):
"""Test DevicegroupSchemaPublish"""
# FIXME: construct object with mandatory attributes with example values
# model = swagger_client.models.devicegroup_schema_publish.DevicegroupSchemaPublish() # noqa: E501
pass
if __name__ == '__main__':
unittest.main()
| [
"nitinkr@juniper.net"
] | nitinkr@juniper.net |
04598fc33634c577860347eab85306f2b9f11336 | 10e19b5cfd59208c1b754fea38c34cc1fb14fdbe | /desktop/core/ext-py/django-openid-auth-0.5/django_openid_auth/tests/__init__.py | f76af6f1bdfa86e22acaa51e11710b716b03d492 | [
"Apache-2.0",
"BSD-2-Clause"
] | permissive | sarvex/hue | 780d28d032edd810d04e83f588617d1630ec2bef | 6e75f0c4da2f3231e19c57bdedd57fb5a935670d | refs/heads/master | 2023-08-15T21:39:16.171556 | 2023-05-01T08:37:43 | 2023-05-01T08:37:43 | 32,574,366 | 0 | 0 | Apache-2.0 | 2023-09-14T16:55:28 | 2015-03-20T09:18:18 | Python | UTF-8 | Python | false | false | 1,749 | py | # django-openid-auth - OpenID integration for django.contrib.auth
#
# Copyright (C) 2009-2013 Canonical Ltd.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
import unittest
from test_views import *
from test_store import *
from test_auth import *
from test_admin import *
def suite():
suite = unittest.TestSuite()
for name in ['test_auth', 'test_store', 'test_views', 'test_admin']:
mod = __import__('%s.%s' % (__name__, name), {}, {}, ['suite'])
suite.addTest(mod.suite())
return suite
| [
"romain@cloudera.com"
] | romain@cloudera.com |
552019b6e9b5e757bfae8e0ece6e89266860f4b9 | d1e540562faabf463788e6ad31c337e2fe329944 | /holland/core/exceptions.py | 4b6356f5b6aa08cfd50a1938c60530cafa77c5e0 | [
"BSD-3-Clause",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | nwbreneman/holland | 5a675883d8ca037505bf6a9e271a3cc33b0c6902 | 8defe48aafb32388ec2bf1cc76070da98c766d8e | refs/heads/master | 2020-04-09T20:24:13.453638 | 2018-11-27T20:21:14 | 2018-11-27T20:21:14 | 160,572,324 | 0 | 0 | NOASSERTION | 2018-12-10T23:31:07 | 2018-12-05T20:08:38 | Python | UTF-8 | Python | false | false | 325 | py | """
Standard public exceptions that are raised by
the various APIs in holland-core
"""
class ConfigError(Exception):
"""Configuration error"""
pass
class InsufficientSpaceError(Exception):
"""Operation could not complete due to disk space"""
pass
class ArgumentError(Exception):
"""Invalid argument"""
| [
"wdierkes@rackspace.com"
] | wdierkes@rackspace.com |
b3cdc2e5d186896ef3832edbf72fbb38490d134d | 4acfe1899465199ed5f6b40e6261f362a731cc28 | /stock_system/migrations/0037_auto_20210224_0626.py | 7168db6ee4400734b703dd338efd3587c50bbb15 | [] | no_license | Tiilon/cjs_inventory | 13d1304a4aa21b8ffb748fb29fd964795dfc3601 | a9e0b844d885c47355e8d40985f33265517595a8 | refs/heads/master | 2023-03-14T08:58:35.660834 | 2021-03-04T17:29:38 | 2021-03-04T17:29:38 | 340,845,191 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 677 | py | # Generated by Django 3.1.7 on 2021-02-23 22:26
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('stock_system', '0036_auto_20210223_1852'),
]
operations = [
migrations.AddField(
model_name='brand',
name='reorder_lvl',
field=models.IntegerField(blank=True, default=0, null=True),
),
migrations.AlterField(
model_name='returns',
name='reason',
field=models.CharField(blank=True, choices=[('Damaged', 'Damaged'), ('Expired', 'Expired'), ('Unwanted', 'Unwanted')], max_length=250, null=True),
),
]
| [
"tiilon42@gmail.com"
] | tiilon42@gmail.com |
6f6ca39857cec502b96273f03e3e4ee7a792ec78 | 7ea54debed6a3acda594adc9c9cb36027ba4842c | /article/migrations/0055_auto_20181011_2133.py | 8f5c3d1135ae3d38276a46d6339e4316356a3d01 | [
"MIT"
] | permissive | higab85/drugsandme | e32df050b0a1fb24c06c53eece50f2e4b9b4f01e | 7db66d9687ac9a04132de94edda364f191d497d7 | refs/heads/master | 2022-12-19T12:12:36.885236 | 2019-08-21T14:50:21 | 2019-08-21T14:50:21 | 153,900,258 | 2 | 1 | MIT | 2022-12-08T02:26:17 | 2018-10-20T11:48:46 | JavaScript | UTF-8 | Python | false | false | 1,195 | py | # Generated by Django 2.0.9 on 2018-10-11 21:33
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('article', '0054_auto_20181009_1452'),
]
operations = [
migrations.AddField(
model_name='articlepage',
name='search_description_en',
field=models.TextField(blank=True),
),
migrations.AddField(
model_name='articlepage',
name='search_description_es',
field=models.TextField(blank=True),
),
migrations.AddField(
model_name='articlepage',
name='seo_title_en',
field=models.TextField(blank=True),
),
migrations.AddField(
model_name='articlepage',
name='seo_title_es',
field=models.TextField(blank=True),
),
migrations.AddField(
model_name='articlepage',
name='slug_en',
field=models.TextField(blank=True),
),
migrations.AddField(
model_name='articlepage',
name='slug_es',
field=models.TextField(blank=True),
),
]
| [
"gabh@protonmail.com"
] | gabh@protonmail.com |
25145269e9924692592b4be1912a1a7332cffae3 | 305a9cade2fd8a69ab363a40389420296eccddd0 | /LogExpConversion.py | 098470645632eee5e7d19ba3cf40459b5d5e055f | [] | no_license | PMiskew/Python3_Small_Projects | 823ee6fe20715e8aa9dbadf87f0091929ac87e58 | 1fd80b81d0fa80d90d894bd88a2039d15d2330ca | refs/heads/master | 2021-01-01T04:27:33.711886 | 2017-07-14T01:54:01 | 2017-07-14T01:54:01 | 97,176,400 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 793 | py | import tkinter as tk
class LogExpConversionCalc:
def __init__(self):
self.root = tk.Tk()
self.labLog = tk.Label(self.root, text = "Log")
self.labLog.grid(row = 0, column = 0)
self.entN = tk.Entry(self.root, width = 1)
self.entN.grid(row = 1,column = 1)
self.entX = tk.Entry(self.root,width = 3)
self.entX.grid(row = 0, column = 2)
self.labEqu = tk.Label(self.root, text = "=")
self.labEqu.grid(row = 0, column = 3)
self.entA = tk.Entry(self.root,width = 4)
self.entA.grid(row = 0, column = 4)
self.butSub = tk.Button(self.root,text = "Submit")
self.butSub.grid(row = 2,column = 0,columnspan = 5,sticky = "NESW")
self.root.mainloop()
game = LogExpConversionCalc() | [
"paul.miskew@gmail.com"
] | paul.miskew@gmail.com |
78ba1ce80f8ed7fefab1b41579e66895862d173a | ee803c29e9c5216a16a2699854b98c8a6d9760b8 | /dataServer/FlaskDataServer/app/LocalDataServer/DBApi/Conf.py | cd4560affc42bb9db4f034e95e3bf0b2cb952893 | [] | no_license | algo2019/algorithm | c160e19b453bc979853caf903ad96c2fa8078b69 | 3b5f016d13f26acab89b4a177c95a4f5d2dc1ba1 | refs/heads/master | 2022-12-12T17:59:57.342665 | 2019-02-23T07:45:39 | 2019-02-23T07:45:39 | 162,404,028 | 0 | 0 | null | 2022-12-08T01:29:20 | 2018-12-19T08:08:13 | Python | UTF-8 | Python | false | false | 444 | py | DB_PATH = '/Users/renren1/test/LocalData.db'
EXCHANGE_ID = {
'SHFX': {'IM', 'AL', 'AU', 'NI', 'PB', 'CU', 'SN', 'ZN', 'AG', 'BU', 'RB', 'FU', 'HC', 'WR', 'RU'},
'DLFX': {'V', 'B', 'M', 'A', 'Y', 'JD', 'JM', 'J', 'BB', 'PP', 'L', 'I', 'FB', 'C', 'CS', 'P'},
'ZZFX': {'SR', 'CF', 'ZC', 'FG', 'TA', 'MA', 'WH', 'PM', 'R', 'LR', 'JR', 'RS', 'OI', 'RM', 'SF', 'SM', 'RI'},
'CFFEX': {'IF', 'IH', 'IC', 'TF', 'T', 'TT', 'AF', 'EF'}
} | [
"xingwang.zhang@renren-inc.com"
] | xingwang.zhang@renren-inc.com |
544f529cb861e34908c3c8dc63ee19177cfa3629 | 6d05f0521eabec73a53f34c3080e9eaee14253fb | /eden/integration/hg/absorb_test.py | 9393ec1dbd3f226d39680fb89cae9510acf518d0 | [
"BSD-3-Clause"
] | permissive | eamonnkent/eden | 8d44ce7bd75329754fb38c1a815dbcdb65b1a481 | c0a837ec7d1f05903bbc17e707bc9fd427046fd8 | refs/heads/master | 2020-03-27T09:43:46.545906 | 2018-08-27T05:42:20 | 2018-08-27T05:52:34 | 146,367,298 | 0 | 0 | null | 2018-08-27T23:54:15 | 2018-08-27T23:54:15 | null | UTF-8 | Python | false | false | 3,344 | py | #!/usr/bin/env python3
#
# Copyright (c) 2004-present, Facebook, Inc.
# All rights reserved.
#
# This source code is licensed under the BSD-style license found in the
# LICENSE file in the root directory of this source tree. An additional grant
# of patent rights can be found in the PATENTS file in the same directory.
import logging
from .lib.hg_extension_test_base import EdenHgTestCase, hg_test
log = logging.getLogger("eden.test.absorb")
@hg_test
class AbsorbTest(EdenHgTestCase):
def populate_backing_repo(self, repo):
repo.write_file("readme.txt", "readme\n")
repo.write_file(
"src/test.c",
"""\
start of the file
line 1
line 2
line 3
end of the file
""",
)
self.commit1 = repo.commit("Initial commit.")
repo.hg("phase", "--public", self.commit1)
log.debug("commit1: %s", self.commit1)
def test_absorb(self):
self.assert_status_empty()
# Update src/test.c in our first draft commit
self.write_file(
"src/test.c",
"""\
start of the file
line 1
new line a
line 2
new line b
line 3
end of the file
""",
)
self.assert_status({"src/test.c": "M"})
commit2 = self.repo.commit("new lines in test.c\n")
self.assert_status_empty()
log.debug("commit2: %s", commit2)
# Update src/new.c in our second draft commit
self.write_file(
"src/new.c",
"""\
this is a brand new file
with some new contents
last line
""",
)
self.hg("add", "src/new.c")
self.assert_status({"src/new.c": "A"})
commit3 = self.repo.commit("add new.c\n")
self.assert_status_empty()
log.debug("commit2: %s", commit3)
# Now modify test.c and new.c in the working copy
self.write_file(
"src/test.c",
"""\
start of the file
line 1
new line abc
testing
line 2
new line b
line 3
end of the file
""",
)
self.write_file(
"src/new.c",
"""\
this is a brand new file
with some enhanced new contents
last line
""",
)
self.assert_status({"src/new.c": "M", "src/test.c": "M"})
old_commits = self.repo.log()
# Run "hg absorb" to fold these changes into their respective commits
out = self.hg("absorb", "-p")
log.debug("absorb output:\n%s" % (out,))
self.assert_status_empty()
# Verify the results are what we expect
new_commits = self.repo.log()
files_changed = self.repo.log(template="{files}")
self.assertEqual(len(old_commits), len(new_commits))
self.assertEqual(old_commits[0], new_commits[0])
self.assertNotEqual(old_commits[1], new_commits[1])
self.assertNotEqual(old_commits[2], new_commits[2])
self.assertEqual(files_changed[0], "readme.txt src/test.c")
self.assertEqual(files_changed[1], "src/test.c")
self.assertEqual(files_changed[2], "src/new.c")
self.assertEqual(
self.read_file("src/test.c"),
"""\
start of the file
line 1
new line abc
testing
line 2
new line b
line 3
end of the file
""",
)
self.assertEqual(
self.read_file("src/new.c"),
"""\
this is a brand new file
with some enhanced new contents
last line
""",
)
| [
"facebook-github-bot@users.noreply.github.com"
] | facebook-github-bot@users.noreply.github.com |
f9fae8b6ca5b4e04fe4f6c0bd5bc1d152c5639c2 | f12770add2dd48d2262612fcb1aaecc4a714b4fb | /django/remoteomd/remoteomd/urls.py | 2e25dece1960a6dc2c60a4198f90325e94284859 | [] | no_license | github188/vending | 3558666b57b37e843e72d194d80f6a8ef5dbc7a4 | ce3e2f1fcbb4d132f7b01a99400d917d7ca174a6 | refs/heads/master | 2021-06-16T09:54:09.214694 | 2017-01-28T01:31:30 | 2017-01-28T01:31:30 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 969 | py | """localomd URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.10/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import url, include
from django.contrib import admin
from localomd.admin import admin_omd
urlpatterns = [
url(r'^admin_omd/', admin_omd.urls),
# url(r'^admin/', admin.site.urls),
# url(r'^web/', include('localomdweb.urls')),
url(r'^', include('remoteomddata.urls', namespace='data')),
]
| [
"jeson.peng@gmail.com"
] | jeson.peng@gmail.com |
83705db78bfb60ae472f3a82f0c3f249d1980e10 | 340ace07b347ffd35852641591ea702e3bf0c947 | /Objects And Classes/06_inventory.py | a91bf8b7642e04bd617ff0643f0b43c17af565ba | [] | no_license | IvayloValkov/Python_Fundamentals | 8de28bf68d32822e0d442469477d2b2c87ba0f9f | e86cb3e516b4baf7027808128fc105f5b86807c9 | refs/heads/main | 2023-02-24T23:39:54.067798 | 2021-01-17T08:54:27 | 2021-01-17T08:54:27 | 330,347,105 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 664 | py | class Inventory:
def __init__(self, capacity):
self.__capacity = capacity
self.items = []
def add_item(self, item):
if len(self.items) < self.__capacity:
self.items.append(item)
else:
return "not enough room in the inventory"
def get_capacity(self):
return self.__capacity
def __repr__(self):
return f"Items: {', '.join(self.items)}.\nCapacity left: {self.__capacity - len(self.items)}"
inventory = Inventory(2)
inventory.add_item("potion")
inventory.add_item("sword")
inventory.add_item("bottle")
print(inventory.get_capacity())
print(inventory)
| [
"noreply@github.com"
] | IvayloValkov.noreply@github.com |
1607b39a03fe85418f0255842a8d9ea674b4af8d | 81b57282ccbc000a416382e3a91645a0938febb5 | /dbupgrade/files.py | 3169b028579926c781120b7ffef85e6454c79c19 | [
"MIT"
] | permissive | srittau/dbupgrade | 03ae61f5bd307c289db9d3069b87c5e5fbba934d | 35c704321a39a5319231dd78b1446e836ef3cadc | refs/heads/main | 2023-08-10T07:07:29.811756 | 2023-08-01T11:10:44 | 2023-08-01T11:10:44 | 120,672,859 | 2 | 0 | MIT | 2023-09-01T08:56:39 | 2018-02-07T21:17:50 | Python | UTF-8 | Python | false | false | 1,099 | py | import os.path
from os import listdir
from typing import List
class FileInfo:
def __init__(
self,
filename: str,
schema: str,
dialect: str,
version: int,
api_level: int,
) -> None:
self.filename = filename
self.schema = schema
self.dialect = dialect
self.version = version
self.api_level = api_level
self.transaction = True
def __lt__(self, other: "FileInfo") -> bool:
if self.schema != other.schema or self.dialect != other.dialect:
raise TypeError("FileInfos must have the same schema and dialect")
return self.version < other.version
def __repr__(self) -> str:
return "FileInfo({}, {}, {}, {}, {})".format(
repr(self.filename),
repr(self.schema),
repr(self.dialect),
self.version,
self.api_level,
)
def collect_sql_files(directory: str) -> List[str]:
return [
os.path.join(directory, fn)
for fn in listdir(directory)
if fn.endswith(".sql")
]
| [
"srittau@rittau.biz"
] | srittau@rittau.biz |
97409bedd791d4aeea1ef231189b2d17114187b0 | 3a8b0cf0484e06f317923f0b6de08cb8e381b38b | /tests/test_routes.py | b746c6ae758c3e4c8c63eea60047645cc5059385 | [
"MIT"
] | permissive | luiscape/hdx-monitor-sql-collect | 844260b1bfb8152f5a741a76307775e09fd81515 | 3fb7d31363227de160f95d259dee2b0b38da1982 | refs/heads/master | 2021-01-10T07:36:55.642759 | 2015-11-19T15:18:53 | 2015-11-19T15:18:53 | 45,499,253 | 0 | 0 | null | 2015-11-19T15:18:53 | 2015-11-03T22:15:02 | Python | UTF-8 | Python | false | false | 6,468 | py | #!/usr/bin/python
# -*- coding: utf-8 -*-
'''
Integration tests for testing the application
HTTP routes and methods.
'''
import json
import flask
import unittest
import app.server as Server
class Generic(unittest.TestCase):
'''
Set of generic tests.
'''
def setUp(self, object):
self.object = object
def keys(self):
'''
Generic test for expected default keys.
'''
keys = ['success', 'message', 'endpoint', 'time', 'ETA', 'computations']
for key in self.object.keys():
self.assertIn(key, keys)
def computations(self):
'''
Generic test for expected computation keys.
'''
computation_keys = ['total', 'completed', 'failed', 'queued', 'progress']
for key in self.object['computations']:
self.assertIn(key, computation_keys)
def types(self):
'''
Generic test for the types of data.
'''
types = {
'online': bool,
'message': str,
'endpoint': str,
'time': str,
'ETA': str,
'computations': {
'total': int,
'completed': int,
'failed': int,
'queued': int,
'progress': float
}
}
for key in self.object.keys():
self.assertIs(type(self.object.get(key)), types[key])
if type(key) == type({}):
for k in key:
self.assertIs(type(self.object['computations'].get(key)), types['computations'][k])
class TestRoutes(unittest.TestCase):
    '''
    Tests for all routes and methods.

    Each endpoint gets a pair of tests: one checking the response decodes
    to a JSON object, and one validating its structure via Generic.
    '''
    def setUp(self):
        self.app = Server.createServer('test', debug=False)
        self.client = self.app.test_client()

    def _get_json(self, path):
        '''GET *path* on the test client and decode the JSON body.'''
        response = self.client.get(path)
        return json.loads(response.data.decode('utf8'))

    def _assert_json_object(self, path):
        '''Assert that GET *path* returns a JSON object (dict).'''
        result = self._get_json(path)
        self.assertIs(type(result), type({}))

    def _assert_generic_object(self, path):
        '''Assert that GET *path* returns a complete Generic-style object.'''
        generic = Generic(self._get_json(path))
        generic.keys()
        generic.types()
        generic.computations()

    #
    # /status
    #
    def test_status_type(self):
        '''
        routes.status: /status endpoint returns a JSON object.
        '''
        self._assert_json_object('/status')

    def test_status_object(self):
        '''
        routes.status: /status endpoint returns a complete object.
        '''
        result = self._get_json('/status')
        keys = ['online', 'version', 'description', 'repository', 'maintainer', 'ckan']
        for key in result.keys():
            self.assertIn(key, keys)

    # #
    # # /users
    # #
    # def test_users_type(self):
    #   '''
    #   routes: /users endpoint returns a JSON object.
    #   '''
    #   response = self.client.get('/users')
    #   result = json.loads(response.data.decode('utf8'))
    #   assert type(result) == type({})
    # def test_users_object(self):
    #   '''
    #   routes: /users endpoint returns a complete object.
    #   '''
    #   response = self.client.get('/users')
    #   result = json.loads(response.data.decode('utf8'))
    #   generic = Generic(result)
    #   generic.keys()
    #   generic.types()
    #   generic.computations()

    #
    # /revisions
    #
    def test_revisions_type(self):
        '''
        routes.revisions: /revisions endpoint returns a JSON object.
        '''
        # BUG FIX: was ``self.assertIs(type(result) == type({}))`` -- a single
        # boolean argument, which raises TypeError instead of asserting.
        self._assert_json_object('/revisions')

    def test_revisions_object(self):
        '''
        routes.revisions: /revisions endpoint returns a complete object.
        '''
        self._assert_generic_object('/revisions')

    #
    # /datasets
    #
    def test_datasets_type(self):
        '''
        routes.datasets: /datasets endpoint returns a JSON object.
        '''
        self._assert_json_object('/datasets')

    def test_datasets_object(self):
        '''
        routes.datasets: /datasets endpoint returns a complete object.
        '''
        self._assert_generic_object('/datasets')

    #
    # /resources
    #
    def test_resources_type(self):
        '''
        routes.resources: /resources endpoint returns a JSON object.
        '''
        self._assert_json_object('/resources')

    def test_resources_object(self):
        '''
        routes.resources: /resources endpoint returns a complete object.
        '''
        self._assert_generic_object('/resources')

    #
    # /countries
    #
    def test_countries_type(self):
        '''
        routes.countries: /countries endpoint returns a JSON object.
        '''
        self._assert_json_object('/countries')

    def test_countries_object(self):
        '''
        routes.countries: /countries endpoint returns a complete object.
        '''
        self._assert_generic_object('/countries')

    #
    # /gallery_items
    #
    def test_gallery_items_type(self):
        '''
        routes:.gallery_items /gallery_items endpoint returns a JSON object.
        '''
        self._assert_json_object('/gallery_items')

    def test_gallery_items_object(self):
        '''
        routes.gallery_items: /gallery_items endpoint returns a complete object.
        '''
        self._assert_generic_object('/gallery_items')

    #
    # /organizations
    #
    def test_organizations_type(self):
        '''
        routes.organizations: /organizations endpoint returns a JSON object.
        '''
        self._assert_json_object('/organizations')

    def test_organizations_object(self):
        '''
        routes.organizations: /organizations endpoint returns a complete object.
        '''
        self._assert_generic_object('/organizations')
| [
"luiscape@gmail.com"
] | luiscape@gmail.com |
f784c7b34a8ee2d40ec04537960612598627a438 | de24f83a5e3768a2638ebcf13cbe717e75740168 | /moodledata/vpl_data/32/usersdata/75/10599/submittedfiles/questao2_av1.py | 5570de3c54d4ac0fa8e7dbd035738c5856d048e5 | [] | no_license | rafaelperazzo/programacao-web | 95643423a35c44613b0f64bed05bd34780fe2436 | 170dd5440afb9ee68a973f3de13a99aa4c735d79 | refs/heads/master | 2021-01-12T14:06:25.773146 | 2017-12-22T16:05:45 | 2017-12-22T16:05:45 | 69,566,344 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 997 | py | # -*- coding: utf-8 -*-
from __future__ import division


def classifica(apostados, sorteados):
    """Classify a lottery bet by position-wise matches.

    :param apostados: the six numbers the player chose (in order).
    :param sorteados: the six numbers drawn (in order).
    :return: 'terno' (3 matches), 'quadra' (4), 'quina' (5), 'seno' (6)
             or 'azar' for fewer than three matches.
    """
    cont = 0
    # BUG FIX: the original compared ``b == na`` (second bet against the
    # FIRST drawn number); each position is now compared to its own pair.
    for jogado, sorteado in zip(apostados, sorteados):
        if jogado == sorteado:
            cont = cont + 1
    if cont == 3:
        return 'terno'
    if cont == 4:
        return 'quadra'
    if cont == 5:
        return 'quina'
    if cont == 6:
        # NOTE(review): 'seno' is likely a typo for 'sena' (the Mega-Sena
        # term); kept as-is to preserve the program's printed output.
        return 'seno'
    return 'azar'


if __name__ == '__main__':
    a = int(input('Digite o primeiro número:'))
    b = int(input('Digite o segundo número:'))
    c = int(input('Digite o terceiro número:'))
    d = int(input('Digite o quarto número:'))
    e = int(input('Digite o quinto número:'))
    f = int(input('Digite o sexto número:'))
    na = int(input('Digite o primeiro número sorteado:'))
    nb = int(input('Digite o segundo número sorteado:'))
    nc = int(input('Digite o terceiro número sorteado:'))
    nd = int(input('Digite o quarto número sorteado:'))
    ne = int(input('Digite o quinto número sorteado:'))
    nf = int(input('Digite o sexto número sorteado:'))
    print(classifica([a, b, c, d, e, f], [na, nb, nc, nd, ne, nf]))
| [
"rafael.mota@ufca.edu.br"
] | rafael.mota@ufca.edu.br |
6de0665ab973cf33ab27a8950f3dabcf7953f852 | 3dbbde1aa96fc09e9aab885cf3713e86f3572dec | /gs-vtoi/bin/hachoir-metadata-gtk | 9ef9a81ee82a3aca600d74de4023a47b328d39de | [] | no_license | bopopescu/gs-vtoi | 6223d6dbf47e89292bd0e79e24e5664450e28cf6 | f12b802976d0020179d1b40b0b5e3af5b72d55cc | refs/heads/master | 2022-11-24T16:31:36.804869 | 2018-07-31T08:30:56 | 2018-07-31T08:30:56 | 282,551,982 | 0 | 0 | null | 2020-07-26T01:09:10 | 2020-07-26T01:09:09 | null | UTF-8 | Python | false | false | 3,649 | #!/Users/Sang/OneDrive/Developments/gs-vtoi/gs-vtoi/bin/python
import sys, pygtk, os
pygtk.require('2.0')
import gtk
from hachoir_core.cmd_line import unicodeFilename
from hachoir_parser import createParser
from hachoir_metadata import extractMetadata
from hachoir_metadata.metadata import MultipleMetadata
class Gui:
    '''PyGTK viewer: pick files and display their hachoir metadata.'''
    def __init__(self):
        '''Build the main window: a Select button, a file combo box and a
        table that will hold the metadata of the selected file.'''
        self.main_window = gtk.Window(gtk.WINDOW_TOPLEVEL)
        self.main_window.set_border_width(5)
        self.main_window.connect("destroy", self._destroy)
        self.main_vbox = gtk.VBox()
        self.select_hbox = gtk.HBox()
        self.select_button = gtk.Button("Select")
        self.select_button.connect("clicked", self._select_clicked)
        self.select_hbox.pack_start(self.select_button, False)
        self.file_combo = gtk.combo_box_new_text()
        self.file_combo.connect("changed", self._file_combo_changed)
        self.select_hbox.pack_start(self.file_combo)
        self.main_vbox.pack_start(self.select_hbox, False)
        # Placeholder table until a file is selected.
        self.metadata_table = gtk.Table(1, 1)
        self.metadata_table.attach(gtk.Label("Select a file to view metadata information..."), 0, 1, 0, 1)
        self.main_vbox.pack_start(self.metadata_table)
        self.main_window.add(self.main_vbox)
        self.main_window.show_all()
    def add_file(self, filename):
        '''Append *filename* to the file combo box.'''
        self.file_combo.append_text(filename)
    def _select_clicked(self, widget):
        '''"Select" button handler: open a file chooser and add the pick.'''
        file_chooser = gtk.FileChooserDialog("Ouvrir..", None,
            gtk.FILE_CHOOSER_ACTION_OPEN,
            (gtk.STOCK_CANCEL, gtk.RESPONSE_CANCEL,
            gtk.STOCK_OPEN, gtk.RESPONSE_OK))
        file_chooser.set_default_response(gtk.RESPONSE_OK)
        file_chooser.show()
        reponse = file_chooser.run()
        if reponse == gtk.RESPONSE_OK:
            selected_file = file_chooser.get_filename()
            self.add_file(selected_file)
        file_chooser.destroy()
    def _file_combo_changed(self, widget):
        '''Combo-box handler: parse the chosen file with hachoir and
        rebuild the metadata table with one (title, value) row per item.'''
        self.main_vbox.remove(self.metadata_table)
        real_filename = self.file_combo.get_active_text()
        filename = unicodeFilename(real_filename)
        parser = createParser(filename, real_filename=real_filename)
        metadata = extractMetadata(parser)
        self.metadata_table = gtk.Table(1, 2)
        self.main_vbox.pack_start(self.metadata_table)
        if metadata is None:
            # extractMetadata() found no usable parser for this file.
            self.metadata_table.attach(gtk.Label("Unknown file format"), 0, 1, 0, 1)
        else:
            total = 1
            for data in sorted(metadata):
                if not data.values:
                    continue
                title = data.description
                for item in data.values:
                    # Grow the table one row at a time as items are added.
                    self.metadata_table.resize(total, 2)
                    value = item.text
                    self.metadata_table.attach(gtk.Label(title + ":"), 0, 1, total-1, total)
                    self.metadata_table.attach(gtk.Label(value), 1, 2, total-1, total)
                    total += 1
        self.metadata_table.show_all()
    def _destroy(self, widget, data=None):
        '''Window destroy handler: quit the GTK main loop.'''
        gtk.main_quit()
    def main(self):
        '''Preload files/directories given on the command line, then run
        the GTK main loop.  Directories are scanned one level deep.'''
        has_file = False
        for arg in sys.argv[1:]:
            if os.path.isdir(arg):
                for file in os.listdir(arg):
                    path = os.path.join(arg, file)
                    if os.path.isfile(path):
                        self.add_file(path)
                        has_file = True
            elif os.path.isfile(arg):
                self.add_file(arg)
                has_file = True
        if has_file:
            # Selecting the first entry triggers _file_combo_changed.
            self.file_combo.set_active(0)
        gtk.main()
if __name__ == "__main__":
    # Script entry point: build the GUI and hand control to GTK.
    app = Gui()
    app.main()
| [
"sy0414@gmail.com"
] | sy0414@gmail.com | |
699d3bfef04aed9ea2fbddc55b9a01bb4b3afd83 | ad4c2aa0398406ccb7e70562560e75fa283ffa1a | /merge-two-binary-trees/merge-two-binary-trees.py | 081371ccd42e4224ccd32f00bbf5ef910c431f4d | [
"Apache-2.0"
] | permissive | kmgowda/kmg-leetcode-python | 427d58f1750735618dfd51936d33240df5ba9ace | 4d32e110ac33563a8bde3fd3200d5804db354d95 | refs/heads/main | 2023-08-22T06:59:43.141131 | 2021-10-16T14:04:32 | 2021-10-16T14:04:32 | 417,841,590 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 880 | py | // https://leetcode.com/problems/merge-two-binary-trees
# Definition for a binary tree node.
# class TreeNode(object):
# def __init__(self, x):
# self.val = x
# self.left = None
# self.right = None
class Solution(object):
    def mergeTrees(self, t1, t2):
        """
        Merge two binary trees by summing values of overlapping nodes.

        :type t1: TreeNode
        :type t2: TreeNode
        :rtype: TreeNode
        """
        # Both subtrees absent: nothing to merge at this position.
        if t1 is None and t2 is None:
            return None
        # At least one node exists: sum whichever values are present.
        merged = TreeNode(0)
        if t1 is not None:
            merged.val += t1.val
        if t2 is not None:
            merged.val += t2.val
        # Recurse into children, treating a missing node's children as None.
        merged.left = self.mergeTrees(t1.left if t1 else None,
                                      t2.left if t2 else None)
        merged.right = self.mergeTrees(t1.right if t1 else None,
                                       t2.right if t2 else None)
        return merged
| [
"keshava.gowda@gmail.com"
] | keshava.gowda@gmail.com |
623081c6b4a86024a7ab4cf1d69a7e46d21600d1 | f131d940b96452441602e8bd687a55d62ea22912 | /models/unet.py | b4e6f1c0d05195078e8fc8f2d43815bfada6cbb9 | [] | no_license | aguilarmg/cs221-final-project | a66e3b9e037de59f83ef5a950106a46430a808c3 | 9506a50614ca1619dc4338e7e2afa02e99becec1 | refs/heads/master | 2020-06-01T07:40:34.281510 | 2020-03-28T12:39:56 | 2020-03-28T12:39:56 | 190,702,941 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,967 | py | import torch
import torch.nn as nn
from torch.nn import Conv2d as Conv2D
import torch.nn.init as init
import torch.nn.functional as F
import numpy
from torch.nn import Upsample
class Up(nn.Module):
def __init__(self, channel_in, channel_out):
super(Up, self).__init__()
self.upsample = nn.Upsample(scale_factor=2, mode='bilinear')
self.conv = nn.Sequential(
Conv2D(channel_in, channel_out, kernel_size = 3, padding = 1),
nn.BatchNorm2d(channel_out),
nn.ReLU(inplace=True)
)
def forward(self, x1, x2):
# Input size - Batch_Size X Channel X Height of Activation Map X Width of Activation Map
# Upsample using bilinear mode and scale it to twice its size
x1 = self.upsample(x1)
# in 4D array - matching the last two in case of 5D it will take
# last three dimensions
difference_in_X = x1.size()[2] - x2.size()[2]
difference_in_Y = x1.size()[3] - x2.size()[3]
# Padding it with the required value
x2 = F.pad(x2, (difference_in_X // 2, int(difference_in_X / 2),
difference_in_Y // 2, int(difference_in_Y / 2)))
# concat on channel axis
x = torch.cat([x2, x1], dim=1)
# Use convolution
x = self.conv(x)
return x
class Down(nn.Module):
def __init__(self, channel_in, channel_out):
super(Down, self).__init__()
self.conv = nn.Sequential(
Conv2D(channel_in, channel_out, kernel_size = 3, padding = 1),
nn.BatchNorm2d(channel_out),
nn.ReLU(inplace=True)
)
def forward(self, x):
# Input size - Batch_Size X Channel X Height of Activation Map X Width of Activation Map
# Downsample First
x = F.max_pool2d(x,2)
# Use convolution
x = self.conv(x)
return x
class UNet(nn.Module):
def __init__(self, channel_in, classes):
super(UNet, self).__init__()
self.input_conv = self.conv = nn.Sequential(
Conv2D(channel_in, 8, kernel_size = 3, padding = 1),
nn.BatchNorm2d(8),
nn.ReLU(inplace=True)
)
self.down1 = Down(8, 16)
self.down2 = Down(16, 32)
self.down3 = Down(32, 32)
self.up1 = Up(64, 16)
self.up2 = Up(32, 8)
self.up3 = Up(16, 4)
self.output_conv = nn.Conv2d(4, classes, kernel_size = 1)
def forward(self, x):
x1 = self.input_conv(x)
x2 = self.down1(x1)
x3 = self.down2(x2)
x4 = self.down3(x3)
x = self.up1(x4, x3)
x = self.up2(x, x2)
x = self.up3(x, x1)
output = self.output_conv(x)
# print(output.shape)
m = nn.Softmax2d()
return m(output)
def weights_init(m):
if isinstance(m, nn.Conv2d):
init.xavier_uniform(m.weight, gain=numpy.sqrt(2.0))
init.constant(m.bias, 0.1)
| [
"google-dl-platform@googlegroups.com"
] | google-dl-platform@googlegroups.com |
fdc1f3bb98810086e2d180f43f57c1b891d6fd4a | 9743d5fd24822f79c156ad112229e25adb9ed6f6 | /xai/brain/wordbase/nouns/_cuffs.py | e56d0f9e3b98522f7ec4b6b644bb19718b341852 | [
"MIT"
] | permissive | cash2one/xai | de7adad1758f50dd6786bf0111e71a903f039b64 | e76f12c9f4dcf3ac1c7c08b0cc8844c0b0a104b6 | refs/heads/master | 2021-01-19T12:33:54.964379 | 2017-01-28T02:00:50 | 2017-01-28T02:00:50 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 224 | py |
from xai.brain.wordbase.nouns._cuff import _CUFF
#calss header
class _CUFFS(_CUFF, ):
def __init__(self,):
_CUFF.__init__(self)
self.name = "CUFFS"
self.specie = 'nouns'
self.basic = "cuff"
self.jsondata = {}
| [
"xingwang1991@gmail.com"
] | xingwang1991@gmail.com |
5b81ab2428a480bcbaceb2e72a457fdc4d31ac41 | 63b0fed007d152fe5e96640b844081c07ca20a11 | /ABC/ABC001~ABC099/ABC033/C.py | 0a5b493b4fb1a0d3b7c6093c373952bae7e20536 | [] | no_license | Nikkuniku/AtcoderProgramming | 8ff54541c8e65d0c93ce42f3a98aec061adf2f05 | fbaf7b40084c52e35c803b6b03346f2a06fb5367 | refs/heads/master | 2023-08-21T10:20:43.520468 | 2023-08-12T09:53:07 | 2023-08-12T09:53:07 | 254,373,698 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 307 | py | s=input().split('+')
if len(s)==1:
ans=1
for j in range(len(s[0])):
if s[0][j]!='*':
ans*=int(s[0][j])
if ans!=0:
ans=1
else:
ans=0
print(ans)
exit(0)
ans=0
for i in range(len(s)):
if eval(s[i])==0:
continue
ans+=1
print(ans) | [
"ymdysk911@gmail.com"
] | ymdysk911@gmail.com |
76a0a0029a7548dcbbc65c90816c31168f43dbd5 | 4b4c6b19805d52dab9cc232e343a45785fb33c5a | /mininews/managers.py | 12bd3bfe8ddd7a916fcc8dfdcee9823b4eb076f4 | [
"MIT"
] | permissive | marcinn/django-mininews | d5866c11a7947ec5bc724dc1d61ce2b49b2e0177 | 5868fd5978ab0e64a68df576b8f14b4d4caf1bb5 | refs/heads/master | 2023-03-31T11:13:56.729693 | 2021-03-24T00:54:20 | 2021-03-24T00:54:20 | 350,901,546 | 0 | 0 | MIT | 2021-03-24T00:50:16 | 2021-03-24T00:50:16 | null | UTF-8 | Python | false | false | 387 | py | from django.db.models.query import QuerySet
from django.db.models import Q
import datetime
class MininewsQuerySet(QuerySet):
    """QuerySet helpers shared by mininews models."""

    def live(self, statuses=('published',)):
        """Return objects whose ``status`` is in *statuses* and whose
        optional start/end date window contains today.

        BUG FIX: the default was the mutable list ``['published']`` (a
        classic shared-default pitfall); an immutable tuple behaves the
        same for ``status__in`` lookups.
        """
        today = datetime.date.today()
        return self.filter(status__in=statuses).\
            filter(Q(start__lte=today) | Q(start__isnull=True)).\
            filter(Q(end__gte=today) | Q(end__isnull=True))
| [
"richard@arbee-design.co.uk"
] | richard@arbee-design.co.uk |
336506c6a65cfee6893992a7afd6b8650db9d2c4 | 3aef4825c5f2366f2e551cdfa54b88c034b0b4f4 | /tutorials/2_tensorflow_old/sklearnTUT/sk10_cross_validation3.py | 5818f6b03c21bbb84be7ede221d50d2aee9605d5 | [
"MIT"
] | permissive | wull566/tensorflow_demo | 4a65cbe1bdda7430ab1c3883889501a62258d8a6 | c2c45050867cb056b8193eb53466d26b80b0ec13 | refs/heads/master | 2020-04-06T17:34:05.912164 | 2018-11-15T07:41:47 | 2018-11-15T07:41:48 | 157,665,187 | 3 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,155 | py | # View more 3_python 1_tensorflow_new tutorial on my Youtube and Youku channel!!!
# Youtube video tutorial: https://www.youtube.com/channel/UCdyjiB5H8Pu7aDTNVXTTpcg
# Youku video tutorial: http://i.youku.com/pythontutorial
"""
Please note, this code is only for 3_python 3+. If you are using 3_python 2+, please modify the code accordingly.
"""
from __future__ import print_function
# NOTE(review): sklearn.learning_curve was removed in modern scikit-learn;
# validation_curve now lives in sklearn.model_selection, and the scoring
# string would be 'neg_mean_squared_error'.  Confirm the installed version.
from sklearn.learning_curve import validation_curve
from sklearn.datasets import load_digits
from sklearn.svm import SVC
import matplotlib.pyplot as plt
import numpy as np
# Hand-written digits dataset: X holds pixel features, y the digit labels.
digits = load_digits()
X = digits.data
y = digits.target
# Sweep the SVC 'gamma' hyper-parameter over a log-spaced grid.
param_range = np.logspace(-6, -2.3, 5)
train_loss, test_loss = validation_curve(
    SVC(), X, y, param_name='gamma', param_range=param_range, cv=10,
    scoring='mean_squared_error')
# The scores come back negated (presumably the usual sklearn
# greater-is-better convention), so negate to plot positive losses,
# averaged over the CV folds.
train_loss_mean = -np.mean(train_loss, axis=1)
test_loss_mean = -np.mean(test_loss, axis=1)
# Plot training vs cross-validation loss against gamma.
plt.plot(param_range, train_loss_mean, 'o-', color="r",
         label="Training")
plt.plot(param_range, test_loss_mean, 'o-', color="g",
         label="Cross-validation")
plt.xlabel("gamma")
plt.ylabel("Loss")
plt.legend(loc="best")
plt.show() | [
"vicleo566@163.com"
] | vicleo566@163.com |
f0fccbe1f1d129a6d95be4858240a525c0c23db7 | 24fe1f54fee3a3df952ca26cce839cc18124357a | /servicegraph/lib/python2.7/site-packages/acimodel-4.0_3d-py2.7.egg/cobra/modelimpl/aaa/aretp.py | 2ca1f5865928d1d54330991f06f2bb16cd4862ad | [] | no_license | aperiyed/servicegraph-cloudcenter | 4b8dc9e776f6814cf07fe966fbd4a3481d0f45ff | 9eb7975f2f6835e1c0528563a771526896306392 | refs/heads/master | 2023-05-10T17:27:18.022381 | 2020-01-20T09:18:28 | 2020-01-20T09:18:28 | 235,065,676 | 0 | 0 | null | 2023-05-01T21:19:14 | 2020-01-20T09:36:37 | Python | UTF-8 | Python | false | false | 5,007 | py | # coding=UTF-8
# **********************************************************************
# Copyright (c) 2013-2019 Cisco Systems, Inc. All rights reserved
# written by zen warriors, do not modify!
# **********************************************************************
from cobra.mit.meta import ClassMeta
from cobra.mit.meta import StatsClassMeta
from cobra.mit.meta import CounterMeta
from cobra.mit.meta import PropMeta
from cobra.mit.meta import Category
from cobra.mit.meta import SourceRelationMeta
from cobra.mit.meta import NamedSourceRelationMeta
from cobra.mit.meta import TargetRelationMeta
from cobra.mit.meta import DeploymentPathMeta, DeploymentCategory
from cobra.model.category import MoCategory, PropCategory, CounterCategory
from cobra.mit.mo import Mo
# ##################################################
class ARetP(Mo):
    """Abstract record-retention-policy managed object.

    Auto-generated model class (see file header: "do not modify").
    Concrete subclasses registered below: aaa.CtrlrRetP and aaa.SwRetP.
    """
    meta = ClassMeta("cobra.model.aaa.ARetP")
    meta.isAbstract = True
    meta.moClassName = "aaaARetP"
    meta.moClassName = "aaaARetP"  # duplicate assignment from the generator; harmless
    meta.rnFormat = ""
    meta.category = MoCategory.REGULAR
    meta.label = "Record Retention Policy"
    meta.writeAccessMask = 0x1
    meta.readAccessMask = 0x800000000000001
    meta.isDomainable = False
    meta.isReadOnly = False
    meta.isConfigurable = True
    meta.isDeletable = True
    meta.isContextRoot = False
    meta.childClasses.add("cobra.model.fault.Delegate")
    meta.childNamesAndRnPrefix.append(("cobra.model.fault.Delegate", "fd-"))
    meta.superClasses.add("cobra.model.naming.NamedObject")
    meta.superClasses.add("cobra.model.pol.Obj")
    meta.superClasses.add("cobra.model.pol.Def")
    meta.superClasses.add("cobra.model.condition.RetP")
    meta.concreteSubClasses.add("cobra.model.aaa.CtrlrRetP")
    meta.concreteSubClasses.add("cobra.model.aaa.SwRetP")
    meta.rnPrefixes = [
    ]
    # Property metadata: one PropMeta per managed-object property.
    prop = PropMeta("str", "childAction", "childAction", 4, PropCategory.CHILD_ACTION)
    prop.label = "None"
    prop.isImplicit = True
    prop.isAdmin = True
    prop._addConstant("deleteAll", "deleteall", 16384)
    prop._addConstant("deleteNonPresent", "deletenonpresent", 8192)
    prop._addConstant("ignore", "ignore", 4096)
    meta.props.add("childAction", prop)
    prop = PropMeta("str", "descr", "descr", 5579, PropCategory.REGULAR)
    prop.label = "Description"
    prop.isConfig = True
    prop.isAdmin = True
    prop.range = [(0, 128)]
    prop.regex = ['[a-zA-Z0-9\\!#$%()*,-./:;@ _{|}~?&+]+']
    meta.props.add("descr", prop)
    prop = PropMeta("str", "dn", "dn", 1, PropCategory.DN)
    prop.label = "None"
    prop.isDn = True
    prop.isImplicit = True
    prop.isAdmin = True
    prop.isCreateOnly = True
    meta.props.add("dn", prop)
    prop = PropMeta("str", "maxSize", "maxSize", 72, PropCategory.REGULAR)
    prop.label = "None"
    prop.isConfig = True
    prop.isAdmin = True
    meta.props.add("maxSize", prop)
    prop = PropMeta("str", "name", "name", 4991, PropCategory.REGULAR)
    prop.label = "Name"
    prop.isConfig = True
    prop.isAdmin = True
    prop.range = [(0, 64)]
    prop.regex = ['[a-zA-Z0-9_.:-]+']
    meta.props.add("name", prop)
    prop = PropMeta("str", "nameAlias", "nameAlias", 28417, PropCategory.REGULAR)
    prop.label = "Name alias"
    prop.isConfig = True
    prop.isAdmin = True
    prop.range = [(0, 63)]
    prop.regex = ['[a-zA-Z0-9_.-]+']
    meta.props.add("nameAlias", prop)
    prop = PropMeta("str", "ownerKey", "ownerKey", 15230, PropCategory.REGULAR)
    prop.label = "None"
    prop.isConfig = True
    prop.isAdmin = True
    prop.range = [(0, 128)]
    prop.regex = ['[a-zA-Z0-9\\!#$%()*,-./:;@ _{|}~?&+]+']
    meta.props.add("ownerKey", prop)
    prop = PropMeta("str", "ownerTag", "ownerTag", 15231, PropCategory.REGULAR)
    prop.label = "None"
    prop.isConfig = True
    prop.isAdmin = True
    prop.range = [(0, 64)]
    prop.regex = ['[a-zA-Z0-9\\!#$%()*,-./:;@ _{|}~?&+]+']
    meta.props.add("ownerTag", prop)
    prop = PropMeta("str", "purgeWin", "purgeWin", 73, PropCategory.REGULAR)
    prop.label = "None"
    prop.isConfig = True
    prop.isAdmin = True
    prop.range = [(100, 1000)]
    prop.defaultValue = 250
    prop.defaultValueStr = "250"
    meta.props.add("purgeWin", prop)
    prop = PropMeta("str", "rn", "rn", 2, PropCategory.RN)
    prop.label = "None"
    prop.isRn = True
    prop.isImplicit = True
    prop.isAdmin = True
    prop.isCreateOnly = True
    meta.props.add("rn", prop)
    prop = PropMeta("str", "status", "status", 3, PropCategory.STATUS)
    prop.label = "None"
    prop.isImplicit = True
    prop.isAdmin = True
    prop._addConstant("created", "created", 2)
    prop._addConstant("deleted", "deleted", 8)
    prop._addConstant("modified", "modified", 4)
    meta.props.add("status", prop)
    def __init__(self, parentMoOrDn, markDirty=True, **creationProps):
        # Abstract class: no naming properties contribute to the RN.
        namingVals = []
        Mo.__init__(self, parentMoOrDn, markDirty, *namingVals, **creationProps)
# End of package file
# ##################################################
| [
"rrishike@cisco.com"
] | rrishike@cisco.com |
1d83ba7c7e40243158ac207a36dd6c74cb8eb0f3 | d85a26d336fd127fd002e661ac6442bd915bf3ea | /tests/test_settings.py | c8313c5a17705b52a1c26f22481ce03cd2363cd5 | [
"Apache-2.0"
] | permissive | uduse/bonobo | e2f1addee253fc4798ef854dc89a39c7a4deaa36 | 4247f17d381161dfe26a9d294dfa33594338cf38 | refs/heads/master | 2021-08-28T06:42:10.700541 | 2017-12-11T12:50:36 | 2017-12-11T12:50:36 | 113,858,232 | 1 | 0 | null | 2017-12-11T12:50:37 | 2017-12-11T12:47:37 | Python | UTF-8 | Python | false | false | 1,585 | py | import logging
from os import environ
from unittest.mock import patch
import pytest
from bonobo import settings
TEST_SETTING = 'TEST_SETTING'
def test_to_bool():
    """to_bool maps common false-y strings to False and truthy ones to True."""
    falsy_inputs = ('', 'FALSE', 'NO', '0')
    truthy_inputs = ('yup', 'True', 'yes', '1')
    for value in falsy_inputs:
        assert not settings.to_bool(value)
    for value in truthy_inputs:
        assert settings.to_bool(value)
def test_setting():
    """A Setting caches its value until clear(); defaults win over env."""
    s = settings.Setting(TEST_SETTING)
    assert s.get() is None
    with patch.dict(environ, {TEST_SETTING: 'hello'}):
        # Cached value survives the env change until clear() is called.
        assert s.get() is None
        s.clear()
        assert s.get() == 'hello'

    s = settings.Setting(TEST_SETTING, default='nope')
    # BUG FIX: was ``assert s.get() is 'nope'`` -- identity comparison with
    # a str literal only works via CPython interning and raises
    # SyntaxWarning on 3.8+; equality is what's actually meant.
    assert s.get() == 'nope'
    with patch.dict(environ, {TEST_SETTING: 'hello'}):
        assert s.get() == 'nope'
        s.clear()
        assert s.get() == 'hello'
def test_default_settings():
    """With a clean environment the well-known settings take their defaults."""
    settings.clear_all()
    assert settings.DEBUG.get() == False
    assert settings.PROFILE.get() == False
    assert settings.QUIET.get() == False
    # NOTE(review): logging._checkLevel is a private stdlib API -- it may
    # disappear in a future Python release.
    assert settings.LOGGING_LEVEL.get() == logging._checkLevel('INFO')
    with patch.dict(environ, {'DEBUG': 't'}):
        # clear_all() forces re-reading the (patched) environment.
        settings.clear_all()
        assert settings.LOGGING_LEVEL.get() == logging._checkLevel('DEBUG')
    settings.clear_all()
def test_check():
    """check() passes by default but rejects DEBUG+PROFILE+QUIET together."""
    settings.check()
    with patch.dict(environ, {'DEBUG': 't', 'PROFILE': 't', 'QUIET': 't'}):
        # Re-read the patched environment, then expect the conflict to raise.
        settings.clear_all()
        with pytest.raises(RuntimeError):
            settings.check()
    settings.clear_all()
| [
"romain@dorgueil.net"
] | romain@dorgueil.net |
9fec051f4266d18f40cc3b954c62022c834e4c02 | 82f67ea9fbf21d21f26b611cea5ad6047617e1ab | /cvs-projects/build_scripts/utils/zenlib.py | ec69bfbcea2816b1674a0efbd0ddf0cde88ffa6d | [] | no_license | metalsky/mvst | 741a0e8ddc1c43ca28c7b26dc5720e965a70b764 | e1deb593f47c28e0142e66d11ca47fa4af247ed8 | refs/heads/master | 2020-03-19T09:42:59.923445 | 2018-03-02T00:38:58 | 2018-03-02T00:38:58 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,048 | py | #!/usr/bin/python
#Normal
import sys, traceback, syslog
from types import *

LOG_FACILITY = syslog.LOG_LOCAL0

#Pyro
try:
    import Pyro.core
except ImportError:
    # BUG FIX: this handler used to call logError(), which is defined
    # further down the module, so an import failure raised NameError
    # instead of being logged.  Log via syslog directly and re-raise,
    # since nothing below works without Pyro anyway.
    syslog.openlog('ZENLIB', 0, LOG_FACILITY)
    syslog.syslog(syslog.LOG_ERR, traceback.format_exc())
    syslog.closelog()
    raise

PYROSERVER = 'PYROLOC://overlord:7769/zenbuild'
Pyro.core.initClient(0)
Pyro.config.PYRO_PRINT_REMOTE_TRACEBACK = 1
def logError(msg):
    """Write *msg* to the local syslog (LOCAL0 facility) at ERR level."""
    syslog.openlog('ZENLIB', 0, LOG_FACILITY)
    syslog.syslog(syslog.LOG_ERR, msg)
    syslog.closelog()
def _invoke(method, *args):
    """Call *method* on the remote zenbuild daemon with *args*.

    Failures are logged and swallowed: build-status reporting is
    best-effort and must never break the calling build script.
    """
    try:
        daemon = Pyro.core.getProxyForURI(PYROSERVER)
        getattr(daemon, method)(*args)
    # bare except kept on purpose: the original helpers swallowed
    # absolutely everything, and callers rely on them never raising
    except:
        logError(traceback.format_exc())

def regBuild(buildtag, type, ETA=None):
    """Register a new build identified by *buildtag*."""
    _invoke('regBuild', buildtag, type, ETA)

def updateBuild(buildtag, stage=None, stagePercentComplete=None, percentComplete=None, ETA=None, status=None):
    """Update progress fields for the build *buildtag*."""
    _invoke('updateBuild', buildtag, stage, stagePercentComplete, percentComplete, ETA, status)

def regSubBuild(buildtag, hostname, task, ETA=None):
    """Register a sub-build of *buildtag* running *task* on *hostname*."""
    _invoke('regSubBuild', buildtag, hostname, task, ETA)

def updateSubBuild(device, stage=None, stagePercentComplete=None, percentComplete=None, ETA=None, status=None):
    """Update progress fields for the sub-build running on *device*."""
    _invoke('updateSubBuild', device, stage, stagePercentComplete, percentComplete, ETA, status)

def completeBuild(buildtag):
    """Mark the build *buildtag* as complete."""
    _invoke('completeBuild', buildtag)

def completeSubBuild(device):
    """Mark the sub-build on *device* as complete."""
    _invoke('completeSubBuild', device)
def test():
    """Smoke-test the helpers against a live daemon (has side effects)."""
    regBuild('test666','test build')
    regBuild('foundation_test_1234','foundation')
    regSubBuild('test666', 'node-24', 'arm_iwmmxt_le target apps')
    updateBuild('test666',stage="Build Prep")
    # The helpers always return None; printing shows completion ordering.
    print completeSubBuild('node-24')
    print completeBuild('test666')
def main():
    """Run the smoke test and exit."""
    test()
    # NOTE(review): exits non-zero even on success -- confirm intended.
    sys.exit(1)
if __name__=="__main__":
    main()
| [
"njka.github@gmail.com"
] | njka.github@gmail.com |
89412e55408ee7a9d9b91b23e4891741e3ca1a77 | 9d1238fb0e4a395d49a7b8ff745f21476c9d9c00 | /framework/Tests/PAS/PAS/SecuritySettings/MultiplePasswordCheckouts/test_system_level_password_checkout_blank.py | a7f3b714415f8fbea980a615498a546f84534f42 | [] | no_license | jaspalsingh92/TestAutomation-1 | a48ee1d3b73386f1bf8f53328a5b55444238e054 | e631c67255b10f150e0012991fb1474ede904417 | refs/heads/master | 2023-04-18T14:52:08.836221 | 2021-04-07T12:01:07 | 2021-04-07T12:01:07 | 357,175,690 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,419 | py | import pytest
import logging
from Shared.API.infrastructure import ResourceManager
from Shared.API.redrock import RedrockController
logger = logging.getLogger("test")
lock_tenant = True
@pytest.mark.api
@pytest.mark.pas
def test_system_level_password_checkout_blank(core_session, pas_windows_setup, users_and_roles,
update_tenant_multiple_checkouts):
"""
C1548 : System level password checkout set to '--'
:param core_session: Authenticated Centrify Session
:param pas_windows_setup: Added and return Windows system and Account associated to it.
:param users_and_roles: Gets user and role on demand.
"""
system_id, account_id, sys_info, connector_id, user_password = pas_windows_setup()
user_session = users_and_roles.get_session_for_user("Privileged Access Service Administrator")
pas_admin = user_session.__dict__['auth_details']
# Setting 'Allow multiple password checkouts' policy to Uncheck on Global Security Setting page
result, success = update_tenant_multiple_checkouts(core_session, False)
assert success, f"Not able to disable 'Allow multiple password checkouts' policy on Global " \
f"Security Setting page. API response result: {result}."
logger.info(f"'Allow multiple password checkouts' policy Unchecked on Global Security Setting page")
# Assigning 'Checkout' permission to user for Account.
account_result, account_success = ResourceManager.assign_account_permissions(core_session, 'Naked',
pas_admin['User'],
pas_admin['UserId'], 'User',
account_id)
assert account_success, f"Assign Checkout permission to account : {account_id} failed. " \
f"API response result: {account_result}"
logger.info(f"'Checkout' permission given to user: {pas_admin['User']} for Account:{account_id}.")
# Checkout account while logged in as Cloud Admin
admin_checkout_result, admin_checkout_success = ResourceManager.check_out_password(core_session, 1, account_id)
assert admin_checkout_result['Password'] == user_password, f"Not able to checkout Account : {account_id}. API " \
f"response result: {admin_checkout_result} "
logger.info(f"Account Checkout successful for Account :{account_id}.")
# Checkout account while logged in as Privileged Access Service Administrator
user_checkout_result, user_checkout_success = ResourceManager.check_out_password(user_session, 1, account_id)
assert not user_checkout_success, f"Checkout Account successful : {account_id} : API response " \
f"result: {user_checkout_result}"
logger.info(f"Not able to checkout Account : {account_id}")
activity = RedrockController.get_account_activity(user_session, account_id)[0]['Detail']
assert 'Multiple checkouts not allowed' in activity, f"Checkout Failed activity not found " \
f"for account : {account_id}. API response result: {activity} "
logger.info(f"Checkout Failed activity found in Activity for account : {account_id}")
| [
"singh.jaspal92@gmail.com"
] | singh.jaspal92@gmail.com |
49ae7530ff0772b04bb33a96550692f5f9106cbf | c9fde4576216a22e8d5711bbe97adda1aafa2f08 | /model-optimizer/mo/front/common/partial_infer/reduce.py | 74fdd40e9a935d5f5158baaf54e31969ede37593 | [
"Apache-2.0"
] | permissive | dliang0406/dldt | c703d6a837de3f996528fc8a9543f9530b23342c | d9b10abcebafe8b10ba81e09e433de7a366c072c | refs/heads/2018 | 2020-04-03T08:24:47.723353 | 2018-10-29T07:58:05 | 2018-10-29T07:58:05 | 155,132,108 | 3 | 1 | Apache-2.0 | 2019-10-10T08:39:46 | 2018-10-29T01:03:54 | C++ | UTF-8 | Python | false | false | 1,410 | py | """
Copyright (c) 2018 Intel Corporation
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import logging as log
import numpy as np
def tf_reduce_infer(node, op=None):
    """Infer the output shape (and, when possible, the value) of a TF reduce op.

    Reads the data shape from input port 0 and the reduction axes from the
    constant on input port 1.  When ``op`` is given and the input value is a
    known constant, the reduction is folded into an output value as well.
    """
    in_shape = node.in_node(0).shape
    log.debug("input_shape: {}".format(in_shape))
    reduce_axis = node.in_node(1).value

    # Nothing to infer unless both shape and axes are known constants of the
    # expected dimensionality.
    if in_shape is None or reduce_axis is None:
        return
    if in_shape.ndim != 1 or reduce_axis.ndim > 1:
        return

    out_shape = np.array(in_shape)
    if node.keep_dims:
        # Reduced dimensions stay in place with extent 1.
        out_shape[reduce_axis] = 1
    else:
        # Reduced dimensions are dropped entirely.
        out_shape = np.delete(out_shape, reduce_axis)
    node.out_node().shape = out_shape

    # Constant folding: only possible when an operation is supplied and the
    # input value itself is known.
    if op is None or node.in_node(0).value is None:
        return
    node.out_node(0).value = np.array([op(node.in_node(0).value, (*reduce_axis,))],
                                      dtype=node.in_node(0).value.dtype)  # TODO extend to multi-dimensional axis
    log.debug("value: {}".format(node.out_node(0).value))
| [
"openvino_pushbot@intel.com"
] | openvino_pushbot@intel.com |
7b1c11206207badd5f7d7c507c0be0f6262fa6eb | aad917f794226f917257ce103c295fd85a51ee62 | /categories/models.py | 6ee2075d4d3e0087cdb60858341277d6ddcecc4f | [] | no_license | jgsogo/bb-django-taxonomy-categories | 3fc05d5b3074cd6a469e97b81ad545834438adab | f00b86bcd08857a668cb62ef40caac0c3cba4bb5 | refs/heads/master | 2020-12-20T11:28:03.426634 | 2013-02-02T21:14:01 | 2013-02-02T21:14:01 | 236,059,332 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,645 | py | #!/usr/bin/env python
# encoding: utf-8
from django.db import models
from django.template.defaultfilters import slugify
from django.utils.translation import ugettext_lazy as _
from taxonomy.models.taxon import BaseTaxon
from taxonomy.managers import TaxonManager
class CategoryManager(TaxonManager):
    """Visibility-aware queryset helpers for :class:`Category`."""

    def choices(self, user=None):
        """Return the categories *user* may see.

        Superusers see everything; everyone else (including anonymous
        callers) only sees categories flagged as public.
        """
        queryset = self.get_query_set().all()
        if user is not None and user.is_superuser:
            return queryset
        return queryset.filter(public=self.model.PUBLIC)

    def orphan(self, user=None):
        """Retrieves all categories with no parent."""
        return self.choices(user).filter(parent=None)
class Category(BaseTaxon):
    """Hierarchical category taxon with public/private visibility."""

    PUBLIC = 0
    PRIVATE = 1
    PUBLICY_CHOICES = ( (PUBLIC, _('public')), (PRIVATE, _('private')),)

    title = models.CharField(max_length=100)
    slug = models.SlugField()
    description = models.TextField(blank=True, help_text=_(u'Optional'))
    public = models.IntegerField(choices = PUBLICY_CHOICES)

    objects = CategoryManager()

    class MPTTMeta:
        # MPTT keeps siblings ordered by slug on insertion.
        order_insertion_by = ['slug']

    class Meta(BaseTaxon.Meta):
        db_table = 'category_tree'
        app_label = 'category_tree'
        verbose_name = _('category')
        verbose_name_plural = _('categories')
        abstract = False

    def get_name(self):
        """Return the human-readable name of this taxon."""
        return self.title

    def save(self, *args, **kwargs):
        """Persist the category, deriving the slug from the title when empty.

        Uses the empty-string truthiness test instead of the previous
        ``if not len(...)`` construction; behavior is unchanged.
        """
        if not self.slug.strip():
            self.slug = slugify(self.title)
        super(Category, self).save(*args, **kwargs)
| [
"jgsogo@gmail.com"
] | jgsogo@gmail.com |
c59afdb801c2ae60596ec009aeb16a6c7eb6379e | c3cd2d040ceb3eabd387281835cacd0967fdbb6a | /web2py/extras/build_web2py/web2py.win_no_console.spec | d6c55d2f3ecd4375adca10507765bc7b3bc23279 | [
"LGPL-3.0-only",
"LicenseRef-scancode-warranty-disclaimer",
"MIT",
"BSD-3-Clause",
"LGPL-2.0-or-later",
"GPL-2.0-only",
"Apache-2.0",
"BSD-2-Clause",
"LicenseRef-scancode-free-unknown"
] | permissive | operepo/smc | cc55338b8b9fbeac78e67397079759965d859b68 | d10e7b7567266e31de73e5b29663577cab119a90 | refs/heads/master | 2022-09-22T07:17:59.970650 | 2022-07-11T00:20:45 | 2022-07-11T00:20:45 | 116,905,452 | 1 | 3 | MIT | 2021-03-09T03:01:37 | 2018-01-10T03:53:08 | Python | UTF-8 | Python | false | false | 2,230 | spec | # -*- mode: python -*-
block_cipher = None
a = Analysis(['web2py.py'],
pathex=['.'],
binaries=[],
datas=[],
hiddenimports=['site-packages', 'argparse', 'cgi', 'cgitb', 'code', 'concurrent', 'concurrent.futures',
'concurrent.futures._base', 'concurrent.futures.process', 'concurrent.futures.thread', 'configparser', 'csv', 'ctypes.wintypes',
'email.mime', 'email.mime.base', 'email.mime.multipart', 'email.mime.nonmultipart', 'email.mime.text', 'html.parser', 'http.cookies',
'ipaddress', 'imp', 'json', 'json.decoder', 'json.encoder', 'json.scanner', 'logging.config', 'logging.handlers', 'profile', 'pstats',
'psycopg2', 'psycopg2._ipaddress', 'psycopg2._json', 'psycopg2._range', 'psycopg2.extensions', 'psycopg2.extras', 'psycopg2.sql',
'psycopg2.tz', 'pyodbc', 'python-ldap', 'rlcompleter', 'sched', 'site', 'smtplib', 'sqlite3', 'sqlite3.dbapi2', 'sqlite3.dump', 'timeit', 'tkinter',
'tkinter.commondialog', 'tkinter.constants', 'tkinter.messagebox', 'uuid', 'win32con', 'win32evtlogutil', 'winerror', 'wsgiref',
'wsgiref.handlers', 'wsgiref.headers', 'wsgiref.simple_server', 'wsgiref.util', 'xml.dom', 'xml.dom.NodeFilter', 'xml.dom.domreg',
'xml.dom.expatbuilder', 'xml.dom.minicompat', 'xml.dom.minidom', 'xml.dom.pulldom', 'xml.dom.xmlbuilder', 'xmlrpc.server'],
hookspath=[],
runtime_hooks=[],
excludes=['gluon'],
win_no_prefer_redirects=False,
win_private_assemblies=False,
cipher=block_cipher,
noarchive=False)
pyz = PYZ(a.pure, a.zipped_data,
cipher=block_cipher)
exe = EXE(pyz,
a.scripts,
[],
exclude_binaries=True,
name='web2py_no_console',
debug=False,
bootloader_ignore_signals=False,
strip=False,
upx=True,
console=False , icon='extras\\icons\\web2py.ico')
coll = COLLECT(exe,
a.binaries,
a.zipfiles,
a.datas,
strip=False,
upx=True,
name='web2py_no_console')
| [
"ray@cmagic.biz"
] | ray@cmagic.biz |
05e0de1b6de56c36b6858a92f1e58b04ba53ba4e | 1f7d287ef90041e20468513a26a39e1f3d221289 | /Level-2/s15/guvi-L2-s15-py03.py | bd07bcfd56aa7a6e68f256aa01b44c59c5d2dc54 | [] | no_license | ksthacker/python | d787d69f954c0e9b59b0cc96a8b8fc5c0594d8a0 | 3a3775e1b9349e313f8c96ea11eade54a7e9bf54 | refs/heads/master | 2021-04-27T16:32:40.923316 | 2019-08-21T04:50:22 | 2019-08-21T04:50:22 | 122,303,461 | 0 | 17 | null | 2019-10-03T14:59:51 | 2018-02-21T07:09:32 | Python | UTF-8 | Python | false | false | 240 | py | import sys,string, math,itertools
n = int(input())
L1 = [ int(x) for x in input().split()]
L2 = [ int(x) for x in input().split()]
L22 = sorted(L2)
L12 = []
for x in L22 :
k = L2.index(x)
L12.append(L1[k])
print(*L12)
| [
"noreply@github.com"
] | ksthacker.noreply@github.com |
9d64563f089d3521304bb6648f440eb8c1b1e375 | c18a63e2e37712025794bc7d0bb824ca3a8cde51 | /IDserver/ssh_remote/data.py | 3f38241e919d3df484d294e500f8023dcdca543e | [] | no_license | wuqiangchuan/Xproxxx | 9202767573a3f0bfc1b00b6069eaf6ef9bc25907 | 6403bde2bc091faab55cca5ac9fff62b13d6a0cb | refs/heads/master | 2021-01-01T17:48:55.335991 | 2017-07-20T10:00:52 | 2017-07-20T10:00:52 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 508 | py | import json
class BaseData(object):
    """JSON-backed message object: attributes mirror the JSON payload."""

    def __init__(self, data=""):
        self.from_json(data)

    def from_json(self, data=""):
        """Replace this object's attributes with the parsed JSON mapping.

        An empty payload now yields an object with no attributes instead of
        the ValueError that ``json.loads("")`` used to raise for the default
        argument.
        """
        self.__dict__ = json.loads(data) if data else {}

    def to_json(self):
        """Serialize this object's attributes back to a JSON string.

        Bug fix: ``json.dumps(self)`` raised TypeError (instances are not
        JSON serializable); the attribute mapping is what must be dumped.
        """
        return json.dumps(self.__dict__)

    def get_type(self):
        """Return the message type tag carried in the ``tp`` attribute."""
        return self.tp


class ClientData(BaseData):
    """Message parsed from a client-supplied JSON string."""

    def __init__(self, data=""):
        super(ClientData, self).__init__(data)


class ServerData(BaseData):
    """Server-originated message: wraps *data* directly, no JSON parsing."""

    def __init__(self, data=""):
        self.tp = 'server'
        self.data = data
| [
"root@localhost.localdomain"
] | root@localhost.localdomain |
fb1fff9a82da62a462954d422bee1491b4574f0d | c1f0e38201ef91e772a1ffd3819b58382f7b4dc2 | /partner_bank_iban_convert/models/res_partner_bank.py | 138bfced7485a67c25080adfea1668015a9be059 | [] | no_license | victoralmau/account | f1007f3a645c0d529ad3cf519c0ffed0801c3eea | 522f59d8fb6bd7ae2cc910ac24152eb967a8c142 | refs/heads/master | 2022-12-24T08:33:00.150369 | 2020-04-03T08:34:21 | 2020-04-03T08:34:21 | 284,680,712 | 0 | 0 | null | 2020-08-03T11:21:46 | 2020-08-03T11:21:45 | null | UTF-8 | Python | false | false | 2,485 | py | # -*- coding: utf-8 -*-
import logging
_logger = logging.getLogger(__name__)
from openerp import api, models, fields
from openerp.exceptions import Warning
from datetime import datetime
import requests, json
class ResPartnerBank(models.Model):
    """Bank account that tries to upgrade plain 20-digit numbers to IBAN form.

    On every create/write, ``check_iban_convert`` asks the public
    openiban.com service to compute the IBAN and stores it on success.
    """
    _inherit = 'res.partner.bank'

    @api.one
    def check_iban_convert(self):
        """Convert ``acc_number`` to IBAN when all prerequisites are met.

        Written guard-clause style: each unmet precondition leaves the
        record untouched, exactly like the original nested-``if`` cascade.
        """
        if self.acc_number == False:
            return
        if self.acc_type != 'bank':
            return
        if not (self.bank_id.id > 0) or self.bank_id.code == False:
            return
        if not (self.acc_country_id.id > 0) or self.acc_country_id.code == False:
            return
        # Clean characters + drop embedded spaces.
        account_number = str(self.acc_number).strip().replace(' ', '')
        # Only classic 20-digit account numbers are candidates.
        if len(account_number) != 20:
            return
        account_number = account_number.replace(self.bank_id.code, '')
        # Ask openiban.com to compute the IBAN for this account.
        url = 'https://openiban.com/v2/calculate/' + str(self.acc_country_id.code) + '/' + str(self.bank_id.code) + '/' + str(account_number)
        response = requests.get(url)
        if response.status_code != 200:
            return
        response_json = json.loads(response.text)
        if response_json.get('valid') != True:
            return
        if response_json.get('iban', '') == '':
            return
        # Persist the converted number and flip the account type.
        self.acc_number = str(response_json['iban'])
        self.acc_type = 'iban'

    @api.model
    def create(self, values):
        """Create the record, then attempt the IBAN conversion on it."""
        record = super(ResPartnerBank, self).create(values)
        record.check_iban_convert()
        return record

    @api.one
    def write(self, vals):
        """Write the values, then attempt the IBAN conversion."""
        result = super(ResPartnerBank, self).write(vals)
        self.check_iban_convert()
        return result
"informatica@arelux.com"
] | informatica@arelux.com |
bcf29855ebe59aa2492ec108f429f524118b32d3 | c5d553e68de3d5c730f5fe2550209de759eabc8c | /프로그래머스/unrated/176963. 추억 점수/추억 점수.py | fd8452d93516976756e970ca7c06bdcfd1d358f5 | [] | no_license | KimMooHyeon/Algorithm-Studying | 6bb23b971b0c46c35f4cdde133148f2c5cfaa0f4 | e4417aadf209fd22f960239623bed542744fd374 | refs/heads/master | 2023-08-08T02:28:02.460332 | 2023-07-15T14:22:53 | 2023-07-15T14:22:53 | 198,966,251 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 369 | py | def solution(name, yearning, photo):
answer = []
hashScore = {}
for i in range(0,len(name)) :
hashScore[name[i]]=yearning[i]
print(hashScore)
for photoList in photo:
num = 0
for onePhoto in photoList:
if onePhoto in hashScore:
num+=hashScore[onePhoto]
answer.append(num)
return answer | [
"dlfb77@gmail.com"
] | dlfb77@gmail.com |
04bdb90a656863ac33204b200f157b78e9b4a660 | 7ab15522084e2f81d39cda505da844fb4d519f9d | /Logic and basic mathematics/Hard/Weird list/weird_list.py | e47346d384a1b426a8e538aab2170fa21eb8a2b7 | [] | no_license | Infinite-Loop-KJSIEIT/Algo-Talks | 1662cfd802bfbe4a9bfcf80a9c3157334e5cb4fd | 27d85ae3827f8765a4ebe98c80cc55b53c0562b0 | refs/heads/master | 2022-12-25T21:53:57.745115 | 2020-10-03T07:07:02 | 2020-10-03T07:07:02 | 286,681,402 | 13 | 3 | null | 2020-10-03T07:07:04 | 2020-08-11T07:53:23 | Python | UTF-8 | Python | false | false | 1,374 | py | def query(i,arr,repeat):
if i < len(arr):
return arr[i]
else:
return (repeat[(i - len(arr))% len(repeat)])
def solve():
    """Read one test case and answer its index queries.

    Reads the starting value ``n`` and three divisors ``a b c``, then builds
    the sequence obtained by dividing by a, b and c in turn, where after each
    division the value collapses to a single digit of the quotient's decimal
    string (the digit right after the '.', or the leading character when that
    digit is '0').  Cycle detection via ``mem`` splits the sequence into a
    non-repeating prefix and a repeating tail; ``query`` then answers each
    index lookup without extending the sequence.
    """
    n = int(input())
    a,b,c = map(int,input().split())
    arr = []
    # mem maps a value to its 1-based index in arr; only values entering the
    # /a step are recorded here.
    mem = {}
    while True:
        if n in mem:
            # Value seen before: everything from its first position repeats.
            break
        else:
            arr.append(n)
            mem[n] = len(arr)
            n = n/a
            # Collapse the quotient to one digit (see docstring).
            s = str(n)
            for i in range(len(s)):
                if s[i] == ".":
                    if s[i+1] != '0':
                        n = int(s[i+1])
                    else:
                        n = int(s[0])
        # NOTE(review): the values produced by the /b and /c steps below are
        # appended but never recorded in ``mem`` — only every third element
        # can trigger cycle detection.  Confirm this is intended.
        arr.append(n)
        n = n/b
        s = str(n)
        for i in range(len(s)):
            if s[i] == ".":
                if s[i+1] != '0':
                    n = int(s[i+1])
                else:
                    n = int(s[0])
        arr.append(n)
        n = n/c
        s = str(n)
        for i in range(len(s)):
            if s[i] == ".":
                if s[i+1] != '0':
                    n = int(s[i+1])
                else:
                    n = int(s[0])
    # Split at the first occurrence of the repeated value: prefix before it,
    # repeating block from it onwards.
    new_arr = arr[:mem[n]-1]
    repeat = arr[mem[n]-1:]
    Q = int(input())
    for q in range(Q):
        i = int(input())
        print(query(i, new_arr, repeat))


if __name__ == '__main__':
    # First input line: number of test cases; each is handled by solve().
    for t in range(int(input())):
        solve()
| [
"keshav.sm@somaiya.edu"
] | keshav.sm@somaiya.edu |
3e40c4baffc41e0e6c66dcf3225c7b95f25bf744 | 37fef592f365194c28579f95abd222cc4e1243ae | /streamlit/Unbottled/Pages/2_Wine_Explorer.py | 799e5b08ba10636602ad117bd4c472fd5310c68c | [] | no_license | edimaudo/Python-projects | be61e0d3fff63fb7bd00513dbf1401e2c1822cfb | 85d54badf82a0b653587a02e99daf389df62e012 | refs/heads/master | 2023-04-07T03:26:23.259959 | 2023-03-24T12:03:03 | 2023-03-24T12:03:03 | 72,611,253 | 4 | 3 | null | 2022-10-31T18:10:41 | 2016-11-02T06:37:17 | null | UTF-8 | Python | false | false | 1,021 | py | w# Libraries
import streamlit as st
import pandas as pd
import plotly.express as px
@st.cache
def load_data():
data = pd.read_csv(DATA_URL)
return data
# Load data
DATA_URL = "winemag-data_first150k.csv"
df = load_data()
st.title('Unbottled')
st.header("Wine Explorer")
country_list = df['country'].unique()
country_list = country_list.astype('str')
country_list.sort()
variety_list = df['variety'].unique()
variety_list = variety_list.astype('str')
variety_list.sort()
country_choice = st.selectbox("Select a Country",country_list)
variety_choice = st.selectbox("Select a Variety",variety_list)
price_choice = st.slider('Select a Price Range', 0, 2500, 100)
points_choice = st.slider('Select a Points Range', 80, 100, 5)
choice_df = df[(df.country == country_choice) | (df.variety == variety_choice) | (df.price.le(price_choice)) | (df.points.le(points_choice))]
choice_df = choice_df[['country','variety','price','points','description','designation','winery']]
st.dataframe(choice_df)
| [
"edimaudo@gmail.com"
] | edimaudo@gmail.com |
8ee62c7ae4ce8147c226b824dc7d65eca8972907 | 03a2c1eb549a66cc0cff72857963eccb0a56031d | /leetcode/magic-squares-in-grid.py | 8fe33c0521dd7a3b930269182a7b70defb66cada | [] | no_license | nobe0716/problem_solving | c56e24564dbe3a8b7093fb37cd60c9e0b25f8e59 | cd43dc1eddb49d6b5965419e36db708c300dadf5 | refs/heads/master | 2023-01-21T14:05:54.170065 | 2023-01-15T16:36:30 | 2023-01-15T16:36:30 | 80,906,041 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,081 | py | from typing import List
class Solution:
def numMagicSquaresInside(self, grid: List[List[int]]) -> int:
def is_magic(x: int, y: int) -> bool:
col_sum, row_sum, dia_sum = [0, 0, 0], [0, 0, 0], [0, 0]
ws = set()
for i in range(x, x + 3):
for j in range(y, y + 3):
v = grid[i][j]
col_sum[j - y] += v
row_sum[i - x] += v
if i - x == j - y:
dia_sum[0] += v
if (i - x) + (j - y) == 2:
dia_sum[1] += v
ws.add(v)
if len(ws) > 1:
return False
return len(set(col_sum) | set(row_sum) | set(dia_sum)) == 1
n, m = len(grid), len(grid[0])
c = 0
for i in range(n - 2):
for j in range(m - 2):
if is_magic(i, j):
c += 1
return c
s = Solution()
assert s.numMagicSquaresInside([[4, 3, 8, 4], [9, 5, 1, 9], [2, 7, 6, 2]]) == 1
| [
"sunghyo.jung@navercorp.com"
] | sunghyo.jung@navercorp.com |
4ab2751ff496c14437c5da2dcae0880daf0f0322 | 9ba71c165fe70e1bba26bd3d6230c321e0aa60ec | /src/python/up_sqllite_cdi.py | 68a056f9cd7cc95957529375e5b6213b626cdf07 | [] | no_license | gabrielreiss/DB_econ | 0977899be20986a0b33937a534e6cbbc9fb767a2 | bcf6de08402b09c0e84f8696b92ef3c0846b3cc1 | refs/heads/master | 2022-08-02T03:55:08.096697 | 2020-05-26T18:57:27 | 2020-05-26T18:57:27 | 266,602,990 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,171 | py | import os
import sqlalchemy
import argparse
import pandas as pd
# Resolve the project folders relative to this file's location.
BASE_DIR = os.path.dirname(os.path.dirname( os.path.dirname(__file__) ) )
DATA_DIR = os.path.join( BASE_DIR, 'data' )
SQL_DIR = os.path.join( BASE_DIR, 'src', 'sql' )
RESULT_DIR = os.path.join( BASE_DIR, 'resultado' )
print(DATA_DIR)

# Open the connection to the SQLite database.
# NOTE(review): building the sqlite URL with os.path.join produces
# backslash separators on Windows — confirm SQLAlchemy resolves it there.
str_connection = os.path.join( 'sqlite:///', DATA_DIR, 'DB_econ.db' )
engine = sqlalchemy.create_engine( str_connection )
connection = engine.connect()

# Discover the CSV data files to be loaded.
files_names = [ i for i in os.listdir( DATA_DIR ) if i.endswith('.csv') ]
def data_quality(x):
    """Remove embedded newline/carriage-return characters from string cells.

    Non-string values (numbers, NaN, None, ...) pass through unchanged.
    Uses ``isinstance`` instead of the previous ``type(x) == str`` test,
    which is the idiomatic type check and also covers str subclasses.
    """
    if isinstance(x, str):
        return x.replace("\n", "").replace("\r", '')
    return x
# Insert each data file into the database as its own table.
for i in files_names:
    print(i)
    df_tmp = pd.read_csv( os.path.join( DATA_DIR, i ) )
    # Scrub embedded newlines from every column's string cells.
    for c in df_tmp.columns:
        df_tmp[c] = df_tmp[c].apply(data_quality)
    # Bug fix: the previous i.strip(".csv") removed any of the characters
    # '.', 'c', 's', 'v' from BOTH ends of the name (e.g. "cdi.csv" -> "di");
    # os.path.splitext removes only the real extension suffix.
    table_name = "tb_" + os.path.splitext(i)[0].replace("df_", "")
    df_tmp.to_sql( table_name,
                   connection,
                   if_exists='replace',
                   index=False )
"gabrielreissdecastro@gmail.com"
] | gabrielreissdecastro@gmail.com |
7c3dfcfc469f34534a03b2380bb90d7fa72abae0 | f4c0df92671a9cd021415830e8b7183cc7c6422f | /Play/migrations/0001_initial.py | 4f26e1c683f4634e78ef5bd89154cb9960eafeef | [] | no_license | JorgitoR/DrawSomthing-Django-Python | 322e301e67ff58224fe6d628dde815ca8ed501ca | d6e0997e2ffbf38ca5b1a44c2c9840ac8bf8286d | refs/heads/main | 2023-04-09T16:14:31.152384 | 2021-04-13T22:26:08 | 2021-04-13T22:26:08 | 357,693,300 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 541 | py | # Generated by Django 3.2 on 2021-04-13 20:27
from django.db import migrations, models
class Migration(migrations.Migration):
    """Initial schema: creates the ``solucion`` model for the Play app."""

    # First migration of this app, so it declares no dependencies.
    initial = True

    dependencies = [
    ]

    operations = [
        migrations.CreateModel(
            name='solucion',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('length', models.IntegerField()),
                ('letras', models.CharField(max_length=20)),
            ],
        ),
    ]
| [
"jorgitouribe133@gmail.com"
] | jorgitouribe133@gmail.com |
0596bfd7f19ac3df4ef8e329fb73a591a444828c | 9743d5fd24822f79c156ad112229e25adb9ed6f6 | /xai/brain/wordbase/nouns/_study.py | c48d7435c9a758b6b9ac596e39c436b127f51674 | [
"MIT"
] | permissive | cash2one/xai | de7adad1758f50dd6786bf0111e71a903f039b64 | e76f12c9f4dcf3ac1c7c08b0cc8844c0b0a104b6 | refs/heads/master | 2021-01-19T12:33:54.964379 | 2017-01-28T02:00:50 | 2017-01-28T02:00:50 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 731 | py |
#calss header
class _STUDY():
def __init__(self,):
self.name = "STUDY"
self.definitions = [u'the activity of examining a subject in detail in order to discover new information: ', u'a drawing that an artist makes in order to test ideas before starting a painting of the same subject', u'the act of learning about a subject, usually at school or university: ', u'studying or work involving studying: ', u'used in the names of some educational subjects and courses: ', u'a room, especially in a house, used for quiet work such as reading or writing']
self.parents = []
self.childen = []
self.properties = []
self.jsondata = {}
self.specie = 'nouns'
def run(self, obj1 = [], obj2 = []):
return self.jsondata
| [
"xingwang1991@gmail.com"
] | xingwang1991@gmail.com |
6e1b2983208299a57bab42a21a2cd696e401903c | fd717fe6ca74f6d77210cdd57a8c365d27c5bfc6 | /pychron/monitors/monitor.py | 31a1c54dd066afc976076913098eb1410bb2fb66 | [
"Apache-2.0"
] | permissive | stephen-e-cox/pychron | 1dea0467d904d24c8a3dd22e5b720fbccec5c0ed | 681d5bfe2c13e514859479369c2bb20bdf5c19cb | refs/heads/master | 2021-01-19T15:40:03.663863 | 2016-07-14T14:37:16 | 2016-07-14T14:37:16 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,193 | py | # ===============================================================================
# Copyright 2011 Jake Ross
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ===============================================================================
# ============= enthought library imports =======================
from traits.api import Float
# ============= standard library imports ========================
from threading import Thread, Event
import time
# ============= local library imports ==========================
# from pychron.config_loadable import ConfigLoadable
# from pychron.managers.manager import Manager
from pychron.config_loadable import ConfigLoadable
from pyface.message_dialog import warning
from pychron.core.ui.gui import invoke_in_main_thread
from pychron.paths import paths
class Monitor(ConfigLoadable):
    """Background condition monitor.

    Runs every method whose name contains ``_fcheck`` on a daemon thread,
    sleeping ``sample_delay`` seconds between sweeps.  Start with
    :meth:`monitor`, stop with :meth:`stop`.
    """
    # Seconds to sleep between check sweeps; overridable from configuration.
    sample_delay = Float(5)
    # Owning manager object; its ``error_code`` is cleared when the loop starts.
    manager = None
    _monitoring = False     # True while the background thread is running
    _invalid_checks = None  # check-method names to skip (set by load())
    _stop_signal = None     # threading.Event used to stop the loop
    configuration_dir_name = paths.monitors_dir

    def is_monitoring(self):
        """Return True while the monitor thread is active."""
        return self._monitoring

    def load(self):
        """Read the monitor configuration.

        Returns ``_load_hook``'s result on success; implicitly returns None
        when no configuration is available (callers treat that as failure).
        """
        config = self.get_configuration()
        if config:
            self.set_attribute(config, 'sample_delay',
                               'General', 'sample_delay', cast='float', optional=False)
            self._invalid_checks = []
            return self._load_hook(config)

    def _load_hook(self, *args):
        # Subclass extension point; the default accepts any configuration.
        return True

    def stop(self):
        """Signal the monitor thread to exit and mark monitoring stopped."""
        if self._stop_signal:
            self._stop_signal.set()
        self.info('Stop monitor')
        self._monitoring = False

    def warning(self, msg):
        """
        override loggable warning to issue a warning dialog
        """
        super(Monitor, self).warning(msg)
        invoke_in_main_thread(warning, None, msg)

    def monitor(self):
        """Start the background monitor thread (daemon) if not already running.

        NOTE(review): both branches of ``if self.load()`` return True, so a
        load failure is indistinguishable from a successful start here —
        confirm this is intended.
        """
        if not self._monitoring:
            self._monitoring = True
            self.info('Starting monitor')
            self._stop_signal = Event()
            if self.load():
                # Daemon thread so it never blocks interpreter shutdown.
                t = Thread(target=self._monitor_)
                t.setDaemon(1)
                t.start()
                return True
            else:
                return True

    def reset_start_time(self):
        """Record the current wall-clock time as the monitoring start time."""
        self.start_time = time.time()

    def check(self):
        """Run every check once; True if any of them reports a problem."""
        return any([fi() for fi in self._get_checks()])

    def _get_checks(self):
        # All bound methods whose name contains '_fcheck' and that have not
        # been invalidated.  NOTE(review): assumes load() already ran so
        # _invalid_checks is a list, not None — confirm callers guarantee it.
        return [getattr(self, h) for h in dir(self)
                if '_fcheck' in h and h not in self._invalid_checks]

    def _monitor_(self):
        """Thread body: sweep all checks until the stop signal is set."""
        if self.manager is not None:
            # Clear any previous error before the first sweep.
            self.manager.error_code = None
        self.gntries = 0
        self.reset_start_time()
        stop_signal = self._stop_signal
        while not stop_signal.isSet():
            for fi in self._get_checks():
                fi()
                # Bail out mid-sweep as soon as stop() is requested.
                if stop_signal.isSet():
                    break
            # sleep before running monitor again
            time.sleep(self.sample_delay)
# ============= EOF ====================================
# def _monitor_(self, stop_signal):
# '''
# '''
# #load before every monitor call so that changes to the config file
# #are incorpoated
# self.load()
#
# if self.manager is not None:
# self.gntries = 0
# self.reset_start_time()
# cnt = 0
# while not stop_signal.isSet():
# '''
# double checks executed twice for every check
# '''
# for h in dir(self):
# if '_doublecheck' in h and h not in self._invalid_checks:
# func = getattr(self, h)
# func()
# if stop_signal.isSet():
# break
#
# if cnt % 2 == 0:
# for h in dir(self):
# if '_check' in h and h not in self._invalid_checks:
# func = getattr(self, h)
# func()
# if stop_signal.isSet():
# break
#
# cnt += 1
# if cnt == 100:
# cnt = 0
# #sleep before running monitor again
# time.sleep(self.sample_delay / 2.0)
| [
"jirhiker@gmail.com"
] | jirhiker@gmail.com |
7fed4d541de3bcf5193df154b82115797d4c2dd1 | 1521332438d4e711b6fa4af825047a3466925511 | /GuessingGame/guessingGame.py | 9bede277f18ac0b9985398ce5dc0e9f097dfbcd3 | [] | no_license | JakeAttard/Python-2807ICT-NoteBook | df0907bdca9ff10f347498233260c97f41ea783b | 9a38035d467e569b3fb97f5ab114753efc32cecc | refs/heads/master | 2020-04-26T17:33:18.184447 | 2019-11-05T13:04:56 | 2019-11-05T13:04:56 | 173,717,675 | 3 | 1 | null | null | null | null | UTF-8 | Python | false | false | 619 | py | import random
randomNumber = random.randint(1, 10)
player = None
while True:
player = input("Pick a number from 1 to 10:")
player = int(player)
if player < randomNumber:
print("The number you entered is to low!")
elif player > randomNumber:
print("The number you entered is to high!")
else:
print("You guess the number. You win!")
playAgain = input("Do you want to play again? (y/n)")
if playAgain == "y":
randomNumber = random.randint(1, 10)
player = None
else:
print("Thankyou for playing!")
break | [
"jakeattard18@gmail.com"
] | jakeattard18@gmail.com |
d06c93dc79f19ab2a2e5a94c345f87b70f022eb9 | 83de24182a7af33c43ee340b57755e73275149ae | /aliyun-python-sdk-sgw/aliyunsdksgw/request/v20180511/DeleteGatewayBlockVolumesRequest.py | a2c400048585f93262c6d389ea4f5bc52f6e85d2 | [
"Apache-2.0"
] | permissive | aliyun/aliyun-openapi-python-sdk | 4436ca6c57190ceadbc80f0b1c35b1ab13c00c7f | 83fd547946fd6772cf26f338d9653f4316c81d3c | refs/heads/master | 2023-08-04T12:32:57.028821 | 2023-08-04T06:00:29 | 2023-08-04T06:00:29 | 39,558,861 | 1,080 | 721 | NOASSERTION | 2023-09-14T08:51:06 | 2015-07-23T09:39:45 | Python | UTF-8 | Python | false | false | 2,010 | py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
from aliyunsdksgw.endpoint import endpoint_data
class DeleteGatewayBlockVolumesRequest(RpcRequest):
    """RPC request wrapper for the sgw ``DeleteGatewayBlockVolumes`` API (2018-05-11)."""

    def __init__(self):
        RpcRequest.__init__(self, 'sgw', '2018-05-11', 'DeleteGatewayBlockVolumes', 'hcs_sgw')
        self.set_method('POST')
        # Only populate endpoint data when the base class declares the slots.
        if hasattr(self, "endpoint_map"):
            setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
        if hasattr(self, "endpoint_regional"):
            setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())

    def get_IsSourceDeletion(self):
        """Accessor for the ``IsSourceDeletion`` query parameter."""
        params = self.get_query_params()
        return params.get('IsSourceDeletion')

    def set_IsSourceDeletion(self, IsSourceDeletion):
        self.add_query_param('IsSourceDeletion', IsSourceDeletion)

    def get_SecurityToken(self):
        """Accessor for the ``SecurityToken`` query parameter."""
        params = self.get_query_params()
        return params.get('SecurityToken')

    def set_SecurityToken(self, SecurityToken):
        self.add_query_param('SecurityToken', SecurityToken)

    def get_IndexId(self):
        """Accessor for the ``IndexId`` query parameter."""
        params = self.get_query_params()
        return params.get('IndexId')

    def set_IndexId(self, IndexId):
        self.add_query_param('IndexId', IndexId)

    def get_GatewayId(self):
        """Accessor for the ``GatewayId`` query parameter."""
        params = self.get_query_params()
        return params.get('GatewayId')

    def set_GatewayId(self, GatewayId):
        self.add_query_param('GatewayId', GatewayId)
"sdk-team@alibabacloud.com"
] | sdk-team@alibabacloud.com |
b0ceda92856ed55a8490f94731a8f267fc40675d | 0d1ad7e5fca72ae767c7ddbe0406eb72e733596c | /smartedukart/urls.py | a435775c5b34ea5513f8ae112901fb76a26b0265 | [
"MIT"
] | permissive | Vishesh-Conbi/Chatbot | d223faa4e2935da3f9abec8821e97c69e18a03d9 | c6a966eb1947064eeffe1a1722d672ca560377bd | refs/heads/main | 2023-03-01T02:25:49.853907 | 2021-02-09T10:22:58 | 2021-02-09T10:22:58 | 337,365,376 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 973 | py | """quantum URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/3.1/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import path,include
from django.conf import settings
from django.conf.urls.static import static
urlpatterns = [
path('',include('mainhome.urls')),
path('admin/', admin.site.urls),
]
urlpatterns = urlpatterns + static(settings.MEDIA_URL, document_root = settings.MEDIA_ROOT) | [
"="
] | = |
21d2e345e0a2621e72779c42482c834df4d1fbd2 | 83de24182a7af33c43ee340b57755e73275149ae | /aliyun-python-sdk-mse/aliyunsdkmse/request/v20190531/GetPluginsRequest.py | 2e7bb852993547f26504fbcc5ba4ee96f124d88f | [
"Apache-2.0"
] | permissive | aliyun/aliyun-openapi-python-sdk | 4436ca6c57190ceadbc80f0b1c35b1ab13c00c7f | 83fd547946fd6772cf26f338d9653f4316c81d3c | refs/heads/master | 2023-08-04T12:32:57.028821 | 2023-08-04T06:00:29 | 2023-08-04T06:00:29 | 39,558,861 | 1,080 | 721 | NOASSERTION | 2023-09-14T08:51:06 | 2015-07-23T09:39:45 | Python | UTF-8 | Python | false | false | 2,227 | py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
from aliyunsdkmse.endpoint import endpoint_data
class GetPluginsRequest(RpcRequest):
	"""RPC request wrapper for the mse ``GetPlugins`` API (version 2019-05-31)."""

	def __init__(self):
		RpcRequest.__init__(self, 'mse', '2019-05-31', 'GetPlugins','mse')
		self.set_method('POST')
		# Attach endpoint routing data when the base class declares the slots.
		if hasattr(self, "endpoint_map"):
			setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
		if hasattr(self, "endpoint_regional"):
			setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())

	def get_GatewayUniqueId(self): # String
		"""Accessor for the ``GatewayUniqueId`` query parameter."""
		return self.get_query_params().get('GatewayUniqueId')

	def set_GatewayUniqueId(self, GatewayUniqueId):  # String
		self.add_query_param('GatewayUniqueId', GatewayUniqueId)

	def get_EnableOnly(self): # Boolean
		"""Accessor for the ``EnableOnly`` query parameter."""
		return self.get_query_params().get('EnableOnly')

	def set_EnableOnly(self, EnableOnly):  # Boolean
		self.add_query_param('EnableOnly', EnableOnly)

	def get_Name(self): # String
		"""Accessor for the ``Name`` query parameter."""
		return self.get_query_params().get('Name')

	def set_Name(self, Name):  # String
		self.add_query_param('Name', Name)

	def get_AcceptLanguage(self): # String
		"""Accessor for the ``AcceptLanguage`` query parameter."""
		return self.get_query_params().get('AcceptLanguage')

	def set_AcceptLanguage(self, AcceptLanguage):  # String
		self.add_query_param('AcceptLanguage', AcceptLanguage)

	def get_Category(self): # Integer
		"""Accessor for the ``Category`` query parameter."""
		return self.get_query_params().get('Category')

	def set_Category(self, Category):  # Integer
		self.add_query_param('Category', Category)
| [
"sdk-team@alibabacloud.com"
] | sdk-team@alibabacloud.com |
777b484e21a94d9a58c37e2f1454817c52064f88 | 1577e1cf4e89584a125cffb855ca50a9654c6d55 | /pyobjc/pyobjc/pyobjc-framework-InputMethodKit-2.5.1/setup.py | ba422bf4177764d84954db4d4b0ead83c748c644 | [
"MIT"
] | permissive | apple-open-source/macos | a4188b5c2ef113d90281d03cd1b14e5ee52ebffb | 2d2b15f13487673de33297e49f00ef94af743a9a | refs/heads/master | 2023-08-01T11:03:26.870408 | 2023-03-27T00:00:00 | 2023-03-27T00:00:00 | 180,595,052 | 124 | 24 | null | 2022-12-27T14:54:09 | 2019-04-10T14:06:23 | null | UTF-8 | Python | false | false | 1,203 | py | '''
Wrappers for the "InputMethodKit" framework on MacOSX 10.5 or later. The
interfaces in this framework allow you to develop input methods.
These wrappers don't include documentation, please check Apple's documention
for information on how to use this framework and PyObjC's documentation
for general tips and tricks regarding the translation between Python
and (Objective-)C frameworks
'''
from pyobjc_setup import setup, Extension
import os
setup(
min_os_level='10.5',
name='pyobjc-framework-InputMethodKit',
version="2.5.1",
description = "Wrappers for the framework InputMethodKit on Mac OS X",
packages = [ "InputMethodKit" ],
setup_requires = [
'pyobjc-core>=2.5.1',
],
install_requires = [
'pyobjc-core>=2.5.1',
'pyobjc-framework-Cocoa>=2.5.1',
],
ext_modules = [
Extension("InputMethodKit._InputMethodKit",
[ "Modules/_InputMethodKit.m" ],
extra_link_args=["-framework", "InputMethodKit"],
depends=[
os.path.join('Modules', fn)
for fn in os.listdir('Modules')
if fn.startswith('_InputMethodKit')
]
),
]
)
| [
"opensource@apple.com"
] | opensource@apple.com |
f1f3b500c8af88016fadb2c242416fe86c0bce21 | 1065ec75d9ee668ffd7aafc6a8de912d7c2cee6f | /addons/script.icechannel.extn.extra.uk/plugins/livetv_uk/bbc_parliament_ltvi.py | d5fcbc1f8759f07f2c949ab8e4afcac6c5a7f8e4 | [] | no_license | bopopescu/kodiprofile | 64c067ee766e8a40e5c148b8e8ea367b4879ffc7 | 7e78640a569a7f212a771aab6a4a4d9cb0eecfbe | refs/heads/master | 2021-06-11T17:16:15.498281 | 2016-04-03T06:37:30 | 2016-04-03T06:37:30 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,034 | py | '''
Ice Channel
'''
from entertainment.plugnplay.interfaces import LiveTVIndexer
from entertainment.plugnplay import Plugin
from entertainment import common
class bbc_parliament(LiveTVIndexer):
implements = [LiveTVIndexer]
display_name = "BBC Parliament"
name = "bbc_parliament"
other_names = "bbc_parliament,BBC Parliament"
import xbmcaddon
import os
addon_id = 'script.icechannel.extn.extra.uk'
addon = xbmcaddon.Addon(addon_id)
img = os.path.join( addon.getAddonInfo('path'), 'resources', 'images', name + '.png' )
regions = [
{
'name':'United Kingdom',
'img':addon.getAddonInfo('icon'),
'fanart':addon.getAddonInfo('fanart')
},
]
languages = [
{'name':'English', 'img':'', 'fanart':''},
]
genres = [
{'name':'News', 'img':'', 'fanart':''}
]
addon = None
| [
"sokasoka@hotmail.com"
] | sokasoka@hotmail.com |
60da94b3d5a56964d1f99e9185da3b986a435a2c | 7c99ea5b1ffe089c97615336daf4b6ceed9a5b00 | /Configurations/HighMass/v7_Full2017/ANlogplot_configuration_em.py | fd4651d3d926308afef03f77ab35c8b698d176ef | [] | no_license | flaviacetorelli/PlotsConfigurations | 948faadba356e1d5e6f546dc11dd8dacfe1c1910 | aa5cf802c86902378617f566186bc638e69f9936 | refs/heads/master | 2022-05-02T00:37:56.070453 | 2022-03-18T10:14:23 | 2022-03-18T10:14:23 | 235,580,894 | 0 | 1 | null | 2020-01-22T13:37:30 | 2020-01-22T13:37:29 | null | UTF-8 | Python | false | false | 920 | py | # example of configuration file
treeName= 'Events'
tag = 'Full2017_em'
# used by mkShape to define output directory for root files
outputDir = 'rootFile_'+tag
# file with TTree aliases
aliasesFile = 'aliases.py'
# file with list of variables
variablesFile = 'variables_forANplot.py'
# file with list of cuts
cutsFile = 'cuts_em.py'
# file with list of samples
samplesFile = 'samples.py'
# file with list of samples
plotFile = 'plot_log_em.py'
# luminosity to normalize to (in 1/fb)
lumi = 41.53
# used by mkPlot to define output directory for plots
# different from "outputDir" to do things more tidy
outputDirPlots = 'plot_'+tag
# used by mkDatacards to define output directory for datacards
outputDirDatacard = 'datacards'
# structure file for datacard
#structureFile = 'structure.py' # Is this even needed still?
# nuisances file for mkDatacards and for mkShape
nuisancesFile = 'nuisances.py'
| [
"dennis.roy@cern.ch"
] | dennis.roy@cern.ch |
8dc78b28e68a52e460a2066b41bc262bb6a51ea6 | e23a4f57ce5474d468258e5e63b9e23fb6011188 | /140_gui/pyqt_pyside/examples/PyQt_PySide_book/003_Placing several components in the box/005_Class_QSizePolicy/087_Maximum - toClass.py | a62d182be878b1c769b07b9724d61facdfe71f69 | [] | no_license | syurskyi/Python_Topics | 52851ecce000cb751a3b986408efe32f0b4c0835 | be331826b490b73f0a176e6abed86ef68ff2dd2b | refs/heads/master | 2023-06-08T19:29:16.214395 | 2023-05-29T17:09:11 | 2023-05-29T17:09:11 | 220,583,118 | 3 | 2 | null | 2023-02-16T03:08:10 | 2019-11-09T02:58:47 | Python | UTF-8 | Python | false | false | 676 | py | from PySide import QtCore, QtGui
import sys
class SampleWindow(QtGui.QWidget):
def __init__(self):
super(SampleWindow, self).__init__()
self.setWindowTitle("QSizePolicy")
self.resize(300, 150)
label = QtGui.QLabel("Текст надписи")
button = QtGui.QPushButton("1")
policy = QtGui.QSizePolicy(QtGui.QSizePolicy.Maximum,
QtGui.QSizePolicy.Maximum)
label.setFrameStyle(QtGui.QFrame.Box | QtGui.QFrame.Plain)
label.setSizePolicy(policy)
vbox = QtGui.QVBoxLayout()
vbox.addWidget(label)
vbox.addWidget(button)
self.setLayout(vbox)
| [
"sergejyurskyj@yahoo.com"
] | sergejyurskyj@yahoo.com |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.