blob_id
stringlengths 40
40
| directory_id
stringlengths 40
40
| path
stringlengths 3
288
| content_id
stringlengths 40
40
| detected_licenses
listlengths 0
112
| license_type
stringclasses 2
values | repo_name
stringlengths 5
115
| snapshot_id
stringlengths 40
40
| revision_id
stringlengths 40
40
| branch_name
stringclasses 684
values | visit_date
timestamp[us]date 2015-08-06 10:31:46
2023-09-06 10:44:38
| revision_date
timestamp[us]date 1970-01-01 02:38:32
2037-05-03 13:00:00
| committer_date
timestamp[us]date 1970-01-01 02:38:32
2023-09-06 01:08:06
| github_id
int64 4.92k
681M
⌀ | star_events_count
int64 0
209k
| fork_events_count
int64 0
110k
| gha_license_id
stringclasses 22
values | gha_event_created_at
timestamp[us]date 2012-06-04 01:52:49
2023-09-14 21:59:50
⌀ | gha_created_at
timestamp[us]date 2008-05-22 07:58:19
2023-08-21 12:35:19
⌀ | gha_language
stringclasses 147
values | src_encoding
stringclasses 25
values | language
stringclasses 1
value | is_vendor
bool 2
classes | is_generated
bool 2
classes | length_bytes
int64 128
12.7k
| extension
stringclasses 142
values | content
stringlengths 128
8.19k
| authors
listlengths 1
1
| author_id
stringlengths 1
132
|
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
5f3d1d59dddfd4fbccd0763620228091d57a6323
|
eb2668b93899637f04e4c93e01063d0c8175ccde
|
/Irises_classification/iris_KNN_GridSearch_2_parameters.py
|
a8ae8beebc2f04cb715db123dca90df089536532
|
[] |
no_license
|
D-Katt/AI-Machine-Learning
|
aad1fe1c8f3f901cb7829919d1b69a106f0ddfab
|
1868c92366dccabf8c86c559eee640645b51bb51
|
refs/heads/master
| 2021-12-19T21:59:04.403188
| 2021-12-07T13:07:46
| 2021-12-07T13:07:46
| 235,104,866
| 3
| 2
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,161
|
py
|
# Classify 'iris dataset' samples with a KNN model.
# GridSearchCV tunes two hyperparameters: the number of
# "neighbors" and the weighting scheme.
import pandas as pd
from sklearn.datasets import load_iris
from sklearn.neighbors import KNeighborsClassifier
from sklearn.model_selection import train_test_split, GridSearchCV

# Load the dataset: features (measurements) and targets (species labels).
iris = load_iris()
X = iris.data
y = iris.target

# Hold out 20% of the samples for testing.
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2)

# Candidate values for the two tuned hyperparameters.
param_grid = {
    'n_neighbors': range(1, 31),
    'weights': ['uniform', 'distance'],
}

# Grid search over the KNN model with 10-fold cross-validation,
# scoring each parameter combination by accuracy.
search = GridSearchCV(KNeighborsClassifier(), param_grid, cv=10, scoring='accuracy')

# Fit on the training data only.
search.fit(X_train, y_train)

print('Максимальная точность прогноза на тестовой выборке:', search.best_score_)
print('\nПараметры лучшей модели:', search.best_estimator_)

# Mean accuracy and standard deviation for every parameter combination.
summary = pd.DataFrame(search.cv_results_)
print(summary[['param_n_neighbors', 'param_weights', 'mean_test_score', 'std_test_score']])
|
[
"noreply@github.com"
] |
D-Katt.noreply@github.com
|
1773433480c17f06c09757f6251e684035134844
|
75dff087b6bec301193b2c145579d38b28249d22
|
/Leetcode_Algorithm/Python3/280_Wiggle_Sort.py
|
2735d66046a732cbb77e5f70e0db75a365eae4b8
|
[] |
no_license
|
ChihYunPai/Data-Structure-and-Algorithms
|
2d3d930d8374b62287f7cc8c3741a1d7b3d227b6
|
27a85e20605393a5eca3f8bd7d42c389612493d5
|
refs/heads/master
| 2023-06-07T04:04:57.186920
| 2021-07-03T03:28:29
| 2021-07-03T03:28:29
| 119,465,269
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 717
|
py
|
"""
Given an unsorted array nums, reorder it in-place such that nums[0] <= nums[1] >= nums[2] <= nums[3]....
For example, given nums = [3, 5, 2, 1, 6, 4], one possible answer is [1, 6, 2, 5, 3, 4].
"""
class Solution(object):
    def wiggleSort(self, nums):
        """
        :type nums: List[int]
        :rtype: void Do not return anything, modify nums in-place instead.
        """
        # Sort a copy, then deal the smaller half onto the even slots
        # (ascending) and the larger half onto the odd slots in reverse,
        # which guarantees nums[0] <= nums[1] >= nums[2] <= nums[3] ...
        ranked = sorted(nums)
        half = (len(nums) + 1) // 2
        nums[::2] = ranked[:half]
        nums[1::2] = ranked[half:][::-1]
|
[
"tonypaiwhite@gmail.com"
] |
tonypaiwhite@gmail.com
|
cf198967e5721ff051b551643c1902e36c065adf
|
d66818f4b951943553826a5f64413e90120e1fae
|
/hackerrank/Python/Validating Roman Numerals/solution.py
|
4d3307da4a8b34976111672247c145435592a77a
|
[
"MIT"
] |
permissive
|
HBinhCT/Q-project
|
0f80cd15c9945c43e2e17072416ddb6e4745e7fa
|
19923cbaa3c83c670527899ece5c3ad31bcebe65
|
refs/heads/master
| 2023-08-30T08:59:16.006567
| 2023-08-29T15:30:21
| 2023-08-29T15:30:21
| 247,630,603
| 8
| 1
|
MIT
| 2020-07-22T01:20:23
| 2020-03-16T06:48:02
|
Python
|
UTF-8
|
Python
| false
| false
| 169
|
py
|
import re

# HackerRank 'Validating Roman Numerals': matches numerals 1-3999.
# Keep the raw-string prefix so backslash-free regex metachars stay literal.
regex_pattern = r'(?<=^)M{0,3}(C[MD]|D?C{0,3})(X[CL]|L?X{0,3})(I[VX]|V?I{0,3})(?=$)'  # Do not delete 'r'.

# Read one line, test it against the pattern, print True/False.
matched = re.match(regex_pattern, input())
print(str(bool(matched)))
|
[
"hbinhct@gmail.com"
] |
hbinhct@gmail.com
|
45db51e7b815934ce09e02643904239397422470
|
ee974d693ca4c4156121f8cb385328b52eaac07c
|
/env/lib/python3.6/site-packages/sklearn/externals/joblib/disk.py
|
56b6fcc6e11c358e17de25c0a80e8b29c5e5b103
|
[] |
no_license
|
ngonhi/Attendance_Check_System_with_Face_Recognition
|
f4531cc4dee565d0e45c02217f73f3eda412b414
|
92ff88cbc0c740ad48e149033efd38137c9be88d
|
refs/heads/main
| 2023-03-12T07:03:25.302649
| 2021-02-26T15:37:33
| 2021-02-26T15:37:33
| 341,493,686
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 129
|
py
|
version https://git-lfs.github.com/spec/v1
oid sha256:3d0a5346f8b6ea081d787ef43f9520d3be4604ea7341ac86e5950a80c51c19d4
size 3938
|
[
"Nqk180998!"
] |
Nqk180998!
|
d5778be7d85fc4bd898845b67be6e04539e54fd0
|
a5af73a980c08eef311cda1a27645f57dc07e0eb
|
/formApp/templatetags/my_extras.py
|
01796028288dc2c86b336e3dfbfc5c09aab23b48
|
[] |
no_license
|
pritamSarkar123/django2020-pracTwo
|
c38ef08537cd48d884a6d75eef67277c38ad49d0
|
84e32249bc5c7d0ba89cd0e1070fee05e2523183
|
refs/heads/master
| 2022-07-08T21:02:21.124866
| 2020-05-15T04:39:23
| 2020-05-15T04:39:23
| 264,003,427
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 198
|
py
|
from django import template

register = template.Library()


@register.filter(name='cut')
def cut(value, arg):
    """Template filter: remove every occurrence of ``arg`` from ``value``."""
    cleaned = value.replace(arg, '')
    return cleaned
|
[
"pritamsarkar84208220@gmail.com"
] |
pritamsarkar84208220@gmail.com
|
47057d9275b82dbfea7d0349e2bd1350c848700a
|
25404f4cfb9be3e6f1b3fe31a1554459eb200813
|
/my_redis/why_use_redis/count_realtime_users.py
|
fa7fa21aaed3c6586f1802c58a13f2b7ef57700f
|
[] |
no_license
|
nightimero/annal_report_test
|
1c6eb4b71482f870c753f5084212afd071929f57
|
7bbc76ba703527ba8f4b84fbdb94fd57b37b9887
|
refs/heads/master
| 2021-09-06T21:18:59.534963
| 2018-02-11T15:31:21
| 2018-02-11T15:31:21
| 103,259,391
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,747
|
py
|
# -*- coding:utf-8 -*-
# todo: http://www.cnblogs.com/clover-siyecao/p/5600078.html
# todo: http://debugo.com/python-redis/
# todo: http://flask.pocoo.org/snippets/71/
# todo: https://www.google.com/search?q=python+redis+应用 以后需要搜索加上 应用 2字,更实例。
import time
from redis import Redis
from datetime import datetime
ONLINE_LAST_MINUTES = 5
redis = Redis()
def mark_online(user_id):
    """Mark a user as online for the next ONLINE_LAST_MINUTES minutes.

    Adds the user to this minute's online set and records the mark time,
    with both keys expiring shortly after the activity window closes.
    """
    now = int(time.time())  # current UNIX timestamp
    # BUGFIX: `app` was undefined here (NameError at runtime); use the
    # module-level ONLINE_LAST_MINUTES constant instead.
    expires = now + (ONLINE_LAST_MINUTES * 60) + 10  # absolute expiry timestamp
    all_users_key = 'online-users/%d' % (now // 60)  # per-minute set of online user ids
    user_key = 'user-activity/%s' % user_id
    p = redis.pipeline()
    p.sadd(all_users_key, user_id)  # add the user to this minute's set
    p.set(user_key, now)  # remember when the user was last marked online
    p.expireat(all_users_key, expires)  # expire the set at the absolute timestamp
    p.expireat(user_key, expires)
    p.execute()
def get_user_last_activity(user_id):
    """Return the datetime of the user's last activity, or None if never marked."""
    stamp = redis.get('user-activity/%s' % user_id)
    return datetime.utcfromtimestamp(int(stamp)) if stamp is not None else None
def get_online_users():
    """Return ids of users seen in the last ONLINE_LAST_MINUTES minutes."""
    current = int(time.time()) // 60
    # BUGFIX: `app` was undefined (NameError) and `xrange` is Python-2-only;
    # use the module-level constant and `range` (works on both 2 and 3).
    minutes = range(ONLINE_LAST_MINUTES)
    # Union (SUNION) of the per-minute sets covering the activity window.
    return redis.sunion(['online-users/%d' % (current - x)
                         for x in minutes])
|
[
"chenxiang@aiknown.com"
] |
chenxiang@aiknown.com
|
45a3de2385c5bb0e77f6e3f913a5e766c392cf7a
|
7b15c40c00ba2008024979d0e520a922bc2f8229
|
/1st_try/144_Binary_Tree_Preorder_Traversal_1st_try.py
|
ac7a9b6c27275d70ef87a3427e2ff0de3860a3c6
|
[] |
no_license
|
axd8911/Leetcode
|
aa9875a5b55c7d5e961d9a3ea55823d06eb08a88
|
1c6cab14f4dac4f3f29f1b5ce13bb5289724fdb4
|
refs/heads/master
| 2022-07-07T12:59:38.251218
| 2021-06-22T06:27:05
| 2021-06-22T06:27:05
| 173,857,144
| 0
| 1
| null | 2022-06-22T01:22:30
| 2019-03-05T02:23:42
|
Python
|
UTF-8
|
Python
| false
| false
| 1,143
|
py
|
'''
98.6%
'''
#Iteration
# Definition for a binary tree node.
# class TreeNode:
# def __init__(self, x):
# self.val = x
# self.left = None
# self.right = None
class Solution:
    def preorderTraversal(self, root: 'TreeNode') -> 'List[int]':
        """Iterative preorder (root, left, right) traversal of a binary tree.

        BUGFIX: the annotations are now quoted — `TreeNode` and `List` are
        not defined in this file, so unquoted names raised NameError at
        class-creation time.
        """
        output = []
        stack = [root]  # explicit LIFO stack instead of recursion
        while stack:
            curr = stack.pop()
            if curr:
                output.append(curr.val)
                # Push right first so the left subtree is processed first.
                stack.append(curr.right)
                stack.append(curr.left)
        return output
#recursion
# Definition for a binary tree node.
# class TreeNode(object):
# def __init__(self, x):
# self.val = x
# self.left = None
# self.right = None
class Solution(object):
    """Recursive preorder (root, left, right) traversal."""

    def __init__(self):
        # Accumulator for the visit order of the current traversal.
        self.output = []

    def preorderTraversal(self, root):
        """
        :type root: TreeNode
        :rtype: List[int]
        """
        # BUGFIX: the accumulator previously persisted across calls, so a
        # second call on the same instance returned both traversals
        # concatenated. Reset it, then recurse via a private helper.
        self.output = []
        self._visit(root)
        return self.output

    def _visit(self, node):
        # Visit the node itself, then its left and right subtrees.
        if node:
            self.output.append(node.val)
            self._visit(node.left)
            self._visit(node.right)
|
[
"noreply@github.com"
] |
axd8911.noreply@github.com
|
e97b22133de8154a10a062cd7a9804c4b2e0ce3c
|
d9257c051591b63533fe53d5e6996689d3a93c50
|
/ocean_lib/ocean/ocean_auth.py
|
d6dfa779bcfb2334876b5d6a83cd12e1f3183c70
|
[
"Apache-2.0"
] |
permissive
|
PosthumanMarket/Posthuman.py
|
87e79fa826d50584b9cb2d722afa359aa6026e46
|
66538c29bfeb1c31199629b68bc10cc36caa3376
|
refs/heads/master
| 2023-08-26T06:35:47.866562
| 2021-06-06T18:51:34
| 2021-06-06T18:51:34
| 346,468,458
| 6
| 1
|
Apache-2.0
| 2021-06-05T02:03:27
| 2021-03-10T19:27:37
|
Python
|
UTF-8
|
Python
| false
| false
| 4,131
|
py
|
"""Ocean module."""
# Copyright 2018 Ocean Protocol Foundation
# SPDX-License-Identifier: Apache-2.0
import logging
from datetime import datetime
from ocean_lib.config_provider import ConfigProvider
from ocean_lib.web3_internal.utils import add_ethereum_prefix_and_hash_msg
from ocean_lib.web3_internal.web3_provider import Web3Provider
from ocean_lib.data_store.auth_tokens import AuthTokensStorage
from ocean_lib.web3_internal.web3helper import Web3Helper
class OceanAuth:
    """Ocean auth class.

    Provide basic management of a user auth token. This token can be used to emulate
    sign-in behaviour. The token can be stored and associated with an expiry time.
    This is useful in front-end applications that interact with a 3rd-party wallet
    apps. The advantage of using the auth token is to reduce the number of confirmation
    prompts requiring user action.

    The auth token works with a provider service such as Ocean provider-py which also uses this
    ocean module to handle auth tokens.

    Token format is "signature-timestamp".
    """
    DEFAULT_EXPIRATION_TIME = 30 * 24 * 60 * 60  # in seconds (30 days)
    DEFAULT_MESSAGE = "Ocean Protocol Authentication"

    def __init__(self, storage_path):
        # Backing store for issued tokens, keyed by wallet address.
        self._tokens_storage = AuthTokensStorage(storage_path)

    @staticmethod
    def _get_timestamp():
        # Current UNIX time, truncated to whole seconds.
        return int(datetime.now().timestamp())

    def _get_expiration(self):
        # Config value wins; fall back to the 30-day default when unset/falsy.
        return int(ConfigProvider.get_config().auth_token_expiration
                   or self.DEFAULT_EXPIRATION_TIME)

    def _get_raw_message(self):
        # Config-supplied challenge message, or the default.
        return ConfigProvider.get_config().auth_token_message or self.DEFAULT_MESSAGE

    def _get_message(self, timestamp):
        # The signed payload is "<message>\n<timestamp>".
        return f'{self._get_raw_message()}\n{timestamp}'

    def _get_message_and_time(self):
        timestamp = self._get_timestamp()
        return self._get_message(timestamp), timestamp

    @staticmethod
    def is_token_valid(token):
        # Shape check only: hex-prefixed signature, exactly one '-', timestamp.
        return isinstance(token, str) and token.startswith('0x') and len(token.split('-')) == 2

    def get(self, wallet):
        """
        :param wallet: Wallet instance signing the token
        :return: hex str the token generated/signed by the users wallet
        """
        _message, _time = self._get_message_and_time()
        try:
            prefixed_msg_hash = Web3Helper.sign_hash(
                add_ethereum_prefix_and_hash_msg(_message), wallet)
            return f'{prefixed_msg_hash}-{_time}'
        except Exception as e:
            # NOTE(review): on failure this logs and implicitly returns None;
            # store() would then raise on token.split — confirm intended.
            logging.error(f'Error signing token: {str(e)}')

    def check(self, token):
        """
        :param token: hex str consist of signature and timestamp
        :return: hex str ethereum address ('0x0' for malformed/expired tokens)
        """
        parts = token.split('-')
        if len(parts) < 2:
            return '0x0'
        sig, timestamp = parts
        # Expired tokens resolve to the zero-address sentinel.
        if self._get_timestamp() > (int(timestamp) + self._get_expiration()):
            return '0x0'
        message = self._get_message(timestamp)
        address = Web3Helper.personal_ec_recover(message, sig)
        return Web3Provider.get_web3().toChecksumAddress(address)

    def store(self, wallet):
        """
        :param wallet: Wallet instance signing the token
        :return:
            token that was generated and stored for this users wallet
        """
        token = self.get(wallet)
        # Persist under the wallet address, keyed by the token's timestamp.
        timestamp = token.split('-')[1]
        self._tokens_storage.write_token(wallet.address, token, timestamp)
        return token

    def restore(self, wallet):
        """
        :param wallet: Wallet instance to fetch the saved token
        :return:
            hex str the token retrieved from storage
            None if no token found for this users wallet
        """
        token = self._tokens_storage.read_token(wallet.address)[0]
        if not token:
            return None
        # Only hand the token back if it still recovers to this wallet's address.
        address = self.check(token)
        return token if address == wallet.address else None

    def is_stored(self, wallet):
        """
        :param wallet: Wallet instance
        :return: bool whether this wallet has a stored (and still valid) token
        """
        return self.restore(wallet) is not None
|
[
"travis@travis-ci.org"
] |
travis@travis-ci.org
|
48168d4ee1ba9c5390d4058ed3fbe9b827386801
|
c6431cdf572dd10f0f4d45839e6081124b246f90
|
/code/lc297.py
|
1bf7215362b793eb944544a277e85f8c3b4e766e
|
[] |
no_license
|
bendanwwww/myleetcode
|
1ec0285ea19a213bc629e0e12fb8748146e26d3d
|
427846d2ad1578135ef92fd6549235f104f68998
|
refs/heads/master
| 2021-09-27T19:36:40.111456
| 2021-09-24T03:11:32
| 2021-09-24T03:11:32
| 232,493,899
| 2
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,916
|
py
|
"""
序列化是将一个数据结构或者对象转换为连续的比特位的操作,进而可以将转换后的数据存储在一个文件或者内存中,同时也可以通过网络传输到另一个计算机环境,采取相反方式重构得到原数据。
请设计一个算法来实现二叉树的序列化与反序列化。这里不限定你的序列 / 反序列化算法执行逻辑,你只需要保证一个二叉树可以被序列化为一个字符串并且将这个字符串反序列化为原始的树结构。
示例:
你可以将以下二叉树:
1
/ \
2 3
/ \
4 5
序列化为 "[1,2,3,null,null,4,5]"
提示: 这与 LeetCode 目前使用的方式一致,详情请参阅 LeetCode 序列化二叉树的格式。你并非必须采取这种方式,你也可以采用其他的方法解决这个问题。
说明: 不要使用类的成员 / 全局 / 静态变量来存储状态,你的序列化和反序列化算法应该是无状态的。
"""
# Definition for a binary tree node.
class TreeNode(object):
    """Binary tree node: a value plus left/right child references."""

    def __init__(self, x):
        self.val = x
        self.left = self.right = None  # a new node starts as a leaf
class Codec:
    """Level-order (BFS) serializer/deserializer for binary trees.

    Format: '[v1,v2,null,...]' — a BFS listing including 'null' placeholders,
    with trailing nulls trimmed. Note deserialization keeps node values as
    strings (no int conversion), matching the original behavior.
    """

    def serialize(self, root):
        """Encode a tree to a single string via BFS with null placeholders."""
        resArray = []
        queue = [root]
        while len(queue) > 0:
            node = queue.pop(0)  # NOTE: O(n) pop(0); collections.deque would be O(1)
            if node is None:
                resArray.append('null')
            else:
                resArray.append(node.val)
                queue.append(node.left)
                queue.append(node.right)
        # Trim trailing 'null' placeholders.
        # BUGFIX: was `resArray[n] is not 'null'` — identity comparison on
        # strings is unreliable (and a SyntaxWarning on modern Python);
        # compare with `!=` / `==` instead.
        while resArray and resArray[-1] == 'null':
            resArray.pop()
        return '[' + ','.join(map(str, resArray)) + ']'

    def deserialize(self, data):
        """Rebuild the tree from a serialized string (values stay strings)."""
        if data is None or data == '[]':
            return None
        nodeArray = data.replace('[', '').replace(']', '').split(',')
        index = 0
        root = TreeNode(nodeArray[index])
        queue = [root]
        while len(queue) > 0:
            node = queue.pop(0)
            # The children of the k-th dequeued (non-null) node sit at
            # positions 2k+1 and 2k+2; `index` tracks 2k. Out-of-range
            # positions are trimmed trailing nulls.
            if index + 1 >= len(nodeArray) or nodeArray[index + 1] == 'null':
                node.left = None
            else:
                node.left = TreeNode(nodeArray[index + 1])
            if index + 2 >= len(nodeArray) or nodeArray[index + 2] == 'null':
                node.right = None
            else:
                node.right = TreeNode(nodeArray[index + 2])
            index += 2
            if node.left is not None:
                queue.append(node.left)
            if node.right is not None:
                queue.append(node.right)
        return root
# def deserialize(self, data):
# if data is None or data == '[]':
# return None
# nodeArray = data.replace('[', '').replace(']', '').split(',')
# root = TreeNode(nodeArray[0])
# self.deserializeRoot(root, nodeArray, 0)
# return root
#
# def deserializeRoot(self, node, array, n):
# nodeLeft = 2 * n + 1
# nodeRight = 2 * n + 2
# if nodeLeft < len(array) and array[nodeLeft] != 'null':
# node.left = TreeNode(array[nodeLeft])
# self.deserializeRoot(node.left, array, nodeLeft)
# else:
# node.left = None
#
# if nodeRight < len(array) and array[nodeRight] != 'null':
# node.right = TreeNode(array[nodeRight])
# self.deserializeRoot(node.right, array, nodeRight)
# else:
# node.right = None
# Smoke test: serialize a hand-built tree, then rebuild it from the string.
codec = Codec()
root = TreeNode(5)
root.left = TreeNode(2)
root.right = TreeNode(3)
root.right.left = TreeNode(2)
root.right.right = TreeNode(4)
root.right.left.left = TreeNode(3)
root.right.left.right = TreeNode(1)
encoded = codec.serialize(root)
decoded = codec.deserialize(encoded)
print(encoded)
print(decoded)
|
[
"461806307@qq.com"
] |
461806307@qq.com
|
f3767eaf83ee7afc2f14b3755b237dcea3de3fcb
|
bf9d6b1ce9b034df2a034ff93f526638720d359f
|
/accounts/migrations/0004_auto_20170808_0547.py
|
c1f288ce918585ca8463578033cdb7e088121853
|
[] |
no_license
|
toluwanicareer/lms
|
fc2b1c2c8b728826180f27f461cec5ea95adbd59
|
41de904043d951843ed748d6bf2cffc98462f99d
|
refs/heads/master
| 2021-07-09T13:58:46.986986
| 2017-09-28T05:59:50
| 2017-09-28T05:59:50
| 105,106,347
| 2
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,040
|
py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.2 on 2017-08-08 04:47
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    # Auto-generated by Django 1.11.2; follows 0003_auto_20170808_0545.
    # Replaces the Person->user link with OneToOne `user` fields on the
    # Client and Employee models.

    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('accounts', '0003_auto_20170808_0545'),
    ]

    operations = [
        # Drop the old link from Person to the auth user.
        migrations.RemoveField(
            model_name='person',
            name='user',
        ),
        # Each Client now owns exactly one auth user. default='1' backfills
        # existing rows with user pk 1; deleting the user cascades.
        migrations.AddField(
            model_name='client',
            name='user',
            field=models.OneToOneField(default='1', on_delete=django.db.models.deletion.CASCADE, related_name='person', to=settings.AUTH_USER_MODEL),
        ),
        # Same pattern for Employee, with a distinct reverse accessor name.
        migrations.AddField(
            model_name='employee',
            name='user',
            field=models.OneToOneField(default='1', on_delete=django.db.models.deletion.CASCADE, related_name='person_employee', to=settings.AUTH_USER_MODEL),
        ),
    ]
|
[
"abiodun.toluwanii@gmail.com"
] |
abiodun.toluwanii@gmail.com
|
1c5a6b732f8b3b71d5f22a5564bcde3bd8ae4ce6
|
348bd616afd274425ad9737964f37d0b13583310
|
/docs/source/conf.py
|
0f22d1d4e0dc4ed7d10c0bfe6756e0cbb8a1d009
|
[] |
no_license
|
whitews/ReFlow
|
d2062ab03b62c82e250599557a29e86f61e51957
|
27bd33ac3824de6234952d56cbb66b0e77f076a1
|
refs/heads/master
| 2020-12-25T16:58:41.540154
| 2019-05-01T16:40:53
| 2019-05-01T16:40:53
| 7,260,054
| 4
| 2
| null | 2015-05-15T13:50:24
| 2012-12-20T15:44:52
|
JavaScript
|
UTF-8
|
Python
| false
| false
| 7,898
|
py
|
# -*- coding: utf-8 -*-
#
# ReFlow documentation build configuration file, created by
# sphinx-quickstart on Wed Jan 23 19:50:53 2013.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys, os
import sphinx_rtd_theme

# If autodoc targets live outside the doc tree, extend sys.path here, e.g.
# sys.path.insert(0, os.path.abspath('.')).

# -- General configuration -----------------------------------------------------

# Sphinx extension modules used when building these docs.
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.doctest', 'sphinx.ext.todo', 'sphinx.ext.coverage', 'sphinx.ext.pngmath', 'sphinx.ext.mathjax']

# Template locations, source suffix, and the master toctree document.
templates_path = ['_templates']
source_suffix = '.rst'
master_doc = 'index'

# Project metadata.
project = u'ReFlow'
copyright = u'2013, Scott White'

# The short X.Y version and the full release string (kept in sync here).
version = '0.1'
release = '0.1'

# Patterns, relative to the source directory, ignored when finding sources.
exclude_patterns = []

# Pygments (syntax highlighting) style.
pygments_style = 'sphinx'

# -- Options for HTML output ---------------------------------------------------

# Use the Read the Docs theme.
html_theme = 'sphinx_rtd_theme'
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]

# Custom static files; copied after the builtin static files.
html_static_path = ['_static']

# Output file base name for the HTML help builder.
htmlhelp_basename = 'ReFlowdoc'

# -- Options for LaTeX output --------------------------------------------------

# Per-builder overrides ('papersize', 'pointsize', 'preamble', ...);
# defaults are used.
latex_elements = {
}

# Grouping the document tree into LaTeX files. One tuple per file:
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
    ('index', 'ReFlow.tex', u'ReFlow Documentation',
     u'Scott White', 'manual'),
]

# -- Options for manual page output --------------------------------------------

# One entry per manual page:
# (source start file, name, description, authors, manual section).
man_pages = [
    ('index', 'reflow', u'ReFlow Documentation',
     [u'Scott White'], 1)
]

# -- Options for Texinfo output ------------------------------------------------

# Grouping the document tree into Texinfo files:
# (source start file, target name, title, author, dir menu entry,
# description, category).
texinfo_documents = [
    ('index', 'ReFlow', u'ReFlow Documentation',
     u'Scott White', 'ReFlow', 'One line description of project.',
     'Miscellaneous'),
]
|
[
"whitews@gmail.com"
] |
whitews@gmail.com
|
493325b68e5cee35062e2a15547d5355bb645658
|
3365e4d4fc67bbefe4e8c755af289c535437c6f4
|
/.history/src/core/dialogs/swimmer_dialog_20170810151903.py
|
4281b0f5b05eac3a5ee208c260614a79152a8016
|
[] |
no_license
|
kiranhegde/OncoPlotter
|
f3ab9cdf193e87c7be78b16501ad295ac8f7d2f1
|
b79ac6aa9c6c2ca8173bc8992ba3230aa3880636
|
refs/heads/master
| 2021-05-21T16:23:45.087035
| 2017-09-07T01:13:16
| 2017-09-07T01:13:16
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,751
|
py
|
from matplotlib.backends.backend_qt5agg import FigureCanvasQTAgg as FigureCanvas
from matplotlib.backends.backend_qt5agg import NavigationToolbar2QT as NavigationToolbar
import matplotlib.pyplot as plt
from PyQt5.QtWidgets import (QApplication, QDialog, QWidget, QPushButton, QVBoxLayout, QTreeWidget, QTreeWidgetItem)
from PyQt5 import QtCore, QtGui
import numpy as np
import core.gui.swimmer as swimmmer
class Swimmer(QWidget, swimmmer.Ui_Swimmer):
    # Dialog widget wrapping the Qt-Designer-generated swimmer-plot UI.

    def __init__(self, parent):
        super(Swimmer, self).__init__(parent)
        self.setupUi(self)  # build the widgets defined in the generated Ui_Swimmer

    def on_swimmer_data_signal(self, signal):
        # Slot: cache the incoming data keyed under 'swimmer_data'.
        self.swimmer_data = signal['swimmer_data']  # pandas dataframe

    def closeEvent(self, event):
        # Override closeEvent so that we hide the window rather than exit
        # so we don't lose data.
        event.ignore()
        self.hide()
class SwimmerPlotter(QWidget):
    """Widget embedding a matplotlib canvas that draws stacked horizontal
    'swimmer' bars from the received dataframe."""

    # Needs to be a user variable so that as more/less bars are added it looks ok.
    markersize = 5
    # Height of each horizontal bar.
    bar_width = 0.75

    def __init__(self, parent):
        super(SwimmerPlotter, self).__init__(parent)
        self.figure = plt.figure()
        self.canvas = FigureCanvas(self.figure)
        self.toolbar = NavigationToolbar(self.canvas, self)
        self.btn_plot = QPushButton('Default Plot')
        self.btn_plot.clicked.connect(self.default_plot)
        self.layout = QVBoxLayout()
        self.layout.addWidget(self.toolbar)
        self.layout.addWidget(self.canvas)
        self.layout.addWidget(self.btn_plot)
        self.setLayout(self.layout)

    def on_swimmer_data_signal(self, signal):
        # Slot: cache the data and allow plotting once data has arrived.
        self.swimmer_data = signal['swimmer_data']  # pandas dataframe
        self.btn_plot.setEnabled(True)

    def on_general_settings_signal(self, signal):
        # Slot: apply (title, xlabel, ylabel) if the axes already exist.
        try:
            hasattr(self, 'ax')  # NOTE(review): result unused; likely meant as a guard
            self.ax.set_title(signal[0])
            self.ax.set_xlabel(signal[1])
            self.ax.set_ylabel(signal[2])
            self.canvas.draw()
        except Exception as e:
            print(e)

    def default_plot(self):
        """Plot swimmer data as stacked horizontal bars (one bar per row,
        one stacked segment per dataframe column 1-5)."""
        self.figure.clear()
        self.ax = self.figure.add_subplot(111)
        self.ax.grid(color='k', axis='y', alpha=0.25)
        # One bar position per row; column 0 is assumed to identify the
        # subject (TODO confirm schema against the sender of the data signal).
        self.bar_locations = np.arange(len(self.swimmer_data.iloc[:, 0]))
        # BUGFIX: iterating the dataframe itself yields column *names*, not
        # values; take each column's values. (.ix is deprecated -> .iloc)
        self.stack_lists = [self.swimmer_data.iloc[:, col].tolist()
                            for col in range(1, 6)]
        self.offset_list = [0] * len(self.stack_lists[0])
        for segment in self.stack_lists:
            # BUGFIX: `ax`, `offset_list`, `stack_lists`, `bar_width` lacked
            # `self.`, and `stack_length_lists` was undefined (NameError).
            self.ax.barh(self.bar_locations, segment, self.bar_width,
                         color='b', left=self.offset_list, edgecolor='k')
            # Accumulate offsets so the next segment stacks to the right.
            self.offset_list = [sum(pair) for pair in zip(self.offset_list, segment)]
        self.canvas.draw()
        self.ax.hold(False)  # rewrite the plot when plot() is called again
|
[
"ngoyal95@terpmail.umd.edu"
] |
ngoyal95@terpmail.umd.edu
|
b173f68c5c45adaaeffd0c75d7eebf3022350c31
|
d177addc1830153404c71fa115a5584f94a392c3
|
/N1539_KthMissingPositiveNumber.py
|
ec2a85adf987c4c2c541611169e916af013faa56
|
[] |
no_license
|
zerghua/leetcode-python
|
38a84452f60a360e991edf90c8156de03a949000
|
02726da394971ef02616a038dadc126c6ff260de
|
refs/heads/master
| 2022-10-25T11:36:22.712564
| 2022-10-02T19:56:52
| 2022-10-02T19:56:52
| 61,502,010
| 2
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,298
|
py
|
#
# Create by Hua on 4/3/22.
#
"""
Given an array arr of positive integers sorted in a strictly increasing order, and an integer k.
Find the kth positive integer that is missing from this array.
Example 1:
Input: arr = [2,3,4,7,11], k = 5
Output: 9
Explanation: The missing positive integers are [1,5,6,8,9,10,12,13,...]. The 5th missing positive integer is 9.
Example 2:
Input: arr = [1,2,3,4], k = 2
Output: 6
Explanation: The missing positive integers are [5,6,7,...]. The 2nd missing positive integer is 6.
Constraints:
1 <= arr.length <= 1000
1 <= arr[i] <= 1000
1 <= k <= 1000
arr[i] < arr[j] for 1 <= i < j <= arr.length
"""
class Solution(object):
    def findKthPositive(self, arr, k):
        """
        :type arr: List[int]
        :rtype: int

        Return the k-th positive integer missing from the sorted array.

        thought: iterate 1-3000, count missing values, return the kth.
        Improved: membership tests against a set are O(1) instead of the
        original O(len(arr)) list scan, and we return as soon as the k-th
        missing value is found instead of building a list.
        can do binary search:
        https://leetcode.com/problems/kth-missing-positive-number/discuss/779999/JavaC%2B%2BPython-O(logN)
        """
        present = set(arr)  # O(1) membership instead of list scan
        missing = 0
        # Constraints bound answers well below 3000 (arr[i], k <= 1000).
        for candidate in range(1, 3001):
            if candidate not in present:
                missing += 1
                if missing == k:
                    return candidate
        return -1
|
[
"zerghua@gmail.com"
] |
zerghua@gmail.com
|
6d856f492bd4e381bfaf4ac7158cb061c4a9f63b
|
6b9084d234c87d7597f97ec95808e13f599bf9a1
|
/Dataset/MOT/Seed/PathTrack.py
|
593c78a035dd677424dfafa90338eb09be91d69b
|
[] |
no_license
|
LitingLin/ubiquitous-happiness
|
4b46234ce0cb29c4d27b00ec5a60d3eeb52c26fc
|
aae2d764e136ca4a36c054212b361dd7e8b22cba
|
refs/heads/main
| 2023-07-13T19:51:32.227633
| 2021-08-03T16:02:03
| 2021-08-03T16:02:03
| 316,664,903
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 649
|
py
|
# https://www.trace.ethz.ch/publications/2017/pathtrack/index.html
from Dataset.Base.factory_seed import BaseSeed
from Dataset.Type.data_split import DataSplit
class PathTrack_Seed(BaseSeed):
    """Dataset seed for the PathTrack multi-object-tracking dataset."""

    def __init__(self, root_path: str=None, data_split: DataSplit=DataSplit.Training | DataSplit.Validation):
        # Fall back to the configured path when no explicit root is given.
        if root_path is None:
            root_path = self.get_path_from_config('PathTrack_PATH')
        super(PathTrack_Seed, self).__init__('PathTrack', root_path, data_split, 1)

    def construct(self, constructor):
        # Deferred import keeps the heavy construction code out of module load.
        from .Impl.PathTrack import construct_PathTrack
        construct_PathTrack(constructor, self)
|
[
"linliting06@live.com"
] |
linliting06@live.com
|
75565b4e4f3520375386cf6faaf8f0755753e4f6
|
362765585815165ca3625895d4a675600efdb518
|
/orchestrator.py
|
67e314397be339554e940e42475881ae9f5046a7
|
[] |
no_license
|
obulpathi/notifications
|
00e7decd479685d10e815b9ada7b1f774862f0f9
|
131b784d5e40310cdfc98587e0ceaff4cca4cf03
|
refs/heads/master
| 2021-03-12T20:23:05.143662
| 2014-07-08T18:58:44
| 2014-07-08T18:58:44
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,665
|
py
|
import os
import sys
from jinja2 import Environment, FileSystemLoader, meta
def render(sense, action):
    """Render the named 'sense' and 'action' Jinja2 templates to /src/binpy.

    Template variables are filled from NOTIFICATIONS_<VAR> environment
    variables; the process exits if any required variable is missing.

    :param sense: template basename (without .py) under /src/templates/sense
    :param action: template basename (without .py) under /src/templates/action

    The original body duplicated the whole render pipeline for sense and
    action; it is factored into one helper here.  The Python-2 ``print``
    statements are replaced by ``print()`` calls, which emit the same text.
    """
    def _render_one(template_dir, template_name, output_path):
        # Render a single template, resolving its variables from the environment.
        env = Environment(loader=FileSystemLoader(template_dir))
        template_source = env.loader.get_source(env, template_name + '.py')[0]
        parsed_content = env.parse(template_source)
        variables = meta.find_undeclared_variables(parsed_content)
        args = {}
        for var in variables:
            env_key = "NOTIFICATIONS_" + var.upper()
            if env_key in os.environ:
                args[var] = os.environ[env_key]
            else:
                print("Could not find " + env_key + " in environment variables")
                exit()
        template = env.get_template(template_name + '.py')
        # `with` guarantees the file handle is closed even on write errors.
        with open(output_path, 'w+') as fh:
            fh.write(template.render(**args))

    _render_one('/src/templates/sense', sense, '/src/binpy/sense.py')
    _render_one('/src/templates/action', action, '/src/binpy/action.py')
if __name__ == "__main__":
    # CLI usage: orchestrator.py <sense_template> <action_template>
    render(sys.argv[1], sys.argv[2])
    # Import the freshly rendered modules and run one sense/act cycle.
    from binpy import action, sense
    result = sense.sense()
    # NOTE(review): result appears to be (triggered, payload) — confirm
    # against the generated sense templates.
    if result[0]:
        action.action(result[1])
|
[
"obulpathi@gmail.com"
] |
obulpathi@gmail.com
|
3095707d0fd46be66dd4cba60697331cad3d4d77
|
5b57fa09b08e72ccb10de1bf341b556c00b4be42
|
/Server/src/pyticas_tetres/da/incident_iris.py
|
46a5025a883e4a2eedf26a6be2d1dd3019d017ac
|
[] |
no_license
|
mnit-rtmc/tetres
|
6f331e463f90e1608c6e47d72c232e3f6b8d5d33
|
4935d82ffe5f51284f08749b27f48491a62d9968
|
refs/heads/master
| 2022-10-02T02:45:13.860004
| 2021-12-08T23:14:51
| 2021-12-08T23:14:51
| 200,887,049
| 3
| 6
| null | 2022-09-16T18:07:46
| 2019-08-06T16:25:02
|
Python
|
UTF-8
|
Python
| false
| false
| 3,784
|
py
|
# -*- coding: utf-8 -*-
__author__ = 'Chongmyung Park (chongmyung.park@gmail.com)'
import datetime
from sqlalchemy import and_
from pyticas_tetres.da.base import DataAccessBase
from pyticas_tetres.db.iris import model, conn
from pyticas_tetres.ttypes import IrisIncidentInfo
class IrisIncidentDataAccess(object):
    """Data-access layer for IRIS incident records.

    Thin wrapper around ``DataAccessBase`` bound to the IRIS DB session,
    with ``event_id`` as the primary key.  Provides windowed querying by
    date range and (optionally) corridor/direction.
    """

    def __init__(self, **kwargs):
        # Bind to the IRIS database session; rows are keyed by 'event_id'.
        kwargs['session'] = conn.get_session()
        kwargs['primary_key'] = 'event_id'
        self.da_base = DataAccessBase(model.IrisIncident, IrisIncidentInfo, **kwargs)

    def list_as_generator(self, sdate, edate, corridor, direction, **kwargs):
        """Yield incidents in [sdate, edate], optionally filtered by road.

        :param sdate: e.g. 2013-12-04 12:00:00
        :type sdate: str or datetime.datetime
        :param edate: e.g. 2013-12-04 13:00:00
        :type edate: str or datetime.datetime
        :param corridor: only number part of corridor name e.g. 35W, 94, 494, 100, 169
        :type corridor: str
        :param direction: e.g. NB, SB, EB, WB
        :type direction: str

        Recognized kwargs: ``as_model`` (yield raw ORM rows instead of info
        objects), ``limit``, ``order_by`` (tuple of column name and
        'asc'/'desc'), ``window_size`` (rows fetched per DB round-trip).

        :rtype: Generator : IncidentInfo
        """
        # Accept ISO-ish date strings as a convenience.
        if isinstance(sdate, str):
            sdate = datetime.datetime.strptime(sdate, '%Y-%m-%d %H:%M:%S')
        if isinstance(edate, str):
            edate = datetime.datetime.strptime(edate, '%Y-%m-%d %H:%M:%S')
        as_model = kwargs.get('as_model', False)
        limit = kwargs.get('limit', None)
        order_by = kwargs.get('order_by', None)
        window_size = kwargs.get('window_size', 1000)
        db_model = model.IrisIncident
        session = self.da_base.session
        # Corridor filtering is applied only when BOTH road and direction
        # are given; a partial filter is silently ignored.
        if corridor and direction:
            qry = (session.query(db_model).filter(and_(
                db_model.event_date >= sdate,
                db_model.event_date <= edate
            )).filter(and_(
                db_model.road == corridor,
                db_model.direction == direction
            ))
            )
        else:
            qry = (session.query(db_model).filter(and_(
                db_model.event_date >= sdate,
                db_model.event_date <= edate
            ))
            )
        # apply 'order by'
        if order_by and isinstance(order_by, tuple):
            # e.g. order_by = ('id', 'desc')
            # e.g. order_by = ('name', 'asc')
            qry = qry.order_by(getattr(getattr(db_model, order_by[0]), order_by[1])())
        else:
            qry = qry.order_by(db_model.event_date.asc())
        # apply 'limit'
        if limit:
            qry = qry.limit(limit)
        # Stream results in windows to avoid loading the full result set.
        for m in self.da_base.query_generator(qry, window_size):
            if as_model:
                yield m
            else:
                yield self.da_base.to_info(m)

    def list(self, sdate, edate, corridor=None, direction=None, **kwargs):
        """Materialize list_as_generator() into a list.

        :param sdate: e.g. 2013-12-04 12:00:00
        :type sdate: str or datetime.datetime
        :param edate: e.g. 2013-12-04 13:00:00
        :type edate: str or datetime.datetime
        :param corridor: only number part of corridor name e.g. 35W, 94, 494, 100, 169
        :type corridor: str
        :param direction: e.g. NB, SB, EB, WB
        :type direction: str
        :rtype: list[IrisIncidentInfo]
        """
        return [m for m in self.list_as_generator(sdate, edate, corridor, direction, **kwargs)]

    def get_by_id(self, pkey):
        """Fetch a single record by primary key.

        :type pkey: int
        :rtype: IrisIncidentInfo
        """
        return self.da_base.get_data_by_id(pkey)

    def get_by_event_id(self, event_id):
        """Fetch a single record by IRIS event id, or None if absent.

        :type event_id: int
        :rtype: IrisIncidentInfo
        """
        res = self.da_base.search([('event_id', event_id)])
        if res:
            return res[0]
        else:
            return None

    def close_session(self):
        # Release the underlying DB session.
        self.da_base.close()
|
[
"beau1eth@tmsdev5.dot.state.mn.us"
] |
beau1eth@tmsdev5.dot.state.mn.us
|
535268bab86addff45ad4349f708df0310b4627d
|
d5c1f39f619e49be02238bbd9c327103ee0c6199
|
/vai/plugins/commands/Time/Time.py
|
4ed8dc22387aae6b3ff5563e24423513b64af622
|
[] |
no_license
|
CRY-D/vai
|
520d35ef1b32de2e4058f64a73cd1b9da9b2e313
|
7e6981690209e8ccd9a6e6f64d2f2a6c7426ef3f
|
refs/heads/master
| 2023-07-06T18:24:43.884669
| 2021-07-20T09:40:48
| 2021-07-20T09:40:48
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 317
|
py
|
from vai import sdk
import time
class TimePlugin(sdk.CommandPlugin):
    """Command plugin that shows the current local time in the status bar."""

    def name(self):
        """Plugin display name (SDK hook)."""
        return "Time"

    def keyword(self):
        """Command keyword that invokes this plugin."""
        return "time"

    def execute(self, command_line):
        """Show the current time in the status bar for three seconds."""
        message = time.asctime()
        sdk.statusBar().setMessage(message, 3000)
|
[
"stefano.borini@gmail.com"
] |
stefano.borini@gmail.com
|
11c869ecd5e826427cc7d523ecdbb29f056a4b97
|
0ddcfcbfc3faa81c79e320c34c35a972dab86498
|
/tests/test_summary_ranges.py
|
c75bf5a290b7f94b3f204aa5c8f8d1d2a2f4a90a
|
[] |
no_license
|
IvanWoo/coding-interview-questions
|
3311da45895ac4f3c394b22530079c79a9215a1c
|
1312305b199b65a11804a000432ebe28d1fba87e
|
refs/heads/master
| 2023-08-09T19:46:28.278111
| 2023-06-21T01:47:07
| 2023-06-21T01:47:07
| 135,307,912
| 0
| 0
| null | 2023-07-20T12:14:38
| 2018-05-29T14:24:43
|
Python
|
UTF-8
|
Python
| false
| false
| 329
|
py
|
import pytest
from puzzles.summary_ranges import summary_ranges
@pytest.mark.parametrize(
    "nums, expected",
    [
        ([0, 1, 2, 4, 5, 7], ["0->2", "4->5", "7"]),
        ([0, 2, 3, 4, 6, 8, 9], ["0", "2->4", "6", "8->9"]),
    ],
)
def test_summary_ranges(nums, expected):
    """summary_ranges collapses consecutive runs into 'a->b' strings."""
    result = summary_ranges(nums)
    assert result == expected
|
[
"tyivanwu@gmail.com"
] |
tyivanwu@gmail.com
|
accc82f441ea51a73813ec2cb7dbc086e252d603
|
cf9494e7953c91d786e003bfbcd9f6ad93126c7f
|
/widgets_entry2.py
|
4879d321a74e5d54581585dbc09c16bb798c330e
|
[] |
no_license
|
utisz86/TkinterTutorial
|
7fd318e631f6e1a858083c3c157fa68a60bcc85a
|
32030494ac3035d442a432bf6b389b6ff7e1c537
|
refs/heads/master
| 2022-11-26T06:36:47.959218
| 2020-07-07T22:10:33
| 2020-07-07T22:10:33
| 277,841,542
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 235
|
py
|
import tkinter as tk

# Tutorial script: show a labeled text entry and echo its initial contents.
window = tk.Tk()
# create a Label and an Entry widget
label = tk.Label(text="Name")
entry = tk.Entry()
# visible
label.pack()
entry.pack()
entry.insert(0,"mi?")
# NOTE(review): get() runs BEFORE mainloop(), so this reads the pre-filled
# "mi?" text, not anything the user types — confirm this is intended.
name = entry.get()
print(name)
window.mainloop()
|
[
"soma.kormanik@outlook.com"
] |
soma.kormanik@outlook.com
|
45aca59783a82f8aebeef1769d251a6c7c1aea2f
|
1186e0f758d930960aeb5319200ca50e09ff1d35
|
/build/lib/cplvm/lm.py
|
5d6ea254cd9fc48a55c71f271f202ac3c7edc685
|
[
"MIT"
] |
permissive
|
ethanweinberger/cplvm
|
c182ee3a960f20ce2975cec5492ec5b1f434dd71
|
f4bbcfc4b2e9a9cec7d01eb5f7ff3a169d6e3ff6
|
refs/heads/main
| 2023-06-17T15:23:15.604291
| 2021-07-07T12:04:57
| 2021-07-07T12:04:57
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 5,771
|
py
|
import functools
import warnings
import matplotlib.pyplot as plt
import numpy as np
import seaborn as sns
import tensorflow.compat.v2 as tf
import tensorflow_probability as tfp
from tensorflow_probability import distributions as tfd
from tensorflow_probability import bijectors as tfb
from scipy.stats import multivariate_normal
tf.enable_v2_behavior()
# Silence library warning chatter during model fitting.
warnings.filterwarnings('ignore')
# Number of optimization steps for variational inference.
NUM_VI_ITERS = 300
# Adam learning rate used for VI.
LEARNING_RATE_VI = 0.05
# ------- Specify model ---------
def clvm(data_dim, num_datapoints, counts_per_cell, dummy, is_H0=False):
    """Generative-model coroutine (tfd.JointDistributionCoroutine style).

    Yields priors for a per-gene intercept `mu` and group effect `beta`
    (both [data_dim, 1] normals), then the observed data `x` whose mean is
    mu + beta applied to the group indicator row `dummy`, offset by log
    size factors.

    NOTE(review): `num_datapoints` and `is_H0` are accepted but unused in
    this body — presumably kept for signature compatibility with the
    partial() call in fit_model; confirm.
    """
    mu = yield tfd.Normal(loc=tf.zeros([data_dim, 1]),
                          scale=tf.ones([data_dim, 1]),
                          name="mu")
    beta = yield tfd.Normal(loc=tf.zeros([data_dim, 1]),
                            scale=tf.ones([data_dim, 1]),
                            name="beta")
    # sigma = yield tfd.InverseGamma(concentration=tf.ones([data_dim, 1]),
    #                                scale=1,
    #                                name="sigma")
    # Observation model: unit-scale normal around the linear predictor.
    data = yield tfd.Normal(loc=(tf.matmul(beta, dummy) + mu) + np.log(counts_per_cell),
                            scale=1,
                            name="x")
def fit_model(X, Y, compute_size_factors=True, is_H0=False):
    """Fit the linear contrastive model to two count matrices by VI.

    :param X: background counts, shape (data_dim, num_datapoints_x)
    :param Y: foreground counts, shape (data_dim, num_datapoints_y)
    :param compute_size_factors: when True, use per-column total counts of
        the raw data as size factors; otherwise use 1.0
    :param is_H0: null-model flag (NOTE(review): both branches below build
        the identical target_log_prob_fn — looks like dead duplication;
        confirm before relying on it)
    :return: dict with 'loss_trace' (the VI loss history)
    """
    assert X.shape[0] == Y.shape[0]
    data_dim = X.shape[0]
    num_datapoints_x, num_datapoints_y = X.shape[1], Y.shape[1]
    n = num_datapoints_x + num_datapoints_y
    # Group-indicator row vector: 0 for X columns, 1 for Y columns.
    dummy = np.zeros(n)
    dummy[num_datapoints_x:] = 1
    dummy = np.expand_dims(dummy, 0)
    # Concatenate and log1p-transform the observed counts.
    data = np.concatenate([X, Y], axis=1)
    data = np.log(data + 1)
    if compute_size_factors:
        # counts_per_cell = np.sum(data, axis=0)
        # counts_per_cell = np.expand_dims(counts_per_cell, axis=0)
        counts_per_cell = np.sum(np.concatenate([X, Y], axis=1), axis=0)
        counts_per_cell = np.expand_dims(counts_per_cell, axis=0)
        assert counts_per_cell.shape[1] == X.shape[1] + Y.shape[1]
    else:
        counts_per_cell = 1.0
    # ------- Specify model ---------
    concrete_clvm_model = functools.partial(clvm,
                                            data_dim=data_dim,
                                            num_datapoints=n,
                                            counts_per_cell=counts_per_cell,
                                            dummy=dummy,
                                            is_H0=is_H0)
    model = tfd.JointDistributionCoroutineAutoBatched(concrete_clvm_model)
    if is_H0:
        def target_log_prob_fn(mu, beta): return model.log_prob(
            (mu, beta, data))
    else:
        def target_log_prob_fn(mu, beta): return model.log_prob(
            (mu, beta, data))
    # ------- Specify variational families -----------
    # Variational parmater means
    # mu
    qmu_mean = tf.Variable(tf.random.normal([data_dim, 1]))
    qmu_stddv = tfp.util.TransformedVariable(
        1e-4 * tf.ones([data_dim, 1]),
        bijector=tfb.Softplus())
    # beta
    qbeta_mean = tf.Variable(tf.random.normal([data_dim, 1]))
    qbeta_stddv = tfp.util.TransformedVariable(
        1e-4 * tf.ones([data_dim, 1]),
        bijector=tfb.Softplus())
    # sigma
    # qsigma_concentration = tfp.util.TransformedVariable(
    #     tf.ones([data_dim, 1]),
    #     bijector=tfb.Softplus())
    def factored_normal_variational_model():
        # Mean-field (factored) normal surrogate over (mu, beta).
        qmu = yield tfd.Normal(loc=qmu_mean,
                               scale=qmu_stddv,
                               name="qmu")
        qbeta = yield tfd.Normal(loc=qbeta_mean,
                                 scale=qbeta_stddv,
                                 name="qbeta")
        # qsigma = yield tfd.InverseGamma(concentration=qsigma_concentration,
        #                                 scale=1,
        #                                 name="qsigma")
    # Surrogate posterior that we will try to make close to p
    surrogate_posterior = tfd.JointDistributionCoroutineAutoBatched(
        factored_normal_variational_model)
    # --------- Fit variational inference model using MC samples and gradient descent ----------
    losses = tfp.vi.fit_surrogate_posterior(
        target_log_prob_fn,
        surrogate_posterior=surrogate_posterior,
        optimizer=tf.optimizers.Adam(learning_rate=LEARNING_RATE_VI),
        num_steps=NUM_VI_ITERS)
    # d = np.log(data + 1)
    # d = data / data.sum(0)
    # from sklearn.linear_model import LinearRegression
    # plt.scatter(np.squeeze(LinearRegression().fit(dummy.T, d.T).coef_), np.squeeze(qbeta_mean.numpy()))
    # plt.show()
    # d = (d.T - d.mean(1)).T
    # x = np.mean(d[:, num_datapoints_x:], axis=1)
    # y = np.mean(d[:, :num_datapoints_x], axis=1)
    # from sklearn.linear_model import LinearRegression
    # import ipdb
    # ipdb.set_trace()
    # plt.scatter(x - y, np.squeeze(qbeta_mean.numpy()))
    # plt.show()
    # import ipdb
    # ipdb.set_trace()
    if is_H0:
        return_dict = {
            'loss_trace': losses,
            # 'qs_mean': qs_mean,
            # 'qzx_mean': qzx_mean,
            # 'qzy_mean': qzy_mean,
            # 'qs_stddv': qs_stddv,
            # 'qzx_stddv': qzx_stddv,
            # 'qzy_stddv': qzy_stddv,
            # 'qdelta_mean': qdelta_mean,
            # 'qdelta_stddv': qdelta_stddv,
        }
    else:
        return_dict = {
            'loss_trace': losses,
            # 'qs_mean': qs_mean,
            # 'qw_mean': qw_mean,
            # 'qzx_mean': qzx_mean,
            # 'qzy_mean': qzy_mean,
            # 'qty_mean': qty_mean,
            # 'qs_stddv': qs_stddv,
            # 'qw_stddv': qw_stddv,
            # 'qzx_stddv': qzx_stddv,
            # 'qzy_stddv': qzy_stddv,
            # 'qty_stddv': qty_stddv,
            # 'qdelta_mean': qdelta_mean,
            # 'qdelta_stddv': qdelta_stddv,
        }
    return return_dict
|
[
"ajones788@gmail.com"
] |
ajones788@gmail.com
|
53bbaf0c1927f1dd6a655edc347f5f068614c4fe
|
a4681043cb56a9ab45be32a62fa9700b391f087f
|
/14-Statistics_with_Python/Histograms/Finding_your_Best_Bin_Size.py
|
ee5208c7421bb08d3d728fd281c28ccc5c4509db
|
[] |
no_license
|
MarceloDL-A/Python
|
b16b221ae4355b6323092d069bf83d1d142b9975
|
c091446ae0089f03ffbdc47b3a6901f4fa2a25fb
|
refs/heads/main
| 2023-01-01T02:29:31.591861
| 2020-10-27T19:04:11
| 2020-10-27T19:04:11
| 301,565,957
| 0
| 0
| null | 2020-10-27T19:04:12
| 2020-10-05T23:41:30
|
Python
|
MacCentralEurope
|
Python
| false
| false
| 1,976
|
py
|
"""
HISTOGRAMS
Finding your Best Bin Size
The figure below displays the graph that you created in the last exercise:
Histogram
This histogram is helpful for our store manager. The last six hours of the day are the busiest ó from 6 pm until midnight. Does this mean the manager should staff their grocery store with the most employees between 6 pm and midnight?
To the manager, this doesnít make much sense. The manager knows the store is busy when many people get off work, but the rush certainly doesnít continue later than 9 pm.
The issue with this histogram is that we have too few bins. When plotting a histogram, itís essential to select bins that fully capture the trends in the underlying data. Often, this will require some guessing and checking. There isnít much of a science to selecting bin size.
How many bins do you think makes sense for this example? I would try 24 because there are 24 hours in a day.
"""
# Import packages
import codecademylib
import numpy as np
import pandas as pd
from matplotlib import pyplot as plt
# Read in transactions data
transactions = pd.read_csv("transactions.csv")
# Save transaction times to a separate numpy array
times = transactions["Transaction Time"].values
"""
Change the number of bins in your code from 4 to 24.
What do you notice about the data?
Given this new graph, when would you recommend staffing the grocery store?
Check the hint to see what we thought.
"""
# Use plt.hist() below
plt.hist(times, range=(0, 24), bins=24, edgecolor="black")
plt.title("Weekday Frequency of Customers")
plt.xlabel("Hours (1 hour increments)")
plt.ylabel("Count")
plt.show()
"""
It looks like the busiest times of day are in the morning, from 5am to 10am, and in the evening from 5pm to 10pm.
This histogram has two distinct peaks, neither of which are close to our average of 3pm. As you can see, averages donít tell the full story. By visualizing the shape of our data, we can make better-informed decisions.
"""
|
[
"marcelo.delmondes.lima@usp.br"
] |
marcelo.delmondes.lima@usp.br
|
0f0685f8137ae113e06f7428ee41f7d757b0a252
|
15bfc574ae99ea02f10c1f549136bd9951f399cd
|
/articles/views.py
|
f5c3eaddddb2e91b794b65638563c13318ad1b73
|
[] |
no_license
|
dimka1993kh/Dj_HW_5.3
|
a2b9a197729eb26fd4e6d3b4872b754542bd8c07
|
e891370b4cb740fd1bf44e19e27a17021a65f99c
|
refs/heads/master
| 2023-04-15T01:54:24.943214
| 2021-05-03T18:44:02
| 2021-05-03T18:44:02
| 364,027,709
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 661
|
py
|
from django.views.generic import ListView
from django.shortcuts import render
from .models import Article
def articles_list(request):
    """Render the news list page with all articles.

    select_related('author', 'genre') avoids per-row queries for the
    related objects; defer('author__phone') skips loading a column the
    template does not need.
    """
    template_name = 'articles/news.html'
    news = Article.objects.all().select_related('author', 'genre').defer('author__phone')
    # news = Article.objects.all()
    context = {
        'object_list': news,
    }
    # Use this parameter to order the results:
    # https://docs.djangoproject.com/en/2.2/ref/models/querysets/#django.db.models.query.QuerySet.order_by
    ordering = '-published_at'
    # NOTE(review): `ordering` is defined but never applied to the queryset —
    # presumably `news.order_by(ordering)` was intended; confirm.
    return render(request, template_name, context)
|
[
"dimka1993kh@gmail.com"
] |
dimka1993kh@gmail.com
|
5f58ecef0b8e82fcead874e63f358470565ad618
|
c5bfc4509bedafe822691bbb3eb927e1fdd6daef
|
/ProblemSet5/Coordinate.py
|
0d75033313b8a1597c52bb185feea8efdae00559
|
[] |
no_license
|
BMariscal/MITx-6.00.1x
|
046f9891dcdc9c5fabf0543a01434a1304a7db9d
|
37951478f41f9f2e00bb2e1ec12ccbafb4ab8e78
|
refs/heads/master
| 2021-06-19T09:08:56.827010
| 2017-07-16T08:10:26
| 2017-07-16T08:10:26
| 82,771,759
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,403
|
py
|
"""Your task is to define the following two methods for the Coordinate class:
1.Add an __eq__ method that returns True if coordinates refer to same point in
the plane (i.e., have the same x and y coordinate).
2.Define __repr__, a special method that returns a string that looks like a valid
Python expression that could be used to recreate an object with the same value.
In other words, eval(repr(c)) == c given the definition of __eq__ from part 1.
"""
class Coordinate(object):
    """A point (x, y) in the 2D plane.

    Equal coordinates compare equal, hash equally, and satisfy
    ``eval(repr(c)) == c``.
    """

    def __init__(self, x, y):
        self.x = x
        self.y = y

    def __eq__(self, other):
        """Return True iff `other` is a Coordinate with the same x and y.

        The original asserted on the type, which (a) crashed comparisons
        against non-Coordinates and (b) vanished under ``python -O``.
        Returning NotImplemented lets Python fall back gracefully.
        """
        if type(other) != type(self):
            return NotImplemented
        return self.getX() == other.getX() and self.getY() == other.getY()

    def __hash__(self):
        # Defining __eq__ alone would make instances unhashable; hash the
        # same fields equality compares so equal points hash equally.
        return hash((self.x, self.y))

    def __repr__(self):
        # Round-trippable form: eval(repr(c)) == c.
        return 'Coordinate(' + str(self.getX()) + ',' + str(self.getY()) + ')'

    def getX(self):
        # Getter method for a Coordinate object's x coordinate.
        return self.x

    def getY(self):
        # Getter method for a Coordinate object's y coordinate.
        return self.y

    def __str__(self):
        return '<' + str(self.getX()) + ',' + str(self.getY()) + '>'
> print(c1)
<1,-8>
> print(c2)
<1,-8>
> print(c1 == c2)
True
|
[
"BriceidaMariscal@gmail.com"
] |
BriceidaMariscal@gmail.com
|
f047b7e7e7ffeddbf2f1357674c44aee7ab8d35a
|
6e8f2e28479566dbaa338300b2d61f784ff83f97
|
/.history/code/datasetup_20210414102701.py
|
67abb47540226e9e7256dd767a367bdddaf1b16d
|
[] |
no_license
|
eeng5/CV-final-project
|
55a7d736f75602858233ebc380c4e1d67ab2b866
|
580e28819560b86f6974959efb1d31ef138198fc
|
refs/heads/main
| 2023-04-09T21:28:21.531293
| 2021-04-21T19:57:22
| 2021-04-21T19:57:22
| 352,703,734
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 4,231
|
py
|
from PIL import Image
import numpy as np
import matplotlib.pyplot as plt
import matplotlib.image as mpimg
import pandas as pd
import cv2
import os
import glob
from pathlib import Path
def cleanTestDirs():
    """Delete every generated .jpg under each per-emotion test directory."""
    emotions = ['angry', 'happy', 'disgust', 'sad', 'neutral', 'surprise', 'fear']
    base = '/Users/Natalie/Desktop/cs1430/CV-final-project/data/test/'
    for emotion in emotions:
        for image_path in Path(base + emotion).glob('*.jpg'):
            try:
                os.remove(image_path)
            except OSError as err:
                print("Error: %s : %s" % (image_path, err.strerror))
def cleanTrainDirs():
    """Delete every generated .jpg under each per-emotion train directory."""
    emotions = ['angry', 'happy', 'disgust', 'sad', 'neutral', 'surprise', 'fear']
    base = '/Users/Natalie/Desktop/cs1430/CV-final-project/data/train/'
    for emotion in emotions:
        for image_path in Path(base + emotion).glob('*.jpg'):
            try:
                os.remove(image_path)
            except OSError as err:
                print("Error: %s : %s" % (image_path, err.strerror))
def cleanAll():
    """Remove generated images from both the test and train trees."""
    for cleaner in (cleanTestDirs, cleanTrainDirs):
        cleaner()
def createPixelArray(arr):
    """Parse a whitespace-separated pixel string into a 48x48 uint8 image."""
    pixels = [int(token) for token in arr.split()]
    image = np.array(pixels, dtype=np.uint8)
    return image.reshape((48, 48))
def equalize_hist(img):
    """Histogram-equalize a grayscale image via OpenCV."""
    return cv2.equalizeHist(img)
def showImages(imgs):
    """Display the given images side by side in grayscale."""
    _, axes = plt.subplots(1, len(imgs), figsize=(20, 20))
    for image, axis in zip(imgs, axes.flatten()):
        axis.imshow(image, cmap=plt.get_cmap('gray'))
    plt.show()
def augmentIMG(img, task):
    """Return [original, equalized, (bilateral-filtered if task == 3), h-flip].

    Task 3 additionally bilateral-filters the equalized image; every task
    gets the horizontal mirror appended last.
    """
    equalized = equalize_hist(img)
    augmented = [img, equalized]
    if task == 3:
        augmented.append(cv2.bilateralFilter(equalized, d=9, sigmaColor=75, sigmaSpace=75))
    augmented.append(cv2.flip(img, 1))  # flip horizontally
    return augmented
def saveIMG(arr, num, folderLoc):
    """Write array `arr` as JPEG image_<num>.jpg inside folderLoc."""
    filename = folderLoc + "image_" + num + ".jpg"
    Image.fromarray(arr).save(filename)
def createTrain(emotion_dict, task):
    """Generate augmented training images from the FER CSV dump.

    For every 'Training' row, the pixel string is decoded, augmented per
    `task`, and each variant is saved under the row's emotion directory as
    image_<rowindex>_<variant>.jpg.
    NOTE(review): CSV column names carry a leading space (' Usage',
    ' pixels') — this matches the icml_face_data.csv header; confirm.
    """
    df = pd.read_csv('/Users/Natalie/Desktop/cs1430/CV-final-project/data/icml_face_data.csv') # CHANGE ME
    base_filename = "/Users/Natalie/Desktop/cs1430/CV-final-project/data/train/" # CHANGE ME
    for index, row in df.iterrows():
        if (row[' Usage'] == "Training"):
            px = row[' pixels']
            emot = int(row['emotion'])
            emot_loc = emotion_dict[emot]
            filename = base_filename + emot_loc
            img = createPixelArray(px)
            img_arr = augmentIMG(img, task)
            idx = 0
            for i in img_arr:
                num = str(index) + "_" + str(idx)
                idx +=1
                saveIMG(i, num, filename)
def createTest(emotion_dict , task):
    """Generate augmented test images from the FER CSV dump.

    Identical to createTrain() except it keeps only 'PublicTest' rows and
    writes under the test/ tree.
    """
    df = pd.read_csv('/Users/Natalie/Desktop/cs1430/CV-final-project/data/icml_face_data.csv') # CHANGE ME
    base_filename = "/Users/Natalie/Desktop/cs1430/CV-final-project/data/test/" # CHANGE ME
    for index, row in df.iterrows():
        if (row[' Usage'] == "PublicTest"):
            px = row[' pixels']
            emot = int(row['emotion'])
            emot_loc = emotion_dict[emot]
            filename = base_filename + emot_loc
            img = createPixelArray(px)
            img_arr = augmentIMG(img, task)
            idx = 0
            for i in img_arr:
                num = str(index) + "_" + str(idx)
                idx +=1
                saveIMG(i, num, filename)
def createEmotionDict():
    """Map FER emotion label index -> subdirectory name (with trailing slash)."""
    return {
        0: "angry/",
        1: "disgust/",
        2: "fear/",
        3: "happy/",
        4: "sad/",
        5: "surprise/",
        6: "neutral/",
    }
def createSimpleData():
    """Rebuild the dataset with the basic (task 1) augmentation pipeline."""
    cleanAll()
    print("Cleaning done")
    emotion_map = createEmotionDict()
    createTrain(emotion_map, 1)
    print("Training done")
    createTest(emotion_map, 1)
    print("Testing done")
def createComplexData():
    """Rebuild the dataset with the extended (task 3) augmentation pipeline."""
    cleanAll()
    emotion_map = createEmotionDict()
    createTrain(emotion_map, 3)
    createTest(emotion_map, 3)
def main():
    """Script entry point: build the simple (task 1) dataset.

    The original body was a byte-for-byte duplicate of createSimpleData();
    delegating avoids the two copies drifting apart.
    """
    createSimpleData()
if __name__ == '__main__':
    main()  # run the dataset build when executed as a script
|
[
"natalie_rshaidat@brown.edu"
] |
natalie_rshaidat@brown.edu
|
be6f744ed74a9d985bf2d457c64dc8a20447b721
|
543e4a93fd94a1ebcadb7ba9bd8b1f3afd3a12b8
|
/maza/modules/creds/cameras/sentry360/ftp_default_creds.py
|
36767e634607918cc0d8f56a5cba7e413c92d652
|
[
"MIT"
] |
permissive
|
ArturSpirin/maza
|
e3127f07b90034f08ff294cc4afcad239bb6a6c3
|
56ae6325c08bcedd22c57b9fe11b58f1b38314ca
|
refs/heads/master
| 2020-04-10T16:24:47.245172
| 2018-12-11T07:13:15
| 2018-12-11T07:13:15
| 161,144,181
| 2
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 841
|
py
|
from maza.core.exploit import *
from maza.modules.creds.generic.ftp_default import Exploit as FTPDefault
class Exploit(FTPDefault):
    """Dictionary attack against the Sentry360 camera FTP service.

    All attack logic lives in the FTPDefault base; this subclass only
    supplies metadata and default options.
    """
    __info__ = {
        "name": "Sentry360 Camera Default FTP Creds",
        "description": "Module performs dictionary attack against Sentry360 Camera FTP service. "
                       "If valid credentials are found, they are displayed to the user.",
        "authors": (
            "Marcin Bury <marcin[at]threat9.com>",  # routersploit module
        ),
        "devices": (
            "Sentry360 Camera",
        )
    }

    # Scanner options (Opt* descriptors from the exploit framework).
    target = OptIP("", "Target IPv4, IPv6 address or file with ip:port (file://)")
    port = OptPort(21, "Target FTP port")
    threads = OptInteger(1, "Number of threads")
    # Factory default credentials for this device family.
    defaults = OptWordlist("admin:1234", "User:Pass or file with default credentials (file://)")
|
[
"a.spirin@hotmail.com"
] |
a.spirin@hotmail.com
|
2aa88315ba210081a02274e09c7e59726276c367
|
e4bceb499098281253f01f93d5c4f11284febf2e
|
/wakeup.py
|
71ff96919c980606a2c9ddd0ded586b253c131a3
|
[] |
no_license
|
Hemie143/gc_mysteries
|
d2a0a363767128bd599e079a1fab01822986d7e9
|
d26cf036f20f13d9a6c314000b7531e2f21d4d5e
|
refs/heads/master
| 2021-09-15T11:25:23.775079
| 2018-05-31T11:38:08
| 2018-05-31T11:38:08
| 125,210,054
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,279
|
py
|
import requests
import filecmp
import os
import time
import datetime
'''
curl
-A "Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2228.0 Safari/537.36"
-e http://geocachewakeup.byethost7.com/?ckattempt=1
-b "__test=a7f64c693f5755629af2d2c71aa06d2a;referrer="
-o wakeup%TS%.png
-H "Cache-Control: no-cache"
http://geocachewakeup.byethost7.com/image.php
'''
# Request headers mimicking a regular browser visit (see curl notes above).
headers = {'referer': 'http://geocachewakeup.byethost7.com/?ckattempt=1',
           'user-agent': 'Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2228.0 Safari/537.36',
           'pragma': 'no-cache',
           'cache-control': 'no-cache'}
cookies = {'__test': 'a7f64c693f5755629af2d2c71aa06d2a', 'referrer': ''}
i = 1
# Poll the puzzle image every 15 seconds forever; whenever it differs from
# the baseline 'howlsleep.png', keep a timestamped copy.
# NOTE(review): assumes howlsleep.png exists in the working directory and
# never exits on its own (Ctrl-C to stop) — confirm both are intended.
while True:
    print('Trial {0}'.format(i))
    res = requests.get('http://geocachewakeup.byethost7.com/image.php', cookies=cookies, headers=headers)
    res.raise_for_status()
    # Stream the response body to disk in 1000-byte chunks.
    imagefile = open('uil.png', 'wb')
    for chunk in res.iter_content(1000):
        imagefile.write(chunk)
    imagefile.close()
    # shallow=False forces a byte-by-byte comparison, not just stat() data.
    if not filecmp.cmp('howlsleep.png', 'uil.png', shallow=False):
        os.rename('uil.png', 'uil_{:%Y%m%d_%H%M%S}.png'.format(datetime.datetime.now()))
    # Drop filecmp's result cache so the next compare re-reads the files.
    filecmp.clear_cache()
    i += 1
    time.sleep(15)
|
[
"hemie143@gmail.com"
] |
hemie143@gmail.com
|
8cdd706344dff0e13cfd77f5b6b4f98005a35c96
|
6fa7f99d3d3d9b177ef01ebf9a9da4982813b7d4
|
/bBExn57vLEsXgHC5m_13.py
|
a75afd38204a17adcd9211e34c8b31be9e80d941
|
[] |
no_license
|
daniel-reich/ubiquitous-fiesta
|
26e80f0082f8589e51d359ce7953117a3da7d38c
|
9af2700dbe59284f5697e612491499841a6c126f
|
refs/heads/master
| 2023-04-05T06:40:37.328213
| 2021-04-06T20:17:44
| 2021-04-06T20:17:44
| 355,318,759
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 217
|
py
|
def same_line(lst):
    """Return True iff the three points in `lst` are collinear.

    :param lst: sequence of three (x, y) points (tuples or lists)

    Uses the cross-product test (x1-x0)*(y2-y0) == (x2-x0)*(y1-y0), which
    needs no division.  The original compared slopes and, on a zero
    denominator, fell back to checking that all x's equal ZERO — so a
    vertical line such as x = 1 was wrongly reported as not collinear.
    """
    (x0, y0), (x1, y1), (x2, y2) = lst[0], lst[1], lst[2]
    return (x1 - x0) * (y2 - y0) == (x2 - x0) * (y1 - y0)
|
[
"daniel.reich@danielreichs-MacBook-Pro.local"
] |
daniel.reich@danielreichs-MacBook-Pro.local
|
e2e902fe476469280d8f67738cf676ce097be6c5
|
48290f34b95013e1d25b98a73fdd8f879c4b5b7a
|
/login-register.py
|
06cfab88082c20f8d6c6d00093327aba2e0b705b
|
[] |
no_license
|
anmolrajaroraa/core-python-july
|
584a3b055c39140b8c79c1158b366f8bdc4a015d
|
0fada23d8d969e548dadb6b9935aff1429f13a64
|
refs/heads/master
| 2022-12-15T23:17:19.786059
| 2020-09-13T17:37:51
| 2020-09-13T17:37:51
| 278,815,765
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,193
|
py
|
import csv
# comma separated values
# Simple CSV-backed login/registration console app.
# users.csv rows: [username, fullname, email, password]
print('''
1. Login
2. Register
''')
choice = int(input("Enter choice: "))
# if choice == 1:
#     isLoginSuccessful = False
#     usernameOrEmail = input("Enter username/email: ")
#     password = input("Enter password: ")
#     with open("users.csv") as fileStream:
#         reader = csv.reader(fileStream)
#         for row in reader:
#             if usernameOrEmail == row[0] or usernameOrEmail == row[2]:
#                 if password == row[3]:
#                     print("Login successful!")
#                     isLoginSuccessful = True
#                     break
#     if not isLoginSuccessful:
#         print("Login failed")
if choice == 1:
    # Login: match against username (col 0) or email (col 2), then password.
    usernameOrEmail = input("Enter username/email: ")
    password = input("Enter password: ")
    with open("users.csv") as fileStream:
        reader = csv.reader(fileStream)
        for row in reader:
            if usernameOrEmail == row[0] or usernameOrEmail == row[2]:
                if password == row[3]:
                    print("Login successful!")
                    break
        else:
            print("Login failed!")
        # for-else block
        # else says now I'm a follower of for block
        # if 'for' loop ends gracefully, else will run
        # but if we break the for loop(terminate it abruptly) then else is also terminated hence 'else' block will not run
elif choice == 2:
    # Registration: refuse duplicate emails, otherwise append a new row.
    # NOTE(review): both branches assume users.csv already exists —
    # a first run with no file raises FileNotFoundError; confirm.
    emailExists = False
    username = input("Enter username: ")
    fullname = input("Enter fullname: ")
    email = input("Enter email: ")
    password = input("Enter password: ")
    # fileStream = open("users.csv", "w")
    # fileStream.close()
    with open("users.csv") as fileStream:
        reader = csv.reader(fileStream)
        for row in reader:
            # print(row)
            emailFromDB = row[2]
            if email == emailFromDB:
                print("Email already registered..please login")
                emailExists = True
                break
    if not emailExists:
        # newline='' prevents blank lines between rows on Windows.
        with open("users.csv", "a", newline='') as fileStream:
            writer = csv.writer(fileStream)
            writer.writerow([username, fullname, email, password])
            print("Registered successfully...")
|
[
"anmolarora1711@gmail.com"
] |
anmolarora1711@gmail.com
|
330a65e3a647bee90a48d3f323e928e718b549f5
|
1dc67a30f9af553243088668d51bc4e75e87d83d
|
/python/dazhewan/day19/打着玩/super_init.py
|
c76074a2ffcc59ce4d27de7f348f85e81ddb37c5
|
[] |
no_license
|
houyinhu/AID1812
|
00db45b3e8905bd069b31f2e7689f83bca3fa61f
|
8eeb9f06ed9f4e742d480354ef0e336dfe8c2f17
|
refs/heads/master
| 2020-04-27T16:33:57.275890
| 2019-04-10T01:09:51
| 2019-04-10T01:09:51
| 174,486,040
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 730
|
py
|
#super_init.py
#此示例示意,用super函数显示调用基类__init__初始化方法
# (This example demonstrates using super() to explicitly invoke the
# base class's __init__ initializer.)
class Human:
    """Base class: a person with a name and an age."""

    def __init__(self,n,a):
        self.name = n
        self.age = a
        # Runtime message (Chinese): "Human's __init__ was called".
        print("Human的__init__方法被调用")

    def infos(self):
        # Print the person's fields; labels are Chinese runtime strings
        # ("name" and "age").
        print('姓名:',self.name)
        print('年龄:',self.age)
class Student(Human):
    """A Human with an additional score attribute."""

    def __init__(self,n,a,s=0):
        super().__init__(n,a)  # explicitly invoke the base-class initializer
        self.score = s  # add the score attribute
        # Runtime message (Chinese): "Student's __init__ was called".
        print("Student类的__init__方法被调用")

    def infos(self):
        super().infos()  # call the base-class method first
        # Label is the Chinese runtime string for "score".
        print("成绩:",self.score)
# Demo: create a student and print all of its info.
s1 = Student('小张',20,100)
s1.infos()
|
[
"ahu@163.com"
] |
ahu@163.com
|
f137634b0821b9597b08027552f1db74ad9bc5dc
|
9508ccf2802becb4d19dd049b3496cf19d5f7b15
|
/tensorflow_probability/python/mcmc/internal/leapfrog_integrator_test.py
|
6ff15635b716756dbc78a4c7c242e3ac27e9390a
|
[
"Apache-2.0"
] |
permissive
|
etarakci-hvl/probability
|
e89485968e4660050424944b0ffdbbf617533fe4
|
7a0ce5e5beff91051028258dfbc7bc6cf0c4998d
|
refs/heads/master
| 2020-11-25T20:39:53.290761
| 2019-12-18T02:13:04
| 2019-12-18T02:14:04
| 228,835,497
| 1
| 0
|
Apache-2.0
| 2019-12-18T12:27:56
| 2019-12-18T12:27:56
| null |
UTF-8
|
Python
| false
| false
| 3,873
|
py
|
# Copyright 2018 The TensorFlow Probability Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Tests for `leapfrog_integrator.py`."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
import tensorflow.compat.v1 as tf1
import tensorflow.compat.v2 as tf
from tensorflow_probability.python.internal import test_util
from tensorflow_probability.python.mcmc.internal import leapfrog_integrator as leapfrog_impl
@test_util.test_all_tf_execution_regimes
class LeapfrogIntegratorTest(test_util.TestCase):
  """Checks long-run energy conservation of SimpleLeapfrogIntegrator."""

  def setUp(self):
    # Log-gamma target parameters and fixed seeds for reproducibility.
    self._shape_param = 5.
    self._rate_param = 10.
    tf1.random.set_random_seed(10003)
    np.random.seed(10003)

  def assertAllFinite(self, x):
    # Every element of x must be finite (no NaN/Inf).
    self.assertAllEqual(np.ones_like(x).astype(bool), np.isfinite(x))

  def _log_gamma_log_prob(self, x, event_dims=()):
    """Computes log-pdf of a log-gamma random variable.

    Args:
      x: Value of the random variable.
      event_dims: Dimensions not to treat as independent.

    Returns:
      log_prob: The log-pdf up to a normalizing constant.
    """
    return tf.reduce_sum(
        self._shape_param * x - self._rate_param * tf.exp(x),
        axis=event_dims)

  def _integrator_conserves_energy(self, x, independent_chain_ndims):
    event_dims = tf.range(independent_chain_ndims, tf.rank(x))
    target_fn = lambda x: self._log_gamma_log_prob(x, event_dims)
    # Random initial momentum; Hamiltonian = -log_prob + kinetic energy.
    m = tf.random.normal(tf.shape(x))
    log_prob_0 = target_fn(x)
    old_energy = -log_prob_0 + 0.5 * tf.reduce_sum(m**2., axis=event_dims)
    event_size = np.prod(
        self.evaluate(x).shape[independent_chain_ndims:])
    # Step size shrinks with event size so 1000 steps stay stable.
    integrator = leapfrog_impl.SimpleLeapfrogIntegrator(
        target_fn,
        step_sizes=[0.09 / event_size],
        num_steps=1000)
    [[new_m], [_], log_prob_1, [_]] = integrator([m], [x])
    new_energy = -log_prob_1 + 0.5 * tf.reduce_sum(new_m**2., axis=event_dims)
    old_energy_, new_energy_ = self.evaluate([old_energy, new_energy])
    tf1.logging.vlog(
        1, 'average energy relative change: {}'.format(
            (1. - new_energy_ / old_energy_).mean()))
    # Symplectic integration should keep energy within ~2% after 1000 steps.
    self.assertAllClose(old_energy_, new_energy_, atol=0., rtol=0.02)

  def _integrator_conserves_energy_wrapper(self, independent_chain_ndims):
    """Tests the long-term energy conservation of the leapfrog integrator.

    The leapfrog integrator is symplectic, so for sufficiently small step
    sizes it should be possible to run it more or less indefinitely without
    the energy of the system blowing up or collapsing.

    Args:
      independent_chain_ndims: Python `int` scalar representing the number of
        dims associated with independent chains.
    """
    x = tf.constant(np.random.rand(50, 10, 2), np.float32)
    self._integrator_conserves_energy(x, independent_chain_ndims)

  def testIntegratorEnergyConservationNullShape(self):
    self._integrator_conserves_energy_wrapper(0)

  def testIntegratorEnergyConservation1(self):
    self._integrator_conserves_energy_wrapper(1)

  def testIntegratorEnergyConservation2(self):
    self._integrator_conserves_energy_wrapper(2)

  def testIntegratorEnergyConservation3(self):
    self._integrator_conserves_energy_wrapper(3)
if __name__ == '__main__':
tf.test.main()
|
[
"gardener@tensorflow.org"
] |
gardener@tensorflow.org
|
23df7434ab536aa03632fc11bee9095c7e4d847e
|
de24f83a5e3768a2638ebcf13cbe717e75740168
|
/moodledata/vpl_data/64/usersdata/208/37039/submittedfiles/atm.py
|
fffc892d1e1eeab4114a3f231991c77f9faa11fd
|
[] |
no_license
|
rafaelperazzo/programacao-web
|
95643423a35c44613b0f64bed05bd34780fe2436
|
170dd5440afb9ee68a973f3de13a99aa4c735d79
|
refs/heads/master
| 2021-01-12T14:06:25.773146
| 2017-12-22T16:05:45
| 2017-12-22T16:05:45
| 69,566,344
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 235
|
py
|
# -*- coding: utf-8 -*-
v=int(input('digite o valor que deseja sacar:'))
n1=v//20
n2=(v%20)//10
n3=((v%20)%10)//5
n4=(((v%20)%10)%5//2
n5=((((v%20)%10)%5)%2)//1
print('%d'%n1)
print('%d'%n2)
print('%d'%n3)
print('%d'%n4)
print('%d'%n5)
|
[
"rafael.mota@ufca.edu.br"
] |
rafael.mota@ufca.edu.br
|
2f52aaac7e20119310401c3ba628d0ea489c2a5b
|
0845b9e00b0046c409eff2b55c835c331190a2dc
|
/Example_code/bullets.py
|
9f4598ab62b9538ba20843eecbbaf22de1f1f74d
|
[] |
no_license
|
crazcalm/Learn_Pygame
|
edba44f4ff89add764ee3f6b2558172465f9cc26
|
e93c482fb9eb392912627855b11ff2c36c22a191
|
refs/heads/master
| 2020-04-05T22:56:29.928626
| 2014-09-27T07:08:29
| 2014-09-27T07:08:29
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 4,486
|
py
|
"""
Show how to fire bullets.
Sample Python/Pygame Programs
Simpson College Computer Science
http://programarcadegames.com/
http://simpson.edu/computer-science/
Explanation video: http://youtu.be/PpdJjaiLX6A
"""
import pygame
import random
# Define some colors
BLACK = ( 0, 0, 0)
WHITE = ( 255, 255, 255)
RED = ( 255, 0, 0)
BLUE = ( 0, 0, 255)
# --- Classes
class Block(pygame.sprite.Sprite):
""" This class represents the block. """
def __init__(self, color):
# Call the parent class (Sprite) constructor
pygame.sprite.Sprite.__init__(self)
self.image = pygame.Surface([20, 15])
self.image.fill(color)
self.rect = self.image.get_rect()
class Player(pygame.sprite.Sprite):
""" This class represents the Player. """
def __init__(self):
""" Set up the player on creation. """
# Call the parent class (Sprite) constructor
pygame.sprite.Sprite.__init__(self)
self.image = pygame.Surface([20, 20])
self.image.fill(RED)
self.rect = self.image.get_rect()
def update(self):
""" Update the player's position. """
# Get the current mouse position. This returns the position
# as a list of two numbers.
pos = pygame.mouse.get_pos()
# Set the player x position to the mouse x position
self.rect.x = pos[0]
class Bullet(pygame.sprite.Sprite):
""" This class represents the bullet . """
def __init__(self):
# Call the parent class (Sprite) constructor
pygame.sprite.Sprite.__init__(self)
self.image = pygame.Surface([4, 10])
self.image.fill(BLACK)
self.rect = self.image.get_rect()
def update(self):
""" Move the bullet. """
self.rect.y -= 3
# --- Create the window
# Initialize Pygame
pygame.init()
# Set the height and width of the screen
screen_width = 700
screen_height = 400
screen = pygame.display.set_mode([screen_width, screen_height])
# --- Sprite lists
# This is a list of every sprite. All blocks and the player block as well.
all_sprites_list = pygame.sprite.Group()
# List of each block in the game
block_list = pygame.sprite.Group()
# List of each bullet
bullet_list = pygame.sprite.Group()
# --- Create the sprites
for i in range(50):
# This represents a block
block = Block(BLUE)
# Set a random location for the block
block.rect.x = random.randrange(screen_width)
block.rect.y = random.randrange(350)
# Add the block to the list of objects
block_list.add(block)
all_sprites_list.add(block)
# Create a red player block
player = Player()
all_sprites_list.add(player)
#Loop until the user clicks the close button.
done = False
# Used to manage how fast the screen updates
clock = pygame.time.Clock()
score = 0
player.rect.y = 370
# -------- Main Program Loop -----------
while not done:
# --- Event Processing
for event in pygame.event.get():
if event.type == pygame.QUIT:
done = True
elif event.type == pygame.MOUSEBUTTONDOWN:
# Fire a bullet if the user clicks the mouse button
bullet = Bullet()
# Set the bullet so it is where the player is
bullet.rect.x = player.rect.x
bullet.rect.y = player.rect.y
# Add the bullet to the lists
all_sprites_list.add(bullet)
bullet_list.add(bullet)
# --- Game logic
# Call the update() method on all the sprites
all_sprites_list.update()
# Calculate mechanics for each bullet
for bullet in bullet_list:
# See if it hit a block
block_hit_list = pygame.sprite.spritecollide(bullet, block_list, True)
# For each block hit, remove the bullet and add to the score
for block in block_hit_list:
bullet_list.remove(bullet)
all_sprites_list.remove(bullet)
score += 1
print(score)
# Remove the bullet if it flies up off the screen
if bullet.rect.y < -10:
bullet_list.remove(bullet)
all_sprites_list.remove(bullet)
# --- Draw a frame
# Clear the screen
screen.fill(WHITE)
# Draw all the spites
all_sprites_list.draw(screen)
# Go ahead and update the screen with what we've drawn.
pygame.display.flip()
# --- Limit to 20 frames per second
clock.tick(60)
pygame.quit()
|
[
"crazcalm@gmail.com"
] |
crazcalm@gmail.com
|
047b96b5f23a5f677481e75436be67e963f16f40
|
0c110eb32f2eaea5c65d40bda846ddc05757ced6
|
/scripts/mastersort/scripts_dir/p7542_run2L4.py
|
d95b7a8dccc8565de88643a95318f55cc9b4a0c0
|
[] |
no_license
|
nyspisoccog/ks_scripts
|
792148a288d1a9d808e397c1d2e93deda2580ff4
|
744b5a9dfa0f958062fc66e0331613faaaee5419
|
refs/heads/master
| 2021-01-18T14:22:25.291331
| 2018-10-15T13:08:24
| 2018-10-15T13:08:24
| 46,814,408
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,197
|
py
|
from __future__ import with_statement
import os, csv, shutil,tarfile, uf, dcm_ops
dest_root = '/ifs/scratch/pimri/soccog/test_working'
dst_path_lst = ['7542', 'run2L4']
uf.buildtree(dest_root, dst_path_lst)
uf.copytree('/ifs/scratch/pimri/soccog/old/SocCog_Raw_Data_By_Exam_Number/2727/E2727_e363504/s414724_1904_2L4_s23', '/ifs/scratch/pimri/soccog/test_working/7542/run2L4')
t = tarfile.open(os.path.join('/ifs/scratch/pimri/soccog/test_working/7542/run2L4','MRDC_files.tar.gz'), 'r')
t.extractall('/ifs/scratch/pimri/soccog/test_working/7542/run2L4')
for f in os.listdir('/ifs/scratch/pimri/soccog/test_working/7542/run2L4'):
if 'MRDC' in f and 'gz' not in f:
old = os.path.join('/ifs/scratch/pimri/soccog/test_working/7542/run2L4', f)
new = os.path.join('/ifs/scratch/pimri/soccog/test_working/7542/run2L4', f + '.dcm')
os.rename(old, new)
qsub_cnv_out = dcm_ops.cnv_dcm('/ifs/scratch/pimri/soccog/test_working/7542/run2L4', '7542_run2L4', '/ifs/scratch/pimri/soccog/scripts/mastersort/scripts_dir/cnv')
#qsub_cln_out = dcm_ops.cnv_dcm('/ifs/scratch/pimri/soccog/test_working/7542/run2L4', '7542_run2L4', '/ifs/scratch/pimri/soccog/scripts/mastersort/scripts_dir/cln')
|
[
"katherine@Katherines-MacBook-Pro.local"
] |
katherine@Katherines-MacBook-Pro.local
|
452bb4a716c3bd8adec7df2878cd13e873b5b57d
|
d75fbceb28ad14b07ae4057a8b23ec0bd3682628
|
/code/chap08/ZombieMobGame.py
|
1ca5ebc7b9173aa046b6f7eb320226cfc169fb42
|
[] |
no_license
|
wubinbai/pygame-book
|
0707a0b36f41bc6f0b1282707e6c4f6cbed9c87a
|
9de1f7516a2aec940ffa97f9686cc0520bad2deb
|
refs/heads/master
| 2020-12-21T15:51:08.397619
| 2020-01-30T12:37:52
| 2020-01-30T12:37:52
| 236,478,999
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 5,667
|
py
|
# Zombie Mob Game
# Chapter 8
import itertools, sys, time, random, math, pygame
from pygame.locals import *
from MyLibrary import *
def calc_velocity(direction, vel=1.0):
velocity = Point(0,0)
if direction == 0: #north
velocity.y = -vel
elif direction == 2: #east
velocity.x = vel
elif direction == 4: #south
velocity.y = vel
elif direction == 6: #west
velocity.x = -vel
return velocity
def reverse_direction(sprite):
if sprite.direction == 0:
sprite.direction = 4
elif sprite.direction == 2:
sprite.direction = 6
elif sprite.direction == 4:
sprite.direction = 0
elif sprite.direction == 6:
sprite.direction = 2
#main program begins
pygame.init()
screen = pygame.display.set_mode((800,600))
pygame.display.set_caption("Collision Demo")
font = pygame.font.Font(None, 36)
timer = pygame.time.Clock()
#create sprite groups
player_group = pygame.sprite.Group()
zombie_group = pygame.sprite.Group()
health_group = pygame.sprite.Group()
#create the player sprite
player = MySprite()
player.load("farmer walk.png", 96, 96, 8)
player.position = 80, 80
player.direction = 4
player_group.add(player)
#create the zombie sprite
zombie_image = pygame.image.load("zombie walk.png").convert_alpha()
for n in range(0, 10):
zombie = MySprite()
zombie.load("zombie walk.png", 96, 96, 8)
zombie.position = random.randint(0,700), random.randint(0,500)
zombie.direction = random.randint(0,3) * 2
zombie_group.add(zombie)
#create heath sprite
health = MySprite()
health.load("health.png", 32, 32, 1)
health.position = 400,300
health_group.add(health)
game_over = False
player_moving = False
player_health = 100
#repeating loop
while True:
timer.tick(30)
ticks = pygame.time.get_ticks()
for event in pygame.event.get():
if event.type == QUIT: sys.exit()
keys = pygame.key.get_pressed()
if keys[K_ESCAPE]: sys.exit()
elif keys[K_UP] or keys[K_w]:
player.direction = 0
player_moving = True
elif keys[K_RIGHT] or keys[K_d]:
player.direction = 2
player_moving = True
elif keys[K_DOWN] or keys[K_s]:
player.direction = 4
player_moving = True
elif keys[K_LEFT] or keys[K_a]:
player.direction = 6
player_moving = True
else:
player_moving = False
if not game_over:
#set animation frames based on player's direction
player.first_frame = player.direction * player.columns
player.last_frame = player.first_frame + player.columns-1
if player.frame < player.first_frame:
player.frame = player.first_frame
if not player_moving:
#stop animating when player is not pressing a key
player.frame = player.first_frame = player.last_frame
else:
#move player in direction
player.velocity = calc_velocity(player.direction, 1.5)
player.velocity.x *= 1.5
player.velocity.y *= 1.5
#update player sprite
player_group.update(ticks, 50)
#manually move the player
if player_moving:
player.X += player.velocity.x
player.Y += player.velocity.y
if player.X < 0: player.X = 0
elif player.X > 700: player.X = 700
if player.Y < 0: player.Y = 0
elif player.Y > 500: player.Y = 500
#update zombie sprites
zombie_group.update(ticks, 50)
#manually iterate through all the zombies
for z in zombie_group:
#set the zombie's animation range
z.first_frame = z.direction * z.columns
z.last_frame = z.first_frame + z.columns-1
if z.frame < z.first_frame:
z.frame = z.first_frame
z.velocity = calc_velocity(z.direction)
#keep the zombie on the screen
z.X += z.velocity.x
z.Y += z.velocity.y
if z.X < 0 or z.X > 700 or z.Y < 0 or z.Y > 500:
reverse_direction(z)
#check for collision with zombies
attacker = None
attacker = pygame.sprite.spritecollideany(player, zombie_group)
if attacker != None:
#we got a hit, now do a more precise check
if pygame.sprite.collide_rect_ratio(0.5)(player,attacker):
player_health -= 10
if attacker.X < player.X: attacker.X -= 10
elif attacker.X > player.X: attacker.X += 10
else:
attacker = None
#update the health drop
health_group.update(ticks, 50)
#check for collision with health
if pygame.sprite.collide_rect_ratio(0.5)(player,health):
player_health += 30
if player_health > 100: player_health = 100
health.X = random.randint(0,700)
health.Y = random.randint(0,500)
#is player dead?
if player_health <= 0:
game_over = True
#clear the screen
screen.fill((50,50,100))
#draw sprites
health_group.draw(screen)
zombie_group.draw(screen)
player_group.draw(screen)
#draw energy bar
pygame.draw.rect(screen, (50,150,50,180), Rect(300,570,player_health*2,25))
pygame.draw.rect(screen, (100,200,100,180), Rect(300,570,200,25), 2)
if game_over:
print_text(font, 300, 100, "G A M E O V E R")
pygame.display.update()
|
[
"wubinbai@yahoo.com"
] |
wubinbai@yahoo.com
|
1d4abd406e5975787c37fe68fdc30ace92e9a531
|
e262e64415335060868e9f7f73ab8701e3be2f7b
|
/.history/pyexcel_20201111162515.py
|
eb28ae94c2e1ecfb48fe15a06c26ed612a6d0851
|
[] |
no_license
|
Allison001/developer_test
|
6e211f1e2bd4287ee26fd2b33baf1c6a8d80fc63
|
b8e04b4b248b0c10a35e93128a5323165990052c
|
refs/heads/master
| 2023-06-18T08:46:40.202383
| 2021-07-23T03:31:54
| 2021-07-23T03:31:54
| 322,807,303
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 899
|
py
|
from openpyxl import Workbook
from openpyxl.utils import get_column_letter
wb = Workbook()
dest_filename = 'empty_book.xlsx'
ws1 = wb.active
ws1.title = "range names"
for row in range(1, 40):
ws1.append(range(600))
ws2 = wb.create_sheet(title="Pi")
ws2['F5'] = 3.14
ws3 = wb.create_sheet(title="Data")
for row in range(10, 20):
for col in range(27, 54):
_ = ws3.cell(column=col, row=row, value="{0}".format(get_column_letter(col)))
print(ws3['AA10'].value)
ws4 = wb.create_sheet(title="test")
for i in range(1,11):
ws4.cell(column=i,row=1).value="用例编号"
ws5 = wb.create_sheet(title="Test1")
title1 = ("用例编号","用例模块","用例标题","用例级别","测试环境","测试输入","执行操作","预期结果","验证结果","备注")
for i in range(1,11):
for j in title1:
ws5.cell(column=1,row=).value=j
wb.save(filename = dest_filename)
|
[
"zhangyingxbba@gmail.com"
] |
zhangyingxbba@gmail.com
|
d14ec836cc015536b3f4fbbea3e42722d329fe10
|
fc276597c51509a13bf2c622c91123eb4987d6b2
|
/setup.py
|
d45a7fa220a59b4a2f456f2596709f0d4c052620
|
[
"Apache-2.0"
] |
permissive
|
zaquestion/requests-mv-integrations
|
df1150b0efb8bf4d97979e8ed3499737d98fa16d
|
b8f3332c1cd564ef106e725e0ee3436913fa8e19
|
refs/heads/master
| 2020-06-29T11:12:30.990262
| 2016-11-22T01:56:56
| 2016-11-22T01:56:56
| 74,429,832
| 0
| 0
| null | 2016-12-06T18:57:52
| 2016-11-22T03:21:53
|
Python
|
UTF-8
|
Python
| false
| false
| 2,180
|
py
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# @namespace pycountry-convert
from __future__ import with_statement
# To install the tune-mv-integration-python library, open a Terminal shell,
# then run this file by typing:
#
# python setup.py install
#
import sys
import re
from setuptools import setup
REQUIREMENTS = [
req for req in open('requirements.txt')
.read().split('\n')
if req != ''
]
PACKAGES = [
'requests_mv_integrations',
'requests_mv_integrations.support',
'requests_mv_integrations.errors'
]
CLASSIFIERS = [
# How mature is this project? Common values are
# 3 - Alpha
# 4 - Beta
# 5 - Production/Stable
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Natural Language :: English',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.0',
'Programming Language :: Python :: 3.1',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Topic :: Software Development :: Libraries :: Python Modules'
]
with open('requests_mv_integrations/__init__.py', 'r') as fd:
version = re.search(r'^__version__\s*=\s*[\'"]([^\'"]*)[\'"]', fd.read(), re.MULTILINE).group(1)
if not version:
raise RuntimeError('Cannot find version information')
if len(sys.argv) < 2 or sys.argv[1] == 'version':
print(version)
sys.exit()
setup(
name='requests-mv-integrations',
version=version,
description='',
author='TUNE Inc., TuneLab',
author_email='jefft@tune.com',
url='https://github.com/TuneLab/requests-mv-integrations',
install_requires=REQUIREMENTS,
packages=PACKAGES,
package_dir={'requests-mv-integrations': 'requests-mv-integrations'},
include_package_data=True,
license='Apache 2.0',
zip_safe=False,
classifiers=CLASSIFIERS,
long_description="""
-----------------------------------------------------
"""
)
|
[
"jefft@tune.com"
] |
jefft@tune.com
|
81146b5b73fa9c515a476c8bf531f60c1e4b6b89
|
a8289cb7273245e7ec1e6079c7f266db4d38c03f
|
/Anthony_Flask_Tutorials/Flask_GETAPI/run.py
|
4b5fb6680b2b2838fcc12f2b4cce3d759ec957a0
|
[] |
no_license
|
palmarytech/Python_Snippet
|
6acbd572d939bc9d5d765800f35a0204bc044708
|
41b4ebe15509d166c82edd23b713a1f3bf0458c5
|
refs/heads/master
| 2022-10-06T22:51:00.469383
| 2020-03-13T08:32:11
| 2020-03-13T08:32:11
| 272,350,189
| 1
| 0
| null | 2020-06-15T05:30:44
| 2020-06-15T05:30:44
| null |
UTF-8
|
Python
| false
| false
| 617
|
py
|
from flask import Flask, jsonify, request
app = Flask(__name__)
languages = [{"name": "Javascript"}, {"name": "Python"}, {"name": "Ruby"}]
@app.route("/", methods=["GET"])
def test():
return jsonify({"message": "API works"})
@app.route("/languages", methods=["GET"])
def returnAll():
return jsonify({"languages": languages})
@app.route("/languages/<string:name>", methods=["GET"])
def returnOne(name):
_langs = [language for language in languages if language["name"] == name]
return jsonify({"language": _langs[0]})
if __name__ == "__main__":
app.run(debug=True, host="0.0.0.0", port=5000)
|
[
"leamon.lee13@gmail.com"
] |
leamon.lee13@gmail.com
|
74107df377adab78ed6ad99a7cdafb3fe88dfef6
|
a5016c90fb13caaf8ce4e2c48dc842017f195822
|
/src/0008_StringToInteger.py
|
c800458b3678b28a95706de4e3e5ff1d26c81c80
|
[] |
no_license
|
lixinchn/LeetCode
|
c21efc2d715da637374871d36d3d183ea08b9c31
|
4060d525f007c10a3a55d874f7953a0a1d98c8fd
|
refs/heads/master
| 2020-04-03T10:27:16.068777
| 2017-01-05T02:31:28
| 2017-01-05T02:31:28
| 50,000,305
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,513
|
py
|
class Solution(object):
def myAtoi(self, str):
"""
:type str: str
:rtype: int
"""
ret_int = 0
negative = False
MAX_INT = 2147483647
MIN_INT = 2147483648
i = 0
for i in range(len(str)):
if str[i] == ' ' or str[i] == '\t':
continue
break
if i < len(str) and (str[i] == '-' or str[i] == '+'):
negative = str[i] == '-'
i += 1
str = str[i:]
for i in range(len(str)):
try:
char_int = int(str[i])
except:
break
ret_int = ret_int * 10 + char_int
if not negative and ret_int > MAX_INT:
return MAX_INT
if negative and ret_int > MIN_INT:
return MIN_INT * -1
if negative:
ret_int *= -1
return ret_int
if __name__ == "__main__":
solution = Solution()
str = '100'
print solution.myAtoi(str)
str = '-1'
print solution.myAtoi(str)
str = '0'
print solution.myAtoi(str)
str = '007'
print solution.myAtoi(str)
str = '-007'
print solution.myAtoi(str)
str = ''
print solution.myAtoi(str)
str = ' '
print solution.myAtoi(str)
str = 'a123'
print solution.myAtoi(str)
str = '12aa3'
print solution.myAtoi(str)
str = '-2147483648'
print solution.myAtoi(str)
str = '-2147483649'
print solution.myAtoi(str)
|
[
"lixinit@gmail.com"
] |
lixinit@gmail.com
|
787e223f49a9ab2eeab5f45a7afcebfb8907c122
|
26e468e4f99ffc0ccd6da43c1ae9f363ec3010e6
|
/msr.py
|
32c9ecdd0dc265971335ef45b9e1bfc43fb7fe41
|
[] |
no_license
|
mtoqeerpk/rshalib
|
d86d36216fe9a4008a710af6c0e118c34c73dab4
|
9ef631ca3690c158693445a52d59c2583f8bd67e
|
refs/heads/master
| 2023-01-28T00:25:14.992405
| 2020-12-10T09:16:41
| 2020-12-10T09:16:41
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 753
|
py
|
"""
Magnitude scaling relations
"""
from __future__ import absolute_import, division, print_function, unicode_literals
try:
## Python 2
basestring
except:
## Python 3
basestring = str
__all__ = ['get_oq_msr']
def get_oq_msr(msr_or_name):
"""
Get OpenQuake magnitude scaling relationship object
:param msr_or_name:
str or instance of :class:`oqhazlib.scalerel.BaseMSR`
:return:
instance of :class:`oqhazlib.scalerel.BaseMSR`
"""
from . import oqhazlib
if isinstance(msr_or_name, oqhazlib.scalerel.BaseMSR):
msr = msr_or_name
elif isinstance(msr_or_name, basestring):
#if msr_or_name[-3:] != 'MSR':
# msr_or_name += 'MSR'
msr = getattr(oqhazlib.scalerel, msr_or_name)()
return msr
|
[
"kris.vanneste@oma.be"
] |
kris.vanneste@oma.be
|
6ec27707f06f70599fd009e6b1515054ddf675be
|
029948b3fd0e41d80d66c84d808abff4fcb24ac8
|
/dnac_api_client/models/response.py
|
21c419cccc99217f50f5b75be892b1baee6f818a
|
[] |
no_license
|
yijxiang/dnac-api-client
|
842d1da9e156820942656b8f34342d52c96d3c37
|
256d016e2df8fc1b3fdad6e28f441c6005b43b07
|
refs/heads/master
| 2021-09-25T21:10:09.502447
| 2018-10-25T14:39:57
| 2018-10-25T14:39:57
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,249
|
py
|
# coding: utf-8
"""
Cisco DNA Center Platform v. 1.2.x (EFT)
REST API (EFT) # noqa: E501
OpenAPI spec version: 1.0.0
Generated by: https://openapi-generator.tech
"""
import pprint
import re # noqa: F401
import six
class Response(object):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
openapi_types = {
}
attribute_map = {
}
def __init__(self): # noqa: E501
"""Response - a model defined in OpenAPI""" # noqa: E501
self.discriminator = None
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, Response):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
|
[
"cunningr@cisco.com"
] |
cunningr@cisco.com
|
34e474332837d41d7be8e3b1b8180a049fb43e1b
|
45de7d905486934629730945619f49281ad19359
|
/xlsxwriter/test/comparison/test_chart_layout02.py
|
22e9a8e8599e84e984544c6755d61220a43f21fa
|
[
"BSD-2-Clause"
] |
permissive
|
jmcnamara/XlsxWriter
|
599e1d225d698120ef931a776a9d93a6f60186ed
|
ab13807a1be68652ffc512ae6f5791d113b94ee1
|
refs/heads/main
| 2023-09-04T04:21:04.559742
| 2023-08-31T19:30:52
| 2023-08-31T19:30:52
| 7,433,211
| 3,251
| 712
|
BSD-2-Clause
| 2023-08-28T18:52:14
| 2013-01-04T01:07:06
|
Python
|
UTF-8
|
Python
| false
| false
| 1,659
|
py
|
###############################################################################
#
# Tests for XlsxWriter.
#
# SPDX-License-Identifier: BSD-2-Clause
# Copyright (c), 2013-2023, John McNamara, jmcnamara@cpan.org
#
from ..excel_comparison_test import ExcelComparisonTest
from ...workbook import Workbook
class TestCompareXLSXFiles(ExcelComparisonTest):
"""
Test file created by XlsxWriter against a file created by Excel.
"""
def setUp(self):
self.set_filename("chart_layout02.xlsx")
def test_create_file(self):
"""Test the creation of an XlsxWriter file with user defined layout."""
workbook = Workbook(self.got_filename)
worksheet = workbook.add_worksheet()
chart = workbook.add_chart({"type": "column"})
chart.axis_ids = [68311296, 69198208]
data = [
[1, 2, 3, 4, 5],
[2, 4, 6, 8, 10],
[3, 6, 9, 12, 15],
]
worksheet.write_column("A1", data[0])
worksheet.write_column("B1", data[1])
worksheet.write_column("C1", data[2])
chart.add_series({"values": "=Sheet1!$A$1:$A$5"})
chart.add_series({"values": "=Sheet1!$B$1:$B$5"})
chart.add_series({"values": "=Sheet1!$C$1:$C$5"})
chart.set_legend(
{
"layout": {
"x": 0.80197353455818021,
"y": 0.37442403032954213,
"width": 0.12858202099737534,
"height": 0.25115157480314959,
}
}
)
worksheet.insert_chart("E9", chart)
workbook.close()
self.assertExcelEqual()
|
[
"jmcnamara@cpan.org"
] |
jmcnamara@cpan.org
|
afbde06d9758d8f6cb99f0c165487171b932e4a9
|
ca7aa979e7059467e158830b76673f5b77a0f5a3
|
/Python_codes/p03399/s459988463.py
|
c485ed9fa7f3a1cbfbac8d9420a062d84304b2e3
|
[] |
no_license
|
Aasthaengg/IBMdataset
|
7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901
|
f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8
|
refs/heads/main
| 2023-04-22T10:22:44.763102
| 2021-05-13T17:27:22
| 2021-05-13T17:27:22
| 367,112,348
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 181
|
py
|
# 数値の取得
A = int(input())
B = int(input())
C = int(input())
D = int(input())
# 料金の最安値を出力
train = min(A,B)
bus = min(C,D)
tbsum = train + bus
print(tbsum)
|
[
"66529651+Aastha2104@users.noreply.github.com"
] |
66529651+Aastha2104@users.noreply.github.com
|
f19b8be273679764a4d21955b4e6283bcba5d52c
|
41e3065d6f29449251f1cc79cb340fa273ac5c61
|
/0x11-python-network_1/4-hbtn_status.py
|
9760dd4785428c240c719c71c1c380eb25f80b8c
|
[] |
no_license
|
BD20171998/holbertonschool-higher_level_programming
|
856fa3a7fcfafd3e17ebd7dd4cf9d3e5a609fd1f
|
bfa78d25bd4527e06cf1bf54cbc00722449d9a30
|
refs/heads/master
| 2021-07-16T01:58:42.911959
| 2020-11-15T07:18:19
| 2020-11-15T07:18:19
| 226,976,859
| 0
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 262
|
py
|
#!/usr/bin/python3
import requests
if __name__ == "__main__":
html = requests.get('https://intranet.hbtn.io/status')
print("Body response:")
print("{}{}".format("\t- type: ", type(html.text)))
print("{}{}".format("\t- content: ", html.text))
|
[
"robert.deprizio@gmail.com"
] |
robert.deprizio@gmail.com
|
be67b3d78dfe581cfe41e8bf0ac8acd188a3da8e
|
2ad7e88305a7d2215a816e1aa3a82ef50b685b23
|
/dshop/main/utilities.py
|
eacf75e1b907eea2ad161776d182389a5405e407
|
[] |
no_license
|
KeksikProg/shop_chairs
|
5c9fb01f47676bb118fcc8161be1854e23271550
|
4bb09c409450cf2bb024c69d51d9f046520e9349
|
refs/heads/master
| 2023-06-26T17:21:27.678778
| 2020-08-06T16:28:29
| 2020-08-06T16:28:29
| 281,150,953
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,336
|
py
|
from datetime import datetime
from os.path import splitext
from django.template.loader import render_to_string
from django.core.signing import Signer # Это для цифровой подписи
from dshop.settings import ALLOWED_HOSTS
from django.dispatch import Signal
from django.db.models.signals import post_save
def get_timestamp_path(instance, filename): # Тк эта функция не относится не к редакторам не к контроллерами не к моделям ,мы просто запишем её сюда
return f'{datetime.now().timestamp()}{splitext(filename)[1]}'
signer = Signer()
def send_activation_notification(user):
if ALLOWED_HOSTS:
host = 'http://' + ALLOWED_HOSTS[0]
else:
host = 'http://localhost:8000'
context = {'user':user, 'host':host, 'sign':signer.sign(user.username)}
subj = render_to_string('email/activation_letter_subj.txt', context)
body = render_to_string('email/activation_letter_body.txt', context)
user.email_user(subj, body)
user_registrated = Signal(providing_args = ['instance']) # Тут мы из всех сигналов берем определенный по его ключу
def user_registrated_dispatcher(sender, **kwargs):
send_activation_notification(kwargs['instance'])
user_registrated.connect(user_registrated_dispatcher)
|
[
"fvvfvfvf.vfvfvfv@yandex.ru"
] |
fvvfvfvf.vfvfvfv@yandex.ru
|
d676188c2588e84de56202458db7503191525a1e
|
715966248566909e4e8978230b37458d031418c5
|
/01.jumptopython/chap03/책/126-02.py
|
9e55500f0b6a62346082cb03961b4920774aabf8
|
[] |
no_license
|
itminha123/jumptopython
|
1101f4e065ce2f8e0a1c68bb818b14b562fc43d1
|
8b25ae84f795eab5e7bcfa609646a2736ac2a98f
|
refs/heads/master
| 2021-09-19T08:29:33.686384
| 2018-07-25T18:59:55
| 2018-07-25T18:59:55
| 112,169,738
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 534
|
py
|
coffee=10
while True:
money=int(input("돈을 넣어 주세요:"))
if money== 300:
print("커피를 줍니다.")
coffee=coffee-1
elif money>300:
print("거스름돈 %d를 주고 커피를 줍니다."%(money-300))
coffee=coffee-1
else:
print("돈을 다시 돌려주고 커피를 주지 않습니다.")
print("남은 커피의 양은 %d개 입니다."%coffee)
if not coffee:
print("커피가 다 떨어졌습니다. 판매를 중지합니다.")
break
|
[
"itminha123@naver.com"
] |
itminha123@naver.com
|
408491e006322a62395f8c6b6a009ef13abe8b3c
|
c566ceb33bfea62f4be98dd2f9536deaee46ac3e
|
/api/utils/custom_exception.py
|
27388bf098fb1fd47d3af3feec46065ce054813d
|
[] |
no_license
|
Saviodiow95/wallet_test
|
1ad0e8f1699803ecca0ebf8c6a96b10efea980a3
|
4c2bf80332458b39197de134e914af669bbcc355
|
refs/heads/main
| 2023-07-04T02:25:11.321675
| 2021-08-02T12:28:40
| 2021-08-02T12:28:40
| 390,503,421
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 328
|
py
|
from rest_framework.exceptions import APIException
class InsufficientFundsException(APIException):
"""
Exceção criada para retornar uma mensagem quando não houver saldo de um ativo para realizar o resgate
"""
status_code = 304
default_detail = 'Não é possível realizar o Resgate, Saldo Insuficiente'
|
[
"saviodiowflamengo@gmail.com"
] |
saviodiowflamengo@gmail.com
|
fd0c93ec47ca70c94d9bc8e470186eac42081257
|
9743d5fd24822f79c156ad112229e25adb9ed6f6
|
/xai/brain/wordbase/otherforms/_counselled.py
|
4ca2e9c494bd4785c5c7bc4a66f6f1113eba6619
|
[
"MIT"
] |
permissive
|
cash2one/xai
|
de7adad1758f50dd6786bf0111e71a903f039b64
|
e76f12c9f4dcf3ac1c7c08b0cc8844c0b0a104b6
|
refs/heads/master
| 2021-01-19T12:33:54.964379
| 2017-01-28T02:00:50
| 2017-01-28T02:00:50
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 230
|
py
|
#calss header
class _COUNSELLED():
def __init__(self,):
self.name = "COUNSELLED"
self.definitions = counsel
self.parents = []
self.childen = []
self.properties = []
self.jsondata = {}
self.basic = ['counsel']
|
[
"xingwang1991@gmail.com"
] |
xingwang1991@gmail.com
|
52bad13f94b0f90846709fba274572fa370e643a
|
c085578abc19db18ee0766e1f9598d79a3acdbe1
|
/18-4Sum/solution.py
|
c5448eab17a5a88cac8e2880a4fc80d1a1c7a04b
|
[
"MIT"
] |
permissive
|
Tanych/CodeTracking
|
efb6245edc036d7edf85e960972c34d03b8c707a
|
86f1cb98de801f58c39d9a48ce9de12df7303d20
|
refs/heads/master
| 2020-05-21T17:40:10.105759
| 2016-10-09T18:20:42
| 2016-10-09T18:20:42
| 60,616,356
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,830
|
py
|
class Solution(object):
def nsum(self,nums,start,n,target):
nlen=len(nums)
res=[]
if nums[start]*n>target or target>nums[nlen-1]*n:
return res
for i in xrange(start,nlen-n+1):
if i>start and nums[i-1]==nums[i]:
continue
if n==1:
if target<nums[i]:break
if target>nums[i]:continue
res.append([target])
break
for li in self.nsum(nums,i+1,n-1,target-nums[i]):
li.append(nums[i])
res.append(li)
return res
def fourSum(self, nums,target):
"""
:type nums: List[int]
:rtype: List[List[int]]
"""
num_len=len(nums)
if num_len<4:
return []
nums.sort()
return self.nsum(nums,0,4,target)
res_list=[]
hash_dict={}
for m in xrange(num_len-3):
if 4*nums[m]>target:
return res_list
for i in xrange(m+1,num_len-2):
start=i+1
end=num_len-1
while start<end:
if nums[m]+nums[i]+nums[start]+nums[end]==target:
if not hash_dict.has_key((nums[m],nums[i],nums[start],nums[end])):
res_list.append([nums[m],nums[i],nums[start],nums[end]])
hash_dict[(nums[m],nums[i],nums[start],nums[end])]=1
start+=1
end-=1
elif nums[m]+nums[i]+nums[start]+nums[end]<target:
start+=1
elif nums[m]+nums[i]+nums[start]+nums[end]>target:
end-=1
return res_list
|
[
"ychtan@email.gwu.edu"
] |
ychtan@email.gwu.edu
|
0e10bb119dad92159c84d65e61fbe16a80bca333
|
10f0193389a161c447061d06a87c4fae8fc31bb5
|
/huobi/model/orderupdatenew.py
|
51c567837fd5cfda68b30d5a52efdf3ac769b7be
|
[
"Apache-2.0"
] |
permissive
|
neosun100/huobi_Python
|
1d9fca2c24673076516d582c263445c17626bd8e
|
70b280f751e6159b76f0cc43251896a754c1b559
|
refs/heads/master
| 2021-01-14T16:47:30.471992
| 2020-02-24T08:45:40
| 2020-02-24T08:45:40
| 242,685,209
| 0
| 0
|
Apache-2.0
| 2020-02-24T08:38:33
| 2020-02-24T08:38:32
| null |
UTF-8
|
Python
| false
| false
| 2,896
|
py
|
from huobi.model.constant import *
class OrderUpdateNew:
"""
The detail order information.
:member
match_id: The Match id for make order.
order_id: The order id.
symbol: The symbol, like "btcusdt".
state: The order state: submitted, partial-filled, cancelling, filled, canceled.
role: value is taker or maker
price: The limit price of limit order.
order_type: The order type, possible values are: buy-market, sell-market, buy-limit, sell-limit, buy-ioc, sell-ioc, buy-limit-maker, sell-limit-maker.
filled_amount: The amount which has been filled.
filled_cash_amount: The filled total in quote currency.
unfilled_amount: The amount which is unfilled.
"""
def __init__(self):
self.match_id = 0
self.order_id = 0
self.symbol = ""
self.state = OrderState.INVALID
self.role = ""
self.price = 0.0
self.filled_amount = 0.0
self.filled_cash_amount = 0.0
self.unfilled_amount = 0.0
self.client_order_id = ""
self.order_type = OrderType.INVALID
@staticmethod
def json_parse(json_data):
order_upd = OrderUpdateNew()
order_upd.match_id = json_data.get_int("match-id")
order_upd.order_id = json_data.get_int("order-id")
order_upd.symbol = json_data.get_string("symbol")
order_upd.state = json_data.get_string("order-state")
order_upd.role = json_data.get_string("role")
order_upd.price = json_data.get_float("price")
order_upd.order_type = json_data.get_string("order-type")
order_upd.filled_amount = json_data.get_float("filled-amount")
order_upd.filled_cash_amount = json_data.get_float("filled-cash-amount")
order_upd.unfilled_amount = json_data.get_float("unfilled-amount")
order_upd.client_order_id = json_data.get_string("client-order-id")
return order_upd
def print_object(self, format_data=""):
from huobi.base.printobject import PrintBasic
PrintBasic.print_basic(self.match_id, format_data + "Match Id")
PrintBasic.print_basic(self.order_id, format_data + "Order Id")
PrintBasic.print_basic(self.symbol, format_data + "Symbol")
PrintBasic.print_basic(self.state, format_data + "Order State")
PrintBasic.print_basic(self.role, format_data + "Role")
PrintBasic.print_basic(self.price, format_data + "Price")
PrintBasic.print_basic(self.filled_amount, format_data + "Filled Amount")
PrintBasic.print_basic(self.filled_cash_amount, format_data + "Filled Cash Amount")
PrintBasic.print_basic(self.unfilled_amount, format_data + "Unfilled Amount")
PrintBasic.print_basic(self.client_order_id, format_data + "Client Order Id")
PrintBasic.print_basic(self.order_type, format_data + "Order Type")
|
[
"devin0thinking@gmail.com"
] |
devin0thinking@gmail.com
|
1ad8e97dd542ba0a2310670381e40b114cef0bc8
|
cd67fbaec6ba71fdd10ac8dd23e034d7df706aa3
|
/bootstrap/urls.py
|
1db8a20ee1f67439cdb7324c6c4e7e6e93525396
|
[] |
no_license
|
ssyctlm/feb27
|
009310d09bc866d7432576d4a867b63591009cbb
|
8c666cf584e1589d06be16ba8b1266cb27646f39
|
refs/heads/master
| 2020-04-25T15:16:55.957159
| 2019-03-19T13:41:07
| 2019-03-19T13:41:07
| 172,873,078
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 350
|
py
|
from django.urls import path
from .views import (
index_view,
about_view,
services_view,
contact_view
)
# app_name = 'articles'
urlpatterns = [
path('',index_view,name='home'),
path('about',about_view,name = 'about'),
path('services',services_view,name = 'services'),
path('contact',contact_view,name = 'contact'),
]
|
[
"none"
] |
none
|
720efd4d9680f6c52878542ad045aff577e5aa38
|
a5a99f646e371b45974a6fb6ccc06b0a674818f2
|
/RecoBTag/Configuration/test/test_cfg.py
|
7fc7d9770ab2228f60b31accaf28c59b6b4e80a7
|
[
"Apache-2.0"
] |
permissive
|
cms-sw/cmssw
|
4ecd2c1105d59c66d385551230542c6615b9ab58
|
19c178740257eb48367778593da55dcad08b7a4f
|
refs/heads/master
| 2023-08-23T21:57:42.491143
| 2023-08-22T20:22:40
| 2023-08-22T20:22:40
| 10,969,551
| 1,006
| 3,696
|
Apache-2.0
| 2023-09-14T19:14:28
| 2013-06-26T14:09:07
|
C++
|
UTF-8
|
Python
| false
| false
| 843
|
py
|
import FWCore.ParameterSet.Config as cms
process = cms.Process("GeometryTest")
process.load("Configuration.StandardSequences.Reconstruction_cff")
process.load("Configuration.StandardSequences.FakeConditions_cff")
process.load("Configuration.EventContent.EventContent_cff")
process.load("Configuration.StandardSequences.MagneticField_cff")
process.maxEvents = cms.untracked.PSet(
input = cms.untracked.int32(1)
)
process.source = cms.Source("PoolSource",
fileNames = cms.untracked.vstring('/store/relval/2008/5/20/RelVal-RelValTTbar-1211209682-FakeConditions-2nd/0000/08765709-5826-DD11-9CE8-000423D94700.root')
)
process.RECO = cms.OutputModule("PoolOutputModule",
process.AODSIMEventContent,
fileName = cms.untracked.string('reco.root')
)
process.p1 = cms.Path(process.btagging)
process.p = cms.EndPath(process.RECO)
|
[
"giulio.eulisse@gmail.com"
] |
giulio.eulisse@gmail.com
|
f9348d48ee596bc1cd89fca043043b4b52b931d2
|
b144c5142226de4e6254e0044a1ca0fcd4c8bbc6
|
/ixnetwork_restpy/testplatform/sessions/ixnetwork/impairment/profile/accumulateandburst/accumulateandburst.py
|
091ec9d83b6e627d30bda819aaea8952ba46111d
|
[
"MIT"
] |
permissive
|
iwanb/ixnetwork_restpy
|
fa8b885ea7a4179048ef2636c37ef7d3f6692e31
|
c2cb68fee9f2cc2f86660760e9e07bd06c0013c2
|
refs/heads/master
| 2021-01-02T17:27:37.096268
| 2020-02-11T09:28:15
| 2020-02-11T09:28:15
| 239,721,780
| 0
| 0
|
NOASSERTION
| 2020-02-11T09:20:22
| 2020-02-11T09:20:21
| null |
UTF-8
|
Python
| false
| false
| 7,559
|
py
|
# MIT LICENSE
#
# Copyright 1997 - 2019 by IXIA Keysight
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from ixnetwork_restpy.base import Base
from ixnetwork_restpy.files import Files
class AccumulateAndBurst(Base):
"""Accumulates packets in a queue and transmit groups of packets as a burst. It can only be used on a profile if delayVariation and customDelayVariation are disabled.
The AccumulateAndBurst class encapsulates a required accumulateAndBurst resource which will be retrieved from the server every time the property is accessed.
"""
__slots__ = ()
_SDM_NAME = 'accumulateAndBurst'
def __init__(self, parent):
super(AccumulateAndBurst, self).__init__(parent)
@property
def BurstSize(self):
"""Represents the burst octet size. The default value is 1014.
Returns:
number
"""
return self._get_attribute('burstSize')
@BurstSize.setter
def BurstSize(self, value):
self._set_attribute('burstSize', value)
@property
def BurstSizeUnit(self):
"""The burst size unit is either megabytes or kilobytes. The default unit is kilobytes.
Returns:
str(kilobytes|kKilobytes|kMegabytes|megabytes)
"""
return self._get_attribute('burstSizeUnit')
@BurstSizeUnit.setter
def BurstSizeUnit(self, value):
self._set_attribute('burstSizeUnit', value)
@property
def BurstTimeout(self):
"""The burst timeout.The default value is 5 seconds.
Returns:
str
"""
return self._get_attribute('burstTimeout')
@BurstTimeout.setter
def BurstTimeout(self, value):
self._set_attribute('burstTimeout', value)
@property
def BurstTimeoutUnit(self):
"""Seconds(default) / milliseconds / mm:ss.fff time format.
Returns:
str(kMilliseconds|kSeconds|kTimeFormat|milliseconds|seconds|timeFormat)
"""
return self._get_attribute('burstTimeoutUnit')
@BurstTimeoutUnit.setter
def BurstTimeoutUnit(self, value):
self._set_attribute('burstTimeoutUnit', value)
@property
def Enabled(self):
"""If true, received packets are queued and transmitted in bursts.
Returns:
bool
"""
return self._get_attribute('enabled')
@Enabled.setter
def Enabled(self, value):
self._set_attribute('enabled', value)
@property
def InterBurstGap(self):
"""Tail to head (default) / Head to head.
Returns:
str(headToHead|kHeadToHead|kTailToHead|tailToHead)
"""
return self._get_attribute('interBurstGap')
@InterBurstGap.setter
def InterBurstGap(self, value):
self._set_attribute('interBurstGap', value)
@property
def InterBurstGapValue(self):
"""The InterBurst gap value. The default value is 20 ms.
Returns:
number
"""
return self._get_attribute('interBurstGapValue')
@InterBurstGapValue.setter
def InterBurstGapValue(self, value):
self._set_attribute('interBurstGapValue', value)
@property
def InterBurstGapValueUnit(self):
"""Seconds / milliseconds (default).
Returns:
str(kMilliseconds|kSeconds|milliseconds|seconds)
"""
return self._get_attribute('interBurstGapValueUnit')
@InterBurstGapValueUnit.setter
def InterBurstGapValueUnit(self, value):
self._set_attribute('interBurstGapValueUnit', value)
@property
def PacketCount(self):
"""Represents the burst packet count. The default value is 1000 packets.
Returns:
number
"""
return self._get_attribute('packetCount')
@PacketCount.setter
def PacketCount(self, value):
self._set_attribute('packetCount', value)
@property
def QueueAutoSize(self):
"""Gets the automatically calculated queue size when queueAutoSizeEnable is true or zero when queueAutoSizeEnable is false.
Returns:
number
"""
return self._get_attribute('queueAutoSize')
@property
def QueueAutoSizeEnabled(self):
"""Automatically calculate queue size. The default value is true.
Returns:
bool
"""
return self._get_attribute('queueAutoSizeEnabled')
@QueueAutoSizeEnabled.setter
def QueueAutoSizeEnabled(self, value):
self._set_attribute('queueAutoSizeEnabled', value)
@property
def QueueSize(self):
"""The accumulate-and-burst queue size expressed in MB. The default value is 1.
Returns:
number
"""
return self._get_attribute('queueSize')
@QueueSize.setter
def QueueSize(self, value):
self._set_attribute('queueSize', value)
def update(self, BurstSize=None, BurstSizeUnit=None, BurstTimeout=None, BurstTimeoutUnit=None, Enabled=None, InterBurstGap=None, InterBurstGapValue=None, InterBurstGapValueUnit=None, PacketCount=None, QueueAutoSizeEnabled=None, QueueSize=None):
"""Updates a child instance of accumulateAndBurst on the server.
Args:
BurstSize (number): Represents the burst octet size. The default value is 1014.
BurstSizeUnit (str(kilobytes|kKilobytes|kMegabytes|megabytes)): The burst size unit is either megabytes or kilobytes. The default unit is kilobytes.
BurstTimeout (str): The burst timeout.The default value is 5 seconds.
BurstTimeoutUnit (str(kMilliseconds|kSeconds|kTimeFormat|milliseconds|seconds|timeFormat)): Seconds(default) / milliseconds / mm:ss.fff time format.
Enabled (bool): If true, received packets are queued and transmitted in bursts.
InterBurstGap (str(headToHead|kHeadToHead|kTailToHead|tailToHead)): Tail to head (default) / Head to head.
InterBurstGapValue (number): The InterBurst gap value. The default value is 20 ms.
InterBurstGapValueUnit (str(kMilliseconds|kSeconds|milliseconds|seconds)): Seconds / milliseconds (default).
PacketCount (number): Represents the burst packet count. The default value is 1000 packets.
QueueAutoSizeEnabled (bool): Automatically calculate queue size. The default value is true.
QueueSize (number): The accumulate-and-burst queue size expressed in MB. The default value is 1.
Raises:
ServerError: The server has encountered an uncategorized error condition
"""
self._update(locals())
|
[
"srvc_cm_packages@keysight.com"
] |
srvc_cm_packages@keysight.com
|
531a249844fca891544a673ad19ecb26f8145614
|
9cb43a47faef0d3f5c7a6986cb2b21a0a89b6972
|
/file-operations-02/clonefootage.py
|
5e3b060fc83ff787e47778f2f1c91a95909bb04b
|
[] |
no_license
|
fsiddi/generic-tools
|
3e0f2c7ecaf469dcb8f173e191cd7d891fff8bc6
|
432463ec468a695551d7093c4851d5248f1d7764
|
refs/heads/master
| 2021-03-12T19:56:53.373367
| 2013-01-13T14:17:15
| 2013-01-13T14:17:15
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,357
|
py
|
import subprocess
import os
import shutil
FOLDER_SRC = "/Users/fsiddi/Desktop/clonefootage/footage_src"
FOLDER_DST = "/Users/fsiddi/Desktop/clonefootage/footage_dst"
for dirname, dirnames, filenames in os.walk(FOLDER_SRC):
for filename in filenames:
if "linear_hd" in dirname:
filename_src = os.path.join(dirname, filename)
dirname_dst = dirname.replace(FOLDER_SRC, FOLDER_DST)
''''if filename.endswith(".png"):
if not os.path.exists(dirname_dst):
os.makedirs(dirname_dst)
filename_jpg = filename.replace(".png", ".jpg")
filename_dst = os.path.join(dirname_dst, filename_jpg)
print filename_src + " >> " + filename_dst
elif filename.endswith(".jpg"):
if not os.path.exists(dirname_dst):
os.makedirs(dirname_dst)
filename_dst = os.path.join(dirname_dst, filename)
print filename_src + " >> " + filename_dst'''
if filename.endswith(".exr"):
if not os.path.exists(dirname_dst):
#pass
os.makedirs(dirname_dst)
filename_dst = os.path.join(dirname_dst, filename)
if not os.path.exists(filename_dst):
print filename_src + " >> " + filename_dst
shutil.copy(filename_src, filename_dst)
else:
print "skipping " + filename_src
else:
pass
#subprocess.call(["convert", filename_src, "-resize", "1280x1280", filename_dst])
else:
print "skipping " + dirname
|
[
"francesco.siddi@gmail.com"
] |
francesco.siddi@gmail.com
|
a2ac5030018622f024f8ca2435442ccd4f597f21
|
8ffc07a5240be5e6bb3106b20e11aee38cb8808a
|
/syloga/utils/symbols.py
|
aaf3a96c6a1d1b704aa3654833e56b44a3d8a098
|
[
"MIT"
] |
permissive
|
xaedes/python-symbolic-logic-to-gate
|
315a242d2000123bf09ea15a439dc6437ea457cb
|
a0dc9be9e04290008cf709fac789d224ab8c14b0
|
refs/heads/main
| 2023-09-02T15:37:53.750722
| 2021-11-08T12:51:51
| 2021-11-08T12:51:51
| 425,550,839
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 148
|
py
|
from syloga.ast.core import Symbol
from syloga.ast.containers import Tuple
def symbols(string):
return Tuple(*map(Symbol,string.split(" ")))
|
[
"xaedes@gmail.com"
] |
xaedes@gmail.com
|
fc6b16aacd2c5eb9924c98c120e85092a8d4ec26
|
233b2958c853dc57dfa5d54caddbc1520dcc35c8
|
/ava/cmds/pod.py
|
51fe03691017ccf91af0b199173ecdc252918607
|
[] |
no_license
|
eavatar/ava.node
|
6295ac6ed5059ebcb6ce58ef6e75adf1bfa24ed7
|
71e3304d038634ef13f44d245c3838d276a275e6
|
refs/heads/master
| 2021-01-19T06:13:01.127585
| 2015-06-03T03:10:59
| 2015-06-03T03:10:59
| 33,645,210
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,159
|
py
|
# -*- coding: utf-8 -*-
"""
Command for managing local pod directory.
"""
from __future__ import (absolute_import, division,
print_function, unicode_literals)
import os
import shutil
import click
from ava.runtime import environ
from .cli import cli
@cli.group()
def pod():
""" Pod management.
"""
pass
@pod.command()
@click.argument("folder", type=click.Path(exists=False))
def init(folder):
"""
Constructs the skeleton of directories if it not there already.
:return:
"""
if os.path.exists(folder):
click.echo("Folder %s is not empty!" % folder, err=True)
return
os.makedirs(folder)
src_dir = environ.pod_dir()
# copy files from base_dir to user_dir
subdirs = os.listdir(src_dir)
for d in subdirs:
src_path = os.path.join(src_dir, d)
dst_path = os.path.join(folder, d)
if os.path.isdir(src_path):
shutil.copytree(src_path, dst_path)
else:
shutil.copy2(src_path, dst_path)
@pod.command()
def open():
""" Open Pod folder in a file explorer or the like.
"""
click.launch(environ.pod_dir())
|
[
"sam@eavatar.com"
] |
sam@eavatar.com
|
64765314abb3f1daa11e18f0f3f06465242daf37
|
ed0ead8389adb7cd81ade57f972afea7de896ffc
|
/haffa/Badge.py
|
14d280ec8b20140e125365d399cdae00e046556e
|
[] |
no_license
|
rblack42/haffa
|
4e55664e1274b9ceec25fdfbc4603a03592229c0
|
526814e4024c1289cb2a79115d173772c82b5a88
|
refs/heads/master
| 2022-11-18T14:00:52.443335
| 2020-07-12T13:28:55
| 2020-07-12T13:28:55
| 273,238,463
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,706
|
py
|
from svgelements import *
from Shapes import paths, boxes
from svgelements import Path
SVGDIR = "svg"
class Badge(object):
def __init__(self, height):
""" load shape and position data"""
print("Generating SVG file for height %d" % height)
self.paths = paths
self.boxes = boxes
def normalize(self, path):
""" normalize path to height of 1000"""
box = path.bbox()
x1, y1, x2, y2 = box
dx = x2 - x1
dy = y2 - y1
scale = 1000 / dy
t = "translate(%s,%s)" % (-x1, -y1)
s = "scale(%s)" % scale
tp = path * t
sp = tp * s
return sp
def transform(self, shape, x, y, w, h):
print("transforming", x, y, w, h)
bbox = shape.bbox()
print(bbox)
x1, y1, x2, y2 = bbox
bdx = x2 - x1
bdy = y2 - y1
scalex = w/bdx
scaley = h/bdy
print(bdx, bdy)
s = 'scale(%s,%s)' % (scalex, scaley)
t = 'translate(%s,%s)' % (x, y)
print(s, t)
sc = shape * s
tc = sc * t
return tc
def gen_raw_svg(self):
"""generate standard view of shapes"""
for s in self.paths:
shape_path = paths[s]
sp = Path(shape_path)
sp = self.normalize(sp)
bb = sp.bbox()
x1, y1, x2, y2 = bb
dx = x2 - x1
dy = y2 - y1
sp = self.transform(sp, 20, 20, dx*0.6, dy*0.6)
d = sp.d()
db = "M 20,20 h %s v %s H 20 V 20 Z" % (dx*0.6, dy*0.6)
svg = """<svg width="%d" height="%d"
xmlns="http://www.w3.org/2000/svg" >""" % (dx, dy)
svg += """ <path style="fill:none"
stroke="black" stroke-width="3" d="%s" />""" % db
svg += """
<path style="fill:none" stroke="red" stroke-width="3" d="%s" />""" % d
svg += """"
</svg>"""
fname = "%s/raw_%s.svg" % (SVGDIR, s)
with open(fname, "w") as fout:
fout.write(svg)
def gen_placement(self):
cx1, cy1, cx2, cy2 = self.boxes["canvas"]
width = cx2 - cx1 + 10
height = cy2 - cy1 + 10
svg = """<svg width="%d" height="%d"
xmlns="http://www.w3.org/2000/svg"
>""" % (width, height)
for b in self.boxes:
if b == "canvas":
continue
shape = b
if len(b) == 2:
shape = shape[0]
print("placing ", b, " with shape: ", shape)
path = self.paths[shape]
x1, y1, x2, y2 = self.boxes[b]
w = x2 - x1
h = y2 - y1
print(x1, y1, x2, y2, w, h)
sp = Path(path)
sp = self.normalize(sp)
sp = self.transform(sp, x1, y1, w, h)
print("shape box:", sp.bbox())
d = sp.d()
svg += """
<rect x="%d" y="%d"
width="%d" height="%d"
stroke="black" stroke-width="2"
fill="none" />""" % (x1, y1, w, h)
svg += """
<path style="fill:none" stroke="red" stroke-width="3" d="%s" />""" % d
svg += "</svg>"
with open("svg/layout.svg", "w") as fout:
fout.write(svg)
def get_logo_placement(self, size):
"""calculate scale and x,y to fit in circle of radius=size"""
x1, y1, x2, y2 = boxes["canvas"]
width = x2 - x1
height = y2 - y1
ar = width / height
if __name__ == "__main__":
l = Logo(1000)
heart = paths["heart"]
bbox = boxes["heart"]
print(heart)
p = Path(heart)
print(p.d())
print(p.bbox())
x1, y1, x2, y2 = bbox
print(x1, y1, x2, y2)
l.gen_raw_svg()
l.gen_placement()
|
[
"roie.black@gmail.com"
] |
roie.black@gmail.com
|
acb8348fecf068802da63c27c4edb3dfd4a38d12
|
95495baeb47fd40b9a7ecb372b79d3847aa7a139
|
/test/test_i_ospfv3_neighbor_configuration.py
|
63449143673653979dfb3179399a45495a40dc62
|
[] |
no_license
|
pt1988/fmc-api
|
b1d8ff110e12c13aa94d737f3fae9174578b019c
|
075f229585fcf9bd9486600200ff9efea5371912
|
refs/heads/main
| 2023-01-07T09:22:07.685524
| 2020-10-30T03:21:24
| 2020-10-30T03:21:24
| 308,226,669
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,308
|
py
|
# coding: utf-8
"""
Cisco Firepower Management Center Open API Specification
**Specifies the REST URLs and methods supported in the Cisco Firepower Management Center API. Refer to the version specific [REST API Quick Start Guide](https://www.cisco.com/c/en/us/support/security/defense-center/products-programming-reference-guides-list.html) for additional information.** # noqa: E501
OpenAPI spec version: 1.0.0
Contact: tac@cisco.com
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import unittest
import swagger_client
from swagger_client.models.i_ospfv3_neighbor_configuration import IOspfv3NeighborConfiguration # noqa: E501
from swagger_client.rest import ApiException
class TestIOspfv3NeighborConfiguration(unittest.TestCase):
"""IOspfv3NeighborConfiguration unit test stubs"""
def setUp(self):
pass
def tearDown(self):
pass
def testIOspfv3NeighborConfiguration(self):
"""Test IOspfv3NeighborConfiguration"""
# FIXME: construct object with mandatory attributes with example values
# model = swagger_client.models.i_ospfv3_neighbor_configuration.IOspfv3NeighborConfiguration() # noqa: E501
pass
if __name__ == '__main__':
unittest.main()
|
[
"pt1988@gmail.com"
] |
pt1988@gmail.com
|
0030fcf22dbc77c896437faa18ec33a89a2f6c56
|
29625c33dc9642d984d7cf68763d57a9de62743e
|
/Bot/cogs/snipe.py
|
cd84e9a5d14d06e388ec6620f7a1eaee678360d7
|
[
"BSD-3-Clause",
"LicenseRef-scancode-unknown-license-reference"
] |
permissive
|
DevRKJha/EpicBot
|
7fe508d828ad6cde087213cdd1fbb3a480529905
|
33eb8b7d5ee1120865da91b9a31dc559657c318c
|
refs/heads/main
| 2023-06-07T14:12:26.975039
| 2021-07-04T09:51:53
| 2021-07-04T09:51:53
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,698
|
py
|
import discord
from discord.ext import commands
class Snipe(commands.Cog):
def __init__(self, client):
self.client = client
self.client.sniped_messages = {}
self.client.edit_sniped_messages = {}
@commands.Cog.listener()
async def on_message_delete(self, message):
if message.author.bot:
return
self.client.sniped_messages[message.guild.id, message.channel.id] = (
message.content, message.author, message.channel.name,
message.created_at, message.attachments)
@commands.Cog.listener()
async def on_message_edit(self, before, after):
if before.author.bot:
return
self.client.edit_sniped_messages[before.guild.id, before.channel.id] = (
before.content,
after.content,
before.author,
before.channel.name
)
@commands.command(aliases=['s'])
async def snipe(self, ctx):
try:
contents, author, channel_name, time, attachments = self.client.sniped_messages[
ctx.guild.id, ctx.channel.id]
files = ""
for file in attachments:
files += f"[{file.filename}]({file.proxy_url})" + "\n"
embed = discord.Embed(
description=contents, color=0x00FFFF, timestamp=time)
embed.set_author(
name=f"{author.name}#{author.discriminator}",
icon_url=author.avatar_url)
embed.add_field(
name="Attachments",
value=files[:-1] if len(attachments) != 0 else "None"
)
embed.set_footer(text=f"Deleted in #{channel_name}")
await ctx.send(embed=embed)
except:
await ctx.send("No messages were deleted here.")
@commands.command(aliases = ['es'])
async def editsnipe(self, ctx):
try:
before_content, after_content, author, channel_name = self.client.edit_sniped_messages[ctx.guild.id, ctx.channel.id]
embed = discord.Embed(description = f"**Before:**\n{before_content}\n\n**After:**\n{after_content}", color=0x00FFFF)
embed.set_author(name=f"{author.name}#{author.discriminator}", icon_url=author.avatar_url)
embed.set_footer(text=f"Edited in #{channel_name}")
await ctx.send(embed=embed)
except:
await ctx.send("No messages were edited here.")
def setup(client):
client.add_cog(Snipe(client))
|
[
"hellonirlep@gmail.com"
] |
hellonirlep@gmail.com
|
bdc0e3f4bf90ef80c3e1cbf6474771ad81912cc5
|
be1762141886e27e2e542324ffb4650546aee58d
|
/setup.py
|
24ce1ea64205555595d2b03b54e784c4b012fea5
|
[] |
no_license
|
rgleason/pypilot
|
71c2aad9a9894c84a1a9819078887ea041ff0e7b
|
66eabcc63a11c96b84f58588c87b6ef710ed5826
|
refs/heads/master
| 2023-06-12T09:26:42.846470
| 2021-06-29T06:38:37
| 2021-06-29T06:39:11
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,879
|
py
|
#!/usr/bin/env python
#
# Copyright (C) 2017 Sean D'Epagnier
#
# This Program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation; either
# version 3 of the License, or (at your option) any later version.
import sys
import os, os.path
if sys.version_info[0] < 3:
print('pypilot requires python version 3. python version is', sys.version)
exit(1)
if not os.path.exists('deps'):
import dependencies
try:
from setuptools import setup, Extension
except ImportError:
from distutils.core import setup, Extension
linebuffer_module = Extension('pypilot/linebuffer/_linebuffer',
sources=['pypilot/linebuffer/linebuffer.cpp', 'pypilot/linebuffer/linebuffer.i'],
extra_compile_args=['-Wno-unused-result'],
swig_opts=['-c++']
)
arduino_servo_module = Extension('pypilot/arduino_servo/_arduino_servo',
sources=['pypilot/arduino_servo/arduino_servo.cpp', 'pypilot/arduino_servo/arduino_servo_eeprom.cpp', 'pypilot/arduino_servo/arduino_servo.i'],
extra_compile_args=['-Wno-unused-result'],
swig_opts=['-c++']
)
ugfx_defs = ['-DWIRINGPI']
try:
import RPi.GPIO
ugfx_libraries=['wiringPi']
except:
try:
import OPi.GPIO
ugfx_libraries=['wiringPi']
except:
print('no RPi.GPIO library for ugfx')
ugfx_libraries=[]
ugfx_defs = []
ugfx_module = Extension('pypilot/hat/ugfx/_ugfx',
sources=['hat/ugfx/ugfx.cpp',
'hat/ugfx/ugfx.i'],
extra_compile_args=['-Wno-unused-result'] + ugfx_defs,
libraries=ugfx_libraries,
swig_opts=['-c++'] + ugfx_defs
)
locale_files = []
for walk in os.walk('hat/locale'):
path, dirs, files = walk
path = path[len('hat/'):]
for file in files:
if file[len(file)-3:] == '.mo':
locale_files.append(os.path.join(path, file))
from pypilot import version
packages = ['pypilot', 'pypilot/pilots', 'pypilot/arduino_servo', 'ui', 'hat', 'web', 'pypilot/linebuffer', 'hat/ugfx']
try:
from setuptools import find_packages
packages = find_packages()
except:
pass
# ensure all packages are under pypilot
package_dirs = {}
for package in list(packages):
if not package.startswith('pypilot'):
packages.remove(package)
packages.append('pypilot.'+package)
package_dirs['pypilot.'+package] = package.replace('.', '/')
setup (name = 'pypilot',
version = version.strversion,
description = 'pypilot sailboat autopilot',
license = 'GPLv3',
author="Sean D'Epagnier",
url='http://pypilot.org/',
packages=packages,
package_dir=package_dirs,
ext_modules = [arduino_servo_module, linebuffer_module, ugfx_module],
package_data={'pypilot.hat': ['font.ttf', 'static/*', 'templates/*'] + locale_files,
'pypilot.ui': ['*.png', '*.mtl', '*.obj'],
'pypilot.web': ['static/*', 'templates/*']},
entry_points={
'console_scripts': [
'pypilot=pypilot.autopilot:main',
'pypilot_boatimu=pypilot.boatimu:main',
'pypilot_servo=pypilot.servo:main',
'pypilot_web=pypilot.web.web:main',
'pypilot_hat=pypilot.hat.hat:main',
'pypilot_control=pypilot.ui.autopilot_control:main',
'pypilot_calibration=pypilot.ui.autopilot_calibration:main',
'pypilot_client=pypilot.client:main',
'pypilot_scope=pypilot.ui.scope_wx:main',
'pypilot_client_wx=pypilot.ui.client_wx:main'
]
}
)
|
[
"seandepagnier@gmail.com"
] |
seandepagnier@gmail.com
|
73bc81c737025f384a2d55f27dbb83a3292b5dc9
|
ba9387ad04a79e5e89204b2f292d01323c7198ad
|
/backend/chat_user_profile/migrations/0001_initial.py
|
89ad1019538532877afb9c0211c55783492d29f1
|
[] |
no_license
|
crowdbotics-apps/message52-19836
|
b9ec7d032a1548ba71153c443486a3b7c38da5f9
|
fde8c1c0b0de9e939e156e29bb2cb4dce1607cad
|
refs/heads/master
| 2022-12-04T23:48:19.002965
| 2020-08-28T02:37:50
| 2020-08-28T02:37:50
| 290,932,742
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,659
|
py
|
# Generated by Django 2.2.15 on 2020-08-28 02:37
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='Profile',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('mobile_number', models.CharField(max_length=20)),
('pin', models.CharField(max_length=100)),
('photo', models.URLField()),
('status', models.CharField(max_length=50)),
('birthdate', models.DateField()),
('gender', models.CharField(max_length=1)),
('timestamp_created', models.DateTimeField(auto_now_add=True)),
('last_updated', models.DateTimeField(auto_now=True)),
('last_login', models.DateTimeField()),
('user', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, related_name='profile_user', to=settings.AUTH_USER_MODEL)),
],
),
migrations.CreateModel(
name='VerificationCode',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('code', models.CharField(max_length=255)),
('is_verified', models.BooleanField()),
('timestamp_created', models.DateTimeField(auto_now_add=True)),
('timestamp_verified', models.DateTimeField()),
('sent_to', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='verificationcode_sent_to', to='chat_user_profile.Profile')),
],
),
migrations.CreateModel(
name='Contact',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('is_blocked', models.BooleanField()),
('is_favorite', models.BooleanField()),
('timestamp_created', models.DateTimeField(auto_now_add=True)),
('added_by', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='contact_added_by', to=settings.AUTH_USER_MODEL)),
('added_profile', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='contact_added_profile', to='chat_user_profile.Profile')),
],
),
]
|
[
"team@crowdbotics.com"
] |
team@crowdbotics.com
|
016c75557647665c5a3773b8cf354ade5c11502f
|
941c912f44beff33a072e086c1f561f6cdd64626
|
/LeetCode/codes/22.py
|
84f0db4955118accd480b9d684a7ae03a363e1dc
|
[] |
no_license
|
adreena/MyStudyCorner
|
3a13a743769ed144965b767f547c16df4d0fa0dd
|
355c0dbd32ad201800901f1bcc110550696bc96d
|
refs/heads/master
| 2023-02-20T07:39:32.391421
| 2021-01-25T01:46:21
| 2021-01-25T01:46:21
| 255,104,133
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 540
|
py
|
# time catalan numbers (2n n)*1/n
# space: catalan numbers
class Solution:
def generateParenthesis(self, n: int) -> List[str]:
self.outputs = []
def helper(n_left, n_right, output):
if n_left == 0 and n_right == 0:
self.outputs.append(output)
else:
if n_left>0:
helper(n_left-1, n_right, output+'(')
if n_right>n_left:
helper(n_left, n_right-1, output+')')
helper(n,n,'')
return self.outputs
|
[
"kim.hszd@gmail.com"
] |
kim.hszd@gmail.com
|
c114e9e4c5fbe43f5efbc36d8ddc04c35dd32490
|
af82475dc7eb45c478414372c222e7b6016359d4
|
/python书籍/Python For Finance Code/Code of Python For Finance/4375OS_08_Code/4375OS_08_12_Series.py
|
f279f6cc3587504d87af31fda1b21a119cea0200
|
[] |
no_license
|
enfangzhong/PythonBaseCode
|
8f58c8b817eb9f4b0f0a5be437a52d5b5fab3433
|
9ab4a578b2692fdbb6aeeacb310251d51f72e953
|
refs/heads/master
| 2020-05-17T16:26:02.598344
| 2019-04-27T20:49:40
| 2019-04-27T20:49:40
| 183,817,172
| 4
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 365
|
py
|
"""
Name : 4375OS_08_12_Series.py
Book : Python for Finance
Publisher: Packt Publishing Ltd.
Author : Yuxing Yan
Date : 12/26/2013
email : yany@canisius.edu
paulyxy@hotmail.com
"""
import pandas as pd
x = pd.date_range('1/1/2013', periods=252)
data = pd.Series(randn(len(x)), index=x)
print data.head()
print data.tail()
|
[
"944727327@qq.com"
] |
944727327@qq.com
|
5f78d5f22130ef95b5451dbb67e83853d93a80b0
|
a6566ebc69ed5e7a17e2091bdb10e7b6523eefc9
|
/py/notifier/config.py
|
49ae8e99337c9f08bbbe648c1ba901d19f2924d8
|
[
"MIT"
] |
permissive
|
mabotech/mabo.task
|
916e71650b45a24bb3852206a3755a7fd0342e47
|
96752a5ae94349a46e3b6f9369cc0933d5e37be0
|
refs/heads/master
| 2020-06-05T13:05:02.768838
| 2015-11-29T08:18:10
| 2015-11-29T08:18:10
| 23,750,849
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 354
|
py
|
import toml
from singleton import Singleton
class Config(object):
__metacass__ = Singleton
def __init__(self):
conf_fn = "conf.toml"
with open(conf_fn) as conf_fh:
toml_str = conf_fh.read()
self.conf = toml.loads(toml_str)
def get_conf(self):
return self.conf
|
[
"aidear@163.com"
] |
aidear@163.com
|
a08866fdeb02d1584aca3775017ebe2a118292d7
|
04252676935223f4d03eff0393ba921cb00be1e5
|
/api/__init__.py
|
59d9087892b08ac0854226a2346b05ea5f5dff99
|
[] |
no_license
|
kongp3/cross4u
|
4083aec3e5fe9de1d351d25609cbdf996df6abe3
|
8bd4dcfe8ae8fee5a3f169428b138b1294633da0
|
refs/heads/master
| 2020-11-29T23:11:17.838675
| 2019-12-30T06:56:20
| 2019-12-30T06:56:20
| 230,235,707
| 0
| 0
| null | 2019-12-26T09:33:09
| 2019-12-26T09:31:52
|
Python
|
UTF-8
|
Python
| false
| false
| 1,419
|
py
|
# -*- coding: utf-8 -*-
import traceback
from flask import jsonify
from functools import wraps
class RestResponse(object):
""" 标准的接口Response类, 所有的api必须返回这个类的对象, 以便统一处理返回 """
def __init__(self,):
pass
def fail(self, code=500, message="Server Got A Exception"):
d = {'meta': {
'success': False, 'status_code': code,
'message': message
}}
json_response = jsonify(d, )
return json_response
def success(self, code=200, data=None):
d = {'meta': {
'success': True, 'status_code': code,
'message': "Requset Successes"
}, 'data': data}
json_response = jsonify(d)
return json_response
def error_handler(f):
"""
统一处理异常和返回错误信息, 增加了未知的耦合
就目前来看还是没问题的
:param f:
:return:
"""
@wraps(f)
def decorated_function(*args, **kwargs):
response = RestResponse()
try:
result = f(response=response, *args, **kwargs)
return result
except ValueError as e:
traceback.print_exc(limit=5)
return response.fail(400, e.message)
except Exception as e:
traceback.print_exc(limit=5)
return response.fail(500, message=e.message)
return decorated_function
|
[
"kongp3@outlook"
] |
kongp3@outlook
|
f63660d7a58a51a6d96d21c74bf21e35e3469584
|
6bfda75657070e177fa620a43c917096cbd3c550
|
/kubernetes/test/test_v1_job_status.py
|
f929582f971cf175bfa94a502d44fde0352fba4f
|
[
"Apache-2.0"
] |
permissive
|
don41382/client-python
|
8e7e747a62f9f4fc0402eea1a877eab1bb80ab36
|
e69d4fe204b98f7d7ee3ada3996b4f5fbceae5fe
|
refs/heads/master
| 2021-01-19T23:15:50.172933
| 2017-04-18T18:00:48
| 2017-04-18T18:00:48
| 88,943,866
| 0
| 0
| null | 2017-04-21T05:19:52
| 2017-04-21T05:19:52
| null |
UTF-8
|
Python
| false
| false
| 827
|
py
|
# coding: utf-8
"""
Kubernetes
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen)
OpenAPI spec version: v1.6.1
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import os
import sys
import unittest
import kubernetes.client
from kubernetes.client.rest import ApiException
from kubernetes.client.models.v1_job_status import V1JobStatus
class TestV1JobStatus(unittest.TestCase):
""" V1JobStatus unit test stubs """
def setUp(self):
pass
def tearDown(self):
pass
def testV1JobStatus(self):
"""
Test V1JobStatus
"""
model = kubernetes.client.models.v1_job_status.V1JobStatus()
if __name__ == '__main__':
unittest.main()
|
[
"mehdy@google.com"
] |
mehdy@google.com
|
4e2a5aac3f2d8596ac600f55307f6e113b1f375b
|
71da259f71428648d4285b1b4863ec2b7641e58c
|
/ecom/website/filters.py
|
561533e588086d70c2830bed74278b2c4dbe38da
|
[] |
no_license
|
rafimuhammad01/e-com
|
8a58d0ccff27516da260b41c180c703fa22e76b3
|
75d451bfc10075090d88d5a16dbd03f626ff72ef
|
refs/heads/master
| 2023-02-18T08:48:26.144483
| 2021-01-15T08:56:54
| 2021-01-15T08:56:54
| 291,625,792
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 404
|
py
|
import django_filters
from .models import Product
class ProductFilter(django_filters.FilterSet) :
class Meta:
model = Product
fields = {
'price' : ['lt', 'gt'],
'review__rate' : ['iexact']
}
class ProductSearch(django_filters.FilterSet) :
class Meta:
model = Product
fields = {
'name' : ['icontains'],
}
|
[
"rafi10muhammad@gmail.com"
] |
rafi10muhammad@gmail.com
|
6d5a2f9b2ddc3bfe891a3c7d27a364e5c4cb78eb
|
cdbb11473dc8d34767a5916f9f85cb68eb2ca3f2
|
/sde/migrations/0036_auto_20180729_1518.py
|
a313a94deec723d4c8b6e96c080fc6923b8eec30
|
[] |
no_license
|
skyride/evestats
|
fb2a1a248952771731dcfecadab7d02b1f08cd4b
|
4bd2153f65c084b478272513733dcc78f9a0ef98
|
refs/heads/master
| 2020-03-23T13:50:19.216870
| 2018-08-05T19:19:47
| 2018-08-05T19:19:47
| 141,640,834
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 587
|
py
|
# Generated by Django 2.0.7 on 2018-07-29 15:18
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('sde', '0035_auto_20180729_1456'),
]
operations = [
migrations.RemoveField(
model_name='attributetype',
name='unit_id',
),
migrations.AddField(
model_name='attributetype',
name='unit',
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to='sde.Unit'),
),
]
|
[
"adam.findlay@mercurytide.co.uk"
] |
adam.findlay@mercurytide.co.uk
|
045c865fc678eba2750f62646d81f6c24d5e15cb
|
7e93b1c33045b4c03054f42b6a2b800279b12a9b
|
/core/cache/backends/redis/compressors/base.py
|
8d9e74deabf56d11d14abf6ab944ca71c3f9526c
|
[
"MIT"
] |
permissive
|
anthill-arch/framework
|
6f8036980667843f2be1414850255cf6a10e2dcd
|
a6c238a62ae9c3fb319d12e77f7e9047aab75e8d
|
refs/heads/master
| 2020-05-09T06:01:31.186830
| 2019-08-23T13:52:43
| 2019-08-23T13:52:43
| 180,988,916
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 229
|
py
|
class BaseCompressor(object):
def __init__(self, options):
self._options = options
def compress(self, value):
raise NotImplementedError
def decompress(self, value):
raise NotImplementedError
|
[
"x55aah@gmail.com"
] |
x55aah@gmail.com
|
f6c7e47f18fecf5204af653cae821b0dbc934729
|
3a60b8935f809e300405214a66d949f0042e7e46
|
/src/map/tile.py
|
de575e0a0d16478ca90b3e9f8119073989688f77
|
[] |
no_license
|
stellarlib/centaurus
|
e71fe5c98b94e8e575d00e32f55ba39fe71799e6
|
896ae73165f3f44dfb87378ef2635d447ccbccae
|
refs/heads/master
| 2020-08-29T00:02:47.294370
| 2020-07-06T20:06:02
| 2020-07-06T20:06:02
| 217,860,282
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,355
|
py
|
from random import randint
from src.map.hex_map_properties import EdgeID
class Tile(object):
n = 15
GRASS, WOODS, WATER, ROCKS, SAND, CLAY, ROAD, WALL, EXIT,\
EXIT0, EXIT1, EXIT2, EXIT3, EXIT4, EXIT5 = range(n)
OPEN = {GRASS, ROAD}
IMPASSABLE = {WALL, ROCKS, WATER}
OBSTACLE = {WALL, ROCKS}
SLOWS_CHARGE = {WOODS, WATER, SAND, CLAY}
DEADLY = {WATER}
SHELTERED = {WOODS, WALL, ROCKS, WATER}
EXIT_TILES = {EXIT, EXIT0, EXIT1, EXIT2, EXIT3, EXIT4, EXIT5}
IMPASSABLE.update(EXIT_TILES)
OBSTACLE.update(EXIT_TILES)
SHELTERED.update(EXIT_TILES)
EDGE_ID_TO_EXIT = {
EdgeID.Ae: EXIT0,
EdgeID.Be: EXIT1,
EdgeID.Ce: EXIT2,
EdgeID.De: EXIT3,
EdgeID.Ee: EXIT4,
EdgeID.Fe: EXIT5,
}
@classmethod
def random_tile(cls):
return randint(0, cls.n-1)
@classmethod
def is_open(cls, t):
return t not in cls.OPEN
@classmethod
def is_passable(cls, t):
return t not in cls.IMPASSABLE
@classmethod
def is_targetable(cls, t):
return t not in cls.SHELTERED
@classmethod
def is_obstacle(cls, t):
return t in cls.OBSTACLE
@classmethod
def is_slowing(cls, t):
return t in cls.SLOWS_CHARGE
@classmethod
def is_deadly(cls, t):
return t in cls.DEADLY
|
[
"marzecsean@gmail.com"
] |
marzecsean@gmail.com
|
9ba384d7416217505108520e70e49bd802012c66
|
7b3009e019e081667df67c6b41328b5db632b898
|
/instances/shadows_of_infinity.py
|
34a98b9e889adc772c3ce344061ead7226547267
|
[
"MIT"
] |
permissive
|
frostburn/multibranch-mandelbrot
|
d1e2cc6bce6ab8f065b678fb2133bd3057b832d5
|
84e4887ffc90a5338ae448ced6f62fcf40bc11a1
|
refs/heads/master
| 2023-08-02T18:20:56.671175
| 2021-09-28T09:57:58
| 2021-09-28T09:57:58
| 287,219,716
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,926
|
py
|
import os
import sys
sys.path.insert(0, os.path.abspath('.'))
from pylab import *
from mandelbrot import mandelbrot, mandelbrot_generic, buddhabrot
import nonescaping
import classic
from coloring import red_lavender, black_multi_edge, rainbow, gray, sepia, subharmonics, creature, white_multi_edge
import color_gradients
from scipy.ndimage import gaussian_filter
def make_picture_frame(rgb, dither=1.0/256.0):
if dither:
rgb = [channel + random(channel.shape)*dither for channel in rgb]
frame = stack(rgb, axis=-1)
frame = clip(frame, 0.0, 1.0)
return frame
if __name__ == '__main__':
scale = 10
# Instagram
width, height = 108*scale, 108*scale
anti_aliasing = 2
num_samples = 1<<25
max_iter = 1<<10
min_iter = 1<<9
zoom = -1.7
rotation = -pi*0.5
x, y = -0.2, 0.001
def circle_factory(theta, delta, radius=1.0, spread=0.5, x=0.0, y=0.08):
def circle(num_samples):
phi = rand(num_samples) - rand(num_samples)
phi = theta + delta * phi
r = radius + randn(num_samples) * spread
return x + cos(phi) * r + 1j * (y + sin(phi) * r)
return circle
offset = 0.5
delta = 3.5
exposures = []
num_layers = 1
for i in range(num_layers):
sub_exposures = [
(3*i+min_iter, 3*i+max_iter, circle_factory(offset + j*2*pi/3, delta)) for j in range(3)
]
exposures.extend(sub_exposures)
def color_map(exposed):
e = exposed[0]*0.0
result = array([e, e, e])
for i in range(num_layers):
for j in range(3):
result[j] += (3*scale**2*exposed[i*3 + j] * num_samples**-0.9)**0.78
return result
image = buddhabrot(width, height, x, y, zoom, rotation, -2, 1, num_samples, exposures, color_map, anti_aliasing=anti_aliasing, bailout=1e300)
imsave("/tmp/out.png", make_picture_frame(image))
|
[
"lumi.pakkanen@gmail.com"
] |
lumi.pakkanen@gmail.com
|
5dd88af5ae5e82c13194560776281ec8a542cab7
|
93ad65a519037b2a6c9363f356a00b3e51350537
|
/djR/conf.py
|
c80052a26cce28b6dc8ddf5b5a232ed373f0e040
|
[
"MIT"
] |
permissive
|
JheanMarllos/django-R
|
6ccc9b42dbca50c803c740315fbeda136be1ad9c
|
3c1f8adfa2a16ad9cf9856e4dd7cd889e7a3c229
|
refs/heads/master
| 2020-05-30T12:42:49.200732
| 2019-06-02T11:37:49
| 2019-06-02T11:37:49
| 189,741,022
| 0
| 0
|
MIT
| 2019-06-01T14:16:22
| 2019-06-01T14:16:22
| null |
UTF-8
|
Python
| false
| false
| 418
|
py
|
# -*- coding: utf-8 -*-
from django.conf import settings
RETHINKDB_HOST = getattr(settings, 'RETHINKDB_HOST', 'localhost')
RETHINKDB_PORT = getattr(settings, 'RETHINKDB_PORT', 28015)
RETHINKDB_USER = getattr(settings, 'RETHINKDB_USER', None)
RETHINKDB_PASSWORD = getattr(settings, 'RETHINKDB_PASSWORD', None)
DEFAULT_DB = getattr(settings, 'R_DEFAULT_DB', None)
VERBOSE = getattr(settings, 'R_VERBOSE', False)
|
[
"synwe@yahoo.fr"
] |
synwe@yahoo.fr
|
9782f416c9447c9bea34e745ec11be24c68003db
|
8dbba1dc3b0a9cb3972e6fee6f41459d6fa56d78
|
/ch09/ex9-10.py
|
b644f50c93a1b0bc17981d880b07bc7f5e71550d
|
[] |
no_license
|
freebz/Foundations-for-Analytics-with-Python
|
8da8308981538266e8e982ffcd080657058144ca
|
736b2075e339a679905071b39201e6a575f59229
|
refs/heads/master
| 2020-03-15T21:53:43.100954
| 2018-05-06T18:07:59
| 2018-05-06T18:07:59
| 132,363,669
| 0
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 196
|
py
|
import numpy as np
from numpy import concatenate, vstack, r_
array_concat = np.concatenate([array1, array2], axis=0)
array_concat = np.vstack((array1, array2))
array_concat = np.r_[arry1, array2]
|
[
"freebz@hananet.net"
] |
freebz@hananet.net
|
f3409674f6082e19e2cdbb91ddc6cc1956ae779f
|
9aea1b19a8681b4c6b15d628a080982fb2d98b39
|
/mianJing111111/Google/Implement Queue using Stacks.py
|
2e144185623f255bcbf62dc1b0ca3271002fcff4
|
[] |
no_license
|
yzl232/code_training
|
ee7612efc6f166742fcf48e1af715f57a624d3aa
|
fc165027c3d7b1fec58ebfad2f9ada275a6b8c03
|
refs/heads/master
| 2021-01-21T04:32:02.522931
| 2016-07-01T21:35:29
| 2016-07-01T21:35:29
| 26,989,266
| 2
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 901
|
py
|
# encoding=utf-8
'''
In this method, in en-queue operation, the new element is entered at the top of stack1. In de-queue operation, if stack2 is empty then all the elements are moved to stack2 and finally top of stack2 is returned.
enQueue(q, x)
1) Push x to stack1 (assuming size of stacks is unlimited).
deQueue(q)
1) If both stacks are empty then error.
2) If stack2 is empty
While stack1 is not empty, push everything from satck1 to stack2.
3) Pop the element from stack2 and return it.
'''
# G家考过。
class queue:
def __init__(self):
self.stack1 = []
self.stack2 = []
def enqueue(self, x):
self.stack1.append(x)
def dequeue(self):
if not self.stack1 and not self.stack2: raise ValueError()
if not self.stack2:
while self.stack1: self.stack2.append(self.stack1.pop())
return self.stack2.pop()
|
[
"buptyuzhenglin@gmail.com"
] |
buptyuzhenglin@gmail.com
|
96697e0a1210d4821564472422964ebcc50a0e3b
|
1d928c3f90d4a0a9a3919a804597aa0a4aab19a3
|
/python/zulip/2015/12/run-dev.py
|
67b1614d07af8c64d03977bec514a539b207ca73
|
[] |
no_license
|
rosoareslv/SED99
|
d8b2ff5811e7f0ffc59be066a5a0349a92cbb845
|
a062c118f12b93172e31e8ca115ce3f871b64461
|
refs/heads/main
| 2023-02-22T21:59:02.703005
| 2021-01-28T19:40:51
| 2021-01-28T19:40:51
| 306,497,459
| 1
| 1
| null | 2020-11-24T20:56:18
| 2020-10-23T01:18:07
| null |
UTF-8
|
Python
| false
| false
| 3,852
|
py
|
#!/usr/bin/env python2.7
import optparse
import subprocess
import signal
import traceback
import sys
import os
from twisted.internet import reactor
from twisted.web import proxy, server, resource
# Monkey-patch twisted.web.http to avoid request.finish exceptions
# https://trac.zulip.net/ticket/1728
from twisted.web.http import Request
orig_finish = Request.finish
def patched_finish(self):
if self._disconnected:
return
return orig_finish(self)
Request.finish = patched_finish
if 'posix' in os.name and os.geteuid() == 0:
raise RuntimeError("run-dev.py should not be run as root.")
parser = optparse.OptionParser(r"""
Starts the app listening on localhost, for local development.
This script launches the Django and Tornado servers, then runs a reverse proxy
which serves to both of them. After it's all up and running, browse to
http://localhost:9991/
Note that, while runserver and runtornado have the usual auto-restarting
behavior, the reverse proxy itself does *not* automatically restart on changes
to this file.
""")
parser.add_option('--test',
action='store_true', dest='test',
help='Use the testing database and ports')
parser.add_option('--interface',
action='store', dest='interface',
default='127.0.0.1', help='Set the IP or hostname for the proxy to listen on')
(options, args) = parser.parse_args()
base_port = 9991
manage_args = ''
if options.test:
base_port = 9981
settings_module = "zproject.test_settings"
else:
settings_module = "zproject.settings"
manage_args = ['--settings=%s' % (settings_module,)]
os.environ['DJANGO_SETTINGS_MODULE'] = settings_module
sys.path.append(os.path.join(os.path.dirname(__file__), '..'))
proxy_port = base_port
django_port = base_port+1
tornado_port = base_port+2
webpack_port = base_port+3
os.chdir(os.path.join(os.path.dirname(__file__), '..'))
# Clean up stale .pyc files etc.
subprocess.check_call('./tools/clean-repo')
# Set up a new process group, so that we can later kill run{server,tornado}
# and all of the processes they spawn.
os.setpgrp()
# Pass --nostatic because we configure static serving ourselves in
# zulip/urls.py.
cmds = [['./tools/compile-handlebars-templates', 'forever'],
['./tools/webpack', 'watch'],
['python', 'manage.py', 'rundjango'] +
manage_args + ['localhost:%d' % (django_port,)],
['python', 'manage.py', 'runtornado'] +
manage_args + ['localhost:%d' % (tornado_port,)],
['./tools/run-dev-queue-processors'] + manage_args,
['env', 'PGHOST=localhost', # Force password authentication using .pgpass
'./puppet/zulip/files/postgresql/process_fts_updates']]
for cmd in cmds:
subprocess.Popen(cmd)
class Resource(resource.Resource):
def getChild(self, name, request):
# Assume an HTTP 1.1 request
proxy_host = request.requestHeaders.getRawHeaders('Host')
request.requestHeaders.setRawHeaders('X-Forwarded-Host', proxy_host)
if (request.uri in ['/json/get_events'] or
request.uri.startswith('/json/events') or
request.uri.startswith('/api/v1/events') or
request.uri.startswith('/sockjs')):
return proxy.ReverseProxyResource('localhost', tornado_port, '/'+name)
elif (request.uri.startswith('/webpack') or
request.uri.startswith('/socket.io')):
return proxy.ReverseProxyResource('localhost', webpack_port, '/'+name)
return proxy.ReverseProxyResource('localhost', django_port, '/'+name)
try:
reactor.listenTCP(proxy_port, server.Site(Resource()), interface=options.interface)
reactor.run()
except:
# Print the traceback before we get SIGTERM and die.
traceback.print_exc()
raise
finally:
# Kill everything in our process group.
os.killpg(0, signal.SIGTERM)
|
[
"rodrigosoaresilva@gmail.com"
] |
rodrigosoaresilva@gmail.com
|
b4b48833b14eeae1819479c4994e066e45300d1c
|
d0dccd8b1c31c0256dca3472719acab561661aa9
|
/events/views.py
|
8f52985d56e7d9d48486c2516ac1ab2f8b850635
|
[] |
no_license
|
cjredmond/GrouperApp
|
5fe97271bc275e570d2e3565c2bb5233ce34a79d
|
aba431c7def9173150e24686dbbb87685d25ed24
|
refs/heads/master
| 2020-03-19T21:43:12.609648
| 2018-06-29T16:17:10
| 2018-06-29T16:17:10
| 136,947,937
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 636
|
py
|
from django.shortcuts import render
from django.views.generic import *
from django.views.generic.edit import *
from django.contrib.auth import get_user_model
from django.urls import reverse
from .models import Event
from .forms import EventCreateForm
from group.models import Entity
class EventCreateView(CreateView):
model = Event
form_class = EventCreateForm
def form_valid(self,form,**kwargs):
instance = form.save(commit=False)
instance.entity = Entity.objects.get(slug=self.kwargs['slug'])
return super().form_valid(form)
def get_success_url(self):
return reverse('landing_view')
|
[
"connor.redmond@gmail.com"
] |
connor.redmond@gmail.com
|
06072e8a7f63c5d0535c8e97d3d3590ec3ef64bc
|
9743d5fd24822f79c156ad112229e25adb9ed6f6
|
/xai/brain/wordbase/nouns/_backup.py
|
9eff4b2688d253805fa8fdb41115f611870c9543
|
[
"MIT"
] |
permissive
|
cash2one/xai
|
de7adad1758f50dd6786bf0111e71a903f039b64
|
e76f12c9f4dcf3ac1c7c08b0cc8844c0b0a104b6
|
refs/heads/master
| 2021-01-19T12:33:54.964379
| 2017-01-28T02:00:50
| 2017-01-28T02:00:50
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 588
|
py
|
#calss header
class _BACKUP():
def __init__(self,):
self.name = "BACKUP"
self.definitions = [u'(someone or something that provides) support or help, or something that you have arranged in case your main plans, equipment, etc. go wrong: ', u'a copy of information held on a computer that is stored separately from the computer: ', u'a player who plays when the person who usually plays is not available: ']
self.parents = []
self.childen = []
self.properties = []
self.jsondata = {}
self.specie = 'nouns'
def run(self, obj1 = [], obj2 = []):
return self.jsondata
|
[
"xingwang1991@gmail.com"
] |
xingwang1991@gmail.com
|
8fdf3accccfac6904b4799b77cccadf2bfc83862
|
42516b0348936e257d04113c2e632dc72ba58e91
|
/test_env/test_suit_ui_file_explorer/test_suit_ui_file_explorer_case06.py
|
d446ea461909499285fd138a157ee129cd48ee84
|
[] |
no_license
|
wwlwwlqaz/Qualcomm
|
2c3a225875fba955d771101f3c38ca0420d8f468
|
a04b717ae437511abae1e7e9e399373c161a7b65
|
refs/heads/master
| 2021-01-11T19:01:06.123677
| 2017-04-05T07:57:21
| 2017-04-05T07:57:21
| 79,292,426
| 1
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 4,124
|
py
|
#coding=utf-8
import settings.common as SC
from test_case_base import TestCaseBase
from logging_wrapper import log_test_case, take_screenshot
from test_case_base import TestCaseBase
from qrd_shared.case import *
import fs_wrapper
from case_utility import *
import settings.common as SC
from utility_wrapper import *
############################################
#author:
# huitingn@qualcomm.com.cn
#function:
# copy and paste items cross folder in FileExplorer
#precondition:
#
#step:
# 1.goto FileExplorer;
# if not, goto step4
# 2.try to create a new folder;
# if not, goto step4
# 3.confirm whether new floder is created correctly;
# if not, goto step4
# 4.exit to end case
############################################
import os,re,string,subprocess,shlex
from test_suit_ui_file_explorer import *
class test_suit_ui_file_explorer_case06(TestCaseBase):
tag = 'ui_file_explorer_case06'
def test_case_main(self, case_results):
case_flag = False
#
# STEP 1: goto work_dir in FileExplorer
#
work_dir = '/Phone storage/DCIM/Camera'
number = preprocess(self.tag,work_dir,floor=3)
goto_dir(work_dir,'Folder')
#
# STEP 2: choose items to copy
#
try:
(index_list,num1) = random_index_list_in_folder(work_dir,'.jpg')
log_test_case(self.tag,"num1=%s want to copy %s photos"%(str(num1),str(len(index_list)+1)))
first_name = get_view_text_by_id(VIEW_TEXT_VIEW,'text')
click_textview_by_id('text',waitForView=1, clickType=LONG_CLICK)
name_list = []
for i in range(len(index_list)):
click_textview_by_index(index_list[i])
name_list.append(get_view_text_by_index(VIEW_TEXT_VIEW,index_list[i]))
name_list.insert(0, first_name)
click_textview_by_desc('Copy',isScrollable=0)
except:
take_screenshot()
cur_path = get_view_text_by_index(VIEW_TEXT_VIEW,0)
log_test_case(self.tag, "during COPY: something wrong, maybe no item in " + cur_path)
set_cannot_continue()
#
# STEP 3: goto destination in FileExplorer
#
if can_continue():
destination = '/Phone storage/Download'
goto_dir(destination,'Folder',go_from_home_screen=False)
#
# STEP 4: copy items to destination
#
if can_continue():
try:
click_button_by_text('Paste',waitForView=1)
except:
take_screenshot()
cur_path = get_view_text_by_index(VIEW_TEXT_VIEW,0)
log_test_case(self.tag, "during COPY: no 'Paste' in " + cur_path)
set_cannot_continue()
# check
if can_continue():
goto_dir(destination,'Folder',go_from_home_screen=True)
cur_path = get_view_text_by_index(VIEW_TEXT_VIEW,0)
flag = True
for i in range(len(name_list)):
if search_text('%s'%name_list[i],searchFlag=TEXT_MATCHES_REGEX):
try:scroll_to_top()
except:pass
continue
else:
flag = False
break
if flag is True:
case_flag = True
else:
log_test_case(self.tag, "failed copy %s"%name_list[i] +'in '+ cur_path)
#
# STEP 5: exit
#
exit_cur_case(self.tag)
log_test_case(self.tag, "case_flag = "+str(case_flag))
if case_flag:
qsst_log_case_status(STATUS_SUCCESS, "" , SEVERITY_HIGH)
else:
qsst_log_case_status(STATUS_FAILED, "copy and paste items cross folder is failed", SEVERITY_HIGH)
case_results.append((self.case_config_map[fs_wrapper.CASE_NAME_ATTR], can_continue()))
|
[
"c_wwan@qti.qualcomm.com"
] |
c_wwan@qti.qualcomm.com
|
fc7d7b27a526a43db9c9b511ae29a4442acf81d4
|
0fb2e09c0629cf47045881d7eecc125f674230e5
|
/pps_webapp/main/views.py
|
bf6d96c2869c358792ae6771da7c09201b547904
|
[] |
no_license
|
satwik77/phenopacket-scraper-webapp
|
ea24ad2cc2fbd988e12df1178be5ba940c8a9859
|
4382c2a4e501448e7bfd68c7826a3c4c5ab39a26
|
refs/heads/master
| 2021-01-17T09:33:07.188192
| 2016-08-23T17:24:20
| 2016-08-23T17:24:20
| 61,695,575
| 0
| 0
| null | 2016-06-22T06:45:50
| 2016-06-22T06:45:50
| null |
UTF-8
|
Python
| false
| false
| 1,984
|
py
|
from django.shortcuts import get_object_or_404, render_to_response, redirect
from django.shortcuts import render
from django.contrib import auth
from django.http import HttpResponse
from django.views.decorators.csrf import csrf_exempt
import requests
import pprint
api_url= 'http://localhost:8001/api/'
@csrf_exempt
def home(request):
if request.POST:
choice = str(request.POST['choice'])
url = str(request.POST['url'])
data = ""
if choice == '1':
get_data={'url' : str(url)}
response = requests.get(api_url+ 'scrape', params = get_data)
if response.status_code == 200:
response_data = response.json()
abstract = response_data['Abstract']
title = str(response_data['Title'])
hpo_terms = response_data['HPO Terms']
data+= "Title:\n" + title + "\n"
data+="Abstract:\n" + abstract + "\n"
data+="HPO Terms:\n"
for term in hpo_terms:
data += str(term) + "\n"
if choice == '2':
get_data={'url' : str(url)}
response = requests.get(api_url+ 'annotate', params = get_data)
if response.status_code == 200:
response_data = response.json()
data = {}
data["annotated_terms"] = response_data['Annotated HPO Terms']
data["annotated_abstract"] = response_data['Annotated Abstract']
data= pprint.pformat(data, indent=4)
if choice == '3':
get_data={'url' : str(url)}
response = requests.get(api_url+ 'phenopacket', params = get_data)
if response.status_code == 200:
response_data = response.json()
phenopacket = response_data['phenopacket']
data = phenopacket
return HttpResponse(data)
return render(request, 'main/index.html')
|
[
"satwik55@gmail.com"
] |
satwik55@gmail.com
|
31a0c3c321b124e25d22c7584aa8ccbc4ed0ae04
|
c7a6f8ed434c86b4cdae9c6144b9dd557e594f78
|
/ECE364/.PyCharm40/system/python_stubs/348993582/PyQt4/QtNetwork/__init__.py
|
499f277c4fd5601ad24160f4fb960e5e5fc2f65f
|
[] |
no_license
|
ArbalestV/Purdue-Coursework
|
75d979bbe72106975812b1d46b7d854e16e8e15e
|
ee7f86145edb41c17aefcd442fa42353a9e1b5d1
|
refs/heads/master
| 2020-08-29T05:27:52.342264
| 2018-04-03T17:59:01
| 2018-04-03T17:59:01
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,637
|
py
|
# encoding: utf-8
# module PyQt4.QtNetwork
# from /usr/lib64/python2.6/site-packages/PyQt4/QtNetwork.so
# by generator 1.136
# no doc
# imports
import PyQt4.QtCore as __PyQt4_QtCore
# no functions
# classes
from QAbstractNetworkCache import QAbstractNetworkCache
from QAbstractSocket import QAbstractSocket
from QAuthenticator import QAuthenticator
from QFtp import QFtp
from QHostAddress import QHostAddress
from QHostInfo import QHostInfo
from QHttp import QHttp
from QHttpHeader import QHttpHeader
from QHttpRequestHeader import QHttpRequestHeader
from QHttpResponseHeader import QHttpResponseHeader
from QLocalServer import QLocalServer
from QLocalSocket import QLocalSocket
from QNetworkAccessManager import QNetworkAccessManager
from QNetworkAddressEntry import QNetworkAddressEntry
from QNetworkCacheMetaData import QNetworkCacheMetaData
from QNetworkCookie import QNetworkCookie
from QNetworkCookieJar import QNetworkCookieJar
from QNetworkDiskCache import QNetworkDiskCache
from QNetworkInterface import QNetworkInterface
from QNetworkProxy import QNetworkProxy
from QNetworkProxyFactory import QNetworkProxyFactory
from QNetworkProxyQuery import QNetworkProxyQuery
from QNetworkReply import QNetworkReply
from QNetworkRequest import QNetworkRequest
from QSsl import QSsl
from QSslCertificate import QSslCertificate
from QSslCipher import QSslCipher
from QSslConfiguration import QSslConfiguration
from QSslError import QSslError
from QSslKey import QSslKey
from QTcpSocket import QTcpSocket
from QSslSocket import QSslSocket
from QTcpServer import QTcpServer
from QUdpSocket import QUdpSocket
from QUrlInfo import QUrlInfo
|
[
"pkalita@princeton.edu"
] |
pkalita@princeton.edu
|
4d4ecc1e1bddc6ac36317f8f1c3f8dc07d77ef43
|
8e79de4b73998dd0ee1dae4881784a2b12410615
|
/Bite_83/test_timezone.py
|
48b2574919d1328895aa94b5cdc1f6966ae66c3b
|
[
"MIT"
] |
permissive
|
alehpineda/bitesofpy
|
e6eb7c9413cf407a12643efece01bef5457e5dcb
|
bfd319a606cd0b7b9bfb85a3e8942872a2d43c48
|
refs/heads/master
| 2021-07-15T19:59:35.061049
| 2020-09-25T17:49:32
| 2020-09-25T17:49:32
| 209,878,791
| 0
| 0
|
MIT
| 2020-09-06T00:11:45
| 2019-09-20T20:49:51
|
Python
|
UTF-8
|
Python
| false
| false
| 1,060
|
py
|
from datetime import datetime
from timezone import what_time_lives_pybites
def test_what_time_lives_pybites_spanish_summertime():
# AUS is 8 hours ahead of ES
naive_utc_dt = datetime(2018, 4, 27, 22, 55, 0)
aus_dt, es_dt = what_time_lives_pybites(naive_utc_dt)
assert aus_dt.year == 2018
assert aus_dt.month == 4
assert aus_dt.day == 28
assert aus_dt.hour == 8
assert aus_dt.minute == 55
assert es_dt.year == 2018
assert es_dt.month == 4
assert es_dt.day == 28
assert es_dt.hour == 0
assert es_dt.minute == 55
def test_what_time_lives_pybites_spanish_wintertime():
# AUS is 10 hours ahead of ES
naive_utc_dt = datetime(2018, 11, 1, 14, 10, 0)
aus_dt, es_dt = what_time_lives_pybites(naive_utc_dt)
assert aus_dt.year == 2018
assert aus_dt.month == 11
assert aus_dt.day == 2
assert aus_dt.hour == 1
assert aus_dt.minute == 10
assert es_dt.year == 2018
assert es_dt.month == 11
assert es_dt.day == 1
assert es_dt.hour == 15
assert es_dt.minute == 10
|
[
"ale.hpineda@gmail.com"
] |
ale.hpineda@gmail.com
|
5d396f8a619172ddd3f61c1c285aedc696426ca7
|
ca7aa979e7059467e158830b76673f5b77a0f5a3
|
/Python_codes/p03001/s613586641.py
|
5dadc4795c529eb1e7ffe05c54da04dc2de9168e
|
[] |
no_license
|
Aasthaengg/IBMdataset
|
7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901
|
f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8
|
refs/heads/main
| 2023-04-22T10:22:44.763102
| 2021-05-13T17:27:22
| 2021-05-13T17:27:22
| 367,112,348
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 162
|
py
|
import sys
sys.setrecursionlimit(10**6)
w, h, x, y = map(int, input().split())
ans1 = w*h/2
ans2 = 0
if x == w/2 and y == h/2:
ans2 = 1
print(ans1, ans2)
|
[
"66529651+Aastha2104@users.noreply.github.com"
] |
66529651+Aastha2104@users.noreply.github.com
|
2ca6dd6d9e283d56848cb08dedccbc18699489cf
|
f40c65a649206261d7255eb3132ea67963f13a17
|
/src/wait.py
|
d830074cea069e62e268c87b9f7ee5afbff4750b
|
[
"MIT"
] |
permissive
|
fcurella/gh-status-check
|
2ec47ca212bfe471cda97d1ae0c1ee41f16420e3
|
1fdb5f7be1dcdb9f2338839ad55ad7c9188b159b
|
refs/heads/main
| 2022-12-22T18:35:00.543508
| 2020-10-01T17:46:35
| 2020-10-01T17:46:35
| 297,731,570
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,852
|
py
|
import asyncio
import logging
import os
import sys
import aiohttp
from asgiref.sync import sync_to_async
from github import Github
REPOSITORY = os.environ["GITHUB_REPOSITORY"]
SHA = os.environ["GITHUB_SHA"]
EVENT = os.environ["GITHUB_EVENT_NAME"]
EVENT_PATH = os.environ["GITHUB_EVENT_PATH"]
TOKEN = os.environ["INPUT_GITHUBTOKEN"]
IGNORECONTEXTS = os.environ["INPUT_IGNORECONTEXTS"].split(',')
IGNOREACTIONS = os.environ["INPUT_IGNOREACTIONS"].split(',')
INTERVAL = float(os.environ["INPUT_CHECKINTERVAL"])
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)
async def poll_checks(session, repo, ref):
headers = {
"Content-Type": "application/vnd.github.antiope-preview+json",
"Authorization": f"token {TOKEN}",
}
url = f"https://api.github.com/repos/{repo}/commits/{ref}/check-runs"
async with session.get(url, headers=headers, raise_for_status=True) as resp:
data = await resp.json()
check_runs = [
check_run for check_run in data["check_runs"]
if check_run["name"] not in IGNOREACTIONS
]
logger.info(
"Checking %s actions: %s",
len(check_runs),
", ".join([check_run["name"] for check_run in check_runs])
)
for check_run in check_runs:
name, status = check_run["name"], check_run["status"]
logger.info("%s: %s", name, status)
if status != "completed":
return False
return True
async def poll_statuses(commit):
combined_status = await sync_to_async(commit.get_combined_status)()
statuses = [
status for status in combined_status.statuses
if status.context not in IGNORECONTEXTS
]
logger.info(
"Checking %s statuses: %s",
len(statuses),
", ".join([status.context for status in statuses])
)
for status in statuses:
context, state = status.context, status.state
logger.info("%s: %s", context, state)
if state != "success":
return False
return True
async def main():
g = Github(TOKEN)
repo = await sync_to_async(g.get_repo)(REPOSITORY)
commit = await sync_to_async(repo.get_commit)(sha=SHA)
results = [False, False]
async with aiohttp.ClientSession() as session:
while False in results:
results = await asyncio.gather(
poll_statuses(commit),
poll_checks(session, REPOSITORY, SHA),
return_exceptions=False,
)
if False in results:
logger.info("Checking again in %s seconds", INTERVAL)
await asyncio.sleep(INTERVAL)
return results
if __name__ == "__main__":
try:
asyncio.run(main())
print("::set-output name=status::success")
except:
print("::set-output name=status::failure")
raise
|
[
"flavio.curella@gmail.com"
] |
flavio.curella@gmail.com
|
d7804ea8b7f2ecc0bd38927b3992aa58daadc478
|
af9b7a00b55aac5eaed58592cf8a9d69e659a076
|
/learning_log/learning_logs/forms.py
|
da3fa007a63b1f3e2886d67e9cb7c2ee946bc820
|
[] |
no_license
|
dujiaojingyu/Django-Practice
|
bc246d2283a8f994567756b4e391ea167359620b
|
cab5db123eb97bd424a84fae24629cc0e1be4652
|
refs/heads/master
| 2020-03-25T17:33:25.597885
| 2018-08-08T08:38:09
| 2018-08-08T08:38:09
| 143,983,344
| 2
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 204
|
py
|
__author__ = "Narwhale"
from django import forms
from .models import Topic
class TopicForm(forms.ModelForm):
class Meta:
model = Topic
fields= ['text']
labels = {'text':''}
|
[
"34296128+dujiaojingyu@users.noreply.github.com"
] |
34296128+dujiaojingyu@users.noreply.github.com
|
8af1f2b9b43cf26c7d092f16479f3b479eed5d23
|
90f52d0348aa0f82dc1f9013faeb7041c8f04cf8
|
/wxPython3.0 Docs and Demos/wxPython/samples/wxPIA_book/Chapter-10/popupmenu.py
|
5226849ca7224afab2ef1c1e69a3aae5158a74d5
|
[] |
no_license
|
resource-jason-org/python-wxPythonTool
|
93a25ad93c768ca8b69ba783543cddf7deaf396b
|
fab6ec3155e6c1ae08ea30a23310006a32d08c36
|
refs/heads/master
| 2021-06-15T10:58:35.924543
| 2017-04-14T03:39:27
| 2017-04-14T03:39:27
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,388
|
py
|
import wx
class MyFrame(wx.Frame):
def __init__(self):
wx.Frame.__init__(self, None, -1,
"Popup Menu Example")
self.panel = p = wx.Panel(self)
menu = wx.Menu()
exit = menu.Append(-1, "Exit")
self.Bind(wx.EVT_MENU, self.OnExit, exit)
menuBar = wx.MenuBar()
menuBar.Append(menu, "Menu")
self.SetMenuBar(menuBar)
wx.StaticText(p, -1,
"Right-click on the panel to show a popup menu",
(25,25))
self.popupmenu = wx.Menu()
for text in "one two three four five".split():
item = self.popupmenu.Append(-1, text)
self.Bind(wx.EVT_MENU, self.OnPopupItemSelected, item)
p.Bind(wx.EVT_CONTEXT_MENU, self.OnShowPopup)
def OnShowPopup(self, event):
pos = event.GetPosition()
pos = self.panel.ScreenToClient(pos)
self.panel.PopupMenu(self.popupmenu, pos)
def OnPopupItemSelected(self, event):
item = self.popupmenu.FindItemById(event.GetId())
text = item.GetText()
wx.MessageBox("You selected item '%s'" % text)
def OnExit(self, event):
self.Close()
if __name__ == "__main__":
app = wx.App()
frame = MyFrame()
frame.Show()
app.MainLoop()
|
[
"869999860@qq.com"
] |
869999860@qq.com
|
0709557c1f679fa1a41d7157bfe2c991f6adadfc
|
85a9ffeccb64f6159adbd164ff98edf4ac315e33
|
/pysnmp/NTWS-AP-IF-MIB.py
|
d9da09616db8ef8ddc0d2db88e651ab9fd3c63d5
|
[
"Apache-2.0"
] |
permissive
|
agustinhenze/mibs.snmplabs.com
|
5d7d5d4da84424c5f5a1ed2752f5043ae00019fb
|
1fc5c07860542b89212f4c8ab807057d9a9206c7
|
refs/heads/master
| 2020-12-26T12:41:41.132395
| 2019-08-16T15:51:41
| 2019-08-16T15:53:57
| 237,512,469
| 0
| 0
|
Apache-2.0
| 2020-01-31T20:41:36
| 2020-01-31T20:41:35
| null |
UTF-8
|
Python
| false
| false
| 5,408
|
py
|
#
# PySNMP MIB module NTWS-AP-IF-MIB (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/NTWS-AP-IF-MIB
# Produced by pysmi-0.3.4 at Mon Apr 29 20:16:00 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
OctetString, ObjectIdentifier, Integer = mibBuilder.importSymbols("ASN1", "OctetString", "ObjectIdentifier", "Integer")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
ConstraintsUnion, ValueRangeConstraint, ConstraintsIntersection, SingleValueConstraint, ValueSizeConstraint = mibBuilder.importSymbols("ASN1-REFINEMENT", "ConstraintsUnion", "ValueRangeConstraint", "ConstraintsIntersection", "SingleValueConstraint", "ValueSizeConstraint")
IANAifType, = mibBuilder.importSymbols("IANAifType-MIB", "IANAifType")
NtwsApSerialNum, = mibBuilder.importSymbols("NTWS-AP-TC", "NtwsApSerialNum")
ntwsMibs, = mibBuilder.importSymbols("NTWS-ROOT-MIB", "ntwsMibs")
ObjectGroup, NotificationGroup, ModuleCompliance = mibBuilder.importSymbols("SNMPv2-CONF", "ObjectGroup", "NotificationGroup", "ModuleCompliance")
Counter64, IpAddress, iso, Bits, Integer32, TimeTicks, Counter32, ObjectIdentity, ModuleIdentity, MibIdentifier, Unsigned32, MibScalar, MibTable, MibTableRow, MibTableColumn, Gauge32, NotificationType = mibBuilder.importSymbols("SNMPv2-SMI", "Counter64", "IpAddress", "iso", "Bits", "Integer32", "TimeTicks", "Counter32", "ObjectIdentity", "ModuleIdentity", "MibIdentifier", "Unsigned32", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "Gauge32", "NotificationType")
MacAddress, TextualConvention, DisplayString = mibBuilder.importSymbols("SNMPv2-TC", "MacAddress", "TextualConvention", "DisplayString")
ntwsApIfMib = ModuleIdentity((1, 3, 6, 1, 4, 1, 45, 6, 1, 4, 16))
ntwsApIfMib.setRevisions(('2008-11-20 00:01',))
if mibBuilder.loadTexts: ntwsApIfMib.setLastUpdated('200811200001Z')
if mibBuilder.loadTexts: ntwsApIfMib.setOrganization('Nortel Networks')
class NtwsApInterfaceIndex(TextualConvention, Unsigned32):
status = 'current'
displayHint = 'd'
subtypeSpec = Unsigned32.subtypeSpec + ValueRangeConstraint(1, 1024)
ntwsApIfMibObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 45, 6, 1, 4, 16, 1))
ntwsApIfTable = MibTable((1, 3, 6, 1, 4, 1, 45, 6, 1, 4, 16, 1, 1), )
if mibBuilder.loadTexts: ntwsApIfTable.setStatus('current')
ntwsApIfEntry = MibTableRow((1, 3, 6, 1, 4, 1, 45, 6, 1, 4, 16, 1, 1, 1), ).setIndexNames((0, "NTWS-AP-IF-MIB", "ntwsApIfApSerialNum"), (0, "NTWS-AP-IF-MIB", "ntwsApIfIndex"))
if mibBuilder.loadTexts: ntwsApIfEntry.setStatus('current')
ntwsApIfApSerialNum = MibTableColumn((1, 3, 6, 1, 4, 1, 45, 6, 1, 4, 16, 1, 1, 1, 1), NtwsApSerialNum())
if mibBuilder.loadTexts: ntwsApIfApSerialNum.setStatus('current')
ntwsApIfIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 45, 6, 1, 4, 16, 1, 1, 1, 2), NtwsApInterfaceIndex())
if mibBuilder.loadTexts: ntwsApIfIndex.setStatus('current')
ntwsApIfName = MibTableColumn((1, 3, 6, 1, 4, 1, 45, 6, 1, 4, 16, 1, 1, 1, 3), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ntwsApIfName.setStatus('current')
ntwsApIfType = MibTableColumn((1, 3, 6, 1, 4, 1, 45, 6, 1, 4, 16, 1, 1, 1, 4), IANAifType()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ntwsApIfType.setStatus('current')
ntwsApIfMtu = MibTableColumn((1, 3, 6, 1, 4, 1, 45, 6, 1, 4, 16, 1, 1, 1, 5), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ntwsApIfMtu.setStatus('current')
ntwsApIfHighSpeed = MibTableColumn((1, 3, 6, 1, 4, 1, 45, 6, 1, 4, 16, 1, 1, 1, 6), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ntwsApIfHighSpeed.setStatus('current')
ntwsApIfMac = MibTableColumn((1, 3, 6, 1, 4, 1, 45, 6, 1, 4, 16, 1, 1, 1, 7), MacAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ntwsApIfMac.setStatus('current')
ntwsApIfConformance = MibIdentifier((1, 3, 6, 1, 4, 1, 45, 6, 1, 4, 16, 2))
ntwsApIfCompliances = MibIdentifier((1, 3, 6, 1, 4, 1, 45, 6, 1, 4, 16, 2, 1))
ntwsApIfGroups = MibIdentifier((1, 3, 6, 1, 4, 1, 45, 6, 1, 4, 16, 2, 2))
ntwsApIfCompliance = ModuleCompliance((1, 3, 6, 1, 4, 1, 45, 6, 1, 4, 16, 2, 1, 1)).setObjects(("NTWS-AP-IF-MIB", "ntwsApIfBasicGroup"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
ntwsApIfCompliance = ntwsApIfCompliance.setStatus('current')
ntwsApIfBasicGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 45, 6, 1, 4, 16, 2, 2, 1)).setObjects(("NTWS-AP-IF-MIB", "ntwsApIfName"), ("NTWS-AP-IF-MIB", "ntwsApIfType"), ("NTWS-AP-IF-MIB", "ntwsApIfMtu"), ("NTWS-AP-IF-MIB", "ntwsApIfHighSpeed"), ("NTWS-AP-IF-MIB", "ntwsApIfMac"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
ntwsApIfBasicGroup = ntwsApIfBasicGroup.setStatus('current')
mibBuilder.exportSymbols("NTWS-AP-IF-MIB", ntwsApIfApSerialNum=ntwsApIfApSerialNum, ntwsApIfConformance=ntwsApIfConformance, ntwsApIfCompliance=ntwsApIfCompliance, PYSNMP_MODULE_ID=ntwsApIfMib, ntwsApIfName=ntwsApIfName, ntwsApIfMib=ntwsApIfMib, ntwsApIfHighSpeed=ntwsApIfHighSpeed, NtwsApInterfaceIndex=NtwsApInterfaceIndex, ntwsApIfBasicGroup=ntwsApIfBasicGroup, ntwsApIfEntry=ntwsApIfEntry, ntwsApIfMac=ntwsApIfMac, ntwsApIfIndex=ntwsApIfIndex, ntwsApIfMtu=ntwsApIfMtu, ntwsApIfType=ntwsApIfType, ntwsApIfTable=ntwsApIfTable, ntwsApIfCompliances=ntwsApIfCompliances, ntwsApIfMibObjects=ntwsApIfMibObjects, ntwsApIfGroups=ntwsApIfGroups)
|
[
"dcwangmit01@gmail.com"
] |
dcwangmit01@gmail.com
|
fbe0979bb9bfd1111ac0cd12f14a2aecde30e551
|
892266713e500efa5ac04e1b8de812200410c956
|
/devset.py
|
cd8b6e2d344c504aedbc001fde9be6ebc8fc85de
|
[
"BSD-2-Clause"
] |
permissive
|
martinphellwig/django-g11n
|
972eb95128637ec0b21efabad6b40ba02c30356c
|
94eb9da7d7027061873cd44356fdf3378cdb3820
|
refs/heads/master
| 2020-08-29T12:24:04.687019
| 2016-10-10T15:54:32
| 2016-10-10T15:54:32
| 218,030,322
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,315
|
py
|
#! /usr/bin/env python
"""
Developer Reset.
"""
import os
APP = 'django_g11n'
DIR = os.path.dirname(os.path.abspath(__file__))
def get_last_migration_file():
"Fetch the latest migration file."
_ = os.path.join(DIR, APP, 'migrations')
_ = [os.path.join(_, item) for item in os.listdir(_) if not item.startswith('_')]
_.sort()
if len(_) > 0:
return _[-1]
else:
return None
def modify_migration():
"Modify migration, add pylint disable line."
path = get_last_migration_file()
if path is None:
return
text = '# pylint: disable=invalid-name, missing-docstring, line-too-long\n'
with open(path, 'r+') as file_open:
data = file_open.readlines()
data.insert(1, text)
file_open.seek(0)
file_open.write(''.join(data))
def execute_shell(command, prefix='python manage.py', pipe=None):
"Execute shell python manage.py"
import subprocess
cmd = prefix + ' ' + command
if pipe is not None:
cmd = pipe + ' | ' + cmd
subprocess.call(cmd, shell=True)
def add_superuser(username, password):
"Add superuser"
from django.contrib.auth.models import User
user = User(username=username)
user.set_password(password)
user.is_superuser = True
user.is_staff = True
user.save()
return user
def remove_db():
"remove the db if it exists"
_ = os.path.join(DIR, 'db.sqlite3')
if os.path.exists(_):
os.remove(_)
def remove_last_migration():
"remove last migration file."
_ = get_last_migration_file()
if _ is not None:
os.remove(_)
def add_migrations():
"set up the new migrations and migrate"
execute_shell('makemigrations ' + APP)
execute_shell('makemigrations')
execute_shell('migrate')
modify_migration()
def main():
"Executed when this is the interface module"
remove_db()
remove_last_migration()
add_migrations()
#
# This will run a shell which imports this file as a module, this means
# we can execute things in a Django environment.
execute_shell('shell', pipe='echo "import devset"')
#
execute_shell('runserver')
def as_module():
"Executed when this is imported."
add_superuser('admin', 'admin')
if __name__ == '__main__':
main()
else:
as_module()
|
[
"martin@localhost"
] |
martin@localhost
|
7db90b76ad8b3755f314e61da0f7b4ddf29bd341
|
ce196aba0adde47ea2767eae1d7983a1ef548bb8
|
/lambda单行表达式_0.py
|
dd751c9cc43d2361811c60ac8ee87e8da1b77fb7
|
[] |
no_license
|
xiang-daode/Python3_codes
|
5d2639ffd5d65065b98d029e79b8f3608a37cf0b
|
06c64f85ce2c299aef7f9311e9473e0203a05b09
|
refs/heads/main
| 2023-08-30T14:59:55.123128
| 2021-11-03T05:12:24
| 2021-11-03T05:12:24
| 333,632,892
| 0
| 2
| null | null | null | null |
GB18030
|
Python
| false
| false
| 500
|
py
|
#!/usr/bin/python
# -*- coding: cp936 -*-
b= [x for x in range(2,100) if not[y for y in range(2,int(x**0.5)) if not x%y]]
print("100以内的全部质数是:",b)
c= [y for y in range(2,36)]
print('2--35全部输出',c)
b= [x for x in range(2,24) if True]
print('2--23全部输出',b)
d= [x for x in range(2,24) if False]
print('无返回: ',d)
d= [x for x in range(1,25) if x%2]
print('奇数有:',d)
d= [x for x in range(1,25) if not x%5]
print('5的倍数有:',d)
|
[
"noreply@github.com"
] |
xiang-daode.noreply@github.com
|
748b1a4c649433f18bc779c59fa3d4da540bf330
|
bd185738ea6a74d1e76d9fc9d8cbc59f94990842
|
/onadata/libs/pagination.py
|
f3aaf30a3bad15075443aa054f66f133a9d41638
|
[
"BSD-2-Clause"
] |
permissive
|
aondiaye/myhelpline
|
c4ad9e812b3a13c6c3c8bc65028a3d3567fd6a98
|
d72120ee31b6713cbaec79f299f5ee8bcb7ea429
|
refs/heads/master
| 2020-12-22T05:32:59.576519
| 2019-10-29T08:52:55
| 2019-10-29T08:52:55
| 236,683,448
| 1
| 0
|
NOASSERTION
| 2020-01-28T07:50:18
| 2020-01-28T07:50:17
| null |
UTF-8
|
Python
| false
| false
| 206
|
py
|
from rest_framework.pagination import PageNumberPagination
class StandardPageNumberPagination(PageNumberPagination):
page_size = 1000
page_size_query_param = 'page_size'
max_page_size = 10000
|
[
"patrickmithamo@gmail.com"
] |
patrickmithamo@gmail.com
|
42533e87831e34941babde24267e52e9219a54f1
|
6fa13067f1f5f50a48f7a535184c8abfb0334012
|
/old/fall2019/lecture8/sqlite_example2.py
|
e6d430e24ea7aacf5ae9ecffb2af1c1309060823
|
[] |
no_license
|
mkzia/eas503
|
89193b889c39fc5dbc81217e1c6c3d2581b6929d
|
4d7b548cc7fa8e938842d390f3df710c23d5f8fb
|
refs/heads/master
| 2023-09-04T06:56:49.796298
| 2023-09-01T02:05:16
| 2023-09-01T02:05:16
| 205,002,120
| 70
| 123
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,280
|
py
|
import os
import sqlite3
from sqlite3 import Error
def create_connection(db_file):
conn = None
try:
conn = sqlite3.connect(db_file)
conn.execute("PRAGMA foreign_keys = 1")
except Error as e:
print(e)
return conn
def create_table(conn, create_table_sql):
try:
c = conn.cursor()
c.execute(create_table_sql)
except Error as e:
print(e)
def insert_depts(conn, values):
sql = ''' INSERT INTO Departments(DepartmentName)
VALUES(?) '''
cur = conn.cursor()
cur.execute(sql, values)
return cur.lastrowid
def insert_student(conn, values):
sql = ''' INSERT INTO Students(StudentName, DepartmentId, DateOfBirth)
VALUES(?,?,?) '''
cur = conn.cursor()
cur.execute(sql, values)
return cur.lastrowid
def select_all_students(conn):
cur = conn.cursor()
cur.execute("""SELECT * FROM Students INNER JOIN Departments USING(DepartmentId);""")
rows = cur.fetchall()
for row in rows:
print(row)
return rows
db_file = 'sample_data_py.db'
if os.path.exists(db_file):
os.remove(db_file)
create_table_departments_sql = """ CREATE TABLE [Departments] (
[DepartmentId] INTEGER NOT NULL PRIMARY KEY,
[DepartmentName] NVARCHAR(50) NULL
); """
create_table_students_sql = """ CREATE TABLE [Students] (
[StudentId] INTEGER PRIMARY KEY NOT NULL,
[StudentName] NVARCHAR(50) NOT NULL,
[DepartmentId] INTEGER NULL,
[DateOfBirth] DATE NULL,
FOREIGN KEY(DepartmentId) REFERENCES Departments(DepartmentId)
); """
conn = create_connection(db_file)
depts = ('IT', 'Physics', 'Arts', 'Math')
students = (
('Michael', 1, '1998-10-12'),
('John', 1, '1998-10-12'),
('Jack', 1, '1998-10-12'),
('Sara', 2, '1998-10-12'),
('Sally', 2, '1998-10-12'),
('Jena', None, '1998-10-12'),
('Nancy', 2, '1998-10-12'),
('Adam', 3, '1998-10-12'),
('Stevens', 3, '1998-10-12'),
('George', None, '1998-10-12')
)
with conn:
create_table(conn, create_table_departments_sql)
create_table(conn, create_table_students_sql)
for values in depts:
insert_depts(conn, (values, ))
for values in students:
insert_student(conn, values)
select_all_students(conn)
|
[
"mkhawarzia@gmail.com"
] |
mkhawarzia@gmail.com
|
fbcc54fea5b182b3e2383026e517dcaa50974606
|
f20516958c39123f204e2bc442c91df7df1cc34a
|
/amqpstorm/exchange.py
|
865a03bd8e75475a400c5bdf1d4068945cb5fa0b
|
[
"BSD-3-Clause"
] |
permissive
|
bradparks/ReadableWebProxy
|
3c2732cff64007afa8318b5b159616a529068322
|
81fbce3083471126942d2e2a298dba9eaf1092b1
|
refs/heads/master
| 2020-05-29T11:48:40.189530
| 2016-08-25T15:17:14
| 2016-08-25T15:17:14
| 66,568,996
| 0
| 0
| null | 2016-08-25T15:13:39
| 2016-08-25T15:13:39
| null |
UTF-8
|
Python
| false
| false
| 5,689
|
py
|
"""AMQP-Storm Channel.Exchange."""
import logging
from pamqp.specification import Exchange as pamqp_exchange
from amqpstorm import compatibility
from amqpstorm.base import Handler
from amqpstorm.exception import AMQPInvalidArgument
LOGGER = logging.getLogger(__name__)
class Exchange(Handler):
"""AMQP Channel.exchange"""
__slots__ = []
def declare(self, exchange='', exchange_type='direct', passive=False,
durable=False, auto_delete=False, arguments=None):
"""Declare an Exchange.
:param str exchange:
:param str exchange_type:
:param bool passive:
:param bool durable:
:param bool auto_delete:
:param dict arguments:
:raises AMQPInvalidArgument: Invalid Parameters
:raises AMQPChannelError: Raises if the channel encountered an error.
:raises AMQPConnectionError: Raises if the connection
encountered an error.
:rtype: dict
"""
if not compatibility.is_string(exchange):
raise AMQPInvalidArgument('exchange should be a string')
elif not compatibility.is_string(exchange_type):
raise AMQPInvalidArgument('exchange_type should be a string')
elif not isinstance(passive, bool):
raise AMQPInvalidArgument('passive should be a boolean')
elif not isinstance(durable, bool):
raise AMQPInvalidArgument('durable should be a boolean')
elif not isinstance(auto_delete, bool):
raise AMQPInvalidArgument('auto_delete should be a boolean')
elif arguments is not None and not isinstance(arguments, dict):
raise AMQPInvalidArgument('arguments should be a dict or None')
declare_frame = pamqp_exchange.Declare(exchange=exchange,
exchange_type=exchange_type,
passive=passive,
durable=durable,
auto_delete=auto_delete,
arguments=arguments)
return self._channel.rpc_request(declare_frame)
def delete(self, exchange='', if_unused=False):
"""Delete an Exchange.
:param str exchange:
:param bool if_unused:
:raises AMQPInvalidArgument: Invalid Parameters
:raises AMQPChannelError: Raises if the channel encountered an error.
:raises AMQPConnectionError: Raises if the connection
encountered an error.
:rtype: dict
"""
if not compatibility.is_string(exchange):
raise AMQPInvalidArgument('exchange should be a string')
delete_frame = pamqp_exchange.Delete(exchange=exchange,
if_unused=if_unused)
return self._channel.rpc_request(delete_frame)
def bind(self, destination='', source='', routing_key='',
arguments=None):
"""Bind an Exchange.
:param str destination:
:param str source:
:param str routing_key:
:param dict arguments:
:raises AMQPInvalidArgument: Invalid Parameters
:raises AMQPChannelError: Raises if the channel encountered an error.
:raises AMQPConnectionError: Raises if the connection
encountered an error.
:rtype: dict
"""
if not compatibility.is_string(destination):
raise AMQPInvalidArgument('destination should be a string')
elif not compatibility.is_string(source):
raise AMQPInvalidArgument('source should be a string')
elif not compatibility.is_string(routing_key):
raise AMQPInvalidArgument('routing_key should be a string')
elif arguments is not None and not isinstance(arguments, dict):
raise AMQPInvalidArgument('arguments should be a dict or None')
bind_frame = pamqp_exchange.Bind(destination=destination,
source=source,
routing_key=routing_key,
arguments=arguments)
return self._channel.rpc_request(bind_frame)
def unbind(self, destination='', source='', routing_key='',
arguments=None):
"""Unbind an Exchange.
:param str destination:
:param str source:
:param str routing_key:
:param dict arguments:
:raises AMQPInvalidArgument: Invalid Parameters
:raises AMQPChannelError: Raises if the channel encountered an error.
:raises AMQPConnectionError: Raises if the connection
encountered an error.
:rtype: dict
"""
if not compatibility.is_string(destination):
raise AMQPInvalidArgument('destination should be a string')
elif not compatibility.is_string(source):
raise AMQPInvalidArgument('source should be a string')
elif not compatibility.is_string(routing_key):
raise AMQPInvalidArgument('routing_key should be a string')
elif arguments is not None and not isinstance(arguments, dict):
raise AMQPInvalidArgument('arguments should be a dict or None')
unbind_frame = pamqp_exchange.Unbind(destination=destination,
source=source,
routing_key=routing_key,
arguments=arguments)
return self._channel.rpc_request(unbind_frame)
|
[
"something@fake-url.com"
] |
something@fake-url.com
|
9372d896b7050b2587a7d13762a113a2e9af5b33
|
cfa2417f07259e512a1bbface4f1f4ccd66502c6
|
/test/test_Likelihood/test_LensLikelihood/test_base_lens_likelihood.py
|
6ecb7556a125e7d02dc4226cdb44037390dd4b9d
|
[
"BSD-3-Clause"
] |
permissive
|
jiwoncpark/hierArc
|
3779439533d3c9c5fe2e687f4bdf737dfc7673e8
|
3f31c0ae7540387fe98f778035d415c3cff38756
|
refs/heads/master
| 2021-05-18T21:32:45.590675
| 2020-12-23T00:01:01
| 2020-12-23T00:01:01
| 251,431,028
| 0
| 0
|
NOASSERTION
| 2020-03-30T21:20:08
| 2020-03-30T21:20:08
| null |
UTF-8
|
Python
| false
| false
| 4,045
|
py
|
import numpy as np
import pytest
import unittest
from hierarc.Likelihood.LensLikelihood.base_lens_likelihood import LensLikelihoodBase
class TestLensLikelihood(object):
def setup(self):
np.random.seed(seed=41)
self.z_lens = 0.8
self.z_source = 3.0
num_samples = 10000
ddt_samples = np.random.normal(1, 0.1, num_samples)
dd_samples = np.random.normal(1, 0.1, num_samples)
self.likelihood_type_list = ['DdtGaussian',
'DdtDdKDE',
'DdtDdGaussian',
'DsDdsGaussian',
'DdtLogNorm',
'IFUKinCov',
'DdtHist',
'DdtHistKDE',
'DdtHistKin',
'DdtGaussKin',
'Mag',
'TDMag']
self.kwargs_likelihood_list = [{'ddt_mean': 1, 'ddt_sigma': 0.1},
{'dd_samples': dd_samples, 'ddt_samples': ddt_samples, 'kde_type': 'scipy_gaussian', 'bandwidth': 1},
{'ddt_mean': 1, 'ddt_sigma': 0.1, 'dd_mean': 1, 'dd_sigma': 0.1},
{'ds_dds_mean': 1, 'ds_dds_sigma': 0.1},
{'ddt_mu': 1, 'ddt_sigma': 0.1},
{'sigma_v_measurement': [1], 'j_model': [1], 'error_cov_measurement': [[1]], 'error_cov_j_sqrt': [[1]]},
{'ddt_samples': ddt_samples},
{'ddt_samples': ddt_samples},
{'ddt_samples': ddt_samples, 'sigma_v_measurement': [1], 'j_model': [1], 'error_cov_measurement': [[1]], 'error_cov_j_sqrt': [[1]]},
{'ddt_mean': 1, 'ddt_sigma': 0.1, 'sigma_v_measurement': [1], 'j_model': [1], 'error_cov_measurement': [[1]], 'error_cov_j_sqrt': [[1]]},
{'amp_measured': [1], 'cov_amp_measured': [[1]], 'mag_model': [1], 'cov_model': [[1]]},
{'time_delay_measured': [1.], 'cov_td_measured': [[1.]], 'amp_measured': [1., 1.], 'cov_amp_measured': [[1., 0], [0, 1.]], 'fermat_diff': [1.], 'mag_model': [1., 1.], 'cov_model': np.ones((3, 3))}
]
def test_log_likelihood(self):
for i, likelihood_type in enumerate(self.likelihood_type_list):
likelihood = LensLikelihoodBase(z_lens=self.z_lens, z_source=self.z_source, likelihood_type=likelihood_type,
**self.kwargs_likelihood_list[i])
print(likelihood_type)
logl = likelihood.log_likelihood(ddt=1, dd=1, aniso_scaling=None, sigma_v_sys_error=1, mu_intrinsic=1)
print(logl)
assert logl > -np.inf
def test_predictions_measurements(self):
for i, likelihood_type in enumerate(self.likelihood_type_list):
likelihood = LensLikelihoodBase(z_lens=self.z_lens, z_source=self.z_source, likelihood_type=likelihood_type,
**self.kwargs_likelihood_list[i])
ddt_measurement = likelihood.ddt_measurement()
likelihood.sigma_v_measurement(sigma_v_sys_error=0)
likelihood.sigma_v_prediction(ddt=1, dd=1, aniso_scaling=1)
assert len(ddt_measurement) == 2
class TestRaise(unittest.TestCase):
def test_raise(self):
with self.assertRaises(ValueError):
LensLikelihoodBase(z_lens=0.5, z_source=2, likelihood_type='BAD')
with self.assertRaises(ValueError):
likelihood = LensLikelihoodBase(z_lens=0.5, z_source=2, likelihood_type='DdtGaussian',
**{'ddt_mean': 1, 'ddt_sigma': 0.1})
likelihood.likelihood_type = 'BAD'
likelihood.log_likelihood(ddt=1, dd=1)
if __name__ == '__main__':
pytest.main()
|
[
"sibirrer@gmail.com"
] |
sibirrer@gmail.com
|
f8aa9cc771efab36e523016cc18be7dd92b8bf88
|
43ab33b2f50e47f5dbe322daa03c86a99e5ee77c
|
/test/test_study_group_values_controller_api.py
|
671f7e874460bcd47617d26a420f26a608131ef4
|
[] |
no_license
|
Sage-Bionetworks/rcc-client
|
c770432de2d2950e00f7c7bd2bac22f3a81c2061
|
57c4a621aecd3a2f3f9faaa94f53b2727992a01a
|
refs/heads/main
| 2023-02-23T05:55:39.279352
| 2021-01-21T02:06:08
| 2021-01-21T02:06:08
| 331,486,099
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,603
|
py
|
# coding: utf-8
"""
nPhase REST Resource
REDCap REST API v.2 # noqa: E501
The version of the OpenAPI document: 2.0
Generated by: https://openapi-generator.tech
"""
from __future__ import absolute_import
import unittest
import rcc
from rcc.api.study_group_values_controller_api import StudyGroupValuesControllerApi # noqa: E501
from rcc.rest import ApiException
class TestStudyGroupValuesControllerApi(unittest.TestCase):
"""StudyGroupValuesControllerApi unit test stubs"""
def setUp(self):
self.api = rcc.api.study_group_values_controller_api.StudyGroupValuesControllerApi() # noqa: E501
def tearDown(self):
pass
def test_create11(self):
"""Test case for create11
Create new Study Group Value for current Study based on auth token provided # noqa: E501
"""
pass
def test_delete8(self):
"""Test case for delete8
Delete Study Group Value for current Study based on auth token provided # noqa: E501
"""
pass
def test_get_details8(self):
"""Test case for get_details8
Get specified Study Group Value details # noqa: E501
"""
pass
def test_get_list9(self):
"""Test case for get_list9
Get list of all Study Group Values for specified Study # noqa: E501
"""
pass
def test_update10(self):
"""Test case for update10
Update Study Group Value for current Study based on auth token provided # noqa: E501
"""
pass
if __name__ == '__main__':
unittest.main()
|
[
"thomas.yu@sagebase.org"
] |
thomas.yu@sagebase.org
|
197947df3f6c3b552f542cad538188861870d86f
|
95c027e7302751b335b33d287e0efac7483edfc3
|
/boj/BOJ_평균.py
|
aaf5066b2ef4cd7f9d8f10ec2c10ff292124ceba
|
[] |
no_license
|
kimchaelin13/Algorithm
|
01bd4bcb24c58d5d82714e60272d5af91d2d9ce8
|
53f7f3cff5a141cf705af3c9f31cdb9ae997caff
|
refs/heads/master
| 2023-02-03T08:58:26.660299
| 2020-12-20T17:01:16
| 2020-12-20T17:01:16
| 296,996,924
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 185
|
py
|
import sys
sys.stdin = open("input.txt", "r")
s = []
for i in range(5):
s.append(int(input()))
for j in range(len(s)):
if s[j] < 40:
s[j]=40
print(round(sum(s)/len(s)))
|
[
"kimchaelin13@gmail.com"
] |
kimchaelin13@gmail.com
|
3775521386c59304a0872b9053c2111fdfe7ca55
|
da687718aa8ce62974090af63d25e057262e9dfe
|
/cap14-funcoes/extras/entrada.py
|
8f9a269e72ba810cb2bb7d637f9fbdeaae697fbd
|
[] |
no_license
|
frclasso/revisao_Python_modulo1
|
77928fa4409c97d49cc7deccdf291f44c337d290
|
1e83d0ef9657440db46a8e84b136ac5f9a7c556e
|
refs/heads/master
| 2020-06-25T05:37:28.768343
| 2019-07-27T22:23:58
| 2019-07-27T22:23:58
| 199,217,969
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 329
|
py
|
def valida_inteiro(mensagem, minimo, maximo):
while True:
try:
v = int(input(mensagem))
if v >= minimo and v <= maximo:
return v
else:
print(f'Digite um valor entre {maximo} e {minimo}.')
except: print('Voce deve digitar um numero inteiro.')
|
[
"frcalsso@yahoo.com.br"
] |
frcalsso@yahoo.com.br
|
1c1842851e7ef3306eade4b5362a299e7a952d0f
|
4cdf4e243891c0aa0b99dd5ee84f09a7ed6dd8c8
|
/django2/bookmarks/bookmarks/settings.py
|
8277bde8c3c7f242eb407532c2ef68e2c0ae896b
|
[
"MIT"
] |
permissive
|
gozeon/code-collections
|
464986c7765df5dca980ac5146b847416b750998
|
13f07176a6c7b6ac13586228cec4c1e2ed32cae4
|
refs/heads/master
| 2023-08-17T18:53:24.189958
| 2023-08-10T04:52:47
| 2023-08-10T04:52:47
| 99,432,793
| 1
| 0
|
NOASSERTION
| 2020-07-17T09:25:44
| 2017-08-05T15:56:53
|
JavaScript
|
UTF-8
|
Python
| false
| false
| 3,367
|
py
|
"""
Django settings for bookmarks project.
Generated by 'django-admin startproject' using Django 2.0.8.
For more information on this file, see
https://docs.djangoproject.com/en/2.0/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/2.0/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/2.0/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'a9va+)ulziy57*cci0qv^v#7lo04$%&t-qj*77hg@77q1_&#_d'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'account.apps.AccountConfig',
'images.apps.ImagesConfig'
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'bookmarks.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'bookmarks.wsgi.application'
# Database
# https://docs.djangoproject.com/en/2.0/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/2.0/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/2.0/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/2.0/howto/static-files/
STATIC_URL = '/static/'
LOGIN_REDIRECT_URL = 'dashboard'
LOGIN_URL = 'login'
LOGOUT_URL = 'logout'
EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'
MEDIA_URL = '/media/'
MEDIA_ROOT = os.path.join(BASE_DIR, 'media/')
|
[
"goze.qiu@gmail.com"
] |
goze.qiu@gmail.com
|
ef04eda75ce46249309cb75b6cbb7b0c9858fbe3
|
9f3991f4e7b405c04f2ef03ac7747b5a69d26b4b
|
/openpyxl/csv_to_excel.py
|
8efbab73e40cd93a497cd0cec0ccd755b3044f64
|
[] |
no_license
|
zcxyun/pythonDemo
|
f66eb5e6e4274db2137480786eae4d6ca7e73163
|
adf18cf6b58282a7f2f9203aa09d5cb60ced2e35
|
refs/heads/master
| 2021-07-29T19:06:52.481792
| 2021-07-27T16:10:38
| 2021-07-27T16:10:38
| 101,542,803
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,467
|
py
|
#!/usr/bin/env
# -*- coding: utf-8 -*-
import csv
import re
import openpyxl
import itertools
from datetime import datetime
# from openpyxl.utils import get_column_letter, column_index_from_string
money = []
payee = []
payeeNumber = []
firstPayDate = []
#with open('resource/example.csv', encoding='utf-8') as originFile:
# 将csv文件的数据提取到相应的列表中
with open('E:/新农合银行转账统计表/新农合网银统计银行下载原件/12.1-12.4.csv', encoding='utf-16') as originFile:
originReader = csv.reader(originFile, delimiter='\t')
originData = list(originReader)
for index, title in enumerate(originData[0]):
if title == '金额':
for m in originData[1: len(originData)+1]:
money.append(float(''.join(m[index].split(','))))
if title == '收款人名称':
for p in originData[1: len(originData)+1]:
payee.append(p[index])
if title == '收款人账号':
for pn in originData[1: len(originData)+1]:
payeeNumber.append(pn[index])
if title == '初次委托日期':
for fpd in originData[1: len(originData)+1]:
firstPayDate.append(fpd[index][:10])
# 将相应的列表转换为相应的迭代器
moneyIter = iter(money)
payeeIter = iter(payee)
payeeNumberIter = iter(payeeNumber)
firstPayDateIter = iter(firstPayDate)
# 加载 excel 文件
wb = openpyxl.load_workbook('E:/新农合银行转账统计表/2017-12-01至2017-12-31.xlsx')
# 获取工作表
sheet0 = wb.get_sheet_by_name('sheet0')
# 获取工作表模板
sheetTemplate = wb.get_sheet_by_name('sheetTemplate')
# 计数器
natuals = itertools.count(1)
ns = itertools.takewhile(lambda x: x <= len(money), natuals)
# csv 文件中的数据根据一定的规则复制到相应的 Excel 文件中
def copy(sheet):
try:
# print(sheet.title)
for rowOfCellObjects in sheet['B5':'H34']:
for index, cell in enumerate(rowOfCellObjects):
if cell.value == None:
if index == 0:
cell.value = next(payeeIter)
if index == 1:
cell.value = next(firstPayDateIter)
if index == 2:
cell.value = next(moneyIter)
if index == 3:
cell.value = next(payeeNumberIter)
if index == 4:
cell.value = rowOfCellObjects[0].value
if index == 5:
cell.value = rowOfCellObjects[2].value
# if index == 6:
# cell.value = datetime.now().date()
ws_next = wb.copy_worksheet(sheetTemplate)
ws_next.title = sheetTemplate.title[:5] + str(next(ns))
copy(ws_next)
except StopIteration as e:
return
copy(sheet0)
# 根据前一个工作表的索引建立新工作表的索引
def makeIndex(sheet):
title = re.match(r'^([a-zA-Z]+)(\d+)$', sheet.title)
titleStr = title.group(1)
titleExt = title.group(2)
titleExtToInt = int(titleExt)
# print(str(titleExtToInt+1))
sheetPrev = wb.get_sheet_by_name(titleStr + str(titleExtToInt-1))
# print(sheetPrev)
sheet['A5'] = sheetPrev['A34'].value + 1
# print(sheet['A2'].value)
for i in range(len(sheet['A5':'A34'])):
if i >= 1:
sheet['A5':'A34'][i][0].value = sheet['A5':'A34'][i-1][0].value + 1
# 合计支付金额
def moneySum(sheet):
sheet['D35'] = "=SUM(D5:D34)"
sheet['G35'] = "=SUM(G5:G34)"
for sh in wb:
moneySum(sh)
if sh.title != 'sheetTemplate' and sh.title != 'sheet0' :
makeIndex(sh)
wb.save('E:/新农合银行转账统计表/2017-12-01至2017-12-31.xlsx')
|
[
"zcxyun@126.com"
] |
zcxyun@126.com
|
a08b6a7a99b0ab5b2de2ff6bf12388fbf6319a48
|
c4bfd8ba4c4c0f21bd6a54a9131f0985a5a4fa56
|
/crescent/resources/s3/bucket_policy/constants.py
|
5ba83647b2baf057d3d871cc99288b7e11f8f64e
|
[
"Apache-2.0"
] |
permissive
|
mpolatcan/crescent
|
405936ec001002e88a8f62d73b0dc193bcd83010
|
2fd0b1b9b21613b5876a51fe8b5f9e3afbec1b67
|
refs/heads/master
| 2022-09-05T04:19:43.745557
| 2020-05-25T00:09:11
| 2020-05-25T00:09:11
| 244,903,370
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 325
|
py
|
from crescent.core.constants import get_values
class _RequiredProperties:
class BucketPolicy:
BUCKET = "Bucket"
POLICY_DOCUMENT = "PolicyDocument"
# --------------------------------------------------
class ResourceRequiredProperties:
BUCKET_POLICY = get_values(_RequiredProperties.BucketPolicy)
|
[
"mutlupolatcan@gmail.com"
] |
mutlupolatcan@gmail.com
|
5aadabb6bec3aec95c8f54c9736e197ced6a47ab
|
0daf6763c960cd898e9bb5612b1314d7e34b8870
|
/mnist_1/data.py
|
b1bf29e2af4aca2bbe3f70fd3c775cddef6107cf
|
[
"MIT"
] |
permissive
|
evanthebouncy/nnhmm
|
a6ba2a1f0ed2c90a0188de8b5e162351e6668565
|
acd76edaa1b3aa0c03d39f6a30e60d167359c6ad
|
refs/heads/master
| 2021-01-12T02:27:32.814908
| 2017-04-01T05:01:24
| 2017-04-01T05:01:24
| 77,956,435
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 7,180
|
py
|
import numpy as np
from scipy.misc import imresize
from scipy.ndimage.filters import gaussian_filter
from tensorflow.examples.tutorials.mnist import input_data
mnist = input_data.read_data_sets("MNIST_data/", one_hot=True)
X_L = 10
L = 14
N_BATCH = 50
OBS_SIZE = 20
KEEP = 0.6
# ---------------------------- helpers
def black_white(img):
new_img = np.copy(img)
img_flat = img.flatten()
nonzeros = img_flat[np.nonzero(img_flat)]
sortedd = np.sort(nonzeros)
idxx = round(len(sortedd) * (1.0 - KEEP))
thold = sortedd[idxx]
mask_pos = img >= thold
mask_neg = img < thold
new_img[mask_pos] = 1.0
new_img[mask_neg] = 0.0
return new_img
def vectorize(coords):
retX, retY = np.zeros([L]), np.zeros([L])
retX[coords[0]] = 1.0
retY[coords[1]] = 1.0
return retX, retY
# show dimension of a data object (list of list or a tensor)
def show_dim(lst1):
if hasattr(lst1, '__len__') and len(lst1) > 0:
return [len(lst1), show_dim(lst1[0])]
else:
try:
return lst1.get_shape()
except:
try:
return lst1.shape
except:
return type(lst1)
# -------------------------------------- making the datas
# assume X is already a 2D matrix
def mk_query(X):
def query(O):
Ox, Oy = O
if X[Ox][Oy] == 1.0:
return [1.0, 0.0]
else:
return [0.0, 1.0]
return query
def sample_coord():
return np.random.randint(0, L), np.random.randint(0, L)
def sample_coord_center():
Ox, Oy = np.random.multivariate_normal([L/2,L/2], [[L*0.7, 0.0], [0.0, L*0.7]])
Ox, Oy = round(Ox), round(Oy)
if 0 <= Ox < L:
if 0 <= Oy < L:
return Ox, Oy
return sample_coord()
def sample_coord_bias(qq):
def find_positive(qq):
C = sample_coord()
if qq(C) == [1.0, 0.0]:
return C
else:
return find_positive(qq)
def find_negative(qq):
C = sample_coord()
if qq(C) == [0.0, 1.0]:
return C
else:
return find_negative(qq)
toss = np.random.random() < 0.5
if toss:
return find_positive(qq)
else:
return find_negative(qq)
def gen_O(X):
query = mk_query(X)
Ox, Oy = sample_coord()
O = (Ox, Oy)
return O, query(O)
def get_img_class(test=False):
img, _x = mnist.train.next_batch(1)
if test:
img, _x = mnist.test.next_batch(1)
img = np.reshape(img[0], [2*L,2*L])
# rescale the image to 14 x 14
# img = imresize(img, (14,14), interp='nearest') / 255.0
img = gaussian_filter(imresize(img, (14,14)) / 255.0, 0.11)
img = black_white(img)
return img, _x[0]
# a trace is named tuple
# (Img, S, Os)
# where Img is the black/white image
# where S is the hidden hypothesis (i.e. label of the img)
# Os is a set of Observations which is (qry_pt, label)
import collections
Trace = collections.namedtuple('Trace', 'Img S Os')
def gen_rand_trace(test=False):
img, _x = get_img_class(test)
obs = []
for ob_idx in range(OBS_SIZE):
obs.append(gen_O(img))
return Trace(img, _x, obs)
# a class to hold the experiences
class Experience:
def __init__(self, buf_len):
self.buf = []
self.buf_len = buf_len
def trim(self):
while len(self.buf) > self.buf_len:
self.buf.pop()
def add(self, trace):
self.buf.append(trace)
self.trim()
def sample(self):
idxxs = np.random.choice(len(self.buf), size=1, replace=False)
return self.buf[idxxs[0]]
def data_from_exp(exp):
traces = [exp.sample() for _ in range(N_BATCH)]
x = []
obs_x = [[] for i in range(OBS_SIZE)]
obs_y = [[] for i in range(OBS_SIZE)]
obs_tfs = [[] for i in range(OBS_SIZE)]
new_ob_x = []
new_ob_y = []
new_ob_tf = []
imgs = []
for bb in range(N_BATCH):
trr = traces[bb]
# generate a hidden variable X
# get a single thing out
img = trr.Img
_x = trr.S
imgs.append(img)
x.append(_x)
# generate a FRESH new observation for demanding an answer
_new_ob_coord, _new_ob_lab = gen_O(img)
_new_ob_x, _new_ob_y = vectorize(_new_ob_coord)
new_ob_x.append(_new_ob_x)
new_ob_y.append(_new_ob_y)
new_ob_tf.append(_new_ob_lab)
# generate observations for this hidden variable x
for ob_idx in range(OBS_SIZE):
_ob_coord, _ob_lab = trr.Os[ob_idx]
_ob_x, _ob_y = vectorize(_ob_coord)
obs_x[ob_idx].append(_ob_x)
obs_y[ob_idx].append(_ob_y)
obs_tfs[ob_idx].append(_ob_lab)
return np.array(x, np.float32),\
np.array(obs_x, np.float32),\
np.array(obs_y, np.float32),\
np.array(obs_tfs, np.float32),\
np.array(new_ob_x, np.float32),\
np.array(new_ob_y, np.float32),\
np.array(new_ob_tf, np.float32), imgs
# the thing is we do NOT use the trace observations, we need to generate random observations
# to be sure we can handle all kinds of randomizations
def inv_data_from_label_data(labelz, inputz):
labs = []
obss = []
for bb in range(N_BATCH):
img = inputz[bb]
lab = labelz[bb]
labs.append(lab)
obs = np.zeros([L,L,2])
# generate observations for this hidden variable x
for ob_idx in range(OBS_SIZE):
ob_coord, ob_lab = gen_O(img)
ox, oy = ob_coord
if ob_lab[0] == 1.0:
obs[ox][oy][0] = 1.0
if ob_lab[1] == 1.0:
obs[ox][oy][1] = 1.0
obss.append(obs)
return np.array(labs, np.float32),\
np.array(obss, np.float32)
# uses trace info
def inv_batch_obs(labz, batch_Os):
obss = []
for bb in range(N_BATCH):
Os = batch_Os[bb]
obs = np.zeros([L,L,2])
# generate observations for this hidden variable x
for ob_idx in range(OBS_SIZE):
ob_coord, ob_lab = Os[ob_idx]
ox, oy = ob_coord
if ob_lab[0] == 1.0:
obs[ox][oy][0] = 1.0
if ob_lab[1] == 1.0:
obs[ox][oy][1] = 1.0
obss.append(obs)
return np.array(labz, np.float32),\
np.array(obss, np.float32)
# def gen_data():
# x = []
#
# obs_x = [[] for i in range(OBS_SIZE)]
# obs_y = [[] for i in range(OBS_SIZE)]
# obs_tfs = [[] for i in range(OBS_SIZE)]
# new_ob_x = []
# new_ob_y = []
# new_ob_tf = []
#
# imgs = []
#
# for bb in range(N_BATCH):
# # generate a hidden variable X
# # get a single thing out
# img, _x = get_img_class()
# imgs.append(img)
#
# # add to x
# x.append(_x[0])
# # generate new observation
# _new_ob_coord, _new_ob_lab = gen_O(img)
# _new_ob_x, _new_ob_y = vectorize(_new_ob_coord)
# new_ob_x.append(_new_ob_x)
# new_ob_y.append(_new_ob_y)
# new_ob_tf.append(_new_ob_lab)
#
# # generate observations for this hidden variable x
# for ob_idx in range(OBS_SIZE):
# _ob_coord, _ob_lab = gen_O(img)
# _ob_x, _ob_y = vectorize(_ob_coord)
# obs_x[ob_idx].append(_ob_x)
# obs_y[ob_idx].append(_ob_y)
# obs_tfs[ob_idx].append(_ob_lab)
#
# return np.array(x, np.float32),\
# np.array(obs_x, np.float32),\
# np.array(obs_y, np.float32),\
# np.array(obs_tfs, np.float32),\
# np.array(new_ob_x, np.float32),\
# np.array(new_ob_y, np.float32),\
# np.array(new_ob_tf, np.float32), imgs
|
[
"evanthebouncy@gmail.com"
] |
evanthebouncy@gmail.com
|
a232db2616848042691d1e7c825e015af8882aab
|
5fe72bb13baf3649058ebe11aa86ad4fc56c69ed
|
/hard-gists/3409403/snippet.py
|
0e119325f7fbab1ea9ab0c1fde4fd52135828a49
|
[
"Apache-2.0"
] |
permissive
|
dockerizeme/dockerizeme
|
8825fed45ff0ce8fb1dbe34959237e8048900a29
|
408f3fa3d36542d8fc1236ba1cac804de6f14b0c
|
refs/heads/master
| 2022-12-10T09:30:51.029846
| 2020-09-02T13:34:49
| 2020-09-02T13:34:49
| 144,501,661
| 24
| 20
|
Apache-2.0
| 2022-11-21T12:34:29
| 2018-08-12T21:21:04
|
Python
|
UTF-8
|
Python
| false
| false
| 547
|
py
|
from google.appengine.ext import db, ndb
from google.appengine.datastore import entity_pb
def db_entity_to_protobuf(e):
return db.model_to_protobuf(e).Encode()
def protobuf_to_db_entity(pb):
# precondition: model class must be imported
return db.model_from_protobuf(entity_pb.EntityProto(pb))
def ndb_entity_to_protobuf(e):
return ndb.ModelAdapter().entity_to_pb(e).Encode()
def protobuf_to_ndb_entity(pb):
# precondition: model class must be imported
return ndb.ModelAdapter().pb_to_entity(entity_pb.EntityProto(pb))
|
[
"42325807+dockerizeme@users.noreply.github.com"
] |
42325807+dockerizeme@users.noreply.github.com
|
9a3f3542a14276c1794492528c5d906908c7f791
|
6b9084d234c87d7597f97ec95808e13f599bf9a1
|
/evaluation/logger/pytracking.py
|
6e17451f77982c297479789660635ffca35a1ee4
|
[] |
no_license
|
LitingLin/ubiquitous-happiness
|
4b46234ce0cb29c4d27b00ec5a60d3eeb52c26fc
|
aae2d764e136ca4a36c054212b361dd7e8b22cba
|
refs/heads/main
| 2023-07-13T19:51:32.227633
| 2021-08-03T16:02:03
| 2021-08-03T16:02:03
| 316,664,903
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 606
|
py
|
import numpy as np
import os
class PyTrackingLogger:
def __init__(self, output_path=None):
self.output_path = output_path
def log_sequence_result(self, name: str, predicted_bboxes: np.ndarray, **kwargs):
print(f'Sequence: {name}')
print(f'FPS: {kwargs["fps"]}')
predicted_bboxes = predicted_bboxes.copy()
predicted_bboxes[:, 0] += 1
predicted_bboxes[:, 1] += 1
if self.output_path is not None:
np.savetxt(os.path.join(self.output_path, '{}.txt'.format(name)), predicted_bboxes, delimiter='\t',
fmt='%d')
|
[
"linliting06@live.com"
] |
linliting06@live.com
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.