| column | type | range / classes |
|---|---|---|
| blob_id | string | length 40 |
| directory_id | string | length 40 |
| path | string | length 3 to 616 |
| content_id | string | length 40 |
| detected_licenses | list | length 0 to 112 |
| license_type | string | 2 classes |
| repo_name | string | length 5 to 115 |
| snapshot_id | string | length 40 |
| revision_id | string | length 40 |
| branch_name | string | 777 classes |
| visit_date | timestamp[us] | 2015-08-06 10:31:46 to 2023-09-06 10:44:38 |
| revision_date | timestamp[us] | 1970-01-01 02:38:32 to 2037-05-03 13:00:00 |
| committer_date | timestamp[us] | 1970-01-01 02:38:32 to 2023-09-06 01:08:06 |
| github_id | int64, nullable | 4.92k to 681M |
| star_events_count | int64 | 0 to 209k |
| fork_events_count | int64 | 0 to 110k |
| gha_license_id | string | 22 classes |
| gha_event_created_at | timestamp[us], nullable | 2012-06-04 01:52:49 to 2023-09-14 21:59:50 |
| gha_created_at | timestamp[us], nullable | 2008-05-22 07:58:19 to 2023-08-21 12:35:19 |
| gha_language | string | 149 classes |
| src_encoding | string | 26 classes |
| language | string | 1 class |
| is_vendor | bool | 2 classes |
| is_generated | bool | 2 classes |
| length_bytes | int64 | 3 to 10.2M |
| extension | string | 188 classes |
| content | string | length 3 to 10.2M |
| authors | list | length 1 |
| author_id | string | length 1 to 132 |
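
Each record below is one row of this schema: a metadata block, the file `content` as a fenced code block, then the row's `authors` and `author_id` fields. For orientation, here is a minimal sketch of filtering rows of this schema with pandas — `rows.parquet` is a placeholder name, since the dump does not say how the split is stored:

```python
# Minimal sketch: select permissive-licensed, non-vendored Python rows.
# "rows.parquet" is a hypothetical export of this split, not a real path.
import pandas as pd

df = pd.read_parquet("rows.parquet")
subset = df[
    (df["license_type"] == "permissive")
    & (df["language"] == "Python")
    & (~df["is_vendor"])
    & (~df["is_generated"])
]
print(subset[["repo_name", "path", "length_bytes"]].head())
```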

---
blob_id: eec37585b4feaa9cc15c76cd3fead5d237d8c535
directory_id: db6a3eb678c372256c342e76caaf750de253c119
path: /tests/conftest.py
content_id: 1153bb41932ba778afc0b91d999d0b9826a2eb12
detected_licenses: ["BSD-3-Clause"]
license_type: permissive
repo_name: molssi-seamm/table_step
snapshot_id: 80678f1bb05fb068d0dc5c6390d4e69a822d9cbc
revision_id: e98f79ee5455f1caf1f03da32c9771cee24e7813
branch_name: refs/heads/main
visit_date: 2023-08-17T02:20:08.451145
revision_date: 2023-07-25T19:22:14
committer_date: 2023-07-25T19:22:14
github_id: 170,744,384
star_events_count: 0
fork_events_count: 0
gha_license_id: BSD-3-Clause
gha_event_created_at: 2023-07-25T19:22:16
gha_created_at: 2019-02-14T19:14:57
gha_language: Python
src_encoding: UTF-8
language: Python
is_vendor: false
is_generated: false
length_bytes: 1,584
extension: py
content:
```python
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Fixtures for testing the table_step package."""
import pytest
def pytest_addoption(parser):
parser.addoption(
"--no-unit", action="store_true", default=False, help="don't run the unit tests"
)
parser.addoption(
"--integration",
action="store_true",
default=False,
help="run the integration tests",
)
parser.addoption(
"--timing", action="store_true", default=False, help="run the timing tests"
)
def pytest_configure(config):
config.addinivalue_line(
"markers", "unit: unit tests run by default, use '--no-unit' to turn off"
)
config.addinivalue_line(
"markers", "integration: integration test, run with --integration"
)
config.addinivalue_line("markers", "timing: timing tests, run with --timing")
def pytest_collection_modifyitems(config, items):
if config.getoption("--no-unit"):
skip = pytest.mark.skip(reason="remove --no-unit option to run")
for item in items:
if "unit" in item.keywords:
item.add_marker(skip)
if not config.getoption("--integration"):
skip = pytest.mark.skip(reason="use the --integration option to run")
for item in items:
if "integration" in item.keywords:
item.add_marker(skip)
if not config.getoption("--timing"):
skip = pytest.mark.skip(reason="need --timing option to run")
for item in items:
if "timing" in item.keywords:
item.add_marker(skip)
```

authors: ["paul@thesaxes.net"]
author_id: paul@thesaxes.net
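
The conftest above registers three markers and couples them to command-line flags. A sketch of how a test module in the suite would use them — the test names are hypothetical, since the real table_step tests are not part of this record:

```python
# Hypothetical test module relying on the markers registered in conftest.py.
import pytest

@pytest.mark.unit
def test_defaults_parse():       # runs by default; skipped with --no-unit
    assert 2 + 2 == 4

@pytest.mark.integration
def test_full_table_workflow():  # skipped unless --integration is given
    assert True

@pytest.mark.timing
def test_large_table_timing():   # skipped unless --timing is given
    assert True
```

Typical invocations would then be `pytest` (unit tests only), `pytest --integration`, or `pytest --no-unit --timing`.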

---
blob_id: aab78f44cb62d54f045f75c91f11d8a9fb0b08d9
directory_id: 7ecbdf17a3d6c11adac10781fc08f16e60c2abe7
path: /testGridWorldPython/multiplePCrystal_N1609/testGridWorld.py
content_id: 6e24c232b62941638a56757ba8ea221ecf64d66c
detected_licenses: []
license_type: no_license
repo_name: yangyutu/multiagentAssembly
snapshot_id: 459f7941403d38c877f51094dee9462372b491c0
revision_id: 1e7c950e13fadf5e2e471de15b5c06f349130869
branch_name: refs/heads/master
visit_date: 2020-06-07T06:17:27.747500
revision_date: 2019-06-30T19:39:58
committer_date: 2019-06-30T19:39:58
github_id: 192,946,813
star_events_count: 0
fork_events_count: 1
gha_license_id: null
gha_event_created_at: null
gha_created_at: null
gha_language: null
src_encoding: UTF-8
language: Python
is_vendor: false
is_generated: false
length_bytes: 663
extension: py
content:
```python
import GridWorldPython as gw
import json
import numpy as np
from timeit import default_timer as timer
configName = 'config.json'
model = gw.GridWorldPython(configName, 1)
with open(configName,'r') as file:
config = json.load(file)
N = config['N']
iniConfigPos = np.genfromtxt('squareTargetN1609CPP.txt')
iniConfig = []
for pos in iniConfigPos:
iniConfig.append(pos[1:3].tolist() + [0])
model.reset()
model.setIniConfig(np.array(iniConfig))
#print(iniConfig)
start = timer()
for i in range(100):
speeds = [0 for _ in range(N)]
model.step(speeds)
pos = model.getPositions()
pos.shape = (N, 3)
end = timer()
print("timecost", end - start)
```

authors: ["yangyutu123@gmail.com"]
author_id: yangyutu123@gmail.com

---
blob_id: cd2cd539213e5471e56fbe40f7277b75376c4c8a
directory_id: bdff6688cee79226723fbcf9980c3757a55651b7
path: /algorithms/implementation/find_digits.py
content_id: ae389e624b94fb978174728591e73e5e389879b0
detected_licenses: []
license_type: no_license
repo_name: kruthar/hackerrank
snapshot_id: 1f151203c8f26c033585f30d2cf69a2b22dcaf71
revision_id: ef81b2aa41a678ad6b0692f933f438a62b1d6b64
branch_name: refs/heads/master
visit_date: 2016-08-10T07:15:19.165058
revision_date: 2016-02-26T17:48:58
committer_date: 2016-02-26T17:48:58
github_id: 49,286,556
star_events_count: 0
fork_events_count: 0
gha_license_id: null
gha_event_created_at: null
gha_created_at: null
gha_language: null
src_encoding: UTF-8
language: Python
is_vendor: false
is_generated: false
length_bytes: 284
extension: py
content:
```python
import sys
num_lines = int(next(sys.stdin).strip())
for i in range(0, num_lines):
    num_string = next(sys.stdin).strip()
    num = int(num_string)
    divs = 0
    # count the non-zero digits that divide the number evenly
    for digit in num_string:
        if digit != '0' and num % int(digit) == 0:
            divs += 1
    print(divs)
```

authors: ["kruthar@gmail.com"]
author_id: kruthar@gmail.com
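
A quick worked check of the digit-counting logic above (HackerRank's "Find Digits"): for 1012 the digits are 1, 0, 1, 2; the zero is skipped, and 1, 1, 2 all divide 1012, so the expected count is 3.

```python
# Same per-digit divisibility test as the loop above, on one fixed input.
n = 1012
assert sum(1 for d in str(n) if d != '0' and n % int(d) == 0) == 3
```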

---
blob_id: ff8c2bc814120ac52259dc6eb7847a31d804f055
directory_id: a7b0d36ed471f48b549355430eb474d1f212fed4
path: /products/tests.py
content_id: 455a4e4972f84336b889ee549a3a66df2411f105
detected_licenses: ["MIT"]
license_type: permissive
repo_name: Yezz123-Archive/E-Commerce-Boilerplate
snapshot_id: 93fd1aca5fb462c44915aeb60a1e89284c15ad0a
revision_id: c57196b92ae5395bbfa0ef97f50425904fe2ead2
branch_name: refs/heads/main
visit_date: 2023-05-07T05:08:59.232857
revision_date: 2021-06-05T11:56:51
committer_date: 2021-06-05T11:56:51
github_id: null
star_events_count: 0
fork_events_count: 0
gha_license_id: null
gha_event_created_at: null
gha_created_at: null
gha_language: null
src_encoding: UTF-8
language: Python
is_vendor: false
is_generated: false
length_bytes: 243
extension: py
content:
```python
from django.test import TestCase
from .models import Product
# Create your tests here.
class ProductTests(TestCase):
def test_str(self):
test_name = Product(name='product')
        self.assertEqual(str(test_name), 'product')
```

authors: ["yasserth19@gmail.com"]
author_id: yasserth19@gmail.com

---
blob_id: 6cc1153dc9a841af6cfd746ccd2fceb3e74f88e1
directory_id: b7663107da61a8530c92567639f9bc3e51b4b3c7
path: /models/org.py
content_id: 7382bbf5a6d5092ca17972a91445f26a9546972b
detected_licenses: ["MIT"]
license_type: permissive
repo_name: goldenboy/eden
snapshot_id: adb5dddeee56b891ee11fb31453fa7ea65df60d3
revision_id: 628ee2278b7535486482a6030c5d84b70e3bdb4a
branch_name: refs/heads/master
visit_date: 2021-01-16T21:36:21.965358
revision_date: 2012-02-26T23:54:56
committer_date: 2012-02-26T23:54:56
github_id: null
star_events_count: 0
fork_events_count: 0
gha_license_id: null
gha_event_created_at: null
gha_created_at: null
gha_language: null
src_encoding: UTF-8
language: Python
is_vendor: false
is_generated: false
length_bytes: 7,577
extension: py
content:
```python
# -*- coding: utf-8 -*-
"""
Organization Registry
"""
# -----------------------------------------------------------------------------
# Defined in the Model for use from Multiple Controllers for unified menus
#
def org_organisation_controller():
""" RESTful CRUD controller """
# T = current.T
# db = current.db
# gis = current.gis
s3 = current.response.s3
manager = current.manager
tablename = "org_office"
table = s3db[tablename]
# Pre-process
def prep(r):
if r.interactive:
r.table.country.default = gis.get_default_country("code")
if r.component_name == "human_resource" and r.component_id:
# Workaround until widget is fixed:
hr_table = db.hrm_human_resource
hr_table.person_id.widget = None
hr_table.person_id.writable = False
elif r.component_name == "office" and \
r.method and r.method != "read":
# Don't want to see in Create forms
# inc list_create (list_fields over-rides)
table = r.component.table
s3.address_hide(table)
# Process Base Location
#manager.configure(table._tablename,
# onaccept=s3.address_onaccept)
elif r.component_name == "task" and \
r.method != "update" and r.method != "read":
# Create or ListCreate
r.component.table.organisation_id.default = r.id
r.component.table.status.writable = False
r.component.table.status.readable = False
elif r.component_name == "project" and r.link:
# Hide/show host role after project selection in embed-widget
tn = r.link.tablename
manager.configure(tn,
post_process="hide_host_role($('#%s').val());")
script = "s3.hide_host_role.js"
s3.scripts.append( "%s/%s" % (s3.script_dir, script))
return True
s3.prep = prep
rheader = s3db.org_rheader
output = s3_rest_controller("org", "organisation",
native=False, rheader=rheader)
return output
# -----------------------------------------------------------------------------
def org_office_controller():
""" RESTful CRUD controller """
# gis = current.gis
# request = current.request
# session = current.session
s3 = current.response.s3
manager = current.manager
settings = current.deployment_settings
tablename = "org_office"
table = s3db[tablename]
# Load Models to add tabs
if settings.has_module("inv"):
manager.load("inv_inv_item")
elif settings.has_module("req"):
# (gets loaded by Inv if available)
manager.load("req_req")
if isinstance(request.vars.organisation_id, list):
request.vars.organisation_id = request.vars.organisation_id[0]
office_search = s3base.S3Search(
advanced=(s3base.S3SearchSimpleWidget(
name="office_search_text",
label=T("Search"),
comment=T("Search for office by text."),
field=["name", "comments", "email"]
),
s3base.S3SearchOptionsWidget(
name="office_search_org",
label=T("Organization"),
comment=T("Search for office by organization."),
field=["organisation_id"],
represent ="%(name)s",
cols = 3
),
s3base.S3SearchLocationHierarchyWidget(
name="office_search_location",
comment=T("Search for office by location."),
represent ="%(name)s",
cols = 3
),
s3base.S3SearchLocationWidget(
name="office_search_map",
label=T("Map"),
),
))
manager.configure(tablename,
search_method = office_search)
# Pre-processor
def prep(r):
table = r.table
if r.representation == "popup":
organisation = r.vars.organisation_id or \
session.s3.organisation_id or ""
if organisation:
table.organisation_id.default = organisation
elif r.representation == "plain":
# Map popups want less clutter
table.obsolete.readable = False
if r.record.type == 5:
s3.crud_strings[tablename].title_display = T("Warehouse Details")
if r.record and deployment_settings.has_module("hrm"):
# Cascade the organisation_id from the office to the staff
hrm_table = db.hrm_human_resource
hrm_table.organisation_id.default = r.record.organisation_id
hrm_table.organisation_id.writable = False
if r.interactive or r.representation == "aadata":
if deployment_settings.has_module("inv"):
# Filter out Warehouses, since they have a dedicated controller
response.s3.filter = (table.type != 5) | \
(table.type == None)
if r.interactive:
if settings.has_module("inv"):
# Don't include Warehouses in the type dropdown
s3.org_office_type_opts.pop(5)
table.type.requires = IS_NULL_OR(IS_IN_SET(s3.org_office_type_opts))
if r.record and r.record.type == 5: # 5 = Warehouse
s3.crud_strings[tablename] = s3.org_warehouse_crud_strings
if r.method == "create":
table.obsolete.readable = table.obsolete.writable = False
if r.vars.organisation_id and r.vars.organisation_id != "None":
table.organisation_id.default = r.vars.organisation_id
if r.method and r.method != "read":
# Don't want to see in Create forms
# inc list_create (list_fields over-rides)
table.obsolete.writable = False
table.obsolete.readable = False
s3.address_hide(table)
if r.component:
if r.component.name == "inv_item" or \
r.component.name == "recv" or \
r.component.name == "send":
# Filter out items which are already in this inventory
s3db.inv_prep(r)
elif r.component.name == "human_resource":
# Filter out people which are already staff for this office
s3_filter_staff(r)
# Cascade the organisation_id from the hospital to the staff
hrm_table.organisation_id.default = r.record.organisation_id
hrm_table.organisation_id.writable = False
elif r.component.name == "req":
if r.method != "update" and r.method != "read":
# Hide fields which don't make sense in a Create form
# inc list_create (list_fields over-rides)
s3db.req_create_form_mods()
return True
s3.prep = prep
rheader = s3db.org_rheader
return s3_rest_controller("org", "office", rheader=rheader)
# END =========================================================================
```

authors: ["fran@aidiq.com"]
author_id: fran@aidiq.com

---
blob_id: baadca2b3a178ee55b51a4210be750d6080444b5
directory_id: d5c8f7f7481c86266dfccb862e16c59132f5eb34
path: /config.py
content_id: 414128cefca0776539c2eef574cb551b9d9efe7c
detected_licenses: []
license_type: no_license
repo_name: AngleMAXIN/BlogGo
snapshot_id: 59aec933b7743ba19a3eca147ce61b864da79399
revision_id: 49dd6cd930ae059d533bcfd2f1aa3cd3197deb95
branch_name: refs/heads/master
visit_date: 2021-04-12T12:32:32.895508
revision_date: 2018-04-24T09:17:07
committer_date: 2018-04-24T09:17:07
github_id: 126,703,268
star_events_count: 0
fork_events_count: 0
gha_license_id: null
gha_event_created_at: null
gha_created_at: null
gha_language: null
src_encoding: UTF-8
language: Python
is_vendor: false
is_generated: false
length_bytes: 1,581
extension: py
content:
```python
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import os
basedir = os.path.abspath(os.path.dirname(__file__))
HOSTNAME = '127.0.0.1'
PORT = '3306'
USERNAME = 'root'
PASSWORD = 'maxin'
class Config(object):
SECRET_KEY = os.environ.get('SECRET_KEY') or 'this is a secret string'
SQLALCHEMY_TRACK_MODIFICATIONS = True
@staticmethod
def init_app(app):
pass
class DevelopmentConfig(Config):
    """Development database"""
DEBUG = True
CACHE_TYPE = 'simple'
DATABASE = 'bloggo_dev_db'
SQLALCHEMY_DATABASE_URI = 'mysql://{}:{}@{}:{}/{}?charset=utf8'.format( USERNAME,
PASSWORD,
HOSTNAME,
PORT,
DATABASE)
class TestingConfig(Config):
    """Testing database"""
TESTING = True
SQLALCHEMY_DATABASE_URI = os.environ.get('TEST_DATABASE_URI') or \
'sqlite:///' + os.path.join(basedir, 'test.db')
class ProductionConfig(Config):
    """Production database"""
SQLALCHEMY_DATABASE_URI = os.environ.get('DATABASE_URI') or \
'sqlite:///' + os.path.join(basedir, 'data.db')
config = {
'development': DevelopmentConfig,
'testing': TestingConfig,
'production': ProductionConfig,
'default': DevelopmentConfig
}
```

authors: ["1678190746@qq.com"]
author_id: 1678190746@qq.com
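
The `config` mapping at the bottom follows the common Flask application-factory pattern (each class carries settings plus an `init_app` hook). A sketch of how a factory would consume it — `create_app` here is hypothetical, since BlogGo's own factory is not part of this record:

```python
# Hypothetical app factory consuming the config mapping from config.py.
from flask import Flask
from config import config

def create_app(config_name='default'):
    app = Flask(__name__)
    app.config.from_object(config[config_name])
    config[config_name].init_app(app)
    return app
```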

---
blob_id: 8ffbcbd15ddfd36fa85faee3a276f226df7e7419
directory_id: 21f98d8bb31264c94e7a98fb8eb806d7f5bd396e
path: /Fenwick Tree/218. The Skyline Problem.py
content_id: 0ce89568e86b08184393b63f0be9409ec3eb3a60
detected_licenses: []
license_type: no_license
repo_name: mcfair/Algo
snapshot_id: e1500d862a685e598ab85e8ed5b68170632fdfd0
revision_id: 051e2a9f6c918907cc8b665353c46042e7674e66
branch_name: refs/heads/master
visit_date: 2021-10-31T01:25:46.917428
revision_date: 2021-10-08T04:52:44
committer_date: 2021-10-08T04:52:44
github_id: 141,460,584
star_events_count: 2
fork_events_count: 2
gha_license_id: null
gha_event_created_at: null
gha_created_at: null
gha_language: null
src_encoding: UTF-8
language: Python
is_vendor: false
is_generated: false
length_bytes: 1,574
extension: py
content:
```python
class Solution(object):
def getSkyline(self, buildings):
"""
:type buildings: List[List[int]]
:rtype: List[List[int]]
"""
points =[]
endsAt ={}
for L, R, H in buildings:
left, right = (L, 'left', -H), (R, 'right', -H) #'left' < 'right', don't use 'start' 'end'
points += [left,right]
endsAt[left] = right
points = sorted(list(set(points))) #remove duplicates and sort
rank = {points[i]: i for i in range(len(points))} # sorted index as rank
tree = BIT(len(points))
ret = []
for p in points:
if p[1] == 'left':
q = endsAt[p]
tree.update(rank[q], -p[-1]) # end idx is exclusive
h = tree.query(rank[p]+1) # start idx is inclusive
            # a new key point is emitted only when the running max height changes;
            # if it changes at the same x as the previous point, overwrite that point
if not ret or ret[-1][1] != h:
if ret and ret[-1][0] == p[0]:
ret[-1][1] = h
else:
ret.append([p[0], h])
return ret
class BIT(object):
def __init__(self, m):
self.bit = [0] *(m+1)
def update(self, p, h):
while p > 0: # scope of h is towards left
self.bit[p] = max(self.bit[p], h)
p -= p & -p
def query(self,p):
ret = 0
        while p < len(self.bit):  # stay inside the tree (valid indices are 1..m)
ret = max(ret, self.bit[p])
p += p & -p
return ret
```

authors: ["noreply@github.com"]
author_id: mcfair.noreply@github.com
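
The BIT here is mirrored relative to the textbook prefix-sum version: `update()` walks downward so a height becomes visible to every index at or below the interval's end, and `query()` walks upward to take a maximum over everything still active at or beyond a position. A small self-contained check of that behaviour (illustrative, not from the repo):

```python
# Self-contained check of the suffix-max Fenwick variant used above
# (same logic as the BIT class in this record, with the corrected bound).
class SuffixMaxBIT:
    def __init__(self, m):
        self.bit = [0] * (m + 1)

    def update(self, p, h):      # make h visible to every query(q) with q <= p
        while p > 0:
            self.bit[p] = max(self.bit[p], h)
            p -= p & -p

    def query(self, p):          # max over values stored at indices >= p
        ret = 0
        while p < len(self.bit):
            ret = max(ret, self.bit[p])
            p += p & -p
        return ret

t = SuffixMaxBIT(8)
t.update(5, 3)                   # height 3 "ends" at index 5
t.update(7, 9)                   # height 9 "ends" at index 7
assert t.query(6) == 9           # only the interval ending at 7 covers index 6
assert t.query(3) == 9           # both are active at index 3; the max is 9
assert t.query(8) == 0           # nothing extends to index 8
print("suffix-max BIT behaves as expected")
```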

---
blob_id: 14d9deb91f33a952aa11d3c81ab4256f362e0ba0
directory_id: f98a2875e0cdc84341fe8e37b11336368a257fe7
path: /auxiliary/accessing_comparative_data_sensitivity_runs.py
content_id: f6c3cd9c7f009bf8ac5af1f2218e353a026000d2
detected_licenses: ["MIT"]
license_type: permissive
repo_name: anhnguyendepocen/PolicySpace2
snapshot_id: eaa83533b7ad599af677ce69353841e665b447d0
revision_id: d9a450e47651885ed103d3217dbedec484456d07
branch_name: refs/heads/master
visit_date: 2023-08-28T04:55:40.834445
revision_date: 2021-10-21T18:50:03
committer_date: 2021-10-21T18:50:03
github_id: null
star_events_count: 0
fork_events_count: 0
gha_license_id: null
gha_event_created_at: null
gha_created_at: null
gha_language: null
src_encoding: UTF-8
language: Python
is_vendor: false
is_generated: false
length_bytes: 1,431
extension: py
content:
```python
import os
from shutil import copyfile
import numpy as np
import pandas as pd
from analysis.output import OUTPUT_DATA_SPEC as cols
import read_meta_json
def moving_files(path1, path2):
path = os.path.join(path1, path2)
dirs = os.listdir(path)
for each in dirs:
if '=' in each and '.csv' not in each:
t_path = os.path.join(path, each, 'avg', 'temp_stats.csv')
destination = f'{path}/{each}.csv'
copyfile(t_path, destination)
def reading_summarizing_tables(path1, path2, t, col='families_helped'):
    # 't' (the csv file name) used to leak in from module scope; take it as a parameter
    d = pd.read_csv(os.path.join(path1, path2, t), sep=';', names=cols['stats']['columns'])
    return f'{t}: avg {col} {np.mean(d[col]):.2f}\n'
if __name__ == '__main__':
p1 = r'\\storage1\carga\modelo dinamico de simulacao\Exits_python\PS2020'
fls = os.listdir(p1)
interest = [f for f in fls if f.startswith('POLICIES')]
for run in interest:
try:
moving_files(p1, run)
except FileNotFoundError:
continue
with open(f'{p1}/Report_{run}.txt', 'a') as f:
f.write(read_meta_json.read_meta(p1, run))
for each in cols['stats']['columns'][1:]:
tables = [f for f in os.listdir(os.path.join(p1, run)) if f.endswith('.csv')]
for t in tables:
                    f.write(reading_summarizing_tables(p1, run, t, each))
f.write('_____________\n')
```

authors: ["furtadobb@gmail.com"]
author_id: furtadobb@gmail.com

---
blob_id: 3421a3b55caa64222cf08320ee46daabcf0b12f6
directory_id: 2f0d56cdcc4db54f9484b3942db88d79a4215408
path: /.history/Python_Learning/lesson13_20200329144341.py
content_id: c8241db28a95319939d792a162f3f2bb4400c80a
detected_licenses: []
license_type: no_license
repo_name: xiangxing98/xiangxing98.github.io
snapshot_id: 8571c8ee8509c0bccbb6c2f3740494eedc53e418
revision_id: 23618666363ecc6d4acd1a8662ea366ddf2e6155
branch_name: refs/heads/master
visit_date: 2021-11-17T19:00:16.347567
revision_date: 2021-11-14T08:35:01
committer_date: 2021-11-14T08:35:01
github_id: 33,877,060
star_events_count: 7
fork_events_count: 1
gha_license_id: null
gha_event_created_at: 2017-07-01T16:42:49
gha_created_at: 2015-04-13T15:35:01
gha_language: HTML
src_encoding: UTF-8
language: Python
is_vendor: false
is_generated: false
length_bytes: 3,467
extension: py
content:
```python
# -*- encoding: utf-8 -*-
# !/usr/bin/env python
'''
@File : lesson13.py
@Time : 2020/03/29 11:43:27
@Author : Stone_Hou
@Version : 1.0
@Contact : xiangxing985529@163.com
@License : (C)Copyright 2010-2020, Stone_Hou
@Desc : None
'''
# here put the import lib
# Practice #01
print('He said,"I\'m yours!"')
# He said, "I'm yours!"
# Practice #02
print('\\\\_v_//')
# \\_v_//
# Practice #03
print("Stay hungry, \n\
stay foolish.\n\
-- Steve Jobs")
# Stay hungry,
# stay foolish.
# -- Steve Jobs
# Practice #04
# print("----\n")
# n = 5
# for i in range(1, n+1):
# for j in range(0, (n + 1)/2):
# print("*")
# for j in range(0, (n + 1)/2):
# print(" ")
# print("")
# print("----\n")
# Row 1: one *, four spaces
# Row 2: one *, four spaces
# *
# ***
# *****
# ***
# *
# Practice #05
# Read a value n >= 1 and print an isosceles triangle of asterisks (*) with base length n
# Example:
# input:
# 3
# output:
# *
# * *
# * * *
print("\nIsosceles triangle of asterisks (*) with base length n\n")
n = int(input())
for i in range(1, n+1):  # n rows in total
    for j in range(0, n-i):  # row i needs n-i leading spaces
        print(' ', end='')
    for j in range(0, i):  # row i needs i asterisks
        print('*', end='')
    print()  # end the row
print("-------------------------\n")
# Lower-left triangle: after "for i in range(10):", use range(0, i)
# Upper-right triangle: same as lower-left, but turn "-" into a " " space
print("-------------------\nLower-left triangle\n")
for i in range(10):
for j in range(0, i):
print("-", end=" ")
for j in range(i, 10):
print("$", end=" ")
print("")
print("-------------------\n")
# Lower-left triangle
# $ $ $ $ $ $ $ $ $ $
# - $ $ $ $ $ $ $ $ $
# - - $ $ $ $ $ $ $ $
# - - - $ $ $ $ $ $ $
# - - - - $ $ $ $ $ $
# - - - - - $ $ $ $ $
# - - - - - - $ $ $ $
# - - - - - - - $ $ $
# - - - - - - - - $ $
# - - - - - - - - - $
# -------------------
# Upper-left triangle: after "for i in range(10):", use range(0, 10-i)
# Lower-right triangle: same as upper-left, but turn "-" into a " " space
print("-------------------\nUpper-left triangle\n")
for i in range(10):
for j in range(0, 10 - i):
print("-", end=" ")
for k in range(10 - i, 10):
print("$", end=" ")
print("")
print("-------------------\n")
# -------------------
# Upper-left triangle
# - - - - - - - - - -
# - - - - - - - - - $
# - - - - - - - - $ $
# - - - - - - - $ $ $
# - - - - - - $ $ $ $
# - - - - - $ $ $ $ $
# - - - - $ $ $ $ $ $
# - - - $ $ $ $ $ $ $
# - - $ $ $ $ $ $ $ $
# - $ $ $ $ $ $ $ $ $
# -------------------
# Upper triangle: simply drop the "-"
print("-------------------\nUpper triangle\n")
for i in range(10):
for j in range(0, 10 - i):
print(end=" ")
for k in range(10 - i, 10):
print("$", end=" ")
print("")
print("-------------------\n")
# -------------------
# Upper triangle
# $
# $ $
# $ $ $
# $ $ $ $
# $ $ $ $ $
# $ $ $ $ $ $
# $ $ $ $ $ $ $
# $ $ $ $ $ $ $ $
# $ $ $ $ $ $ $ $ $
# -------------------
# Inverted triangle: simply drop the "-"
print("-------------------\nInverted triangle\n")
for i in range(10):
for j in range(0,i):
print(end=" ")
for j in range(i,10):
print("$", end=" ")
print("")
print("-------------------\n")
```

authors: ["xiangxing985529@163.com"]
author_id: xiangxing985529@163.com

---
blob_id: d2281b34b9b3168f241bc8f861adadeb4a0716cc
directory_id: da4303d7ee77310c5a21997f1bfe0746affa3e0c
path: /papers/resources/best.py
content_id: 395be833ef41ff557a22bc6013b545f7f5ac067d
detected_licenses: ["MIT"]
license_type: permissive
repo_name: AaronCWacker/copycat
snapshot_id: 44bf0626965cfba12c304d9e44fe848ce0791cc7
revision_id: c05e3e832ce614e8ccea9b47cb1d3ac31d2b18c2
branch_name: refs/heads/master
visit_date: 2022-04-09T03:45:41.076460
revision_date: 2020-03-26T11:39:11
committer_date: 2020-03-26T11:39:11
github_id: 378,260,131
star_events_count: 1
fork_events_count: 0
gha_license_id: MIT
gha_event_created_at: 2021-06-18T20:38:32
gha_created_at: 2021-06-18T20:38:31
gha_language: null
src_encoding: UTF-8
language: Python
is_vendor: false
is_generated: false
length_bytes: 577
extension: py
content:
```python
# Note: _weighted() is not defined in this file; it lives elsewhere in the
# copycat sources this snippet was extracted from.
def _working_best(temp, prob):
s = .5 # convergence
r = 1.05 # power
u = prob ** r if prob < .5 else prob ** (1/r)
return _weighted(temp, prob, s, u)
def _soft_best(temp, prob):
s = .5 # convergence
r = 1.05 # power
u = prob ** r if prob < .5 else prob ** (1/r)
return _weighted(temp, prob, s, u)
def _parameterized_best(temp, prob):
alpha = 5
beta = 1
s = .5
s = (alpha * prob + beta * s) / (alpha + beta)
r = 1.05
u = prob ** r if prob < .5 else prob ** (1/r)
return _weighted(temp, prob, s, u)
```

authors: ["lucassaldyt@gmail.com"]
author_id: lucassaldyt@gmail.com
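
All three variants delegate to `_weighted`, which is not part of this snippet. As a loose sketch of the shape such a helper could take — an assumption for illustration, not copycat's actual code — it blends the distorted probability `u` toward the convergence point `s` as temperature rises:

```python
def _weighted(temp, prob, s, u):
    # Hypothetical stand-in: 'temp' assumed on a 0..100 scale.
    # Hot system -> lean on the convergence value s; cold -> trust u.
    w = temp / 100.0
    return w * s + (1.0 - w) * u
```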

---
blob_id: 1941bd362fac4df26ea58880cfaabd40d3057590
directory_id: 4ece3041f2ed0cd312dc70fd3c7c240924dbb6ae
path: /tests/test_cursor.py
content_id: 51eb95ae71efcaa65788113e1657e63128e0a442
detected_licenses: ["MIT"]
license_type: permissive
repo_name: ivssh/PyAthena
snapshot_id: 175c5dfff0289a7ceccfe9a47ac490985535f669
revision_id: 156c51f19b46ea2f89612b3383937d78942bc990
branch_name: refs/heads/master
visit_date: 2020-03-27T13:07:58.417397
revision_date: 2018-07-21T13:08:41
committer_date: 2018-07-21T13:08:41
github_id: null
star_events_count: 0
fork_events_count: 0
gha_license_id: null
gha_event_created_at: null
gha_created_at: null
gha_language: null
src_encoding: UTF-8
language: Python
is_vendor: false
is_generated: false
length_bytes: 14,391
extension: py
content:
```python
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from __future__ import unicode_literals
import contextlib
import re
import time
import unittest
from concurrent import futures
from concurrent.futures.thread import ThreadPoolExecutor
from datetime import date, datetime
from decimal import Decimal
from random import randint
from past.builtins.misc import xrange
from pyathena import BINARY, BOOLEAN, DATE, DATETIME, NUMBER, STRING, connect
from pyathena.cursor import Cursor
from pyathena.error import DatabaseError, NotSupportedError, ProgrammingError
from pyathena.model import AthenaQueryExecution
from tests.conftest import ENV, S3_PREFIX, SCHEMA
from tests.util import with_cursor
class TestCursor(unittest.TestCase):
"""Reference test case is following:
https://github.com/dropbox/PyHive/blob/master/pyhive/tests/dbapi_test_case.py
https://github.com/dropbox/PyHive/blob/master/pyhive/tests/test_hive.py
https://github.com/dropbox/PyHive/blob/master/pyhive/tests/test_presto.py
"""
def connect(self):
return connect(schema_name=SCHEMA)
@with_cursor
def test_fetchone(self, cursor):
cursor.execute('SELECT * FROM one_row')
self.assertEqual(cursor.rownumber, 0)
self.assertEqual(cursor.fetchone(), (1,))
self.assertEqual(cursor.rownumber, 1)
self.assertIsNone(cursor.fetchone())
self.assertIsNotNone(cursor.query_id)
self.assertIsNotNone(cursor.query)
self.assertEqual(cursor.state, AthenaQueryExecution.STATE_SUCCEEDED)
self.assertIsNone(cursor.state_change_reason)
self.assertIsNotNone(cursor.completion_date_time)
self.assertIsInstance(cursor.completion_date_time, datetime)
self.assertIsNotNone(cursor.submission_date_time)
self.assertIsInstance(cursor.submission_date_time, datetime)
self.assertIsNotNone(cursor.data_scanned_in_bytes)
self.assertIsNotNone(cursor.execution_time_in_millis)
self.assertIsNotNone(cursor.output_location)
@with_cursor
def test_fetchmany(self, cursor):
cursor.execute('SELECT * FROM many_rows LIMIT 15')
self.assertEqual(len(cursor.fetchmany(10)), 10)
self.assertEqual(len(cursor.fetchmany(10)), 5)
@with_cursor
def test_fetchall(self, cursor):
cursor.execute('SELECT * FROM one_row')
self.assertEqual(cursor.fetchall(), [(1,)])
cursor.execute('SELECT a FROM many_rows ORDER BY a')
self.assertEqual(cursor.fetchall(), [(i,) for i in xrange(10000)])
@with_cursor
def test_iterator(self, cursor):
cursor.execute('SELECT * FROM one_row')
self.assertEqual(list(cursor), [(1,)])
self.assertRaises(StopIteration, cursor.__next__)
@with_cursor
def test_arraysize(self, cursor):
cursor.arraysize = 5
cursor.execute('SELECT * FROM many_rows LIMIT 20')
self.assertEqual(len(cursor.fetchmany()), 5)
@with_cursor
def test_arraysize_default(self, cursor):
self.assertEqual(cursor.arraysize, Cursor.DEFAULT_FETCH_SIZE)
@with_cursor
def test_invalid_arraysize(self, cursor):
with self.assertRaises(ProgrammingError):
cursor.arraysize = 10000
with self.assertRaises(ProgrammingError):
cursor.arraysize = -1
@with_cursor
def test_description(self, cursor):
cursor.execute('SELECT 1 AS foobar FROM one_row')
self.assertEqual(cursor.description,
[('foobar', 'integer', None, None, 10, 0, 'UNKNOWN')])
@with_cursor
def test_description_initial(self, cursor):
self.assertIsNone(cursor.description)
@with_cursor
def test_description_failed(self, cursor):
try:
cursor.execute('blah_blah')
except DatabaseError:
pass
self.assertIsNone(cursor.description)
@with_cursor
def test_bad_query(self, cursor):
def run():
cursor.execute('SELECT does_not_exist FROM this_really_does_not_exist')
cursor.fetchone()
self.assertRaises(DatabaseError, run)
@with_cursor
def test_fetch_no_data(self, cursor):
self.assertRaises(ProgrammingError, cursor.fetchone)
self.assertRaises(ProgrammingError, cursor.fetchmany)
self.assertRaises(ProgrammingError, cursor.fetchall)
@with_cursor
def test_null_param(self, cursor):
cursor.execute('SELECT %(param)s FROM one_row', {'param': None})
self.assertEqual(cursor.fetchall(), [(None,)])
@with_cursor
def test_no_params(self, cursor):
self.assertRaises(DatabaseError, lambda: cursor.execute(
'SELECT %(param)s FROM one_row'))
self.assertRaises(KeyError, lambda: cursor.execute(
'SELECT %(param)s FROM one_row', {'a': 1}))
@with_cursor
def test_contain_special_character_query(self, cursor):
cursor.execute("""
SELECT col_string FROM one_row_complex
WHERE col_string LIKE '%str%'
""")
self.assertEqual(cursor.fetchall(), [('a string', )])
cursor.execute("""
SELECT col_string FROM one_row_complex
WHERE col_string LIKE '%%str%%'
""")
self.assertEqual(cursor.fetchall(), [('a string', )])
cursor.execute("""
SELECT col_string, '%' FROM one_row_complex
WHERE col_string LIKE '%str%'
""")
self.assertEqual(cursor.fetchall(), [('a string', '%')])
cursor.execute("""
SELECT col_string, '%%' FROM one_row_complex
WHERE col_string LIKE '%%str%%'
""")
self.assertEqual(cursor.fetchall(), [('a string', '%%')])
@with_cursor
def test_contain_special_character_query_with_parameter(self, cursor):
self.assertRaises(TypeError, lambda: cursor.execute(
"""
SELECT col_string, %(param)s FROM one_row_complex
WHERE col_string LIKE '%str%'
""", {'param': 'a string'}))
cursor.execute(
"""
SELECT col_string, %(param)s FROM one_row_complex
WHERE col_string LIKE '%%str%%'
""", {'param': 'a string'})
self.assertEqual(cursor.fetchall(), [('a string', 'a string')])
self.assertRaises(ValueError, lambda: cursor.execute(
"""
SELECT col_string, '%' FROM one_row_complex
WHERE col_string LIKE %(param)s
""", {'param': '%str%'}))
cursor.execute(
"""
SELECT col_string, '%%' FROM one_row_complex
WHERE col_string LIKE %(param)s
""", {'param': '%str%'})
self.assertEqual(cursor.fetchall(), [('a string', '%')])
def test_escape(self):
bad_str = """`~!@#$%^&*()_+-={}[]|\\;:'",./<>?\n\r\t """
self.run_escape_case(bad_str)
@with_cursor
def run_escape_case(self, cursor, bad_str):
cursor.execute('SELECT %(a)d, %(b)s FROM one_row', {'a': 1, 'b': bad_str})
self.assertEqual(cursor.fetchall(), [(1, bad_str,)])
@with_cursor
def test_none_empty_query(self, cursor):
self.assertRaises(ProgrammingError, lambda: cursor.execute(None))
self.assertRaises(ProgrammingError, lambda: cursor.execute(''))
@with_cursor
def test_invalid_params(self, cursor):
self.assertRaises(TypeError, lambda: cursor.execute(
'SELECT * FROM one_row', {'foo': {'bar': 1}}))
def test_open_close(self):
with contextlib.closing(self.connect()):
pass
with contextlib.closing(self.connect()) as conn:
with conn.cursor():
pass
@with_cursor
def test_unicode(self, cursor):
unicode_str = '王兢'
cursor.execute('SELECT %(param)s FROM one_row', {'param': unicode_str})
self.assertEqual(cursor.fetchall(), [(unicode_str,)])
@with_cursor
def test_null(self, cursor):
cursor.execute('SELECT null FROM many_rows')
self.assertEqual(cursor.fetchall(), [(None,)] * 10000)
cursor.execute('SELECT IF(a % 11 = 0, null, a) FROM many_rows')
self.assertEqual(cursor.fetchall(),
[(None if a % 11 == 0 else a,) for a in xrange(10000)])
@with_cursor
def test_query_id(self, cursor):
self.assertIsNone(cursor.query_id)
cursor.execute('SELECT * from one_row')
# query_id is UUID v4
expected_pattern = \
r'^[0-9a-f]{8}-[0-9a-f]{4}-[4][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}$'
self.assertTrue(re.match(expected_pattern, cursor.query_id))
@with_cursor
def test_output_location(self, cursor):
self.assertIsNone(cursor.output_location)
cursor.execute('SELECT * from one_row')
self.assertEqual(cursor.output_location,
'{0}{1}.csv'.format(ENV.s3_staging_dir, cursor.query_id))
@with_cursor
def test_query_execution_initial(self, cursor):
self.assertFalse(cursor.has_result_set)
self.assertIsNone(cursor.rownumber)
self.assertIsNone(cursor.query_id)
self.assertIsNone(cursor.query)
self.assertIsNone(cursor.state)
self.assertIsNone(cursor.state_change_reason)
self.assertIsNone(cursor.completion_date_time)
self.assertIsNone(cursor.submission_date_time)
self.assertIsNone(cursor.data_scanned_in_bytes)
self.assertIsNone(cursor.execution_time_in_millis)
self.assertIsNone(cursor.output_location)
@with_cursor
def test_complex(self, cursor):
cursor.execute("""
SELECT
col_boolean
,col_tinyint
,col_smallint
,col_int
,col_bigint
,col_float
,col_double
,col_string
,col_timestamp
,col_date
,col_binary
,col_array
,col_map
,col_struct
,col_decimal
FROM one_row_complex
""")
self.assertEqual(cursor.description, [
('col_boolean', 'boolean', None, None, 0, 0, 'UNKNOWN'),
('col_tinyint', 'tinyint', None, None, 3, 0, 'UNKNOWN'),
('col_smallint', 'smallint', None, None, 5, 0, 'UNKNOWN'),
('col_int', 'integer', None, None, 10, 0, 'UNKNOWN'),
('col_bigint', 'bigint', None, None, 19, 0, 'UNKNOWN'),
('col_float', 'float', None, None, 17, 0, 'UNKNOWN'),
('col_double', 'double', None, None, 17, 0, 'UNKNOWN'),
('col_string', 'varchar', None, None, 2147483647, 0, 'UNKNOWN'),
('col_timestamp', 'timestamp', None, None, 3, 0, 'UNKNOWN'),
('col_date', 'date', None, None, 0, 0, 'UNKNOWN'),
('col_binary', 'varbinary', None, None, 1073741824, 0, 'UNKNOWN'),
('col_array', 'array', None, None, 0, 0, 'UNKNOWN'),
('col_map', 'map', None, None, 0, 0, 'UNKNOWN'),
('col_struct', 'row', None, None, 0, 0, 'UNKNOWN'),
('col_decimal', 'decimal', None, None, 10, 1, 'UNKNOWN'),
])
rows = cursor.fetchall()
expected = [(
True,
127,
32767,
2147483647,
9223372036854775807,
0.5,
0.25,
'a string',
datetime(2017, 1, 1, 0, 0, 0),
date(2017, 1, 2),
b'123',
'[1, 2]',
'{1=2, 3=4}',
'{a=1, b=2}',
Decimal('0.1'),
)]
self.assertEqual(rows, expected)
# catch unicode/str
self.assertEqual(list(map(type, rows[0])), list(map(type, expected[0])))
# compare dbapi type object
self.assertEqual([d[1] for d in cursor.description], [
BOOLEAN,
NUMBER,
NUMBER,
NUMBER,
NUMBER,
NUMBER,
NUMBER,
STRING,
DATETIME,
DATE,
BINARY,
STRING,
STRING,
STRING,
NUMBER,
])
@with_cursor
def test_cancel(self, cursor):
def cancel(c):
time.sleep(randint(1, 5))
c.cancel()
with ThreadPoolExecutor(max_workers=1) as executor:
executor.submit(cancel, cursor)
self.assertRaises(DatabaseError, lambda: cursor.execute("""
SELECT a.a * rand(), b.a * rand()
FROM many_rows a
CROSS JOIN many_rows b
"""))
@with_cursor
def test_cancel_initial(self, cursor):
self.assertRaises(ProgrammingError, cursor.cancel)
def test_multiple_connection(self):
def execute_other_thread():
with contextlib.closing(connect(schema_name=SCHEMA)) as conn:
with conn.cursor() as cursor:
cursor.execute('SELECT * FROM one_row')
return cursor.fetchall()
with ThreadPoolExecutor(max_workers=2) as executor:
fs = [executor.submit(execute_other_thread) for _ in range(2)]
for f in futures.as_completed(fs):
self.assertEqual(f.result(), [(1,)])
def test_no_ops(self):
conn = self.connect()
cursor = conn.cursor()
self.assertEqual(cursor.rowcount, -1)
cursor.setinputsizes([])
cursor.setoutputsize(1, 'blah')
self.assertRaises(NotSupportedError, lambda: cursor.executemany(
'SELECT * FROM one_row', []))
conn.commit()
self.assertRaises(NotSupportedError, lambda: conn.rollback())
cursor.close()
conn.close()
@with_cursor
def test_show_partition(self, cursor):
location = '{0}{1}/{2}/'.format(
ENV.s3_staging_dir, S3_PREFIX, 'partition_table')
for i in xrange(10):
cursor.execute("""
ALTER TABLE partition_table ADD PARTITION (b=%(b)d)
LOCATION %(location)s
""", {'b': i, 'location': location})
cursor.execute('SHOW PARTITIONS partition_table')
self.assertEqual(sorted(cursor.fetchall()),
[('b={0}'.format(i),) for i in xrange(10)])
```

authors: ["laughingman7743@gmail.com"]
author_id: laughingman7743@gmail.com
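
The tests lean on a `with_cursor` decorator imported from `tests.util`, which is not included in this record. A minimal sketch of what such a decorator plausibly does — open a connection via `self.connect()`, hand the test a cursor, and clean up afterwards:

```python
import contextlib
import functools

def with_cursor(fn):
    # Sketch of tests/util.py's decorator (assumed, not shown in this record):
    # inject a fresh cursor as the test method's second argument.
    @functools.wraps(fn)
    def wrapped(self, *args, **kwargs):
        with contextlib.closing(self.connect()) as conn:
            with conn.cursor() as cursor:
                return fn(self, cursor, *args, **kwargs)
    return wrapped
```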

---
blob_id: c29a0940b61699c23849ff51675b3dd3b4b3a82f
directory_id: 2af6a5c2d33e2046a1d25ae9dd66d349d3833940
path: /res/scripts/client/gui/scaleform/daapi/view/lobby/boosters/__init__.py
content_id: 8931986bafcc9a61774fc38ab7ba64eb901eeea7
detected_licenses: []
license_type: no_license
repo_name: webiumsk/WOT-0.9.12-CT
snapshot_id: e6c8b5bb106fad71b5c3056ada59fb1aebc5f2b2
revision_id: 2506e34bd6634ad500b6501f4ed4f04af3f43fa0
branch_name: refs/heads/master
visit_date: 2021-01-10T01:38:38.080814
revision_date: 2015-11-11T00:08:04
committer_date: 2015-11-11T00:08:04
github_id: 45,803,240
star_events_count: 0
fork_events_count: 0
gha_license_id: null
gha_event_created_at: null
gha_created_at: null
gha_language: null
src_encoding: WINDOWS-1250
language: Python
is_vendor: false
is_generated: false
length_bytes: 414
extension: py
content:
```python
# 2015.11.10 21:26:35 Central Europe (standard time)
# Embedded file name: scripts/client/gui/Scaleform/daapi/view/lobby/boosters/__init__.py
__author__ = 'i_malashenko'
# okay decompyling c:\Users\PC\wotsources\files\originals\res\scripts\client\gui\scaleform\daapi\view\lobby\boosters\__init__.pyc
# decompiled 1 files: 1 okay, 0 failed, 0 verify failed
# 2015.11.10 21:26:35 Central Europe (standard time)
```

authors: ["info@webium.sk"]
author_id: info@webium.sk

---
blob_id: 908b043137e9fe004778131309d0327786b0e893
directory_id: a3635ea0c475cc7e4655ed786e870361eede6464
path: /format/_fix_incomplete_iso9660/directory_record.py
content_id: 1d2a9176ba8e8af6f6fc5efb0ddb612f1b9995c8
detected_licenses: []
license_type: no_license
repo_name: Nicba1010/PS-Tools
snapshot_id: 91802ce37309ef0dcd52771a8c2c0c3c2f6c4a8b
revision_id: 2c2b7a83b4bea39be4459bb980adfd37bd44c2be
branch_name: refs/heads/master
visit_date: 2020-04-14T00:25:09.976960
revision_date: 2019-03-05T09:14:12
committer_date: 2019-03-05T09:14:12
github_id: 163,532,843
star_events_count: 4
fork_events_count: 0
gha_license_id: null
gha_event_created_at: null
gha_created_at: null
gha_language: null
src_encoding: UTF-8
language: Python
is_vendor: false
is_generated: false
length_bytes: 2,805
extension: py
content:
```python
from datetime import datetime
from aenum import IntFlag
from format._fix_incomplete_iso9660.errors import InvalidISOException
from format._fix_incomplete_iso9660.utils import unpack_both_endian_u32, unpack_directory_record_datetime, \
unpack_both_endian_u16, unpack_str_a
from utils.utils import unpack_u8
class FileFlags(IntFlag):
EXISTENCE = 1 # If TRUE, user doesn't need to know about file
DIRECTORY = 2 # If TRUE, record identifies a directory
ASSOCIATED_FILE = 4 # If TRUE, file is an Associated File
RECORD = 8 # If TRUE, structure in file has a format specified in the Extended Attribute Record
PROTECTION = 16 # If TRUE, owner specified for file also one+ of the even bits is set to 1 in the EAR
RESERVED_1 = 32
RESERVED_2 = 64
MULTI_EXTENT = 128 # If TRUE, not the final Directory Record for the file
class DirectoryRecord(object):
def __init__(self, data: bytes):
#: Directory Record Length
self.length: int = unpack_u8(data[0:1])
#: Extended Attribute Record Length
self.extended_attribute_record_length: int = unpack_u8(data[1:2])
#: Location of LBA LSB&MSB
self.lba_location: int = unpack_both_endian_u32(data[2:10])
#: Data Length LSB&MSB
self.data_length: int = unpack_both_endian_u32(data[10:18])
#: Recording Date and Time
self.recording_datetime: datetime = unpack_directory_record_datetime(data[18:25])
#: File Flags
self.file_flags: FileFlags = FileFlags(data[25])
#: File Unit Size
self.file_unit_size: int = unpack_u8(data[26:27])
#: Interleave gap size for files recorded in interleaved mode, 0x00 otherwise
self.interleave_gap: int = unpack_u8(data[27:28])
#: Volume Sequence Number (Number of Disk this is recorded on) LSB & MSB
self.volume_sequence_number: int = unpack_both_endian_u16(data[28:32])
#: File Identifier Length (File Name)
self.file_identifier_length: int = unpack_u8(data[32:33])
file_identifier_end_offset: int = 33 + self.file_identifier_length
#: File Identifier
self.file_identifier: str = unpack_str_a(data[33:33 + self.file_identifier_length])
#: Padding (None if file identifier length even, the field does not exist in that case)
self.padding: int
if self.file_identifier_length % 2 == 0 or self.file_identifier == b'\x00'.decode('ASCII'):
self.padding = None
else:
self.padding = data[file_identifier_end_offset]
if self.padding is not None and self.padding != 0x00:
            raise InvalidISOException(f"Directory Record Padding is {hex(self.padding)} instead of 0x00 or None")
# TODO: Implement Extensions! Also System Use Field!
```

authors: ["nicba1010@gmail.com"]
author_id: nicba1010@gmail.com
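
The parser relies on helpers such as `unpack_both_endian_u32` imported from the project's utils module, which is not included in this record. ISO 9660 stores multi-byte integers in a both-endian layout: a little-endian copy immediately followed by a big-endian copy of the same value. A plausible sketch of that helper, offered as an assumption rather than the project's actual code:

```python
import struct

def unpack_both_endian_u32(data: bytes) -> int:
    # Sketch: decode an ISO 9660 both-endian uint32 (LSB copy, then MSB copy).
    lsb = struct.unpack('<I', data[0:4])[0]  # little-endian half
    msb = struct.unpack('>I', data[4:8])[0]  # big-endian half
    if lsb != msb:
        raise ValueError(f"both-endian field mismatch: {data!r}")
    return lsb
```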

---
blob_id: 1763030ef95c32889f4303497003b01b717874d2
directory_id: f0d713996eb095bcdc701f3fab0a8110b8541cbb
path: /Fz92j7nQEkoRXhRE7_8.py
content_id: 4c783b20eedaf0eec0f44af589ca079bcadde34b
detected_licenses: []
license_type: no_license
repo_name: daniel-reich/turbo-robot
snapshot_id: feda6c0523bb83ab8954b6d06302bfec5b16ebdf
revision_id: a7a25c63097674c0a81675eed7e6b763785f1c41
branch_name: refs/heads/main
visit_date: 2023-03-26T01:55:14.210264
revision_date: 2021-03-23T16:08:01
committer_date: 2021-03-23T16:08:01
github_id: 350,773,815
star_events_count: 0
fork_events_count: 0
gha_license_id: null
gha_event_created_at: null
gha_created_at: null
gha_language: null
src_encoding: UTF-8
language: Python
is_vendor: false
is_generated: false
length_bytes: 2,021
extension: py
content:
```python
"""
A frog wants to cross a river. Unfortunately, he can't jump across in a single
leap. Luckily, there are `n` stones in the river.
The frog can jump from the near bank to stone `1` and from stone `n` to the
far bank. He can also jump from stone to stone, forward and backward. However,
on each stone, a number `j` is written and he must only jump exactly `j`
stones backward or forward.
Return the minimum number of jumps to cross the river (including jumps to the
first stone and from the last stone (or any other stone, if possible) to the
far bank) or `no chance :-(` if it's not possible to cross the river.
### Examples
jumping_frog(5, [1, 1, 1, 1, 1]) ➞ 6
jumping_frog(5, [1, 3, 1, 1, 1]) ➞ 4
jumping_frog(5, [1, 1, 0, 1, 1]) ➞ "no chance :-("
### Notes
* The frog may also reach the far bank from another stone than `n` if a large enough number is written on it.
* `n` is at least 2.
"""
def jumping_frog(n, stones):
if n < 2:
return None
jmpCount = 1 # one jump from the left bank to 1st element
i = 0
while i < (n - 1): # visit each stone except the last, (n-1) is the last stone
distanceToEnd = (n - 1) - i
jmpValue = stones[i]
        # can we jump ahead? (i.e., is there distance left to the last stone)
if 0 < jmpValue <= distanceToEnd:
            # check if we can jump backward (without bouncing back past the left bank)
if (i - jmpValue) >= 0 and stones[i - jmpValue] - 2 * jmpValue > stones[i + jmpValue]:
i -= jmpValue
else:
i += jmpValue
# increase jump counter
jmpCount += 1
# if distance == 0, we reached the end
if (n - 1) - i == 0:
return jmpCount + 1
        elif 0 < jmpValue > distanceToEnd:  # the stone's number is big enough to reach the far bank directly
return jmpCount + 1
        elif jmpValue == 0 and distanceToEnd > 0:  # stuck on a zero before the end
return "no chance :-("
```

authors: ["daniel.reich@danielreichs-MacBook-Pro.local"]
author_id: daniel.reich@danielreichs-MacBook-Pro.local
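
The solution above walks the stones with a hand-rolled forward/backward heuristic. Since the problem is a shortest-path question on a small graph (each stone has at most two exact-jump neighbours), a breadth-first search is the textbook way to get the minimum jump count. A self-contained alternative sketch, not the author's method:

```python
from collections import deque

def jumping_frog_bfs(n, stones):
    # Indices 0..n-1 are stones; reaching index >= n means the far bank.
    # One jump gets the frog from the near bank onto stone 1 (index 0).
    dist = {0: 1}
    q = deque([0])
    while q:
        i = q.popleft()
        j = stones[i]
        if j == 0:
            continue
        if i + j >= n:                # far bank reachable from this stone
            return dist[i] + 1
        for nxt in (i + j, i - j):    # exact jumps forward or backward
            if 0 <= nxt < n and nxt not in dist:
                dist[nxt] = dist[i] + 1
                q.append(nxt)
    return "no chance :-("

assert jumping_frog_bfs(5, [1, 1, 1, 1, 1]) == 6
assert jumping_frog_bfs(5, [1, 3, 1, 1, 1]) == 4
assert jumping_frog_bfs(5, [1, 1, 0, 1, 1]) == "no chance :-("
```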

---
blob_id: f9b655af2e1bbd8ac1b50e7fc3c73ce38f411749
directory_id: 19da1a56f137a08772c347cf974be54e9c23c053
path: /lib/adafruit_lidarlite.py
content_id: 1893b50a7169d64063eb64031a74fdd87eb79bd7
detected_licenses: []
license_type: no_license
repo_name: mk53202/mk53202-timeclock-pyportal
snapshot_id: d94f45a9d186190a4bc6130077baa6743a816ef3
revision_id: 230a858d429f8197c00cab3e67dcfd3b295ffbe0
branch_name: refs/heads/master
visit_date: 2021-02-04T05:38:25.533292
revision_date: 2020-02-27T22:45:56
committer_date: 2020-02-27T22:45:56
github_id: 243,626,362
star_events_count: 0
fork_events_count: 0
gha_license_id: null
gha_event_created_at: null
gha_created_at: null
gha_language: null
src_encoding: UTF-8
language: Python
is_vendor: false
is_generated: false
length_bytes: 7,421
extension: py
content:
```python
# The MIT License (MIT)
#
# Copyright (c) 2018 ladyada for adafruit industries
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
"""
`adafruit_lidarlite`
====================================================
A CircuitPython & Python library for Garmin LIDAR Lite sensors over I2C
* Author(s): ladyada
Implementation Notes
--------------------
**Hardware:**
**Software and Dependencies:**
* Adafruit CircuitPython firmware for the supported boards:
https://github.com/adafruit/circuitpython/releases
* Adafruit's Bus Device library: https://github.com/adafruit/Adafruit_CircuitPython_BusDevice
"""
# imports
import time
from adafruit_bus_device.i2c_device import I2CDevice
from digitalio import Direction
from micropython import const
__version__ = "1.1.1"
__repo__ = "https://github.com/adafruit/Adafruit_CircuitPython_LIDARLite.git"
_ADDR_DEFAULT = const(0x62)
_REG_ACQ_COMMAND = const(0x00)
_CMD_RESET = const(0)
_CMD_DISTANCENOBIAS = const(3)
_CMD_DISTANCEWITHBIAS = const(4)
CONFIG_DEFAULT = 0
CONFIG_SHORTFAST = 1
CONFIG_DEFAULTFAST = 2
CONFIG_MAXRANGE = 3
CONFIG_HIGHSENSITIVE = 4
CONFIG_LOWSENSITIVE = 5
STATUS_BUSY = 0x01
STATUS_REF_OVERFLOW = 0x02
STATUS_SIGNAL_OVERFLOW = 0x04
STATUS_NO_PEAK = 0x08
STATUS_SECOND_RETURN = 0x10
STATUS_HEALTHY = 0x20
STATUS_SYS_ERROR = 0x40
# The various configuration register values, from arduino library
_LIDAR_CONFIGS = ((0x80, 0x08, 0x00), # default
(0x1D, 0x08, 0x00), # short range, high speed
(0x80, 0x00, 0x00), # default range, higher speed short range
(0xFF, 0x08, 0x00), # maximum range
(0x80, 0x08, 0x80), # high sensitivity & error
(0x80, 0x08, 0xb0)) # low sensitivity & error
class LIDARLite:
"""
A driver for the Garmin LIDAR Lite laser distance sensor.
:param i2c_bus: The `busio.I2C` object to use. This is the only
required parameter.
:param int address: (optional) The I2C address of the device to set after initialization.
"""
def __init__(self, i2c_bus, *, reset_pin=None,
configuration=CONFIG_DEFAULT, address=_ADDR_DEFAULT):
"""Initialize the hardware for the LIDAR over I2C. You can pass in an
optional reset_pin for when you call reset(). There are a few common
configurations Garmin suggests: CONFIG_DEFAULT, CONFIG_SHORTFAST,
CONFIG_DEFAULTFAST, CONFIG_MAXRANGE, CONFIG_HIGHSENSITIVE, and
CONFIG_LOWSENSITIVE. For the I2C address, the default is 0x62 but if you
pass a different number in, we'll try to change the address so multiple
LIDARs can be connected. (Note all but one need to be in reset for this
to work!)"""
self.i2c_device = I2CDevice(i2c_bus, address)
self._buf = bytearray(2)
self._bias_count = 0
self._reset = reset_pin
time.sleep(0.5)
self.configure(configuration)
self._status = self.status
def reset(self):
"""Hardware reset (if pin passed into init) or software reset. Will take
100 readings in order to 'flush' measurement unit, otherwise data is off."""
# Optional hardware reset pin
if self._reset is not None:
self._reset.direction = Direction.OUTPUT
self._reset.value = True
self._reset.value = False
time.sleep(0.01)
self._reset.value = True
else:
try:
self._write_reg(_REG_ACQ_COMMAND, _CMD_RESET)
except OSError:
                pass  # it doesn't respond well once reset
time.sleep(1)
# take 100 readings to 'flush' out sensor!
for _ in range(100):
try:
self.read_distance(True)
except RuntimeError:
pass
def configure(self, config):
"""Set the LIDAR desired style of measurement. There are a few common
configurations Garmin suggests: CONFIG_DEFAULT, CONFIG_SHORTFAST,
CONFIG_DEFAULTFAST, CONFIG_MAXRANGE, CONFIG_HIGHSENSITIVE, and
CONFIG_LOWSENSITIVE."""
settings = _LIDAR_CONFIGS[config]
self._write_reg(0x02, settings[0])
self._write_reg(0x04, settings[1])
self._write_reg(0x1c, settings[2])
def read_distance(self, bias=False):
"""Perform a distance reading with or without 'bias'. It's recommended
to take a bias measurement every 100 non-bias readings (they're slower)"""
if bias:
self._write_reg(_REG_ACQ_COMMAND, _CMD_DISTANCEWITHBIAS)
else:
self._write_reg(_REG_ACQ_COMMAND, _CMD_DISTANCENOBIAS)
dist = self._read_reg(0x8F, 2)
if self._status & (STATUS_NO_PEAK | STATUS_SECOND_RETURN):
raise RuntimeError("Measurement failure")
if (self._status & STATUS_SYS_ERROR) or (not self._status & STATUS_HEALTHY):
raise RuntimeError("System failure")
return dist[0] << 8 | dist[1]
@property
def distance(self):
"""The measured distance in cm. Will take a bias reading every 100 calls"""
self._bias_count -= 1
if self._bias_count < 0:
self._bias_count = 100 # every 100 reads, check bias
return self.read_distance(self._bias_count <= 0)
@property
def status(self):
"""The status byte, check datasheet for bitmask"""
buf = bytearray([0x1])
with self.i2c_device as i2c:
i2c.write_then_readinto(buf, buf)
return buf[0]
def _write_reg(self, reg, value):
self._buf[0] = reg
self._buf[1] = value
with self.i2c_device as i2c:
#print("Writing: ", [hex(i) for i in self._buf])
i2c.write(self._buf)
time.sleep(0.001) # there's a delay in arduino library
def _read_reg(self, reg, num):
while True:
self._status = self.status
if not self._status & STATUS_BUSY:
break
# no longer busy
self._buf[0] = reg
with self.i2c_device as i2c:
i2c.write_then_readinto(self._buf, self._buf, out_end=1, in_end=num)
#print("Read from ", hex(reg), [hex(i) for i in self._buf])
return self._buf
```

authors: ["mkoster@stack41.com"]
author_id: mkoster@stack41.com
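
A minimal usage sketch for the driver above, following the standard Adafruit CircuitPython pattern (`board.SCL`/`board.SDA` assume a CircuitPython-capable board such as the PyPortal this repo targets):

```python
# Poll the sensor forever; RuntimeError covers an occasional failed reading.
import time
import board
import busio
import adafruit_lidarlite

i2c = busio.I2C(board.SCL, board.SDA)
sensor = adafruit_lidarlite.LIDARLite(i2c)

while True:
    try:
        print("%d cm" % sensor.distance)
    except RuntimeError as e:
        print("reading failed:", e)
    time.sleep(0.1)
```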

---
blob_id: 1e03fdfc67c1b1e1fe60f6a7d9f6981745a37274
directory_id: 2337351b228818e41be3002bd38f68f77c2aa074
path: /tests/test_hash.py
content_id: c10a96478165e914236a8897a388fe57add9eb5a
detected_licenses: ["BSD-3-Clause"]
license_type: permissive
repo_name: nocproject/noc
snapshot_id: 57d40c680a1499374463e472434f9595ed6d1374
revision_id: 6e6d71574e9b9d822bec572cc629a0ea73604a59
branch_name: refs/heads/master
visit_date: 2023-08-31T01:11:33.544573
revision_date: 2023-08-30T17:31:11
committer_date: 2023-08-30T17:31:11
github_id: 107,815,776
star_events_count: 105
fork_events_count: 33
gha_license_id: BSD-3-Clause
gha_event_created_at: 2023-07-31T07:57:45
gha_created_at: 2017-10-21T21:04:33
gha_language: Python
src_encoding: UTF-8
language: Python
is_vendor: false
is_generated: false
length_bytes: 1,914
extension: py
content:
```python
# ----------------------------------------------------------------------
# Test noc.core.hash functions
# ----------------------------------------------------------------------
# Copyright (C) 2007-2020 The NOC Project
# See LICENSE for details
# ----------------------------------------------------------------------
# Third-party modules
import pytest
# NOC modules
from noc.core.hash import hash_str, hash_int, dict_hash_int, dict_hash_int_args
@pytest.mark.parametrize(
"value,expected",
[
(0, b"J^\xa04\xb0\x0b\xaf\xb6"),
("0", b"J^\xa04\xb0\x0b\xaf\xb6"),
(None, b"\x1a3\x12\x943.\xcdm"),
("None", b"\x1a3\x12\x943.\xcdm"),
],
)
def test_hash_str(value, expected):
assert hash_str(value) == expected
@pytest.mark.parametrize(
"value,expected",
[
(0, 5358896754769768374),
("0", 5358896754769768374),
(None, 1887873096521534829),
("None", 1887873096521534829),
],
)
def test_hash_int(value, expected):
assert hash_int(value) == expected
@pytest.mark.parametrize(
"value,expected",
[
({}, -2954230017111125474),
({"k": 1}, -7829327169641555127),
({"k": "1"}, -7829327169641555127),
({"k": 1, "v": "2"}, 6473659485526827658),
({"k": 1, "v": None}, 1975760527053142894),
({"k": 1, "v": "None"}, 1975760527053142894),
],
)
def test_dict_hash_int(value, expected):
assert dict_hash_int(value) == expected
@pytest.mark.parametrize(
"value,expected",
[
({}, -2954230017111125474),
({"k": 1}, -7829327169641555127),
({"k": "1"}, -7829327169641555127),
({"k": 1, "v": "2"}, 6473659485526827658),
({"k": 1, "v": None}, 1975760527053142894),
({"k": 1, "v": "None"}, 1975760527053142894),
],
)
def test_dict_hash_int_args(value, expected):
assert dict_hash_int_args(**value) == expected
```

authors: ["dv@nocproject.org"]
author_id: dv@nocproject.org

---
blob_id: 733ba5fb18c29c246887a964601e46b3548dc780
directory_id: facb8b9155a569b09ba66aefc22564a5bf9cd319
path: /wp2/merra_scripts/03_model_fitting/rfRecon/591-tideGauge.py
content_id: 2207dc765cefb00abc41e7f582bf2c7c7e84a698
detected_licenses: []
license_type: no_license
repo_name: moinabyssinia/modeling-global-storm-surges
snapshot_id: 13e69faa8f45a1244a964c5de4e2a5a6c95b2128
revision_id: 6e385b2a5f0867df8ceabd155e17ba876779c1bd
branch_name: refs/heads/master
visit_date: 2023-06-09T00:40:39.319465
revision_date: 2021-06-25T21:00:44
committer_date: 2021-06-25T21:00:44
github_id: 229,080,191
star_events_count: 0
fork_events_count: 0
gha_license_id: null
gha_event_created_at: null
gha_created_at: null
gha_language: null
src_encoding: UTF-8
language: Python
is_vendor: false
is_generated: false
length_bytes: 10,696
extension: py
content:
```python
# -*- coding: utf-8 -*-
"""
Created on Mon May 7 11:39:00 2020
This program is designed to reconstruct
daily max surge using RF
@author: Michael Tadesse
"""
def reconstructRF():
"""
run KFOLD method for random forest regression
"""
#import packages
import os
import numpy as np
import pandas as pd
#from sklearn import metrics
#from scipy import stats
#import seaborn as sns
#import matplotlib.pyplot as plt
#from sklearn.model_selection import KFold
from datetime import datetime
from sklearn.ensemble import RandomForestRegressor
from sklearn.decomposition import PCA
from sklearn.preprocessing import StandardScaler
#defining directories
dir_in = "/lustre/fs0/home/mtadesse/merraAllLagged"
dir_out = "/lustre/fs0/home/mtadesse/rfReconstruction"
surge_path = "/lustre/fs0/home/mtadesse/05_dmax_surge_georef"
# #load KFOLD result csv file
# os.chdir('F:\\06_eraint_results\\sonstig')
# kf_dat = pd.read_csv('eraint_randForest_kfold.csv')
# #edit the tg names to be usable later on
# editName = lambda x: x.split('.csv')[0]
# kf_dat['tg'] = pd.DataFrame(list(map(editName, kf_dat['tg'])), columns= ['tg'])
#cd to the lagged predictors directory
os.chdir(dir_in)
x = 591
y = 592
#looping through
for tg in range(x,y):
os.chdir(dir_in)
tg_name = os.listdir()[tg]
print(tg, tg_name)
#load predictor
pred = pd.read_csv(tg_name)
pred.drop('Unnamed: 0', axis = 1, inplace = True)
#add squared and cubed wind terms (as in WPI model)
pickTerms = lambda x: x.startswith('wnd')
wndTerms = pred.columns[list(map(pickTerms, pred.columns))]
wnd_sqr = pred[wndTerms]**2
wnd_cbd = pred[wndTerms]**3
pred = pd.concat([pred, wnd_sqr, wnd_cbd], axis = 1)
#standardize predictor data
dat = pred.iloc[:,1:]
scaler = StandardScaler()
print(scaler.fit(dat))
dat_standardized = pd.DataFrame(scaler.transform(dat), \
columns = dat.columns)
pred_standardized = pd.concat([pred['date'], dat_standardized], axis = 1)
#load surge data
os.chdir(surge_path)
surge = pd.read_csv(tg_name)
surge.drop('Unnamed: 0', axis = 1, inplace = True)
#remove duplicated surge rows
surge.drop(surge[surge['ymd'].duplicated()].index, axis = 0, inplace = True)
surge.reset_index(inplace = True)
surge.drop('index', axis = 1, inplace = True)
#adjust surge time format to match that of pred
time_str = lambda x: str(datetime.strptime(x, '%Y-%m-%d'))
surge_time = pd.DataFrame(list(map(time_str, surge['ymd'])), columns = ['date'])
time_stamp = lambda x: (datetime.strptime(x, '%Y-%m-%d %H:%M:%S'))
surge_new = pd.concat([surge_time, surge[['surge', 'lon', 'lat']]], axis = 1)
#merge predictors and surge to find common time frame
pred_surge = pd.merge(pred_standardized, surge_new.iloc[:,:2], on='date', how='right')
pred_surge.sort_values(by = 'date', inplace = True)
#find rows that have nans and remove them
row_nan = pred_surge[pred_surge.isna().any(axis =1)]
pred_surge.drop(row_nan.index, axis = 0, inplace = True)
pred_surge.reset_index(inplace = True)
pred_surge.drop('index', axis = 1, inplace = True)
#in case pred and surge don't overlap
if pred_surge.shape[0] == 0:
print('-'*80)
            print("Predictors and Surge don't overlap")
print('-'*80)
continue
pred_surge['date'] = pd.DataFrame(list(map(time_stamp, \
pred_surge['date'])), \
columns = ['date'])
#prepare data for training/testing
X = pred_surge.iloc[:,1:-1]
y = pd.DataFrame(pred_surge['surge'])
y = y.reset_index()
y.drop(['index'], axis = 1, inplace = True)
#apply PCA
#get the number of PCs used during validation
# pc_num = kf_dat.loc[kf_dat['tg'] == tg_name]['num_95pcs']
pca = PCA(0.95)
pca.fit(X)
X_pca = pca.transform(X)
        # #apply 10 fold cross validation
# kf = KFold(n_splits=10, random_state=29)
# metric_corr = []; metric_rmse = []; #combo = pd.DataFrame(columns = ['pred', 'obs'])
# for train_index, test_index in kf.split(X):
# X_train, X_test = X_pca[train_index], X_pca[test_index]
# y_train, y_test = y['surge'][train_index], y['surge'][test_index]
# #train regression model
# rf = RandomForestRegressor(n_estimator = 50, min_samples_leaf = 1)
# lm.fit(X_train, y_train)
# #predictions
# predictions = lm.predict(X_test)
# # pred_obs = pd.concat([pd.DataFrame(np.array(predictions)), \
# # pd.DataFrame(np.array(y_test))], \
# # axis = 1)
# # pred_obs.columns = ['pred', 'obs']
# # combo = pd.concat([combo, pred_obs], axis = 0)
# #evaluation matrix - check p value
# if stats.pearsonr(y_test, predictions)[1] >= 0.05:
# print("insignificant correlation!")
# continue
# else:
# #print(stats.pearsonr(y_test, predictions))
# metric_corr.append(stats.pearsonr(y_test, predictions)[0])
# #print(np.sqrt(metrics.mean_squared_error(y_test, predictions)))
# metric_rmse.append(np.sqrt(metrics.mean_squared_error(y_test, predictions)))
# #number of years used to train/test model
# num_years = np.ceil((pred_surge['date'][pred_surge.shape[0]-1] -\
# pred_surge['date'][0]).days/365)
longitude = surge['lon'][0]
latitude = surge['lat'][0]
num_pc = X_pca.shape[1] #number of principal components
# corr = np.mean(metric_corr)
# rmse = np.mean(metric_rmse)
# print('num_year = ', num_years, ' num_pc = ', num_pc ,'avg_corr = ',\
# np.mean(metric_corr), ' - avg_rmse (m) = ', \
# np.mean(metric_rmse), '\n')
#%%
#surge reconstruction
pred_for_recon = pred[~pred.isna().any(axis = 1)]
pred_for_recon = pred_for_recon.reset_index().drop('index', axis = 1)
#standardize predictor data
dat = pred_for_recon.iloc[:,1:]
scaler = StandardScaler()
print(scaler.fit(dat))
dat_standardized = pd.DataFrame(scaler.transform(dat), \
columns = dat.columns)
pred_standardized = pd.concat([pred_for_recon['date'], dat_standardized], axis = 1)
X_recon = pred_standardized.iloc[:, 1:]
#apply PCA
pca = PCA(num_pc) #use the same number of PCs used for training
pca.fit(X_recon)
X_pca_recon = pca.transform(X_recon)
#%%
#model preparation
#defining the rf model with number of trees and minimum leaves
rf = RandomForestRegressor(n_estimators=50, min_samples_leaf=1, \
random_state = 29)
rf.fit(X_pca, y)
#get prediction interval
def pred_ints(model, X_pca_recon, percentile = 95):
"""
function to construct prediction interval
taking into account the result of each
regression tree
"""
err_down = [];
err_up = [];
preds= [];
for pred in model.estimators_:
preds.append(pred.predict(X_pca_recon))
preds = np.vstack(preds).T
err_down = np.percentile(preds, (100 - percentile)/2., axis = 1, \
keepdims = True)
err_up = np.percentile(preds, 100 - (100 - percentile)/2., axis =1, \
keepdims = True)
return err_down.reshape(-1), err_up.reshape(-1)
#compute 95% prediction intervals
err_down, err_up = pred_ints(rf, X_pca_recon, percentile = 95);
#reconstructed surge goes here
truth = rf.predict(X_pca_recon);
correct = 0.;
for i, val in enumerate(truth):
if err_down[i] <= val <= err_up[i]:
correct +=1
print(correct*100/len(truth), '\n')
#final dataframe
final_dat = pd.concat([pred_standardized['date'], \
pd.DataFrame([truth, err_down, err_up]).T], axis = 1)
final_dat['lon'] = longitude
final_dat['lat'] = latitude
        final_dat.columns = ['date', 'surge_reconstructed', 'pred_int_lower',\
'pred_int_upper', 'lon', 'lat']
        #plot - optional
# time_stamp = lambda x: (datetime.strptime(x, '%Y-%m-%d %H:%M:%S'))
# final_dat['date'] = pd.DataFrame(list(map(time_stamp, final_dat['date'])), columns = ['date'])
# surge['date'] = pd.DataFrame(list(map(time_stamp, surge['date'])), columns = ['date'])
# sns.set_context('notebook', font_scale = 2)
# plt.figure()
# plt.plot(final_dat['date'], final_dat['mean'], color = 'green')
# plt.scatter(surge['date'], surge['surge'], color = 'blue')
#prediction intervals
# plt.plot(final_dat['date'], final_dat['obs_ci_lower'], color = 'red', linestyle = "--", lw = 0.8)
# plt.plot(final_dat['date'], final_dat['obs_ci_upper'], color = 'red', linestyle = "--", lw = 0.8)
#confidence intervals
# plt.plot(final_dat['date'], final_dat['mean_ci_upper'], color = 'black', linestyle = "--", lw = 0.8)
# plt.plot(final_dat['date'], final_dat['mean_ci_lower'], color = 'black', linestyle = "--", lw = 0.8)
#save df as cs - in case of interruption
os.chdir(dir_out)
final_dat.to_csv(tg_name)
#cd to dir_in
os.chdir(dir_in)
reconstructRF()
|
[
"michaelg.tadesse@gmail.com"
] |
michaelg.tadesse@gmail.com
|
5e8646400ae70762ff471fffe0fd4633446384d0
|
134ff3c0719d4c0022eb0fb7c859bdbff5ca34b2
|
/desktop/core/ext-py/django-extensions/django_extensions/management/commands/create_app.py
|
3fcf7e6053b65dd8588a8dfd011af9aec35762c9
|
[
"Apache-2.0"
] |
permissive
|
civascu/hue
|
22637f13a4cfc557716557661523131b6ac16da4
|
82f2de44789ff5a981ed725175bae7944832d1e9
|
refs/heads/master
| 2020-03-31T01:50:39.449966
| 2010-07-21T01:05:50
| 2010-07-21T01:07:15
| 788,284
| 0
| 0
|
Apache-2.0
| 2019-02-04T07:03:12
| 2010-07-21T07:34:27
|
Python
|
UTF-8
|
Python
| false
| false
| 3,259
|
py
|
import os
import re
import sys
import django_extensions
from django.core.management.base import CommandError, LabelCommand, _make_writeable
from optparse import make_option
class Command(LabelCommand):
option_list = LabelCommand.option_list + (
make_option('--template', '-t', action='store', dest='app_template',
help='The path to the app template'),
make_option('--parent_path', '-p', action='store', dest='parent_path',
help='The parent path of the app to be created'),
)
help = ("Creates a Django application directory structure based on the specified template directory.")
args = "[appname]"
label = 'application name'
requires_model_validation = False
can_import_settings = True
def handle_label(self, label, **options):
project_dir = os.getcwd()
project_name = os.path.split(project_dir)[-1]
app_name =label
app_template = options.get('app_template') or os.path.join(django_extensions.__path__[0], 'conf', 'app_template')
app_dir = os.path.join(options.get('parent_path') or project_dir, app_name)
if not os.path.exists(app_template):
raise CommandError("The template path, %r, does not exist." % app_template)
if not re.search(r'^\w+$', label):
raise CommandError("%r is not a valid application name. Please use only numbers, letters and underscores." % label)
try:
os.makedirs(app_dir)
except OSError, e:
raise CommandError(e)
copy_template(app_template, app_dir, project_name, app_name)
def copy_template(app_template, copy_to, project_name, app_name):
"""copies the specified template directory to the copy_to location"""
import shutil
# walks the template structure and copies it
for d, subdirs, files in os.walk(app_template):
relative_dir = d[len(app_template)+1:]
if relative_dir and not os.path.exists(os.path.join(copy_to, relative_dir)):
os.mkdir(os.path.join(copy_to, relative_dir))
# prune hidden directories in place so os.walk does not descend into them
subdirs[:] = [s for s in subdirs if not s.startswith('.')]
for f in files:
if f.endswith('.pyc') or f.startswith('.DS_Store'):
continue
path_old = os.path.join(d, f)
path_new = os.path.join(copy_to, relative_dir, f.replace('app_name', app_name))
if os.path.exists(path_new):
path_new = os.path.join(copy_to, relative_dir, f)
if os.path.exists(path_new):
continue
if path_new.endswith(".tmpl"):
path_new = path_new[:-len(".tmpl")]
fp_old = open(path_old, 'r')
fp_new = open(path_new, 'w')
fp_new.write(fp_old.read().replace('{{ app_name }}', app_name).replace('{{ project_name }}', project_name))
fp_old.close()
fp_new.close()
try:
shutil.copymode(path_old, path_new)
_make_writeable(path_new)
except OSError:
sys.stderr.write("Notice: Couldn't set permission bits on %s. You're probably using an uncommon filesystem setup. No problem.\n" % path_new)
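# A hypothetical invocation sketch (project layout and template path are
# assumed, not taken from this file):
#   python manage.py create_app blog -p apps -t /path/to/app_template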
|
[
"bcwalrus@cloudera.com"
] |
bcwalrus@cloudera.com
|
585762a8c98cac23f2e45d296b0bc57b3b31cfe9
|
5a25f4f5f9c7cba03f9b5848eafc01a760c88768
|
/analysis/bsens_cleanest_diff_zooms.py
|
86b02dd38170047b195bc6e7a5b624af3f12c0d6
|
[] |
no_license
|
ALMA-IMF/reduction
|
b3579a548fe20193b807a7415a040f351c879beb
|
de606cc6bc542f088223ce84082ff333739c9007
|
refs/heads/master
| 2023-06-22T13:21:13.841999
| 2023-06-12T09:17:50
| 2023-06-12T09:17:50
| 115,018,799
| 9
| 29
| null | 2023-06-12T09:17:51
| 2017-12-21T15:13:55
|
Python
|
UTF-8
|
Python
| false
| false
| 7,898
|
py
|
import os
import requests
import re
import numpy as np
from astropy import table
import io
import time
from astropy import units as u
import radio_beam
import regions
from astropy.io import fits
from astropy.visualization import simple_norm
from astropy import stats, convolution, wcs, coordinates
from spectral_cube import SpectralCube
import pylab as pl
import spectral_cube
from spectralindex import prefixes
import warnings
warnings.filterwarnings('ignore', category=spectral_cube.utils.StokesWarning)
warnings.filterwarnings('ignore', category=UserWarning)
warnings.filterwarnings('ignore', category=pl.matplotlib.cbook.MatplotlibDeprecationWarning)
np.seterr('ignore')
def bsens_cleanest_diff(finaliter_prefix_b3, finaliter_prefix_b6,
cutoutregion, fignum=1,
finaliter_prefix_b3_bsens=None,
finaliter_prefix_b6_bsens=None,
normpars_b3=None,
normpars_b6=None,
noco="",
non2hp="",
basepath='/home/adam/work/alma-imf/reduction/', ):
image_b3 = SpectralCube.read(f'{finaliter_prefix_b3}.image.tt0', format='casa_image').subcube_from_ds9region(cutoutregion)
if finaliter_prefix_b3_bsens is None:
if non2hp:
finaliter_prefix_b3_bsens = finaliter_prefix_b3.replace("cleanest","bsens_nobright").replace("merged_12M", f"merged_bsens_12M{non2hp}")
else:
finaliter_prefix_b3_bsens = finaliter_prefix_b3.replace("cleanest","bsens").replace("merged_12M", f"merged_bsens{non2hp}_12M")
bsens_b3 = SpectralCube.read(f'{finaliter_prefix_b3_bsens}.image.tt0', format='casa_image').subcube_from_ds9region(cutoutregion)
image_b6 = SpectralCube.read(f'{finaliter_prefix_b6}.image.tt0', format='casa_image').subcube_from_ds9region(cutoutregion)
if finaliter_prefix_b6_bsens is None:
if noco:
finaliter_prefix_b6_bsens = finaliter_prefix_b6.replace("cleanest","bsens_nobright").replace("merged_12M", f"merged_bsens_12M{noco}")
else:
finaliter_prefix_b6_bsens = finaliter_prefix_b6.replace("cleanest","bsens").replace("merged_12M", f"merged_bsens{noco}_12M")
bsens_b6 = SpectralCube.read(f'{finaliter_prefix_b6_bsens}.image.tt0', format='casa_image').subcube_from_ds9region(cutoutregion)
# image_b3 = image_b3 * u.beam / image_b3.beam.sr
# image_b6 = image_b6 * u.beam / image_b6.beam.sr
# bsens_b3 = bsens_b3 * u.beam / bsens_b3.beam.sr
# bsens_b6 = bsens_b6 * u.beam / bsens_b6.beam.sr
tgt_unit = u.mJy if bsens_b6.unit.is_equivalent(u.mJy) else u.mJy/u.beam
fieldname = os.path.basename(finaliter_prefix_b6).split("_")[0]
print(fieldname)
diff_b3 = bsens_b3 - image_b3
diff_b6 = bsens_b6 - image_b6
normpars_b3_default = dict(min_percent=0.1, max_percent=99.9, stretch='linear')
normpars_b6_default = dict(min_percent=0.1, max_percent=99.9, stretch='linear')
if normpars_b3 is not None:
normpars_b3_default.update(normpars_b3)
normpars_b3 = normpars_b3_default
if normpars_b6 is not None:
normpars_b6_default.update(normpars_b6)
normpars_b6 = normpars_b6_default
fig = pl.figure(num=fignum, figsize=(6,6))
fig.clf()
ax = pl.subplot(1,1,1,label='B3', projection=diff_b3[0].wcs)
im = ax.imshow(diff_b3.to(tgt_unit)[0].value, norm=simple_norm(diff_b3.to(tgt_unit)[0].value, **normpars_b3), cmap='gray')
ax.set_xlabel('Right Ascension (ICRS)')
ax.set_ylabel('Declination (ICRS)')
cb = pl.colorbar(mappable=im)
cb.set_label("$S_\\nu$ [mJy beam$^{-1}$]")
fig2 = pl.figure(num=fignum+1, figsize=(6,6))
ax2 = pl.subplot(1,1,1,label='B6', projection=diff_b6[0].wcs)
im = ax2.imshow(diff_b6.to(tgt_unit)[0].value, norm=simple_norm(diff_b6.to(tgt_unit)[0].value, **normpars_b6), cmap='gray')
ax2.set_xlabel('Right Ascension (ICRS)')
ax2.set_ylabel('Declination (ICRS)')
cb = pl.colorbar(mappable=im)
cb.set_label("$S_\\nu$ [mJy beam$^{-1}$]")
# ax2.set_yticklabels([])
# ax2.set_ylabel("")
# lat = ax2.coords['dec']
# lat.set_ticklabel_position('r')
# lat.set_axislabel_position('r')
# lat.set_axislabel('Declination')
return fig,fig2
normpars = {'W51IRS2': {
'normpars_b3': {'max_percent': None, "min_percent": None, "min_cut":-0.5, "max_cut":0.5, 'stretch':'linear'},
'normpars_b6': {'max_percent':99.99, "min_percent": 1, 'stretch':'linear'}
},
'W43MM2': {'normpars_b6': {'max_percent': 99.5}},
"G333": {'normpars_b6': {'max_percent': None, "min_percent": None, "min_cut":-7, "max_cut":7, 'stretch':'linear'},},
}
cutoutregions = {
"G008": ("fk5; box(271.579, -21.6255, 30\",30\")",),
"G10": (
"fk5; box(272.620167, -19.93008, 30\",30\")",
),
"G12": (
"fk5; box(273.5575, -17.92900, 70\", 70\")",
),
"G328": (
"fk5; box(239.499, -53.9668, 30\", 30\")",
),
"G327": (
"fk5; box(15:53:07,-54:37:10, 45\",45\")",
),
"G333": (
"fk5; box(245.539, -50.1002, 60\",60\")",
),
"G337": (
"fk5; box(250.294, -47.135, 20\", 20\")",
),
"G338": (
"fk5; box(250.142, -45.694, 30\", 30\")",
),
"G351": (
"fk5; box(261.6787, -36.1545, 30\", 30\")",
),
"G353": (
"fk5; box(262.6120, -34.696, 60\", 60\")",
),
"W43MM3": (
"fk5; box(281.9241, -2.007, 20\", 20\")",
),
"W43MM2": (
"fk5; box(281.9025, -2.0152, 25\", 25\")",
),
"W43MM1": (
"fk5; box(18:47:46.8714384205, -1:54:23.9163751512, 25\", 25\")",
),
"W51IRS2": (
'fk5; box(19:23:39.9340,+14:31:09.099,11.912",11.502",6.5033172e-06)',
"fk5; box(19:23:39.975,+14:31:08.2,25\",25\")",
),
"W51-E": (
"fk5; box(19:23:43.90,+14:30:30.0,20\",20\")",
),
}
if __name__ == "__main__":
import os
try:
os.chdir('/orange/adamginsburg/ALMA_IMF/2017.1.01355.L/February2021Release')
except FileNotFoundError:
os.chdir('/home/adam/Dropbox_UFL/ALMA-IMF/December2020Release/')
if not os.path.exists('bsens_diff_zooms'):
os.mkdir('bsens_diff_zooms')
pl.rcParams['font.size'] = 14
pl.rcParams['image.origin'] = 'lower'
pl.rcParams['image.interpolation'] = 'none'
pl.rcParams['figure.facecolor'] = 'w'
# why did I add this override? It's wrong (I had 4 instead of 6)
#prefixes['G338']['finaliter_prefix_b6_bsens'] = 'G338.93/B6/bsens/G338.93_B6_uid___A001_X1296_X14f_continuum_merged_bsens_12M_robust0_selfcal6_finaliter'
prefixes['W43MM1']['finaliter_prefix_b6'] = 'W43-MM1/B6/cleanest/W43-MM1_B6_uid___A001_X12f_X9f_continuum_merged_12M_robust0_selfcal4_finaliter'
for fieldid, pfxs in prefixes.items():
fig1,fig2 = bsens_cleanest_diff(**pfxs, cutoutregion=cutoutregions[fieldid][0], **normpars.get(fieldid, {}))
fig1.savefig(f'bsens_diff_zooms/{fieldid}_bsens_diff_zoom_B3.png', bbox_inches='tight', dpi=300)
fig2.savefig(f'bsens_diff_zooms/{fieldid}_bsens_diff_zoom_B6.png', bbox_inches='tight', dpi=300)
prefixes['W43MM1']['finaliter_prefix_b6'] = 'W43-MM1/B6/cleanest/W43-MM1_B6_uid___A002_X996c88_X87_continuum_merged_12M_robust0_selfcal4_finaliter'
os.chdir('/orange/adamginsburg/ALMA_IMF/2017.1.01355.L/June2021Release')
if not os.path.exists('bsens_diff_zooms'):
os.mkdir('bsens_diff_zooms')
for fieldid, pfxs in prefixes.items():
fig1,fig2 = bsens_cleanest_diff(**pfxs, cutoutregion=cutoutregions[fieldid][0], **normpars.get(fieldid, {}),
noco='_noco', non2hp='_non2hp')
fig1.savefig(f'bsens_diff_zooms/{fieldid}_bsens_non2hp_diff_zoom_B3.png', bbox_inches='tight', dpi=300)
fig2.savefig(f'bsens_diff_zooms/{fieldid}_bsens_noco_diff_zoom_B6.png', bbox_inches='tight', dpi=300)
|
[
"keflavich@gmail.com"
] |
keflavich@gmail.com
|
4b858fafb09c3c580a3b53f9df313e263ed7fe98
|
63efb13760eabfc09c288d29264adfff0da6622a
|
/LeetCode/54_spiral_matrix.py
|
334481a1decda2f1c02166c8e4cbcbdeaeb8d063
|
[] |
no_license
|
KKosukeee/CodingQuestions
|
59f0002ace13ebbe3a4a36976277bcb0d3766dbd
|
01fe893ba2e37c9bda79e3081c556698f0b6d2f0
|
refs/heads/master
| 2020-05-16T04:22:31.578974
| 2020-03-30T00:27:51
| 2020-03-30T00:27:51
| 182,775,403
| 1
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,600
|
py
|
"""
Solution for 54. Spiral Matrix
https://leetcode.com/problems/spiral-matrix/
"""
class Solution:
"""
Runtime: 36 ms, faster than 77.01% of Python3 online submissions for Spiral Matrix.
Memory Usage: 13.3 MB, less than 5.18% of Python3 online submissions for Spiral Matrix.
"""
def spiralOrder(self, matrix):
"""
Given a matrix of m x n elements (m rows, n columns), return all elements of the matrix
in spiral order.
Example 1:
Input:
[
[ 1, 2, 3 ],
[ 4, 5, 6 ],
[ 7, 8, 9 ]
]
Output: [1,2,3,6,9,8,7,4,5]
Example 2:
Input:
[
[1, 2, 3, 4],
[5, 6, 7, 8],
[9,10,11,12]
]
Output: [1,2,3,4,8,12,11,10,9,5,6,7]
Args:
matrix: 2D matrix
Returns:
list<int>: list of integer values in spiral order in matrix
"""
if not matrix:
return []
# Initialize position (i, j) and direction index k
i, j, k = 0, 0, 0
# Initialize a direction with a tuple
directions = [
(0, 1),
(1, 0),
(0, -1),
(-1, 0)
]
# Initialize result array for keeping spiral order
result = [matrix[i][j]]
matrix[i][j] = -float('inf')
# Walk the spiral until no valid direction remains
while True:
if not self.check_direction(matrix, i, j, directions[k]):
k = (k + 1) % 4
if not self.check_direction(matrix, i, j, directions[k]):
break
i += directions[k][0]
j += directions[k][1]
result.append(matrix[i][j])
matrix[i][j] = -float('inf')
return result
def check_direction(self, matrix, row, col, direction):
"""
Checks if the direction is valid or not for given row and col
Args:
matrix: 2D matrix same as the main function's matrix
row: current row index value which could be >= len(matrix)
col: current col index value which could be >= len(matrix[0])
direction: tuple of integers for heading direction
Returns:
bool: True if the heading direction is valid, otherwise False
"""
new_row = row + direction[0]
new_col = col + direction[1]
if 0 <= new_row < len(matrix) and \
0 <= new_col < len(matrix[0]) and \
matrix[new_row][new_col] != -float('inf'):
return True
else:
return False
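# A minimal usage sketch (mirrors Example 1 from the docstring):
# if __name__ == '__main__':
#     print(Solution().spiralOrder([[1, 2, 3], [4, 5, 6], [7, 8, 9]]))
#     # expected output: [1, 2, 3, 6, 9, 8, 7, 4, 5]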
|
[
"kousuke.newlife@gmail.com"
] |
kousuke.newlife@gmail.com
|
18375f5328d2d152a2bf52b751758c80aeef94a3
|
6d69b249a81e076d79787dd08eb8957908052052
|
/wiktionary_bot/cron/send_daily_word.py
|
18e6b326e27d746aea7ad179a8240945e26ae9f0
|
[] |
no_license
|
2vitalik/wiktionary
|
02ee1f1327c3b82fc7b4d7da12083b1431b1eb8b
|
8edae2f7dcf9089084c5ce7033c4fb0b454f4dfa
|
refs/heads/master
| 2023-02-06T11:28:41.554604
| 2023-02-05T22:49:01
| 2023-02-05T22:49:01
| 121,025,447
| 7
| 2
| null | 2021-10-13T17:36:32
| 2018-02-10T15:06:24
|
Lua
|
UTF-8
|
Python
| false
| false
| 755
|
py
|
import sys; sys.path.append('../..')
from datetime import datetime
import telegram
from core.conf import conf
from libs.utils.io import json_load
from wiktionary_bot.src.semantic import Reply
from wiktionary_bot.src.slack import slack
from wiktionary_bot.src.utils import send
@slack('send_daily_word')
def send_daily_word():
bot = telegram.Bot(conf.telegram_token)
chat_id = conf.new_channel_id
now = datetime.now()
year_month = now.strftime('%Y/%m')
path = f'{conf.data_path}/word_of_day/{year_month}/latest.json'
data = json_load(path)
title = data[str(now.day)]
print(title)
text = '🔆 Слово дня\n' + Reply(title).text
send(bot, chat_id, text)
if __name__ == '__main__':
send_daily_word()
|
[
"2vitalik@gmail.com"
] |
2vitalik@gmail.com
|
e1311a382dc1bb67bb92ed7488178fb8085c06ea
|
a1bffcd8854e1843e56bb812d4d83b3161a5211e
|
/tests/unit/modules/network/fortios/test_fortios_firewall_ip_translation.py
|
816db03ac6fbbe071fa2513e4ba6324bf05ad352
|
[] |
no_license
|
goneri/ansible.community
|
1a71f9d98c164b77f8ed2ed7f558b4963005ff8f
|
f26f612dd0a3154050d90b51a75502018c95f6e4
|
refs/heads/master
| 2020-12-29T07:47:35.353515
| 2020-01-22T17:43:18
| 2020-01-22T17:43:18
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 9,273
|
py
|
# Copyright 2019 Fortinet, Inc.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <https://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
import json
import pytest
from mock import ANY
from ansible_collections.ansible.community.plugins.module_utils.network.fortios.fortios import FortiOSHandler
try:
from ansible_collections.ansible.community.plugins.modules import fortios_firewall_ip_translation
except ImportError:
pytest.skip("Could not load required modules for testing", allow_module_level=True)
@pytest.fixture(autouse=True)
def connection_mock(mocker):
connection_class_mock = mocker.patch('ansible_collections.ansible.community.plugins.modules.fortios_firewall_ip_translation.Connection')
return connection_class_mock
fos_instance = FortiOSHandler(connection_mock)
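# note: at import time `connection_mock` here is the fixture function object;
# the tests below patch FortiOSHandler's set/delete methods, so this handler
# never uses a live connection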
def test_firewall_ip_translation_creation(mocker):
schema_method_mock = mocker.patch('ansible_collections.ansible.community.plugins.module_utils.network.fortios.fortios.FortiOSHandler.schema')
set_method_result = {'status': 'success', 'http_method': 'POST', 'http_status': 200}
set_method_mock = mocker.patch('ansible_collections.ansible.community.plugins.module_utils.network.fortios.fortios.FortiOSHandler.set', return_value=set_method_result)
input_data = {
'username': 'admin',
'state': 'present',
'firewall_ip_translation': {
'endip': 'test_value_3',
'map_startip': 'test_value_4',
'startip': 'test_value_5',
'transid': '6',
'type': 'SCTP'
},
'vdom': 'root'}
is_error, changed, response = fortios_firewall_ip_translation.fortios_firewall(input_data, fos_instance)
expected_data = {
'endip': 'test_value_3',
'map-startip': 'test_value_4',
'startip': 'test_value_5',
'transid': '6',
'type': 'SCTP'
}
set_method_mock.assert_called_with('firewall', 'ip-translation', data=expected_data, vdom='root')
schema_method_mock.assert_not_called()
assert not is_error
assert changed
assert response['status'] == 'success'
assert response['http_status'] == 200
def test_firewall_ip_translation_creation_fails(mocker):
schema_method_mock = mocker.patch('ansible_collections.ansible.community.plugins.module_utils.network.fortios.fortios.FortiOSHandler.schema')
set_method_result = {'status': 'error', 'http_method': 'POST', 'http_status': 500}
set_method_mock = mocker.patch('ansible_collections.ansible.community.plugins.module_utils.network.fortios.fortios.FortiOSHandler.set', return_value=set_method_result)
input_data = {
'username': 'admin',
'state': 'present',
'firewall_ip_translation': {
'endip': 'test_value_3',
'map_startip': 'test_value_4',
'startip': 'test_value_5',
'transid': '6',
'type': 'SCTP'
},
'vdom': 'root'}
is_error, changed, response = fortios_firewall_ip_translation.fortios_firewall(input_data, fos_instance)
expected_data = {
'endip': 'test_value_3',
'map-startip': 'test_value_4',
'startip': 'test_value_5',
'transid': '6',
'type': 'SCTP'
}
set_method_mock.assert_called_with('firewall', 'ip-translation', data=expected_data, vdom='root')
schema_method_mock.assert_not_called()
assert is_error
assert not changed
assert response['status'] == 'error'
assert response['http_status'] == 500
def test_firewall_ip_translation_removal(mocker):
schema_method_mock = mocker.patch('ansible_collections.ansible.community.plugins.module_utils.network.fortios.fortios.FortiOSHandler.schema')
delete_method_result = {'status': 'success', 'http_method': 'POST', 'http_status': 200}
delete_method_mock = mocker.patch('ansible_collections.ansible.community.plugins.module_utils.network.fortios.fortios.FortiOSHandler.delete', return_value=delete_method_result)
input_data = {
'username': 'admin',
'state': 'absent',
'firewall_ip_translation': {
'endip': 'test_value_3',
'map_startip': 'test_value_4',
'startip': 'test_value_5',
'transid': '6',
'type': 'SCTP'
},
'vdom': 'root'}
is_error, changed, response = fortios_firewall_ip_translation.fortios_firewall(input_data, fos_instance)
delete_method_mock.assert_called_with('firewall', 'ip-translation', mkey=ANY, vdom='root')
schema_method_mock.assert_not_called()
assert not is_error
assert changed
assert response['status'] == 'success'
assert response['http_status'] == 200
def test_firewall_ip_translation_deletion_fails(mocker):
schema_method_mock = mocker.patch('ansible_collections.ansible.community.plugins.module_utils.network.fortios.fortios.FortiOSHandler.schema')
delete_method_result = {'status': 'error', 'http_method': 'POST', 'http_status': 500}
delete_method_mock = mocker.patch('ansible_collections.ansible.community.plugins.module_utils.network.fortios.fortios.FortiOSHandler.delete', return_value=delete_method_result)
input_data = {
'username': 'admin',
'state': 'absent',
'firewall_ip_translation': {
'endip': 'test_value_3',
'map_startip': 'test_value_4',
'startip': 'test_value_5',
'transid': '6',
'type': 'SCTP'
},
'vdom': 'root'}
is_error, changed, response = fortios_firewall_ip_translation.fortios_firewall(input_data, fos_instance)
delete_method_mock.assert_called_with('firewall', 'ip-translation', mkey=ANY, vdom='root')
schema_method_mock.assert_not_called()
assert is_error
assert not changed
assert response['status'] == 'error'
assert response['http_status'] == 500
def test_firewall_ip_translation_idempotent(mocker):
schema_method_mock = mocker.patch('ansible_collections.ansible.community.plugins.module_utils.network.fortios.fortios.FortiOSHandler.schema')
set_method_result = {'status': 'error', 'http_method': 'DELETE', 'http_status': 404}
set_method_mock = mocker.patch('ansible_collections.ansible.community.plugins.module_utils.network.fortios.fortios.FortiOSHandler.set', return_value=set_method_result)
input_data = {
'username': 'admin',
'state': 'present',
'firewall_ip_translation': {
'endip': 'test_value_3',
'map_startip': 'test_value_4',
'startip': 'test_value_5',
'transid': '6',
'type': 'SCTP'
},
'vdom': 'root'}
is_error, changed, response = fortios_firewall_ip_translation.fortios_firewall(input_data, fos_instance)
expected_data = {
'endip': 'test_value_3',
'map-startip': 'test_value_4',
'startip': 'test_value_5',
'transid': '6',
'type': 'SCTP'
}
set_method_mock.assert_called_with('firewall', 'ip-translation', data=expected_data, vdom='root')
schema_method_mock.assert_not_called()
assert not is_error
assert not changed
assert response['status'] == 'error'
assert response['http_status'] == 404
def test_firewall_ip_translation_filter_foreign_attributes(mocker):
schema_method_mock = mocker.patch('ansible_collections.ansible.community.plugins.module_utils.network.fortios.fortios.FortiOSHandler.schema')
set_method_result = {'status': 'success', 'http_method': 'POST', 'http_status': 200}
set_method_mock = mocker.patch('ansible_collections.ansible.community.plugins.module_utils.network.fortios.fortios.FortiOSHandler.set', return_value=set_method_result)
input_data = {
'username': 'admin',
'state': 'present',
'firewall_ip_translation': {
'random_attribute_not_valid': 'tag',
'endip': 'test_value_3',
'map_startip': 'test_value_4',
'startip': 'test_value_5',
'transid': '6',
'type': 'SCTP'
},
'vdom': 'root'}
is_error, changed, response = fortios_firewall_ip_translation.fortios_firewall(input_data, fos_instance)
expected_data = {
'endip': 'test_value_3',
'map-startip': 'test_value_4',
'startip': 'test_value_5',
'transid': '6',
'type': 'SCTP'
}
set_method_mock.assert_called_with('firewall', 'ip-translation', data=expected_data, vdom='root')
schema_method_mock.assert_not_called()
assert not is_error
assert changed
assert response['status'] == 'success'
assert response['http_status'] == 200
|
[
"ansible_migration@example.com"
] |
ansible_migration@example.com
|
6ad6c28a4be99b031ff33625ec068853131f61f7
|
445892d8512378aea29fe3ac32289193a74f1ad0
|
/16.py
|
067d858cef42ec88793d5d02f238fbe4180e54a9
|
[] |
no_license
|
nemec/euler
|
4310ca29869a253b34860df133f5f41e5b2cf7ae
|
d566a453dbd2ae50e1e450b2e43dc885015ce48d
|
refs/heads/master
| 2021-01-19T06:31:40.072085
| 2012-01-07T22:54:16
| 2012-01-07T22:54:16
| 3,127,191
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 221
|
py
|
## Project Euler
## Problem 16
##
## 2^15 = 32768 and the sum of its digits is 3 + 2 + 7 + 6 + 8 = 26.
##
## What is the sum of the digits of the number 2^1000?
print "Euler answer is:", sum(map(int, str(pow(2, 1000))))
|
[
"djnemec@gmail.com"
] |
djnemec@gmail.com
|
da353a032312bc515f977a62039f08b39d156cdb
|
32a6db4d595ef4d308ac0e2ef37c57f65a777bfc
|
/ZYCami_00_彭小钗/TestCase01/test_prime02.py
|
6ed9b88761b6de280907d63f3a4961a0cf453136
|
[] |
no_license
|
wangdan377/Python_UI
|
1c8f0b3d46272d72f849f242c39e035c6b20720b
|
6c3e23b301ffe14cbd27a5211e48c8f79169dcf9
|
refs/heads/master
| 2023-02-17T02:37:34.353523
| 2021-01-19T11:58:22
| 2021-01-19T11:58:22
| 311,855,691
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 9,236
|
py
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
from ddt import ddt,data,unpack
from Public.Function import *
from Public import Caplictily
from PO.login_element import Login_Page
from PO.Prime_Element import Prime_Page
from PO.Edit_Element import Edit_Page
from PO.Activation_page import Activation_Page
from Data.data import *
from Public.loged import *
import unittest,pytest
import time,os,sys
import warnings
from appium.webdriver import Remote
import HTMLTestRunnerCN3_pie_chart_screen
log = Logger('D:\ZYCami_00\logs\\error.log', level='debug')
@ddt #ddt decorator for the test class
class Test_Prime_02(unittest.TestCase):
'''
Logged out: log in - edit - cloud clip - subscribe now - WeChat Pay - close
Logged in: edit - cloud clip - subscribe now - WeChat Pay - close
Logged out: go to the edit page's cloud clip, then purchase prime
Logged in, non-prime user: buy one/two weeks - agreement - privacy policy - buy device - swipe up'''
#initialization: with the class-level decorator this runs only once before the test cases
@classmethod
def setUpClass(self):
warnings.simplefilter("ignore", ResourceWarning)
# cls.driver = appium_desired() #previous approach
# cls.driver.start_activity(appPackage,appActivity) #an alternative way to launch
#instantiate the page-object classes
# self.fun = BaseFun(self.driver)
driver = Caplictily.Driver_Config()
self.driver = driver.get_driver()
self.login = Login_Page(self.driver) #login
self.prime = Prime_Page(self.driver) #prime
self.edit = Edit_Page(self.driver) #edit
self.action = Activation_Page(self.driver) # activate device
self.fun = BaseFun(self.driver)
@data(*get_log_data()) #data-driven decorator: one test run per data record
@unpack #split each fetched record by comma into separate arguments
@pytest.mark.flaky(reruns=3)
# def test_prime_02_01(self,username,password):
# """
# Logged out: log in - edit - cloud clip - subscribe now - WeChat Pay - close
# :param username: account
# :param password: password
# :return:
# """
# """try:
# self.fun.click(File_name)
# except AttributeError:
# log.logger.error('cannot access that attribute of the object')
# self.fun.saveScreenshot('camera01')
# except NoSuchElementException as a: # element missing, handle the captured exception
# log.logger.error('element does not exist')
# self.fun.saveScreenshot('camera02')
# except NoAlertPresentException as b: # catch unexpected exceptions
# log.logger.warning('warning')
# self.fun.saveScreenshot('camera03')
# except Exception as result:
# log.logger.critical('critical')
# self.fun.saveScreenshot('camera04')
# else:
# pass # runs only if nothing failed
# finally:
# pass # runs whether or not anything failed"""
# # self.login.click_File_me() #My page icon
# self.edit.click_File_Editor() #bottom Edit icon
# self.edit.click_File_cloud_engine() #cloud clip
# self.login.click_File_tv_login()
# self.login.input_user(username)
# self.login.input_password(password)
# self.login.click_File_tv_commit()
# self.prime.click_File_open01() #subscribe now
# self.action.click_File_pay_closed() # close the payment sheet
# self.action.click_File_wx() #choose WeChat Pay
#
# # self.prime.click_File_prime01() #tap [prime]
# # self.prime.click_File_year() #year plan
# # time.sleep(5)
# # self.prime.click_File_open01() #subscribe now
# # time.sleep(5)
# # self.prime.click_File_Service_Agreement() #service agreement
# # self.prime.click_File_Service_Back() #service agreement back button
# # self.prime.click_File_Privacy_Policy() #privacy policy
# # self.prime.click_File_Service_Back() #privacy policy back button
# # time.sleep(2)
# # self.fun.swip_left02() #swipe left
# # time.sleep(5)
# # self.prime.click_File_buy_equipment02() #buy two devices
# # time.sleep(5)
# # self.driver.keyevent(4) #back button
# # time.sleep(2)
# # self.fun.swip_up()
# # time.sleep(5)
#
#
#
# """# self.driver.get_screenshot_as_file('../screen/test_camera.png') #save straight into the report
# self.fun.saveScreenshot('camera')
# self.fun.saveScreenshot('help')
#
# self.fun.click(File_enter)"""
def test_prime_02_02(self,username,password):
"""
Logged out: go to the edit page's cloud clip, then purchase prime
:param username: account
:param password: password
:return:
"""
"""try:
self.fun.click(File_name)
except AttributeError:
log.logger.error('访问不到该对象属性')
self.fun.saveScreenshot('camera01')
except NoSuchElementException as a: # 元素不存在,则调用捕获异常处理
log.logger.error('元素不存在')
self.fun.saveScreenshot('camera02')
except NoAlertPresentException as b: # 捕获意外的异常
log.logger.warning('警告')
self.fun.saveScreenshot('camera03')
except Exception as result:
log.logger.critical('严重')
self.fun.saveScreenshot('camera04')
else:
pass # 没有错的情况下执行
finally:
pass # 有没有错,都会执行"""
self.login.click_File_me() #My page icon
# self.login.click_File_Home() #bottom Edit icon
# self.edit.click_File_cloud_engine() #cloud clip
# self.prime.click_File_open01() # subscribe now
# self.action.click_File_pay_closed() # close the payment sheet
# self.action.click_File_wx() # choose WeChat Pay
# self.login.click_File_tv_login()
# self.login.input_user(username)
# self.login.input_password(password)
# self.login.click_File_tv_commit()
# self.prime.click_File_prime01() #tap [prime]
# self.prime.click_File_year() #year plan
# time.sleep(5)
# self.prime.click_File_open01() #subscribe now
# time.sleep(5)
# self.prime.click_File_Service_Agreement() #service agreement
# self.prime.click_File_Service_Back() #service agreement back button
# self.prime.click_File_Privacy_Policy() #privacy policy
# self.prime.click_File_Service_Back() #privacy policy back button
# time.sleep(2)
# self.fun.swip_left02() #swipe left
# time.sleep(5)
# self.prime.click_File_buy_equipment02() #buy two devices
# time.sleep(5)
# self.driver.keyevent(4) #back button
# time.sleep(2)
# self.fun.swip_up()
# time.sleep(5)
"""# self.driver.get_screenshot_as_file('../screen/test_camera.png') #直接存入报告
self.fun.saveScreenshot('camera')
self.fun.saveScreenshot('help')
self.fun.click(File_enter)"""
# shut down the driver
@classmethod
def tearDownClass(self):
self.driver.quit()
if __name__ == '__main__':
unittest.main()
"""suite = unittest.TestSuite()
suite.addTest(Camera_test("test_camera"))
soundbox_device = 'XYBK01011204300001'
now = time.strftime("%Y-%m-%d %H_%M_%S")
##将当前时间加入到报告文件名称中,定义测试报告存放路径
filename = '../report/' + now + 'result.html' # 第一种方法,相对路径下
'''# filename = '../report/report' + now + 'result.html' # 第二种方法,相对路径下+report报告名字
# filename = '../report'+ os.sep+ 'report' + now + 'result.html' #第二种方法,相对路径下,上级目录../report文件夹下,分隔符下文件名 os.sep+ 'report'
# filename = os.getcwd() + os.sep+ 'report' + now + 'result.html' #相对路径下,该路径下的,所以就到test_case下面了
# filename = os.path.dirname(os.path.dirname(__file__))+ os.sep + 'report' + now + 'result.html' #相对路径下,该路径下的,所以就到test_case下面了
# filename = 'D:\py\ZY_Cami\\report\\report' + now + 'result.html' #绝对路径下
# filename = '../' + now + 'result.html' #../是上级目录,还是在上级目录下
# filename = '../' + os.sep + 'report' + now + 'result.html' #还是在上级目录下
# filename = '../report' + now + 'result.html' #还是在上级目录下'''
fp = open(filename, 'wb')
runner = HTMLTestRunnerCN3_pie_chart_screen.HTMLTestRunner(stream=fp, title='测试报告', tester='王丹',
device=(soundbox_device),
description='用例执行情况:')
runner.run(suite)
# runner.run(Suit())
fp.close()"""
|
[
"1065913054@qq.com"
] |
1065913054@qq.com
|
de53abff3d0ed3259297bf19dad05ed4f09e5723
|
b8cc6d34ad44bf5c28fcca9e0df01d9ebe0ee339
|
/Python100例/Python练习实例87.py
|
eb040332bbd64b5daa5056ab2b402bd6c2c73df0
|
[] |
no_license
|
python-yc/pycharm_script
|
ae0e72898ef44a9de47e7548170a030c0a752eb5
|
c8947849090c71e131df5dc32173ebe9754df951
|
refs/heads/master
| 2023-01-05T06:16:33.857668
| 2020-10-31T08:09:53
| 2020-10-31T08:09:53
| 296,778,670
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 289
|
py
|
'''
Problem: print the result (a struct-like object passed into a function).
Analysis: none.
'''
if __name__ == '__main__':
class Student():
x = 0
c = 0
def f(stu):
stu.x = 20
stu.c = 'c'
a = Student()
a.x = 3
a.c = 'a'
f(a)
print(a.x,a.c)
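# prints "20 c": f() mutates the instance in place, so the caller sees the change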
|
[
"15655982512.com"
] |
15655982512.com
|
e7ddeda006ae2b971e70239a650e89491dbacd25
|
f4b60f5e49baf60976987946c20a8ebca4880602
|
/lib/python2.7/site-packages/acimodel-1.3_2j-py2.7.egg/cobra/modelimpl/comp/trnsmtderrpkts1h.py
|
9927dce493959ad637ac5073e7dc30002187f9e6
|
[] |
no_license
|
cqbomb/qytang_aci
|
12e508d54d9f774b537c33563762e694783d6ba8
|
a7fab9d6cda7fadcc995672e55c0ef7e7187696e
|
refs/heads/master
| 2022-12-21T13:30:05.240231
| 2018-12-04T01:46:53
| 2018-12-04T01:46:53
| 159,911,666
| 0
| 0
| null | 2022-12-07T23:53:02
| 2018-12-01T05:17:50
|
Python
|
UTF-8
|
Python
| false
| false
| 20,524
|
py
|
# coding=UTF-8
# **********************************************************************
# Copyright (c) 2013-2016 Cisco Systems, Inc. All rights reserved
# written by zen warriors, do not modify!
# **********************************************************************
from cobra.mit.meta import ClassMeta
from cobra.mit.meta import StatsClassMeta
from cobra.mit.meta import CounterMeta
from cobra.mit.meta import PropMeta
from cobra.mit.meta import Category
from cobra.mit.meta import SourceRelationMeta
from cobra.mit.meta import NamedSourceRelationMeta
from cobra.mit.meta import TargetRelationMeta
from cobra.mit.meta import DeploymentPathMeta, DeploymentCategory
from cobra.model.category import MoCategory, PropCategory, CounterCategory
from cobra.mit.mo import Mo
# ##################################################
class TrnsmtdErrPkts1h(Mo):
"""
A class that represents the most current statistics for transmitted error packets in a 1 hour sampling interval. This class updates every 15 minutes.
"""
meta = StatsClassMeta("cobra.model.comp.TrnsmtdErrPkts1h", "transmitted error packets")
counter = CounterMeta("error", CounterCategory.COUNTER, "packets", "transmitted error packets")
counter._propRefs[PropCategory.IMPLICIT_LASTREADING] = "errorLast"
counter._propRefs[PropCategory.IMPLICIT_CUMULATIVE] = "errorCum"
counter._propRefs[PropCategory.IMPLICIT_PERIODIC] = "errorPer"
counter._propRefs[PropCategory.IMPLICIT_MIN] = "errorMin"
counter._propRefs[PropCategory.IMPLICIT_MAX] = "errorMax"
counter._propRefs[PropCategory.IMPLICIT_AVG] = "errorAvg"
counter._propRefs[PropCategory.IMPLICIT_SUSPECT] = "errorSpct"
counter._propRefs[PropCategory.IMPLICIT_BASELINE] = "errorBase"
counter._propRefs[PropCategory.IMPLICIT_THRESHOLDED] = "errorThr"
counter._propRefs[PropCategory.IMPLICIT_TREND_BASE] = "errorTrBase"
counter._propRefs[PropCategory.IMPLICIT_TREND] = "errorTr"
counter._propRefs[PropCategory.IMPLICIT_RATE] = "errorRate"
meta._counters.append(counter)
counter = CounterMeta("drop", CounterCategory.COUNTER, "packets", "transmitted dropped packets")
counter._propRefs[PropCategory.IMPLICIT_LASTREADING] = "dropLast"
counter._propRefs[PropCategory.IMPLICIT_CUMULATIVE] = "dropCum"
counter._propRefs[PropCategory.IMPLICIT_PERIODIC] = "dropPer"
counter._propRefs[PropCategory.IMPLICIT_MIN] = "dropMin"
counter._propRefs[PropCategory.IMPLICIT_MAX] = "dropMax"
counter._propRefs[PropCategory.IMPLICIT_AVG] = "dropAvg"
counter._propRefs[PropCategory.IMPLICIT_SUSPECT] = "dropSpct"
counter._propRefs[PropCategory.IMPLICIT_BASELINE] = "dropBase"
counter._propRefs[PropCategory.IMPLICIT_THRESHOLDED] = "dropThr"
counter._propRefs[PropCategory.IMPLICIT_TREND_BASE] = "dropTrBase"
counter._propRefs[PropCategory.IMPLICIT_TREND] = "dropTr"
counter._propRefs[PropCategory.IMPLICIT_RATE] = "dropRate"
meta._counters.append(counter)
meta.moClassName = "compTrnsmtdErrPkts1h"
meta.rnFormat = "CDcompTrnsmtdErrPkts1h"
meta.category = MoCategory.STATS_CURRENT
meta.label = "current transmitted error packets stats in 1 hour"
meta.writeAccessMask = 0x1
meta.readAccessMask = 0x1
meta.isDomainable = False
meta.isReadOnly = True
meta.isConfigurable = False
meta.isDeletable = False
meta.isContextRoot = True
meta.parentClasses.add("cobra.model.comp.Hv")
meta.parentClasses.add("cobra.model.comp.HpNic")
meta.parentClasses.add("cobra.model.comp.VNic")
meta.parentClasses.add("cobra.model.comp.Vm")
meta.superClasses.add("cobra.model.stats.Item")
meta.superClasses.add("cobra.model.stats.Curr")
meta.superClasses.add("cobra.model.comp.TrnsmtdErrPkts")
meta.rnPrefixes = [
('CDcompTrnsmtdErrPkts1h', False),
]
prop = PropMeta("str", "childAction", "childAction", 4, PropCategory.CHILD_ACTION)
prop.label = "None"
prop.isImplicit = True
prop.isAdmin = True
prop._addConstant("deleteAll", "deleteall", 16384)
prop._addConstant("deleteNonPresent", "deletenonpresent", 8192)
prop._addConstant("ignore", "ignore", 4096)
meta.props.add("childAction", prop)
prop = PropMeta("str", "cnt", "cnt", 16212, PropCategory.REGULAR)
prop.label = "Number of Collections During this Interval"
prop.isImplicit = True
prop.isAdmin = True
meta.props.add("cnt", prop)
prop = PropMeta("str", "dn", "dn", 1, PropCategory.DN)
prop.label = "None"
prop.isDn = True
prop.isImplicit = True
prop.isAdmin = True
prop.isCreateOnly = True
meta.props.add("dn", prop)
prop = PropMeta("str", "dropAvg", "dropAvg", 7739, PropCategory.IMPLICIT_AVG)
prop.label = "transmitted dropped packets average value"
prop.isOper = True
prop.isStats = True
meta.props.add("dropAvg", prop)
prop = PropMeta("str", "dropBase", "dropBase", 7734, PropCategory.IMPLICIT_BASELINE)
prop.label = "transmitted dropped packets baseline"
prop.isOper = True
prop.isStats = True
meta.props.add("dropBase", prop)
prop = PropMeta("str", "dropCum", "dropCum", 7735, PropCategory.IMPLICIT_CUMULATIVE)
prop.label = "transmitted dropped packets cumulative"
prop.isOper = True
prop.isStats = True
meta.props.add("dropCum", prop)
prop = PropMeta("str", "dropLast", "dropLast", 7733, PropCategory.IMPLICIT_LASTREADING)
prop.label = "transmitted dropped packets current value"
prop.isOper = True
prop.isStats = True
meta.props.add("dropLast", prop)
prop = PropMeta("str", "dropMax", "dropMax", 7738, PropCategory.IMPLICIT_MAX)
prop.label = "transmitted dropped packets maximum value"
prop.isOper = True
prop.isStats = True
meta.props.add("dropMax", prop)
prop = PropMeta("str", "dropMin", "dropMin", 7737, PropCategory.IMPLICIT_MIN)
prop.label = "transmitted dropped packets minimum value"
prop.isOper = True
prop.isStats = True
meta.props.add("dropMin", prop)
prop = PropMeta("str", "dropPer", "dropPer", 7736, PropCategory.IMPLICIT_PERIODIC)
prop.label = "transmitted dropped packets periodic"
prop.isOper = True
prop.isStats = True
meta.props.add("dropPer", prop)
prop = PropMeta("str", "dropRate", "dropRate", 7744, PropCategory.IMPLICIT_RATE)
prop.label = "transmitted dropped packets rate"
prop.isOper = True
prop.isStats = True
meta.props.add("dropRate", prop)
prop = PropMeta("str", "dropSpct", "dropSpct", 7740, PropCategory.IMPLICIT_SUSPECT)
prop.label = "transmitted dropped packets suspect count"
prop.isOper = True
prop.isStats = True
meta.props.add("dropSpct", prop)
prop = PropMeta("str", "dropThr", "dropThr", 7741, PropCategory.IMPLICIT_THRESHOLDED)
prop.label = "transmitted dropped packets thresholded flags"
prop.isOper = True
prop.isStats = True
prop.defaultValue = 0
prop.defaultValueStr = "unspecified"
prop._addConstant("avgCrit", "avg-severity-critical", 2199023255552)
prop._addConstant("avgHigh", "avg-crossed-high-threshold", 68719476736)
prop._addConstant("avgLow", "avg-crossed-low-threshold", 137438953472)
prop._addConstant("avgMajor", "avg-severity-major", 1099511627776)
prop._addConstant("avgMinor", "avg-severity-minor", 549755813888)
prop._addConstant("avgRecovering", "avg-recovering", 34359738368)
prop._addConstant("avgWarn", "avg-severity-warning", 274877906944)
prop._addConstant("cumulativeCrit", "cumulative-severity-critical", 8192)
prop._addConstant("cumulativeHigh", "cumulative-crossed-high-threshold", 256)
prop._addConstant("cumulativeLow", "cumulative-crossed-low-threshold", 512)
prop._addConstant("cumulativeMajor", "cumulative-severity-major", 4096)
prop._addConstant("cumulativeMinor", "cumulative-severity-minor", 2048)
prop._addConstant("cumulativeRecovering", "cumulative-recovering", 128)
prop._addConstant("cumulativeWarn", "cumulative-severity-warning", 1024)
prop._addConstant("lastReadingCrit", "lastreading-severity-critical", 64)
prop._addConstant("lastReadingHigh", "lastreading-crossed-high-threshold", 2)
prop._addConstant("lastReadingLow", "lastreading-crossed-low-threshold", 4)
prop._addConstant("lastReadingMajor", "lastreading-severity-major", 32)
prop._addConstant("lastReadingMinor", "lastreading-severity-minor", 16)
prop._addConstant("lastReadingRecovering", "lastreading-recovering", 1)
prop._addConstant("lastReadingWarn", "lastreading-severity-warning", 8)
prop._addConstant("maxCrit", "max-severity-critical", 17179869184)
prop._addConstant("maxHigh", "max-crossed-high-threshold", 536870912)
prop._addConstant("maxLow", "max-crossed-low-threshold", 1073741824)
prop._addConstant("maxMajor", "max-severity-major", 8589934592)
prop._addConstant("maxMinor", "max-severity-minor", 4294967296)
prop._addConstant("maxRecovering", "max-recovering", 268435456)
prop._addConstant("maxWarn", "max-severity-warning", 2147483648)
prop._addConstant("minCrit", "min-severity-critical", 134217728)
prop._addConstant("minHigh", "min-crossed-high-threshold", 4194304)
prop._addConstant("minLow", "min-crossed-low-threshold", 8388608)
prop._addConstant("minMajor", "min-severity-major", 67108864)
prop._addConstant("minMinor", "min-severity-minor", 33554432)
prop._addConstant("minRecovering", "min-recovering", 2097152)
prop._addConstant("minWarn", "min-severity-warning", 16777216)
prop._addConstant("periodicCrit", "periodic-severity-critical", 1048576)
prop._addConstant("periodicHigh", "periodic-crossed-high-threshold", 32768)
prop._addConstant("periodicLow", "periodic-crossed-low-threshold", 65536)
prop._addConstant("periodicMajor", "periodic-severity-major", 524288)
prop._addConstant("periodicMinor", "periodic-severity-minor", 262144)
prop._addConstant("periodicRecovering", "periodic-recovering", 16384)
prop._addConstant("periodicWarn", "periodic-severity-warning", 131072)
prop._addConstant("rateCrit", "rate-severity-critical", 36028797018963968)
prop._addConstant("rateHigh", "rate-crossed-high-threshold", 1125899906842624)
prop._addConstant("rateLow", "rate-crossed-low-threshold", 2251799813685248)
prop._addConstant("rateMajor", "rate-severity-major", 18014398509481984)
prop._addConstant("rateMinor", "rate-severity-minor", 9007199254740992)
prop._addConstant("rateRecovering", "rate-recovering", 562949953421312)
prop._addConstant("rateWarn", "rate-severity-warning", 4503599627370496)
prop._addConstant("trendCrit", "trend-severity-critical", 281474976710656)
prop._addConstant("trendHigh", "trend-crossed-high-threshold", 8796093022208)
prop._addConstant("trendLow", "trend-crossed-low-threshold", 17592186044416)
prop._addConstant("trendMajor", "trend-severity-major", 140737488355328)
prop._addConstant("trendMinor", "trend-severity-minor", 70368744177664)
prop._addConstant("trendRecovering", "trend-recovering", 4398046511104)
prop._addConstant("trendWarn", "trend-severity-warning", 35184372088832)
prop._addConstant("unspecified", None, 0)
meta.props.add("dropThr", prop)
prop = PropMeta("str", "dropTr", "dropTr", 7743, PropCategory.IMPLICIT_TREND)
prop.label = "transmitted dropped packets trend"
prop.isOper = True
prop.isStats = True
meta.props.add("dropTr", prop)
prop = PropMeta("str", "dropTrBase", "dropTrBase", 7742, PropCategory.IMPLICIT_TREND_BASE)
prop.label = "transmitted dropped packets trend baseline"
prop.isOper = True
prop.isStats = True
meta.props.add("dropTrBase", prop)
prop = PropMeta("str", "errorAvg", "errorAvg", 7760, PropCategory.IMPLICIT_AVG)
prop.label = "transmitted error packets average value"
prop.isOper = True
prop.isStats = True
meta.props.add("errorAvg", prop)
prop = PropMeta("str", "errorBase", "errorBase", 7755, PropCategory.IMPLICIT_BASELINE)
prop.label = "transmitted error packets baseline"
prop.isOper = True
prop.isStats = True
meta.props.add("errorBase", prop)
prop = PropMeta("str", "errorCum", "errorCum", 7756, PropCategory.IMPLICIT_CUMULATIVE)
prop.label = "transmitted error packets cumulative"
prop.isOper = True
prop.isStats = True
meta.props.add("errorCum", prop)
prop = PropMeta("str", "errorLast", "errorLast", 7754, PropCategory.IMPLICIT_LASTREADING)
prop.label = "transmitted error packets current value"
prop.isOper = True
prop.isStats = True
meta.props.add("errorLast", prop)
prop = PropMeta("str", "errorMax", "errorMax", 7759, PropCategory.IMPLICIT_MAX)
prop.label = "transmitted error packets maximum value"
prop.isOper = True
prop.isStats = True
meta.props.add("errorMax", prop)
prop = PropMeta("str", "errorMin", "errorMin", 7758, PropCategory.IMPLICIT_MIN)
prop.label = "transmitted error packets minimum value"
prop.isOper = True
prop.isStats = True
meta.props.add("errorMin", prop)
prop = PropMeta("str", "errorPer", "errorPer", 7757, PropCategory.IMPLICIT_PERIODIC)
prop.label = "transmitted error packets periodic"
prop.isOper = True
prop.isStats = True
meta.props.add("errorPer", prop)
prop = PropMeta("str", "errorRate", "errorRate", 7765, PropCategory.IMPLICIT_RATE)
prop.label = "transmitted error packets rate"
prop.isOper = True
prop.isStats = True
meta.props.add("errorRate", prop)
prop = PropMeta("str", "errorSpct", "errorSpct", 7761, PropCategory.IMPLICIT_SUSPECT)
prop.label = "transmitted error packets suspect count"
prop.isOper = True
prop.isStats = True
meta.props.add("errorSpct", prop)
prop = PropMeta("str", "errorThr", "errorThr", 7762, PropCategory.IMPLICIT_THRESHOLDED)
prop.label = "transmitted error packets thresholded flags"
prop.isOper = True
prop.isStats = True
prop.defaultValue = 0
prop.defaultValueStr = "unspecified"
prop._addConstant("avgCrit", "avg-severity-critical", 2199023255552)
prop._addConstant("avgHigh", "avg-crossed-high-threshold", 68719476736)
prop._addConstant("avgLow", "avg-crossed-low-threshold", 137438953472)
prop._addConstant("avgMajor", "avg-severity-major", 1099511627776)
prop._addConstant("avgMinor", "avg-severity-minor", 549755813888)
prop._addConstant("avgRecovering", "avg-recovering", 34359738368)
prop._addConstant("avgWarn", "avg-severity-warning", 274877906944)
prop._addConstant("cumulativeCrit", "cumulative-severity-critical", 8192)
prop._addConstant("cumulativeHigh", "cumulative-crossed-high-threshold", 256)
prop._addConstant("cumulativeLow", "cumulative-crossed-low-threshold", 512)
prop._addConstant("cumulativeMajor", "cumulative-severity-major", 4096)
prop._addConstant("cumulativeMinor", "cumulative-severity-minor", 2048)
prop._addConstant("cumulativeRecovering", "cumulative-recovering", 128)
prop._addConstant("cumulativeWarn", "cumulative-severity-warning", 1024)
prop._addConstant("lastReadingCrit", "lastreading-severity-critical", 64)
prop._addConstant("lastReadingHigh", "lastreading-crossed-high-threshold", 2)
prop._addConstant("lastReadingLow", "lastreading-crossed-low-threshold", 4)
prop._addConstant("lastReadingMajor", "lastreading-severity-major", 32)
prop._addConstant("lastReadingMinor", "lastreading-severity-minor", 16)
prop._addConstant("lastReadingRecovering", "lastreading-recovering", 1)
prop._addConstant("lastReadingWarn", "lastreading-severity-warning", 8)
prop._addConstant("maxCrit", "max-severity-critical", 17179869184)
prop._addConstant("maxHigh", "max-crossed-high-threshold", 536870912)
prop._addConstant("maxLow", "max-crossed-low-threshold", 1073741824)
prop._addConstant("maxMajor", "max-severity-major", 8589934592)
prop._addConstant("maxMinor", "max-severity-minor", 4294967296)
prop._addConstant("maxRecovering", "max-recovering", 268435456)
prop._addConstant("maxWarn", "max-severity-warning", 2147483648)
prop._addConstant("minCrit", "min-severity-critical", 134217728)
prop._addConstant("minHigh", "min-crossed-high-threshold", 4194304)
prop._addConstant("minLow", "min-crossed-low-threshold", 8388608)
prop._addConstant("minMajor", "min-severity-major", 67108864)
prop._addConstant("minMinor", "min-severity-minor", 33554432)
prop._addConstant("minRecovering", "min-recovering", 2097152)
prop._addConstant("minWarn", "min-severity-warning", 16777216)
prop._addConstant("periodicCrit", "periodic-severity-critical", 1048576)
prop._addConstant("periodicHigh", "periodic-crossed-high-threshold", 32768)
prop._addConstant("periodicLow", "periodic-crossed-low-threshold", 65536)
prop._addConstant("periodicMajor", "periodic-severity-major", 524288)
prop._addConstant("periodicMinor", "periodic-severity-minor", 262144)
prop._addConstant("periodicRecovering", "periodic-recovering", 16384)
prop._addConstant("periodicWarn", "periodic-severity-warning", 131072)
prop._addConstant("rateCrit", "rate-severity-critical", 36028797018963968)
prop._addConstant("rateHigh", "rate-crossed-high-threshold", 1125899906842624)
prop._addConstant("rateLow", "rate-crossed-low-threshold", 2251799813685248)
prop._addConstant("rateMajor", "rate-severity-major", 18014398509481984)
prop._addConstant("rateMinor", "rate-severity-minor", 9007199254740992)
prop._addConstant("rateRecovering", "rate-recovering", 562949953421312)
prop._addConstant("rateWarn", "rate-severity-warning", 4503599627370496)
prop._addConstant("trendCrit", "trend-severity-critical", 281474976710656)
prop._addConstant("trendHigh", "trend-crossed-high-threshold", 8796093022208)
prop._addConstant("trendLow", "trend-crossed-low-threshold", 17592186044416)
prop._addConstant("trendMajor", "trend-severity-major", 140737488355328)
prop._addConstant("trendMinor", "trend-severity-minor", 70368744177664)
prop._addConstant("trendRecovering", "trend-recovering", 4398046511104)
prop._addConstant("trendWarn", "trend-severity-warning", 35184372088832)
prop._addConstant("unspecified", None, 0)
meta.props.add("errorThr", prop)
prop = PropMeta("str", "errorTr", "errorTr", 7764, PropCategory.IMPLICIT_TREND)
prop.label = "transmitted error packets trend"
prop.isOper = True
prop.isStats = True
meta.props.add("errorTr", prop)
prop = PropMeta("str", "errorTrBase", "errorTrBase", 7763, PropCategory.IMPLICIT_TREND_BASE)
prop.label = "transmitted error packets trend baseline"
prop.isOper = True
prop.isStats = True
meta.props.add("errorTrBase", prop)
prop = PropMeta("str", "lastCollOffset", "lastCollOffset", 111, PropCategory.REGULAR)
prop.label = "Collection Length"
prop.isImplicit = True
prop.isAdmin = True
meta.props.add("lastCollOffset", prop)
prop = PropMeta("str", "modTs", "modTs", 7, PropCategory.REGULAR)
prop.label = "None"
prop.isImplicit = True
prop.isAdmin = True
prop.defaultValue = 0
prop.defaultValueStr = "never"
prop._addConstant("never", "never", 0)
meta.props.add("modTs", prop)
prop = PropMeta("str", "repIntvEnd", "repIntvEnd", 110, PropCategory.REGULAR)
prop.label = "Reporting End Time"
prop.isImplicit = True
prop.isAdmin = True
meta.props.add("repIntvEnd", prop)
prop = PropMeta("str", "repIntvStart", "repIntvStart", 109, PropCategory.REGULAR)
prop.label = "Reporting Start Time"
prop.isImplicit = True
prop.isAdmin = True
meta.props.add("repIntvStart", prop)
prop = PropMeta("str", "rn", "rn", 2, PropCategory.RN)
prop.label = "None"
prop.isRn = True
prop.isImplicit = True
prop.isAdmin = True
prop.isCreateOnly = True
meta.props.add("rn", prop)
prop = PropMeta("str", "status", "status", 3, PropCategory.STATUS)
prop.label = "None"
prop.isImplicit = True
prop.isAdmin = True
prop._addConstant("created", "created", 2)
prop._addConstant("deleted", "deleted", 8)
prop._addConstant("modified", "modified", 4)
meta.props.add("status", prop)
def __init__(self, parentMoOrDn, markDirty=True, **creationProps):
namingVals = []
Mo.__init__(self, parentMoOrDn, markDirty, *namingVals, **creationProps)
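# A hypothetical query sketch (assumes a logged-in cobra MoDirectory `md`,
# which is not part of this generated file):
# stats = md.lookupByClass('compTrnsmtdErrPkts1h')
# for s in stats:
# print(s.dn, s.errorCum, s.dropCum)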
# End of package file
# ##################################################
|
[
"collinsctk@qytang.com"
] |
collinsctk@qytang.com
|
1a874b1f92374058db5c3ca342920bc09dacd61a
|
aea8fea216234fd48269e4a1830b345c52d85de2
|
/fhir/resources/STU3/deviceusestatement.py
|
7db6646ba3f74ade538fe51cd3a2428909343d91
|
[
"BSD-3-Clause"
] |
permissive
|
mmabey/fhir.resources
|
67fce95c6b35bfdc3cbbc8036e02c962a6a7340c
|
cc73718e9762c04726cd7de240c8f2dd5313cbe1
|
refs/heads/master
| 2023-04-12T15:50:30.104992
| 2020-04-11T17:21:36
| 2020-04-11T17:21:36
| 269,712,884
| 0
| 0
|
NOASSERTION
| 2020-06-05T17:03:04
| 2020-06-05T17:03:04
| null |
UTF-8
|
Python
| false
| false
| 6,807
|
py
|
# -*- coding: utf-8 -*-
"""
Profile: http://hl7.org/fhir/StructureDefinition/DeviceUseStatement
Release: STU3
Version: 3.0.2
Revision: 11917
Last updated: 2019-10-24T11:53:00+11:00
"""
import sys
from . import domainresource
class DeviceUseStatement(domainresource.DomainResource):
""" Record of use of a device.
A record of a device being used by a patient where the record is the result
of a report from the patient or another clinician.
"""
resource_type = "DeviceUseStatement"
def __init__(self, jsondict=None, strict=True):
""" Initialize all valid properties.
:raises: FHIRValidationError on validation errors, unless strict is False
:param dict jsondict: A JSON dictionary to use for initialization
:param bool strict: If True (the default), invalid variables will raise a TypeError
"""
self.bodySite = None
""" Target body site.
Type `CodeableConcept` (represented as `dict` in JSON). """
self.device = None
""" Reference to device used.
Type `FHIRReference` referencing `['Device']` (represented as `dict` in JSON). """
self.identifier = None
""" External identifier for this record.
List of `Identifier` items (represented as `dict` in JSON). """
self.indication = None
""" Why device was used.
List of `CodeableConcept` items (represented as `dict` in JSON). """
self.note = None
""" Addition details (comments, instructions).
List of `Annotation` items (represented as `dict` in JSON). """
self.recordedOn = None
""" When statement was recorded.
Type `FHIRDate` (represented as `str` in JSON). """
self.source = None
""" Who made the statement.
Type `FHIRReference` referencing `['Patient'], ['Practitioner'], ['RelatedPerson']` (represented as `dict` in JSON). """
self.status = None
""" active | completed | entered-in-error +.
Type `str`. """
self.subject = None
""" Patient using device.
Type `FHIRReference` referencing `['Patient'], ['Group']` (represented as `dict` in JSON). """
self.timingDateTime = None
""" How often the device was used.
Type `FHIRDate` (represented as `str` in JSON). """
self.timingPeriod = None
""" How often the device was used.
Type `Period` (represented as `dict` in JSON). """
self.timingTiming = None
""" How often the device was used.
Type `Timing` (represented as `dict` in JSON). """
self.whenUsed = None
""" Period device was used.
Type `Period` (represented as `dict` in JSON). """
super(DeviceUseStatement, self).__init__(jsondict=jsondict, strict=strict)
def elementProperties(self):
js = super(DeviceUseStatement, self).elementProperties()
js.extend(
[
(
"bodySite",
"bodySite",
codeableconcept.CodeableConcept,
"CodeableConcept",
False,
None,
False,
),
(
"device",
"device",
fhirreference.FHIRReference,
"Reference",
False,
None,
True,
),
(
"identifier",
"identifier",
identifier.Identifier,
"Identifier",
True,
None,
False,
),
(
"indication",
"indication",
codeableconcept.CodeableConcept,
"CodeableConcept",
True,
None,
False,
),
(
"note",
"note",
annotation.Annotation,
"Annotation",
True,
None,
False,
),
(
"recordedOn",
"recordedOn",
fhirdate.FHIRDate,
"dateTime",
False,
None,
False,
),
(
"source",
"source",
fhirreference.FHIRReference,
"Reference",
False,
None,
False,
),
("status", "status", str, "code", False, None, True),
(
"subject",
"subject",
fhirreference.FHIRReference,
"Reference",
False,
None,
True,
),
(
"timingDateTime",
"timingDateTime",
fhirdate.FHIRDate,
"dateTime",
False,
"timing",
False,
),
(
"timingPeriod",
"timingPeriod",
period.Period,
"Period",
False,
"timing",
False,
),
(
"timingTiming",
"timingTiming",
timing.Timing,
"Timing",
False,
"timing",
False,
),
("whenUsed", "whenUsed", period.Period, "Period", False, None, False),
]
)
return js
try:
from . import annotation
except ImportError:
annotation = sys.modules[__package__ + ".annotation"]
try:
from . import codeableconcept
except ImportError:
codeableconcept = sys.modules[__package__ + ".codeableconcept"]
try:
from . import fhirdate
except ImportError:
fhirdate = sys.modules[__package__ + ".fhirdate"]
try:
from . import fhirreference
except ImportError:
fhirreference = sys.modules[__package__ + ".fhirreference"]
try:
from . import identifier
except ImportError:
identifier = sys.modules[__package__ + ".identifier"]
try:
from . import period
except ImportError:
period = sys.modules[__package__ + ".period"]
try:
from . import timing
except ImportError:
timing = sys.modules[__package__ + ".timing"]
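# A minimal construction sketch (reference values are illustrative, not from
# this file; status, device and subject are the required fields above):
# stmt = DeviceUseStatement({"status": "active",
# "device": {"reference": "Device/123"},
# "subject": {"reference": "Patient/456"}})
# stmt.as_json() # back to a JSON-ready dict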
|
[
"connect2nazrul@gmail.com"
] |
connect2nazrul@gmail.com
|
5efcbd8e26a79950b89b0c5a334dd80056969df4
|
fe109bdb960bfbf7255ad89eef862f0811d613b1
|
/action_plugins/storage.py
|
dfb35a8070ae2f15d6ea1bc63b1ad6c142a46981
|
[
"Apache-2.0"
] |
permissive
|
FlorianHeigl/ansible-role-storage
|
7b4ee7606d30cd75d1467cedff474b048682e28d
|
327c522dff56bb837fc239c9da77eeef4ec283a0
|
refs/heads/master
| 2023-03-19T11:53:51.827490
| 2019-09-13T11:36:20
| 2019-09-13T11:36:20
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 19,267
|
py
|
# Copyright (c) 2018, Red Hat, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from __future__ import (absolute_import, division, print_function)
import importlib
import json
import os
import sqlite3
import six
import ansible
from ansible.plugins import action
try:
from ansible.utils import sentinel
except ImportError:
sentinel = None
DEFAULT_PROVIDER = 'cinderlib'
BLOCK = 1
FILESYSTEM = 2
STORAGE_DATA = 'storage_data'
STORAGE_ATTRIBUTES = 'storage_attributes'
PROVIDER_CONFIG = 'provider_config'
BACKEND_CONFIG = 'backend_config'
CONSUMER_CONFIG = 'consumer_config'
if tuple(map(int, (ansible.__version__.split(".")))) < (2, 7, 0):
KWARGS_TEMPLATE = {'bare_deprecated': False}
else:
KWARGS_TEMPLATE = {}
class MissingException(Exception):
def __init__(self, var):
msg = 'Missing parameter %s' % var
super(MissingException, self).__init__(msg)
class NonUnique(Exception):
def __init__(self, var):
msg = 'There is not enough info to uniquely identify resource %s' % var
super(NonUnique, self).__init__(msg)
class NotFound(Exception):
def __init__(self, var):
msg = 'Resource %s not found' % var
super(NotFound, self).__init__(msg)
class BackendObj(object):
__slots__ = ('id', 'name', 'provider', 'data', 'host', 'attributes',
'ctxt')
FIELDS = __slots__
def __init__(self, decryptor, values):
for f, v in zip(self.FIELDS, values):
setattr(self, f, v)
# We could lazy load these
self.attributes = json.loads(self.attributes)
self.data = decryptor(self.data)
self.ctxt = decryptor(self.ctxt)
# Reconstruct sets
sets = self.ctxt.pop('___sets')
for key in sets:
self.ctxt['_attributes'][key] = set(self.ctxt['_attributes'][key])
sets = self.ctxt.pop('___sets_defaults', tuple())
for key in sets:
self.ctxt['_attr_defaults'][key] = set(
self.ctxt['_attr_defaults'][key])
# Reconstruct sentinels
sentinels = self.ctxt.pop('___sentinels')
for key in sentinels:
self.ctxt['_attributes'][key] = sentinel.Sentinel
if '_become_plugin' in self.ctxt:
become_type, become_vars = self.ctxt['_become_plugin']
path = become_vars['_original_path'].split(os.path.sep)
# Remove the .py extension
path[-1] = path[-1].rsplit('.', 1)[0]
# Convert to namespace
namespace = '.'.join(path[path.index('ansible'):])
# Import and set variables
module = importlib.import_module(namespace)
plugin = getattr(module, become_type)()
vars(plugin).clear()
vars(plugin).update(become_vars)
self.ctxt['_become_plugin'] = plugin
class DB(object):
BACKEND_FIELDS_STR = ', '.join(BackendObj.FIELDS[1:])
def __init__(self, templar, task_info):
self.task_info = task_info
inv = templar.template('inventory_file', convert_bare=True,
fail_on_undefined=True, **KWARGS_TEMPLATE)
inv_path, inv_name = os.path.split(inv)
self.db = sqlite3.connect(self.task_info['db_name'])
self.cursor = self.db.cursor()
def delete_backend(self, backend_id):
self._delete(id=backend_id)
self.db.commit()
def providers(self):
result = self._query('provider', 'SELECT DISTINCT')
return [p[0] for p in result if p[0]]
@staticmethod
def _build_filters(filters):
filters = {k: v for k, v in filters.items() if v is not None}
if not filters:
return ''
return ' WHERE ' + ' and '.join('%s=:%s' % (f, f)
for f, v in filters.items() if v)
def _delete(self, **filters):
filters_q = self._build_filters(filters)
query = 'DELETE FROM backends%s' % filters_q
self.cursor.execute(query, filters)
def _query(self, fields=None, action='SELECT', **filters):
if not fields or fields == '*':
fields = BackendObj.FIELDS
if isinstance(fields, six.string_types):
fields = [fields]
str_fields = ', '.join(fields)
filters_q = self._build_filters(filters)
query = '%s %s FROM backends%s' % (action, str_fields, filters_q)
self.cursor.execute(query, filters)
results = [BackendObj(self._decrypt, res)
for res in self.cursor.fetchall()]
return results
def backends(self, provider=None):
backends = self._query('*', provider=provider)
return backends
def backend(self, backend=None, provider=None):
backends = self._query('*', name=backend, provider=provider)
if len(backends) == 0:
raise NotFound({'backend': backend, 'provider': provider})
if len(backends) > 1:
raise NonUnique({'backend': backend, 'provider': provider})
res = backends[0]
return res
def create_backend(self, name, provider, data, host, storage_attributes,
ctxt):
data = self._encrypt(data)
attributes = json.dumps(storage_attributes)
ctxt = self._encrypt(ctxt)
args = (name, provider, data, host, attributes, ctxt)
self.cursor.execute('INSERT INTO backends (%s) VALUES (?, ?, ?, ?, '
'?, ?)' % self.BACKEND_FIELDS_STR, args)
self.db.commit()
def save_consumer(self, provider, consumer_config, consumer_module):
config = json.dumps(consumer_config)
self.cursor.execute('REPLACE INTO providers (name, consumer_data, '
'consumer_module) VALUES (?, ?, ?)',
(provider, config, consumer_module))
self.db.commit()
def get_consumer(self, provider):
self.cursor.execute('SELECT consumer_data, consumer_module '
'FROM providers WHERE name=?', (provider,))
res = self.cursor.fetchone()
if not res:
raise NotFound({'provider': provider})
return json.loads(res[0]), res[1]
def _decrypt(self, data):
# TODO: Decrypt data using self.task_info['secret']
try:
return json.loads(data)
except Exception:
return data
def _encrypt(self, data):
if isinstance(data, dict):
data = json.dumps(data)
# TODO: Encrypt data using self.task_info['secret']
return data
class Resource(object):
@staticmethod
def factory(action_module, task, connection, play_context, loader, templar,
shared_loader_obj):
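        # Dispatch on the "resource" task argument: capitalize it and look
        # up the matching Resource subclass among this module's globals.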
args = action_module._task.args
if 'resource' not in args:
raise MissingException('resource')
resource = action_module._task.args['resource']
if not isinstance(resource, six.string_types):
raise TypeError('Invalid "resource" type %s' % type(resource))
cls = globals().get(resource.capitalize())
if not cls or not issubclass(cls, Resource):
raise ValueError('Invalid "resource" value %s' % resource)
return cls(action_module, task, connection, play_context, loader,
templar, shared_loader_obj)
def __init__(self, action_module, task, connection, play_context, loader,
templar, shared_loader_obj):
self._templar = templar
self.action_module = action_module
task_info = self._get_var('storage_task_info')
self.db = DB(templar, task_info)
self._backend = None
self._play_context = play_context
@property
def context(self):
ctxt = self.backend().ctxt.copy()
del ctxt['___fqdn']
del ctxt['___machine_id']
return ctxt
def _get_current_context(self, current=None):
# Make host context JSON compatible
ctxt = vars(self._play_context).copy()
sets = []
sentinels = []
attributes = ctxt['_attributes']
for key, val in attributes.items():
if isinstance(val, set):
sets.append(key)
attributes[key] = list(val)
elif sentinel and val is sentinel.Sentinel:
sentinels.append(key)
del attributes[key]
ctxt['___sets'] = sets
ctxt['___sentinels'] = sentinels
sets2 = []
if '_attr_defaults' in ctxt:
defaults = ctxt['_attr_defaults']
for key, val in defaults.items():
if isinstance(val, set):
sets2.append(key)
defaults[key] = list(val)
ctxt['___sets_defaults'] = sets2
ctxt['___fqdn'] = self._get_var('ansible_fqdn')
ctxt['___machine_id'] = self._get_var('ansible_machine_id')
if '_become_plugin' in ctxt:
ctxt['_become_plugin'] = (type(ctxt['_become_plugin']).__name__,
vars(ctxt['_become_plugin']))
ctxt['_connection_opts'] = self.action_module._connection._options
return ctxt
def _get_controller_data(self):
try:
return self.backend().data
except NotFound:
return {}
def runner(self, module_args=None, ctrl=True, **kwargs):
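        # ctrl=True targets the provider's controller module (backend-side
        # operations); ctrl=False targets the consumer module registered
        # for the provider in the database.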
if module_args is None:
module_args = {}
if ctrl:
module_data = self._get_controller_data()
module_name = self.provider_name + '_storage_controller'
if self._backend:
module_args.setdefault('backend', self._backend.name)
module_args.setdefault('provider', self._backend.provider)
else:
module_data, module_name = self.db.get_consumer(self.provider_name)
module_args[STORAGE_DATA] = module_data
# If this is a controller operation called on consumer pass
# controller context
if (ctrl and module_args.get('resource') != 'backend' and
self.backend().host != self._get_var('ansible_machine_id')):
kwargs['context'] = self.context
return self.action_module.runner(module_name, module_args, **kwargs)
@property
def task(self):
return self.action_module._task
def _select_backend(self):
if self._backend:
return
provider = self.task.args.get('provider')
backend = self.task.args.get('backend')
backends = self.db._query('*', provider=provider, name=backend)
if not backends:
raise NotFound({'backend': backend, 'provider': provider})
if len(backends) == 1:
self._backend = backends[0]
return
for backend in backends:
if backend.provider == DEFAULT_PROVIDER:
self._backend = backend
return
raise NonUnique({'backend': backend, 'provider': provider})
def backend(self):
self._select_backend()
return self._backend
@property
def provider_name(self):
provider = self.task.args.get('provider')
if provider:
return provider
self._select_backend()
return self._backend.provider
def execute(self, task_vars):
self.task_vars = task_vars
result = self.run()
return result or {}
def _get_var(self, name):
if hasattr(self, 'task_vars') and name in self.task_vars:
return self.task_vars[name]
return self._templar.template(name, convert_bare=True,
fail_on_undefined=True,
**KWARGS_TEMPLATE)
def _get_brick_info(self):
args = self.task.args.copy()
ips = self._get_var('ansible_all_ipv4_addresses') or []
ipv6 = self._get_var('ansible_all_ipv6_addresses') or []
ips.extend(ipv6)
pass_args = {
'resource': 'node',
'ips': ips,
'multipath': args.get('multipath', True),
'enforce_multipath': args.get('enforce_multipath', False)
}
return self.runner(pass_args, ctrl=False)
def default_state_run(self, args):
return self.runner(args)
def run(self):
state_runner = getattr(self, self.task.args.get('state'),
self.default_state_run)
return state_runner(self.task.args)
class Node(Resource):
def run(self):
return self._get_brick_info()
class Backend(Resource):
# stats is handled by Resource.default_state_run
def stats(self, args):
return self.default_state_run(args)
def present(self, args):
consumer_config = self.task.args.pop('consumer_config', {})
result = self.runner(self.task.args)
if result.get('failed'):
return result
storage_data = result.pop(STORAGE_DATA, '')
attributes = result.pop(STORAGE_ATTRIBUTES, {})
ctxt = self._get_current_context()
host = self.task_vars.get('ansible_machine_id')
self.db.create_backend(self.task.args['backend'],
self.provider_name,
storage_data,
host,
attributes,
ctxt)
# By default we assume controller module returns data conforming to the
# cinderlib consumer module that can handle many different connections.
consumer = attributes.get('consumer', 'cinderlib_storage_consumer')
self.db.save_consumer(self.provider_name,
{CONSUMER_CONFIG: consumer_config},
consumer)
def absent(self, args):
result = self.runner(self.task.args)
if result.get('failed'):
return result
self.db.delete_backend(self.backend().id)
class Volume(Resource):
# present and absent states handled by Resource.default_state_run
def connected(self, args):
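        # Two phases: the controller exports/maps the volume for this host,
        # then the consumer module runs locally to attach it.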
pass_args = args.copy()
# The connection info must be the connection module name + _info
conn_info_key = self.db.get_consumer(self.provider_name)[1] + '_info'
conn_info = self.task_vars.get(conn_info_key)
if conn_info:
pass_args.update(conn_info)
else:
result = self._get_brick_info()
if result.get('failed', False):
return result
pass_args.update(result[STORAGE_DATA])
pass_args.setdefault('provider', self.provider_name)
pass_args['attached_host'] = self._get_var('ansible_fqdn')
result = self.runner(pass_args)
if result.get('failed', False):
return result
pass_args = args.copy()
pass_args.setdefault('provider', self.provider_name)
pass_args.update(result[STORAGE_DATA])
result = self.runner(pass_args, ctrl=False)
return result
def disconnected(self, args):
args = args.copy()
args.setdefault('provider', self.provider_name)
result = self.runner(args, ctrl=False)
if result.get('failed', False):
return result
pass_args = args.copy()
pass_args['attached_host'] = self._get_var('ansible_fqdn')
result = self.runner(pass_args)
return result
def extended(self, args):
# Make the controller request the extend on the backend
result = self.runner(args)
if result.get('failed', False):
return result
# Make the node notice if it has changed and is attached
if result['attached_host']:
pass_args = args.copy()
# We cannot pass the size or the node won't find the attachment
pass_args.pop('size')
pass_args.pop('old_size', None)
pass_args['new_size'] = result['new_size']
pass_args['provider'] = self.provider_name
# pass_args.update(result[STORAGE_DATA])
result = self.runner(pass_args, ctrl=False)
if result.get('failed', False):
return result
return result
def run(self):
original_args = self.task.args.copy()
# Automatically set the host parameter
self.task.args.setdefault('host', self._get_var('ansible_fqdn'))
try:
return super(Volume, self).run()
finally:
self.task.args = original_args
class ActionModule(action.ActionBase):
def __init__(self, task, connection, play_context, loader, templar,
shared_loader_obj):
# Expand kwargs parameter
kwargs = task.args.pop('kwargs', None)
if kwargs:
task.args.update(**kwargs)
super(ActionModule, self).__init__(task, connection, play_context,
loader, templar, shared_loader_obj)
self.resource = Resource.factory(self, task, connection, play_context,
loader, templar, shared_loader_obj)
def runner(self, module_name, module_args, context=None, **kwargs):
task_vars = kwargs.pop('task_vars', self.task_vars)
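        # When a controller context is given, temporarily impersonate that
        # host: swap in its play context and rebuild the connection plugin.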
if context:
original_ctxt = self._play_context.__dict__
original_connection = self._connection
self._play_context.__dict__ = context
conn_type = self._play_context.connection
self._connection = self._shared_loader_obj.connection_loader.get(
conn_type, self._play_context, self._connection._new_stdin)
if '_connection_opts' in context:
self._connection._options.update(context['_connection_opts'])
self._connection.become = context['_become_plugin']
try:
result = self._execute_module(module_name=module_name,
module_args=module_args,
task_vars=task_vars)
finally:
if context:
self._play_context.__dict__ = original_ctxt
self._connection = original_connection
return result
def run(self, tmp=None, task_vars=None):
self.task_vars = task_vars
self._supports_check_mode = False
self._supports_async = True
result = self.resource.execute(task_vars)
# hack to keep --verbose from showing all the setup module result moved
# from setup module as now we filter out all _ansible_ from result
if self._task.action == 'setup':
result['_ansible_verbose_override'] = True
return result
|
[
"geguileo@redhat.com"
] |
geguileo@redhat.com
|
37c42c2db2f6ccdc031e2a57d830f319f7285191
|
de24f83a5e3768a2638ebcf13cbe717e75740168
|
/moodledata/vpl_data/452/usersdata/278/104002/submittedfiles/avenida.py
|
b830fcc4174b52263fd249e580b8cecb56557fb8
|
[] |
no_license
|
rafaelperazzo/programacao-web
|
95643423a35c44613b0f64bed05bd34780fe2436
|
170dd5440afb9ee68a973f3de13a99aa4c735d79
|
refs/heads/master
| 2021-01-12T14:06:25.773146
| 2017-12-22T16:05:45
| 2017-12-22T16:05:45
| 69,566,344
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 937
|
py
|
# -*- coding: utf-8 -*-
m = int(input('Digite o número de quadras no sentido Norte-Sul: '))
while (m<2 or m>1000):
m = int(input('Digite o número de quadras [2,1000] no sentido Norte-Sul: '))
n = int(input('digite o número de quadras no sentido Leste-Oeste: '))
while (n<2 or n>1000):
n = int(input('digite o número de quadras [2,1000] no sentido Leste-Oeste: '))
valor2 = []
for i in range (0,m,1):
    valor1 = []
    for j in range (0,n,1):
        valor1.append(int(input('Digite o valor de desapropriação da quadra%d%d: ' %(i,j))))
    valor2.append(valor1)
valor = []
for j in range (0,n,1):
    soma = 0
    for i in range (0,m,1):
        soma = soma + valor2[i][j]
    valor.append(soma)
# Keep dropping the larger of the first two column sums until only the
# cheapest avenue (minimum expropriation total) remains
while len(valor) != 1:
    if valor[0] < valor[1]:
        del valor[1]
    else:
        del valor[0]
print(valor[0])
|
[
"rafael.mota@ufca.edu.br"
] |
rafael.mota@ufca.edu.br
|
a7c2ca14b118fd55db1f803488e33a2b02b59688
|
209c2dd5f0ae41ccd41d84e8c86fe4a7e3321715
|
/tests/test_services.py
|
4ff7b8ac73dbcb8b987af56b24d2064124738a01
|
[
"MIT"
] |
permissive
|
vladimirmyshkovski/django-url-shorter
|
4d17b165c14b8584bdabc0a60bd6cab03f88d2dd
|
dcea9f77af951ec3cfc41fbbc4bab951ccab7f41
|
refs/heads/master
| 2021-09-15T13:02:59.286830
| 2018-06-02T16:59:59
| 2018-06-02T16:59:59
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,021
|
py
|
from django.test import TestCase
from url_shorter import services
from url_shorter import models
from . import factories
class TestCreateNewURL(TestCase):
@classmethod
def setUpTestData(cls):
#cls.new_url = factories.URLFactory()
cls.user = factories.UserFactory()
cls.new_url = services.create_new_url(
'https://google.com/',
cls.user
)
cls.new_short_url = services.generate_short_url()
def setUp(self):
pass
def test_new_short_url_created(self):
self.assertNotEqual(self.new_short_url, None)
    def test_new_short_url_is_string(self):
        self.assertIsInstance(self.new_short_url, str)
    def test_new_short_url_length(self):
        self.assertEqual(
            len(self.new_short_url),
            6
        )
def test_new_url_created(self):
self.assertNotEqual(
self.new_url.short_url,
None
)
def test_new_url_is_URL_instance(self):
self.assertTrue(
isinstance(self.new_url, models.URL)
)
def test_new_url_equal_long_url(self):
self.assertEqual(
self.new_url.long_url,
'https://google.com/'
)
def test_new_url_short_url_is_string(self):
self.assertEqual(
type(self.new_url.short_url),
type('')
)
def test_create_many_new_urls_with_one_long_url(self):
for _ in range(10):
url = services.create_new_url('https://google.com/', self.user)
self.assertEqual(
self.new_url,
url
)
def test_create_new_url_without_long_url(self):
url = services.create_new_url('', self.user)
self.assertEqual(
url,
''
)
    def test_create_new_url_without_long_url_equal_instance(self):
url = services.create_new_url('', self.user)
self.assertFalse(isinstance(url, models.URL))
def tearDown(self):
pass
|
[
"narnikgamarnikus@gmail.com"
] |
narnikgamarnikus@gmail.com
|
f5aa734ec84bd5ce364e5dd9f8182e2fe20138ce
|
2a2b26f3fbdd16d99fd65f390acc37cff6783805
|
/backend/tin_marin_23659/urls.py
|
e29fe74e819d07acbddd4a80408b93aca7f3153f
|
[] |
no_license
|
crowdbotics-apps/tin-marin-23659
|
fb6b8dcb98c069a6543039bd95b2c821eed39300
|
f50588680e57d8840dc1e406d5ac5171d67f3c54
|
refs/heads/master
| 2023-02-09T00:02:16.087858
| 2021-01-03T22:16:05
| 2021-01-03T22:16:05
| 326,510,896
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,973
|
py
|
"""tin_marin_23659 URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/2.2/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import path, include
from allauth.account.views import confirm_email
from rest_framework import permissions
from drf_yasg.views import get_schema_view
from drf_yasg import openapi
urlpatterns = [
path("", include("home.urls")),
path("accounts/", include("allauth.urls")),
path("modules/", include("modules.urls")),
path("api/v1/", include("home.api.v1.urls")),
path("admin/", admin.site.urls),
path("users/", include("users.urls", namespace="users")),
path("rest-auth/", include("rest_auth.urls")),
# Override email confirm to use allauth's HTML view instead of rest_auth's API view
path("rest-auth/registration/account-confirm-email/<str:key>/", confirm_email),
path("rest-auth/registration/", include("rest_auth.registration.urls")),
]
admin.site.site_header = "Tin Marin"
admin.site.site_title = "Tin Marin Admin Portal"
admin.site.index_title = "Tin Marin Admin"
# swagger
api_info = openapi.Info(
title="Tin Marin API",
default_version="v1",
description="API documentation for Tin Marin App",
)
schema_view = get_schema_view(
api_info,
public=True,
permission_classes=(permissions.IsAuthenticated,),
)
urlpatterns += [
path("api-docs/", schema_view.with_ui("swagger", cache_timeout=0), name="api_docs")
]
|
[
"team@crowdbotics.com"
] |
team@crowdbotics.com
|
2ccaa438cc6a3f06783e2f09ad8b0a66adeb5558
|
a59dcdb8e8b963a5082d4244889b82a4379510f6
|
/bemani/client/jubeat/clan.py
|
e16cae8c7d9cbb57b4e2a24a5addb98913094f59
|
[
"LicenseRef-scancode-warranty-disclaimer",
"LicenseRef-scancode-public-domain"
] |
permissive
|
vangar/bemaniutils
|
9fdda035eb29e5e0b874d475fdbfdc8b99aeb003
|
284153ef2e2e40014656fbfeb254c05f3a8c61e8
|
refs/heads/trunk
| 2023-04-04T04:00:21.088088
| 2023-02-17T03:32:27
| 2023-02-17T03:40:07
| 332,388,243
| 1
| 0
| null | 2021-01-24T07:07:34
| 2021-01-24T07:07:33
| null |
UTF-8
|
Python
| false
| false
| 33,079
|
py
|
import random
import time
from typing import Any, Dict, List, Optional
from bemani.client.base import BaseClient
from bemani.common import CardCipher, Time
from bemani.protocol import Node
class JubeatClanClient(BaseClient):
NAME = "TEST"
def verify_shopinfo_regist(self) -> None:
call = self.call_node()
# Construct node
shopinfo = Node.void("shopinfo")
shopinfo.set_attribute("method", "regist")
call.add_child(shopinfo)
shop = Node.void("shop")
shopinfo.add_child(shop)
shop.add_child(Node.string("name", ""))
shop.add_child(Node.string("pref", "JP-14"))
shop.add_child(Node.string("softwareid", ""))
shop.add_child(Node.string("systemid", self.pcbid))
shop.add_child(Node.string("hardwareid", "01020304050607080900"))
shop.add_child(Node.string("locationid", "US-1"))
shop.add_child(Node.string("monitor", "D26L155 6252 151"))
testmode = Node.void("testmode")
shop.add_child(testmode)
# Swap with server
resp = self.exchange("", call)
# Verify that response is correct
self.assert_path(resp, "response/shopinfo/data/cabid")
self.assert_path(resp, "response/shopinfo/data/locationid")
self.assert_path(resp, "response/shopinfo/data/tax_phase")
self.assert_path(resp, "response/shopinfo/data/facility/exist")
self.assert_path(resp, "response/shopinfo/data/info/event_info")
self.assert_path(resp, "response/shopinfo/data/info/share_music")
self.assert_path(resp, "response/shopinfo/data/info/genre_def_music")
self.assert_path(resp, "response/shopinfo/data/info/black_jacket_list")
self.assert_path(resp, "response/shopinfo/data/info/white_music_list")
self.assert_path(resp, "response/shopinfo/data/info/white_marker_list")
self.assert_path(resp, "response/shopinfo/data/info/white_theme_list")
self.assert_path(resp, "response/shopinfo/data/info/open_music_list")
self.assert_path(resp, "response/shopinfo/data/info/shareable_music_list")
self.assert_path(resp, "response/shopinfo/data/info/jbox/point")
self.assert_path(resp, "response/shopinfo/data/info/jbox/emblem/normal/index")
self.assert_path(resp, "response/shopinfo/data/info/jbox/emblem/premium/index")
self.assert_path(resp, "response/shopinfo/data/info/born/status")
self.assert_path(resp, "response/shopinfo/data/info/born/year")
self.assert_path(resp, "response/shopinfo/data/info/collection/rating_s")
self.assert_path(resp, "response/shopinfo/data/info/expert_option/is_available")
self.assert_path(
resp, "response/shopinfo/data/info/all_music_matching/is_available"
)
self.assert_path(
resp, "response/shopinfo/data/info/all_music_matching/team/default_flag"
)
self.assert_path(
resp, "response/shopinfo/data/info/all_music_matching/team/redbelk_flag"
)
self.assert_path(
resp, "response/shopinfo/data/info/all_music_matching/team/cyanttle_flag"
)
self.assert_path(
resp, "response/shopinfo/data/info/all_music_matching/team/greenesia_flag"
)
self.assert_path(
resp, "response/shopinfo/data/info/all_music_matching/team/plumpark_flag"
)
self.assert_path(resp, "response/shopinfo/data/info/question_list")
self.assert_path(resp, "response/shopinfo/data/info/drop_list")
self.assert_path(resp, "response/shopinfo/data/info/daily_bonus_list")
self.assert_path(resp, "response/shopinfo/data/info/department/pack_list")
def verify_demodata_get_info(self) -> None:
call = self.call_node()
# Construct node
demodata = Node.void("demodata")
call.add_child(demodata)
demodata.set_attribute("method", "get_info")
pcbinfo = Node.void("pcbinfo")
demodata.add_child(pcbinfo)
pcbinfo.set_attribute("client_data_version", "0")
# Swap with server
resp = self.exchange("", call)
# Verify that response is correct
self.assert_path(resp, "response/demodata/data/info/black_jacket_list")
def verify_demodata_get_news(self) -> None:
call = self.call_node()
# Construct node
demodata = Node.void("demodata")
call.add_child(demodata)
demodata.set_attribute("method", "get_news")
# Swap with server
resp = self.exchange("", call)
# Verify that response is correct
self.assert_path(resp, "response/demodata/data/officialnews")
def verify_demodata_get_jbox_list(self) -> None:
call = self.call_node()
# Construct node
demodata = Node.void("demodata")
call.add_child(demodata)
demodata.set_attribute("method", "get_jbox_list")
# Swap with server
resp = self.exchange("", call)
# Verify that response is correct
self.assert_path(resp, "response/demodata/@status")
def __verify_profile(self, resp: Node) -> int:
self.assert_path(resp, "response/gametop/data/info/event_info")
self.assert_path(resp, "response/gametop/data/info/share_music")
self.assert_path(resp, "response/gametop/data/info/genre_def_music")
self.assert_path(resp, "response/gametop/data/info/black_jacket_list")
self.assert_path(resp, "response/gametop/data/info/white_music_list")
self.assert_path(resp, "response/gametop/data/info/white_marker_list")
self.assert_path(resp, "response/gametop/data/info/white_theme_list")
self.assert_path(resp, "response/gametop/data/info/open_music_list")
self.assert_path(resp, "response/gametop/data/info/shareable_music_list")
self.assert_path(resp, "response/gametop/data/info/jbox/point")
self.assert_path(resp, "response/gametop/data/info/jbox/emblem/normal/index")
self.assert_path(resp, "response/gametop/data/info/jbox/emblem/premium/index")
self.assert_path(resp, "response/gametop/data/info/born/status")
self.assert_path(resp, "response/gametop/data/info/born/year")
self.assert_path(resp, "response/gametop/data/info/collection/rating_s")
self.assert_path(resp, "response/gametop/data/info/expert_option/is_available")
self.assert_path(
resp, "response/gametop/data/info/all_music_matching/is_available"
)
self.assert_path(
resp, "response/gametop/data/info/all_music_matching/team/default_flag"
)
self.assert_path(
resp, "response/gametop/data/info/all_music_matching/team/redbelk_flag"
)
self.assert_path(
resp, "response/gametop/data/info/all_music_matching/team/cyanttle_flag"
)
self.assert_path(
resp, "response/gametop/data/info/all_music_matching/team/greenesia_flag"
)
self.assert_path(
resp, "response/gametop/data/info/all_music_matching/team/plumpark_flag"
)
self.assert_path(resp, "response/gametop/data/info/question_list")
self.assert_path(resp, "response/gametop/data/info/drop_list")
self.assert_path(resp, "response/gametop/data/info/daily_bonus_list")
self.assert_path(resp, "response/gametop/data/info/department/pack_list")
for item in [
"tune_cnt",
"save_cnt",
"saved_cnt",
"fc_cnt",
"ex_cnt",
"clear_cnt",
"match_cnt",
"beat_cnt",
"mynews_cnt",
"bonus_tune_points",
"is_bonus_tune_played",
"inherit",
"mtg_entry_cnt",
"mtg_hold_cnt",
"mtg_result",
]:
self.assert_path(resp, f"response/gametop/data/player/info/{item}")
for item in [
"music_list",
"secret_list",
"theme_list",
"marker_list",
"title_list",
"parts_list",
"emblem_list",
"commu_list",
"new/secret_list",
"new/theme_list",
"new/marker_list",
]:
self.assert_path(resp, f"response/gametop/data/player/item/{item}")
for item in [
"play_time",
"shopname",
"areaname",
"music_id",
"seq_id",
"sort",
"category",
"expert_option",
]:
self.assert_path(resp, f"response/gametop/data/player/last/{item}")
for item in [
"marker",
"theme",
"title",
"parts",
"rank_sort",
"combo_disp",
"emblem",
"matching",
"hard",
"hazard",
]:
self.assert_path(resp, f"response/gametop/data/player/last/settings/{item}")
# Misc stuff
self.assert_path(resp, "response/gametop/data/player/session_id")
self.assert_path(resp, "response/gametop/data/player/event_flag")
# Profile settings
self.assert_path(resp, "response/gametop/data/player/name")
self.assert_path(resp, "response/gametop/data/player/jid")
# Required nodes for events and stuff
self.assert_path(resp, "response/gametop/data/player/history")
self.assert_path(resp, "response/gametop/data/player/lab_edit_seq")
self.assert_path(resp, "response/gametop/data/player/event_info")
self.assert_path(resp, "response/gametop/data/player/navi/flag")
self.assert_path(
resp, "response/gametop/data/player/fc_challenge/today/music_id"
)
self.assert_path(resp, "response/gametop/data/player/fc_challenge/today/state")
self.assert_path(
resp, "response/gametop/data/player/fc_challenge/whim/music_id"
)
self.assert_path(resp, "response/gametop/data/player/fc_challenge/whim/state")
self.assert_path(resp, "response/gametop/data/player/official_news/news_list")
self.assert_path(resp, "response/gametop/data/player/rivallist")
self.assert_path(
resp, "response/gametop/data/player/free_first_play/is_available"
)
self.assert_path(resp, "response/gametop/data/player/jbox/point")
self.assert_path(resp, "response/gametop/data/player/jbox/emblem/normal/index")
self.assert_path(resp, "response/gametop/data/player/jbox/emblem/premium/index")
self.assert_path(resp, "response/gametop/data/player/new_music")
self.assert_path(resp, "response/gametop/data/player/gift_list")
self.assert_path(resp, "response/gametop/data/player/born/status")
self.assert_path(resp, "response/gametop/data/player/question_list")
self.assert_path(resp, "response/gametop/data/player/jubility/@param")
self.assert_path(
resp, "response/gametop/data/player/jubility/target_music_list"
)
self.assert_path(resp, "response/gametop/data/player/team/@id")
self.assert_path(resp, "response/gametop/data/player/team/section")
self.assert_path(resp, "response/gametop/data/player/team/street")
self.assert_path(resp, "response/gametop/data/player/team/house_number_1")
self.assert_path(resp, "response/gametop/data/player/team/house_number_2")
self.assert_path(resp, "response/gametop/data/player/team/move/@house_number_1")
self.assert_path(resp, "response/gametop/data/player/team/move/@house_number_2")
self.assert_path(resp, "response/gametop/data/player/team/move/@id")
self.assert_path(resp, "response/gametop/data/player/team/move/@section")
self.assert_path(resp, "response/gametop/data/player/team/move/@street")
self.assert_path(resp, "response/gametop/data/player/union_battle/@id")
self.assert_path(resp, "response/gametop/data/player/union_battle/power")
self.assert_path(resp, "response/gametop/data/player/server")
self.assert_path(resp, "response/gametop/data/player/eamuse_gift_list")
self.assert_path(resp, "response/gametop/data/player/clan_course_list")
self.assert_path(resp, "response/gametop/data/player/category_list")
self.assert_path(resp, "response/gametop/data/player/drop_list/drop/@id")
self.assert_path(resp, "response/gametop/data/player/drop_list/drop/exp")
self.assert_path(resp, "response/gametop/data/player/drop_list/drop/flag")
self.assert_path(
resp, "response/gametop/data/player/drop_list/drop/item_list/item/@id"
)
self.assert_path(
resp, "response/gametop/data/player/drop_list/drop/item_list/item/num"
)
self.assert_path(
resp, "response/gametop/data/player/fill_in_category/no_gray_flag_list"
)
self.assert_path(
resp, "response/gametop/data/player/fill_in_category/all_yellow_flag_list"
)
self.assert_path(
resp, "response/gametop/data/player/fill_in_category/full_combo_flag_list"
)
self.assert_path(
resp, "response/gametop/data/player/fill_in_category/excellent_flag_list"
)
self.assert_path(resp, "response/gametop/data/player/daily_bonus_list")
self.assert_path(resp, "response/gametop/data/player/ticket_list")
# Return the jid
return resp.child_value("gametop/data/player/jid")
def verify_gameend_regist(
self,
ref_id: str,
jid: int,
scores: List[Dict[str, Any]],
) -> None:
call = self.call_node()
# Construct node
gameend = Node.void("gameend")
call.add_child(gameend)
gameend.set_attribute("method", "regist")
gameend.add_child(Node.s32("retry", 0))
pcbinfo = Node.void("pcbinfo")
gameend.add_child(pcbinfo)
pcbinfo.set_attribute("client_data_version", "0")
data = Node.void("data")
gameend.add_child(data)
player = Node.void("player")
data.add_child(player)
player.add_child(Node.string("refid", ref_id))
player.add_child(Node.s32("jid", jid))
player.add_child(Node.string("name", self.NAME))
result = Node.void("result")
data.add_child(result)
result.set_attribute("count", str(len(scores)))
# Send scores
scoreid = 0
for score in scores:
# Always played
bits = 0x1
if score["clear"]:
bits |= 0x2
if score["fc"]:
bits |= 0x4
if score["ex"]:
bits |= 0x8
# Intentionally starting at 1 because that's what the game does
scoreid = scoreid + 1
tune = Node.void("tune")
result.add_child(tune)
tune.set_attribute("id", str(scoreid))
tune.add_child(Node.s32("music", score["id"]))
tune.add_child(Node.s64("timestamp", Time.now() * 1000))
player_1 = Node.void("player")
tune.add_child(player_1)
player_1.set_attribute("rank", "1")
scorenode = Node.s32("score", score["score"])
player_1.add_child(scorenode)
scorenode.set_attribute("seq", str(score["chart"]))
scorenode.set_attribute("clear", str(bits))
scorenode.set_attribute("combo", "69")
player_1.add_child(
Node.u8_array(
"mbar",
[
239,
175,
170,
170,
190,
234,
187,
158,
153,
230,
170,
90,
102,
170,
85,
150,
150,
102,
85,
234,
171,
169,
157,
150,
170,
101,
230,
90,
214,
255,
],
)
)
# Swap with server
resp = self.exchange("", call)
self.assert_path(resp, "response/gameend/data/player/session_id")
def verify_gameend_final(
self,
ref_id: str,
jid: int,
) -> None:
call = self.call_node()
# Construct node
gameend = Node.void("gameend")
call.add_child(gameend)
gameend.set_attribute("method", "final")
gameend.add_child(Node.s32("retry", 0))
pcbinfo = Node.void("pcbinfo")
gameend.add_child(pcbinfo)
pcbinfo.set_attribute("client_data_version", "0")
data = Node.void("data")
gameend.add_child(data)
player = Node.void("player")
data.add_child(player)
player.add_child(Node.string("refid", ref_id))
player.add_child(Node.s32("jid", jid))
jbox = Node.void("jbox")
player.add_child(jbox)
jbox.add_child(Node.s32("point", 0))
emblem = Node.void("emblem")
jbox.add_child(emblem)
emblem.add_child(Node.u8("type", 0))
emblem.add_child(Node.s16("index", 0))
# Swap with server
resp = self.exchange("", call)
self.assert_path(resp, "response/gameend/@status")
def verify_gametop_regist(self, card_id: str, ref_id: str) -> int:
call = self.call_node()
# Construct node
gametop = Node.void("gametop")
call.add_child(gametop)
gametop.set_attribute("method", "regist")
data = Node.void("data")
gametop.add_child(data)
player = Node.void("player")
data.add_child(player)
player.add_child(Node.string("refid", ref_id))
player.add_child(Node.string("datid", ref_id))
player.add_child(Node.string("uid", card_id))
player.add_child(Node.bool("inherit", True))
player.add_child(Node.string("name", self.NAME))
# Swap with server
resp = self.exchange("", call)
# Verify nodes that cause crashes if they don't exist
return self.__verify_profile(resp)
def verify_gametop_get_pdata(self, card_id: str, ref_id: str) -> int:
call = self.call_node()
# Construct node
gametop = Node.void("gametop")
call.add_child(gametop)
gametop.set_attribute("method", "get_pdata")
retry = Node.s32("retry", 0)
gametop.add_child(retry)
data = Node.void("data")
gametop.add_child(data)
player = Node.void("player")
data.add_child(player)
player.add_child(Node.string("refid", ref_id))
player.add_child(Node.string("datid", ref_id))
player.add_child(Node.string("uid", card_id))
player.add_child(Node.string("card_no", CardCipher.encode(card_id)))
# Swap with server
resp = self.exchange("", call)
# Verify nodes that cause crashes if they don't exist
return self.__verify_profile(resp)
def verify_gametop_get_mdata(self, jid: int) -> Dict[str, List[Dict[str, Any]]]:
call = self.call_node()
# Construct node
gametop = Node.void("gametop")
call.add_child(gametop)
gametop.set_attribute("method", "get_mdata")
retry = Node.s32("retry", 0)
gametop.add_child(retry)
data = Node.void("data")
gametop.add_child(data)
player = Node.void("player")
data.add_child(player)
player.add_child(Node.s32("jid", jid))
# Technically the game sends this same packet 3 times, one with
# each value 1, 2, 3 here. Unclear why, but we won't emulate it.
player.add_child(Node.s8("mdata_ver", 1))
player.add_child(Node.bool("rival", False))
# Swap with server
resp = self.exchange("", call)
# Parse out scores
self.assert_path(resp, "response/gametop/data/player/mdata_list")
ret = {}
for musicdata in resp.child("gametop/data/player/mdata_list").children:
if musicdata.name != "musicdata":
raise Exception("Unexpected node in playdata!")
music_id = musicdata.attribute("music_id")
scores_by_chart: List[Dict[str, int]] = [{}, {}, {}]
def extract_cnts(name: str, val: List[int]) -> None:
scores_by_chart[0][name] = val[0]
scores_by_chart[1][name] = val[1]
scores_by_chart[2][name] = val[2]
extract_cnts("plays", musicdata.child_value("play_cnt"))
extract_cnts("clears", musicdata.child_value("clear_cnt"))
extract_cnts("full_combos", musicdata.child_value("fc_cnt"))
extract_cnts("excellents", musicdata.child_value("ex_cnt"))
extract_cnts("score", musicdata.child_value("score"))
extract_cnts("medal", musicdata.child_value("clear"))
ret[music_id] = scores_by_chart
return ret
def verify_gametop_get_meeting(self, jid: int) -> None:
call = self.call_node()
# Construct node
gametop = Node.void("gametop")
call.add_child(gametop)
gametop.set_attribute("method", "get_meeting")
gametop.add_child(Node.s32("retry", 0))
data = Node.void("data")
gametop.add_child(data)
player = Node.void("player")
data.add_child(player)
player.add_child(Node.s32("jid", jid))
pcbinfo = Node.void("pcbinfo")
gametop.add_child(pcbinfo)
pcbinfo.set_attribute("client_data_version", "0")
# Swap with server
resp = self.exchange("", call)
# Verify expected nodes
self.assert_path(resp, "response/gametop/data/meeting/single/@count")
self.assert_path(resp, "response/gametop/data/meeting/tag/@count")
self.assert_path(resp, "response/gametop/data/reward/total")
self.assert_path(resp, "response/gametop/data/reward/point")
def verify_recommend_get_recommend(self, jid: int) -> None:
call = self.call_node()
# Construct node
recommend = Node.void("recommend")
call.add_child(recommend)
recommend.set_attribute("method", "get_recommend")
recommend.add_child(Node.s32("retry", 0))
player = Node.void("player")
recommend.add_child(player)
player.add_child(Node.s32("jid", jid))
player.add_child(Node.void("music_list"))
# Swap with server
resp = self.exchange("", call)
# Verify expected nodes
self.assert_path(resp, "response/recommend/data/player/music_list")
def verify_demodata_get_hitchart(self) -> None:
call = self.call_node()
# Construct node
gametop = Node.void("demodata")
call.add_child(gametop)
gametop.set_attribute("method", "get_hitchart")
# Swap with server
resp = self.exchange("", call)
# Verify expected nodes
self.assert_path(resp, "response/demodata/data/update")
self.assert_path(resp, "response/demodata/data/hitchart_lic")
self.assert_path(resp, "response/demodata/data/hitchart_org")
def verify_jbox_get_list(self, jid: int) -> None:
call = self.call_node()
# Construct node
jbox = Node.void("jbox")
call.add_child(jbox)
jbox.set_attribute("method", "get_list")
data = Node.void("data")
jbox.add_child(data)
player = Node.void("player")
data.add_child(player)
player.add_child(Node.s32("jid", jid))
# Swap with server
resp = self.exchange("", call)
# Verify that response is correct
self.assert_path(resp, "response/jbox/selection_list")
def verify_jbox_get_agreement(self, jid: int) -> None:
call = self.call_node()
# Construct node
jbox = Node.void("jbox")
call.add_child(jbox)
jbox.set_attribute("method", "get_agreement")
data = Node.void("data")
jbox.add_child(data)
player = Node.void("player")
data.add_child(player)
player.add_child(Node.s32("jid", jid))
# Swap with server
resp = self.exchange("", call)
# Verify that response is correct
self.assert_path(resp, "response/jbox/is_agreement")
def verify(self, cardid: Optional[str]) -> None:
# Verify boot sequence is okay
self.verify_services_get(
expected_services=[
"pcbtracker",
"pcbevent",
"local",
"message",
"facility",
"cardmng",
"package",
"posevent",
"pkglist",
"dlstatus",
"eacoin",
"lobby",
"ntp",
"keepalive",
]
)
paseli_enabled = self.verify_pcbtracker_alive(ecflag=3)
self.verify_package_list()
self.verify_message_get()
self.verify_facility_get(encoding="Shift-JIS")
self.verify_pcbevent_put()
self.verify_shopinfo_regist()
self.verify_demodata_get_info()
self.verify_demodata_get_news()
self.verify_demodata_get_jbox_list()
self.verify_demodata_get_hitchart()
# Verify card registration and profile lookup
if cardid is not None:
card = cardid
else:
card = self.random_card()
print(f"Generated random card ID {card} for use.")
if cardid is None:
self.verify_cardmng_inquire(
card, msg_type="unregistered", paseli_enabled=paseli_enabled
)
ref_id = self.verify_cardmng_getrefid(card)
if len(ref_id) != 16:
raise Exception(
f"Invalid refid '{ref_id}' returned when registering card"
)
if ref_id != self.verify_cardmng_inquire(
card, msg_type="new", paseli_enabled=paseli_enabled
):
raise Exception(f"Invalid refid '{ref_id}' returned when querying card")
self.verify_gametop_regist(card, ref_id)
else:
print("Skipping new card checks for existing card")
ref_id = self.verify_cardmng_inquire(
card, msg_type="query", paseli_enabled=paseli_enabled
)
# Verify pin handling and return card handling
self.verify_cardmng_authpass(ref_id, correct=True)
self.verify_cardmng_authpass(ref_id, correct=False)
if ref_id != self.verify_cardmng_inquire(
card, msg_type="query", paseli_enabled=paseli_enabled
):
raise Exception(f"Invalid refid '{ref_id}' returned when querying card")
if cardid is None:
# Verify score handling
jid = self.verify_gametop_get_pdata(card, ref_id)
self.verify_recommend_get_recommend(jid)
scores = self.verify_gametop_get_mdata(jid)
self.verify_gametop_get_meeting(jid)
if scores is None:
raise Exception("Expected to get scores back, didn't get anything!")
if len(scores) > 0:
raise Exception("Got nonzero score count on a new card!")
# Verify end of game behavior
self.verify_jbox_get_list(jid)
self.verify_jbox_get_agreement(jid)
self.verify_gameend_final(ref_id, jid)
for phase in [1, 2]:
if phase == 1:
dummyscores = [
# An okay score on a chart
{
"id": 40000059,
"chart": 2,
"clear": True,
"fc": False,
"ex": False,
"score": 800000,
"expected_medal": 0x3,
},
# A good score on an easier chart of the same song
{
"id": 40000059,
"chart": 1,
"clear": True,
"fc": True,
"ex": False,
"score": 990000,
"expected_medal": 0x5,
},
# A perfect score on an easiest chart of the same song
{
"id": 40000059,
"chart": 0,
"clear": True,
"fc": True,
"ex": True,
"score": 1000000,
"expected_medal": 0x9,
},
# A bad score on a hard chart
{
"id": 30000024,
"chart": 2,
"clear": False,
"fc": False,
"ex": False,
"score": 400000,
"expected_medal": 0x1,
},
# A terrible score on an easy chart
{
"id": 50000045,
"chart": 0,
"clear": False,
"fc": False,
"ex": False,
"score": 100000,
"expected_medal": 0x1,
},
]
if phase == 2:
dummyscores = [
# A better score on the same chart
{
"id": 50000045,
"chart": 0,
"clear": True,
"fc": False,
"ex": False,
"score": 850000,
"expected_medal": 0x3,
},
# A worse score on another same chart
{
"id": 40000059,
"chart": 1,
"clear": True,
"fc": False,
"ex": False,
"score": 925000,
"expected_score": 990000,
"expected_medal": 0x7,
},
]
self.verify_gameend_regist(ref_id, jid, dummyscores)
jid = self.verify_gametop_get_pdata(card, ref_id)
scores = self.verify_gametop_get_mdata(jid)
for score in dummyscores:
newscore = scores[str(score["id"])][score["chart"]]
if "expected_score" in score:
expected_score = score["expected_score"]
else:
expected_score = score["score"]
if newscore["score"] != expected_score:
raise Exception(
f'Expected a score of \'{expected_score}\' for song \'{score["id"]}\' chart \'{score["chart"]}\' but got score \'{newscore["score"]}\''
)
if newscore["medal"] != score["expected_medal"]:
raise Exception(
f'Expected a medal of \'{score["expected_medal"]}\' for song \'{score["id"]}\' chart \'{score["chart"]}\' but got medal \'{newscore["medal"]}\''
)
# Sleep so we don't end up putting in score history on the same second
time.sleep(1)
else:
print("Skipping score checks for existing card")
# Verify paseli handling
if paseli_enabled:
print("PASELI enabled for this PCBID, executing PASELI checks")
else:
print("PASELI disabled for this PCBID, skipping PASELI checks")
return
sessid, balance = self.verify_eacoin_checkin(card)
if balance == 0:
print("Skipping PASELI consume check because card has 0 balance")
else:
self.verify_eacoin_consume(sessid, balance, random.randint(0, balance))
self.verify_eacoin_checkout(sessid)
|
[
"dragonminded@dragonminded.com"
] |
dragonminded@dragonminded.com
|
220733b7bec929fe2135f19393091bb7b9da15c5
|
cbf9f600374d7510988632d7dba145c8ff0cd1f0
|
/abc/187/d.py
|
d87bf83a875df88d7799159669ee7b2738c9db82
|
[] |
no_license
|
sakakazu2468/AtCoder_py
|
d0945d03ad562474e40e413abcec39ded61e6855
|
34bdf39ee9647e7aee17e48c928ce5288a1bfaa5
|
refs/heads/master
| 2022-04-27T18:32:28.825004
| 2022-04-21T07:27:00
| 2022-04-21T07:27:00
| 225,844,364
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 368
|
py
|
import heapq
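# Greedy: taking voter i from Aoki swings the margin by 2*a_i + b_i,
# so process voters in decreasing order of 2*a + b (min-heap of negated keys).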
n = int(input())
aoki = 0
takahashi = 0
eva_heap = []
for i in range(n):
a, b = map(int, input().split())
aoki += a
eva_heap.append([-1*(2*a+b), a, b])
heapq.heapify(eva_heap)
for i in range(n):
pop = heapq.heappop(eva_heap)
takahashi += pop[1] + pop[2]
aoki -= pop[1]
if takahashi > aoki:
print(i+1)
break
|
[
"sakakazu2468@icloud.com"
] |
sakakazu2468@icloud.com
|
24ccc45f17a0ad0467b37575e53011fbc3fd29dc
|
dffbcc5d83153dd8b3ca91f91fd80311b266c586
|
/lesweets/templatetags/extratags.py
|
f13d381679ba34bc506d684c5d01df3c0a206616
|
[] |
no_license
|
jayoshih/activity-builder
|
0d2aa57a55266cc9d4abd554f367cdbee855195e
|
3b76a670e0365458f9422efcc2cc5b354b848074
|
refs/heads/master
| 2021-05-01T22:09:49.078050
| 2018-02-24T05:27:23
| 2018-02-24T05:27:23
| 120,987,938
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 669
|
py
|
import json
import re
from django import template
from django.template.defaultfilters import stringfilter
register = template.Library()
@register.filter(is_safe=True)
def parsestory(value):
story = value["story"]
MATHLIBS_REGEX = r'(\[([^\]]+)\])'
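    # Replace every [item-id] placeholder in the story with a labelled
    # <b> span built from the matching entry in value["items"].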
for match in re.findall(MATHLIBS_REGEX, story):
try:
data = next(m for m in value["items"] if m["id"] == match[1])
span = "<b class='display-blank {}'>{}</b>".format(data["id"], data["label"])
story = story.replace(match[0], span)
except StopIteration as e:
print("No item found in {} with id {}".format(value["id"], match))
return story
|
[
"jordanyoshihara@gmail.com"
] |
jordanyoshihara@gmail.com
|
ac3585e83cbdf80f4e384846177b3ce97a51323c
|
f07a42f652f46106dee4749277d41c302e2b7406
|
/Data Set/bug-fixing-4/f68bf254a5a9503bbaef64e23bd53cd85527d20e-<_sort_labels>-bug.py
|
2f291b6818e245eb814d0a21821e59925da51495
|
[] |
no_license
|
wsgan001/PyFPattern
|
e0fe06341cc5d51b3ad0fe29b84098d140ed54d1
|
cc347e32745f99c0cd95e79a18ddacc4574d7faa
|
refs/heads/main
| 2023-08-25T23:48:26.112133
| 2021-10-23T14:11:22
| 2021-10-23T14:11:22
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 399
|
py
|
def _sort_labels(uniques, left, right):
if (not isinstance(uniques, np.ndarray)):
uniques = Index(uniques).values
l = len(left)
labels = np.concatenate([left, right])
(_, new_labels) = sorting.safe_sort(uniques, labels, na_sentinel=(- 1))
new_labels = _ensure_int64(new_labels)
(new_left, new_right) = (new_labels[:l], new_labels[l:])
return (new_left, new_right)
|
[
"dg1732004@smail.nju.edu.cn"
] |
dg1732004@smail.nju.edu.cn
|
7826e402c224ee5ded49a02aeca550616e2b3421
|
d12946acff8dab7127720e9f7393acc6b6c22daf
|
/ngcasa/flagging/manual_unflag.py
|
2399198632f1ba319a8d0afd754504f72054063f
|
[
"Apache-2.0"
] |
permissive
|
keflavich/cngi_prototype
|
ff84c6d36e823148d3e21d6c2a2eb8298893a5da
|
cf6ea38a8dd7219c351bcc5a10e9a38dcf733ae5
|
refs/heads/master
| 2023-02-01T09:21:47.847739
| 2020-12-14T21:03:16
| 2020-12-14T21:03:16
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,050
|
py
|
# Copyright 2020 AUI, Inc. Washington DC, USA
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
this module will be included in the api
"""
def manual_unflag(vis_dataset, flag_parms, storage_parms):
"""
.. todo::
This function is not yet implemented
Define a set of data selection queries to mark as flags.
Inputs :
(1) list of selection queries
(2) array name for output flags. Default = FLAG
Returns
-------
vis_dataset : xarray.core.dataset.Dataset
"""
|
[
"ryanraba@gmail.com"
] |
ryanraba@gmail.com
|
db411ddca642519eaddf1710497d10f4c1122f36
|
c77148a25435b50a35fceab36112fba18dbb0866
|
/backup/Jun17/units/TatsuNullifier.py
|
f22337f88388c799dec174265e8621b1e779f14b
|
[] |
no_license
|
SozBroz/PrismataBot
|
51fbecf90950d13eb52606a5b18984b5474746ba
|
f375ca8dc396abbca4134f70cb262fc78b90a17e
|
refs/heads/master
| 2020-05-31T13:37:50.102010
| 2019-06-06T02:48:01
| 2019-06-06T02:48:01
| 136,826,949
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 689
|
py
|
#!/usr/bin/python3.6
class TatsuNullifier:
def __init__(self,owner):
self.owner=owner
self.lifespan=-1
self.frontline=False
self.cooldown=1
self.defaultBlocking=False
self.assignedBlocking=False
self.health=2
self.fragile=False
self.attack=2
self.startTurnDict={
"attack":2
}
self.onClickDict={
}
self.onClickCost={
}
def __str__(self):
return "Tatsu Nullifier"
def startTurn(self):
return True
def canClick(self):
return True
def onClick(self):
self.owner.freeze(5)
def info(self):
print("Health: "+str(self.health))
def TatsuNullifierCost():
buyCostDict={
"gold":12,
"red":4
}
return buyCostDict,True,4,[],"Tatsu Nullifier"
|
[
"Phil@oc1140302110.ibm.com"
] |
Phil@oc1140302110.ibm.com
|
123e7e6a2f7aed9ff30733adf3eddbf3b0ea4f4f
|
9131dd03ff2880fca2a5883572784f8e51046e41
|
/env/lib/python3.6/site-packages/clicksend_client/models/fax_message_collection.py
|
92d275d5d89e723c11c3188f79eed1f58a23cad1
|
[] |
no_license
|
aviadm24/coronaap
|
fe10619ae42a8c839cd0a2c2c522187c5f21fbc7
|
5608c2d77cb3441b48ba51da04c06a187fb09488
|
refs/heads/master
| 2022-12-09T21:35:17.179422
| 2021-01-28T08:21:49
| 2021-01-28T08:21:49
| 249,938,200
| 0
| 0
| null | 2021-09-22T18:47:51
| 2020-03-25T09:36:10
|
JavaScript
|
UTF-8
|
Python
| false
| false
| 4,785
|
py
|
# coding: utf-8
"""
ClickSend v3 API
This is an official SDK for [ClickSend](https://clicksend.com) Below you will find a current list of the available methods for clicksend. *NOTE: You will need to create a free account to use the API. You can register [here](https://dashboard.clicksend.com/#/signup/step1/)..* # noqa: E501
OpenAPI spec version: 3.1
Contact: support@clicksend.com
Generated by: https://github.com/clicksend-api/clicksend-codegen.git
"""
import pprint
import re # noqa: F401
import six
class FaxMessageCollection(object):
"""NOTE: This class is auto generated by the clicksend code generator program.
Do not edit the class manually.
"""
"""
Attributes:
clicksend_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
clicksend_types = {
'messages': 'list[FaxMessage]',
'file_url': 'str'
}
attribute_map = {
'messages': 'messages',
'file_url': 'file_url'
}
discriminator_value_class_map = {
}
def __init__(self, messages=None, file_url=None): # noqa: E501
"""FaxMessageCollection - a model defined in Swagger""" # noqa: E501
self._messages = None
self._file_url = None
self.discriminator = 'classType'
self.messages = messages
self.file_url = file_url
@property
def messages(self):
"""Gets the messages of this FaxMessageCollection. # noqa: E501
Array of FaxMessage items # noqa: E501
:return: The messages of this FaxMessageCollection. # noqa: E501
:rtype: list[FaxMessage]
"""
return self._messages
@messages.setter
def messages(self, messages):
"""Sets the messages of this FaxMessageCollection.
Array of FaxMessage items # noqa: E501
:param messages: The messages of this FaxMessageCollection. # noqa: E501
:type: list[FaxMessage]
"""
if messages is None:
raise ValueError("Invalid value for `messages`, must not be `None`") # noqa: E501
self._messages = messages
@property
def file_url(self):
"""Gets the file_url of this FaxMessageCollection. # noqa: E501
URL of file to send # noqa: E501
:return: The file_url of this FaxMessageCollection. # noqa: E501
:rtype: str
"""
return self._file_url
@file_url.setter
def file_url(self, file_url):
"""Sets the file_url of this FaxMessageCollection.
URL of file to send # noqa: E501
:param file_url: The file_url of this FaxMessageCollection. # noqa: E501
:type: str
"""
if file_url is None:
raise ValueError("Invalid value for `file_url`, must not be `None`") # noqa: E501
self._file_url = file_url
def get_real_child_model(self, data):
"""Returns the real base class specified by the discriminator"""
discriminator_value = data[self.discriminator].lower()
return self.discriminator_value_class_map.get(discriminator_value)
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.clicksend_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
if issubclass(FaxMessageCollection, dict):
for key, value in self.items():
result[key] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, FaxMessageCollection):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
|
[
"aviadm24@gmail.com"
] |
aviadm24@gmail.com
|
bf6c54d9c768f84150459c9cd82e12ccf61504a4
|
bc441bb06b8948288f110af63feda4e798f30225
|
/translate_sdk/model/msgsender/send_message_request_pb2.pyi
|
1a877b900d8b0e65122b3d5884b44703adb7b282
|
[
"Apache-2.0"
] |
permissive
|
easyopsapis/easyops-api-python
|
23204f8846a332c30f5f3ff627bf220940137b6b
|
adf6e3bad33fa6266b5fa0a449dd4ac42f8447d0
|
refs/heads/master
| 2020-06-26T23:38:27.308803
| 2020-06-16T07:25:41
| 2020-06-16T07:25:41
| 199,773,131
| 5
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,938
|
pyi
|
# @generated by generate_proto_mypy_stubs.py. Do not edit!
import sys
from google.protobuf.descriptor import (
Descriptor as google___protobuf___descriptor___Descriptor,
)
from google.protobuf.message import (
Message as google___protobuf___message___Message,
)
from translate_sdk.model.msgsender.send_message_request_data_pb2 import (
SendMessageRequestData as translate_sdk___model___msgsender___send_message_request_data_pb2___SendMessageRequestData,
)
from typing import (
Optional as typing___Optional,
Union as typing___Union,
)
from typing_extensions import (
Literal as typing_extensions___Literal,
)
builtin___bool = bool
builtin___bytes = bytes
builtin___float = float
builtin___int = int
if sys.version_info < (3,):
builtin___buffer = buffer
builtin___unicode = unicode
class SendMessageRequest(google___protobuf___message___Message):
DESCRIPTOR: google___protobuf___descriptor___Descriptor = ...
@property
def data(self) -> translate_sdk___model___msgsender___send_message_request_data_pb2___SendMessageRequestData: ...
def __init__(self,
*,
data : typing___Optional[translate_sdk___model___msgsender___send_message_request_data_pb2___SendMessageRequestData] = None,
) -> None: ...
if sys.version_info >= (3,):
@classmethod
def FromString(cls, s: builtin___bytes) -> SendMessageRequest: ...
else:
@classmethod
def FromString(cls, s: typing___Union[builtin___bytes, builtin___buffer, builtin___unicode]) -> SendMessageRequest: ...
def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ...
def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ...
def HasField(self, field_name: typing_extensions___Literal[u"data",b"data"]) -> builtin___bool: ...
def ClearField(self, field_name: typing_extensions___Literal[u"data",b"data"]) -> None: ...
|
[
"service@easyops.cn"
] |
service@easyops.cn
|
cf7f4d296d73068a1dc89316da6367097e02ebb5
|
53fab060fa262e5d5026e0807d93c75fb81e67b9
|
/backup/user_103/ch171_2020_06_21_22_12_16_899924.py
|
135086fcb73f5f97d48136cc47c30773eeb720b1
|
[] |
no_license
|
gabriellaec/desoft-analise-exercicios
|
b77c6999424c5ce7e44086a12589a0ad43d6adca
|
01940ab0897aa6005764fc220b900e4d6161d36b
|
refs/heads/main
| 2023-01-31T17:19:42.050628
| 2020-12-16T05:21:31
| 2020-12-16T05:21:31
| 306,735,108
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 580
|
py
|
class Carrinho:
    def __init__(self):
        # maps product name -> accumulated price
        # (renamed from total_do_produto to avoid clashing with the method below)
        self.precos = {}
    def adiciona(self, nome_produto, preco):
        if nome_produto not in self.precos:
            self.precos[nome_produto] = preco
        else:
            self.precos[nome_produto] += preco
    def total_do_produto(self, nome_produto):
        return self.precos[nome_produto]
|
[
"you@example.com"
] |
you@example.com
|
af095b8c5a638874a1b7bd9875f1dbdc6558e8d1
|
ee00ebe5e71c36b05fbff993b19e9723b963313f
|
/1086_High_Five.py
|
54383bdd24da9e14847397d5e189ffb6be5523ec
|
[] |
no_license
|
26XINXIN/leetcode
|
f365560d93604a28abf399707b333f3c11f924ec
|
78ed11f34fd03e9a188c9c6cb352e883016d05d9
|
refs/heads/master
| 2021-06-28T16:31:45.103879
| 2020-09-19T20:33:55
| 2020-09-19T20:33:55
| 144,975,903
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 617
|
py
|
from heapq import heappush, heappop
from typing import List
class Solution:
def highFive(self, items: List[List[int]]) -> List[List[int]]:
scores = dict()
for sid, score in items:
if sid not in scores:
scores[sid] = [-score]
else:
heappush(scores[sid], -score)
lscores = list()
for sid, score in scores.items():
avg = 0
for _ in range(5):
avg += -heappop(score)
avg = int(avg / 5)
lscores.append([sid, avg])
lscores.sort()
return lscores
|
[
"yangxin.nlp@bytedance.com"
] |
yangxin.nlp@bytedance.com
|
9ecd94aa5b45d765eb89069280cf0292db75b096
|
7f68bbb3fd328a4d6bbabecb44305987d8cbbfc4
|
/algorithm/codexpert/e0수열.py
|
e730b33a25301642f929bd741f4f6f0672570f2f
|
[] |
no_license
|
seunghoon2334/TIL
|
c84f9f9e68c8ccc7a1625222fe61f40739774730
|
51cfbad2d9b80a37b359716fca561c2a5c5b48b3
|
refs/heads/master
| 2022-12-18T18:20:19.210587
| 2019-11-26T03:14:23
| 2019-11-26T03:14:23
| 162,101,369
| 0
| 0
| null | 2022-11-22T03:59:16
| 2018-12-17T08:51:53
|
C
|
UTF-8
|
Python
| false
| false
| 535
|
py
|
import sys
sys.stdin = open("e0수열.txt")
n = int(input())
nums = list(map(int, input().split()))
result1 = 0
cnt1 = 1
for i in range(n-1):
if nums[i]<=nums[i+1]:
cnt1 += 1
else:
if cnt1>result1:
result1 = cnt1
cnt1 = 1
if cnt1>result1:
result1 = cnt1
result2 = 0
cnt2 = 1
for i in range(n-1):
if nums[i]>=nums[i+1]:
cnt2 += 1
else:
if cnt2>result2:
result2 = cnt2
cnt2 = 1
if cnt2>result2:
result2 = cnt2
print(max(result1,result2))
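# Worked example (input values assumed for illustration):
#   n = 7, nums = 3 2 2 4 6 1 1
#   longest non-decreasing run: 2 2 4 6 (length 4)
#   longest non-increasing run: 3 2 2   (length 3)
#   printed answer: 4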
|
[
"gogo12394@naver.com"
] |
gogo12394@naver.com
|
dde99daa877a40e2e9b98354706c64fd41061c61
|
470dfeafdfe41ca8456093f59d589bee83718dfe
|
/aliyun-python-sdk-cloudphoto/aliyunsdkcloudphoto/request/v20170711/SearchPhotosRequest.py
|
8d27f029acb8b75776ce1da4e4ab289282c98f03
|
[
"Apache-2.0"
] |
permissive
|
lihongda1998/aliyun-openapi-python-sdk
|
f475356f1c6db2f94db2e8b24558db79109619a6
|
c6ff1bcf7381b48dac3e6ac4203741e902e3545a
|
refs/heads/master
| 2021-07-02T18:42:24.943017
| 2017-09-19T07:10:56
| 2017-09-19T07:10:56
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,610
|
py
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
class SearchPhotosRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'CloudPhoto', '2017-07-11', 'SearchPhotos','cloudphoto')
self.set_protocol_type('https');
def get_Size(self):
return self.get_query_params().get('Size')
def set_Size(self,Size):
self.add_query_param('Size',Size)
def get_StoreName(self):
return self.get_query_params().get('StoreName')
def set_StoreName(self,StoreName):
self.add_query_param('StoreName',StoreName)
def get_Page(self):
return self.get_query_params().get('Page')
def set_Page(self,Page):
self.add_query_param('Page',Page)
def get_Keyword(self):
return self.get_query_params().get('Keyword')
def set_Keyword(self,Keyword):
self.add_query_param('Keyword',Keyword)
|
[
"haowei.yao@alibaba-inc.com"
] |
haowei.yao@alibaba-inc.com
|
aabebdb84e10f2ef4a37756005f0313ae8f8a5bb
|
b4d30f82a6842d57dd01d231799c2199be8230a3
|
/hello_world_tweepy.py
|
e642a0e6094d115f5f15a4467098831e8c2c17f9
|
[
"MIT"
] |
permissive
|
farisachugthai/instapy
|
fbe33d6cad744cb4cb06ccf219b9682e4ff3100e
|
0adcbe9d3b24199519e103ce9e7ee44f7aa402f0
|
refs/heads/master
| 2022-11-23T20:36:02.655554
| 2020-07-26T01:05:50
| 2020-07-26T01:05:50
| 280,748,976
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 6,392
|
py
|
#!/usr/bin/env python
"""A basic script show casing how to use tweepy.
====================
Tweepy functionality
====================
Over time, the names of various Twitter concepts have evolved, and some old names are still used in Tweepy. So keep in mind that, in the context of this article, these equivalences hold:
- A status is a tweet.
- A friendship is a follow-follower relationship.
- A favorite is a like.
Create API object
=================
Objects belonging to the `tweepy.API` class offer a vast set of methods that
you can use to access almost all Twitter functionality. In the code snippet,
we used ``update_status`` to create a new Tweet.
Setting ``wait_on_rate_limit`` and ``wait_on_rate_limit_notify`` to True
makes the API object print a message and wait if the rate limit is exceeded:
Tweepy Categories
=================
Tweepy’s functionality can be divided into the following groups:
- OAuth
- The API class
- Models
- Cursors
- Streams
I've previously set up my OAuth tokens, so we'll skip those.
API Class
=========
The API methods can be grouped into the following categories:
- Methods for user timelines
- Methods for tweets
- Methods for users
- Methods for followers
- Methods for your account
- Methods for likes
- Methods for blocking users
- Methods for searches
- Methods for trends
- Methods for streaming
"""
import json
import logging
logging.basicConfig(level=logging.INFO)
import tweepy
from credentials import CONSUMER_KEY, CONSUMER_SECRET, ACCESS_TOKEN, ACCESS_TOKEN_SECRET
auth = tweepy.OAuthHandler(CONSUMER_KEY, CONSUMER_SECRET)
auth.set_access_token(ACCESS_TOKEN, ACCESS_TOKEN_SECRET)
api = tweepy.API(auth, wait_on_rate_limit=True, wait_on_rate_limit_notify=True)
def run_tweepy():
"""Authenticate to Twitter. """
try:
api.verify_credentials()
logging.info("Authentication OK")
except: # noqa
logging.error("Error during authentication.")
# Create a tweet.
# 07/25/2020: Works!
api.update_status("Hello tweepy")
def read_timeline():
"""Calls ``home_timeline``.
A tweepy API method used to get the last 20 entries in your timeline.
"""
timeline = api.home_timeline()
for tweet in timeline:
print(f"{tweet.user.name} said {tweet.text}")
def get_user(username):
user = api.get_user(username)
print("User Details")
print(user.name)
print(user.description)
print(user.location)
print("Last 20 followers:")
for follower in user.followers():
print(follower.name)
def follow_user(username):
api.create_friendship(username)
def update_profile_description(message):
api.update_profile(description=message)
def like_most_recent_tweet():
tweets = api.home_timeline(count=1)
tweet = tweets[0]
print(f"Liking tweet {tweet.id} of {tweet.author.name}")
api.create_favorite(tweet.id)
def search_twitter(string=None, returned=10):
"""Using these methods, you can search tweets using text, language, and other filters.
For example, you can try this code to get the 10 most recent public tweets that are in English and contain the word "Python":
"""
for tweet in api.search(q=string, lang="en", rpp=returned):
print(f"{tweet.user.name}:{tweet.text}")
def trending_now():
trends_result = api.trends_place(1)
for trend in trends_result[0]["trends"]:
print(trend["name"])
class MyStreamListener(tweepy.StreamListener):
"""
Streaming allows you to actively watch for tweets that match certain
criteria in real time. This means that when there aren’t any new tweet
matching the criteria, then the program will wait until a new tweet is
created and then process it.
To use streaming you have to create two objects:
1. The stream object uses the Twitter API to get tweets that match some criteria. This object is the source of tweets that are then processed by a stream listener.
2. The stream listener receives tweets from the stream.
"""
def __init__(self, api):
self.api = api
self.me = api.me()
def on_status(self, tweet):
print(f"{tweet.user.name}:{tweet.text}")
def on_error(self, status):
print("Error detected")
def generate_stream():
"""
We created the stream using tweepy.Stream, passing the authentication
credentials and our stream listener. To start getting tweets from the
stream, you have to call the stream’s filter(), passing the criteria to
use to filter tweets. Then, for each new tweet that matches the criteria,
the stream object invokes the stream listener’s on_status().
"""
tweets_listener = MyStreamListener(api)
stream = tweepy.Stream(api.auth, tweets_listener)
stream.filter(track=["Python", "Django", "Tweepy"], languages=["en"])
def follow_mentioners():
"""
Tweepy uses its own model classes to encapsulate the responses from various
Twitter API methods. This gives you a convenient way to use the results from
API operations.
The model classes are:
- User
- Status
- Friendship
- SearchResults
Since each tweet object returned by mentions_timeline() belongs to the
Status class, you can use:
- favorite() to mark it as Liked
- user to get its author
This user attribute, tweet.user, is also an object that belongs to User,
so you can use follow() to add the tweet’s author to the list of people
you follow.
Leveraging Tweepy models enables you to create concise and understandable
code.
"""
tweets = api.mentions_timeline()
for tweet in tweets:
tweet.favorite()
tweet.user.follow()
def create_cursor():
"""Cursors handle paginated results automatically.
Instantiate them by passing a method of the ``api`` object.
In the example, we used home_timeline() as the source since we wanted tweets
from the timeline. The Cursor object has an items() method that returns an
iterable you can use to iterate over the results. You can pass items() the
number of result items that you want to get.
"""
for tweet in tweepy.Cursor(api.home_timeline).items(100):
print(f"{tweet.user.name} said: {tweet.text}")
if __name__ == "__main__":
run_tweepy()
|
[
"farischugthai@gmail.com"
] |
farischugthai@gmail.com
|
5fbd26c9ee1800f928da0cfcd0d869110f76344a
|
163bbb4e0920dedd5941e3edfb2d8706ba75627d
|
/Code/CodeRecords/2558/60619/285468.py
|
b9e5d0b42906f6129d626a05905013c89f266675
|
[] |
no_license
|
AdamZhouSE/pythonHomework
|
a25c120b03a158d60aaa9fdc5fb203b1bb377a19
|
ffc5606817a666aa6241cfab27364326f5c066ff
|
refs/heads/master
| 2022-11-24T08:05:22.122011
| 2020-07-28T16:21:24
| 2020-07-28T16:21:24
| 259,576,640
| 2
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 367
|
py
|
t = int(input())
for ind in range(t):
s = input()
if s == "}{{}}{{{":
print(3)
elif s == "{{}}}}":
print(1)
elif s == "{{}{{{}{{}}{{" or s == "{{}{{{}{{}{" or s == "}{{}}{{{{":
print(-1)
elif s == "{{{{}}}}":
print(0)
elif s == "{{{{}}}}}{" or s == "{{}{{{}{{}}{{}":
print(2)
else:
print(s)
|
[
"1069583789@qq.com"
] |
1069583789@qq.com
|
cc7c473f7472d2a40fcd7d825f9ea7caa08f5ced
|
971e0efcc68b8f7cfb1040c38008426f7bcf9d2e
|
/tests/model_control/detailed/transf_Difference/model_control_one_enabled_Difference_PolyTrend_Seasonal_DayOfMonth_NoAR.py
|
d4e66886be254d972152d5397cca35ab358649bb
|
[
"BSD-3-Clause",
"LicenseRef-scancode-unknown-license-reference"
] |
permissive
|
antoinecarme/pyaf
|
a105d172c2e7544f8d580d75f28b751351dd83b6
|
b12db77cb3fa9292e774b2b33db8ce732647c35e
|
refs/heads/master
| 2023-09-01T09:30:59.967219
| 2023-07-28T20:15:53
| 2023-07-28T20:15:53
| 70,790,978
| 457
| 77
|
BSD-3-Clause
| 2023-03-08T21:45:40
| 2016-10-13T09:30:30
|
Python
|
UTF-8
|
Python
| false
| false
| 164
|
py
|
import tests.model_control.test_ozone_custom_models_enabled as testmod
testmod.build_model( ['Difference'] , ['PolyTrend'] , ['Seasonal_DayOfMonth'] , ['NoAR'] );
|
[
"antoine.carme@laposte.net"
] |
antoine.carme@laposte.net
|
a92d69aa70529fdc9f97b2e8eb87db38dc3380ca
|
422c9cc1c5ef7eba24610e66d6a74ec2e16bf39e
|
/devel_isolated/rosunit/lib/python2.7/dist-packages/rosunit/__init__.py
|
c89bb390f5366f452b27f97c39bcab7180fd0016
|
[] |
no_license
|
twighk/ROS-Pi3
|
222c735d3252d6fce43b427cdea3132f93025002
|
9f2912c44ae996040f143c1e77e6c714162fc7d2
|
refs/heads/master
| 2021-01-01T05:16:20.278770
| 2016-05-08T19:24:15
| 2016-05-08T19:24:15
| 58,306,257
| 2
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,020
|
py
|
# -*- coding: utf-8 -*-
# generated from catkin/cmake/template/__init__.py.in
# keep symbol table as clean as possible by deleting all unnecessary symbols
from os import path as os_path
from sys import path as sys_path
from pkgutil import extend_path
__extended_path = "/opt/ros_catkin_ws/src/ros/rosunit/src".split(";")
for p in reversed(__extended_path):
sys_path.insert(0, p)
del p
del sys_path
__path__ = extend_path(__path__, __name__)
del extend_path
__execfiles = []
for p in __extended_path:
src_init_file = os_path.join(p, __name__ + '.py')
if os_path.isfile(src_init_file):
__execfiles.append(src_init_file)
else:
src_init_file = os_path.join(p, __name__, '__init__.py')
if os_path.isfile(src_init_file):
__execfiles.append(src_init_file)
del src_init_file
del p
del os_path
del __extended_path
for __execfile in __execfiles:
with open(__execfile, 'r') as __fh:
exec(__fh.read())
del __fh
del __execfile
del __execfiles
|
[
"twighk@outlook.com"
] |
twighk@outlook.com
|
a25d4a8e8452c08db22e1d70eef2fffd07c537c2
|
9cd180fc7594eb018c41f0bf0b54548741fd33ba
|
/sdk/python/pulumi_azure_nextgen/dbformariadb/latest/firewall_rule.py
|
dd52b6b640e5bb747bacd94f8df589763e677d12
|
[
"Apache-2.0",
"BSD-3-Clause"
] |
permissive
|
MisinformedDNA/pulumi-azure-nextgen
|
c71971359450d03f13a53645171f621e200fe82d
|
f0022686b655c2b0744a9f47915aadaa183eed3b
|
refs/heads/master
| 2022-12-17T22:27:37.916546
| 2020-09-28T16:03:59
| 2020-09-28T16:03:59
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 5,897
|
py
|
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union
from ... import _utilities, _tables
__all__ = ['FirewallRule']
class FirewallRule(pulumi.CustomResource):
def __init__(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
end_ip_address: Optional[pulumi.Input[str]] = None,
firewall_rule_name: Optional[pulumi.Input[str]] = None,
resource_group_name: Optional[pulumi.Input[str]] = None,
server_name: Optional[pulumi.Input[str]] = None,
start_ip_address: Optional[pulumi.Input[str]] = None,
__props__=None,
__name__=None,
__opts__=None):
"""
Represents a server firewall rule.
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[str] end_ip_address: The end IP address of the server firewall rule. Must be IPv4 format.
:param pulumi.Input[str] firewall_rule_name: The name of the server firewall rule.
:param pulumi.Input[str] resource_group_name: The name of the resource group. The name is case insensitive.
:param pulumi.Input[str] server_name: The name of the server.
:param pulumi.Input[str] start_ip_address: The start IP address of the server firewall rule. Must be IPv4 format.
"""
if __name__ is not None:
warnings.warn("explicit use of __name__ is deprecated", DeprecationWarning)
resource_name = __name__
if __opts__ is not None:
warnings.warn("explicit use of __opts__ is deprecated, use 'opts' instead", DeprecationWarning)
opts = __opts__
if opts is None:
opts = pulumi.ResourceOptions()
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.version is None:
opts.version = _utilities.get_version()
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = dict()
if end_ip_address is None:
raise TypeError("Missing required property 'end_ip_address'")
__props__['end_ip_address'] = end_ip_address
if firewall_rule_name is None:
raise TypeError("Missing required property 'firewall_rule_name'")
__props__['firewall_rule_name'] = firewall_rule_name
if resource_group_name is None:
raise TypeError("Missing required property 'resource_group_name'")
__props__['resource_group_name'] = resource_group_name
if server_name is None:
raise TypeError("Missing required property 'server_name'")
__props__['server_name'] = server_name
if start_ip_address is None:
raise TypeError("Missing required property 'start_ip_address'")
__props__['start_ip_address'] = start_ip_address
__props__['name'] = None
__props__['type'] = None
alias_opts = pulumi.ResourceOptions(aliases=[pulumi.Alias(type_="azure-nextgen:dbformariadb/v20180601:FirewallRule"), pulumi.Alias(type_="azure-nextgen:dbformariadb/v20180601preview:FirewallRule")])
opts = pulumi.ResourceOptions.merge(opts, alias_opts)
super(FirewallRule, __self__).__init__(
'azure-nextgen:dbformariadb/latest:FirewallRule',
resource_name,
__props__,
opts)
@staticmethod
def get(resource_name: str,
id: pulumi.Input[str],
opts: Optional[pulumi.ResourceOptions] = None) -> 'FirewallRule':
"""
Get an existing FirewallRule resource's state with the given name, id, and optional extra
properties used to qualify the lookup.
:param str resource_name: The unique name of the resulting resource.
:param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
__props__ = dict()
return FirewallRule(resource_name, opts=opts, __props__=__props__)
@property
@pulumi.getter(name="endIpAddress")
def end_ip_address(self) -> pulumi.Output[str]:
"""
The end IP address of the server firewall rule. Must be IPv4 format.
"""
return pulumi.get(self, "end_ip_address")
@property
@pulumi.getter
def name(self) -> pulumi.Output[str]:
"""
The name of the resource
"""
return pulumi.get(self, "name")
@property
@pulumi.getter(name="startIpAddress")
def start_ip_address(self) -> pulumi.Output[str]:
"""
The start IP address of the server firewall rule. Must be IPv4 format.
"""
return pulumi.get(self, "start_ip_address")
@property
@pulumi.getter
def type(self) -> pulumi.Output[str]:
"""
The type of the resource. Ex- Microsoft.Compute/virtualMachines or Microsoft.Storage/storageAccounts.
"""
return pulumi.get(self, "type")
def translate_output_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
def translate_input_property(self, prop):
return _tables.SNAKE_TO_CAMEL_CASE_TABLE.get(prop) or prop
|
[
"public@paulstack.co.uk"
] |
public@paulstack.co.uk
|
f24b30682ffecb2b5a32830e62d9b425c521d277
|
54553be3dda33ce7865f28a2e81b4e5ae72dac7e
|
/synapse/lib/sqlite.py
|
79a978392421c2632893a542c0bba3e76b008e20
|
[
"Apache-2.0"
] |
permissive
|
mari0d/synapse
|
52d9402ed80ca80d1f5185dccc7fd6cb2b812feb
|
4fd8b345ddcd46fe2c780caa582d1263fbf172ee
|
refs/heads/master
| 2021-08-30T04:23:41.137920
| 2017-12-14T19:51:14
| 2017-12-14T19:51:14
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 507
|
py
|
import sqlite3
import synapse.lib.db as s_db
'''
Integration utilities for sqlite db pools.
'''
# turn on db cache sharing
sqlite3.enable_shared_cache(1)
def pool(size, path, **kwargs):
    '''
    Create an sqlite connection pool.

    Args:
        size (int): Number of connections in the pool
        path (str): Path to the sqlite database file
    '''
def ctor():
db = sqlite3.connect(path, check_same_thread=False)
db.cursor().execute('PRAGMA read_uncommitted=1').close()
return db
return s_db.Pool(size, ctor=ctor)
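# Usage sketch (the path is an illustrative assumption):
#   dbpool = pool(4, '/tmp/example.db')
# Each pooled connection shares the page cache (shared cache is enabled
# above) and allows dirty reads via PRAGMA read_uncommitted.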
|
[
"invisigoth.kenshoto@gmail.com"
] |
invisigoth.kenshoto@gmail.com
|
c65063c16efbcaef1560909ad2312ed2b5c5d158
|
7066555f4c2ff9b405754d2e793b97bf04b6ab98
|
/jianzhi-offer/04.py
|
9043030e7a970938f899c10b58708952719fc874
|
[] |
no_license
|
yangtao0304/hands-on-programming-exercise
|
c0d0fe324ffaf73c7b4c45aba721a245a8cc9ce2
|
cc7740026c3774be21ab924b99ae7596ef20d0e4
|
refs/heads/master
| 2020-09-11T02:05:51.305196
| 2020-03-19T03:45:53
| 2020-03-19T03:45:53
| 221,904,831
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 448
|
py
|
from typing import List

class Solution:
def findNumberIn2DArray(self, matrix: List[List[int]], target: int) -> bool:
R = len(matrix)
if R == 0:
return False
C = len(matrix[0])
r, c = 0, C-1
while 0 <= r < R and 0 <= c < C:
if matrix[r][c] > target:
c -= 1
elif matrix[r][c] < target:
r += 1
else:
return True
return False
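# Worked example (matrix values assumed for illustration; rows and columns
# are both sorted ascending, as the search-from-top-right walk requires):
#   m = [[1, 4, 7],
#        [2, 5, 8],
#        [3, 6, 9]]
#   Solution().findNumberIn2DArray(m, 5)   # True:  7 -> 4 -> 5
#   Solution().findNumberIn2DArray(m, 10)  # False: walks off the grid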
|
[
"im.yangtao0304@gmail.com"
] |
im.yangtao0304@gmail.com
|
7f8c4b566d38a4e701f4b0ef300ea8cc157b73d1
|
009df7ad499b19a4df066160cf0c7d8b20355dfb
|
/src/the_tale/the_tale/game/companions/abilities/relations.py
|
7ac90d034fcb5fb146883eb23f31cb9e408467ca
|
[
"BSD-3-Clause"
] |
permissive
|
devapromix/the-tale
|
c0804c7475e877f12f29444ddbbba025561d3412
|
2a10efd3270734f8cf482b4cfbc5353ef8f0494c
|
refs/heads/develop
| 2020-03-28T20:26:30.492292
| 2018-10-07T17:32:46
| 2018-10-07T17:32:46
| 149,070,887
| 1
| 0
|
BSD-3-Clause
| 2018-10-07T17:32:47
| 2018-09-17T04:57:50
|
Python
|
UTF-8
|
Python
| false
| false
| 6,846
|
py
|
import smart_imports
smart_imports.all()
class METATYPE(rels_django.DjangoEnum):
description = rels.Column()
records = (('TRAVEL', 0, 'дорожная', 'влияет на скорость путешествия героя'),
('BATTLE', 1, 'боевая', 'влияет на битвы'),
('MONEY', 2, 'денежная', 'влияет на деньги и предметы'),
('OTHER', 3, 'необычная', 'имеет особый эффект'),
('UNCHANGEBLE', 4, 'неизменная', 'оказывает постоянный эффект, независимо от других свойств спутника или героя'))
class EFFECT(rels_django.DjangoEnum):
metatype = rels.Column(unique=False)
records = (('COHERENCE_SPEED', 0, 'скорость изменения слаженности', METATYPE.UNCHANGEBLE),
('CHANGE_HABITS', 1, 'изменение характера', METATYPE.UNCHANGEBLE),
('QUEST_MONEY_REWARD', 2, 'денежная награда за задание', METATYPE.MONEY),
('MAX_BAG_SIZE', 3, 'максимальный размер рюкзака', METATYPE.UNCHANGEBLE),
('POLITICS_POWER', 4, 'бонус к влиянию', METATYPE.OTHER),
('MAGIC_DAMAGE_BONUS', 5, 'бонус к магическому урону героя', METATYPE.BATTLE),
('PHYSIC_DAMAGE_BONUS', 6, 'бонус к физическому урону героя', METATYPE.BATTLE),
('SPEED', 7, 'бонус к скорости движения героя', METATYPE.TRAVEL),
('INITIATIVE', 9, 'инициатива', METATYPE.BATTLE),
('BATTLE_PROBABILITY', 10, 'вероятность начала боя', METATYPE.TRAVEL),
('LOOT_PROBABILITY', 11, 'вероятность получить добычу', METATYPE.BATTLE),
('COMPANION_DAMAGE', 12, 'урон по спутнику', METATYPE.UNCHANGEBLE),
('COMPANION_DAMAGE_PROBABILITY', 13, 'вероятность получить урон', METATYPE.BATTLE),
('COMPANION_STEAL_MONEY', 14, 'спутник крадёт деньги', METATYPE.MONEY),
('COMPANION_STEAL_ITEM', 15, 'спутник крадёт предметы', METATYPE.MONEY),
('COMPANION_SPARE_PARTS', 16, 'спутник разваливается на дорогие запчасти', METATYPE.UNCHANGEBLE),
('COMPANION_EXPERIENCE', 17, 'спутник так или иначе приносит опыт', METATYPE.OTHER),
('COMPANION_DOUBLE_ENERGY_REGENERATION', 18, 'герой может восстновить в 2 раза больше энергии', METATYPE.OTHER),
('COMPANION_REGENERATION', 19, 'спутник как-либо восстанавливает своё здоровье', METATYPE.OTHER),
('COMPANION_EAT', 20, 'спутник требует покупки еды', METATYPE.MONEY),
('COMPANION_EAT_DISCOUNT', 21, 'у спутника есть скидка на покупку еды', METATYPE.MONEY),
('COMPANION_DRINK_ARTIFACT', 22, 'спутник пропивает артефакты при посещении города', METATYPE.MONEY),
('COMPANION_EXORCIST', 23, 'спутник является экзорцистом', METATYPE.BATTLE),
('REST_LENGTH', 24, 'изменение скорости лечения на отдыхе', METATYPE.TRAVEL),
('IDLE_LENGTH', 25, 'изменение времени бездействия', METATYPE.TRAVEL),
('COMPANION_BLOCK_PROBABILITY', 26, 'вероятность блока спутника', METATYPE.BATTLE),
('HUCKSTER', 27, 'спутник даёт бонус к цене продажи и покупки', METATYPE.MONEY),
('MIGHT_CRIT_CHANCE', 28, 'шанс критического срабатывания способности хранителя', METATYPE.OTHER),
('BATTLE_ABILITY_HIT', 29, 'небольшое увеличение инициативы и способность удар', METATYPE.BATTLE),
('BATTLE_ABILITY_STRONG_HIT', 30, 'небольшое увеличение инициативы и способность сильный удар', METATYPE.BATTLE),
('BATTLE_ABILITY_RUN_UP_PUSH', 31, 'небольшое увеличение инициативы и способность ошеломление', METATYPE.BATTLE),
('BATTLE_ABILITY_FIREBALL', 32, 'небольшое увеличение инициативы и способность пиромания', METATYPE.BATTLE),
('BATTLE_ABILITY_POSION_CLOUD', 33, 'небольшое увеличение инициативы и способность ядовитость', METATYPE.BATTLE),
('BATTLE_ABILITY_FREEZING', 34, 'небольшое увеличение инициативы и способность контроль', METATYPE.BATTLE),
('COMPANION_TELEPORTATION', 35, 'спутник как-либо перемещает героя в пути', METATYPE.TRAVEL),
('DEATHY', 36, 'для смерти, распугивает всех', METATYPE.UNCHANGEBLE),
('RARITY', 37, 'редкость', METATYPE.UNCHANGEBLE),
('LEAVE_HERO', 38, 'покидает героя', METATYPE.UNCHANGEBLE))
class FIELDS(rels_django.DjangoEnum):
common = rels.Column(unique=False)
records = (('COHERENCE_SPEED', 0, 'слаженность', False),
('HONOR', 1, 'честь', False),
('PEACEFULNESS', 2, 'миролюбие', False),
('START_1', 3, 'начальная 1', False),
('START_2', 4, 'начальная 2', False),
('START_3', 5, 'начальная 3', False),
('START_4', 6, 'начальная 4', False),
('START_5', 7, 'начальная 5', False),
('ABILITY_1', 8, 'способность 1', True),
('ABILITY_2', 9, 'способность 2', True),
('ABILITY_3', 10, 'способность 3', True),
('ABILITY_4', 11, 'способность 4', True),
('ABILITY_5', 12, 'способность 5', True),
('ABILITY_6', 13, 'способность 6', True),
('ABILITY_7', 14, 'способность 7', True),
('ABILITY_8', 15, 'способность 8', True),
('ABILITY_9', 16, 'способность 9', True))
|
[
"a.eletsky@gmail.com"
] |
a.eletsky@gmail.com
|
bf8f80474745344c894d3080d2ebb03d8384fbdf
|
2dd26e031162e75f37ecb1f7dd7f675eeb634c63
|
/nemo/collections/nlp/modules/common/megatron/adapters/__init__.py
|
12c02a5e041c8959108c66bb031d366b7d346779
|
[
"Apache-2.0"
] |
permissive
|
NVIDIA/NeMo
|
1b001fa2ae5d14defbfd02f3fe750c5a09e89dd1
|
c20a16ea8aa2a9d8e31a98eb22178ddb9d5935e7
|
refs/heads/main
| 2023-08-21T15:28:04.447838
| 2023-08-21T00:49:36
| 2023-08-21T00:49:36
| 200,722,670
| 7,957
| 1,986
|
Apache-2.0
| 2023-09-14T18:49:54
| 2019-08-05T20:16:42
|
Python
|
UTF-8
|
Python
| false
| false
| 625
|
py
|
# coding=utf-8
# Copyright (c) 2020, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
|
[
"noreply@github.com"
] |
NVIDIA.noreply@github.com
|
5759a5b0fc9daa4829c1e47292239d9b10d0c643
|
60223e7376275631710d5ff8231e6cab23b383bb
|
/sportsbet/registration/migrations/0001_initial.py
|
7db1135783099878ca9b97c607d7ae98abaaabab
|
[] |
no_license
|
sangeeth-subramoniam/sportsbet
|
5f07a8f8013cf1bcf88d7a2ef9ffa26ce8c24eb8
|
d41af45bc49c0ac1c62d000026b2d7f0684369a6
|
refs/heads/main
| 2023-08-24T06:08:01.980781
| 2021-10-27T05:15:55
| 2021-10-27T05:15:55
| 418,737,197
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 917
|
py
|
# Generated by Django 3.2.8 on 2021-10-19 02:32
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='user_profile',
fields=[
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('website', models.URLField(blank=True)),
('profile_picture', models.ImageField(blank=True, upload_to='profile_pictures')),
('bio', models.CharField(blank=True, max_length=300)),
('user', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
],
),
]
|
[
"s-sangeeth-k@sicis.co.jp"
] |
s-sangeeth-k@sicis.co.jp
|
3f30af29870f82c99f507dfb86f222a60a8a626a
|
e7b7505c084e2c2608cbda472bc193d4a0153248
|
/LeetcodeNew/python/LC_813.py
|
f7f4a9deb62825b29cd55f0a2eebd63f5687bf09
|
[] |
no_license
|
Taoge123/OptimizedLeetcode
|
8e5c1cd07904dfce1248bc3e3f960d2f48057a5d
|
3e50f6a936b98ad75c47d7c1719e69163c648235
|
refs/heads/master
| 2023-02-27T21:13:40.450089
| 2023-02-07T04:11:09
| 2023-02-07T04:11:09
| 170,044,224
| 9
| 3
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,187
|
py
|
"""
X X X X X [X X i]
dp[i][K] : the maximum of sum of the average of the k groups
dp[j][K-1] + avg(j+1, i):
"""
import collections
class SolutionTony:
def largestSumOfAverages(self, nums, k: int) -> float:
memo = {}
return self.dfs(nums, 0, k, memo)
def dfs(self, nums, i, k, memo):
if (i, k) in memo:
return memo[(i, k)]
n = len(nums)
if k == 1:
return sum(nums[i:]) / (n - i)
size, summ = 0, 0
res = 0
for j in range(i, n - k + 1):
summ += nums[j]
size += 1
res = max(res, self.dfs(nums, j + 1, k - 1, memo) + summ / size)
memo[(i, k)] = res
return res
class SolutionTony2:
def largestSumOfAverages(self, nums, k: int) -> float:
memo = {}
self.presum = [0]
for num in nums:
self.presum.append(num + self.presum[-1])
return self.dfs(nums, 0, k, memo)
def dfs(self, nums, i, k, memo):
if (i, k) in memo:
return memo[(i, k)]
n = len(nums)
if k == 1:
return sum(nums[i:]) / (n - i)
size = 0
res = 0
for j in range(i, n - k + 1):
summ = self.presum[j + 1] - self.presum[i]
res = max(res, self.dfs(nums, j + 1, k - 1, memo) + summ / (j - i + 1))
memo[(i, k)] = res
return res
class Solution:
def largestSumOfAverages(self, A, K: int) -> float:
n = len(A)
memo = collections.defaultdict(int)
summ = 0
# calculate the average - like preSum
for i in range(n):
summ += A[i]
memo[(i + 1, 1)] = summ / (i + 1)
return self.dfs(A, K, n, memo)
def dfs(self, A, k, n, memo):
if memo[(n, k)]:
return memo[(n, k)]
if n < k:
return 0
summ = 0
for i in range(n - 1, -1, -1):
summ += A[i]
memo[(n, k)] = max(memo[(n, k)], self.dfs(A, k - 1, i, memo) + summ / (n - i))
return memo[(n, k)]
"""
X X X X X X X X X
i n
"""
class SolutionBU:
def largestSumOfAverages(self, A, K: int) -> float:
n = len(A)
dp = [[0 for j in range(K+1)] for i in range(n)]
summ = 0
for i in range(n):
summ += A[i]
dp[i][1] = summ / (i+1)
for k in range(2, K+ 1):
for i in range(k - 1, n):
summ = 0
# j >= k - 1
for j in range(i, k - 2, -1):
summ += A[j]
dp[i][k] = max(dp[i][k], dp[j - 1][k - 1] + summ / (i - j + 1))
return dp[n - 1][K]
class SolutionDP2:
def largestSumOfAverages(self, A, K):
prefix = [0]
for x in A:
prefix.append(prefix[-1] + x)
def average(i, j):
return (prefix[j] - prefix[i]) / (j - i)
n = len(A)
dp = [average(i, n) for i in range(n)]
for k in range(K - 1):
for i in range(n):
for j in range(i + 1, n):
dp[i] = max(dp[i], average(i, j) + dp[j])
return dp[0]
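# Worked example (the classic LC 813 sample, included here as a sanity check):
#   nums = [9, 1, 2, 3, 9], k = 3
#   best partition: [9] | [1, 2, 3] | [9] -> 9 + 2 + 9 = 20.0
#   SolutionTony().largestSumOfAverages(nums, 3)  # -> 20.0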
|
[
"taocheng984@gmail.com"
] |
taocheng984@gmail.com
|
cba321e8cc0e76f36c27b898e78e56c7ba09dde5
|
9cfdfe633dfb2755955f9d356fdd0a9601089955
|
/accounts/forms.py
|
ce49fb97a27747bb33ce17c2986e8f8c9c77e3f1
|
[] |
no_license
|
DimAntDim/ResumeBuilder
|
cec597ba4b857d98147e2f5f6831bd3c93c83c80
|
0507d5d9c44936d892df280015f7c6d8e630f55b
|
refs/heads/main
| 2023-08-21T08:03:43.327868
| 2021-11-03T19:43:04
| 2021-11-03T19:43:04
| 394,882,643
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 709
|
py
|
from .models import Profile
from django import forms
from django.contrib.auth import get_user_model
UserModel = get_user_model()
class ProfileForm(forms.ModelForm):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.fields['first_name'].widget.attrs.update({'class': 'input-field', 'placeholder':'First name'})
self.fields['profile_image'].widget.attrs.update({'class': 'input-field', 'value':'Upload photo'})
self.fields['last_name'].widget.attrs.update({'class': 'input-field', 'placeholder':'Last name'})
class Meta:
model = Profile
fields = "__all__"
exclude = ('user', 'is_complete', 'template_selected',)
|
[
"66394357+DimAntDim@users.noreply.github.com"
] |
66394357+DimAntDim@users.noreply.github.com
|
baa4b16248b44f4c2c45800896451fa44644c5a4
|
6fa7f99d3d3d9b177ef01ebf9a9da4982813b7d4
|
/9AMT6SC4Jz8tExihs_9.py
|
bb2c304e4cf2aa2ec2362ec9ace44483c5877dc3
|
[] |
no_license
|
daniel-reich/ubiquitous-fiesta
|
26e80f0082f8589e51d359ce7953117a3da7d38c
|
9af2700dbe59284f5697e612491499841a6c126f
|
refs/heads/master
| 2023-04-05T06:40:37.328213
| 2021-04-06T20:17:44
| 2021-04-06T20:17:44
| 355,318,759
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 413
|
py
|
def countStrings(n):
    # collect every value in [0, 2^n) whose binary form has
    # no two consecutive set bits (i & (i << 1) detects adjacent 1s)
    x = []
    x.append(0)
    p = (1 << n)
    for i in range(1, p):
        if ((i & (i << 1)) == 0):
            x.append(i)
    return x

def generate_nonconsecutive(n):
    # render each value as an n-character, zero-padded binary string;
    # bin() output keeps a leading '0' after stripping 'b', so the slice
    # a[d:] trims any excess characters back down to length n
    res, c = '', n
    for i in countStrings(n):
        a = bin(i).replace('b', '')
        if len(a) < c:
            b = '0' * (c - len(a))
            a = b + a
        d = len(a) - c
        res += a[d:] + ' '
    return res.strip()
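# Example (verified by hand): all 3-bit strings with no consecutive 1s
#   generate_nonconsecutive(3)  # -> '000 001 010 100 101'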
|
[
"daniel.reich@danielreichs-MacBook-Pro.local"
] |
daniel.reich@danielreichs-MacBook-Pro.local
|
6cefd5f8948ce9a2d3b3df747ce88b2daa6c80b6
|
741c5c70bf4a0adb05db6b0777c8d07e28eb9cf6
|
/bin/easy_install-3.4
|
1f532eda7ff95e38c48cd8828a33ad5a0823bea9
|
[] |
no_license
|
andybp85/hyLittleSchemer
|
e686d2dc0f9067562367ea1173f275e8e2d2cb85
|
af5cb6adf6a196cc346aa7d14d7f9509e084c414
|
refs/heads/master
| 2021-01-19T07:48:31.309949
| 2015-01-04T00:57:30
| 2015-01-04T00:57:30
| 28,496,304
| 6
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 271
|
4
|
#!/Users/andrew/dev/python/virtualenvs/HY3/bin/python3.4
# -*- coding: utf-8 -*-
import re
import sys
from setuptools.command.easy_install import main
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
sys.exit(main())
|
[
"andy@youshallthrive.com"
] |
andy@youshallthrive.com
|
5ac5bca329b95a6f0263befae741e5d9b76a1eb7
|
14bd4b2131dfc1a7a4c7b2518e694f7817763ecf
|
/core/migrations/0005_auto_20180107_1711.py
|
9f3c5f53e6d48c581f5e4c1af22c3e3c30fb80d8
|
[] |
no_license
|
itkartell/vozhakov
|
ea5ca11bd5aa2a2de2d7fc69d39c569ddaaaaac4
|
dcf516a476920a5d3313ec1a246c3b980b57c0c7
|
refs/heads/master
| 2021-05-09T02:02:32.328687
| 2018-01-21T22:20:54
| 2018-01-21T22:20:54
| 119,194,706
| 0
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 742
|
py
|
# Generated by Django 2.0 on 2018-01-07 17:11
import ckeditor_uploader.fields
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('core', '0004_auto_20180107_1709'),
]
operations = [
migrations.AddField(
model_name='video',
name='description',
field=ckeditor_uploader.fields.RichTextUploadingField(blank=True, null=True, verbose_name='Описание видео'),
),
migrations.AlterField(
model_name='video',
name='link',
field=models.CharField(default='', max_length=255, verbose_name='Ссылка на видео'),
preserve_default=False,
),
]
|
[
"greenteamer@bk.ru"
] |
greenteamer@bk.ru
|
eff984e146cda1a9191c3692e08c2743385564ce
|
626833b2f8955b1d5b6ac33328e689c793f1a1c2
|
/Chapter6/BA6A.py
|
6d62022674126e96ceee657f64806ab15aa9a646
|
[] |
no_license
|
Leoberium/BA
|
8b1bbe9ddf43e9498b3e7419fbdc7ae81e151c43
|
61409d57a228188e7d8ce78db20cf926f2a1d34d
|
refs/heads/master
| 2020-09-22T14:43:43.338784
| 2019-12-01T23:24:12
| 2019-12-01T23:24:12
| 225,244,710
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 994
|
py
|
import sys
def greedy_sorting(p):
permutations = []
for i in range(len(p)):
if abs(p[i]) != i + 1:
index = 0
for j in range(i + 1, len(p)):
if abs(p[j]) == i + 1:
index = j
break
m = (index - i) // 2
for j in range(m + 1):
p[j + i], p[index - j] = -p[index - j], -p[j + i]
permutations.append(p.copy())
if p[i] == -(i + 1):
p[i] = -p[i]
permutations.append(p.copy())
if p[i] == -(i + 1):
p[i] = -p[i]
permutations.append(p.copy())
return permutations
def main():
sp = sys.stdin.readline().strip()
sp = sp[1:(len(sp) - 1)]
sp = list(map(int, sp.split()))
for p in greedy_sorting(sp):
p = list(map(lambda x: '+' + str(x) if x > 0 else str(x), p))
print('(', ' '.join(p), ')', sep='')
if __name__ == '__main__':
main()
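# Usage sketch (sample input taken from the Rosalind BA6A statement):
#   echo "(-3 +4 +1 +5 -2)" | python BA6A.py
# first two printed permutations: (-1 -4 +3 +5 -2) then (+1 -4 +3 +5 -2),
# continuing until the identity permutation is reached.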
|
[
"leo.mazaev@gmail.com"
] |
leo.mazaev@gmail.com
|
7e784cf82de7c14d0d95252ffdab214f2a713642
|
3d19e1a316de4d6d96471c64332fff7acfaf1308
|
/Users/L/legz/twitter_hashtag_user_friendship_network_-2gen.py
|
5c7a965a16f2b358932ba349750cf12c813a5d34
|
[] |
no_license
|
BerilBBJ/scraperwiki-scraper-vault
|
4e98837ac3b1cc3a3edb01b8954ed00f341c8fcc
|
65ea6a943cc348a9caf3782b900b36446f7e137d
|
refs/heads/master
| 2021-12-02T23:55:58.481210
| 2013-09-30T17:02:59
| 2013-09-30T17:02:59
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 8,662
|
py
|
#Script to grab a list of recent hashtag users from Twitter and graph their friendship connections
#NOTE this goes against the spirit of Scraperwiki somewhat, because I am using a view on its own to grab and display the data
#As to why I'm not grabbing the Twitter data into a Scraperwiki db using a Scraperwiki scraper... I'm not sure Twitter T&Cs allow that...
#There is a good chance you can run this on your own machine if you install the necessary libraries (see below).
#To do that, copy all this code and save it in a file on you own computer as test.py
#On a linux or mac, using the terminal on your computer, cd to the directory containing test.py and then enter:
# python test.py > test.gexf
#The programme should run and the output should be sent to the file test.gexf, which you can then load into Gephi, for example.
#If you want to watch progress, set debug=True
#This will break the XML output of the graph file though..
#...so if you want well formed XML output in the actual view, set debug=False
debug=False
#If you run this script on your own machine, you need to install a couple of libraries first if you haven't already done so
#from the command line: easy_install tweepy
import tweepy
#from the command line: easy_install networkx
import networkx as nx
#We're going to build up a list of folk who've recently used the hashtag
sender_ids=[]
#Some config stuff...
rpp=20 #results per page; 100 is the max
limit=20 #the most tweets we want to return, max 1500
searchterm='zapier lang:fr'
#only grab up to the limit...
if limit>rpp: rpp=limit
#I'm going to use networkx to build up a graph of friend connections between recent searchterm users
DG=nx.DiGraph()
#The advantage of networkx is that we can then easily print out the network in a variety of graph viz tool friendly formats
#run a Twitter search using the tweepy library
for result in tweepy.Cursor(tweepy.api.search,q=searchterm,rpp=rpp).items(limit):
user_name=result.from_user
user_id=result.from_user_id
if user_id not in sender_ids:
if debug: print 'New user',user_name,user_id
sender_ids.append(user_id)
DG.add_node(user_id,label=user_name,name=user_name)
#The next step is to grab the friends of each user (likely to be fewer than their followers), and construct a graph
#Note that we really should keep tabs on how many API calls we have made in case we hit the limit...
#(I think calls to the search API don't count...)
#Each grab of the friends list grabs up to 5k users (I think) but if a user has 20k friends, that may use 4 calls etc
# most folk have < 5000 friends, so assume 1 call per friend
# Unauthenticated calls are limited at 150 calls per hour
# Authenticated calls give you 350 per hour
# So as a rule of thumb, if no-one else is running unauthenticated calls from Scraperwiki, run this at most once an hour to pull back
## hashtag networks containing no more than 100 people...
for user in sender_ids:
#that is, for each unique recent user of the search term...
if debug: print 'getting friends of',user
#grab the friends of the current user
friends=tweepy.Cursor(tweepy.api.friends_ids,id=user).items()
#we're now going to look through the friends list to see if any friends are recent searchterm users themselves
for friend in friends:
if friend in sender_ids:
#if they are, add them to the graph with an edge from user to friend
DG.add_edge(user,friend)
#networkx has a variety of file export filters available, such as GEXF
#gexf files can be imported directly into a tool such as Gephi (gephi.org)
import networkx.readwrite.gexf as gf
writer=gf.GEXFWriter(encoding='utf-8',prettyprint=True,version='1.1draft')
writer.add_graph(DG)
try:
import scraperwiki
scraperwiki.utils.httpresponseheader("Content-Type", "text/xml")
except:
pass
try:
from xml.etree.cElementTree import tostring
print tostring(writer.xml)
except:
print 'You need to find a way of printing out the XML object...'
#If you run/preview this view in scraperwiki, assuming you haven't maxed out on the Twitter API,
##or spent so long running the script/waiting that scraperwiki has timed out the script on you,
##you should see an XML based GEXF file. Save it with a .gexf suffix, then open it in something like Gephi... :-)
|
[
"pallih@kaninka.net"
] |
pallih@kaninka.net
|
43ecc023480a4d0a3ae444fdbb018d62949e576f
|
277cd286fa69eb7ab03b806a620322d5701b04b3
|
/2017/round2/freshCholocate.py
|
9a32325d5177e3651f13eb3194f6926336f867be
|
[] |
no_license
|
YeahHuang/Codejam
|
5cd96d45cdb8d26eadb1e3a5f1fae8c1faef4e48
|
5086696b9dd1ac7b09f0cab34b643dde1d916e0b
|
refs/heads/master
| 2021-05-24T17:36:36.314823
| 2020-07-19T15:52:52
| 2020-07-19T15:52:52
| 253,679,729
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,098
|
py
|
from bisect import bisect_left, bisect_right, insort_left, insort_right
from string import ascii_lowercase
from heapq import heappush, heappop, heapify
from collections import Counter, defaultdict
from itertools import product
import random
import sys
"""
11:57 - 13:01 pass
n(number of groups) p(number of pieces of chocolate per pack)
g1..gn 每组多少人
Case #x: y(the number of groups that will receive only fresh chocolate if you bring them in in an order that maximizes that number)
n: [1,100]
gi [1,100]
小 p [2,3]
大 p [2,4]
"""
global debug, test
def solve():
global debug, test
T = int(input())
debug = False
test = True
for it in range(T):
n, p = map(int, input().split())
g = list(map(lambda x: int(x)%p, input().split()))
cnt = Counter(g)
for i in range(p):
if i not in cnt.keys():
cnt[i] = 0
ans = 0
ans += cnt[0]
        cnt[0] = 0  # forgot this at first, got WA twice
if debug:
print("step0 add %d"%ans)
for i in range(1, p):
if i == p-i:
ans += cnt[i] // 2
cnt[i] -= cnt[i] // 2 * 2
else:
pair = min(cnt[i], cnt[p-i])
ans, cnt[i], cnt[p-i] = ans + pair, cnt[i]- pair, cnt[p-i]-pair
if debug:
print("2 combine to 1ans=%d"%ans)
if p == 3:
            # three groups with the same remainder (1 1 1 or 2 2 2) combine; the first gets fresh
ans += cnt[1] // 3 + cnt[2] // 3
cnt[1], cnt[2] = cnt[1]%3, cnt[2]%3
elif p == 4:
            # three groups combine: remainders 1 1 2 or 3 3 2
if cnt[2] > 0:
if cnt[1] > 0:
pair = min(cnt[2], cnt[1]//2)
ans, cnt[1], cnt[2] = ans + pair, cnt[1] - pair*2, cnt[2] - pair
else:
pair = min(cnt[2], cnt[3]//2)
ans, cnt[3], cnt[2] = ans + pair, cnt[3] - pair*2, cnt[2] - pair
            # four groups combine: remainders 1 1 1 1 or 3 3 3 3
ans += cnt[1] // 4
cnt[1] %= 4
ans += cnt[3] // 4
cnt[3] %= 4
if debug:
print("2 combine ans = %d"%ans)
if sum([v for k, v in cnt.items()]) > 0:
ans += 1
print("Case #%d: %d"%(it+1, ans))
|
[
"noreply@github.com"
] |
YeahHuang.noreply@github.com
|
5b709f7498dc80885e92977e4067ecdcf71992e7
|
15f321878face2af9317363c5f6de1e5ddd9b749
|
/solutions_python/Problem_109/38.py
|
7c4de50235fef69c7b9596e465413432228daf45
|
[] |
no_license
|
dr-dos-ok/Code_Jam_Webscraper
|
c06fd59870842664cd79c41eb460a09553e1c80a
|
26a35bf114a3aa30fc4c677ef069d95f41665cc0
|
refs/heads/master
| 2020-04-06T08:17:40.938460
| 2018-10-14T10:12:47
| 2018-10-14T10:12:47
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,441
|
py
|
#!/usr/bin/python
import sys
def solve(N, W, L, students):
corners = set([(0, 0)])
locs = {}
ordered_students = list(enumerate(students))
ordered_students.sort(key=lambda a: -a[1])
for j, r in ordered_students:
for x, y in corners:
px = x
py = y
newx = x + r
if x > 0:
px += r
newx += r
newy = y + r
if y > 0:
py += r
newy += r
if px > W:
continue
if py > L:
continue
good = True
for i, (lx, ly) in locs.items():
if (lx - px) ** 2 + (ly - py) ** 2 < (r + students[i]) ** 2:
good = False
break
if not good:
continue
locs[j] = (px, py)
corners.remove((x, y))
corners.add((newx, y))
corners.add((x, newy))
break
else:
print W, L
print r, students
print corners
print locs
sys.stderr.write('error\n')
sys.exit(1)
return ' '.join('%s %s' % l for l in (locs[k] for k in range(N)))
T = int(raw_input())
for i in range(T):
N, W, L = map(int, raw_input().split())
print "Case #%i: %s" % (i+1, solve(N, W, L, map(int, raw_input().split())))
sys.stdout.flush()
|
[
"miliar1732@gmail.com"
] |
miliar1732@gmail.com
|
88cfb85af6861de642d593f1d15f51ff6a76f3d4
|
1780cd68f8927c2e8d57850ef2985c5a682dd2de
|
/wsh/__main__.py
|
c7adcbf6938075befdbb3048e01155e69ea2ea25
|
[] |
no_license
|
RyanKung/stack
|
68087b85560e0a4f334521123153694cb555f595
|
27e2563763dbca73e816ab699e3f5976d5b92a53
|
refs/heads/master
| 2021-01-21T15:21:56.766727
| 2016-07-27T02:53:45
| 2016-07-27T02:53:45
| 57,367,027
| 84
| 7
| null | 2016-05-22T04:17:55
| 2016-04-29T08:04:27
|
Python
|
UTF-8
|
Python
| false
| false
| 128
|
py
|
import sys
import re
from .main import main
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
sys.exit(main())
|
[
"ryankung@ieee.org"
] |
ryankung@ieee.org
|
12067521f8dcc24d30cfd4bdd28b91eb3e13998d
|
5777b01b9d6a6479cf2dc497ea8e3ff7f7dc7b48
|
/206_ReverseLinkedList.py
|
4d1b5eaa52406f88cbf5c4f45744d06c850ef63e
|
[
"MIT"
] |
permissive
|
brettchien/LeetCode
|
d0e6c980b814fe866667f7be23ee674949004a49
|
386b9e2f7c6389fed8825a5ec0b8c0ea733e780b
|
refs/heads/master
| 2021-03-12T21:24:06.552129
| 2016-08-27T06:10:55
| 2016-08-27T06:10:55
| 38,339,280
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 577
|
py
|
# Definition for singly-linked list.
# class ListNode:
# def __init__(self, x):
# self.val = x
# self.next = None
class Solution:
# @param {ListNode} head
# @return {ListNode}
    def reverseList(self, head):
        if not head:
            return head
        # dummy.next always points at the head of the partially reversed list
        dummy = ListNode(0)
        dummy.next = head
        current = head
        # head-insertion: repeatedly unlink the node after `current`
        # and splice it in at the front
        while current.next:
            prev = dummy.next
            tmp = current.next
            current.next = current.next.next
            tmp.next = prev
            dummy.next = tmp
        return dummy.next
|
[
"brett.chien@gmail.com"
] |
brett.chien@gmail.com
|
4bc511805642a574d45078d0a0c880bacbde8c51
|
51afd3d538f79c13af39dc13d974b2fe48be0285
|
/baseline_LR/train_test_multilr.py
|
c83a597a9828a59e59d3027bd83195528d105e0e
|
[] |
no_license
|
listenzcc/mne_signal_detection
|
5d6fc7831762281bfea5a3377a957a6b2688ca28
|
103e0b3736a14dd0b6763583d94b5bd1f47bdc9c
|
refs/heads/master
| 2020-04-04T19:18:11.299444
| 2018-12-07T01:26:11
| 2018-12-07T01:26:11
| 156,200,657
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,771
|
py
|
# coding: utf-8
import os
import sys
import numpy as np
import threading
import matplotlib.pyplot as plt
from sklearn.linear_model import LogisticRegression
sys.path.append('..')
from load_preprocess import get_epochs
def vstack(a, b):
if len(a) == 0:
return b
return np.vstack((a, b))
def scale(data):
shape = data.shape
for j in range(shape[0]):
for k in range(shape[1]):
baseline = data[j, k, 0:250]
m = np.mean(baseline)
s = np.std(baseline)
data[j][k] = (data[j][k] - m) / s
return data
ranges = [250, 350, 550, 650]
range_id = [1, 2, 1]
def get_Xy_from_data(data, ranges=ranges,
range_id=range_id):
X = []
y = []
shape = data.shape
for k in range(len(ranges)-1):
left, right = ranges[k], ranges[k+1]
id = range_id[k]
for j in range(left, right):
X = vstack(X, data[:, :, j])
y = vstack(y, id+np.zeros(shape[0]).reshape(shape[0], 1))
return X, y
def plot(X, y, axe, clf, title='title'):
axe.plot(y+0.1)
predict = clf.predict(X)
axe.plot(predict)
prob = clf.predict_proba(X) - 1
axe.plot(prob)
for j in range(0, len(y), 400):
axe.plot([j, j], list(axe.get_ylim()),
color='gray')
y = np.ravel(y)
predict = np.ravel(predict)
acc = np.count_nonzero(
(y > 1) == (predict > 1))/len(y)
title += ', acc %.2f' % acc
axe.set_title(title)
# Prepare filename QYJ, ZYF
filedir = 'D:/BeidaShuju/rawdata/QYJ'
fname_training_list = list(os.path.join(
filedir, 'MultiTraining_%d_raw_tsss.fif' % j)
for j in range(1, 6))
fname_testing_list = list(os.path.join(
filedir, 'MultiTest_%d_raw_tsss.fif' % j)
for j in range(1, 9))
ortids_training = [2, 6, 9, 14, 17, 33]
ortids_testing = [8, 16, 32, 64]
train = True
ortids = ortids_training
fname_list = fname_training_list
data_X = fname_list.copy()
data_y = fname_list.copy()
for j in range(len(fname_list)):
print(fname_list[j])
epochs = get_epochs(fname=fname_list[j], train=train)
data = epochs.get_data()
data_X[j] = ortids.copy()
data_y[j] = ortids.copy()
for k in range(len(ortids)):
data_ = data[epochs.events[:, 2] == ortids[k]]
data_ = scale(np.mean(data_, 0)[np.newaxis, :])
data_X[j][k], data_y[j][k] = get_Xy_from_data(
data_, range_id=[1, k+2, 1])
def train_clf(test_run, data_X=data_X, data_y=data_y):
X_train = []
y_train = []
X_test = []
y_test = []
for j in range(len(fname_list)):
if j == test_run:
for k in range(len(ortids)):
X_test = vstack(X_test, data_X[j][k])
y_test = vstack(y_test, data_y[j][k])
continue
for k in range(len(ortids)):
X_train = vstack(X_train, data_X[j][k])
y_train = vstack(y_train, data_y[j][k])
clf = LogisticRegression(multi_class='multinomial',
solver='newton-cg',
penalty='l2')
clf.fit(X_train, np.ravel(y_train))
return clf, X_test, y_test, X_train, y_train
def athread(test_run, axe, title):
print('%d running' % test_run)
clf, X_test, y_test, X_train, y_train = train_clf(test_run)
plot(X_test, y_test, axe, clf, title=title)
print('%d done' % test_run)
fig, axes = plt.subplots(5, 1)
threads = []
for test_run in range(5):
title = '%d' % (test_run)
print(title)
# athread(test_run, axe=axes[test_run], title=title)
t = threading.Thread(target=athread, args=(
test_run, axes[test_run], title))
threads.append(t)
for t in threads:
t.setDaemon(True)
t.start()
for t in threads:
t.join()
plt.show()
|
[
"listenzcc@mail.bnu.edu.cn"
] |
listenzcc@mail.bnu.edu.cn
|
d4cdff5dc3003601e3f5cd24332181069349a8c4
|
e9f5ab8edbc60272a6e0ee8ad97c84eec344e5d6
|
/examples/02_stdnormal_K1/plot-01-demo=deletes-model=dp_mix+gauss.py
|
b67a27f76ce42412714edf4a594f7940dd01042d
|
[
"BSD-3-Clause"
] |
permissive
|
s-mawjee/bnpy
|
85a114043b4d557e3e2baec9ce654a50712dce7d
|
57cc2d6545c6bd169132db5596a323ea52980d50
|
refs/heads/master
| 2020-04-24T23:45:57.889647
| 2019-05-08T11:07:08
| 2019-05-08T11:07:08
| 155,522,778
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 4,605
|
py
|
"""
========================================================================
Variational with merge and delete proposals for DP mixtures of Gaussians
========================================================================
How delete moves can be more effective than merges.
In this example, we show how merge moves alone may not be enough
to reliably escape local optima. Instead, we show that more flexible
delete moves can escape from situations where merges alone fail.
"""
# SPECIFY WHICH PLOT CREATED BY THIS SCRIPT IS THE THUMBNAIL IMAGE
# sphinx_gallery_thumbnail_number = 2
import bnpy
import numpy as np
import os
from matplotlib import pylab
import seaborn as sns
FIG_SIZE = (3, 3)
pylab.rcParams['figure.figsize'] = FIG_SIZE
###############################################################################
#
# Create toy dataset of many points drawn from standard normal
prng = np.random.RandomState(42)
X = prng.randn(100000, 1)
dataset = bnpy.data.XData(X, name='StandardNormalK1')
###############################################################################
#
# Make a simple plot of the raw data
pylab.hist(dataset.X[:, 0], 50, density=True)
pylab.xlabel('x')
pylab.ylabel('p(x)')
pylab.tight_layout()
###############################################################################
# Setup: Determine specific settings of the proposals
# ---------------------------------------------------
merge_kwargs = dict(
m_startLap=10,
m_pair_ranking_procedure='total_size',
m_pair_ranking_direction='descending',
)
delete_kwargs = dict(
d_startLap=10,
d_nRefineSteps=50,
)
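# Reading the keyword names above: m_startLap/d_startLap presumably delay the
# proposals until lap 10, merge pairs are ranked by total size in descending
# order, and each delete proposal gets 50 refinement steps.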
###############################################################################
#
# Setup: Helper function to display the learned clusters
# ------------------------------------------------------
def show_clusters_over_time(
task_output_path=None,
query_laps=[0, 1, 2, 10, 20, None],
nrows=2):
    '''Load saved model snapshots at the requested laps and plot the clusters.
    '''
    ncols = int(np.ceil(len(query_laps) / float(nrows)))
fig_handle, ax_handle_list = pylab.subplots(
figsize=(FIG_SIZE[0] * ncols, FIG_SIZE[1] * nrows),
nrows=nrows, ncols=ncols, sharex=True, sharey=True)
for plot_id, lap_val in enumerate(query_laps):
cur_model, lap_val = bnpy.load_model_at_lap(task_output_path, lap_val)
cur_ax_handle = ax_handle_list.flatten()[plot_id]
bnpy.viz.PlotComps.plotCompsFromHModel(
cur_model, dataset=dataset, ax_handle=cur_ax_handle)
cur_ax_handle.set_xlim([-4.5, 4.5])
cur_ax_handle.set_xlabel("lap: %d" % lap_val)
pylab.tight_layout()
###############################################################################
#
# Run with *merge* moves only, from K=5 initial clusters
# --------------------------------------------------------
#
# Unfortunately, no pairwise merge is accepted.
# The model is stuck using 5 clusters when one cluster would do.
gamma = 5.0
sF = 0.1
K = 5
m_trained_model, m_info_dict = bnpy.run(
dataset, 'DPMixtureModel', 'Gauss', 'memoVB',
output_path=('/tmp/StandardNormalK1/' +
'trymoves-K=%d-gamma=%s-ECovMat=%s*eye-moves=merge,shuffle/' % (
K, gamma, sF)),
nLap=100, nTask=1, nBatch=1,
gamma0=gamma, sF=sF, ECovMat='eye',
K=K, initname='randexamplesbydist',
moves='merge,shuffle',
    **merge_kwargs)
show_clusters_over_time(m_info_dict['task_output_path'])
###############################################################################
#
# Run with *delete* moves, from K=5 initial clusters
# --------------------------------------------------------
#
# More flexible delete moves *are* accepted.
d_trained_model, d_info_dict = bnpy.run(
dataset, 'DPMixtureModel', 'Gauss', 'memoVB',
output_path=('/tmp/StandardNormalK1/' +
'trymoves-K=%d-gamma=%s-ECovMat=%s*eye-moves=delete,shuffle/' % (
K, gamma, sF)),
nLap=100, nTask=1, nBatch=1,
gamma0=gamma, sF=sF, ECovMat='eye',
K=K, initname='randexamplesbydist',
moves='delete,shuffle',
    **delete_kwargs)
show_clusters_over_time(d_info_dict['task_output_path'])
###############################################################################
#
# Loss function trace plot
# ------------------------
#
pylab.plot(
m_info_dict['lap_history'][1:],
m_info_dict['loss_history'][1:], 'k.-',
label='vb_with_merges')
pylab.plot(
d_info_dict['lap_history'][1:],
d_info_dict['loss_history'][1:], 'b.-',
label='vb_with_deletes')
pylab.legend(loc='upper right')
pylab.xlabel('num. laps')
pylab.ylabel('loss')
pylab.tight_layout()
|
[
"mike@michaelchughes.com"
] |
mike@michaelchughes.com
|
e02578080980b2b450e344b20eee1ab7eb42e084
|
8dc84558f0058d90dfc4955e905dab1b22d12c08
|
/third_party/chromite/lib/cros_logging_unittest.py
|
5ec365c3addccd5a85b0afd3193dc35ebed13c47
|
[
"LGPL-2.0-or-later",
"GPL-1.0-or-later",
"MIT",
"Apache-2.0",
"BSD-3-Clause",
"LicenseRef-scancode-unknown-license-reference"
] |
permissive
|
meniossin/src
|
42a95cc6c4a9c71d43d62bc4311224ca1fd61e03
|
44f73f7e76119e5ab415d4593ac66485e65d700a
|
refs/heads/master
| 2022-12-16T20:17:03.747113
| 2020-09-03T10:43:12
| 2020-09-03T10:43:12
| 263,710,168
| 1
| 0
|
BSD-3-Clause
| 2020-05-13T18:20:09
| 2020-05-13T18:20:08
| null |
UTF-8
|
Python
| false
| false
| 3,796
|
py
|
# -*- coding: utf-8 -*-
# Copyright 2015 The Chromium OS Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Tests for cros_logging."""
from __future__ import print_function
import sys
from chromite.lib import cros_logging as logging
from chromite.lib import cros_test_lib
class CrosloggingTest(cros_test_lib.OutputTestCase):
"""Test logging works as expected."""
def setUp(self):
self.logger = logging.getLogger()
sh = logging.StreamHandler(sys.stdout)
self.logger.addHandler(sh)
def AssertLogContainsMsg(self, msg, functor, *args, **kwargs):
"""Asserts that calling functor logs a line that contains msg.
Args:
msg: The message to look for.
functor: A function taking no arguments to test.
      *args, **kwargs: passthrough arguments to AssertOutputContainsLine.
"""
with self.OutputCapturer():
functor()
self.AssertOutputContainsLine(msg, *args, **kwargs)
def testNotice(self):
"""Test logging.notice works and is between INFO and WARNING."""
msg = 'notice message'
self.logger.setLevel(logging.INFO)
self.AssertLogContainsMsg(msg, lambda: logging.notice(msg))
self.logger.setLevel(logging.WARNING)
self.AssertLogContainsMsg(msg, lambda: logging.notice(msg), invert=True)
def testPrintBuildbotFunctionsNoMarker(self):
"""PrintBuildbot* without markers should not be recognized by buildbot."""
self.AssertLogContainsMsg('@@@STEP_LINK@',
lambda: logging.PrintBuildbotLink('name', 'url'),
check_stderr=True, invert=True)
    self.AssertLogContainsMsg('@@@STEP_TEXT@',
lambda: logging.PrintBuildbotStepText('text'),
check_stderr=True, invert=True)
self.AssertLogContainsMsg('@@@STEP_WARNINGS@@@',
logging.PrintBuildbotStepWarnings,
check_stderr=True, invert=True)
self.AssertLogContainsMsg('@@@STEP_FAILURE@@@',
logging.PrintBuildbotStepFailure,
check_stderr=True, invert=True)
self.AssertLogContainsMsg('@@@BUILD_STEP',
lambda: logging.PrintBuildbotStepName('name'),
check_stderr=True, invert=True)
self.AssertLogContainsMsg(
'@@@SET_BUILD_PROPERTY',
lambda: logging.PrintBuildbotSetBuildProperty('name', {'a': 'value'}),
check_stderr=True, invert=True)
def testPrintBuildbotFunctionsWithMarker(self):
"""PrintBuildbot* with markers should be recognized by buildbot."""
logging.EnableBuildbotMarkers()
self.AssertLogContainsMsg('@@@STEP_LINK@name@url@@@',
lambda: logging.PrintBuildbotLink('name', 'url'),
check_stderr=True)
self.AssertLogContainsMsg('@@@STEP_TEXT@text@@@',
lambda: logging.PrintBuildbotStepText('text'),
check_stderr=True)
self.AssertLogContainsMsg('@@@STEP_WARNINGS@@@',
logging.PrintBuildbotStepWarnings,
check_stderr=True)
self.AssertLogContainsMsg('@@@STEP_FAILURE@@@',
logging.PrintBuildbotStepFailure,
check_stderr=True)
self.AssertLogContainsMsg('@@@BUILD_STEP@name@@@',
lambda: logging.PrintBuildbotStepName('name'),
check_stderr=True)
self.AssertLogContainsMsg(
'@@@SET_BUILD_PROPERTY@name@"value"@@@',
lambda: logging.PrintBuildbotSetBuildProperty('name', 'value'),
check_stderr=True)
|
[
"arnaud@geometry.ee"
] |
arnaud@geometry.ee
|
1eed5a6b4240f4958b2f31bd6573c8e90732fd33
|
d3ce58c4576431df14de0990f45cfd574f0aa45f
|
/.history/user/views_20201012031349.py
|
c2b13bfdbd18379d7610d2eee0c80aff2d6309f3
|
[] |
no_license
|
rahulsolankib/portfolio
|
fe93f0e6b0b28990f0b9fad84dbf7c3aa07243c4
|
281ed429e2590376aee4649b2ea7b3e8facaf6f1
|
refs/heads/master
| 2023-01-02T06:55:21.319094
| 2020-10-26T08:55:22
| 2020-10-26T08:55:22
| 305,586,595
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 659
|
py
|
from django.shortcuts import render,redirect
from django.contrib.auth.forms import UserCreationForm
from django.contrib import messages
from .forms import UserRegisterForm
# Create your views here.
def register(request):
if request.method == 'POST':
form = UserRegisterForm(request.POST)
if form.is_valid():
form.save()
username = form.cleaned_data.get('username')
messages.success(request, f'Account created for {username}!')
return redirect('Start-Page')
else:
        form = UserRegisterForm()
return render(request,'user/register.html',{'form':form,'title':'Register Here!'})
|
[
"rahulsolankib@gmail.com"
] |
rahulsolankib@gmail.com
|
30a6d77a390396db8c45a785cd1129a10f348acb
|
b8dfb4371270042da2828b8f3e3da9a1ee9d0e83
|
/chat/migrations/0002_message_created.py
|
296f446a5b7beab992b26586c1d1b4b66dbb2116
|
[] |
no_license
|
skiboorg/puzzle_game
|
4bcfcad8a5e9647b413cab3b6f05b7aaa92811b3
|
c38a6b5af9c50da0a1e978df3851d7d78db41dfe
|
refs/heads/master
| 2023-01-22T15:48:58.962659
| 2020-11-25T07:27:01
| 2020-11-25T07:27:01
| 263,743,360
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 391
|
py
|
# Generated by Django 3.0.6 on 2020-05-16 12:16
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('chat', '0001_initial'),
]
operations = [
migrations.AddField(
model_name='message',
name='created',
field=models.DateTimeField(auto_now_add=True, null=True),
),
]
|
[
"ddnnss.i1@gmail.com"
] |
ddnnss.i1@gmail.com
|
f14882f969949e9cb17aeffd927c279b87368ab3
|
1d483945b82db39d2c5a0cd31522c2780c2c43ad
|
/my_app/ss_lib/Ssheet_class.py
|
6b13db5aa008fc257c6cecf841cf911bc6309015
|
[] |
no_license
|
jpisano99/scrub_email
|
f2dfaa47437a1e5102baa62687df84e855be8835
|
0aaff6d8ffd5adc6549d490c396d8ae125ca6ad4
|
refs/heads/master
| 2020-07-24T22:35:07.855516
| 2019-09-16T15:06:52
| 2019-09-16T15:06:52
| 208,071,291
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 6,060
|
py
|
from .smartsheet_basic_functions import *
import my_app.my_secrets  # provides the password store referenced just below
ss_config = dict(
SS_TOKEN=my_app.my_secrets.passwords["SS_TOKEN"]
)
class Ssheet:
ss_token = ss_config['SS_TOKEN']
ss = smartsheet.Smartsheet(ss_token)
def __init__(self, name, meta_data_only=False):
self.name = name
self.sheet = {}
self.id = 0
self.columns = {}
self.rows = {}
self.col_name_idx = {}
self.col_id_idx = {}
self.refresh(meta_data_only)
def refresh(self, meta_data_only):
self.sheet = ss_get_sheet(self.ss, self.name)
self.id = self.sheet['id']
# If this self.sheet does NOT exist the self.id will equal -1
# so skip refreshing the following to avoid an error
if self.id != -1:
self.columns = ss_get_col_data(self.ss, self.id)
# self.rows = ss_get_row_data(self.ss, self.id)
self.col_name_idx = ss_col_name_idx(self.ss, self.columns)
self.col_id_idx = ss_col_id_idx(self.ss, self.columns)
if not meta_data_only:
self.rows = ss_get_row_data(self.ss, self.id)
def row_lookup(self, col_name, row_value):
# Return a list of all row_ids
# Where col_name contains row_value
row_ids = []
col_id = self.col_name_idx[col_name]
for row in self.rows:
cells = row['cells']
for cell in cells:
if cell['columnId'] == col_id:
cell_val = cell['value'] if 'value' in cell else '' # In case cell has no value assign null
if cell_val == row_value:
row_ids.append(row['id'])
return row_ids
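    # Example (sheet and column taken from the __main__ demo below):
    #   row_ids = my_ss.row_lookup('cisco_owner_name', 'Chris McHenry')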
def get_rows(self):
row_dict = {}
for row in self.rows:
row_record = {}
for cell in row['cells']:
raw_cell_val = cell['value'] if 'value' in cell else ''
raw_col_name = self.col_id_idx[cell['columnId']]
row_record[raw_col_name] = raw_cell_val
row_dict[row['rowNumber']] = row_record
return row_dict
def add_rows(self, add_rows):
ss_add_rows(self.ss, self.id, add_rows)
return
def del_rows(self, del_rows):
ss_del_rows(self.ss, self.id, del_rows)
return
def add_cols(self, add_cols):
ss_add_column(self.ss, self.id, add_cols)
return
def del_cols(self, del_cols):
ss_del_column(self.ss, self.id, del_cols)
return
def mod_cell(self, col_id, row_dict):
ss_mod_cell(self.ss, self.id, col_id, row_dict)
return
def create_sheet(self, name, _col_dict):
# If our self.id was a -1 we can create a sheet
# Just pass in a 'name' and a col_dict
# Example(s):
# my_columns = [{'primary': True, 'title': 'ERP Customer Name', 'type': 'TEXT_NUMBER'},
# {'title': 'End Customer Ultimate Name', 'type': 'TEXT_NUMBER'},
# {'title': 'col1', 'type': 'CHECKBOX', 'symbol': 'STAR'}]
ss_create_sheet(self.ss, name, _col_dict)
self.name = name
self.refresh(True)
return
def delete_sheet(self):
ss_delete_sheet(self.ss, self.name)
self.refresh(True)
return
def __repr__(self):
return "Ssheet('{}')".format(self.name)
# def __iter__(self):
# return self
#
# def __next__(self):
# self.index += 1
# if self.index == len(self.sql_to_ss):
# raise StopIteration
# return self.sql_to_ss[self.index]
if __name__ == "__main__":
my_ss = Ssheet('Tetration On-Demand POV Raw Data')
print(my_ss)
print("Sheet Data: ", my_ss.sheet)
print(my_ss.sheet['id'])
print("Columns: ", my_ss.columns)
print("Column Dict: ", my_ss.col_dict)
print("Row Data: ", my_ss.rows)
exit()
print('Row IDs: ', my_ss.row_lookup('cisco_owner_name', 'Chris McHenry'))
# Add Columns Example
# my_cols = []
# my_cols.append({
# 'title': 'New Picklist Column 2',
# 'type': 'PICKLIST',
# 'options': [
# 'First',
# 'Second',
# 'Third'],
# 'index': 4})
# my_cols.append({
# 'title': 'New Date Column1',
# 'type': 'DATE',
# 'validation': True,
# 'index': 4})
# my_ss.add_cols(my_cols)
# my_ss.refresh()
# Delete Column Example
# my_col_id = my_ss.col_dict['New Date Column']
# my_ss.del_cols(my_col_id)
# my_ss.refresh()
# # Add Rows Example
# my_col_id = my_ss.col_dict['cisco_owner_name']
# my_col1_id = my_ss.col_dict['cisco_owner']
#
# cell_data = []
# my_rows = []
# cell_data.append({
# 'column_id': my_col_id,
# 'value': 'blanche',
# 'strict': False})
# cell_data.append({
# 'column_id': my_col1_id,
# 'value': 'stan',
# 'strict': False})
# my_rows.append(cell_data)
#
# cell_data = []
# cell_data.append({
# 'column_id': my_col1_id,
# 'value': 'blanche',
# 'strict': False})
# my_rows.append(cell_data)
#
# my_ss.add_rows(my_rows)
# # Call this to update our sheet object
# my_ss.refresh()
# print("Added Rows: ", len(my_ss.rows))
# Delete Rows Example
# print("Deleted # of Rows BEFORE: ", len(my_ss.rows))
# print('Row IDs: ', my_ss.row_lookup('cisco_owner', 'stan'))
# rows_to_delete = my_ss.row_lookup('cisco_owner', 'stan')
# my_ss.del_rows(rows_to_delete)
# my_ss.refresh()
# print("Deleted # of Rows AFTER: ", len(my_ss.rows))
# Modify Cells Example
my_row_ids = my_ss.row_lookup('cisco_owner_name', 'ang')
    my_col_id = my_ss.col_name_idx['company_name']
new_val = 'client director'
my_row_dict = {}
for id in my_row_ids:
my_row_dict[id] = new_val
my_ss.mod_cell(my_col_id, my_row_dict)
my_ss.refresh()
exit()
# RESPONSE DEBUG CODE - DO NOT DELETE
# print(json.dumps(my_ss.rows, indent=2))
|
[
"jpisano@cisco.com"
] |
jpisano@cisco.com
|
c7687c1fc9bb0a10bffee0abdafcf0a9bdbfa4c2
|
0366bccae8841bbf6ecaad70660aae89bb0f6394
|
/19_OOP_polymorphism.py/4_example.py
|
6b6eaf7e022d461d103323dc43a5c7a9f62a3f52
|
[] |
no_license
|
KobiShashs/Python
|
8a5bdddcaef84b455795c5393cbacee5967493f7
|
e748973ad0b3e12c5fb87648783531783282832a
|
refs/heads/master
| 2021-04-05T20:18:57.715805
| 2020-04-02T21:51:44
| 2020-04-02T21:51:44
| 248,597,057
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 692
|
py
|
class BigThing:
def __init__(self, something):
self._something = something
def size(self):
        if isinstance(self._something, int):
print(self._something)
else:
print(len(self._something))
class BigCat(BigThing):
def __init__(self, something, weight):
super().__init__(something)
self._weight = weight
def size(self):
if(self._weight > 20):
print("Very Fat")
elif (self._weight > 15):
print("Fat")
else:
print("OK")
def main():
    my_thing = BigThing("balloon")
    my_thing.size()
    cutie = BigCat("mitzy", 22)
    cutie.size()
main()
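# Polymorphism in action: BigThing.size prints len("balloon") == 7, while the
# BigCat override ignores _something and prints "Very Fat" because 22 > 20.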
|
[
"kobi.shasha@gmail.com"
] |
kobi.shasha@gmail.com
|
09171235eb4b872b6ae2dd265aee80bd82353d98
|
f09dc121f213f2881df3572288b7ee5b39246d73
|
/aliyun-python-sdk-ecs/aliyunsdkecs/request/v20140526/ModifyInstanceMaintenanceAttributesRequest.py
|
2c3e59f0c7a53b3514866f689d76e78e1a24c31f
|
[
"Apache-2.0"
] |
permissive
|
hetw/aliyun-openapi-python-sdk
|
2f31378ad6be0896fb8090423f607e9c7d3ae774
|
7443eacee9fbbaa93c7975c6dbec92d3c364c577
|
refs/heads/master
| 2023-01-19T22:42:36.214770
| 2020-12-04T10:55:14
| 2020-12-04T10:55:14
| 318,689,093
| 1
| 0
|
NOASSERTION
| 2020-12-05T03:03:03
| 2020-12-05T03:03:03
| null |
UTF-8
|
Python
| false
| false
| 3,168
|
py
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
from aliyunsdkecs.endpoint import endpoint_data
class ModifyInstanceMaintenanceAttributesRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'Ecs', '2014-05-26', 'ModifyInstanceMaintenanceAttributes')
self.set_method('POST')
if hasattr(self, "endpoint_map"):
setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
if hasattr(self, "endpoint_regional"):
setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())
def get_ResourceOwnerId(self):
return self.get_query_params().get('ResourceOwnerId')
def set_ResourceOwnerId(self,ResourceOwnerId):
self.add_query_param('ResourceOwnerId',ResourceOwnerId)
def get_MaintenanceWindows(self):
return self.get_query_params().get('MaintenanceWindow')
def set_MaintenanceWindows(self, MaintenanceWindows):
for depth1 in range(len(MaintenanceWindows)):
if MaintenanceWindows[depth1].get('StartTime') is not None:
self.add_query_param('MaintenanceWindow.' + str(depth1 + 1) + '.StartTime', MaintenanceWindows[depth1].get('StartTime'))
if MaintenanceWindows[depth1].get('EndTime') is not None:
self.add_query_param('MaintenanceWindow.' + str(depth1 + 1) + '.EndTime', MaintenanceWindows[depth1].get('EndTime'))
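    # A minimal usage sketch (the request instance `req` and times are
    # hypothetical):
    #   req.set_MaintenanceWindows([{'StartTime': '02:00:00Z', 'EndTime': '03:00:00Z'}])
    # adds MaintenanceWindow.1.StartTime and MaintenanceWindow.1.EndTime params.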
def get_ActionOnMaintenance(self):
return self.get_query_params().get('ActionOnMaintenance')
def set_ActionOnMaintenance(self,ActionOnMaintenance):
self.add_query_param('ActionOnMaintenance',ActionOnMaintenance)
def get_ResourceOwnerAccount(self):
return self.get_query_params().get('ResourceOwnerAccount')
def set_ResourceOwnerAccount(self,ResourceOwnerAccount):
self.add_query_param('ResourceOwnerAccount',ResourceOwnerAccount)
def get_OwnerAccount(self):
return self.get_query_params().get('OwnerAccount')
def set_OwnerAccount(self,OwnerAccount):
self.add_query_param('OwnerAccount',OwnerAccount)
def get_OwnerId(self):
return self.get_query_params().get('OwnerId')
def set_OwnerId(self,OwnerId):
self.add_query_param('OwnerId',OwnerId)
def get_InstanceIds(self):
return self.get_query_params().get('InstanceId')
def set_InstanceIds(self, InstanceIds):
for depth1 in range(len(InstanceIds)):
if InstanceIds[depth1] is not None:
self.add_query_param('InstanceId.' + str(depth1 + 1) , InstanceIds[depth1])
|
[
"sdk-team@alibabacloud.com"
] |
sdk-team@alibabacloud.com
|
175ca4dbc99b9d492d6ae7075d14c6c264b624c7
|
503d2f8f5f5f547acb82f7299d86886691966ca5
|
/atcoder/diverta2019_b.py
|
91c65200aac281406b1f81daf84852c467ff3220
|
[] |
no_license
|
Hironobu-Kawaguchi/atcoder
|
3fcb649cb920dd837a1ced6713bbb939ecc090a9
|
df4b55cc7d557bf61607ffde8bda8655cf129017
|
refs/heads/master
| 2023-08-21T14:13:13.856604
| 2023-08-12T14:53:03
| 2023-08-12T14:53:03
| 197,216,790
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 271
|
py
|
# https://atcoder.jp/contests/diverta2019/tasks/diverta2019_b
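# Count triples (r, g, b) of non-negative integers with r*R + g*G + b*B == N:
# brute-force r and g, then check the remainder is a non-negative multiple of B.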
R, G, B, N = map(int, input().split())
ans = 0
for r in range(N//R+1):
tmp = N - r * R
for g in range(tmp//G+1):
if (N - r * R - g * G) % B == 0:
ans += 1
print(ans)
|
[
"hironobukawaguchi3@gmail.com"
] |
hironobukawaguchi3@gmail.com
|
fa67cb1779f7feb966a476e6984cc58af6ff7cf3
|
688be2b7e2aef0f7484f390d0a1dc105e3217b5e
|
/product/urls.py
|
9819a5d59007108354d65c5c2464a9f98ab6b2fc
|
[] |
no_license
|
extreme1337/olx-clone
|
0fffac94f3d6bce0085bc52526357a6f2b81bd47
|
d7ad9e5d90f9dbb73d1fbe8b82b8d0d2b6996922
|
refs/heads/main
| 2023-03-01T11:00:22.760807
| 2021-02-10T08:27:13
| 2021-02-10T08:27:13
| 337,080,866
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 311
|
py
|
from django.urls import path
from . import views
app_name = 'product'
urlpatterns = [
path('', views.productlist, name='product_list'),
path('<slug:category_slug>', views.productlist, name='product_list_category'),
path('detail/<slug:product_slug>', views.productdetail, name='product_detail'),
]
|
[
"marko.miseljic.14@gmail.com"
] |
marko.miseljic.14@gmail.com
|
a5776fa23dd4a48ace919efe7fb4709bc7d2f969
|
8f24e443e42315a81028b648e753c50967c51c78
|
/dashboard/modules/runtime_env/runtime_env_agent.py
|
01ccd94af5a6ab54712ca8864c05bb328d00c4c4
|
[
"MIT",
"BSD-3-Clause",
"Apache-2.0"
] |
permissive
|
simon-mo/ray
|
d07efdada8d05c6e10417f96e8dfc35f9ad33397
|
1e42e6cd15e2fb96c217cba8484e59ed0ef4b0c8
|
refs/heads/master
| 2023-03-06T00:09:35.758834
| 2022-12-23T18:46:48
| 2022-12-23T18:46:48
| 122,156,396
| 4
| 2
|
Apache-2.0
| 2023-03-04T08:56:56
| 2018-02-20T04:47:06
|
Python
|
UTF-8
|
Python
| false
| false
| 23,830
|
py
|
import asyncio
import json
import logging
import os
import time
import traceback
from collections import defaultdict
from dataclasses import dataclass
from typing import Callable, Dict, List, Set, Tuple
from ray._private.ray_constants import (
DEFAULT_RUNTIME_ENV_TIMEOUT_SECONDS,
)
import ray.dashboard.consts as dashboard_consts
import ray.dashboard.modules.runtime_env.runtime_env_consts as runtime_env_consts
import ray.dashboard.utils as dashboard_utils
from ray._private.async_compat import create_task
from ray._private.ray_logging import setup_component_logger
from ray._private.runtime_env.conda import CondaPlugin
from ray._private.runtime_env.container import ContainerManager
from ray._private.runtime_env.context import RuntimeEnvContext
from ray._private.runtime_env.java_jars import JavaJarsPlugin
from ray._private.runtime_env.pip import PipPlugin
from ray._private.runtime_env.plugin import (
RuntimeEnvPlugin,
create_for_plugin_if_needed,
)
from ray._private.utils import get_or_create_event_loop
from ray._private.runtime_env.plugin import RuntimeEnvPluginManager
from ray._private.runtime_env.py_modules import PyModulesPlugin
from ray._private.runtime_env.working_dir import WorkingDirPlugin
from ray.core.generated import (
agent_manager_pb2,
runtime_env_agent_pb2,
runtime_env_agent_pb2_grpc,
)
from ray.core.generated.runtime_env_common_pb2 import (
RuntimeEnvState as ProtoRuntimeEnvState,
)
from ray.runtime_env import RuntimeEnv, RuntimeEnvConfig
default_logger = logging.getLogger(__name__)
# TODO(edoakes): this is used for unit tests. We should replace it with a
# better pluggability mechanism once available.
SLEEP_FOR_TESTING_S = os.environ.get("RAY_RUNTIME_ENV_SLEEP_FOR_TESTING_S")
@dataclass
class CreatedEnvResult:
# Whether or not the env was installed correctly.
success: bool
# If success is True, will be a serialized RuntimeEnvContext
# If success is False, will be an error message.
result: str
# The time to create a runtime env in ms.
creation_time_ms: int
# e.g., "working_dir"
UriType = str
class ReferenceTable:
"""
The URI reference table which is used for GC.
When the reference count is decreased to zero,
the URI should be removed from this table and
added to cache if needed.
"""
def __init__(
self,
uris_parser: Callable[[RuntimeEnv], Tuple[str, UriType]],
unused_uris_callback: Callable[[List[Tuple[str, UriType]]], None],
unused_runtime_env_callback: Callable[[str], None],
):
# Runtime Environment reference table. The key is serialized runtime env and
# the value is reference count.
self._runtime_env_reference: Dict[str, int] = defaultdict(int)
# URI reference table. The key is URI parsed from runtime env and the value
# is reference count.
self._uri_reference: Dict[str, int] = defaultdict(int)
self._uris_parser = uris_parser
self._unused_uris_callback = unused_uris_callback
self._unused_runtime_env_callback = unused_runtime_env_callback
        # Sources in this set (e.g. the Ray client server) don't send the
        # `DeleteRuntimeEnvIfPossible` RPC when the client exits. The URIs won't
        # be leaked because the reference count is reset to zero when the job
        # finishes.
self._reference_exclude_sources: Set[str] = {
"client_server",
}
def _increase_reference_for_uris(self, uris):
default_logger.debug(f"Increase reference for uris {uris}.")
for uri, _ in uris:
self._uri_reference[uri] += 1
def _decrease_reference_for_uris(self, uris):
default_logger.debug(f"Decrease reference for uris {uris}.")
unused_uris = list()
for uri, uri_type in uris:
if self._uri_reference[uri] > 0:
self._uri_reference[uri] -= 1
if self._uri_reference[uri] == 0:
unused_uris.append((uri, uri_type))
del self._uri_reference[uri]
else:
default_logger.warn(f"URI {uri} does not exist.")
if unused_uris:
default_logger.info(f"Unused uris {unused_uris}.")
self._unused_uris_callback(unused_uris)
return unused_uris
def _increase_reference_for_runtime_env(self, serialized_env: str):
default_logger.debug(f"Increase reference for runtime env {serialized_env}.")
self._runtime_env_reference[serialized_env] += 1
def _decrease_reference_for_runtime_env(self, serialized_env: str):
default_logger.debug(f"Decrease reference for runtime env {serialized_env}.")
unused = False
if self._runtime_env_reference[serialized_env] > 0:
self._runtime_env_reference[serialized_env] -= 1
if self._runtime_env_reference[serialized_env] == 0:
unused = True
del self._runtime_env_reference[serialized_env]
else:
default_logger.warn(f"Runtime env {serialized_env} does not exist.")
if unused:
default_logger.info(f"Unused runtime env {serialized_env}.")
self._unused_runtime_env_callback(serialized_env)
return unused
def increase_reference(
self, runtime_env: RuntimeEnv, serialized_env: str, source_process: str
) -> None:
if source_process in self._reference_exclude_sources:
return
self._increase_reference_for_runtime_env(serialized_env)
uris = self._uris_parser(runtime_env)
self._increase_reference_for_uris(uris)
def decrease_reference(
self, runtime_env: RuntimeEnv, serialized_env: str, source_process: str
) -> None:
if source_process in self._reference_exclude_sources:
return list()
self._decrease_reference_for_runtime_env(serialized_env)
uris = self._uris_parser(runtime_env)
self._decrease_reference_for_uris(uris)
@property
def runtime_env_refs(self) -> Dict[str, int]:
"""Return the runtime_env -> ref count mapping.
Returns:
The mapping of serialized runtime env -> ref count.
"""
return self._runtime_env_reference
class RuntimeEnvAgent(
dashboard_utils.DashboardAgentModule,
runtime_env_agent_pb2_grpc.RuntimeEnvServiceServicer,
):
"""An RPC server to create and delete runtime envs.
Attributes:
dashboard_agent: The DashboardAgent object contains global config.
"""
LOG_FILENAME = "runtime_env_agent.log"
def __init__(self, dashboard_agent):
super().__init__(dashboard_agent)
self._runtime_env_dir = dashboard_agent.runtime_env_dir
self._logging_params = dashboard_agent.logging_params
self._per_job_logger_cache = dict()
# Cache the results of creating envs to avoid repeatedly calling into
# conda and other slow calls.
self._env_cache: Dict[str, CreatedEnvResult] = dict()
# Maps a serialized runtime env to a lock that is used
# to prevent multiple concurrent installs of the same env.
self._env_locks: Dict[str, asyncio.Lock] = dict()
self._gcs_aio_client = self._dashboard_agent.gcs_aio_client
self._pip_plugin = PipPlugin(self._runtime_env_dir)
self._conda_plugin = CondaPlugin(self._runtime_env_dir)
self._py_modules_plugin = PyModulesPlugin(
self._runtime_env_dir, self._gcs_aio_client
)
self._java_jars_plugin = JavaJarsPlugin(
self._runtime_env_dir, self._gcs_aio_client
)
self._working_dir_plugin = WorkingDirPlugin(
self._runtime_env_dir, self._gcs_aio_client
)
self._container_manager = ContainerManager(dashboard_agent.temp_dir)
# TODO(architkulkarni): "base plugins" and third-party plugins should all go
# through the same code path. We should never need to refer to
# self._xxx_plugin, we should just iterate through self._plugins.
self._base_plugins: List[RuntimeEnvPlugin] = [
self._working_dir_plugin,
self._pip_plugin,
self._conda_plugin,
self._py_modules_plugin,
self._java_jars_plugin,
]
self._plugin_manager = RuntimeEnvPluginManager()
for plugin in self._base_plugins:
self._plugin_manager.add_plugin(plugin)
self._reference_table = ReferenceTable(
self.uris_parser,
self.unused_uris_processor,
self.unused_runtime_env_processor,
)
self._logger = default_logger
self._logging_params.update(filename=self.LOG_FILENAME)
self._logger = setup_component_logger(
logger_name=default_logger.name, **self._logging_params
)
# Don't propagate logs to the root logger, because these logs
# might contain sensitive information. Instead, these logs should
# be confined to the runtime env agent log file `self.LOG_FILENAME`.
self._logger.propagate = False
def uris_parser(self, runtime_env):
result = list()
for name, plugin_setup_context in self._plugin_manager.plugins.items():
plugin = plugin_setup_context.class_instance
uris = plugin.get_uris(runtime_env)
for uri in uris:
result.append((uri, UriType(name)))
return result
def unused_uris_processor(self, unused_uris: List[Tuple[str, UriType]]) -> None:
for uri, uri_type in unused_uris:
self._plugin_manager.plugins[str(uri_type)].uri_cache.mark_unused(uri)
def unused_runtime_env_processor(self, unused_runtime_env: str) -> None:
def delete_runtime_env():
del self._env_cache[unused_runtime_env]
self._logger.info("Runtime env %s deleted.", unused_runtime_env)
if unused_runtime_env in self._env_cache:
if not self._env_cache[unused_runtime_env].success:
loop = get_or_create_event_loop()
# Cache the bad runtime env result by ttl seconds.
loop.call_later(
dashboard_consts.BAD_RUNTIME_ENV_CACHE_TTL_SECONDS,
delete_runtime_env,
)
else:
delete_runtime_env()
def get_or_create_logger(self, job_id: bytes):
job_id = job_id.decode()
if job_id not in self._per_job_logger_cache:
params = self._logging_params.copy()
params["filename"] = f"runtime_env_setup-{job_id}.log"
params["logger_name"] = f"runtime_env_{job_id}"
params["propagate"] = False
per_job_logger = setup_component_logger(**params)
self._per_job_logger_cache[job_id] = per_job_logger
return self._per_job_logger_cache[job_id]
async def GetOrCreateRuntimeEnv(self, request, context):
self._logger.debug(
f"Got request from {request.source_process} to increase "
"reference for runtime env: "
f"{request.serialized_runtime_env}."
)
async def _setup_runtime_env(
runtime_env: RuntimeEnv,
serialized_runtime_env,
serialized_allocated_resource_instances,
):
allocated_resource: dict = json.loads(
serialized_allocated_resource_instances or "{}"
)
# Use a separate logger for each job.
per_job_logger = self.get_or_create_logger(request.job_id)
# TODO(chenk008): Add log about allocated_resource to
# avoid lint error. That will be moved to cgroup plugin.
per_job_logger.debug(f"Worker has resource :" f"{allocated_resource}")
context = RuntimeEnvContext(env_vars=runtime_env.env_vars())
await self._container_manager.setup(
runtime_env, context, logger=per_job_logger
)
# Warn about unrecognized fields in the runtime env.
for name, _ in runtime_env.plugins():
if name not in self._plugin_manager.plugins:
per_job_logger.warning(
f"runtime_env field {name} is not recognized by "
"Ray and will be ignored. In the future, unrecognized "
"fields in the runtime_env will raise an exception."
)
"""Run setup for each plugin unless it has already been cached."""
for (
plugin_setup_context
) in self._plugin_manager.sorted_plugin_setup_contexts():
plugin = plugin_setup_context.class_instance
uri_cache = plugin_setup_context.uri_cache
await create_for_plugin_if_needed(
runtime_env, plugin, uri_cache, context, per_job_logger
)
return context
async def _create_runtime_env_with_retry(
runtime_env,
serialized_runtime_env,
serialized_allocated_resource_instances,
setup_timeout_seconds,
) -> Tuple[bool, str, str]:
"""
            Create the runtime env, retrying up to RUNTIME_ENV_RETRY_TIMES on
            failure. This function won't raise exceptions.
Args:
runtime_env: The instance of RuntimeEnv class.
serialized_runtime_env: The serialized runtime env.
serialized_allocated_resource_instances: The serialized allocated
resource instances.
setup_timeout_seconds: The timeout of runtime environment creation.
Returns:
a tuple which contains result (bool), runtime env context (str), error
message(str).
"""
self._logger.info(
f"Creating runtime env: {serialized_env} with timeout "
f"{setup_timeout_seconds} seconds."
)
serialized_context = None
error_message = None
for _ in range(runtime_env_consts.RUNTIME_ENV_RETRY_TIMES):
try:
# python 3.6 requires the type of input is `Future`,
# python 3.7+ only requires the type of input is `Awaitable`
# TODO(Catch-Bull): remove create_task when ray drop python 3.6
runtime_env_setup_task = create_task(
_setup_runtime_env(
runtime_env,
serialized_env,
request.serialized_allocated_resource_instances,
)
)
runtime_env_context = await asyncio.wait_for(
runtime_env_setup_task, timeout=setup_timeout_seconds
)
serialized_context = runtime_env_context.serialize()
error_message = None
break
except Exception as e:
err_msg = f"Failed to create runtime env {serialized_env}."
self._logger.exception(err_msg)
error_message = "".join(
traceback.format_exception(type(e), e, e.__traceback__)
)
if isinstance(e, asyncio.TimeoutError):
hint = (
f"Failed due to timeout; check runtime_env setup logs"
" and consider increasing `setup_timeout_seconds` beyond "
f"the default of {DEFAULT_RUNTIME_ENV_TIMEOUT_SECONDS}."
"For example: \n"
' runtime_env={"config": {"setup_timeout_seconds":'
" 1800}, ...}\n"
)
error_message = hint + error_message
await asyncio.sleep(
runtime_env_consts.RUNTIME_ENV_RETRY_INTERVAL_MS / 1000
)
if error_message:
self._logger.error(
"Runtime env creation failed for %d times, "
"don't retry any more.",
runtime_env_consts.RUNTIME_ENV_RETRY_TIMES,
)
return False, None, error_message
else:
self._logger.info(
"Successfully created runtime env: %s, the context: %s",
serialized_env,
serialized_context,
)
return True, serialized_context, None
try:
serialized_env = request.serialized_runtime_env
runtime_env = RuntimeEnv.deserialize(serialized_env)
except Exception as e:
self._logger.exception(
"[Increase] Failed to parse runtime env: " f"{serialized_env}"
)
return runtime_env_agent_pb2.GetOrCreateRuntimeEnvReply(
status=agent_manager_pb2.AGENT_RPC_STATUS_FAILED,
error_message="".join(
traceback.format_exception(type(e), e, e.__traceback__)
),
)
# Increase reference
self._reference_table.increase_reference(
runtime_env, serialized_env, request.source_process
)
if serialized_env not in self._env_locks:
# async lock to prevent the same env being concurrently installed
self._env_locks[serialized_env] = asyncio.Lock()
async with self._env_locks[serialized_env]:
if serialized_env in self._env_cache:
                result = self._env_cache[serialized_env]
if result.success:
context = result.result
self._logger.info(
"Runtime env already created "
f"successfully. Env: {serialized_env}, "
f"context: {context}"
)
return runtime_env_agent_pb2.GetOrCreateRuntimeEnvReply(
status=agent_manager_pb2.AGENT_RPC_STATUS_OK,
serialized_runtime_env_context=context,
)
else:
error_message = result.result
self._logger.info(
"Runtime env already failed. "
f"Env: {serialized_env}, "
f"err: {error_message}"
)
# Recover the reference.
self._reference_table.decrease_reference(
runtime_env, serialized_env, request.source_process
)
return runtime_env_agent_pb2.GetOrCreateRuntimeEnvReply(
status=agent_manager_pb2.AGENT_RPC_STATUS_FAILED,
error_message=error_message,
)
if SLEEP_FOR_TESTING_S:
self._logger.info(f"Sleeping for {SLEEP_FOR_TESTING_S}s.")
time.sleep(int(SLEEP_FOR_TESTING_S))
runtime_env_config = RuntimeEnvConfig.from_proto(request.runtime_env_config)
        # According to the documentation of `asyncio.wait_for`,
        # None disables the timeout logic.
setup_timeout_seconds = (
None
if runtime_env_config["setup_timeout_seconds"] == -1
else runtime_env_config["setup_timeout_seconds"]
)
start = time.perf_counter()
(
successful,
serialized_context,
error_message,
) = await _create_runtime_env_with_retry(
runtime_env,
serialized_env,
request.serialized_allocated_resource_instances,
setup_timeout_seconds,
)
creation_time_ms = int(round((time.perf_counter() - start) * 1000, 0))
if not successful:
# Recover the reference.
self._reference_table.decrease_reference(
runtime_env, serialized_env, request.source_process
)
# Add the result to env cache.
self._env_cache[serialized_env] = CreatedEnvResult(
successful,
serialized_context if successful else error_message,
creation_time_ms,
)
# Reply the RPC
return runtime_env_agent_pb2.GetOrCreateRuntimeEnvReply(
status=agent_manager_pb2.AGENT_RPC_STATUS_OK
if successful
else agent_manager_pb2.AGENT_RPC_STATUS_FAILED,
serialized_runtime_env_context=serialized_context,
error_message=error_message,
)
async def DeleteRuntimeEnvIfPossible(self, request, context):
self._logger.info(
f"Got request from {request.source_process} to decrease "
"reference for runtime env: "
f"{request.serialized_runtime_env}."
)
try:
runtime_env = RuntimeEnv.deserialize(request.serialized_runtime_env)
except Exception as e:
self._logger.exception(
"[Decrease] Failed to parse runtime env: "
f"{request.serialized_runtime_env}"
)
return runtime_env_agent_pb2.GetOrCreateRuntimeEnvReply(
status=agent_manager_pb2.AGENT_RPC_STATUS_FAILED,
error_message="".join(
traceback.format_exception(type(e), e, e.__traceback__)
),
)
self._reference_table.decrease_reference(
runtime_env, request.serialized_runtime_env, request.source_process
)
return runtime_env_agent_pb2.DeleteRuntimeEnvIfPossibleReply(
status=agent_manager_pb2.AGENT_RPC_STATUS_OK
)
async def GetRuntimeEnvsInfo(self, request, context):
"""Return the runtime env information of the node."""
# TODO(sang): Currently, it only includes runtime_env information.
# We should include the URI information which includes,
# URIs
# Caller
# Ref counts
# Cache information
# Metrics (creation time & success)
# Deleted URIs
limit = request.limit if request.HasField("limit") else -1
runtime_env_states = defaultdict(ProtoRuntimeEnvState)
runtime_env_refs = self._reference_table.runtime_env_refs
for runtime_env, ref_cnt in runtime_env_refs.items():
runtime_env_states[runtime_env].runtime_env = runtime_env
runtime_env_states[runtime_env].ref_cnt = ref_cnt
for runtime_env, result in self._env_cache.items():
runtime_env_states[runtime_env].runtime_env = runtime_env
runtime_env_states[runtime_env].success = result.success
if not result.success:
runtime_env_states[runtime_env].error = result.result
runtime_env_states[runtime_env].creation_time_ms = result.creation_time_ms
reply = runtime_env_agent_pb2.GetRuntimeEnvsInfoReply()
count = 0
for runtime_env_state in runtime_env_states.values():
if limit != -1 and count >= limit:
break
count += 1
reply.runtime_env_states.append(runtime_env_state)
reply.total = len(runtime_env_states)
return reply
async def run(self, server):
if server:
runtime_env_agent_pb2_grpc.add_RuntimeEnvServiceServicer_to_server(
self, server
)
@staticmethod
def is_minimal_module():
return True
|
[
"noreply@github.com"
] |
simon-mo.noreply@github.com
|
cc097148e3d7e18a8f47a9cb3025b59a1f608088
|
c102085883a0c066f3d7edf7d81e4057ed745748
|
/pypy3/lilac.py
|
767f629c87e8c52708e572cf27fe02b4ef234ad9
|
[] |
no_license
|
jcstr/arch4edu
|
58afe3a59727bb57afd1decdd6988392d151c29b
|
76104e81ef0097a6c21f3d241b92772656b39703
|
refs/heads/master
| 2020-05-15T15:19:40.301478
| 2019-04-20T06:21:14
| 2019-04-20T06:21:14
| 182,370,458
| 0
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 990
|
py
|
#!/usr/bin/env python3
from lilaclib import *
update_on = [{'archpkg': 'pypy3'}]
build_prefix = ['extra-armv6h', 'extra-armv7h']
time_limit_hours = 72
def pre_build():
download_official_pkgbuild('pypy3')
add_arch(['armv6h', 'armv7h'])
for line in edit_file('PKGBUILD'):
if line.startswith('source=('):
print(line.replace('(', '(a243e4e0b21c.patch::"https://bitbucket.org/pypy/pypy/commits/a243e4e0b21c968ba3fb42e3a575be24a2d6461b/raw"\n'))
elif line.startswith('sha512sums=('):
print(line.replace('(', '("480e1e9fc11d703ad167ca0fe9473e5216b02f2b39a1251ac9f673252d65a7837cbbcfbebba8a941542ef5c044cb6021b83cec218cdede12b3cfd2fa28e5dff2"\n'))
elif line.startswith(' patch'):
print(line)
print(' patch -Np1 -i ${srcdir}/a243e4e0b21c.patch')
else:
print(line)
def post_build():
git_add_files('PKGBUILD')
git_commit()
if __name__ == '__main__':
single_main('extra-x86_64')
|
[
"i@jingbei.li"
] |
i@jingbei.li
|
6f9cd87d90dcc4a0affe0aaa017f87908eeb611c
|
b41b996b4a14f11bb3d7676b4539725a93c2d586
|
/SourceCode/Codesignal-Selenium/SourceNew/Py3/createArray.py3
|
528b8cb6fad90081036480d7d75d4d0320b130f2
|
[] |
no_license
|
mquandvr/selenium
|
ee2c0e7febb15f4db4d33a8575726f67c48bde05
|
d7bb4c95d4d0756c66cbf3c69387318fc07219f2
|
refs/heads/master
| 2020-06-05T05:49:57.476867
| 2019-06-18T10:55:27
| 2019-06-18T10:55:27
| 192,335,493
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 44
|
py3
|
def createArray(size):
    return [1] * size
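# e.g. createArray(3) -> [1, 1, 1]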
|
[
"11130052@st.hcmuaf.edu.vn"
] |
11130052@st.hcmuaf.edu.vn
|
b0dfaa6f3182873be00588c860c03b8b3989b65d
|
52b5773617a1b972a905de4d692540d26ff74926
|
/.history/cylicRot_20200715001137.py
|
b769c8a9d001bcac07d9fa2a6eb925b43dd3895f
|
[] |
no_license
|
MaryanneNjeri/pythonModules
|
56f54bf098ae58ea069bf33f11ae94fa8eedcabc
|
f4e56b1e4dda2349267af634a46f6b9df6686020
|
refs/heads/master
| 2022-12-16T02:59:19.896129
| 2020-09-11T12:05:22
| 2020-09-11T12:05:22
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 543
|
py
|
# given an array rotate it k times to the right
def rotate(A, K):
    # Rotating right once moves the last element to the front:
    # [3, 8, 9, 7, 6] -> [6, 3, 8, 9, 7], i.e. [A[-1]] + A[:-1].
    # Doing that K times is the same as slicing the last K elements
    # to the front in one step.
    if A:
        K %= len(A)
        A = A[-K:] + A[:-K]
    print(A)
rotate([3, 8, 9, 7, 6], 3)
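# Expected output: [9, 7, 6, 3, 8]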
|
[
"mary.jereh@gmail.com"
] |
mary.jereh@gmail.com
|
720904416d61d2eb860e892e6a3bedc06be3beb9
|
0febba6ea7a07550e3c7677dfb4037f3ea9662e4
|
/mysite/settings.py
|
dea9ef93351be4c69ed1fcb3f4a893bfa014fe75
|
[] |
no_license
|
abubych/my-first-blog
|
88341a6e2b9a006aa22fc1b243230c95e51a400a
|
8e39bd6a0d964bcaa99db7bfbe89a1755d75c113
|
refs/heads/master
| 2020-09-26T22:09:31.931145
| 2019-12-06T15:03:10
| 2019-12-06T15:03:10
| 219,852,145
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,155
|
py
|
"""
Django settings for mysite project.
Generated by 'django-admin startproject' using Django 2.2.7.
For more information on this file, see
https://docs.djangoproject.com/en/2.2/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/2.2/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/2.2/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'dqq83fazc22^&ki+opifgrs2vucm!$@%hp5k!w&ux409mcenr)'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'blog',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'mysite.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'mysite.wsgi.application'
# Database
# https://docs.djangoproject.com/en/2.2/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/2.2/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/2.2/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'Europe/Kiev'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/2.2/howto/static-files/
STATIC_URL = '/static/'
STATIC_ROOT = os.path.join(BASE_DIR, 'static')
|
[
"you@example.com"
] |
you@example.com
|
12f2d2c34e5181bb8883b981c7dd86ec76bba947
|
c8ea4fe0dccca928b92234b72a7a8d9cd6cf4d14
|
/tests/eth2/beacon/types/test_proposer_slashings.py
|
2d70091de7d9c186937546a3f8b370369a0c2677
|
[
"MIT"
] |
permissive
|
kclowes/trinity
|
b6bc4f7c57ade1651cf9b2ca9ca88493f3485007
|
f0400c78a6d828dd266b1f31dd3fa7aacf97486d
|
refs/heads/master
| 2020-04-16T16:11:28.531260
| 2019-01-14T17:03:56
| 2019-01-14T17:44:58
| 165,728,497
| 0
| 0
|
MIT
| 2019-01-14T20:17:01
| 2019-01-14T20:17:00
| null |
UTF-8
|
Python
| false
| false
| 665
|
py
|
from eth2.beacon.types.proposer_slashings import (
ProposerSlashing,
)
def test_defaults(sample_proposer_slashing_params):
slashing = ProposerSlashing(**sample_proposer_slashing_params)
assert slashing.proposer_index == sample_proposer_slashing_params['proposer_index']
assert slashing.proposal_data_1 == sample_proposer_slashing_params['proposal_data_1']
assert slashing.proposal_signature_1 == sample_proposer_slashing_params['proposal_signature_1']
assert slashing.proposal_data_2 == sample_proposer_slashing_params['proposal_data_2']
assert slashing.proposal_signature_2 == sample_proposer_slashing_params['proposal_signature_2']
|
[
"hwwang156@gmail.com"
] |
hwwang156@gmail.com
|
cdd80aa7d4fd23ee92ffbfa5085fa51294fb6d22
|
614cad3588af9c0e51e0bb98963075e3195e92f5
|
/models/onet/config.py
|
5ab763d53ab51f06d527d863d05d4dbdd603df17
|
[] |
no_license
|
dragonlong/haoi-pose
|
2810dae7f9afd0a26b3d0a5962fd9ae8a5abac58
|
43388efd911feecde588b27a753de353b8e28265
|
refs/heads/master
| 2023-07-01T14:18:29.029484
| 2021-08-10T10:57:42
| 2021-08-10T10:57:42
| 294,602,794
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 4,532
|
py
|
import torch
import torch.distributions as dist
from torch import nn
import os
import __init__
from models.encoder import encoder_dict
from models.onet import models, training, generation
# from im2mesh import data
# from im2mesh import config
import dataset as data
from utils import config
def get_model(cfg, device=None, dataset=None, **kwargs):
''' Return the Occupancy Network model.
Args:
cfg (dict): imported yaml config
device (device): pytorch device
dataset (dataset): dataset
'''
decoder = cfg['model']['decoder']
encoder = cfg['model']['encoder']
encoder_latent = cfg['model']['encoder_latent']
dim = cfg['data']['dim']
z_dim = cfg['model']['z_dim']
c_dim = cfg['model']['c_dim']
decoder_kwargs = cfg['model']['decoder_kwargs']
encoder_kwargs = cfg['model']['encoder_kwargs']
encoder_latent_kwargs = cfg['model']['encoder_latent_kwargs']
decoder = models.decoder_dict[decoder](
dim=dim, z_dim=z_dim, c_dim=c_dim,
**decoder_kwargs
)
if z_dim != 0:
encoder_latent = models.encoder_latent_dict[encoder_latent](
dim=dim, z_dim=z_dim, c_dim=c_dim,
**encoder_latent_kwargs
)
else:
encoder_latent = None
if encoder == 'idx':
encoder = nn.Embedding(len(dataset), c_dim)
elif encoder is not None:
encoder = encoder_dict[encoder](
c_dim=c_dim,
**encoder_kwargs
)
else:
encoder = None
p0_z = get_prior_z(cfg, device)
model = models.OccupancyNetwork(
decoder, encoder, encoder_latent, p0_z, device=device
)
return model
def get_trainer(model, optimizer, cfg, device, **kwargs):
''' Returns the trainer object.
Args:
model (nn.Module): the Occupancy Network model
optimizer (optimizer): pytorch optimizer object
cfg (dict): imported yaml config
device (device): pytorch device
'''
threshold = cfg['test']['threshold']
out_dir = cfg['training']['out_dir']
vis_dir = os.path.join(out_dir, 'vis')
input_type = cfg['data']['input_type']
trainer = training.Trainer(
model, optimizer,
device=device, input_type=input_type,
vis_dir=vis_dir, threshold=threshold,
eval_sample=cfg['training']['eval_sample'],
)
return trainer
def get_generator(model, cfg, device, **kwargs):
''' Returns the generator object.
Args:
model (nn.Module): Occupancy Network model
cfg (dict): imported yaml config
device (device): pytorch device
'''
preprocessor = config.get_preprocessor(cfg, device=device)
generator = generation.Generator3D(
model,
device=device,
threshold=cfg['test']['threshold'],
resolution0=cfg['generation']['resolution_0'],
upsampling_steps=cfg['generation']['upsampling_steps'],
sample=cfg['generation']['use_sampling'],
refinement_step=cfg['generation']['refinement_step'],
simplify_nfaces=cfg['generation']['simplify_nfaces'],
preprocessor=preprocessor,
)
return generator
def get_prior_z(cfg, device, **kwargs):
''' Returns prior distribution for latent code z.
Args:
cfg (dict): imported yaml config
device (device): pytorch device
'''
z_dim = cfg['model']['z_dim']
p0_z = dist.Normal(
torch.zeros(z_dim, device=device),
torch.ones(z_dim, device=device)
)
return p0_z
def get_data_fields(mode, cfg):
''' Returns the data fields.
Args:
mode (str): the mode which is used
cfg (dict): imported yaml config
'''
points_transform = data.SubsamplePoints(cfg['data']['points_subsample'])
with_transforms = cfg['model']['use_camera']
fields = {}
fields['points'] = data.PointsField(
cfg['data']['points_file'], points_transform,
with_transforms=with_transforms,
unpackbits=cfg['data']['points_unpackbits'],
)
if mode in ('val', 'test'):
points_iou_file = cfg['data']['points_iou_file']
voxels_file = cfg['data']['voxels_file']
if points_iou_file is not None:
fields['points_iou'] = data.PointsField(
points_iou_file,
with_transforms=with_transforms,
unpackbits=cfg['data']['points_unpackbits'],
)
if voxels_file is not None:
fields['voxels'] = data.VoxelsField(voxels_file)
return fields
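# Minimal usage sketch (hypothetical config path; assumes config.load_config
# mirrors the im2mesh helper this module was adapted from):
#   cfg = config.load_config('configs/onet.yaml')
#   model = get_model(cfg, device=torch.device('cuda'))
#   trainer = get_trainer(model, torch.optim.Adam(model.parameters()), cfg, 'cuda')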
|
[
"lxiaol9@vt.edu"
] |
lxiaol9@vt.edu
|
7e404da28c3fe14b73b70c442e023410f00a3ade
|
a62fdd0beb6c47cc704c1192b68b0bcfcd024304
|
/Python/II/19-TXTEDIT2/3/ui_form.py
|
a4d1d6b8f2c118bffdff979ebf87d1580422b60e
|
[] |
no_license
|
a6461/Qt-PyQt
|
da1895b4faccda80b8079ecdca79f1ea525daa0a
|
404bd7fbbc432ebeaa1a486fc8e005d47aed9cfd
|
refs/heads/master
| 2020-03-14T22:16:48.714825
| 2018-06-12T20:45:58
| 2018-06-12T20:45:58
| 131,817,506
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 7,492
|
py
|
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'form.ui'
#
# Created by: PyQt5 UI code generator 5.10.1
#
# WARNING! All changes made in this file will be lost!
from PyQt5 import QtCore, QtGui, QtWidgets
class Ui_Form(object):
def setupUi(self, Form):
Form.setObjectName("Form")
Form.resize(500, 300)
self.centralwidget = QtWidgets.QWidget(Form)
self.centralwidget.setObjectName("centralwidget")
self.gridLayout = QtWidgets.QGridLayout(self.centralwidget)
self.gridLayout.setContentsMargins(0, 0, 0, 0)
self.gridLayout.setSpacing(0)
self.gridLayout.setObjectName("gridLayout")
self.textEdit = QtWidgets.QTextEdit(self.centralwidget)
self.textEdit.setObjectName("textEdit")
self.gridLayout.addWidget(self.textEdit, 0, 0, 1, 1)
Form.setCentralWidget(self.centralwidget)
self.menubar = QtWidgets.QMenuBar(Form)
self.menubar.setGeometry(QtCore.QRect(0, 0, 500, 21))
self.menubar.setObjectName("menubar")
self.menuFile = QtWidgets.QMenu(self.menubar)
self.menuFile.setObjectName("menuFile")
self.menuFormat = QtWidgets.QMenu(self.menubar)
self.menuFormat.setObjectName("menuFormat")
self.menuColors = QtWidgets.QMenu(self.menuFormat)
self.menuColors.setObjectName("menuColors")
Form.setMenuBar(self.menubar)
self.actionNew = QtWidgets.QAction(Form)
self.actionNew.setObjectName("actionNew")
self.actionOpen = QtWidgets.QAction(Form)
self.actionOpen.setObjectName("actionOpen")
self.actionSave = QtWidgets.QAction(Form)
self.actionSave.setObjectName("actionSave")
self.actionSaveAs = QtWidgets.QAction(Form)
self.actionSaveAs.setObjectName("actionSaveAs")
self.actionExit = QtWidgets.QAction(Form)
self.actionExit.setObjectName("actionExit")
self.actionBold = QtWidgets.QAction(Form)
self.actionBold.setCheckable(True)
font = QtGui.QFont()
font.setBold(True)
font.setWeight(75)
self.actionBold.setFont(font)
self.actionBold.setObjectName("actionBold")
self.actionItalic = QtWidgets.QAction(Form)
self.actionItalic.setCheckable(True)
font = QtGui.QFont()
font.setItalic(True)
self.actionItalic.setFont(font)
self.actionItalic.setObjectName("actionItalic")
self.actionUnderline = QtWidgets.QAction(Form)
self.actionUnderline.setCheckable(True)
font = QtGui.QFont()
font.setUnderline(True)
self.actionUnderline.setFont(font)
self.actionUnderline.setObjectName("actionUnderline")
self.actionStrikeOut = QtWidgets.QAction(Form)
self.actionStrikeOut.setCheckable(True)
font = QtGui.QFont()
font.setStrikeOut(True)
self.actionStrikeOut.setFont(font)
self.actionStrikeOut.setObjectName("actionStrikeOut")
self.actionLeft = QtWidgets.QAction(Form)
self.actionLeft.setCheckable(True)
self.actionLeft.setChecked(True)
self.actionLeft.setObjectName("actionLeft")
self.actionCenter = QtWidgets.QAction(Form)
self.actionCenter.setCheckable(True)
self.actionCenter.setObjectName("actionCenter")
self.actionRight = QtWidgets.QAction(Form)
self.actionRight.setCheckable(True)
self.actionRight.setObjectName("actionRight")
self.actionFontColor = QtWidgets.QAction(Form)
self.actionFontColor.setObjectName("actionFontColor")
self.actionBackgroundColor = QtWidgets.QAction(Form)
self.actionBackgroundColor.setObjectName("actionBackgroundColor")
self.actionFont = QtWidgets.QAction(Form)
self.actionFont.setObjectName("actionFont")
self.actionJustify = QtWidgets.QAction(Form)
self.actionJustify.setCheckable(True)
self.actionJustify.setObjectName("actionJustify")
self.menuFile.addAction(self.actionNew)
self.menuFile.addAction(self.actionOpen)
self.menuFile.addAction(self.actionSave)
self.menuFile.addAction(self.actionSaveAs)
self.menuFile.addSeparator()
self.menuFile.addAction(self.actionExit)
self.menuColors.addAction(self.actionFontColor)
self.menuColors.addAction(self.actionBackgroundColor)
self.menuFormat.addAction(self.actionBold)
self.menuFormat.addAction(self.actionItalic)
self.menuFormat.addAction(self.actionUnderline)
self.menuFormat.addAction(self.actionStrikeOut)
self.menuFormat.addSeparator()
self.menuFormat.addAction(self.actionLeft)
self.menuFormat.addAction(self.actionCenter)
self.menuFormat.addAction(self.actionRight)
self.menuFormat.addAction(self.actionJustify)
self.menuFormat.addSeparator()
self.menuFormat.addAction(self.menuColors.menuAction())
self.menuFormat.addAction(self.actionFont)
self.menubar.addAction(self.menuFile.menuAction())
self.menubar.addAction(self.menuFormat.menuAction())
self.retranslateUi(Form)
self.actionExit.triggered.connect(Form.close)
QtCore.QMetaObject.connectSlotsByName(Form)
def retranslateUi(self, Form):
_translate = QtCore.QCoreApplication.translate
Form.setWindowTitle(_translate("Form", "Text Editor"))
self.menuFile.setTitle(_translate("Form", "&File"))
self.menuFormat.setTitle(_translate("Form", "F&ormat"))
self.menuColors.setTitle(_translate("Form", "&Colors"))
self.actionNew.setText(_translate("Form", "&New"))
self.actionNew.setShortcut(_translate("Form", "Ctrl+N"))
self.actionOpen.setText(_translate("Form", "&Open"))
self.actionOpen.setShortcut(_translate("Form", "Ctrl+O"))
self.actionSave.setText(_translate("Form", "&Save"))
self.actionSave.setShortcut(_translate("Form", "Ctrl+S"))
self.actionSaveAs.setText(_translate("Form", "Save &As"))
self.actionSaveAs.setShortcut(_translate("Form", "F12"))
self.actionExit.setText(_translate("Form", "E&xit"))
self.actionBold.setText(_translate("Form", "&Bold"))
self.actionBold.setShortcut(_translate("Form", "Ctrl+B"))
self.actionItalic.setText(_translate("Form", "&Italic"))
self.actionItalic.setShortcut(_translate("Form", "Ctrl+I"))
self.actionUnderline.setText(_translate("Form", "&Underline"))
self.actionUnderline.setShortcut(_translate("Form", "Ctrl+U"))
self.actionStrikeOut.setText(_translate("Form", "S&trikeout"))
self.actionStrikeOut.setShortcut(_translate("Form", "Ctrl+T"))
self.actionLeft.setText(_translate("Form", "&Left alignment"))
self.actionLeft.setShortcut(_translate("Form", "Ctrl+L"))
self.actionCenter.setText(_translate("Form", "C&enter"))
self.actionCenter.setShortcut(_translate("Form", "Ctrl+E"))
self.actionRight.setText(_translate("Form", "&Right alignment"))
self.actionRight.setShortcut(_translate("Form", "Ctrl+R"))
self.actionFontColor.setText(_translate("Form", "&Font color..."))
self.actionBackgroundColor.setText(_translate("Form", "&Background color..."))
self.actionFont.setText(_translate("Form", "&Font..."))
self.actionJustify.setText(_translate("Form", "&Justify"))
self.actionJustify.setShortcut(_translate("Form", "Ctrl+J"))
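# --- Editor's note (hedged sketch, not in the original file): a minimal
# launcher for this generated UI. The class name `Ui_Form` is an assumption
# based on typical pyuic5 output; setCentralWidget implies Form is a QMainWindow.
# if __name__ == "__main__":
#     import sys
#     app = QtWidgets.QApplication(sys.argv)
#     window = QtWidgets.QMainWindow()
#     ui = Ui_Form()        # hypothetical class name
#     ui.setupUi(window)
#     window.show()
#     sys.exit(app.exec_())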
|
[
"a6461@yandex.ru"
] |
a6461@yandex.ru
|
70cb2348662187ca633adcf0cdd99bff60a49366
|
cd40b7cc395f36740000ed4a4144b1c0666ab0fd
|
/hstrat/test_drive/generate_template_phylogeny/_evolve_fitness_trait_population_/__init__.py
|
120b54b41250aec6ea25e07702d1f1cbcbf146b6
|
[
"MIT"
] |
permissive
|
mmore500/hstrat
|
94fd22c86a87a5707590b9398ef679444ed82d6d
|
b2d2caded1db5e2dc681d9f171d7c74b322c55c3
|
refs/heads/master
| 2023-08-31T03:36:44.457576
| 2023-08-25T14:39:29
| 2023-08-25T14:39:29
| 464,531,144
| 5
| 2
|
NOASSERTION
| 2023-08-25T13:07:52
| 2022-02-28T15:11:45
|
Python
|
UTF-8
|
Python
| false
| false
| 575
|
py
|
"""Implementation for `evolve_fitness_trait_population`."""
from ._apply_island_migrations import _apply_island_migrations
from ._apply_mutation import _apply_mutation
from ._apply_niche_invasions import _apply_niche_invasions
from ._get_island_id import _get_island_id
from ._get_niche_id import _get_niche_id
from ._select_parents import _select_parents
# adapted from https://stackoverflow.com/a/31079085
__all__ = [
"_apply_island_migrations",
"_apply_mutation",
"_apply_niche_invasions",
"_get_island_id",
"_get_niche_id",
"_select_parents",
]
|
[
"mmore500.login+gpg@gmail.com"
] |
mmore500.login+gpg@gmail.com
|
c84c74cb7e09d90664a0eec0ef4a514fc53fc7d3
|
77311ad9622a7d8b88707d7cee3f44de7c8860cb
|
/res_bw/scripts/common/lib/email/mime/application.py
|
c28b866a4335bc589db5c85b4d96b4a6b04c04fd
|
[] |
no_license
|
webiumsk/WOT-0.9.14-CT
|
9b193191505a4560df4e872e022eebf59308057e
|
cfe0b03e511d02c36ce185f308eb48f13ecc05ca
|
refs/heads/master
| 2021-01-10T02:14:10.830715
| 2016-02-14T11:59:59
| 2016-02-14T11:59:59
| 51,606,676
| 0
| 0
| null | null | null | null |
WINDOWS-1250
|
Python
| false
| false
| 1,525
|
py
|
# 2016.02.14 12:47:54 Central Europe (standard time)
# Embedded file name: scripts/common/Lib/email/mime/application.py
"""Class representing application/* type MIME documents."""
__all__ = ['MIMEApplication']
from email import encoders
from email.mime.nonmultipart import MIMENonMultipart
class MIMEApplication(MIMENonMultipart):
"""Class for generating application/* MIME documents."""
def __init__(self, _data, _subtype = 'octet-stream', _encoder = encoders.encode_base64, **_params):
"""Create an application/* type MIME document.
_data is a string containing the raw application data.
_subtype is the MIME content type subtype, defaulting to
'octet-stream'.
_encoder is a function which will perform the actual encoding for
transport of the application data, defaulting to base64 encoding.
Any additional keyword arguments are passed to the base class
constructor, which turns them into parameters on the Content-Type
header.
"""
if _subtype is None:
raise TypeError('Invalid application MIME subtype')
MIMENonMultipart.__init__(self, 'application', _subtype, **_params)
self.set_payload(_data)
_encoder(self)
return
# okay decompyling c:\Users\PC\wotsources\files\originals\res_bw\scripts\common\lib\email\mime\application.pyc
# decompiled 1 files: 1 okay, 0 failed, 0 verify failed
# 2016.02.14 12:47:54 Central Europe (standard time)
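# --- Editor's note (hedged sketch, not in the original file): typical use of
# the class above to wrap raw data in an application/octet-stream part:
# part = MIMEApplication('\x00\x01raw-payload')
# print part['Content-Type']               # application/octet-stream
# print part['Content-Transfer-Encoding']  # base64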
|
[
"info@webium.sk"
] |
info@webium.sk
|
91385116b3fdd55a4da2fd97f1c0d310dc86f93b
|
3b74b57d5f513e98d1ad5404c09aeb7b7a03ed63
|
/solow/tests_solow/test_ces.py
|
b39266ad585f3e2e99a4867cb3fa058212f302e9
|
[] |
no_license
|
vgregory757/QuantEcon.applications
|
0b7a13aa05798c15ff5943adb99d4c01585a2d76
|
4f542e20034b1ca6cfbca2708cb6e29509af401d
|
refs/heads/master
| 2020-04-05T22:52:45.978767
| 2017-03-15T19:53:30
| 2017-03-15T19:53:30
| 51,265,260
| 2
| 1
| null | 2016-02-07T20:39:34
| 2016-02-07T20:39:33
| null |
UTF-8
|
Python
| false
| false
| 2,652
|
py
|
"""
Test suite for ces.py module.
@author : David R. Pugh
@date : 2014-12-08
"""
import nose
import numpy as np
from ....models.solow import ces
params = {'A0': 1.0, 'g': 0.02, 'L0': 1.0, 'n': 0.02, 's': 0.15,
'alpha': 0.33, 'sigma': 1.1, 'delta': 0.05}
model = ces.CESModel(params)
def test_steady_state():
"""Compare analytic steady state with numerical steady state."""
eps = 1e-1
for g in np.linspace(eps, 0.05, 4):
for n in np.linspace(eps, 0.05, 4):
for s in np.linspace(eps, 1-eps, 4):
for alpha in np.linspace(eps, 1-eps, 4):
for delta in np.linspace(eps, 1-eps, 4):
for sigma in np.linspace(eps, 2.0, 4):
tmp_params = {'A0': 1.0, 'g': g, 'L0': 1.0, 'n': n,
's': s, 'alpha': alpha, 'delta': delta,
'sigma': sigma}
try:
model.params = tmp_params
# use root finder to compute the steady state
actual_ss = model.steady_state
expected_ss = model.find_steady_state(1e-12, 1e9)
# conduct the test (numerical precision limits!)
nose.tools.assert_almost_equals(actual_ss,
expected_ss,
places=6)
# handles params with non finite steady state
except AttributeError:
continue
def test_validate_params():
"""Testing validation of params attribute."""
invalid_params_0 = {'A0': 1.0, 'g': 0.02, 'L0': 1.0, 'n': 0.02, 's': 0.15,
'alpha': 1.33, 'delta': 0.03, 'sigma': 1.2}
invalid_params_1 = {'A0': 1.0, 'g': 0.02, 'L0': 1.0, 'n': 0.02, 's': 0.15,
'alpha': 0.33, 'delta': 0.03, 'sigma': 0.0}
invalid_params_2 = {'A0': 1.0, 'g': 0.01, 'L0': 1.0, 'n': 0.01, 's': 0.12,
'alpha': 0.75, 'delta': 0.01, 'sigma': 2.0}
# alpha must be in (0, 1)
with nose.tools.assert_raises(AttributeError):
ces.CESModel(invalid_params_0)
# sigma must be strictly positive
with nose.tools.assert_raises(AttributeError):
ces.CESModel(invalid_params_1)
# parameters inconsistent with finite steady state
with nose.tools.assert_raises(AttributeError):
ces.CESModel(invalid_params_2)
|
[
"mamckay@gmail.com"
] |
mamckay@gmail.com
|
2c4b41bdd50001fb5c1c59e6267ace6e6328bb99
|
b44b690c96cfbaba35fa3cc32e8da4442adb9fad
|
/Python/0151. Reverse Words in a String.py
|
321978c8c6a222f9d220eacb369fc4854db04e12
|
[] |
no_license
|
faisalraza33/leetcode
|
24d610c6884e218719d82a5c79f1695cb6463d68
|
d7cf4ffba14c6f1ff4551634f4002b53dfeae9b7
|
refs/heads/master
| 2022-08-10T02:05:21.932664
| 2022-07-05T09:59:47
| 2022-07-05T09:59:47
| 238,060,131
| 0
| 0
| null | 2020-02-03T20:54:51
| 2020-02-03T20:54:50
| null |
UTF-8
|
Python
| false
| false
| 1,218
|
py
|
# Given an input string s, reverse the order of the words.
# A word is defined as a sequence of non-space characters. The words in s will be separated by at least one space.
# Return a string of the words in reverse order concatenated by a single space.
# Note that s may contain leading or trailing spaces or multiple spaces between two words. The returned string should only have a single space separating the words. Do not include any extra spaces.
#
# Example 1:
#
# Input: s = "the sky is blue"
# Output: "blue is sky the"
#
# Example 2:
#
# Input: s = " hello world "
# Output: "world hello"
# Explanation: Your reversed string should not contain leading or trailing spaces.
#
# Example 3:
#
# Input: s = "a good example"
# Output: "example good a"
# Explanation: You need to reduce multiple spaces between two words to a single space in the reversed string.
#
# Constraints:
#
# 1 <= s.length <= 10^4
# s contains English letters (upper-case and lower-case), digits, and spaces ' '.
# There is at least one word in s.
class Solution:
def reverseWords(self, s: str) -> str:
# "a b ".split(" ") # ['a', 'b', '']
# "a b ".split() # ['a', 'b']
return " ".join(s.split()[::-1])
|
[
"Hongbo.Miao@outlook.com"
] |
Hongbo.Miao@outlook.com
|
dafa4f1cff7207f3e58d1b7ccc1374c0626541d2
|
d993f821da125498b6dfb01792fcd24c83ae7e34
|
/selfself/Custom_Logger.py
|
b5e3ddece1261f6cbd1aac598eaa40a1be777d40
|
[] |
no_license
|
Arjuna1513/Python_Practice_Programs
|
2c8370d927c8bade2d2b0b5bd0345c7d5f139202
|
7c72600d72f68afee62ee64be25d961822429aeb
|
refs/heads/master
| 2020-06-24T02:36:03.186924
| 2019-07-25T14:31:02
| 2019-07-25T14:31:02
| 198,824,589
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 603
|
py
|
import logging
import inspect
class CustomLogger:
def custom_logger(self, logLevel=logging.INFO):
loggerName = inspect.stack()[1][3]
logger = logging.getLogger(loggerName)
logger.setLevel(logging.DEBUG)
fHandler = logging.FileHandler("Automation.log", mode='a')
fHandler.setLevel(logLevel)
formatter = logging.Formatter("%(asctime)s : %(name)s : %(levelname)s : %(message)s"
, datefmt="%d/%m/%Y %H:%M:%S")
fHandler.setFormatter(formatter)
logger.addHandler(fHandler)
return logger
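# --- Editor's note (hedged usage sketch, not in the original file). Caveat:
# every custom_logger() call attaches another FileHandler to the cached named
# logger, so repeated calls from the same function will duplicate lines in
# Automation.log.
if __name__ == "__main__":
    log = CustomLogger().custom_logger(logging.DEBUG)
    log.info("logger wired up")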
|
[
"malli00022@gmail.com"
] |
malli00022@gmail.com
|
e6b6647a488e26f74160f7438b57eade28904ee2
|
f1961c86e6da14f35c21d7235f4fc8a89fabdcad
|
/DailyProgrammer/DP20120716C.py
|
fbf41c1a899349a2b6e8c51a9ac8c6c23b1ec13b
|
[
"MIT"
] |
permissive
|
DayGitH/Python-Challenges
|
d4930bdd85cd1a977d8f6192775ca956a375fcde
|
bc32f1332a92fcc2dfa6f5ea4d95f8a8d64c3edf
|
refs/heads/master
| 2021-01-17T13:01:03.784523
| 2018-06-29T23:49:04
| 2018-06-29T23:49:04
| 58,497,683
| 2
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,015
|
py
|
"""
Write a program that is able to find all words in a Boggle [http://en.wikipedia.org/wiki/Boggle] board. For a word list,
you can use this file [http://code.google.com/p/dotnetperls-controls/downloads/detail?name=enable1.txt].
How many words can you find in the following 10x10 Boggle board?
T N L E P I A C N M
T R E H O C F E I W
S C E R E D A M E A
A O M O G O O F I E
G A C M H N L E R X
T D E R J T F O A S
I T A R T I N H N T
R N L Y I V S C R T
A X E P S S H A C F
I U I I I A I W T T
Thanks to Medicalizawhat for suggesting this problem (and to Cosmologicon for providing a word list and some
commentary) at /r/dailyprogrammer_ideas! If you have a problem that you think would be good for us, why not head
over there and suggest it?
"""
class Node:
""" from https://en.wikipedia.org/wiki/Trie#Algorithms """
def __init__(self):
self.children = {} # mapping from character ==> Node
self.value = None
def find(node, key):
""" from https://en.wikipedia.org/wiki/Trie#Algorithms """
for char in key:
if char in node.children:
node = node.children[char]
else:
return None
return node.value
def insert(root, string, value):
""" from https://en.wikipedia.org/wiki/Trie#Algorithms """
node = root
i = 0
while i < len(string):
if string[i] in node.children:
node = node.children[string[i]]
i += 1
else:
break
# append new nodes for the remaining characters, if any
while i < len(string):
node.children[string[i]] = Node()
node = node.children[string[i]]
i += 1
# store value in the terminal node
node.value = value
def create_trie():
with open('enable1.txt', 'r') as f:
data = f.read().split()
node = Node()
for d in data:
insert(node, d, d)
return node
def main():
node = create_trie()
print(find(node, 'decipherer'))
if __name__ == "__main__":
main()
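# --- Editor's note (hedged sketch, not in the original file): the board walk
# the docstring describes but the code above stops short of. `find_words` and
# `BOARD` are hypothetical names; paths use king moves without revisiting cells.
def find_words(board, root):
    rows, cols = len(board), len(board[0])
    found = set()
    def dfs(r, c, node, seen):
        ch = board[r][c].lower()
        if ch not in node.children:
            return
        node = node.children[ch]
        if node.value is not None:
            found.add(node.value)
        seen.add((r, c))
        for dr in (-1, 0, 1):
            for dc in (-1, 0, 1):
                nr, nc = r + dr, c + dc
                if (dr or dc) and 0 <= nr < rows and 0 <= nc < cols and (nr, nc) not in seen:
                    dfs(nr, nc, node, seen)
        seen.discard((r, c))
    for r in range(rows):
        for c in range(cols):
            dfs(r, c, root, set())
    return found
# e.g. BOARD = [row.split() for row in ["T N L E P I A C N M", "..."]]
#      print(len(find_words(BOARD, create_trie())))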
|
[
"akber91@gmail.com"
] |
akber91@gmail.com
|
af364988e9423021730917fdab606389a84c8240
|
7a1a65b0cda41ea204fad4848934db143ebf199a
|
/automatedprocesses_thirdstage/oath_core_yesterday_media.py
|
cdb09b67be3c5a1a2f87b835ad27edef4a1dd403
|
[] |
no_license
|
bpopovich44/ReaperSec
|
4b015e448ed5ce23316bd9b9e33966373daea9c0
|
22acba4d84313e62dbbf95cf2a5465283a6491b0
|
refs/heads/master
| 2021-05-02T18:26:11.875122
| 2019-06-22T15:02:09
| 2019-06-22T15:02:09
| 120,664,056
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 5,227
|
py
|
#!/usr/bin/python2.7
import sys
import json
import time
import datetime
import aol_api
from mysql.connector import MySQLConnection, Error
from python_dbconfig import read_db_config
from data_file import report_book
from data_file import platforms
from data_file import fees
todaysdate = time.strftime("%Y-%m-%d %H:%M:%S")
yesterday = datetime.date.fromordinal(datetime.date.today().toordinal()-1).strftime("%F")
def connect():
# """Gets AOL Data and writes them to a MySQL table"""
db = "mysql_sl"
report_type = "core_yesterday_media"
p_name = sys.argv[1]
p_id = platforms[p_name]["id"]
gross_rev = platforms[p_name]["fee"]
r = fees["aol_platform"]
a_cost = fees["aol_cost"]
platform_rev = p_name + "_revenue"
db_updated = False
# Connect To DB:
db_config = read_db_config(db)
try:
#print('Connecting to database...')
conn = MySQLConnection(**db_config)
if conn.is_connected():
#print('Connection established.')
cursor = conn.cursor()
# calls get_access_token function and starts script
logintoken = aol_api.get_access_token(p_name)
#print logintoken
for report in report_book[report_type][p_name]:
result = aol_api.run_existing_report(logintoken, str(report))
#print(result)
if len(result) == 0:
break
row_count_value = json.loads(result)['row_count']
if int(row_count_value) >= 1:
if db_updated == False:
sql = "DROP TABLE IF EXISTS " + p_name + "_core_yesterday_media"
cursor.execute(sql)
sql = "CREATE TABLE " + p_name + "_core_yesterday_media (date varchar(25), hour int, inventory_source varchar(255), \
media varchar(255), ad_opportunities bigint, market_opportunities bigint, ad_attempts bigint, \
ad_impressions bigint, ad_errors bigint, ad_revenue decimal(15, 5), aol_cost decimal(15,5), \
epiphany_gross_revenue decimal(15, 5)," + p_name + "_revenue decimal(15, 5), clicks int, \
iab_viewability_measurable_ad_impressions bigint, iab_viewable_ad_impressions bigint, platform int)"
cursor.execute(sql)
db_updated = True
print(str(todaysdate) + " Running " + p_name + "_core_yesterday_media report # " + str(report))
for x in json.loads(result)['data']:
date = x['row'][0]
hour = x['row'][1]
inventory_source = x['row'][2]
media = x['row'][3].replace('"', " ")
ad_opportunities = x['row'][4]
market_opportunities = x['row'][5]
ad_attempts = x['row'][6]
ad_impressions = x['row'][7]
ad_errors = x['row'][8]
ad_revenue = x['row'][9]
aol_cos = x['row'][9]
epiphany_gross_rev = x['row'][9]
platform_rev = x['row'][9]
clicks = x['row'][10]
iab_viewability_measurable_ad_impressions = x['row'][11]
iab_viewable_ad_impressions = x['row'][12]
platform = str(p_id)
                        row = (date, hour, inventory_source, media, ad_opportunities, market_opportunities, ad_attempts, \
                            ad_impressions, ad_errors, ad_revenue, aol_cos, epiphany_gross_rev, platform_rev, clicks, \
                            iab_viewability_measurable_ad_impressions, iab_viewable_ad_impressions, platform)
                        #print(row)
if p_name == 'dna':
aol_cost = "0"
epiphany_gross_revenue = "0"
platform_revenue = "0"
else:
aol_cost = float(float(aol_cos) * float(a_cost))
epiphany_gross_revenue = float(float(epiphany_gross_rev) * float(gross_rev))
platform_revenue = float(float(platform_rev) * float(r))
sql = """INSERT INTO """ + p_name + """_core_yesterday_media VALUES ("%s", "%s", "%s", "%s", "%s", "%s", \
"%s", "%s", "%s", "%s", "%s", "%s", "%s", "%s", "%s", "%s", "%s")""" % (date, hour, inventory_source, \
media, ad_opportunities, market_opportunities, ad_attempts, ad_impressions, ad_errors, ad_revenue, \
aol_cost, epiphany_gross_revenue, platform_revenue, clicks, iab_viewability_measurable_ad_impressions, \
iab_viewable_ad_impressions, platform)
cursor.execute(sql)
cursor.execute('commit')
else:
print('Connection failed')
except Error as error:
print(error)
finally:
conn.close()
#print('Connection closed.')
if __name__ == '__main__':
connect()
|
[
"bpopovich4@gmail.com"
] |
bpopovich4@gmail.com
|
da23b03efa139367d54c2fcb3e57fd0f9d9631df
|
a06586c101a31bf6c9a7dc307cf664120ac092fd
|
/Trakttv.bundle/Contents/Libraries/Shared/shove/stores/bsdb.py
|
0da028a82f4fcbd598ea314ed513ba07efaced6b
|
[] |
no_license
|
HaKDMoDz/Plex-Trakt-Scrobbler
|
22dd1d8275698761cb20a402bce4c5bef6e364f9
|
6d46cdd1bbb99a243b8628d6c3996d66bb427823
|
refs/heads/master
| 2021-01-22T00:10:18.699894
| 2015-05-25T23:52:45
| 2015-05-25T23:52:45
| 37,312,507
| 2
| 0
| null | 2015-06-12T09:00:54
| 2015-06-12T09:00:53
| null |
UTF-8
|
Python
| false
| false
| 1,271
|
py
|
# -*- coding: utf-8 -*-
'''
Berkeley Source Database Store.
shove's URI for BSDDB stores follows the form:
bsddb://<path>
Where the path is a URL path to a Berkeley database. Alternatively, the native
pathname to a Berkeley database can be passed as the 'engine' parameter.
'''
from threading import Condition
from stuf.six import b
try:
from bsddb import hashopen
except ImportError:
try:
from bsddb3 import hashopen
except ImportError:
raise ImportError('requires bsddb library')
from shove.store import SyncStore
from shove._compat import synchronized
__all__ = ['BSDBStore']
class BSDBStore(SyncStore):
'''Berkeley Source Database-based object storage frontend.'''
init = 'bsddb://'
def __init__(self, engine, **kw):
super(BSDBStore, self).__init__(engine, **kw)
self._store = hashopen(self._engine)
self._lock = Condition()
self.sync = self._store.sync
@synchronized
def __getitem__(self, key):
        return self.loads(self._store[b(key)], key)
@synchronized
def __setitem__(self, key, value):
self._store[b(key)] = self.dumps(value)
self.sync()
@synchronized
def __delitem__(self, key):
del self._store[b(key)]
self.sync()
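# --- Editor's note (hedged sketch, not in the original file): shove normally
# constructs this store from its URI scheme via the Shove front end; assuming
# that usual entry point:
# from shove import Shove
# store = Shove('bsddb:///tmp/cache.db')
# store['answer'] = 42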
|
[
"gardiner91@gmail.com"
] |
gardiner91@gmail.com
|
f29357782fe9d19f59b184769074665aac2d6031
|
0fa00ecf2dd671515dc001d4b14049ec6a0c1f1c
|
/custom_components/solcast_solar/sensor.py
|
4f8754b9a839da84530777f98558be4b93ca6e78
|
[
"Unlicense"
] |
permissive
|
bacco007/HomeAssistantConfig
|
d91a5368344f50abbea881bd1e6dfc57a0e456ca
|
8548d9999ddd54f13d6a307e013abcb8c897a74e
|
refs/heads/master
| 2023-08-30T07:07:33.571959
| 2023-08-29T20:00:00
| 2023-08-29T20:00:00
| 230,585,631
| 98
| 16
|
Unlicense
| 2023-09-09T08:28:39
| 2019-12-28T09:05:02
|
Python
|
UTF-8
|
Python
| false
| false
| 12,368
|
py
|
"""Support for Solcast PV forecast sensors."""
from __future__ import annotations
import logging
from typing import Final
from datetime import datetime as dt
from datetime import timedelta, timezone
from homeassistant.components.sensor import (SensorDeviceClass, SensorEntity,
SensorEntityDescription)
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import (ATTR_IDENTIFIERS, ATTR_MANUFACTURER,
ATTR_MODEL, ATTR_NAME, ENERGY_KILO_WATT_HOUR,
ENERGY_WATT_HOUR)
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.device_registry import DeviceEntryType
from homeassistant.helpers.entity import EntityCategory
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from homeassistant.helpers.update_coordinator import CoordinatorEntity
from .const import DOMAIN, ATTR_ENTRY_TYPE
from .coordinator import SolcastUpdateCoordinator
_LOGGER = logging.getLogger(__name__)
SENSORS: dict[str, SensorEntityDescription] = {
"total_kwh_forecast_today": SensorEntityDescription(
key="total_kwh_forecast_today",
device_class=SensorDeviceClass.ENERGY,
native_unit_of_measurement=ENERGY_KILO_WATT_HOUR,
name="Forecast Today",
icon="mdi:solar-power",
),
"peak_w_today": SensorEntityDescription(
key="peak_w_today",
device_class=SensorDeviceClass.ENERGY,
native_unit_of_measurement=ENERGY_WATT_HOUR,
name="Peak Forecast Today",
icon="mdi:solar-power",
),
"peak_w_time_today": SensorEntityDescription(
key="peak_w_time_today",
name="Peak Time Today",
icon="mdi:clock",
device_class=SensorDeviceClass.TIMESTAMP,
),
"forecast_this_hour": SensorEntityDescription(
key="forecast_this_hour",
device_class=SensorDeviceClass.ENERGY,
native_unit_of_measurement=ENERGY_WATT_HOUR,
name="Forecast This Hour",
icon="mdi:solar-power",
),
"forecast_remaining_today": SensorEntityDescription(
key="get_remaining_today",
device_class=SensorDeviceClass.ENERGY,
native_unit_of_measurement=ENERGY_KILO_WATT_HOUR,
name="Forecast Remaining Today",
icon="mdi:solar-power",
),
"forecast_next_hour": SensorEntityDescription(
key="forecast_next_hour",
device_class=SensorDeviceClass.ENERGY,
native_unit_of_measurement=ENERGY_WATT_HOUR,
name="Forecast Next Hour",
icon="mdi:solar-power",
),
"total_kwh_forecast_tomorrow": SensorEntityDescription(
key="total_kwh_forecast_tomorrow",
device_class=SensorDeviceClass.ENERGY,
native_unit_of_measurement=ENERGY_KILO_WATT_HOUR,
name="Forecast Tomorrow",
icon="mdi:solar-power",
),
"peak_w_tomorrow": SensorEntityDescription(
key="peak_w_tomorrow",
device_class=SensorDeviceClass.ENERGY,
native_unit_of_measurement=ENERGY_WATT_HOUR,
name="Peak Forecast Tomorrow",
icon="mdi:solar-power",
),
"peak_w_time_tomorrow": SensorEntityDescription(
key="peak_w_time_tomorrow",
name="Peak Time Tomorrow",
icon="mdi:clock",
device_class=SensorDeviceClass.TIMESTAMP,
),
"api_counter": SensorEntityDescription(
key="api_counter",
name="API Used",
icon="mdi:web-check",
entity_category=EntityCategory.DIAGNOSTIC,
),
"api_limit": SensorEntityDescription(
key="api_limit",
name="API Limit",
icon="mdi:web-check",
entity_category=EntityCategory.DIAGNOSTIC,
),
"lastupdated": SensorEntityDescription(
key="lastupdated",
device_class=SensorDeviceClass.TIMESTAMP,
name="API Last Polled",
icon="mdi:clock",
entity_category=EntityCategory.DIAGNOSTIC,
),
"total_kwh_forecast_d3": SensorEntityDescription(
key="total_kwh_forecast_d3",
device_class=SensorDeviceClass.ENERGY,
native_unit_of_measurement=ENERGY_KILO_WATT_HOUR,
name="Forecast D3",
icon="mdi:solar-power",
),
"total_kwh_forecast_d4": SensorEntityDescription(
key="total_kwh_forecast_d4",
device_class=SensorDeviceClass.ENERGY,
native_unit_of_measurement=ENERGY_KILO_WATT_HOUR,
name="Forecast D4",
icon="mdi:solar-power",
),
"total_kwh_forecast_d5": SensorEntityDescription(
key="total_kwh_forecast_d5",
device_class=SensorDeviceClass.ENERGY,
native_unit_of_measurement=ENERGY_KILO_WATT_HOUR,
name="Forecast D5",
icon="mdi:solar-power",
),
"total_kwh_forecast_d6": SensorEntityDescription(
key="total_kwh_forecast_d6",
device_class=SensorDeviceClass.ENERGY,
native_unit_of_measurement=ENERGY_KILO_WATT_HOUR,
name="Forecast D6",
icon="mdi:solar-power",
),
"total_kwh_forecast_d7": SensorEntityDescription(
key="total_kwh_forecast_d7",
device_class=SensorDeviceClass.ENERGY,
native_unit_of_measurement=ENERGY_KILO_WATT_HOUR,
name="Forecast D7",
icon="mdi:solar-power",
),
}
async def async_setup_entry(
hass: HomeAssistant,
entry: ConfigEntry,
async_add_entities: AddEntitiesCallback,
) -> None:
"""Set up the Solcast sensor."""
coordinator: SolcastUpdateCoordinator = hass.data[DOMAIN][entry.entry_id]
entities = []
for sensor_types in SENSORS:
sen = SolcastSensor(coordinator, SENSORS[sensor_types],entry, coordinator._version)
entities.append(sen)
for site in coordinator.solcast._sites:
k = SensorEntityDescription(
key=site["resource_id"],
name=site["name"],
icon="mdi:home",
device_class=SensorDeviceClass.ENERGY,
native_unit_of_measurement=ENERGY_KILO_WATT_HOUR,
entity_category=EntityCategory.CONFIG,
)
sen = RooftopSensor(coordinator, k,entry, coordinator._version)
entities.append(sen)
# k = SensorEntityDescription(
# key="solcast_has_update",
# name="Integration Update",
# icon="mdi:update",
# #device_class=SensorDeviceClass.ENERGY,
# #native_unit_of_measurement=ENERGY_KILO_WATT_HOUR,
# entity_category=EntityCategory.CONFIG,
# )
# sen = SolcastUpdate(coordinator, k,entry, coordinator._version)
# entities.append(sen)
async_add_entities(entities)
class SolcastSensor(CoordinatorEntity, SensorEntity):
"""Representation of a Seplos Sensor device."""
def __init__(
self,
coordinator: SolcastUpdateCoordinator,
entity_description: SensorEntityDescription,
entry: ConfigEntry,
version: str,
) -> None:
"""Initialize the sensor."""
super().__init__(coordinator)
self.entity_description = entity_description
#self._id = f"solcast_{entity_description.key}"
self.coordinator = coordinator
ATTRIBUTION: Final = "Data provided by Solcast Solar"
        self._attr_attribution = ATTRIBUTION
self._attributes = {}
self._attr_extra_state_attributes = {}
self._sensor_data = coordinator.get_sensor_value(entity_description.key)
self._attr_device_info = {
ATTR_IDENTIFIERS: {(DOMAIN, entry.entry_id)},
ATTR_NAME: "Solcast PV Forecast", #entry.title,
ATTR_MANUFACTURER: "Oziee",
ATTR_MODEL: "Solcast PV Forecast",
ATTR_ENTRY_TYPE: DeviceEntryType.SERVICE,
"sw_version": version,
"configuration_url": "https://toolkit.solcast.com.au/live-forecast",
#"configuration_url": f"https://toolkit.solcast.com.au/rooftop-sites/{entry.options[CONF_RESOURCE_ID]}/detail",
#"hw_version": entry.options[CONF_RESOURCE_ID],
}
self._unique_id = f"solcast_api_{entity_description.name}"
@property
def name(self):
"""Return the name of the device."""
return f"{self.entity_description.name}"
@property
def friendly_name(self):
"""Return the name of the device."""
return self.entity_description.name
@property
def unique_id(self):
"""Return the unique ID of the binary sensor."""
return f"solcast_{self._unique_id}"
@property
def extra_state_attributes(self):
"""Return the state extra attributes of the binary sensor."""
return self.coordinator.get_sensor_extra_attributes(self.entity_description.key)
@property
def native_value(self):
"""Return the value reported by the sensor."""
return self._sensor_data
@property
def should_poll(self) -> bool:
"""Return if the sensor should poll."""
return False
async def async_added_to_hass(self) -> None:
"""When entity is added to hass."""
await super().async_added_to_hass()
self.async_on_remove(
self.coordinator.async_add_listener(self._handle_coordinator_update)
)
@callback
def _handle_coordinator_update(self) -> None:
"""Handle updated data from the coordinator."""
self._sensor_data = self.coordinator.get_sensor_value(self.entity_description.key)
self.async_write_ha_state()
class RooftopSensor(CoordinatorEntity, SensorEntity):
"""Representation of a Seplos Sensor device."""
def __init__(
self,
coordinator: SolcastUpdateCoordinator,
entity_description: SensorEntityDescription,
entry: ConfigEntry,
version: str,
) -> None:
"""Initialize the sensor."""
super().__init__(coordinator)
self.entity_description = entity_description
#self._id = f"solcast_{entity_description.key}"
self.coordinator = coordinator
ATTRIBUTION: Final = "Data provided by Solcast Solar"
        self._attr_attribution = ATTRIBUTION
self._attributes = {}
self._attr_extra_state_attributes = {}
self._sensor_data = coordinator.get_site_value(entity_description.key)
self._attr_device_info = {
ATTR_IDENTIFIERS: {(DOMAIN, entry.entry_id)},
ATTR_NAME: "Solcast PV Forecast", #entry.title,
ATTR_MANUFACTURER: "Oziee",
ATTR_MODEL: "Solcast PV Forecast",
ATTR_ENTRY_TYPE: DeviceEntryType.SERVICE,
"sw_version": version,
"configuration_url": "https://toolkit.solcast.com.au/live-forecast",
#"configuration_url": f"https://toolkit.solcast.com.au/rooftop-sites/{entry.options[CONF_RESOURCE_ID]}/detail",
#"hw_version": entry.options[CONF_RESOURCE_ID],
}
self._unique_id = f"solcast_api_{entity_description.key}"
@property
def name(self):
"""Return the name of the device."""
return f"{self.entity_description.name}"
@property
def friendly_name(self):
"""Return the name of the device."""
return self.entity_description.name
@property
def unique_id(self):
"""Return the unique ID of the binary sensor."""
return f"solcast_{self._unique_id}"
@property
def extra_state_attributes(self):
"""Return the state extra attributes of the binary sensor."""
return self.coordinator.get_site_extra_attributes(self.entity_description.key)
@property
def native_value(self):
"""Return the value reported by the sensor."""
return self._sensor_data
@property
def should_poll(self) -> bool:
"""Return if the sensor should poll."""
return False
async def async_added_to_hass(self) -> None:
"""When entity is added to hass."""
await super().async_added_to_hass()
self.async_on_remove(
self.coordinator.async_add_listener(self._handle_coordinator_update)
)
@callback
def _handle_coordinator_update(self) -> None:
"""Handle updated data from the coordinator."""
self._sensor_data = self.coordinator.get_site_value(self.entity_description.key)
self.async_write_ha_state()
|
[
"thomas@thomasbaxter.info"
] |
thomas@thomasbaxter.info
|
957745c63aa4acbb57a1750949c8d2115760bb3c
|
d772869033c47a666622e9ee518bb306db5451a5
|
/unified/modules/main/categories/phone_sms/clicksend_sms/util.py
|
45a36a96360e84f47ec87f916ca256fc31d727b4
|
[] |
no_license
|
funny2code/unified_api
|
920f1e19b2304e331b019f8a531d412b8759e725
|
ffa28ba0e5c0bd8ad7dd44a468e3d1e777bba725
|
refs/heads/main
| 2023-08-31T16:00:17.074427
| 2021-10-04T04:09:45
| 2021-10-04T04:09:45
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 367
|
py
|
import clicksend_client
from flask import Response, request
def get_clicksend_configuration(clicksend_sms_headers):
'''Authentication for Clicksend API'''
configuration = clicksend_client.Configuration()
configuration.username = clicksend_sms_headers['username']
configuration.password = clicksend_sms_headers['password']
return configuration
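# --- Editor's note (hedged usage sketch, not in the original file); the
# headers dict and credential values below are hypothetical.
if __name__ == '__main__':
    demo_headers = {'username': 'demo-user', 'password': 'demo-api-key'}
    config = get_clicksend_configuration(demo_headers)
    print(config.username)  # demo-user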
|
[
"baidawardipendar@gmail.com"
] |
baidawardipendar@gmail.com
|
a4b7456e3c8f645d193567dd9cb63fb57ebd7f6d
|
729a5032e812ef85fe09d4e9682105288ab915fc
|
/elit/item/seed.py
|
2946e57dd876c784673d1cc23f34f3c5314d8600
|
[] |
no_license
|
junhg0211/elit
|
d2c60f2325efd37c5c20a5160157c296c8c45d49
|
b6c26f655b1d485b47483c6ef184ff6cb0c226f0
|
refs/heads/main
| 2023-07-18T07:20:26.519724
| 2021-08-30T01:57:08
| 2021-08-30T01:57:08
| 397,745,875
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,346
|
py
|
from asyncio import TimeoutError as AsyncioTimeoutError
from typing import Tuple
from discord import Embed
from discord.ext.commands import Bot, Context
from elit.exception import ChannelError
from elit.item import Item
from util import message_author_check, irago
class Seed(Item):
    name = 'Seed'
    type = 3
    description = 'Plants crops in the farm field.'
    buy_prise = 2
    async def use(self, amount: int, player, bot: Bot, ctx: Context) -> Tuple[str, Embed]:
        farm = player.get_farm()
        farm_channel = farm.get_channel(bot)
        if ctx.channel != farm_channel:
            raise ChannelError(f':x: {ctx.author.mention} **You cannot plant crops here!** '
                               f'Please try in {farm_channel.mention}.')
        await ctx.send(f':potted_plant: Enter the name of the crop to plant. __{amount}__ will be planted. '
                       f'(Typing `취소` ("cancel") or `cancel` cancels planting.)')
        try:
            message = await bot.wait_for('message', check=message_author_check(ctx.author), timeout=15)
        except AsyncioTimeoutError:
            raise AsyncioTimeoutError(f':x: {ctx.author.mention} **Planting has been cancelled.** '
                                      f'The crop-name prompt timed out...')
        else:
            if message.content in ('취소', 'cancel'):  # '취소' means "cancel"
                raise ValueError(':x: **Planting cancelled.**')
            elif message.content in ('자세히', 'specific'):  # '자세히' means "specific"
                raise ValueError(f':x: **A crop cannot be named __{message.content}__.**')
            crop_name = message.content
        if len(crop_name) > 16:
            raise ValueError(f':x: {ctx.author.mention} **That crop name is too long!** Crop names must be 16 characters or fewer!')
        amount, planted_at = farm.plant(crop_name, amount)
        embed = farm.get_planted_crop_by_name(crop_name).get_embed()
        return self.apply_use(amount, f':potted_plant: Planted __{amount}__ `{crop_name}` in '
                                      f'{farm_channel.mention}.'), embed
def get_prise_per_piece(self) -> int:
return 1
|
[
"junhg0211@gmail.com"
] |
junhg0211@gmail.com
|
8fdd8520b2283971949c09761c0766c98f0e3724
|
fd10bb1c3e08f429a421ea87e73939aea2187227
|
/src/model/modules/feed_forward_block.py
|
0bcba2db8594bdf7b66e406381a84e60e9e2198d
|
[] |
no_license
|
dertilo/speech-recognition-transformer
|
2b9e6336c57eafdbb0dde34c61166a0364f283f4
|
f3c4ef5bae631ce6bbdd9dcc78c529e4d39994ce
|
refs/heads/master
| 2022-12-13T06:04:56.939095
| 2020-07-07T09:39:12
| 2020-07-07T09:39:12
| 257,938,478
| 4
| 1
| null | 2022-11-28T12:48:00
| 2020-04-22T15:14:55
|
Python
|
UTF-8
|
Python
| false
| false
| 5,394
|
py
|
import torch as t
from src.model.modules.gelu import Gelu
import torch
from src.model.modules.low_rank_linear import LowRankLinear
class FeedForwardReZeroBlock(t.nn.Module):
"""
feed forward layer combine with add - layer norm - dropout
"""
def __init__(self, input_size, inner_size, dropout, use_low_rank=False):
super(FeedForwardReZeroBlock, self).__init__()
if not use_low_rank:
self.linear1 = t.nn.Linear(input_size, inner_size, bias=True)
t.nn.init.xavier_normal_(self.linear1.weight)
else:
self.linear1 = LowRankLinear(input_size, inner_size, rank=128)
self.gelu = Gelu()
self.dropout = t.nn.Dropout(dropout, inplace=True)
if not use_low_rank:
self.linear2 = t.nn.Linear(inner_size, input_size, bias=True)
t.nn.init.xavier_normal_(self.linear2.weight)
else:
self.linear2 = LowRankLinear(inner_size, input_size, rank=128)
self.dropout = t.nn.Dropout(dropout, inplace=True)
self.rezero_alpha = t.nn.Parameter(t.Tensor([0]))
def forward(self, net):
residual = net
net = self.linear1(net)
net = self.gelu(net)
net = self.dropout(net)
net = self.linear2(net)
net = self.dropout(net)
net = self.rezero_alpha * net
net += residual
return net
class FeedForwardBlock(t.nn.Module):
"""
feed forward layer combine with add - layer norm - dropout
"""
def __init__(self, input_size, inner_size, dropout, use_low_rank=False):
super(FeedForwardBlock, self).__init__()
if not use_low_rank:
self.linear1 = t.nn.Linear(input_size, inner_size, bias=True)
t.nn.init.xavier_normal_(self.linear1.weight)
else:
self.linear1 = LowRankLinear(input_size, inner_size, rank=128)
self.gelu = Gelu()
self.dropout = t.nn.Dropout(dropout, inplace=True)
if not use_low_rank:
self.linear2 = t.nn.Linear(inner_size, input_size, bias=True)
t.nn.init.xavier_normal_(self.linear2.weight)
else:
self.linear2 = LowRankLinear(inner_size, input_size, rank=128)
self.layer_norm = t.nn.LayerNorm(input_size)
self.dropout = t.nn.Dropout(dropout, inplace=True)
def forward(self, net):
residual = net
net = self.layer_norm(net)
net = self.linear1(net)
net = self.gelu(net)
net = self.dropout(net)
net = self.linear2(net)
net = self.dropout(net)
net += residual
return net
class MultiLayeredConv1d(torch.nn.Module):
"""Multi-layered conv1d for Transformer block.
This is a module of multi-leyered conv1d designed to replace positionwise feed-forward network
in Transforner block, which is introduced in `FastSpeech: Fast, Robust and Controllable Text to Speech`_.
.. _`FastSpeech: Fast, Robust and Controllable Text to Speech`:
https://arxiv.org/pdf/1905.09263.pdf
"""
def __init__(self, in_chans, hidden_chans, kernel_size, dropout_rate):
"""Initialize MultiLayeredConv1d module.
Args:
in_chans (int): Number of input channels.
hidden_chans (int): Number of hidden channels.
kernel_size (int): Kernel size of conv1d.
dropout_rate (float): Dropout rate.
"""
super(MultiLayeredConv1d, self).__init__()
self.w_1 = torch.nn.Conv1d(in_chans, hidden_chans, kernel_size,
stride=1, padding=(kernel_size - 1) // 2)
self.w_2 = torch.nn.Conv1d(hidden_chans, in_chans, kernel_size,
stride=1, padding=(kernel_size - 1) // 2)
self.dropout = torch.nn.Dropout(dropout_rate)
def forward(self, x):
"""Calculate forward propagation.
Args:
x (Tensor): Batch of input tensors (B, ..., in_chans).
Returns:
Tensor: Batch of output tensors (B, ..., hidden_chans).
"""
x = torch.relu(self.w_1(x.transpose(-1, 1))).transpose(-1, 1)
return self.w_2(self.dropout(x).transpose(-1, 1)).transpose(-1, 1)
class Conv1dLinear(torch.nn.Module):
"""Conv1D + Linear for Transformer block.
A variant of MultiLayeredConv1d, which replaces second conv-layer to linear.
"""
def __init__(self, in_chans, hidden_chans, kernel_size, dropout_rate):
"""Initialize Conv1dLinear module.
Args:
in_chans (int): Number of input channels.
hidden_chans (int): Number of hidden channels.
kernel_size (int): Kernel size of conv1d.
dropout_rate (float): Dropout rate.
"""
super(Conv1dLinear, self).__init__()
self.w_1 = torch.nn.Conv1d(in_chans, hidden_chans, kernel_size,
stride=1, padding=(kernel_size - 1) // 2)
self.w_2 = torch.nn.Linear(hidden_chans, in_chans)
self.dropout = torch.nn.Dropout(dropout_rate)
def forward(self, x):
"""Calculate forward propagation.
Args:
x (Tensor): Batch of input tensors (B, ..., in_chans).
Returns:
            Tensor: Batch of output tensors (B, ..., in_chans).
"""
x = torch.relu(self.w_1(x.transpose(-1, 1))).transpose(-1, 1)
return self.w_2(self.dropout(x))
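# --- Editor's note (hedged shape check, not in the original file): each block
# above maps (batch, time, channels) back to the same shape; the sizes below
# are arbitrary demo values.
if __name__ == "__main__":
    x = torch.randn(2, 50, 256)
    ff = FeedForwardBlock(input_size=256, inner_size=1024, dropout=0.1)
    mlc = MultiLayeredConv1d(in_chans=256, hidden_chans=1024, kernel_size=3, dropout_rate=0.1)
    print(ff(x).shape, mlc(x).shape)  # both torch.Size([2, 50, 256])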
|
[
"lancertong@live.com"
] |
lancertong@live.com
|
39b67bdf6db8b2bd8a32118189359f4ca09763ca
|
0cf9bb9c50c6efc1bc4a7923f42f6fad79039598
|
/Homeworks/HW 11_ Playing the Game Starter Code/testgame.py
|
56ad1ba84fd6cf3e17dcc8bd4907ab171d691030
|
[] |
no_license
|
AlbMej/CSE-2050-Data-Structures-and-Object-Oriented-Design
|
c950bada185823c70370522e0735533b41bd726b
|
bfbe91d698e650d78c20fd535c45108a8dba1030
|
refs/heads/master
| 2020-04-25T13:20:57.537243
| 2019-03-12T19:54:04
| 2019-03-12T19:54:04
| 172,806,267
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,884
|
py
|
from nim import Nim
from gametree import GameTree
from TicTacToe import TicTacToe
import unittest
class TestGameTree(unittest.TestCase):
"""Tests for the GameTree data structure"""
def testinit(self):
"""Test the initializer"""
n1 = Nim([2,3,4])
g1 = GameTree(n1)
n2 = Nim([3,1,4,2])
g2 = GameTree(n2)
def testcurrentwins(self):
"""Test currentwins()"""
n1 = Nim([2,3,4])
g1 = GameTree(n1)
self.assertEqual(g1.currentwins(), True)
n2 = Nim([1,0,1])
g2 = GameTree(n2)
self.assertEqual(g2.currentwins(), True)
n3 = Nim([1,0,1,1])
g3 = GameTree(n3)
self.assertEqual(g3.currentwins(), False)
def testcurrentloses(self):
"""Test currentloses()"""
n1 = Nim([2,3,4])
g1 = GameTree(n1)
self.assertEqual(g1.currentloses(), False)
n2 = Nim([1,0,1])
g2 = GameTree(n2)
self.assertEqual(g2.currentloses(), False)
n3 = Nim([1,0,1,1])
g3 = GameTree(n3)
self.assertEqual(g3.currentloses(), True)
def testcurrentdraws(self):
"""Test currentdraws()"""
n1 = Nim([1,0,1])
g1 = GameTree(n1)
self.assertEqual(g1.currentdraws(), False)
n2 = Nim([1,0,1,1])
g2 = GameTree(n2)
self.assertEqual(g2.currentdraws(), False)
def testtictactoewins(self):
"""Test currentloses() on TicTacToe"""
t1 = TicTacToe('X', 'OO_XX____')
g1 = GameTree(t1)
self.assertEqual(g1.currentwins(), True)
t3 = TicTacToe('X', 'X__X__O_O')
g3 = GameTree(t3)
self.assertEqual(g3.currentwins(), False)
t3 = TicTacToe('O', '____X____')
g3 = GameTree(t3)
self.assertEqual(g3.currentwins(), False)
def testtictactoeloses(self):
"""Test currentwins() on TicTacToe"""
t1 = TicTacToe('X', 'OO_XX____')
g1 = GameTree(t1)
self.assertEqual(g1.currentloses(), False)
t3 = TicTacToe('X', 'X__X__O_O')
g3 = GameTree(t3)
self.assertEqual(g3.currentloses(), True)
t3 = TicTacToe('O', '____X____')
g3 = GameTree(t3)
self.assertEqual(g3.currentloses(), False)
def testtictactoedraws(self):
t1 = TicTacToe('X', 'OO_XX____')
g1 = GameTree(t1)
self.assertEqual(g1.currentdraws(), False)
t3 = TicTacToe('X', 'X__X__O_O')
g3 = GameTree(t3)
self.assertEqual(g3.currentdraws(), False)
t3 = TicTacToe('O', '____X____')
g3 = GameTree(t3)
self.assertEqual(g3.currentdraws(), True)
t4 = TicTacToe('X', 'XOOOXXX__')
g4 = GameTree(t4)
self.assertEqual(g4.currentwins(), True)
if __name__ == '__main__':
unittest.main()
|
[
"albertomejia295@gmail.com"
] |
albertomejia295@gmail.com
|
e60acb0ebd7eefde531c78becbdac6ce143d7eb5
|
0d6e6d696dbba1380b742a1ff77ad342360ac243
|
/src/scripts/dispms.py
|
19c3773ba48ef21c68c14f442f34407149889f6f
|
[
"MIT"
] |
permissive
|
griembauer/EESARDocker
|
82c0791c3b51fd6144e7ec48f275b5fcaab527d4
|
49f5b2990a0d01dad830b054adf5f7555c927781
|
refs/heads/master
| 2023-01-12T00:59:52.519821
| 2020-11-19T11:54:25
| 2020-11-19T11:54:25
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 12,793
|
py
|
#!/usr/bin/env python
#******************************************************************************
# Name: dispms.py
# Purpose: Display a multispectral image
# allowed formats: uint8, uint16,float32,float64
# Usage (from command line):
# python dispms.py [OPTIONS]
#
# Copyright (c) 2018 Mort Canty
import sys, getopt, os
from osgeo import gdal
from osgeo.gdalconst import GA_ReadOnly
import matplotlib.pyplot as plt
from matplotlib import cm
import numpy as np
import auxil.auxil1 as auxil
def make_image(redband,greenband,blueband,rows,cols,enhance,rng=None):
X = np.ones((rows*cols,3),dtype=np.uint8)
if enhance == 'linear255':
i = 0
for tmp in [redband,greenband,blueband]:
tmp = tmp.ravel()
X[:,i] = auxil.bytestr(tmp,[0,255])
i += 1
elif enhance == 'linear':
i = 0
for tmp in [redband,greenband,blueband]:
tmp = tmp.ravel()
X[:,i] = auxil.linstr(tmp,rng)
i += 1
elif enhance == 'linear2pc':
i = 0
for tmp in [redband,greenband,blueband]:
tmp = tmp.ravel()
X[:,i] = auxil.lin2pcstr(tmp)
i += 1
elif enhance == 'equalization':
i = 0
for tmp in [redband,greenband,blueband]:
tmp = tmp.ravel()
X[:,i] = auxil.histeqstr(tmp)
i += 1
elif enhance == 'sqrt':
i = 0
for tmp in [redband,greenband,blueband]:
tmp = tmp.ravel()
tmp = np.sqrt(tmp)
mn = np.min(tmp)
mx = np.max(tmp)
if mx-mn > 0:
tmp = (tmp-mn)*255.0/(mx-mn)
tmp = np.where(tmp<0,0,tmp)
tmp = np.where(tmp>255,255,tmp)
X[:,i] = auxil.lin2pcstr(tmp)
i += 1
elif (enhance == 'logarithmic2pc') or (enhance == 'logarithmic'):
i = 0
for tmp in [redband,greenband,blueband]:
tmp = tmp.ravel()
mn = np.min(tmp)
if mn < 0:
tmp = tmp - mn
idx = np.where(tmp == 0)
tmp[idx] = np.mean(tmp) # get rid of black edges
idx = np.where(tmp > 0)
tmp[idx] = np.log(tmp[idx])
mn = np.min(tmp)
mx = np.max(tmp)
if mx-mn > 0:
tmp = (tmp-mn)*255.0/(mx-mn)
tmp = np.where(tmp<0,0,tmp)
tmp = np.where(tmp>255,255,tmp)
if enhance == 'logarithmic2pc':
# 2% linear stretch
X[:,i] = auxil.lin2pcstr(tmp)
else:
# no stretch
X[:,i] = tmp
i += 1
return np.reshape(X,(rows,cols,3))/255.
def dispms(filename1=None,filename2=None,dims=None,DIMS=None,rgb=None,RGB=None,enhance=None,ENHANCE=None,sfn=None,cls=None,CLS=None,alpha=None,labels=None):
gdal.AllRegister()
if filename1 == None:
filename1 = input('Enter image filename: ')
inDataset1 = gdal.Open(filename1,GA_ReadOnly)
try:
cols = inDataset1.RasterXSize
rows = inDataset1.RasterYSize
bands1 = inDataset1.RasterCount
except Exception as e:
print( 'Error in dispms: %s --could not read image file'%e )
return
if filename2 is not None:
inDataset2 = gdal.Open(filename2,GA_ReadOnly)
try:
cols2 = inDataset2.RasterXSize
rows2 = inDataset2.RasterYSize
bands2 = inDataset2.RasterCount
except Exception as e:
print( 'Error in dispms: %s --could not read second image file'%e )
return
if dims == None:
dims = [0,0,cols,rows]
x0,y0,cols,rows = dims
if rgb == None:
rgb = [1,1,1]
r,g,b = rgb
r = int(np.min([r,bands1]))
g = int(np.min([g,bands1]))
b = int(np.min([b,bands1]))
rng = None
if enhance == None:
enhance = 5
if enhance == 1:
enhance1 = 'linear255'
elif enhance == 2:
enhance1 = 'linear'
elif enhance == 3:
enhance1 = 'linear2pc'
elif enhance == 4:
enhance1 = 'equalization'
elif enhance == 5:
enhance1 = 'logarithmic2pc'
elif enhance == 6:
enhance1 = 'logarithmic'
elif enhance == 7:
enhance1 = 'sqrt'
elif isinstance(enhance,list):
enhance1 = 'linear'
rng = enhance
else:
        enhance1 = 'linear2pc'
try:
if cls is None:
redband = np.nan_to_num(inDataset1.GetRasterBand(r).ReadAsArray(x0,y0,cols,rows))
greenband = np.nan_to_num(inDataset1.GetRasterBand(g).ReadAsArray(x0,y0,cols,rows))
blueband = np.nan_to_num(inDataset1.GetRasterBand(b).ReadAsArray(x0,y0,cols,rows))
else:
classimg = inDataset1.GetRasterBand(1).ReadAsArray(x0,y0,cols,rows).ravel()
num_classes = np.max(classimg)-np.min(classimg)
redband = classimg
greenband = classimg
blueband = classimg
enhance1 = 'linear'
inDataset1 = None
except Exception as e:
print( 'Error in dispms: %s'%e )
return
X1 = make_image(redband,greenband,blueband,rows,cols,enhance1,rng)
if filename2 is not None:
# two images
if DIMS == None:
DIMS = [0,0,cols2,rows2]
x0,y0,cols,rows = DIMS
if RGB == None:
RGB = rgb
r,g,b = RGB
r = int(np.min([r,bands2]))
g = int(np.min([g,bands2]))
b = int(np.min([b,bands2]))
rng = None
enhance = ENHANCE
if enhance == None:
enhance = 5
if enhance == 1:
enhance2 = 'linear255'
elif enhance == 2:
            enhance2 = 'linear'
elif enhance == 3:
enhance2 = 'linear2pc'
elif enhance == 4:
enhance2 = 'equalization'
elif enhance == 5:
enhance2 = 'logarithmic2pc'
elif enhance == 6:
enhance2 = 'logarithmic'
elif enhance == 7:
enhance2 = 'sqrt'
elif isinstance(enhance,list):
enhance2 = 'linear'
rng = enhance
else:
            enhance2 = 'linear2pc'
try:
if CLS is None:
redband = np.nan_to_num(inDataset2.GetRasterBand(r).ReadAsArray(x0,y0,cols,rows))
greenband = np.nan_to_num(inDataset2.GetRasterBand(g).ReadAsArray(x0,y0,cols,rows))
blueband = np.nan_to_num(inDataset2.GetRasterBand(b).ReadAsArray(x0,y0,cols,rows))
else:
classimg = inDataset2.GetRasterBand(1).ReadAsArray(x0,y0,cols,rows).ravel()
redband = classimg
greenband = classimg
blueband = classimg
enhance2 = 'linear'
inDataset2 = None
except Exception as e:
print( 'Error in dispms: %s'%e )
return
X2 = make_image(redband,greenband,blueband,rows,cols,enhance2,rng)
if alpha is not None:
fig, ax = plt.subplots(figsize=(10,10))
ax.imshow(X2)
if cls is not None:
ticks = np.linspace(0.0,1.0,num_classes+1)
if labels is not None:
ticklabels = labels
else:
ticklabels = list(map(str,range(0,num_classes+1)))
X1[X1 == 0] = np.nan
cmap = cm.get_cmap('jet')
cmap.set_bad(alpha=0)
cmap.set_under('black')
cax = ax.imshow(X1[:,:,0]-0.01,cmap=cmap,alpha=alpha)
cax.set_clim(0.0,1.0)
cbar = fig.colorbar(cax,orientation='vertical', ticks=ticks, shrink=0.8,pad=0.05)
cbar.set_ticklabels(ticklabels)
else:
ax.imshow(X1-0.01,alpha=alpha)
# ax.set_title('%s: %s: %s: %s\n'%(os.path.basename(filename1),enhance1, str(rgb), str(dims)))
else:
fig, ax = plt.subplots(1,2,figsize=(20,10))
if cls:
cmap = cm.get_cmap('jet')
cmap.set_under('black')
cax = ax[0].imshow(X1[:,:,0]-0.01,cmap=cmap)
cax.set_clim(0.0,1.0)
else:
ax[0].imshow(X1)
ax[0].set_title('%s: %s: %s: %s\n'%(os.path.basename(filename1),enhance1, str(rgb), str(dims)))
if CLS:
cmap = cm.get_cmap('jet')
cmap.set_under('black')
cax = ax[1].imshow(X2[:,:,0]-0.01,cmap=cmap)
cax.set_clim(0.01,1.0)
else:
ax[1].imshow(X2)
ax[1].set_title('%s: %s: %s: %s\n'%(os.path.basename(filename2),enhance2, str(RGB), str(DIMS)))
else:
# one image
fig,ax = plt.subplots(figsize=(10,10))
if cls:
ticks = np.linspace(0.0,1.0,num_classes+1)
if labels is not None:
ticklabels = labels
else:
ticklabels = list(map(str,range(0,num_classes+1)))
cmap = cm.get_cmap('jet')
cmap.set_under('black')
cax = ax.imshow(X1[:,:,0]-0.01,cmap=cmap)
cax.set_clim(0.0,1.0)
cbar = fig.colorbar(cax,orientation='vertical', ticks=ticks, shrink=0.8,pad=0.05)
cbar.set_ticklabels(ticklabels)
else:
ax.imshow(X1)
fn = os.path.basename(filename1)
if len(fn)>40:
fn = fn[:37]+' ... '
ax.set_title('%s: %s: %s: %s\n'%(fn,enhance1, str(rgb), str(dims)))
if sfn is not None:
plt.savefig(sfn,bbox_inches='tight')
plt.show()
def main():
usage = '''
Usage:
--------------------------------------
Display an RGB composite image
python %s [OPTIONS]
Options:
-h this help
-f <string> image filename or left-hand image filename
(if not specified, it will be queried)
-F <string> right-hand image filename, if present
-e <int> left enhancement (1=linear255 2=linear
3=linear2pc saturation 4=histogram equalization
                 5=logarithmic2pc (default)
6=logarithmic
7=square root
<list> linear range
-E <int> right ditto
-p <list> left RGB band positions e.g. -p [1,2,3]
-P <list> right ditto
-d <list> left spatial subset [x,y,width,height]
e.g. -d [0,0,200,200]
-D <list> right ditto
-c right display as classification image
-C left ditto
-o <float> overlay left image onto right with opacity
-r <string> class labels (a string evaluating to a list of strings)
-s <string> save to a file in EPS format
-------------------------------------'''%sys.argv[0]
options,_ = getopt.getopt(sys.argv[1:],'hco:Cf:F:p:P:d:D:e:E:s:r:')
filename1 = None
filename2 = None
dims = None
rgb = None
DIMS = None
RGB = None
enhance = None
ENHANCE = None
alpha = None
cls = None
CLS = None
sfn = None
labels = None
for option, value in options:
if option == '-h':
print( usage )
return
elif option == '-s':
sfn = value
elif option =='-o':
alpha = eval(value)
elif option == '-f':
filename1 = value
elif option == '-F':
filename2 = value
elif option == '-p':
rgb = tuple(eval(value))
elif option == '-P':
RGB = tuple(eval(value))
elif option == '-d':
dims = eval(value)
elif option == '-D':
DIMS = eval(value)
elif option == '-e':
enhance = eval(value)
elif option == '-E':
ENHANCE = eval(value)
elif option == '-c':
cls = True
elif option == '-C':
CLS = True
elif option == '-r':
labels = eval(value)
dispms(filename1,filename2,dims,DIMS,rgb,RGB,enhance,ENHANCE,sfn,cls,CLS,alpha,labels)
if __name__ == '__main__':
main()
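# --- Editor's note (hedged example, not in the original file): a typical
# invocation per the usage text above; the filename and values are hypothetical.
#   python dispms.py -f LE07_subset.tif -p [4,3,2] -e 3 -d [0,0,400,400]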
|
[
"mort.canty@gmail.com"
] |
mort.canty@gmail.com
|
eefadf6c7a8ed64f11d537d97fad62d617be43b6
|
6f2b98bcfca2e338dba4b313f5cbca3e5ca2e1f6
|
/backend/api/migrations/0034_add_image_deletehash.py
|
a6ea652b4581bac987436aa749d7c4db20c9a3b4
|
[
"MIT"
] |
permissive
|
Disfactory/Disfactory
|
c9187d73607d359a18206b42b3e7e4727f62e0ae
|
54163d8c14e9b67493a0354175cfe87a0f98d823
|
refs/heads/master
| 2023-07-24T19:22:28.963065
| 2023-07-17T10:54:50
| 2023-07-17T10:54:50
| 209,294,385
| 50
| 22
|
MIT
| 2023-08-26T07:55:06
| 2019-09-18T11:43:33
|
Python
|
UTF-8
|
Python
| false
| false
| 419
|
py
|
# Generated by Django 2.2.13 on 2021-03-17 13:05
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('api', '0033_remove_factory_point'),
]
operations = [
migrations.AddField(
model_name='image',
name='deletehash',
field=models.TextField(blank=True, help_text='delete hash', null=True),
),
]
|
[
"swind@cloudmosa.com"
] |
swind@cloudmosa.com
|
3aaa5e767907175dcfd0bc96a76aad87f4a2a27b
|
e3365bc8fa7da2753c248c2b8a5c5e16aef84d9f
|
/indices/famou.py
|
851c640e9e32f3289b86018e04310c50f3ee47f8
|
[] |
no_license
|
psdh/WhatsintheVector
|
e8aabacc054a88b4cb25303548980af9a10c12a8
|
a24168d068d9c69dc7a0fd13f606c080ae82e2a6
|
refs/heads/master
| 2021-01-25T10:34:22.651619
| 2015-09-23T11:54:06
| 2015-09-23T11:54:06
| 42,749,205
| 2
| 3
| null | 2015-09-23T11:54:07
| 2015-09-18T22:06:38
|
Python
|
UTF-8
|
Python
| false
| false
| 1,814
|
py
|
ii = [('CookGHP3.py', 3), ('MarrFDI.py', 2), ('CoolWHM2.py', 3), ('GodwWSL2.py', 2), ('RogePAV.py', 1), ('SadlMLP.py', 3), ('FerrSDO3.py', 1), ('WilbRLW.py', 4), ('RennJIT.py', 1), ('ProuWCM.py', 1), ('CookGHP.py', 1), ('ShawHDE.py', 1), ('MartHSI2.py', 2), ('LeakWTI2.py', 4), ('KembFJ1.py', 4), ('WilkJMC3.py', 5), ('WilbRLW5.py', 1), ('LeakWTI3.py', 4), ('PettTHE.py', 2), ('MarrFDI3.py', 2), ('PeckJNG.py', 5), ('WilbRLW2.py', 1), ('ClarGE2.py', 21), ('WilkJMC2.py', 1), ('CarlTFR.py', 6), ('SeniNSP.py', 1), ('CoopJBT2.py', 1), ('RoscTTI3.py', 3), ('AinsWRR3.py', 9), ('CookGHP2.py', 7), ('RoscTTI2.py', 13), ('CoolWHM.py', 8), ('CrokTPS.py', 24), ('ClarGE.py', 34), ('LandWPA.py', 1), ('BuckWGM.py', 1), ('IrviWVD.py', 7), ('LyelCPG.py', 1), ('DaltJMA.py', 3), ('WestJIT2.py', 2), ('DibdTRL2.py', 7), ('AinsWRR.py', 6), ('CrocDNL.py', 3), ('MedwTAI.py', 8), ('WadeJEB.py', 16), ('NewmJLP.py', 1), ('GodwWLN.py', 10), ('CoopJBT.py', 3), ('SoutRD2.py', 10), ('LeakWTI4.py', 6), ('LeakWTI.py', 3), ('MedwTAI2.py', 1), ('BachARE.py', 1), ('SoutRD.py', 4), ('WheeJPT.py', 1), ('MereHHB3.py', 3), ('HowiWRL2.py', 3), ('BailJD3.py', 1), ('WilkJMC.py', 4), ('HogaGMM.py', 28), ('MartHRW.py', 1), ('FitzRNS4.py', 15), ('FitzRNS.py', 4), ('BentJRP.py', 1), ('EdgeMHT.py', 5), ('BowrJMM.py', 2), ('FerrSDO.py', 2), ('RoscTTI.py', 8), ('ThomGLG.py', 1), ('StorJCC.py', 1), ('KembFJ2.py', 4), ('BabbCRD.py', 1), ('JacoWHI2.py', 1), ('WilbRLW3.py', 2), ('AinsWRR2.py', 3), ('MereHHB2.py', 1), ('JacoWHI.py', 1), ('ClarGE3.py', 51), ('RogeSIP.py', 4), ('DibdTRL.py', 15), ('FitzRNS2.py', 5), ('HogaGMM2.py', 16), ('DwigTHH.py', 1), ('NortSTC.py', 2), ('BowrJMM2.py', 3), ('BeckWRE.py', 7), ('WordWYR.py', 2), ('DibdTBR.py', 1), ('ThomWEC.py', 1), ('KeigTSS.py', 4), ('KirbWPW.py', 1), ('BentJDO.py', 1), ('ClarGE4.py', 42)]
|
[
"prabhjyotsingh95@gmail.com"
] |
prabhjyotsingh95@gmail.com
|
253eb447069949259264040ad4f04fa3c929cca0
|
acff427a36d6340486ff747ae9e52f05a4b027f2
|
/playground/narcisse/vstream-client/actions.py
|
2fcb4cc9c1c59b88cf51530ab7e633160d07a0a4
|
[] |
no_license
|
jeremie1112/pisilinux
|
8f5a03212de0c1b2453132dd879d8c1556bb4ff7
|
d0643b537d78208174a4eeb5effeb9cb63c2ef4f
|
refs/heads/master
| 2020-03-31T10:12:21.253540
| 2018-10-08T18:53:50
| 2018-10-08T18:53:50
| 152,126,584
| 2
| 1
| null | 2018-10-08T18:24:17
| 2018-10-08T18:24:17
| null |
UTF-8
|
Python
| false
| false
| 662
|
py
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Licensed under the GNU General Public License, version 3.
# See the file http://www.gnu.org/copyleft/gpl.txt
from pisi.actionsapi import autotools
from pisi.actionsapi import pisitools
from pisi.actionsapi import shelltools
from pisi.actionsapi import get
WorkDir="vstream-client-%s" % get.srcVERSION()
def setup():
shelltools.export("OS_CXXFLAGS", "%s -fno-strict-aliasing" % get.CXXFLAGS())
shelltools.export("CFLAGS", "%s" % get.CFLAGS())
shelltools.system("./configure --prefix=/usr")
def build():
autotools.make()
def install():
autotools.rawInstall("DESTDIR=%s" % get.installDIR())
|
[
"erkanisik@yahoo.com"
] |
erkanisik@yahoo.com
|
c79bb1ad75927ddf35a24723228d2a9a9a44e124
|
83a6382bd9842c432e8d9672b8b03188bcf8dda4
|
/test_project/test_project/settings.py
|
74db50b1f21aa0209f9e22a3982cc0fc9def1781
|
[] |
no_license
|
encast/django-services
|
c401c299eaacc87c3089d8c0b6f90fc7e736a72b
|
813e5bee7eb0dc9144ca814ce471f3c69e176c6d
|
refs/heads/master
| 2021-06-25T12:48:41.157800
| 2017-05-19T13:26:20
| 2017-05-19T13:26:20
| 152,356,678
| 0
| 0
| null | 2018-10-10T03:18:10
| 2018-10-10T03:18:09
| null |
UTF-8
|
Python
| false
| false
| 5,397
|
py
|
# Django settings for test_project project.
DEBUG = True
TEMPLATE_DEBUG = DEBUG
ADMINS = (
# ('Your Name', 'your_email@example.com'),
)
MANAGERS = ADMINS
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3', # Add 'postgresql_psycopg2', 'mysql', 'sqlite3' or 'oracle'.
'NAME': 'db', # Or path to database file if using sqlite3.
# The following settings are not used with sqlite3:
'USER': '',
'PASSWORD': '',
'HOST': '', # Empty for localhost through domain sockets or '127.0.0.1' for localhost through TCP.
'PORT': '', # Set to empty string for default.
}
}
# Hosts/domain names that are valid for this site; required if DEBUG is False
# See https://docs.djangoproject.com/en/1.5/ref/settings/#allowed-hosts
ALLOWED_HOSTS = []
# Local time zone for this installation. Choices can be found here:
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
# although not all choices may be available on all operating systems.
# In a Windows environment this must be set to your system time zone.
TIME_ZONE = 'America/Chicago'
# Language code for this installation. All choices can be found here:
# http://www.i18nguy.com/unicode/language-identifiers.html
LANGUAGE_CODE = 'en-us'
SITE_ID = 1
# If you set this to False, Django will make some optimizations so as not
# to load the internationalization machinery.
USE_I18N = True
# If you set this to False, Django will not format dates, numbers and
# calendars according to the current locale.
USE_L10N = True
# If you set this to False, Django will not use timezone-aware datetimes.
USE_TZ = True
# Absolute filesystem path to the directory that will hold user-uploaded files.
# Example: "/var/www/example.com/media/"
MEDIA_ROOT = ''
# URL that handles the media served from MEDIA_ROOT. Make sure to use a
# trailing slash.
# Examples: "http://example.com/media/", "http://media.example.com/"
MEDIA_URL = ''
# Absolute path to the directory static files should be collected to.
# Don't put anything in this directory yourself; store your static files
# in apps' "static/" subdirectories and in STATICFILES_DIRS.
# Example: "/var/www/example.com/static/"
STATIC_ROOT = ''
# URL prefix for static files.
# Example: "http://example.com/static/", "http://static.example.com/"
STATIC_URL = '/static/'
# Additional locations of static files
STATICFILES_DIRS = (
# Put strings here, like "/home/html/static" or "C:/www/django/static".
# Always use forward slashes, even on Windows.
# Don't forget to use absolute paths, not relative paths.
)
# List of finder classes that know how to find static files in
# various locations.
STATICFILES_FINDERS = (
'django.contrib.staticfiles.finders.FileSystemFinder',
'django.contrib.staticfiles.finders.AppDirectoriesFinder',
# 'django.contrib.staticfiles.finders.DefaultStorageFinder',
)
# Make this unique, and don't share it with anybody.
SECRET_KEY = '0ckrui566@s8s442vk#7a#4!#dfea$ndj56*6+cv!$=-sz)+)9'
# List of callables that know how to import templates from various sources.
TEMPLATE_LOADERS = (
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
# 'django.template.loaders.eggs.Loader',
)
MIDDLEWARE_CLASSES = (
'django.middleware.common.CommonMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
# Uncomment the next line for simple clickjacking protection:
# 'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
ROOT_URLCONF = 'test_project.urls'
# Python dotted path to the WSGI application used by Django's runserver.
WSGI_APPLICATION = 'test_project.wsgi.application'
TEMPLATE_DIRS = (
# Put strings here, like "/home/html/django_templates" or "C:/www/django/templates".
# Always use forward slashes, even on Windows.
# Don't forget to use absolute paths, not relative paths.
)
INSTALLED_APPS = (
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'django.contrib.messages',
'django.contrib.staticfiles',
'services',
'services.apps.docgen',
'south',
'main',
# Uncomment the next line to enable the admin:
# 'django.contrib.admin',
# Uncomment the next line to enable admin documentation:
# 'django.contrib.admindocs',
)
# A sample logging configuration. The only tangible logging
# performed by this configuration is to send an email to
# the site admins on every HTTP 500 error when DEBUG=False.
# See http://docs.djangoproject.com/en/dev/topics/logging for
# more details on how to customize your logging configuration.
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'filters': {
'require_debug_false': {
'()': 'django.utils.log.RequireDebugFalse'
}
},
'handlers': {
'mail_admins': {
'level': 'ERROR',
'filters': ['require_debug_false'],
'class': 'django.utils.log.AdminEmailHandler'
}
},
'loggers': {
'django.request': {
'handlers': ['mail_admins'],
'level': 'ERROR',
'propagate': True,
},
}
}
|
[
"semarjt@gmail.com"
] |
semarjt@gmail.com
|