blob_id stringlengths 40 40 | directory_id stringlengths 40 40 | path stringlengths 3 288 | content_id stringlengths 40 40 | detected_licenses listlengths 0 112 | license_type stringclasses 2 values | repo_name stringlengths 5 115 | snapshot_id stringlengths 40 40 | revision_id stringlengths 40 40 | branch_name stringclasses 684 values | visit_date timestamp[us]date 2015-08-06 10:31:46 2023-09-06 10:44:38 | revision_date timestamp[us]date 1970-01-01 02:38:32 2037-05-03 13:00:00 | committer_date timestamp[us]date 1970-01-01 02:38:32 2023-09-06 01:08:06 | github_id int64 4.92k 681M ⌀ | star_events_count int64 0 209k | fork_events_count int64 0 110k | gha_license_id stringclasses 22 values | gha_event_created_at timestamp[us]date 2012-06-04 01:52:49 2023-09-14 21:59:50 ⌀ | gha_created_at timestamp[us]date 2008-05-22 07:58:19 2023-08-21 12:35:19 ⌀ | gha_language stringclasses 147 values | src_encoding stringclasses 25 values | language stringclasses 1 value | is_vendor bool 2 classes | is_generated bool 2 classes | length_bytes int64 128 12.7k | extension stringclasses 142 values | content stringlengths 128 8.19k | authors listlengths 1 1 | author_id stringlengths 1 132 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
4c6cd8fa2c76f07442cc03436bacac23835e402e | 52b5773617a1b972a905de4d692540d26ff74926 | /.history/robort_20200727105954.py | bf79f5392d4aaf3cefaba3da6918f708fb08598f | [] | no_license | MaryanneNjeri/pythonModules | 56f54bf098ae58ea069bf33f11ae94fa8eedcabc | f4e56b1e4dda2349267af634a46f6b9df6686020 | refs/heads/master | 2022-12-16T02:59:19.896129 | 2020-09-11T12:05:22 | 2020-09-11T12:05:22 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 555 | py | def uniquePaths(m,n):
# use dynamic programming and answer is at arr[m][n]
# let's create and empty grid with 0's
grid = [[0 for x in range(m)] for y in range(n)]
print(grid)
# then using the top down uproach we shall prefill all the
# i,j i = 0 and j+1
# then i +1 ,j = 0
for i in range(len(grid)):
# print('i',i)
for j in range(len(grid[i])):
if i == 0 or j == 0:
print('i',i)
grid[i][j] = 1
print(grid)
uniquePaths(3,2) | [
"mary.jereh@gmail.com"
] | mary.jereh@gmail.com |
53761b003841d9f17b9bd786b1199e029573d183 | ff23900a911e099595c392a7efab1d268b4f5f7d | /python_modules/libraries/dagster-snowflake-pandas/dagster_snowflake_pandas_tests/test_snowflake_pandas_type_handler.py | 045f02682b567ae7c7cbb3a15986b9b2d4dfdf84 | [
"Apache-2.0"
] | permissive | zkan/dagster | bbf2da091bdc7fca028c569db72b9c68ddf55e98 | b2b19edb71fc8985f505b116927350dd23b4a7d9 | refs/heads/master | 2022-08-24T03:20:12.583577 | 2022-08-16T00:01:23 | 2022-08-16T00:01:23 | 244,012,061 | 0 | 0 | Apache-2.0 | 2020-02-29T17:33:24 | 2020-02-29T17:33:24 | null | UTF-8 | Python | false | false | 7,662 | py | import logging
import os
import uuid
from contextlib import contextmanager
from typing import Iterator
from unittest.mock import patch
import pandas
import pytest
from dagster_snowflake import build_snowflake_io_manager
from dagster_snowflake.resources import SnowflakeConnection
from dagster_snowflake.snowflake_io_manager import TableSlice
from dagster_snowflake_pandas import SnowflakePandasTypeHandler
from dagster_snowflake_pandas.snowflake_pandas_type_handler import (
_convert_string_to_timestamp,
_convert_timestamp_to_string,
)
from pandas import DataFrame
from dagster import (
MetadataValue,
Out,
TableColumn,
TableSchema,
build_input_context,
build_output_context,
job,
op,
)
resource_config = {
"database": "database_abc",
"account": "account_abc",
"user": "user_abc",
"password": "password_abc",
"warehouse": "warehouse_abc",
}
IS_BUILDKITE = os.getenv("BUILDKITE") is not None
SHARED_BUILDKITE_SNOWFLAKE_CONF = {
"account": os.getenv("SNOWFLAKE_ACCOUNT", ""),
"user": "BUILDKITE",
"password": os.getenv("SNOWFLAKE_BUILDKITE_PASSWORD", ""),
}
@contextmanager
def temporary_snowflake_table(schema_name: str, db_name: str, column_str: str) -> Iterator[str]:
snowflake_config = dict(database=db_name, **SHARED_BUILDKITE_SNOWFLAKE_CONF)
table_name = "test_io_manager_" + str(uuid.uuid4()).replace("-", "_")
with SnowflakeConnection(
snowflake_config, logging.getLogger("temporary_snowflake_table")
).get_connection() as conn:
conn.cursor().execute(f"create table {schema_name}.{table_name} ({column_str})")
try:
yield table_name
finally:
conn.cursor().execute(f"drop table {schema_name}.{table_name}")
def test_handle_output():
with patch("dagster_snowflake_pandas.snowflake_pandas_type_handler._connect_snowflake"):
handler = SnowflakePandasTypeHandler()
df = DataFrame([{"col1": "a", "col2": 1}])
output_context = build_output_context(resource_config=resource_config)
metadata = handler.handle_output(
output_context,
TableSlice(
table="my_table",
schema="my_schema",
database="my_db",
columns=None,
partition=None,
),
df,
)
assert metadata == {
"dataframe_columns": MetadataValue.table_schema(
TableSchema(columns=[TableColumn("col1", "object"), TableColumn("col2", "int64")])
),
"row_count": 1,
}
def test_load_input():
with patch("dagster_snowflake_pandas.snowflake_pandas_type_handler._connect_snowflake"), patch(
"dagster_snowflake_pandas.snowflake_pandas_type_handler.pd.read_sql"
) as mock_read_sql:
mock_read_sql.return_value = DataFrame([{"COL1": "a", "COL2": 1}])
handler = SnowflakePandasTypeHandler()
input_context = build_input_context()
df = handler.load_input(
input_context,
TableSlice(
table="my_table",
schema="my_schema",
database="my_db",
columns=None,
partition=None,
),
)
assert mock_read_sql.call_args_list[0][1]["sql"] == "SELECT * FROM my_db.my_schema.my_table"
assert df.equals(DataFrame([{"col1": "a", "col2": 1}]))
def test_type_conversions():
# no timestamp data
no_time = pandas.Series([1, 2, 3, 4, 5])
converted = _convert_string_to_timestamp(_convert_timestamp_to_string(no_time))
assert (converted == no_time).all()
# timestamp data
with_time = pandas.Series(
[
pandas.Timestamp("2017-01-01T12:30:45.35"),
pandas.Timestamp("2017-02-01T12:30:45.35"),
pandas.Timestamp("2017-03-01T12:30:45.35"),
]
)
time_converted = _convert_string_to_timestamp(_convert_timestamp_to_string(with_time))
assert (with_time == time_converted).all()
# string that isn't a time
string_data = pandas.Series(["not", "a", "timestamp"])
assert (_convert_string_to_timestamp(string_data) == string_data).all()
@pytest.mark.skipif(not IS_BUILDKITE, reason="Requires access to the BUILDKITE snowflake DB")
def test_io_manager_with_snowflake_pandas():
with temporary_snowflake_table(
schema_name="SNOWFLAKE_IO_MANAGER_SCHEMA",
db_name="TEST_SNOWFLAKE_IO_MANAGER",
column_str="foo string, quux integer",
) as table_name:
# Create a job with the temporary table name as an output, so that it will write to that table
# and not interfere with other runs of this test
@op(
out={
table_name: Out(
io_manager_key="snowflake", metadata={"schema": "SNOWFLAKE_IO_MANAGER_SCHEMA"}
)
}
)
def emit_pandas_df(_):
return pandas.DataFrame({"foo": ["bar", "baz"], "quux": [1, 2]})
@op
def read_pandas_df(df: pandas.DataFrame):
assert set(df.columns) == {"foo", "quux"}
assert len(df.index) == 2
snowflake_io_manager = build_snowflake_io_manager([SnowflakePandasTypeHandler()])
@job(
resource_defs={"snowflake": snowflake_io_manager},
config={
"resources": {
"snowflake": {
"config": {
**SHARED_BUILDKITE_SNOWFLAKE_CONF,
"database": "TEST_SNOWFLAKE_IO_MANAGER",
}
}
}
},
)
def io_manager_test_pipeline():
read_pandas_df(emit_pandas_df())
res = io_manager_test_pipeline.execute_in_process()
assert res.success
@pytest.mark.skipif(not IS_BUILDKITE, reason="Requires access to the BUILDKITE snowflake DB")
def test_io_manager_with_snowflake_pandas_timestamp_data():
with temporary_snowflake_table(
schema_name="SNOWFLAKE_IO_MANAGER_SCHEMA",
db_name="TEST_SNOWFLAKE_IO_MANAGER",
column_str="foo string, date TIMESTAMP_NTZ(9)",
) as table_name:
time_df = pandas.DataFrame(
{
"foo": ["bar", "baz"],
"date": [
pandas.Timestamp("2017-01-01T12:30:45.350"),
pandas.Timestamp("2017-02-01T12:30:45.350"),
],
}
)
@op(
out={
table_name: Out(
io_manager_key="snowflake", metadata={"schema": "SNOWFLAKE_IO_MANAGER_SCHEMA"}
)
}
)
def emit_time_df(_):
return time_df
@op
def read_time_df(df: pandas.DataFrame):
assert set(df.columns) == {"foo", "date"}
assert (df["date"] == time_df["date"]).all()
snowflake_io_manager = build_snowflake_io_manager([SnowflakePandasTypeHandler()])
@job(
resource_defs={"snowflake": snowflake_io_manager},
config={
"resources": {
"snowflake": {
"config": {
**SHARED_BUILDKITE_SNOWFLAKE_CONF,
"database": "TEST_SNOWFLAKE_IO_MANAGER",
}
}
}
},
)
def io_manager_timestamp_test_job():
read_time_df(emit_time_df())
res = io_manager_timestamp_test_job.execute_in_process()
assert res.success
| [
"noreply@github.com"
] | zkan.noreply@github.com |
71ba1fab1dc153bbad617aeb8002714868377f16 | 9dba277eeb0d5e9d2ac75e2e17ab5b5eda100612 | /19100101/qiming09/d5_exercise_stats_text.py | 9123ccf331de4f4c95e8ba80583a8abffaa4e99a | [] | no_license | shen-huang/selfteaching-python-camp | e8410bfc06eca24ee2866c5d890fd063e9d4be89 | 459f90c9f09bd3a3df9e776fc64dfd64ac65f976 | refs/heads/master | 2022-05-02T05:39:08.932008 | 2022-03-17T07:56:30 | 2022-03-17T07:56:30 | 201,287,222 | 9 | 6 | null | 2019-08-08T15:34:26 | 2019-08-08T15:34:25 | null | UTF-8 | Python | false | false | 1,456 | py | # this is d5 excercise_2 for text
# date : 2019.3.22
# author by : qiming
# 原始文本
text = '''
The Zen of Python, by Tim Peters
Beautiful is better than ugly.
Explicit is better than implicit.
Simple is better than complex.
Complex is better than complicated.
Flat is better than nested.
Sparse is better than dense.
Readability counts.
Special cases aren't special enough to break the rules.
Although practicality beats purity.
Errors should never pass silently.
Unless explicitly silenced.
In the face of ambxiguity, refuse the temptation to guess.
There should be one-- and preferably only one --obvious way to do it.
Although that way may not be obvious at first unless you're Dutch.
Now is better than never.
Although never is often better than *right* now.
If the implementation is hard to explain, it's a bad idea.
If the implementation is easy to explain, it may be a good idea.
Namespaces are one honking great idea -- let's do more of those!
'''
# 只统计英文单词,不包括非英文字符的其他任何符号,如连接符号、空白字符等等
list1 = text.split( )
i=0
for i in range(0,len(list1)):
list1[i]=list1[i].strip('*-,.!')
if list1[i]==' ':
list1[i].remove(' ')
else:
i=i+1
# 使用dict统计字符串样本中各个英文单词出现的次数
# 按照出现次数从大到小排列,示例 {'is': 10, ‘better’ : 9, …… }
import collections
print(collections.Counter(list1))
| [
"6396023+realcaiying@users.noreply.github.com"
] | 6396023+realcaiying@users.noreply.github.com |
ebccb5939007fff56633866cd7d1b87aa282bfec | f2411753c4eb2dd04ee9136c594784c073d1de02 | /graphene/contrib/django/debug/sql/tracking.py | 47f7a30c5d9283dbb046f20617a21cb2349d971f | [
"MIT"
] | permissive | AdrielVelazquez/graphene | 9a5dbcfa02102cbf4c7463476fd1c51dcdefb107 | 4d15bc4f796db403e1ed4877665b80422b516eca | refs/heads/master | 2020-12-28T23:50:29.035822 | 2016-05-20T19:20:10 | 2016-05-20T19:20:10 | 59,317,384 | 0 | 0 | null | 2016-05-20T18:29:37 | 2016-05-20T18:29:37 | null | UTF-8 | Python | false | false | 4,857 | py | # Code obtained from django-debug-toolbar sql panel tracking
from __future__ import absolute_import, unicode_literals
import json
from threading import local
from time import time
from django.utils import six
from django.utils.encoding import force_text
class SQLQueryTriggered(Exception):
"""Thrown when template panel triggers a query"""
class ThreadLocalState(local):
def __init__(self):
self.enabled = True
@property
def Wrapper(self):
if self.enabled:
return NormalCursorWrapper
return ExceptionCursorWrapper
def recording(self, v):
self.enabled = v
state = ThreadLocalState()
recording = state.recording # export function
def wrap_cursor(connection, panel):
if not hasattr(connection, '_graphene_cursor'):
connection._graphene_cursor = connection.cursor
def cursor():
return state.Wrapper(connection._graphene_cursor(), connection, panel)
connection.cursor = cursor
return cursor
def unwrap_cursor(connection):
if hasattr(connection, '_graphene_cursor'):
del connection._graphene_cursor
del connection.cursor
class ExceptionCursorWrapper(object):
"""
Wraps a cursor and raises an exception on any operation.
Used in Templates panel.
"""
def __init__(self, cursor, db, logger):
pass
def __getattr__(self, attr):
raise SQLQueryTriggered()
class NormalCursorWrapper(object):
"""
Wraps a cursor and logs queries.
"""
def __init__(self, cursor, db, logger):
self.cursor = cursor
# Instance of a BaseDatabaseWrapper subclass
self.db = db
# logger must implement a ``record`` method
self.logger = logger
def _quote_expr(self, element):
if isinstance(element, six.string_types):
return "'%s'" % force_text(element).replace("'", "''")
else:
return repr(element)
def _quote_params(self, params):
if not params:
return params
if isinstance(params, dict):
return dict((key, self._quote_expr(value))
for key, value in params.items())
return list(map(self._quote_expr, params))
def _decode(self, param):
try:
return force_text(param, strings_only=True)
except UnicodeDecodeError:
return '(encoded string)'
def _record(self, method, sql, params):
start_time = time()
try:
return method(sql, params)
finally:
stop_time = time()
duration = (stop_time - start_time)
_params = ''
try:
_params = json.dumps(list(map(self._decode, params)))
except Exception:
pass # object not JSON serializable
alias = getattr(self.db, 'alias', 'default')
conn = self.db.connection
vendor = getattr(conn, 'vendor', 'unknown')
params = {
'vendor': vendor,
'alias': alias,
'sql': self.db.ops.last_executed_query(
self.cursor, sql, self._quote_params(params)),
'duration': duration,
'raw_sql': sql,
'params': _params,
'start_time': start_time,
'stop_time': stop_time,
'is_slow': duration > 10,
'is_select': sql.lower().strip().startswith('select'),
}
if vendor == 'postgresql':
# If an erroneous query was ran on the connection, it might
# be in a state where checking isolation_level raises an
# exception.
try:
iso_level = conn.isolation_level
except conn.InternalError:
iso_level = 'unknown'
params.update({
'trans_id': self.logger.get_transaction_id(alias),
'trans_status': conn.get_transaction_status(),
'iso_level': iso_level,
'encoding': conn.encoding,
})
# We keep `sql` to maintain backwards compatibility
self.logger.record(**params)
def callproc(self, procname, params=()):
return self._record(self.cursor.callproc, procname, params)
def execute(self, sql, params=()):
return self._record(self.cursor.execute, sql, params)
def executemany(self, sql, param_list):
return self._record(self.cursor.executemany, sql, param_list)
def __getattr__(self, attr):
return getattr(self.cursor, attr)
def __iter__(self):
return iter(self.cursor)
def __enter__(self):
return self
def __exit__(self, type, value, traceback):
self.close()
| [
"me@syrusakbary.com"
] | me@syrusakbary.com |
f73a5a5a3e638c2d9e725a8cc34c709f5d3794e8 | 163bbb4e0920dedd5941e3edfb2d8706ba75627d | /Code/CodeRecords/2327/60621/239684.py | 40d8b72fef8bfd3b2b5a04ac9e8fd343edc92093 | [] | no_license | AdamZhouSE/pythonHomework | a25c120b03a158d60aaa9fdc5fb203b1bb377a19 | ffc5606817a666aa6241cfab27364326f5c066ff | refs/heads/master | 2022-11-24T08:05:22.122011 | 2020-07-28T16:21:24 | 2020-07-28T16:21:24 | 259,576,640 | 2 | 1 | null | null | null | null | UTF-8 | Python | false | false | 381 | py | a=input()
I=a.count("I")
D=a.count("D")
b=[0]
for i in range(len(a)):
b.append(i+1)
b1=[i for i in b[0:I]]
b2=[j for j in b[I:]]
b2.sort(reverse=True)
c=[];cu1,cu2=0,0
for i in a:
if i=="I":
c.append(b1[cu1])
cu1+=1
else:
c.append(b2[cu2])
cu2+=1
if(cu1!=len(b1)):
c.append(b1[len(b1)-1])
else:
c.append(b2[len(b2)-1])
print(c) | [
"1069583789@qq.com"
] | 1069583789@qq.com |
74c1927ddec50a5fd6abea8524720bce578f3c39 | da052c0bbf811dc4c29a83d1b1bffffd41becaab | /core/stock_count_link_inv_adjust/models/stock_count.py | 320955c28c49a62f0247469c7f96181c6750588d | [] | no_license | Muhammad-SF/Test | ef76a45ad28ac8054a4844f5b3826040a222fb6e | 46e15330b5d642053da61754247f3fbf9d02717e | refs/heads/main | 2023-03-13T10:03:50.146152 | 2021-03-07T20:28:36 | 2021-03-07T20:28:36 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 354 | py | from odoo import fields, models, api
class StockCountInherit(models.Model):
_inherit = "stock.count"
def action_done(self):
self.action_inventory_adjustment()
self.inv_id.action_done()
print("inv id=====1111111=======", self.inv_id)
self.inv_id.write({'state': 'confirm'})
self.write({'state': 'close'})
| [
"jbalu2801@gmail.com"
] | jbalu2801@gmail.com |
45f034a56235052cb6039c6ce0f0280c9bfbc65c | 1533e34bec5adac39a27d42971c39f7cf610dc00 | /migrations/versions/77190eba98d9_vamo.py | 706a1baee8e798abbe085b43f9244e70f67af49d | [] | no_license | apocalipsys/florabot | 3d8c510d88ca20260471ae0916f406c8715beb4a | ff181430e20cd5739b1f3e6f872d4506002f9a7f | refs/heads/rama-desert | 2022-12-10T04:07:17.181809 | 2020-02-25T06:04:12 | 2020-02-25T06:04:12 | 241,262,587 | 2 | 0 | null | 2022-12-08T03:39:03 | 2020-02-18T03:22:14 | Python | UTF-8 | Python | false | false | 842 | py | """vamo
Revision ID: 77190eba98d9
Revises:
Create Date: 2019-08-11 03:10:10.235839
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '77190eba98d9'
down_revision = None
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table('users',
sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
sa.Column('username', sa.String(length=20), nullable=True),
sa.Column('password_hash', sa.String(length=128), nullable=True),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('username')
)
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_table('users')
# ### end Alembic commands ###
| [
"martinvargas82@gmail.com"
] | martinvargas82@gmail.com |
11266b5585922b736d205d227e1d6801bc5a9630 | cfd2df2e798d85805970ab9f355ee33ff20f0d6e | /array_find_repeat.py | f19559e5d4e105dc9c934943b234dc804e5dd3aa | [] | no_license | chaichai1997/python-algorithm | 483691ec4e3f386acac26412edaaae328cce3fae | 3b4815f3cc27eaceb9ad0b15b5e45077a9cd62a5 | refs/heads/master | 2022-11-15T22:37:28.035141 | 2020-07-13T13:01:35 | 2020-07-13T13:01:35 | 261,659,330 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,220 | py | # -*- coding: utf-8 -*-
# author = "chaichai"
"""
找到数组中的唯一重复元素
"""
"""
借助字典实现,以空间换时间
"""
def find(array):
if array is None:
return -1
lens = len(array)
hash_table = dict()
for i in range(lens):
hash_table[array[i]] = 0
j = 0
for j in range(lens):
if hash_table[array[j]] == 0:
hash_table[array[j]] = 1
else:
return array[j]
return -1
"""
异或法
"""
def find_xor(array):
if array is None:
return -1
lens = len(array)
result = 0
for i in range(lens):
result ^= array[i]
for j in range(lens):
result ^= j
return result
"""
数据映射
"""
def find_map(array):
if array is None:
return -1
lens = len(array)
index = 0
i = 0
while True:
if array[i] >= lens:
return -1
if array[index] < 0:
break
array[index] *= -1
index = -1 * array[index]
if index >= lens:
print("非法")
return -1
return index
if __name__ == '__main__':
array = [1, 3, 4, 2, 5, 3]
print(find(array))
print(find_xor(array))
| [
"1224816105@qq.com"
] | 1224816105@qq.com |
43add3d270a3864bc374264f663d6631bb82c1bc | f60b964dc39ba54bb84f1c4949be3b91a92b8346 | /track_order/forms.py | 07a601aa7dea5764b5b5fcc4168a15febb3a3310 | [
"Apache-2.0"
] | permissive | jiejiang/courier | 4b0b4fc56c5510228ffcc4de51b074c7aff9502f | 6fdeaf041c77dba0f97e206adb7b0cded9674d3d | refs/heads/master | 2022-11-30T14:24:53.950502 | 2019-12-06T16:42:00 | 2019-12-06T16:42:00 | 195,387,643 | 0 | 0 | Apache-2.0 | 2022-11-22T01:22:33 | 2019-07-05T10:08:19 | Python | UTF-8 | Python | false | false | 5,108 | py | # *- coding: utf-8 -*
from django.core.validators import RegexValidator
import re
from django.shortcuts import get_object_or_404
from django import forms
from django.db.models.manager import Manager
from django.utils.translation import ugettext as _
from mezzanine.accounts.forms import LoginForm, PasswordResetForm, ProfileForm, get_profile_for_user
from crispy_forms.helper import FormHelper
from crispy_forms.layout import Submit, Layout, Fieldset, ButtonHolder, Div, Field, HTML
from crispy_forms.bootstrap import PrependedText, InlineRadios, FieldWithButtons, StrictButton
from captcha.fields import CaptchaField
def format_parcel_force_import_job_create_form(form):
form.helper = FormHelper()
form.helper.form_class = 'form-horizontal'
form.helper.label_class = 'col-lg-5'
form.helper.field_class = 'col-lg-7'
form.fields['input_file'].label = _(u"选择PDF文件")
form.helper.layout = Layout(
'input_file',
ButtonHolder(
Submit('submit', _(u"上载"), css_class='btn-block btn-lg btn-success btn'),
)
)
return form
class QueryOrderForm(forms.Form):
ROUTE_CHOICES = (('order_system', _(u"包税线路")), ('parcel_force', _(u"Parcel Force")))
DAYS_CHOICES = (('7', _(u"一周")), ('14', _(u"两周")), ('31', _(u"一个月")), ('62', _(u"两个月")),)
route = forms.ChoiceField(label=_(u"线路选择"), choices=ROUTE_CHOICES, required=True, initial='order_system')
name = forms.CharField(label=_(u"收件人姓名"), required=False)
mobile = forms.CharField(label=_(u"收件人手机号码(无区号)"), required=False)
id = forms.CharField(label=_(u"收件人身份证号码"), required=False)
days = forms.ChoiceField(label=_(u"下单时间范围"), choices=DAYS_CHOICES, required=True, initial=31)
captcha = CaptchaField(label=_(u"验证码"))
class Media:
js = ('js/route_choice.js',)
def __init__(self, *args, **kwargs):
super(QueryOrderForm, self).__init__(*args, **kwargs)
self.helper = FormHelper()
self.helper.form_class = 'form-horizontal'
self.helper.label_class = 'cols-sm-2'
self.helper.field_class = 'cols-sm-10'
self.helper.layout = Layout(
InlineRadios('route'),
Div(
HTML("<span>" + _(u"信息填写") + "</span>"), css_class="strike"
),
PrependedText('name', '<i class="fa-user fa" aria-hidden="true"></i>', placeholder=_(u"收件人姓名"),
wrapper_class='order_system'),
PrependedText('mobile', '<i class="fa-mobile fa" aria-hidden="true"></i>', placeholder=_(u"收件人手机号码")),
Div(
HTML("<span>" + _(u"或") + "</span>"), css_class="strike order_system"
),
PrependedText('id', '<i class="fa-id-card fa" aria-hidden="true"></i>', placeholder=_(u"收件人身份证号码"),
wrapper_class='order_system'),
InlineRadios('days'),
HTML("<hr/>"),
Field('captcha', placeholder=_(u"输入验证码")),
ButtonHolder(
Submit('submit', _(u"查询"), css_class='btn-block btn-lg login-button'),
css_class='form-group',
)
)
self.valid_days = set([x[0] for x in self.DAYS_CHOICES])
def clean(self):
error = {}
route = self.cleaned_data['route']
id = self.cleaned_data.get('id', None)
name = self.cleaned_data.get('name', None)
mobile = self.cleaned_data.get('mobile', None)
days = self.cleaned_data.get('days', None)
if days not in self.valid_days:
error['days'] = _(u"非法选项")
if (route == 'order_system' and (id or (name and mobile))) or (route == 'parcel_force' and mobile):
pass
else:
for field in ('id', 'name', 'mobile'):
if not self.cleaned_data.get(field, None):
error[field] = _(u"请填写此字段")
raise forms.ValidationError(error)
class TrackShippingForm(forms.Form):
order_number = forms.CharField(min_length=8, max_length=25,
validators=[
RegexValidator(
regex=r'^[a-zA-Z\d]+$',
message=_(u'订单号格式错误'),
),
]
)
def __init__(self, *args, **kwargs):
super(TrackShippingForm, self).__init__(*args, **kwargs)
self.helper = FormHelper()
self.helper.form_show_labels = False
self.helper.form_id = "track-form"
self.helper.layout = Layout(
FieldWithButtons(Field('order_number', placeholder=_(u"输入订单号")),
StrictButton("<i class='fa fa-search'></i> " + _(u"立刻查询"), type="submit",
css_class="btn-primary", id="track-submit"))
)
| [
"mail.jie.jiang@gmail.com"
] | mail.jie.jiang@gmail.com |
f405684acdf758881166e153094be05f1bd7e74f | 0191140830e827ddfde9300d5cc5962018a7bac1 | /stats/repository/profile_repository.py | e5cb1f59a579dc58327fccc72662973f10bf9cc2 | [] | no_license | NicolleLouis/LouisNicolle | d816a60f30d92a9c2bc1b6ef6443c477505bf1bc | b99ae034d58afce5670d0b2fb0e5f3ce57bf1449 | refs/heads/master | 2023-08-17T20:37:29.024430 | 2021-09-13T14:26:02 | 2021-09-13T14:26:02 | 291,709,252 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 253 | py | from stats.models.profile import Profile
class ProfileRepository:
@staticmethod
def get_queryset():
return Profile.objects.all()
@staticmethod
def get_by_id(profile_id):
return Profile.objects.get(user__id=profile_id)
| [
"louisxnicolle@gmail.com"
] | louisxnicolle@gmail.com |
15a5b9f4edafc0425bbc71ad0fadb13380abbce3 | 8dcd3ee098b4f5b80879c37a62292f42f6b2ae17 | /venv/Lib/site-packages/pythonwin/pywin/Demos/app/basictimerapp.py | 46531f04cc5ac47ca4ff16d82187736d37233b04 | [] | no_license | GregVargas1999/InfinityAreaInfo | 53fdfefc11c4af8f5d2b8f511f7461d11a3f7533 | 2e4a7c6a2424514ca0ec58c9153eb08dc8e09a4a | refs/heads/master | 2022-12-01T20:26:05.388878 | 2020-08-11T18:37:05 | 2020-08-11T18:37:05 | 286,821,452 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,609 | py | # basictimerapp - a really simple timer application.
# This should be run using the command line:
# pythonwin /app demos\basictimerapp.py
import string
import sys
import time
import timer
import win32api
import win32con
import win32ui
from pywin.framework import app, cmdline, dlgappcore, cmdline
class TimerAppDialog(dlgappcore.AppDialog):
softspace = 1
def __init__(self, appName=""):
dlgappcore.AppDialog.__init__(self, win32ui.IDD_GENERAL_STATUS)
self.timerAppName = appName
self.argOff = 0
if len(self.timerAppName) == 0:
if len(sys.argv) > 1 and sys.argv[1][0] != '/':
self.timerAppName = sys.argv[1]
self.argOff = 1
def PreDoModal(self):
# sys.stderr = sys.stdout
pass
def ProcessArgs(self, args):
for arg in args:
if arg == "/now":
self.OnOK()
def OnInitDialog(self):
win32ui.SetProfileFileName('pytimer.ini')
self.title = win32ui.GetProfileVal(self.timerAppName, "Title", "Remote System Timer")
self.buildTimer = win32ui.GetProfileVal(self.timerAppName, "Timer", "EachMinuteIntervaler()")
self.doWork = win32ui.GetProfileVal(self.timerAppName, "Work", "DoDemoWork()")
# replace "\n" with real \n.
self.doWork = self.doWork.replace('\\n', '\n')
dlgappcore.AppDialog.OnInitDialog(self)
self.SetWindowText(self.title)
self.prompt1 = self.GetDlgItem(win32ui.IDC_PROMPT1)
self.prompt2 = self.GetDlgItem(win32ui.IDC_PROMPT2)
self.prompt3 = self.GetDlgItem(win32ui.IDC_PROMPT3)
self.butOK = self.GetDlgItem(win32con.IDOK)
self.butCancel = self.GetDlgItem(win32con.IDCANCEL)
self.prompt1.SetWindowText("Python Timer App")
self.prompt2.SetWindowText("")
self.prompt3.SetWindowText("")
self.butOK.SetWindowText("Do it now")
self.butCancel.SetWindowText("Close")
self.timerManager = TimerManager(self)
self.ProcessArgs(sys.argv[self.argOff:])
self.timerManager.go()
return 1
def OnDestroy(self, msg):
dlgappcore.AppDialog.OnDestroy(self, msg)
self.timerManager.stop()
def OnOK(self):
# stop the timer, then restart after setting special boolean
self.timerManager.stop()
self.timerManager.bConnectNow = 1
self.timerManager.go()
return
# def OnCancel(self): default behaviour - cancel == close.
# return
class TimerManager:
def __init__(self, dlg):
self.dlg = dlg
self.timerId = None
self.intervaler = eval(self.dlg.buildTimer)
self.bConnectNow = 0
self.bHaveSetPrompt1 = 0
def CaptureOutput(self):
self.oldOut = sys.stdout
self.oldErr = sys.stderr
sys.stdout = sys.stderr = self
self.bHaveSetPrompt1 = 0
def ReleaseOutput(self):
sys.stdout = self.oldOut
sys.stderr = self.oldErr
def write(self, str):
s = str.strip()
if len(s):
if self.bHaveSetPrompt1:
dest = self.dlg.prompt3
else:
dest = self.dlg.prompt1
self.bHaveSetPrompt1 = 1
dest.SetWindowText(s)
def go(self):
self.OnTimer(None, None)
def stop(self):
if self.timerId: timer.kill_timer(self.timerId)
self.timerId = None
def OnTimer(self, id, timeVal):
if id: timer.kill_timer(id)
if self.intervaler.IsTime() or self.bConnectNow:
# do the work.
try:
self.dlg.SetWindowText(self.dlg.title + " - Working...")
self.dlg.butOK.EnableWindow(0)
self.dlg.butCancel.EnableWindow(0)
self.CaptureOutput()
try:
exec(self.dlg.doWork)
print("The last operation completed successfully.")
except:
t, v, tb = sys.exc_info()
str = "Failed: %s: %s" % (t, repr(v))
print(str)
self.oldErr.write(str)
tb = None # Prevent cycle
finally:
self.ReleaseOutput()
self.dlg.butOK.EnableWindow()
self.dlg.butCancel.EnableWindow()
self.dlg.SetWindowText(self.dlg.title)
else:
now = time.time()
nextTime = self.intervaler.GetNextTime()
if nextTime:
timeDiffSeconds = nextTime - now
timeDiffMinutes = int(timeDiffSeconds / 60)
timeDiffSeconds = timeDiffSeconds % 60
timeDiffHours = int(timeDiffMinutes / 60)
timeDiffMinutes = timeDiffMinutes % 60
self.dlg.prompt1.SetWindowText(
"Next connection due in %02d:%02d:%02d" % (timeDiffHours, timeDiffMinutes, timeDiffSeconds))
self.timerId = timer.set_timer(self.intervaler.GetWakeupInterval(), self.OnTimer)
self.bConnectNow = 0
class TimerIntervaler:
def __init__(self):
self.nextTime = None
self.wakeUpInterval = 2000
def GetWakeupInterval(self):
return self.wakeUpInterval
def GetNextTime(self):
return self.nextTime
def IsTime(self):
now = time.time()
if self.nextTime is None:
self.nextTime = self.SetFirstTime(now)
ret = 0
if now >= self.nextTime:
ret = 1
self.nextTime = self.SetNextTime(self.nextTime, now)
# do the work.
return ret
class EachAnyIntervaler(TimerIntervaler):
def __init__(self, timeAt, timePos, timeAdd, wakeUpInterval=None):
TimerIntervaler.__init__(self)
self.timeAt = timeAt
self.timePos = timePos
self.timeAdd = timeAdd
if wakeUpInterval:
self.wakeUpInterval = wakeUpInterval
def SetFirstTime(self, now):
timeTup = time.localtime(now)
lst = []
for item in timeTup:
lst.append(item)
bAdd = timeTup[self.timePos] > self.timeAt
lst[self.timePos] = self.timeAt
for pos in range(self.timePos + 1, 6):
lst[pos] = 0
ret = time.mktime(tuple(lst))
if (bAdd):
ret = ret + self.timeAdd
return ret;
def SetNextTime(self, lastTime, now):
return lastTime + self.timeAdd
class EachMinuteIntervaler(EachAnyIntervaler):
def __init__(self, at=0):
EachAnyIntervaler.__init__(self, at, 5, 60, 2000)
class EachHourIntervaler(EachAnyIntervaler):
def __init__(self, at=0):
EachAnyIntervaler.__init__(self, at, 4, 3600, 10000)
class EachDayIntervaler(EachAnyIntervaler):
def __init__(self, at=0):
EachAnyIntervaler.__init__(self, at, 3, 86400, 10000)
class TimerDialogApp(dlgappcore.DialogApp):
def CreateDialog(self):
return TimerAppDialog()
def DoDemoWork():
print("Doing the work...")
print("About to connect")
win32api.MessageBeep(win32con.MB_ICONASTERISK)
win32api.Sleep(2000)
print("Doing something else...")
win32api.MessageBeep(win32con.MB_ICONEXCLAMATION)
win32api.Sleep(2000)
print("More work.")
win32api.MessageBeep(win32con.MB_ICONHAND)
win32api.Sleep(2000)
print("The last bit.")
win32api.MessageBeep(win32con.MB_OK)
win32api.Sleep(2000)
app = TimerDialogApp()
def t():
t = TimerAppDialog("Test Dialog")
t.DoModal()
return t
if __name__ == '__main__':
import demoutils
demoutils.NeedApp()
| [
"44142880+GregVargas1999@users.noreply.github.com"
] | 44142880+GregVargas1999@users.noreply.github.com |
3ea816954b53404bf8485c9a0d61ec4f52b95ec5 | ea3e35eb82436bfa1e544346267b126bd80888e6 | /verb_filter.py | 6f71b4c06076e8329e81faadebc86f830d37683a | [] | no_license | amazingguni/commits-dataset | 5800ed7c9624d036c0e286b7e6e14887ed6d261e | 688eea9b1906859e2538cd8eda50dac82b006738 | refs/heads/master | 2022-11-29T18:14:43.623696 | 2020-08-04T08:03:54 | 2020-08-04T08:03:54 | 261,342,299 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,425 | py | import sys
from pathlib import Path
from utils import tokenize, overlap_two_seq, starts_with_verb, remove_tag_issue_number,\
remove_http_urls, remove_redundant_white_space, remove_last_special_char, starts_with_verb2, remove_no_english_str
import random
def main(path):
preprocessed_dir = Path(path)
f_all_index = open(preprocessed_dir / 'all.index')
f_all_target = open(preprocessed_dir / 'all.target')
f_all_origin_target = open(preprocessed_dir / 'all.origin.target')
f_all_line_diff = open(preprocessed_dir / 'all.line.source')
f_all_word_diff = open(preprocessed_dir / 'all.word.source')
f_filtered_index = open(preprocessed_dir / 'all.verbfilter.index', 'w')
f_filtered_target = open(preprocessed_dir / 'all.verbfilter.target', 'w')
f_filtered_line_diff = open(preprocessed_dir / 'all.verbfilter.line.source', 'w')
f_filtered_word_diff = open(preprocessed_dir / 'all.verbfilter.word.source', 'w')
total_cnt = 0
filtered_cnt = 0
word_not_overlap_cnt = 0
for index, origin_target, target, line_diff, word_diff in zip(f_all_index, f_all_origin_target, f_all_target, f_all_line_diff, f_all_word_diff):
total_cnt += 1
target = target.strip()
origin_target = origin_target.strip()
if not target:
continue
line_diff = line_diff.strip()
word_diff = word_diff.strip()
if 'revert' in target.lower():
continue
target_words = target.split()
if not starts_with_verb(target.lower().split()):
continue
word_diff_words = word_diff.strip()
if not overlap_two_seq(word_diff_words, target_words):
word_not_overlap_cnt += 1
continue
f_filtered_index.write(f'{index.strip()}\n')
f_filtered_target.write(f'{target}\n')
f_filtered_line_diff.write(f'{line_diff}\n')
f_filtered_word_diff.write(f'{word_diff}\n')
filtered_cnt += 1
print(f'Filtered {filtered_cnt} data generated(total: {total_cnt})')
print(f'word_not_overlap_cnt: {word_not_overlap_cnt}')
if __name__ == '__main__':
import argparse
parser = argparse.ArgumentParser(
description='Verb filter commit message dataset')
parser.add_argument('--path', type=str, metavar='N', required=True, help='Directory which contains all dataset')
args = parser.parse_args()
main(args.path)
| [
"amazingguni@gmail.com"
] | amazingguni@gmail.com |
23c84214c30992885af2c6f196c75971c9b62e9f | e88a8bb96ee85d52fdd21613356a1b48a0aba18e | /src/analyse/run.py | 2dcf1d51eec94f97723cefce71980c8ca2528fdd | [
"LicenseRef-scancode-unknown-license-reference",
"MIT"
] | permissive | timtroendle/money-land | b5c3f527e7a30eaa25dd47cf2f1082c5dbb6bb29 | fe3ed6e531cfe91156886d4fa685a14840749f36 | refs/heads/master | 2023-06-28T16:42:27.982087 | 2021-08-04T15:07:51 | 2021-08-04T15:07:51 | 229,407,310 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,718 | py | import calliope
from calliope.core.util.logging import set_log_verbosity
import calliope.backend.run
import calliope.backend.pyomo
import calliope.core.attrdict
import calliope.exceptions
from calliope.analysis import postprocess
import pyomo.core as po
ROOFTOP_TECH_NAME1 = "roof_mounted_pv_n"
ROOFTOP_TECH_NAME2 = "roof_mounted_pv_e_w"
ROOFTOP_TECH_NAME3 = "roof_mounted_pv_s_flat"
UTILITY_TECH_NAME = "open_field_pv"
WIND_TECH_NAME1 = "wind_onshore_monopoly"
WIND_TECH_NAME2 = "wind_onshore_competing"
OFFSHORE_TECH_NAME = "wind_offshore"
def run(path_to_model, override_dict, roof_share, util_share, wind_share, offshore_share,
units_without_shore, overrides, path_to_output):
assert roof_share + util_share + wind_share + offshore_share == 100
set_log_verbosity("info", include_solver_output=True, capture_warnings=True)
model = calliope.Model(
path_to_model,
scenario=",".join(overrides),
override_dict=override_dict
)
model.run(build_only=True)
pyomo_model = model.backend._backend
pyomo_model.roof_constraint = po.Constraint(pyomo_model.locs, rule=rooftop_constraint(roof_share / 100))
pyomo_model.util_constraint = po.Constraint(pyomo_model.locs, rule=utility_constraint(util_share / 100))
pyomo_model.wind_constraint = po.Constraint(
pyomo_model.locs,
rule=wind_constraint(wind_share / 100, offshore_share / 100, units_without_shore)
)
pyomo_model.offshore_constraint = po.Constraint(
pyomo_model.locs,
rule=offshore_constraint(offshore_share / 100, units_without_shore)
)
model = run_updated_model(model)
scenario = f"roof-{roof_share}-percent,util-{util_share}-percent,wind-{wind_share}-percent,offshore-{offshore_share}-percent"
model._model_data.attrs["scenario"] = scenario
model.to_netcdf(path_to_output)
def rooftop_constraint(share):
def rooftop_constraint(model, loc):
lhs = sum(
model.energy_cap[loc_tech]
for loc_tech in model.loc_techs
if is_rooftop_pv(loc_tech) and (loc_tech.split("::")[0] == loc)
)
rhs = share * sum(
model.energy_cap[loc_tech]
for loc_tech in model.loc_techs
if is_pv_or_wind(loc_tech) and (loc_tech.split("::")[0] == loc)
)
return lhs == rhs
return rooftop_constraint
def utility_constraint(share):
def utility_constraint(model, loc):
lhs = sum(
model.energy_cap[loc_tech]
for loc_tech in model.loc_techs
if loc_tech.split("::") == [loc, UTILITY_TECH_NAME]
)
rhs = share * sum(
model.energy_cap[loc_tech]
for loc_tech in model.loc_techs
if is_pv_or_wind(loc_tech) and (loc_tech.split("::")[0] == loc)
)
return lhs == rhs
return utility_constraint
def wind_constraint(wind_share, offshore_share, units_without_shore):
def wind_constraint(model, loc):
if offshore_share > 0 and loc in units_without_shore:
share = wind_share + offshore_share
else:
share = wind_share
lhs = sum(
model.energy_cap[loc_tech]
for loc_tech in model.loc_techs
if is_wind(loc_tech) and (loc_tech.split("::")[0] == loc)
)
rhs = share * sum(
model.energy_cap[loc_tech]
for loc_tech in model.loc_techs
if is_pv_or_wind(loc_tech) and (loc_tech.split("::")[0] == loc)
)
return lhs == rhs
return wind_constraint
def offshore_constraint(offshore_share, units_without_shore):
def offshore_constraint(model, loc):
if offshore_share > 0 and loc in units_without_shore:
share = 0
else:
share = offshore_share
lhs = sum(
model.energy_cap[loc_tech]
for loc_tech in model.loc_techs
if loc_tech.split("::") == [loc, OFFSHORE_TECH_NAME]
)
rhs = share * sum(
model.energy_cap[loc_tech]
for loc_tech in model.loc_techs
if is_pv_or_wind(loc_tech) and (loc_tech.split("::")[0] == loc)
)
return lhs == rhs
return offshore_constraint
def is_wind(loc_tech):
loc_tech = str(loc_tech)
return (
(WIND_TECH_NAME1 in loc_tech)
or (WIND_TECH_NAME2 in loc_tech)
)
def is_rooftop_pv(loc_tech):
loc_tech = str(loc_tech)
return (
(ROOFTOP_TECH_NAME1 in loc_tech)
or (ROOFTOP_TECH_NAME2 in loc_tech)
or (ROOFTOP_TECH_NAME3 in loc_tech)
)
def is_pv_or_wind(loc_tech):
loc_tech = str(loc_tech)
return (
(ROOFTOP_TECH_NAME1 in loc_tech)
or (ROOFTOP_TECH_NAME2 in loc_tech)
or (ROOFTOP_TECH_NAME3 in loc_tech)
or (UTILITY_TECH_NAME in loc_tech)
or (WIND_TECH_NAME1 in loc_tech)
or (WIND_TECH_NAME2 in loc_tech)
or (OFFSHORE_TECH_NAME in loc_tech)
)
def run_updated_model(model):
# This method is largely taken from various places within Calliope's core code,
# as Calliope does not offer this functionality.
# The code is thus copyright Calliope authors.
backend_model = model.backend._backend
backend_model.__calliope_run_config = calliope.core.attrdict.AttrDict.from_yaml_string(
model._model_data.attrs['run_config']
)
results, backend_mode = calliope.backend.run.run_plan(
model_data=model._model_data,
timings=model._timings,
backend=calliope.backend.pyomo.model,
backend_rerun=backend_model,
build_only=False
)
# Add additional post-processed result variables to results
if results.attrs.get('termination_condition', None) in ['optimal', 'feasible']:
results = postprocess.postprocess_model_results(
results, model._model_data, model._timings
)
else:
raise calliope.exceptions.BackendError("Problem is non optimal.")
for var in results.data_vars:
results[var].attrs['is_result'] = 1
model._model_data.update(results)
model._model_data.attrs.update(results.attrs)
model.results = model._model_data.filter_by_attrs(is_result=1)
return model
if __name__ == "__main__":
run(
path_to_model=snakemake.input.model,
override_dict=snakemake.params.override_dict,
roof_share=int(snakemake.wildcards.roof),
util_share=int(snakemake.wildcards.util),
wind_share=int(snakemake.wildcards.wind),
offshore_share=int(snakemake.wildcards.offshore),
units_without_shore=snakemake.params.no_shore,
overrides=snakemake.params.overrides,
path_to_output=snakemake.output[0]
)
| [
"tim.troendle@usys.ethz.ch"
] | tim.troendle@usys.ethz.ch |
8aa2163b58f7138a4de76912ea66304b7649d175 | 53fab060fa262e5d5026e0807d93c75fb81e67b9 | /backup/user_327/ch178_2020_08_14_14_01_59_028335.py | 7badb3c3c79c26e0d310b79dcaf29de895cd1704 | [] | no_license | gabriellaec/desoft-analise-exercicios | b77c6999424c5ce7e44086a12589a0ad43d6adca | 01940ab0897aa6005764fc220b900e4d6161d36b | refs/heads/main | 2023-01-31T17:19:42.050628 | 2020-12-16T05:21:31 | 2020-12-16T05:21:31 | 306,735,108 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 214 | py | def junta_nomes(l0,l1,l2):
result = []
for i in l0:
for e in l2:
result.append(i + " " + e)
for a in l1:
for b in l2:
result.append(a + " " + b)
return result | [
"you@example.com"
] | you@example.com |
f9d3a990ff56d875f011dbad5ee2666d88489f69 | d6b7b16b6e9c3287ffcac3869d11b4c4286f4b89 | /pmg/models/posts.py | 8f1926e2c73c2377ec6ccd75b099b8ed739e0bc9 | [
"Apache-2.0"
] | permissive | havanhuy1997/pmg-cms-2 | 39e3e66f2b9f57a347e56b93d963c87554983fa7 | 21571235cf3d9552013bca29ab9af288b08e00d6 | refs/heads/master | 2020-06-27T20:05:05.776667 | 2019-08-01T07:46:47 | 2019-08-01T07:46:47 | 200,036,932 | 0 | 0 | Apache-2.0 | 2019-08-01T11:21:00 | 2019-08-01T11:20:59 | null | UTF-8 | Python | false | false | 1,502 | py | from sqlalchemy import func, sql
from sqlalchemy.orm import validates
from .base import FileLinkMixin
from pmg import db
class Post(db.Model):
__tablename__ = 'post'
id = db.Column(db.Integer, primary_key=True)
title = db.Column(db.String, nullable=False)
slug = db.Column(db.String, nullable=False, unique=True, index=True)
featured = db.Column(db.Boolean(), default=False, server_default=sql.expression.false(), nullable=False, index=True)
body = db.Column(db.Text)
date = db.Column(db.DateTime(timezone=True), index=True, unique=False, nullable=False, server_default=func.now())
files = db.relationship("PostFile", lazy='joined')
created_at = db.Column(db.DateTime(timezone=True), index=True, unique=False, nullable=False, server_default=func.now())
updated_at = db.Column(db.DateTime(timezone=True), server_default=func.now(), onupdate=func.current_timestamp())
@validates('slug')
def validate_slug(self, key, value):
return value.strip('/')
def __unicode__(self):
return unicode(self.title)
class PostFile(FileLinkMixin, db.Model):
__tablename__ = "post_files"
id = db.Column(db.Integer, primary_key=True)
post_id = db.Column(db.Integer, db.ForeignKey('post.id', ondelete='CASCADE'), index=True, nullable=False)
post = db.relationship('Post')
file_id = db.Column(db.Integer, db.ForeignKey('file.id', ondelete="CASCADE"), index=True, nullable=False)
file = db.relationship('File', lazy='joined')
| [
"jbothma@gmail.com"
] | jbothma@gmail.com |
6bf6394e69ea92fd3ce0755abb504a5cbf668f18 | ece0d321e48f182832252b23db1df0c21b78f20c | /engine/2.80/scripts/addons/io_mesh_raw/export_raw.py | b5c5ef36fea3b28cc3b7d5627738ef87e4a9815a | [
"GPL-3.0-only",
"Font-exception-2.0",
"GPL-3.0-or-later",
"Apache-2.0",
"LicenseRef-scancode-public-domain",
"LicenseRef-scancode-unknown-license-reference",
"LicenseRef-scancode-public-domain-disclaimer",
"Bitstream-Vera",
"LicenseRef-scancode-blender-2010",
"LGPL-2.1-or-later",
"GPL-2.0-or-later",
"GPL-2.0-only",
"LGPL-2.0-only",
"PSF-2.0",
"LicenseRef-scancode-free-unknown",
"LicenseRef-scancode-proprietary-license",
"GPL-1.0-or-later",
"BSD-2-Clause",
"Unlicense"
] | permissive | byteinc/Phasor | 47d4e48a52fa562dfa1a2dbe493f8ec9e94625b9 | f7d23a489c2b4bcc3c1961ac955926484ff8b8d9 | refs/heads/master | 2022-10-25T17:05:01.585032 | 2019-03-16T19:24:22 | 2019-03-16T19:24:22 | 175,723,233 | 3 | 1 | Unlicense | 2022-10-21T07:02:37 | 2019-03-15T00:58:08 | Python | UTF-8 | Python | false | false | 2,896 | py | # ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# ##### END GPL LICENSE BLOCK #####
# <pep8-80 compliant>
"""
This script exports a Mesh to a RAW triangle format file.
The raw triangle format is very simple; it has no verts or faces lists.
It's just a simple ascii text file with the vertices of each triangle
listed on each line. In addition, also quads can be exported as a line
of 12 values (this was the default before blender 2.5). Now default
settings will triangulate the mesh.
Usage:
Execute this script from the "File->Export" menu. You can select
whether modifiers should be applied and if the mesh is triangulated.
"""
import bpy
def faceToTriangles(face):
triangles = []
if len(face) == 4:
triangles.append([face[0], face[1], face[2]])
triangles.append([face[2], face[3], face[0]])
else:
triangles.append(face)
return triangles
def faceValues(face, mesh, matrix):
fv = []
for verti in face.vertices:
fv.append((matrix * mesh.vertices[verti].co)[:])
return fv
def faceToLine(face):
return " ".join([("%.6f %.6f %.6f" % v) for v in face] + ["\n"])
def write(filepath,
applyMods=True,
triangulate=True,
):
scene = bpy.context.scene
faces = []
for obj in bpy.context.selected_objects:
if applyMods or obj.type != 'MESH':
try:
me = obj.to_mesh(scene, True, "PREVIEW")
except:
me = None
is_tmp_mesh = True
else:
me = obj.data
if not me.tessfaces and me.polygons:
me.calc_tessface()
is_tmp_mesh = False
if me is not None:
matrix = obj.matrix_world.copy()
for face in me.tessfaces:
fv = faceValues(face, me, matrix)
if triangulate:
faces.extend(faceToTriangles(fv))
else:
faces.append(fv)
if is_tmp_mesh:
bpy.data.meshes.remove(me)
# write the faces to a file
file = open(filepath, "w")
for face in faces:
file.write(faceToLine(face))
file.close()
| [
"admin@irradiate.net"
] | admin@irradiate.net |
3e6f3db8997dc8059f9c74f10b8bd93d869af08e | ece0d321e48f182832252b23db1df0c21b78f20c | /engine/2.80/scripts/addons/presets/operator/mesh.primitive_xyz_function_surface/catalan.py | 8c8767584bda51ce6cf33a934249ef20f9c3c6f2 | [
"GPL-3.0-only",
"Font-exception-2.0",
"GPL-3.0-or-later",
"Apache-2.0",
"LicenseRef-scancode-public-domain",
"LicenseRef-scancode-unknown-license-reference",
"LicenseRef-scancode-public-domain-disclaimer",
"Bitstream-Vera",
"LicenseRef-scancode-blender-2010",
"LGPL-2.1-or-later",
"GPL-2.0-or-later",
"GPL-2.0-only",
"LGPL-2.0-only",
"PSF-2.0",
"LicenseRef-scancode-free-unknown",
"LicenseRef-scancode-proprietary-license",
"GPL-1.0-or-later",
"BSD-2-Clause",
"Unlicense"
] | permissive | byteinc/Phasor | 47d4e48a52fa562dfa1a2dbe493f8ec9e94625b9 | f7d23a489c2b4bcc3c1961ac955926484ff8b8d9 | refs/heads/master | 2022-10-25T17:05:01.585032 | 2019-03-16T19:24:22 | 2019-03-16T19:24:22 | 175,723,233 | 3 | 1 | Unlicense | 2022-10-21T07:02:37 | 2019-03-15T00:58:08 | Python | UTF-8 | Python | false | false | 446 | py | import bpy
op = bpy.context.active_operator
op.x_eq = 'u-sin(u)*cosh(v)'
op.y_eq = '4*sin(1/2*u)*sinh(v/2)'
op.z_eq = '1-cos(u)*cosh(v)'
op.range_u_min = -3.1415927410125732
op.range_u_max = 9.42477798461914
op.range_u_step = 32
op.wrap_u = False
op.range_v_min = -2.0
op.range_v_max = 2.0
op.range_v_step = 128
op.wrap_v = False
op.close_v = False
op.n_eq = 1
op.a_eq = '0'
op.b_eq = '0'
op.c_eq = '0'
op.f_eq = '0'
op.g_eq = '0'
op.h_eq = '0'
| [
"admin@irradiate.net"
] | admin@irradiate.net |
bfae4fb55b4f57600152cc8d3cb55d720b812077 | b9959cb19e518674b722e2a6fb879056c0f1ba83 | /kozmic/builds/views.py | 73a8d199b00b7c502aeff64b890e322d89a53943 | [] | no_license | bazilio91/kozmic-ci | d186d7c5b61081ea5eec972d36091c17ecfdf4be | f3ddc9145e4eb93803caae1511e1bcb4a9b18c7a | refs/heads/master | 2021-01-16T01:07:50.770776 | 2014-12-24T06:59:28 | 2014-12-24T07:40:16 | 31,765,700 | 0 | 0 | null | 2015-03-06T11:05:36 | 2015-03-06T11:05:35 | null | UTF-8 | Python | false | false | 3,379 | py | import json
import github3
import sqlalchemy
from flask import request, redirect, url_for
from kozmic import db, csrf
from kozmic.models import Project, Build, Hook, HookCall
from . import bp, tasks
def get_ref_and_sha(payload):
action = payload.get('action')
if action is None:
# See `tests.func_fixtures.PUSH_HOOK_CALL_DATA` for payload
ref = payload.get('ref') # ref looks like "refs/heads/master"
if not ref or not ref.startswith('refs/heads/'):
return None
prefix_length = len('refs/heads/')
ref = ref[prefix_length:]
sha = payload.get('head_commit', {}).get('id')
if not sha:
return None
return ref, sha
elif action in ('opened', 'synchronize'):
# See `tests.func_fixtures.PULL_REQUEST_HOOK_CALL_DATA` for payload
gh_pull = github3.pulls.PullRequest(payload.get('pull_request', {}))
try:
return gh_pull.head.ref, gh_pull.head.sha
except:
return None
else:
return None
@csrf.exempt
@bp.route('/_hooks/hook/<int:id>/', methods=('POST',))
def hook(id):
hook = Hook.query.get_or_404(id)
payload = json.loads(request.data)
if set(payload.keys()) == {'zen', 'hook_id'}:
# http://developer.github.com/webhooks/#ping-event
if hook.gh_id != payload['hook_id']:
return 'Wrong hook URL', 400
else:
return 'OK'
ref_and_sha = get_ref_and_sha(payload)
if not ref_and_sha:
return 'Failed to fetch ref and commit from payload', 400
ref, sha = ref_and_sha
gh_commit = hook.project.gh.git_commit(sha)
build = hook.project.builds.filter(
Build.gh_commit_ref == ref,
Build.gh_commit_sha == gh_commit.sha).first()
if not build:
build = Build(
project=hook.project,
status='enqueued',
gh_commit_ref=ref,
gh_commit_sha=gh_commit.sha,
gh_commit_author=gh_commit.author['name'],
gh_commit_message=gh_commit.message)
build.calculate_number()
db.session.add(build)
hook_call = HookCall(
hook=hook,
build=build,
gh_payload=payload)
db.session.add(hook_call)
try:
db.session.commit()
except sqlalchemy.exc.IntegrityError:
# Commit may fail due to "unique_ref_and_sha_within_project"
# constraint on Build or "unique_hook_call_within_build" on
# HookCall. It means that GitHub called this hook twice
# (for example, on push and pull request sync events)
# at the same time and Build and HookCall has been just
# committed by another transaction.
db.session.rollback()
return 'OK'
tasks.do_job.delay(hook_call_id=hook_call.id)
return 'OK'
@bp.route('/badges/<gh_login>/<gh_name>/<ref>')
def badge(gh_login, gh_name, ref):
project = Project.query.filter_by(
gh_login=gh_login, gh_name=gh_name).first_or_404()
build = project.get_latest_build(ref=ref)
badge = build and build.status or 'success'
response = redirect(url_for(
'static',
filename='img/badges/{}.png'.format(badge),
_external=True,
# Use https so that GitHub does not cache images served from HTTPS
_scheme='https'))
response.status_code = 307
return response
| [
"anthony.romanovich@gmail.com"
] | anthony.romanovich@gmail.com |
64060e6e7e281f5870da0bd130d6b4b05662328c | e2ca3205bb5240a1e4c87de0bdb13faa70241f16 | /src/verify/image/alpine/proxy/setup.py | f3dc277e61ded0dff9250e7536da94e2549aecb9 | [
"Apache-2.0"
] | permissive | random-python/nspawn | 67da4d96d54dcbf537adaf3421a03020ea5c1769 | 25f53aa565c0685842a89d48d949b0459b1de0a6 | refs/heads/master | 2023-05-11T21:27:44.557577 | 2023-05-07T17:00:18 | 2023-05-07T17:00:18 | 184,904,641 | 21 | 2 | null | null | null | null | UTF-8 | Python | false | false | 1,760 | py | #!/usr/bin/env python
# import os, runpy
# this_dir = os.path.dirname(os.path.abspath(__file__))
# runpy.run_path(f"{this_dir}/a.py")
from nspawn.setup import *
import platform
epoch = "3.10"
release = f"{epoch}.3"
hardware = platform.machine()
machine_name = "alpa-proxy"
network_face = TOOL.select_interface()
IMAGE(f"file://localhost/tmp/nspawn/repo/alpine/proxy/default-{release}-{hardware}.tar.gz")
MACHINE(
# define machine name
name=machine_name,
# extra entries for [Unit] section
unit_conf=[
"Description=hello-kitty", # override description
],
# extra entries for [Service] section
service_conf=[
"CPUQuota=10%", # throttle processor usage
],
# extra entries for [Install] section
install_conf=[
"# user comment: hello-kitty", # inject user comment
],
)
WITH(
# Hostname="alpase", # needs systemd v 239
Boot='yes', # auto detect /bin/init program
Quiet="yes", # suppress "press to escape" message
KeepUnit="yes", # use service unit as nspawn scope
Register="yes", # expose service unit with machinectl
MACVLAN=network_face,
# Capability='all',
)
# use host ssh login for container
WITH(BindReadOnly="/root/.ssh/authorized_keys")
# alpine system entry
# EXEC(['/sbin/init'])
# EXEC(['/bin/ls', '-Rlas', f"/root"])
# external config
config_dir = f"{TOOL.nspawn_tempdir()}/machine/{machine_name}"
# externally configurable hostname
hostname_path = f"{config_dir}/etc/hostname"
WITH(BindReadOnly=f"{hostname_path}:/etc/hostname")
CAST(source="/etc/hostname", target=hostname_path, machine_name=machine_name)
# externally exposed message log
messages_path = f"{config_dir}/var/log/messages"
WITH(Bind=f"{messages_path}:/var/log/messages")
| [
"andrei.pozolotin@gmail.com"
] | andrei.pozolotin@gmail.com |
865c1a19719a5537fde1e8a41f62e258ab016386 | 98c6ea9c884152e8340605a706efefbea6170be5 | /examples/data/Assignment_3/mtsmol017/question3.py | c1a527c00cceea2444895527ffa725ec32f0b7c3 | [] | no_license | MrHamdulay/csc3-capstone | 479d659e1dcd28040e83ebd9e3374d0ccc0c6817 | 6f0fa0fa1555ceb1b0fb33f25e9694e68b6a53d2 | refs/heads/master | 2021-03-12T21:55:57.781339 | 2014-09-22T02:22:22 | 2014-09-22T02:22:22 | 22,372,174 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 653 | py | x=input("Enter the message:\n")
y=eval(input("Enter the message repeat count:\n"))
b=eval(input("Enter the frame thickness:\n"))
v=b-1
u=len(x)
p=u+2
c=p+2*v
e=0
space=0
for i in range(v):
print("|"*e,"+","-"*c,"+","|"*e,sep="")
c=c-2
space=space+1
e=e+1
if b>0:
print("|"*v,"+",p*"-","+","|"*v,sep="")
t=1
for i in range(0):
print("|")
for i in range(y):
print("|"*v,"|"*t, " ",x, " ","|"*v,end="|",sep="")
print()
if b>0:
print("|"*v,"+",p*"-","+","|"*v,sep="")
c=p+1
space=v-1
for i in range(v):
print("|"*space,"+","-","-"*c,"+","|"*space,sep="")
c=c+2
space=space-1 | [
"jarr2000@gmail.com"
] | jarr2000@gmail.com |
b687da01dd8d2a85a8e6edf65dbb440abe5934a3 | 5a5b109bb29b21d3e2805a1cdac72261d862e2ae | /loop/ForElse.py | 2ad6e235650d27e97659178806759d836ac3da80 | [] | no_license | 3454833326/geek | 428030f7f780e9cd6b6f775b4518f295c6b6bb2d | 39214ff8c68e60b065f636da6dcb04c52e2a787a | refs/heads/master | 2021-05-27T04:20:16.035334 | 2020-04-08T23:27:25 | 2020-04-08T23:27:25 | 254,203,089 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 200 | py | for i in range(1,10):
for j in range(1,i+1):
print(f'{i}*{j}={i*j}',end=' ')
print()
for i in range(1,6):
for j in range(1,i+1):#控制
print('*',end=' ')
print()
| [
"you@example.com"
] | you@example.com |
d12a52f71e3698240804934cd88872171f43888d | de24f83a5e3768a2638ebcf13cbe717e75740168 | /moodledata/vpl_data/79/usersdata/247/43511/submittedfiles/serie1.py | 90e784d791ec3951343a353ba1a25c5d7cf6ffee | [] | no_license | rafaelperazzo/programacao-web | 95643423a35c44613b0f64bed05bd34780fe2436 | 170dd5440afb9ee68a973f3de13a99aa4c735d79 | refs/heads/master | 2021-01-12T14:06:25.773146 | 2017-12-22T16:05:45 | 2017-12-22T16:05:45 | 69,566,344 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 222 | py | # -*- coding: utf-8 -*-
import math
n=int(input('digite n: '))
d=1
c=1
soma=0
t=1
while t<=n:
d=d+1
c=d**2
t=t+1
if t%2==0:
soma=soma-(d//c)
else:
soma=soma+(d//c)
print(c)
| [
"rafael.mota@ufca.edu.br"
] | rafael.mota@ufca.edu.br |
3219371d6d9064ce91bbce562de29eee8403174a | 4da55187c399730f13c5705686f4b9af5d957a3f | /resources/sumo_exporter/crossroad.py | 066119d5b50767c46efb4945edfc1ac6845750d0 | [
"Apache-2.0"
] | permissive | Ewenwan/webots | 7111c5587100cf35a9993ab923b39b9e364e680a | 6b7b773d20359a4bcf29ad07384c5cf4698d86d3 | refs/heads/master | 2020-04-17T00:23:54.404153 | 2019-01-16T13:58:12 | 2019-01-16T13:58:12 | 166,048,591 | 2 | 0 | Apache-2.0 | 2019-01-16T13:53:50 | 2019-01-16T13:53:50 | null | UTF-8 | Python | false | false | 3,345 | py | """Road class container."""
import math
import re
from re_definitions import floatRE, intRE
from data_structures import grouper
from lxml import etree as ET
class Crossroad(object):
"""Class matching with a Webots Crossroad, containing facilities to export to SUMO junctions."""
crossroads = []
def __init__(self, crossroadType):
"""Constructor: Initialize the crossroad with a unique id."""
self.roads = [] # connected roads
self.id = 'Custom%d' % len(Crossroad.crossroads)
self.translation = [0.0, 0.0, 0.0]
self.connectedRoadIDs = []
self.shape = []
self.crossroadType = crossroadType
def init_from_wbt_string(self, wbtString):
"""Extract info from the wbtString matching the node."""
try:
self.id = re.findall(r'id\s*"([^"]*)"', wbtString)[0]
except:
pass
try:
self.translation = [float(x) for x in re.findall(r'translation\s*(%s\s*%s\s*%s)' % (floatRE, floatRE, floatRE), wbtString)[0].split()]
except:
pass
try:
self.rotation = [float(x) for x in re.findall(r'rotation\s*(%s\s*%s\s*%s\s*%s)' % (floatRE, floatRE, floatRE, floatRE), wbtString)[0].split()]
except:
self.rotation = [0.0, 1.0, 0.0, 0.0]
try:
self.connectedRoadIDs = [x.replace('"', '') for x in re.findall(r'connectedRoadIDs\s*\[([^\]]*)\]', wbtString)[0].split()]
except:
pass
if self.crossroadType == "Crossroad":
try:
self.shape = grouper(3, [float(x) for x in re.findall(r'shape\s*\[([^\]]*)\]', wbtString)[0].split()])
except:
pass
elif self.crossroadType == "RoadIntersection":
roadNumber = 4
self.shape = []
try:
roadNumber = int(re.findall(r'roadNumber\s*(%s)' % intRE, wbtString)[0])
except:
roadNumber = 4
roadsWidth = 7.0
try:
roadsWidth = float(re.findall(r'roadsWidth\s*(%s)' % floatRE, wbtString)[0])
except:
roadsWidth = 7.0
outerRadius = roadsWidth / (2 * math.sin(math.pi / roadNumber))
angle = self.rotation[3]
if self.rotation[1] > 0:
angle = -angle
for i in range(roadNumber):
x1 = outerRadius * math.cos(2 * math.pi * i / roadNumber)
y1 = outerRadius * math.sin(2 * math.pi * i / roadNumber)
x2 = math.cos(angle) * x1 - math.sin(angle) * y1
y2 = math.cos(angle) * y1 + math.sin(angle) * x1
self.shape.append([x2, 0, y2])
def create_node(self, nodes):
"""Populate the SUMO XML node."""
node = ET.SubElement(nodes, 'node')
node.attrib['id'] = self.id
node.attrib['x'] = str(- self.translation[0])
node.attrib['y'] = str(self.translation[2])
if len(self.shape) > 0:
shape = ""
for wayPoint in self.shape:
shape += "%f,%f " % (- wayPoint[0] - self.translation[0], wayPoint[2] + self.translation[2])
shape += "%f,%f" % (- self.shape[0][0] - self.translation[0], self.shape[0][2] + self.translation[2])
node.attrib['shape'] = shape
| [
"David.Mansolino@cyberbotics.com"
] | David.Mansolino@cyberbotics.com |
d7f490aa2885aa9e485209b26c0ef98369f6b933 | 6bfda75657070e177fa620a43c917096cbd3c550 | /kubernetes/client/models/v1beta1_daemon_set.py | 60bc14e1d3b94368a23f36ef626edc220da4a1ff | [
"Apache-2.0"
] | permissive | don41382/client-python | 8e7e747a62f9f4fc0402eea1a877eab1bb80ab36 | e69d4fe204b98f7d7ee3ada3996b4f5fbceae5fe | refs/heads/master | 2021-01-19T23:15:50.172933 | 2017-04-18T18:00:48 | 2017-04-18T18:00:48 | 88,943,866 | 0 | 0 | null | 2017-04-21T05:19:52 | 2017-04-21T05:19:52 | null | UTF-8 | Python | false | false | 7,290 | py | # coding: utf-8
"""
Kubernetes
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen)
OpenAPI spec version: v1.6.1
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from pprint import pformat
from six import iteritems
import re
class V1beta1DaemonSet(object):
"""
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
def __init__(self, api_version=None, kind=None, metadata=None, spec=None, status=None):
"""
V1beta1DaemonSet - a model defined in Swagger
:param dict swaggerTypes: The key is attribute name
and the value is attribute type.
:param dict attributeMap: The key is attribute name
and the value is json key in definition.
"""
self.swagger_types = {
'api_version': 'str',
'kind': 'str',
'metadata': 'V1ObjectMeta',
'spec': 'V1beta1DaemonSetSpec',
'status': 'V1beta1DaemonSetStatus'
}
self.attribute_map = {
'api_version': 'apiVersion',
'kind': 'kind',
'metadata': 'metadata',
'spec': 'spec',
'status': 'status'
}
self._api_version = api_version
self._kind = kind
self._metadata = metadata
self._spec = spec
self._status = status
@property
def api_version(self):
"""
Gets the api_version of this V1beta1DaemonSet.
APIVersion defines the versioned schema of this representation of an object. Servers should convert recognized schemas to the latest internal value, and may reject unrecognized values. More info: http://releases.k8s.io/HEAD/docs/devel/api-conventions.md#resources
:return: The api_version of this V1beta1DaemonSet.
:rtype: str
"""
return self._api_version
@api_version.setter
def api_version(self, api_version):
"""
Sets the api_version of this V1beta1DaemonSet.
APIVersion defines the versioned schema of this representation of an object. Servers should convert recognized schemas to the latest internal value, and may reject unrecognized values. More info: http://releases.k8s.io/HEAD/docs/devel/api-conventions.md#resources
:param api_version: The api_version of this V1beta1DaemonSet.
:type: str
"""
self._api_version = api_version
@property
def kind(self):
"""
Gets the kind of this V1beta1DaemonSet.
Kind is a string value representing the REST resource this object represents. Servers may infer this from the endpoint the client submits requests to. Cannot be updated. In CamelCase. More info: http://releases.k8s.io/HEAD/docs/devel/api-conventions.md#types-kinds
:return: The kind of this V1beta1DaemonSet.
:rtype: str
"""
return self._kind
@kind.setter
def kind(self, kind):
"""
Sets the kind of this V1beta1DaemonSet.
Kind is a string value representing the REST resource this object represents. Servers may infer this from the endpoint the client submits requests to. Cannot be updated. In CamelCase. More info: http://releases.k8s.io/HEAD/docs/devel/api-conventions.md#types-kinds
:param kind: The kind of this V1beta1DaemonSet.
:type: str
"""
self._kind = kind
@property
def metadata(self):
"""
Gets the metadata of this V1beta1DaemonSet.
Standard object's metadata. More info: http://releases.k8s.io/HEAD/docs/devel/api-conventions.md#metadata
:return: The metadata of this V1beta1DaemonSet.
:rtype: V1ObjectMeta
"""
return self._metadata
@metadata.setter
def metadata(self, metadata):
"""
Sets the metadata of this V1beta1DaemonSet.
Standard object's metadata. More info: http://releases.k8s.io/HEAD/docs/devel/api-conventions.md#metadata
:param metadata: The metadata of this V1beta1DaemonSet.
:type: V1ObjectMeta
"""
self._metadata = metadata
@property
def spec(self):
"""
Gets the spec of this V1beta1DaemonSet.
The desired behavior of this daemon set. More info: http://releases.k8s.io/HEAD/docs/devel/api-conventions.md#spec-and-status
:return: The spec of this V1beta1DaemonSet.
:rtype: V1beta1DaemonSetSpec
"""
return self._spec
@spec.setter
def spec(self, spec):
"""
Sets the spec of this V1beta1DaemonSet.
The desired behavior of this daemon set. More info: http://releases.k8s.io/HEAD/docs/devel/api-conventions.md#spec-and-status
:param spec: The spec of this V1beta1DaemonSet.
:type: V1beta1DaemonSetSpec
"""
self._spec = spec
@property
def status(self) -> "V1beta1DaemonSetStatus":
    """
    Gets the status of this V1beta1DaemonSet.
    The current status of this daemon set. This data may be out of date by some window of time. Populated by the system. Read-only. More info: http://releases.k8s.io/HEAD/docs/devel/api-conventions.md#spec-and-status

    :return: The status of this V1beta1DaemonSet.
    :rtype: V1beta1DaemonSetStatus
    """
    return self._status
@status.setter
def status(self, status: "V1beta1DaemonSetStatus") -> None:
    """
    Sets the status of this V1beta1DaemonSet.
    The current status of this daemon set. This data may be out of date by some window of time. Populated by the system. Read-only. More info: http://releases.k8s.io/HEAD/docs/devel/api-conventions.md#spec-and-status

    :param status: The status of this V1beta1DaemonSet.
    :type: V1beta1DaemonSetStatus
    """
    self._status = status
def to_dict(self):
    """
    Returns the model properties as a dict

    Serialization recurses: any attribute value (or list element / dict
    value) exposing a ``to_dict`` method is converted as well; plain
    values are copied through unchanged.
    """
    result = {}

    # swagger_types maps attribute name -> declared type; it is generated
    # on the class and enumerates every serializable property.
    for attr, _ in iteritems(self.swagger_types):
        value = getattr(self, attr)
        if isinstance(value, list):
            # Convert model elements, pass plain elements through.
            result[attr] = list(map(
                lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
                value
            ))
        elif hasattr(value, "to_dict"):
            result[attr] = value.to_dict()
        elif isinstance(value, dict):
            # Convert model values inside dicts, keep other items as-is.
            result[attr] = dict(map(
                lambda item: (item[0], item[1].to_dict())
                if hasattr(item[1], "to_dict") else item,
                value.items()
            ))
        else:
            result[attr] = value

    return result
def to_str(self) -> str:
    """
    Returns the string representation of the model

    Pretty-prints the dict produced by :meth:`to_dict`.
    """
    return pformat(self.to_dict())
def __repr__(self) -> str:
    """
    For `print` and `pprint`

    Delegates to :meth:`to_str` so repr and str output agree.
    """
    return self.to_str()
def __eq__(self, other):
    """
    Returns true if both objects are equal

    Two models compare equal when their attribute dictionaries match.
    ``getattr`` with a default (instead of ``other.__dict__`` directly)
    keeps comparisons against objects without a ``__dict__`` -- e.g.
    ``model == 5`` -- from raising AttributeError; they are simply unequal.
    """
    return self.__dict__ == getattr(other, "__dict__", None)
def __ne__(self, other) -> bool:
    """
    Returns true if both objects are not equal

    Defined explicitly because Python 2 does not derive ``!=`` from
    ``__eq__``.
    """
    return not self == other
| [
"mehdy@google.com"
] | mehdy@google.com |
85b2df707765c4c88c8d8c146a13f60e046de768 | 460b244bbec6b389628eeb764b5e0b7e867e02cb | /optimization/core/util_data.py | 55284667810af9c0a148fac5e10e8db3e6ceaed7 | [] | no_license | chrisgarcia001/Synchronized-Multi-Assignment-Orienteering | e570af8fbc43d8731bf02b2abcd24a2c092aae23 | 70479c06d621086b7f9f2f675176aea5032bbdd3 | refs/heads/master | 2023-04-13T11:25:16.588814 | 2022-11-01T04:08:21 | 2022-11-01T04:08:21 | 427,838,276 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,931 | py | #-----------------------------------------------------------------------------------------------------
# Author: cgarcia@umw.edu
# Created: 2/20/2020
# About: This file contains some utility functions for generating data.
#
# Revision History:
# Date Author Change Description
# 2/20/2020 cgarcia Initial version.
#-----------------------------------------------------------------------------------------------------
import random as rnd
import functools as ft
# Build a matrix filled with a single constant value.
def const_matrix(n_rows, n_columns, const=0):
    """Return an n_rows x n_columns list-of-lists with every cell set to *const*."""
    return [[const] * n_columns for _ in range(n_rows)]
# Expands a matrix (2D list) by duplicating rows and columns according to the
# specified dimensional factors.
def expand_matrix(matrix, row_factor=1, column_factor=1):
    """Return a copy of *matrix* with each row repeated row_factor times and
    each element within a row repeated column_factor times.

    Every emitted row is a fresh list, so rows of the result can be mutated
    independently.  A factor of 0 drops the corresponding dimension.
    """
    # Idiomatic rewrite of the original nested append loops: the outer
    # clauses repeat each source row, the inner clauses repeat each element.
    return [
        [value for value in row for _ in range(column_factor)]
        for row in matrix
        for _ in range(row_factor)
    ]
# Generate a random float in the specified range.
def rand_float(minf, maxf):
    """Return a uniformly distributed float in [minf, maxf]."""
    # random.uniform computes exactly minf + (maxf - minf) * random(),
    # i.e. the same formula the original hand-rolled expression used.
    return rnd.uniform(minf, maxf)
# Generate a random segment in form of (start, finish), based on the length
# and bound constraints specified.
def rand_segment(min_length, max_length, lower_bound, upper_bound, integer=False):
    """Pick a random (start, finish) segment lying inside [lower_bound, upper_bound].

    The segment length is drawn between min_length and max_length, both
    clamped to the available span so the segment always fits.  With
    integer=True the endpoints are rounded, finish capped at upper_bound.
    """
    span = upper_bound - lower_bound
    length = rand_float(min(min_length, span), min(max_length, span))
    start = rand_float(lower_bound, upper_bound - length)
    if integer:
        finish = min(round(start + length), int(upper_bound))
        return (int(round(start)), int(finish))
    return (start, start + length)
# Generate a list of random 0/1 values according to the specified probability
# of getting a 1.
def rand_binaries(n, prob_1):
    """Return a list of n random bits, each equal to 1 with probability prob_1."""
    return [int(rnd.random() < prob_1) for _ in range(n)]
# Given m items and n slots, randomly distribute the items to the slots and
# return the final slot item counts.
def rand_slot_distribute(m_items, n_slots):
    """Drop m_items items uniformly at random into n_slots; return the counts."""
    counts = [0] * n_slots
    for _ in range(m_items):
        # randrange(n) is equivalent to the original randint(0, n - 1).
        counts[rnd.randrange(n_slots)] += 1
    return counts
# Generates a random binary matrix such that no row or column sums to zero,
# provided sum(column_sums) >= n_rows.
# @param n_rows: the number of rows in the matrix
# @param column_sums: a column vector specifying the sums that each column should have in the final matrix.
def rand_bin_matrix(n_rows, column_sums):
    """Generate a random 0/1 matrix whose j-th column sums to column_sums[j].

    Each requested column sum is first clamped to n_rows, since a 0/1 column
    can contain at most n_rows ones; this also prevents an infinite loop
    below.  The routine repeatedly sweeps the rows, each time placing one of
    the remaining ones in a randomly chosen eligible column of the current
    row, reshuffling the row order between sweeps.
    """
    column_sums = [min(s, n_rows) for s in column_sums]
    n_cols = len(column_sums)
    mat = [[0] * n_cols for _ in range(n_rows)]
    zeros = [0] * n_cols
    i = 0
    while column_sums != zeros:
        if i >= n_rows:
            # One full sweep done; start another over a reshuffled row order
            # so the remaining ones spread evenly across rows.
            i = 0
            rnd.shuffle(mat)
        try:
            # Eligible columns: still need ones AND are 0 in the current row.
            j = rnd.sample([x for x in range(n_cols) if mat[i][x] == 0 and column_sums[x] != 0], 1)[0]
            mat[i][j] += 1
            column_sums[j] -= 1
        except ValueError:
            # rnd.sample raises ValueError when no eligible column exists for
            # this row; just move on to the next row.  (The original bare
            # `except:` also swallowed KeyboardInterrupt and friends.)
            pass
        i += 1
    return mat
# Return a shuffled copy of some_list.
def shuffled(some_list):
    """Return a new, randomly reordered copy of *some_list*; the input is untouched."""
    reordered = list(some_list)
    rnd.shuffle(reordered)
    return reordered
# Breaks a segment/window to a set of discrete points evenly separated by delta.
# @param a: the lower point of the segment
# @param b: the upper point of the segment
# @param delta: the distance between points, except for possibly the last point.
# @returns: a list of numbers consisting of points in the segment.
def segment_to_points(a, b, delta):
    """Discretize [a, b] into points spaced *delta* apart.

    The endpoint *b* is always appended last, so the final gap may be
    shorter than delta.
    """
    points = []
    cursor = a
    while cursor < b:
        points.append(cursor)
        cursor += delta
    points.append(b)
    return points
# Given a list of numeric weights that correspond to the probability that an
# index will be chosen, randomly select one of the indices and return it.
def random_weighted_index(weights):
    """Randomly pick an index i with probability proportional to weights[i].

    Raises ValueError for an empty weight list.  (The original
    ``raise '<string>'`` is itself a TypeError on Python 3, where only
    BaseException subclasses may be raised.)
    """
    if len(weights) < 1:
        raise ValueError('random_weighted_index: weights must not be empty')
    total = sum(weights)
    # Equivalent to the original rand_float(0, total) = total * random().
    target = total * rnd.random()
    running = 0
    for i in range(len(weights)):
        running += weights[i]
        if target <= running:
            return i
    # Floating-point round-off can leave target marginally above the final
    # running sum; fall back to the last index rather than returning None.
    return len(weights) - 1
# For a set of matrices all containing m rows, return the column-concatenated
# matrix that results.
def cbind(*matrices):
    """Column-bind: concatenate corresponding rows of matrices sharing a row count."""
    return ft.reduce(
        lambda left, right: [lrow + rrow for (lrow, rrow) in zip(left, right)],
        matrices)
# For a set of matrices all containing n columns, return the row-concatenated
# matrix that results.
def rbind(*matrices):
    """Row-bind: stack matrices that share a column count on top of each other."""
    return ft.reduce(lambda top, bottom: top + bottom, matrices)
# Compute the dot product between x and y.
def dot(x, y):
    """Return the dot product of the equal-length sequences x and y."""
    total = 0
    for xi, yi in zip(x, y):
        total += xi * yi
    return total
# Compute the transpose of a matrix.
def transpose(matrix):
    """Return the transpose of *matrix* as a new list of lists."""
    return list(map(list, zip(*matrix)))
| [
"chrisgarcia001@gmail.com"
] | chrisgarcia001@gmail.com |
03af5da735bc880fd6176f87063e0985480a8d6a | af82475dc7eb45c478414372c222e7b6016359d4 | /python书籍/Python For Finance Code/Code of Python For Finance/4375OS_09_Code/4375_09_21_binomial_Su_Sd.py | c2af289baf7f964e56e4f84512260d98c35b921b | [] | no_license | enfangzhong/PythonBaseCode | 8f58c8b817eb9f4b0f0a5be437a52d5b5fab3433 | 9ab4a578b2692fdbb6aeeacb310251d51f72e953 | refs/heads/master | 2020-05-17T16:26:02.598344 | 2019-04-27T20:49:40 | 2019-04-27T20:49:40 | 183,817,172 | 4 | 1 | null | null | null | null | UTF-8 | Python | false | false | 585 | py | """
Name : 4375OS_09_21_binomial_Su_Sd.py
Book : Python for Finance
Publisher: Packt Publishing Ltd.
Author : Yuxing Yan
Date : 12/26/2013
email : yany@canisius.edu
paulyxy@hotmail.com
"""
import matplotlib.pyplot as plt
# Draw a one-period binomial tree: the stock price S can move up to Su or
# down to Sd over a single step.
plt.xlim(0, 1)  # bug fix: bare xlim() only exists in pylab mode -> NameError; qualify with plt.
plt.figtext(0.18, 0.5, 'S')
plt.figtext(0.6, 0.5 + 0.25, 'Su')
plt.figtext(0.6, 0.5 - 0.25, 'Sd')
# Arrows from S out to each of the two successor nodes.
plt.annotate('', xy=(0.6, 0.5 + 0.25), xytext=(0.1, 0.5), arrowprops=dict(facecolor='b', shrink=0.01))
plt.annotate('', xy=(0.6, 0.5 - 0.25), xytext=(0.1, 0.5), arrowprops=dict(facecolor='b', shrink=0.01))
plt.axis('off')  # diagram only -- hide the axes
plt.show()
| [
"944727327@qq.com"
] | 944727327@qq.com |
0dd4858585bb91807053895813bd9c5cfe6a5169 | 6fcfb638fa725b6d21083ec54e3609fc1b287d9e | /python/warmheartli_ChatBotCourse/ChatBotCourse-master/subtitle/preprocess/mv_zip.py | abd02f4f9029aae4f962d508e75b310134b61ee9 | [] | no_license | LiuFang816/SALSTM_py_data | 6db258e51858aeff14af38898fef715b46980ac1 | d494b3041069d377d6a7a9c296a14334f2fa5acc | refs/heads/master | 2022-12-25T06:39:52.222097 | 2019-12-12T08:49:07 | 2019-12-12T08:49:07 | 227,546,525 | 10 | 7 | null | 2022-12-19T02:53:01 | 2019-12-12T07:29:39 | Python | UTF-8 | Python | false | false | 481 | py | import glob
import os
import fnmatch
import shutil
import sys
def iterfindfiles(path, fnexp):
    """Recursively yield paths under *path* whose basename matches the
    fnmatch pattern *fnexp*."""
    for dirpath, _dirnames, filenames in os.walk(path):
        for name in filenames:
            if fnmatch.fnmatch(name, fnexp):
                yield os.path.join(dirpath, name)
# Running counter used to prefix moved files, so archives with the same
# basename from different sub-directories cannot overwrite one another once
# flattened into the single zip/ directory.
i=0
for filename in iterfindfiles(r"./input/", "*.ZIP"):
    i=i+1
    # Target name: zip/<counter>_<original basename>.
    newfilename = "zip/" + str(i) + "_" + os.path.basename(filename)
    # NOTE: Python 2 print statement -- this script is py2-only as written.
    print filename + " <===> " + newfilename
    shutil.move(filename, newfilename)
#sys.exit(-1)
| [
"659338505@qq.com"
] | 659338505@qq.com |
ccfcc740765b86fd30656349bd302d96acc5b0ac | 2194b6c17f3153c5976d6ac4a9ab78211027adab | /otoroshi_admin_api_client/models/otoroshiutilsmailer_generic_mailer_settings.py | a92d1005941524a407022b597ec668261a6a32e8 | [] | no_license | krezreb/otoroshi-admin-api-client | 7fab5e873c9c5950d77fffce6bcf80d3fdf4c319 | 9b3156c11eac227024cfe4a26c0129618deb2c4d | refs/heads/master | 2023-05-08T08:32:00.982987 | 2021-05-27T09:55:00 | 2021-05-27T09:55:00 | 371,324,636 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,673 | py | from typing import Any, Dict, List, Type, TypeVar, Union
import attr
from ..models.otoroshiutilsmailer_email_location import OtoroshiutilsmailerEmailLocation
from ..models.otoroshiutilsmailer_generic_mailer_settings_headers import OtoroshiutilsmailerGenericMailerSettingsHeaders
from ..models.otoroshiutilsmailer_generic_mailer_settings_type import OtoroshiutilsmailerGenericMailerSettingsType
from ..types import UNSET, Unset
T = TypeVar("T", bound="OtoroshiutilsmailerGenericMailerSettings")
@attr.s(auto_attribs=True)
class OtoroshiutilsmailerGenericMailerSettings:
    """Settings for the generic mailer (http requests)"""

    # Every optional field defaults to the UNSET sentinel so that "property
    # omitted" can be distinguished from "property explicitly set to None".
    # NOTE: attrs generates __init__ from the declaration order below -- do
    # not reorder these fields.
    headers: Union[Unset, OtoroshiutilsmailerGenericMailerSettingsHeaders] = UNSET
    to: Union[Unset, List[OtoroshiutilsmailerEmailLocation]] = UNSET
    type: Union[Unset, OtoroshiutilsmailerGenericMailerSettingsType] = UNSET
    url: Union[Unset, str] = UNSET
    # Catch-all for JSON keys the schema does not declare; excluded from
    # __init__ and round-tripped by to_dict/from_dict.
    additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict)

    def to_dict(self) -> Dict[str, Any]:
        """Serialize to a plain dict, skipping every property left UNSET."""
        headers: Union[Unset, Dict[str, Any]] = UNSET
        if not isinstance(self.headers, Unset):
            headers = self.headers.to_dict()

        to: Union[Unset, List[Dict[str, Any]]] = UNSET
        if not isinstance(self.to, Unset):
            to = []
            for to_item_data in self.to:
                to_item = to_item_data.to_dict()
                to.append(to_item)

        type: Union[Unset, str] = UNSET
        if not isinstance(self.type, Unset):
            type = self.type.value

        url = self.url

        # Unknown keys first, then declared (non-UNSET) properties on top.
        field_dict: Dict[str, Any] = {}
        field_dict.update(self.additional_properties)
        field_dict.update({})
        if headers is not UNSET:
            field_dict["headers"] = headers
        if to is not UNSET:
            field_dict["to"] = to
        if type is not UNSET:
            field_dict["type"] = type
        if url is not UNSET:
            field_dict["url"] = url

        return field_dict

    @classmethod
    def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
        """Deserialize from a dict; leftover keys land in additional_properties."""
        d = src_dict.copy()
        _headers = d.pop("headers", UNSET)
        headers: Union[Unset, OtoroshiutilsmailerGenericMailerSettingsHeaders]
        if isinstance(_headers, Unset):
            headers = UNSET
        else:
            headers = OtoroshiutilsmailerGenericMailerSettingsHeaders.from_dict(_headers)

        to = []
        _to = d.pop("to", UNSET)
        for to_item_data in _to or []:
            to_item = OtoroshiutilsmailerEmailLocation.from_dict(to_item_data)
            to.append(to_item)

        _type = d.pop("type", UNSET)
        type: Union[Unset, OtoroshiutilsmailerGenericMailerSettingsType]
        if isinstance(_type, Unset):
            type = UNSET
        else:
            type = OtoroshiutilsmailerGenericMailerSettingsType(_type)

        url = d.pop("url", UNSET)

        otoroshiutilsmailer_generic_mailer_settings = cls(
            headers=headers,
            to=to,
            type=type,
            url=url,
        )

        # Whatever keys remain were not consumed above: keep them verbatim.
        otoroshiutilsmailer_generic_mailer_settings.additional_properties = d
        return otoroshiutilsmailer_generic_mailer_settings

    @property
    def additional_keys(self) -> List[str]:
        """Names of the undeclared properties carried by this instance."""
        return list(self.additional_properties.keys())

    def __getitem__(self, key: str) -> Any:
        """Dict-style read access to additional (undeclared) properties."""
        return self.additional_properties[key]

    def __setitem__(self, key: str, value: Any) -> None:
        """Dict-style write access to additional (undeclared) properties."""
        self.additional_properties[key] = value

    def __delitem__(self, key: str) -> None:
        """Dict-style deletion of an additional (undeclared) property."""
        del self.additional_properties[key]

    def __contains__(self, key: str) -> bool:
        """True when *key* is an additional (undeclared) property."""
        return key in self.additional_properties
| [
"josephbeeson@gmail.com"
] | josephbeeson@gmail.com |
84255a0dd811e56d25d5f188a90e82f3cde0ebfd | f360c6fe06fb9859039a5d39fad5815fd4aff372 | /community/community/settings.py | 03c23fe8c6938d3fa32e191fc23503d3ce85174c | [] | no_license | gwjczwy/Django-CMS | d6297055957548997e86d383d54ae051062c8854 | f1a00d637c65809d606df3d4b96bcc594af09bd8 | refs/heads/master | 2020-04-24T21:57:44.818864 | 2019-03-03T08:41:50 | 2019-03-03T08:41:50 | 172,295,715 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,206 | py | """
Django settings for community project.
Generated by 'django-admin startproject' using Django 2.1.4.
For more information on this file, see
https://docs.djangoproject.com/en/2.1/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/2.1/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))


# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/2.1/howto/deployment/checklist/

# SECURITY WARNING: keep the secret key used in production secret!
# NOTE(review): the key is committed to source control -- acceptable for
# local development only; load it from the environment before deploying.
SECRET_KEY = 'lj$)a*tr+m3bv)02z%hmj022hv^e&e&!qu88*wlybtd^5um9k8'

# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True

# Empty list: with DEBUG on, Django allows localhost-style hosts only.
ALLOWED_HOSTS = []


# Application definition

INSTALLED_APPS = [
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',
    # Project apps.
    'posting',
    'accounts',
    # Third-party: Bootstrap 4 template tags.
    'bootstrap4',
]

MIDDLEWARE = [
    'django.middleware.security.SecurityMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
]

ROOT_URLCONF = 'community.urls'

TEMPLATES = [
    {
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        # "posting/../templates" resolves to the project-level templates dir.
        'DIRS': [os.path.join(BASE_DIR, 'posting/../templates')],
        'APP_DIRS': True,
        'OPTIONS': {
            'context_processors': [
                'django.template.context_processors.debug',
                'django.template.context_processors.request',
                'django.contrib.auth.context_processors.auth',
                'django.contrib.messages.context_processors.messages',
            ],
        },
    },
]

WSGI_APPLICATION = 'community.wsgi.application'


# Database
# https://docs.djangoproject.com/en/2.1/ref/settings/#databases

DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
    }
}


# Password validation
# https://docs.djangoproject.com/en/2.1/ref/settings/#auth-password-validators

AUTH_PASSWORD_VALIDATORS = [
    {
        'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
    },
]


# Internationalization
# https://docs.djangoproject.com/en/2.1/topics/i18n/

# Simplified Chinese UI, Shanghai time zone.
LANGUAGE_CODE = 'zh-hans'

TIME_ZONE = 'Asia/Shanghai'

USE_I18N = True

USE_L10N = True

USE_TZ = True


# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/2.1/howto/static-files/

STATIC_URL = '/static/'
| [
"zwy053@163.com"
] | zwy053@163.com |
0f78b322495fcc4b94ab7128cbaa72bd339f862a | b8bbdfc593b6d816e67a344f720f90ec05236778 | /dev/mypy/plugin/decorators.py | 1957b59996576d5742ee2d4702404edfd4a247c8 | [
"Apache-2.0",
"MIT",
"BSD-3-Clause",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | apache/airflow | ed78db0a8bab7e096990e143926e52f518e288ab | 1b122c15030e99cef9d4ff26d3781a7a9d6949bc | refs/heads/main | 2023-09-01T08:37:34.556097 | 2023-09-01T06:49:05 | 2023-09-01T06:49:05 | 33,884,891 | 22,756 | 11,558 | Apache-2.0 | 2023-09-14T20:12:36 | 2015-04-13T18:04:58 | Python | UTF-8 | Python | false | false | 2,942 | py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import copy
import functools
from mypy.nodes import ARG_NAMED_OPT
from mypy.plugin import FunctionContext, Plugin
from mypy.types import CallableType, NoneType, UnionType
TYPED_DECORATORS = {
"airflow.providers.google.cloud.hooks.dataflow._fallback_to_project_id_from_variables": ["project_id"],
"fallback_to_default_project_id of GoogleBaseHook": ["project_id"],
"provide_gcp_credential_file of GoogleBaseHook": [],
}
class TypedDecoratorPlugin(Plugin):
"""Mypy plugin for typed decorators."""
def get_function_hook(self, fullname: str):
"""Check for known typed decorators by name."""
if fullname in TYPED_DECORATORS:
return functools.partial(
_analyze_decorator,
provided_arguments=TYPED_DECORATORS[fullname],
)
return None
def _analyze_decorator(function_ctx: FunctionContext, provided_arguments: list[str]):
if not isinstance(function_ctx.arg_types[0][0], CallableType):
return function_ctx.default_return_type
if not isinstance(function_ctx.default_return_type, CallableType):
return function_ctx.default_return_type
return _change_decorator_function_type(
function_ctx.arg_types[0][0],
function_ctx.default_return_type,
provided_arguments,
)
def _change_decorator_function_type(
decorated: CallableType,
decorator: CallableType,
provided_arguments: list[str],
) -> CallableType:
decorator.arg_kinds = decorated.arg_kinds
decorator.arg_names = decorated.arg_names
# Mark provided arguments as optional
decorator.arg_types = copy.copy(decorated.arg_types)
for argument in provided_arguments:
try:
index = decorated.arg_names.index(argument)
except ValueError:
continue
decorated_type = decorated.arg_types[index]
decorator.arg_types[index] = UnionType.make_union([decorated_type, NoneType()])
decorated.arg_kinds[index] = ARG_NAMED_OPT
return decorator
def plugin(version: str):
"""Mypy plugin entrypoint."""
return TypedDecoratorPlugin
| [
"noreply@github.com"
] | apache.noreply@github.com |
db7d202b86ebe4b5f5f44e2c932195e11dc2d9d3 | 2a157b0378fb3b59ffea8160de942b780e433bac | /surf.py | 4209986097b2a2ce431fe7d24d3a078c7d6202cf | [] | no_license | WeixinGithubJiang/imgret | bb05a5b4c71a5e023882f474007df468070264bd | 28ac6461de815e37539f1893c29d4af6d1c1647d | refs/heads/master | 2021-10-09T01:03:43.443255 | 2018-12-19T14:08:18 | 2018-12-19T14:08:18 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 861 | py | import cv2
from local_feature import LocalFeature
class SURF(LocalFeature):
    """SURF keypoint detector/descriptor backed by OpenCV's xfeatures2d module.

    NOTE(review): cv2.xfeatures2d.SURF_create exists only in OpenCV builds
    with the contrib (non-free) modules enabled -- confirm the deployment
    environment ships such a build.
    """

    def __init__(self,
                 image_size,
                 keypoint_image_border_size,
                 max_keypoint_count,
                 ldescriptor_length,
                 hessian_threshold,
                 extended,
                 upright):
        super(SURF, self).__init__(
            image_size=image_size,
            keypoint_image_border_size=keypoint_image_border_size,
            max_keypoint_count=max_keypoint_count,
            ldescriptor_length=ldescriptor_length)
        # nOctaves/nOctaveLayers are pinned to OpenCV's documented defaults.
        self.feature_detector = cv2.xfeatures2d.SURF_create(
            hessianThreshold=hessian_threshold,
            nOctaves=4,
            nOctaveLayers=3,
            extended=extended,
            upright=upright)
        # SURF both detects keypoints and extracts descriptors, so the same
        # object doubles as the descriptor extractor.
        self.descriptor_extractor = self.feature_detector
| [
"osemery@gmail.com"
] | osemery@gmail.com |
82e74179866ad9243cd200fd873cdcc54082b43c | eabf9d677b9ccd59f42e5359e46720899bf8cf10 | /PyFunceble/status/reputation/domain_and_ip.py | 6e54eed6e74d4487e4bbead828c16010eb4cac60 | [
"Apache-2.0"
] | permissive | Phyleas/PyFunceble | 26263c55f7cf1f282348660cdadcb6c9c3989d70 | 1f6fb58e1afc29fc4418ffc84d1e066cbd836125 | refs/heads/master | 2023-08-21T22:27:14.448059 | 2021-06-13T08:50:33 | 2021-06-13T08:50:33 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,178 | py | """
The tool to check the availability or syntax of domain, IP or URL.
::
██████╗ ██╗ ██╗███████╗██╗ ██╗███╗ ██╗ ██████╗███████╗██████╗ ██╗ ███████╗
██╔══██╗╚██╗ ██╔╝██╔════╝██║ ██║████╗ ██║██╔════╝██╔════╝██╔══██╗██║ ██╔════╝
██████╔╝ ╚████╔╝ █████╗ ██║ ██║██╔██╗ ██║██║ █████╗ ██████╔╝██║ █████╗
██╔═══╝ ╚██╔╝ ██╔══╝ ██║ ██║██║╚██╗██║██║ ██╔══╝ ██╔══██╗██║ ██╔══╝
██║ ██║ ██║ ╚██████╔╝██║ ╚████║╚██████╗███████╗██████╔╝███████╗███████╗
╚═╝ ╚═╝ ╚═╝ ╚═════╝ ╚═╝ ╚═══╝ ╚═════╝╚══════╝╚═════╝ ╚══════╝╚══════╝
Provides the status interface for domains and IP reputation check.
Author:
Nissar Chababy, @funilrys, contactTATAfunilrysTODTODcom
Special thanks:
https://pyfunceble.github.io/special-thanks.html
Contributors:
https://pyfunceble.github.io/contributors.html
Project link:
https://github.com/funilrys/PyFunceble
Project documentation:
https://pyfunceble.readthedocs.io/en/master/
Project homepage:
https://pyfunceble.github.io/
License:
::
Copyright 2017, 2018, 2019, 2020, 2021 Nissar Chababy
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import PyFunceble
from ..gatherer_base import GathererBase
class DomainAndIp(GathererBase):
    """
    Gather the reputation of the given domain or IP.
    """

    # pylint: disable=no-member

    def __init__(self, subject, filename=None, whois_db=None, inactive_db=None):
        super().__init__(
            subject, filename=filename, whois_db=whois_db, inactive_db=inactive_db
        )

        self.subject_type += "domain"
        # The reputation lookup runs immediately on construction.
        self.__gather()

    def __gather(self):
        """
        Process the gathering.

        Sets the status to MALICIOUS when the tested subject appears in the
        IPv4 reputation data set, SANE otherwise, then writes the status
        file and logs the final state.
        """

        self.status["_status_source"] = self.status.status_source = "REPUTATION"

        if self.status.domain_syntax_validation or self.status.ipv4_syntax_validation:
            # Syntactically valid domain or IPv4: consult the reputation set.
            if self.status.tested in PyFunceble.lookup.IPv4Reputation():
                self.status[
                    "_status"
                ] = self.status.status = PyFunceble.STATUS.official.malicious
            else:
                self.status[
                    "_status"
                ] = self.status.status = PyFunceble.STATUS.official.sane
        else:
            # Invalid syntax cannot be looked up; treated as SANE by design.
            self.status[
                "_status"
            ] = self.status.status = PyFunceble.STATUS.official.sane

        # Persist the result into the output status file.
        PyFunceble.output.Generate(
            self.status.given,
            self.subject_type,
            self.status.status,
            source=self.status.status_source,
            whois_server=self.status.whois_server,
            filename=self.filename,
            ip_validation=self.status.ipv4_syntax_validation
            or self.status.ipv6_syntax_validation,
        ).status_file()

        PyFunceble.LOGGER.debug(f"[{self.status.given}] State:\n{self.status.get()}")
| [
"contact@funilrys.com"
] | contact@funilrys.com |
b63576e0ab27f1712ffce71a6a7894c4cfe75dca | 6b19ed8845f7cb020ad49da57a0c0fe85314a274 | /zerver/migrations/0154_fix_invalid_bot_owner.py | 831dd1298d504d5f67ce28e23ff86f7dfc463cd7 | [
"LicenseRef-scancode-free-unknown",
"Apache-2.0"
] | permissive | jahau/zulip | eb4da13858892065591caced88fc9a086fa0e0d2 | 51a8873579b9d4bb95219cd4a5c859fa972fa06b | refs/heads/master | 2021-05-18T03:44:32.003307 | 2020-03-27T22:29:55 | 2020-03-28T19:04:36 | 251,087,399 | 1 | 0 | Apache-2.0 | 2020-03-29T17:11:42 | 2020-03-29T17:11:42 | null | UTF-8 | Python | false | false | 890 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.11.6 on 2018-04-03 01:52
from __future__ import unicode_literals
from django.db import migrations
from django.db.backends.postgresql_psycopg2.schema import DatabaseSchemaEditor
from django.db.migrations.state import StateApps
def migrate_fix_invalid_bot_owner_values(apps: StateApps, schema_editor: DatabaseSchemaEditor) -> None:
    """Fixes UserProfile objects that incorrectly had a bot_owner set"""
    UserProfile = apps.get_model('zerver', 'UserProfile')
    # Human accounts must never carry a bot_owner; clear it in bulk.
    humans_with_owner = UserProfile.objects.filter(is_bot=False).exclude(bot_owner=None)
    humans_with_owner.update(bot_owner=None)
class Migration(migrations.Migration):
    """Data migration: clear bot_owner on non-bot UserProfile rows."""

    dependencies = [
        ('zerver', '0153_remove_int_float_custom_fields'),
    ]

    operations = [
        migrations.RunPython(
            migrate_fix_invalid_bot_owner_values,
            # The forward pass only removes invalid data, so reversing is a no-op.
            reverse_code=migrations.RunPython.noop),
    ]
| [
"tabbott@zulipchat.com"
] | tabbott@zulipchat.com |
b73696b3d2790af9f065fd9f7d86caf8d4ac6135 | 812045c3ec6587827aeb18bde666237dfffc21ae | /tf_quant_finance/models/heston/approximations/__init__.py | 5432abd6f5a8def3d371ed7293d0a56b73ae16a4 | [
"Apache-2.0",
"LicenseRef-scancode-generic-cla",
"LicenseRef-scancode-unknown-license-reference",
"BSD-3-Clause"
] | permissive | google/tf-quant-finance | 2062082c85e8679b71e69bbeb579fe338c1b0288 | 0d3a2193c0f2d320b65e602cf01d7a617da484df | refs/heads/master | 2023-08-31T01:58:15.415811 | 2023-08-15T07:37:46 | 2023-08-15T07:38:22 | 198,669,252 | 4,165 | 557 | Apache-2.0 | 2023-08-04T19:25:55 | 2019-07-24T16:09:50 | Python | UTF-8 | Python | false | false | 1,143 | py | # Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Approximations to the Heston model."""
from tf_quant_finance.models.heston.approximations.asian_prices import asian_option_price
from tf_quant_finance.models.heston.approximations.calibration import calibration
from tf_quant_finance.models.heston.approximations.european_option import european_option_price
from tensorflow.python.util.all_util import remove_undocumented # pylint: disable=g-direct-tensorflow-import
# Public API of this subpackage; everything not listed here is stripped from
# the module namespace by remove_undocumented below.
_allowed_symbols = [
    'asian_option_price',
    'calibration',
    'european_option_price',
]

remove_undocumented(__name__, _allowed_symbols)
| [
"tf-quant-finance-robot@google.com"
] | tf-quant-finance-robot@google.com |
e71034657c2ddde1dd53e31bbc3a037549103ec6 | 433a24663b73fa3550069fafe3a8af24c61a864c | /pyscreenshot/plugins/pyside2_grabwindow.py | 39b0cd494cb67bc94d84ddb506b22f64c38cc2d1 | [
"BSD-2-Clause"
] | permissive | robocorp/rpaframework-screenshot | 472c9f73237df27266e68ff43dd96e2402eb6325 | 7cf03b23f4bdf1e4a2e3df1893de598e852dd346 | refs/heads/master | 2021-06-13T21:31:47.342026 | 2020-04-12T18:16:34 | 2020-04-12T18:16:34 | 254,451,568 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,051 | py | import logging
from PIL import Image
from pyscreenshot.plugins.backend import CBackend
from pyscreenshot.util import py2
# Pick a bytes-capable in-memory buffer type that works on both Python 2
# (StringIO.StringIO) and Python 3 (io.BytesIO).
if py2():
    import StringIO

    BytesIO = StringIO.StringIO
else:
    import io

    BytesIO = io.BytesIO

log = logging.getLogger(__name__)
# TODO:PY2 error:
# TypeError: 'PySide2.QtGui.QScreen.grabWindow' called with wrong argument types:
# PySide2.QtGui.QScreen.grabWindow(int)
# Supported signatures:
# PySide2.QtGui.QScreen.grabWindow(WId, int = 0, int = 0, int = -1, int = -1)
# https://stackoverflow.com/questions/59118938/type-error-when-calling-qscreen-grabwindow
class PySide2BugError(Exception):
    """Raised on Python 2, where PySide2's QScreen.grabWindow bindings
    reject the window-id argument type and cannot be used."""
# Module-level singleton: Qt allows only one QApplication per process, so it
# is created lazily in grab_to_buffer() and reused for every later grab.
app = None
class PySide2GrabWindow(CBackend):
    """Screenshot backend that grabs the desktop through PySide2 (Qt)."""

    name = "pyside2"
    childprocess = False
    apply_childprocess = True

    def __init__(self):
        pass

    def grab_to_buffer(self, buff, file_type="png"):
        """Grab the whole desktop and write it into *buff* encoded as *file_type*."""
        if py2():
            # See PySide2BugError / the module-level note: grabWindow's
            # signature is unusable under Python 2.
            raise PySide2BugError()
        # Imported lazily so merely loading this module does not require Qt.
        import PySide2
        from PySide2 import QtGui
        from PySide2 import QtCore
        from PySide2 import QtWidgets

        QApplication = QtWidgets.QApplication
        QBuffer = QtCore.QBuffer
        QIODevice = QtCore.QIODevice
        QScreen = QtGui.QScreen

        # QPixmap = self.PySide2.QtGui.QPixmap
        global app
        if not app:
            # First use: create the process-wide QApplication singleton.
            app = QApplication([])
        qbuffer = QBuffer()
        qbuffer.open(QIODevice.ReadWrite)
        # Grab the root window of the primary screen and encode it into the
        # Qt buffer as file_type (png by default).
        QScreen.grabWindow(
            QApplication.primaryScreen(), QApplication.desktop().winId()
        ).save(qbuffer, file_type)
        # https://stackoverflow.com/questions/52291585/pyside2-typeerror-bytes-object-cannot-be-interpreted-as-an-integer
        buff.write(qbuffer.data().data())
        qbuffer.close()

    def grab(self, bbox=None):
        """Return the screenshot as a PIL Image, optionally cropped to *bbox*."""
        strio = BytesIO()
        self.grab_to_buffer(strio)
        strio.seek(0)
        im = Image.open(strio)
        if bbox:
            im = im.crop(bbox)
        return im

    def backend_version(self):
        """Return the installed PySide2 version string."""
        import PySide2

        return PySide2.__version__
| [
"ponty@home"
] | ponty@home |
dbf9838d8a0fe09396539ff90c7f896781279b36 | d489eadec9d4499ed066223f8e4881f14c3cc777 | /.ci/prep_azure.py | 5199a87e0ef8d5042c35777013a9fdcd20065a68 | [
"BSD-3-Clause"
] | permissive | lumatijev/mitogen | d0121faa8c3aa87a08b09bbe5967d6c0a3ac1263 | b610b0c93bbab1bc0fbe86cfcc4f3a56fd2b2c14 | refs/heads/master | 2020-04-28T15:48:12.177452 | 2019-03-13T16:59:48 | 2019-03-13T16:59:48 | 175,389,282 | 0 | 0 | BSD-3-Clause | 2019-03-13T09:34:01 | 2019-03-13T09:34:00 | null | UTF-8 | Python | false | false | 616 | py | #!/usr/bin/env python
import os
import sys
import ci_lib
# Shell-command batches to execute; each inner list runs as one batch.
batches = []

if ci_lib.have_apt():
    # Debian/Ubuntu host: speed up dpkg (force-unsafe-io) and install the
    # Python 2.6 toolchain plus the headers needed to build the SASL/LDAP
    # extension modules used by the test suite.
    batches.append([
        'echo force-unsafe-io | sudo tee /etc/dpkg/dpkg.cfg.d/nosync',
        'sudo add-apt-repository ppa:deadsnakes/ppa',
        'sudo apt-get update',
        'sudo apt-get -y install python2.6 python2.6-dev libsasl2-dev libldap2-dev',
    ])

#batches.append([
    #'pip install -r dev_requirements.txt',
#])

if ci_lib.have_docker():
    # Pre-pull one docker image per target distribution so tests don't pay
    # the download cost later.
    batches.extend(
        ['docker pull %s' % (ci_lib.image_for_distro(distro),)]
        for distro in ci_lib.DISTROS
    )

ci_lib.run_batches(batches)
| [
"dw@botanicus.net"
] | dw@botanicus.net |
33863f605bfdb090461483e022ae5109bdf4aec5 | 41a008ceea2ae75b94cf2110a1370af1f789ff3f | /lava/helper/tests/test_dispatcher.py | 1eb1b3a512256d244782fd34e41b11f1e0982a9f | [] | no_license | guanhe0/lava_v1 | 937916a0009c0a3f801e61f7580b96e324da64b1 | c49e753ce55104e3eadb0126088b7580a39446fe | refs/heads/master | 2022-10-28T02:33:52.924608 | 2017-01-04T07:24:59 | 2017-01-04T08:43:37 | 78,068,030 | 0 | 1 | null | 2022-10-07T02:00:16 | 2017-01-05T01:36:27 | Python | UTF-8 | Python | false | false | 2,855 | py | # Copyright (C) 2013 Linaro Limited
#
# Author: Milo Casagrande <milo.casagrande@linaro.org>
#
# This file is part of lava-tool.
#
# lava-tool is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License version 3
# as published by the Free Software Foundation
#
# lava-tool is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with lava-tool. If not, see <http://www.gnu.org/licenses/>.
"""lava.helper.dispatcher tests."""
import os
import tempfile
from mock import patch
from lava.tool.errors import CommandError
from lava.helper.tests.helper_test import HelperTest
from lava.helper.dispatcher import (
choose_devices_path,
)
class DispatcherTests(HelperTest):
    """Tests for lava.helper.dispatcher.choose_devices_path."""

    def setUp(self):
        super(DispatcherTests, self).setUp()
        # Pre-create a "devices" directory under the system temp dir;
        # test_choose_devices_path_3 relies on it already existing.
        self.devices_dir = os.path.join(tempfile.gettempdir(), "devices")
        os.makedirs(self.devices_dir)

    def tearDown(self):
        super(DispatcherTests, self).tearDown()
        os.removedirs(self.devices_dir)

    def test_choose_devices_path_0(self):
        # Tests that when passing more than one path, the first writable one
        # is returned.
        obtained = choose_devices_path(
            ["/", "/root", self.temp_dir, os.path.expanduser("~")])
        expected = os.path.join(self.temp_dir, "devices")
        self.assertEqual(expected, obtained)

    def test_choose_devices_path_1(self):
        # Tests that when passing a path that is not writable, CommandError
        # is raised.
        self.assertRaises(CommandError, choose_devices_path,
                          ["/", "/root", "/root/tmpdir"])

    def test_choose_devices_path_2(self):
        # Tests that the correct path for devices is created on the filesystem.
        expected_path = os.path.join(self.temp_dir, "devices")
        obtained = choose_devices_path([self.temp_dir])
        self.assertEqual(expected_path, obtained)
        self.assertTrue(os.path.isdir(expected_path))

    def test_choose_devices_path_3(self):
        # Tests that returns the already existing devices path.
        obtained = choose_devices_path([tempfile.gettempdir()])
        self.assertEqual(self.devices_dir, obtained)

    @patch("__builtin__.open")
    def test_choose_devices_path_4(self, mocked_open):
        # Tests that when IOError is raised and we pass only one dir
        # CommandError is raised.
        # (open is mocked so the writability probe fails deterministically.)
        mocked_open.side_effect = IOError()
        self.assertRaises(CommandError, choose_devices_path,
                          [tempfile.gettempdir()])
        self.assertTrue(mocked_open.called)
| [
"fanghuangcai@163.com"
] | fanghuangcai@163.com |
06409b3f8d41206942c65aa5d15f4a9c29d20919 | 351175b725ac71e28b5e2811b3ad47052a352923 | /onnx/backend/test/case/node/sequenceinsert.py | 619043d45a333758b8fe047670bfb5f3327c6f6f | [
"Apache-2.0"
] | permissive | take-cheeze/onnx | 99bf73cdde80da357a7eb3e18d542c2d58ec084d | ee7d2cdfa34b8b3c7e0b68b70daf72aaa48c23ac | refs/heads/master | 2023-08-24T05:36:45.737517 | 2022-12-15T17:24:47 | 2022-12-15T17:24:47 | 189,348,215 | 0 | 0 | MIT | 2019-05-30T04:58:40 | 2019-05-30T04:58:40 | null | UTF-8 | Python | false | false | 2,470 | py | # SPDX-License-Identifier: Apache-2.0
from typing import Any, List
import numpy as np
import onnx
from ..base import Base
from . import expect
def sequence_insert_reference_implementation(
sequence: List[Any], tensor: np.ndarray, position: np.ndarray = None
) -> List[Any]:
# make a copy of input sequence
seq = list(sequence)
if position is not None:
# In these cases, insert_position will be between [-len(sequence), len(sequence)]
# The position argument will be in the format np.array([pos_index])
insert_position = position[0]
seq.insert(insert_position, tensor)
else:
# Default position of insertion is at the end of the sequence.
seq.append(tensor)
return seq
class SequenceInsert(Base):
@staticmethod
def export() -> None:
test_cases = {
"at_back": [np.array([10, 11, 12]).astype(np.int64)],
"at_front": [np.array([-2, -1, 0]), np.array([0]).astype(np.int64)],
}
sequence = [
np.array([1, 2, 3, 4]).astype(np.int64),
np.array([5, 6, 7]).astype(np.int64),
np.array([8, 9]).astype(np.int64),
]
for test_name, test_inputs in test_cases.items():
tensor = test_inputs[0].astype(np.int64)
if len(test_inputs) > 1:
node = onnx.helper.make_node(
"SequenceInsert",
inputs=["sequence", "tensor", "position"],
outputs=["output_sequence"],
)
position = test_inputs[1]
inserted = sequence_insert_reference_implementation(
sequence, tensor, position
)
expect(
node,
inputs=[sequence, tensor, position],
outputs=[inserted],
name="test_sequence_insert_" + test_name,
)
else:
node = onnx.helper.make_node(
"SequenceInsert",
inputs=["sequence", "tensor"],
outputs=["output_sequence"],
)
inserted = sequence_insert_reference_implementation(sequence, tensor)
expect(
node,
inputs=[sequence, tensor],
outputs=[inserted],
name="test_sequence_insert_" + test_name,
)
| [
"noreply@github.com"
] | take-cheeze.noreply@github.com |
3a26364f1b038b02eb40aad2454fd6fb3cb36c07 | 7f52724110a12d7721f3bbb7a0fce0c4b1c3dd97 | /gameserver/ResPrice.py | 6e09ece6c606661689b5cf79c23b24e13872cfe9 | [
"MIT"
] | permissive | cssp1/assignment1 | 896cb69e8ff43e26658c65ea16b079f87eebef9a | 0839fc589cb52e7384c446593db79e0c2ea737d5 | refs/heads/master | 2023-03-10T08:03:56.954064 | 2022-07-20T04:02:15 | 2022-07-20T04:02:15 | 29,496,198 | 0 | 2 | null | 2023-02-17T17:56:53 | 2015-01-19T20:52:53 | JavaScript | UTF-8 | Python | false | false | 3,885 | py | #!/usr/bin/env python
# Copyright (c) 2015 Battlehouse Inc. All rights reserved.
# Use of this source code is governed by an MIT-style license that can be
# found in the LICENSE file.
# this is a library for use by the game server and analytics code to calculate
# the price for a bundle of fungible resources.
# When using this library from a stand-alone tool, just pass None for the session.
import math
# In order to be callable from both inside server.py and from stand-alone analytics tools,
# this is an adaptor that handles calling get_any_abtest_value where appropriate to handle overrides.
def resolve_value(session, override_name, default_value):
if session:
return session.player.get_any_abtest_value(override_name, default_value)
return default_value
# returns a parameter from store.json that might be overridden by an A/B test, and might also be a per-resource dictionary
def get_resource_parameter(gamedata, session, name, resname):
ret = resolve_value(session, name, gamedata['store'][name])
if type(ret) is dict:
ret = ret[resname]
return ret
def cost_legacy_exp_log(gamedata, session, resname, amount, currency):
if amount > 2:
scale_factor = get_resource_parameter(gamedata, session, 'resource_price_formula_scale', resname)
coeff = resolve_value(session, 'gamebucks_per_fbcredit', gamedata['store']['gamebucks_per_fbcredit']) if currency == 'gamebucks' else 1
price = scale_factor * coeff * 0.06 * math.exp(0.75 * (math.log10(amount) - 2.2 * math.pow(math.log10(amount), -1.25)))
return price
else:
return 1
def cost_piecewise_linear(gamedata, session, resname, amount, currency):
price_points = get_resource_parameter(gamedata, session, 'resource_price_formula_piecewise_linear_points', resname)
for i in xrange(1, len(price_points)):
if (amount < price_points[i][0] or i == len(price_points) - 1):
scale_factor = get_resource_parameter(gamedata, session, 'resource_price_formula_scale', resname)
coeff = (1 / resolve_value(session, 'gamebucks_per_fbcredit', gamedata['store']['gamebucks_per_fbcredit'])) if currency != 'gamebucks' else 1
# cast to float so that we don't use integer division
slope = float(price_points[i][1] - price_points[i - 1][1]) / (price_points[i][0] - price_points[i - 1][0])
return scale_factor * coeff * (price_points[i - 1][1] + slope * (amount - price_points[i - 1][0]))
raise Exception('Unhandled case while calculating piecewise_linear prices. This should never happen.')
def cost_by_townhall_level(gamedata, session, resname, amount, currency):
scale_factor = get_resource_parameter(gamedata, session, 'resource_price_formula_scale', resname)
price_points = get_resource_parameter(gamedata, session, 'resource_price_formula_by_townhall_level', resname)
if not session: raise Exception('must have session to compute townhall level')
th_level = session.player.get_townhall_level()
assert th_level >= 1 and th_level <= len(price_points)
res_per_gamebuck = price_points[th_level-1]
coeff = (1 / resolve_value(session, 'gamebucks_per_fbcredit', gamedata['store']['gamebucks_per_fbcredit'])) if currency != 'gamebucks' else 1
return scale_factor * coeff * amount / float(res_per_gamebuck)
price_formulas = {
'legacy_exp_log': cost_legacy_exp_log,
'piecewise_linear': cost_piecewise_linear,
'by_townhall_level': cost_by_townhall_level,
}
# returns the price of an arbitrary amount of fungible resources
def get_resource_price(gamedata, session, resname, amount, currency):
if amount <= 0:
return 0
price_formula_name = get_resource_parameter(gamedata, session, 'resource_price_formula', resname)
return math.ceil(price_formulas[price_formula_name](gamedata, session, resname, amount, currency))
| [
"dmaas@spinpunch.com"
] | dmaas@spinpunch.com |
9569faaeef0944b297f019a3868299475553cfa7 | 3468fe20cd1128eb8e18354c30490421e504e4af | /portal/apps/videologue/templatetags/videologue_tags.py | 3b8b0d5eac75b16567487b0cda7de58bf28361bb | [
"LicenseRef-scancode-unknown-license-reference",
"BSD-3-Clause"
] | permissive | djpeluca/utopia-cms | 7da45422ffc4f1f397f385ea37243f2745a758de | 1e444afea565fdc734abf449b8ebe9b7c2c47d80 | refs/heads/main | 2023-08-19T23:04:44.666527 | 2021-10-27T01:55:11 | 2021-10-27T01:55:11 | 387,323,009 | 0 | 0 | BSD-3-Clause | 2021-07-19T03:03:48 | 2021-07-19T03:03:48 | null | UTF-8 | Python | false | false | 1,663 | py | # -*- coding: utf-8 -*-
from videologue.models import YouTubeVideo
from django.template import (Context, Library, loader, Node, TemplateSyntaxError)
from string import lower
register = Library()
TPL_DIR = 'videologue/templates/'
class RenderLatestVideoNode(Node):
def __init__(self, kwcontext):
self.kw = kwcontext
def render(self, context):
try:
video = YouTubeVideo.objects.latest()
except:
video = None
context.update({self.kw: video})
return ''
class RenderVideoNode(Node):
def __init__(self, kwcontext, vid):
self.kw = kwcontext
self.vid = vid
def render(self, context):
try:
video = YouTubeVideo.objects.get(id=self.vid)
except:
video = None
context.update({self.kw: video})
return ''
@register.tag
def get_latest_video(parser, token):
"""Usage: {% get_latest_video as video_object %}"""
bits = token.contents.split()
if len(bits) != 3 or bits[1] != 'as':
raise TemplateSyntaxError('Invalid arguments for %s' % bits[0])
return RenderLatestVideoNode(bits[2])
@register.tag
def get_video(parser, token):
"""Usage: {% get_video id as video_object %}"""
bits = token.contents.split()
if len(bits) != 4 or bits[2] != 'as':
raise TemplateSyntaxError('Invalid arguments for %s' % bits[0])
return RenderVideoNode(bits[3], bits[1])
@register.filter
def render_video(video):
if not video:
return ''
tpl = loader.get_template(
TPL_DIR + '%s/module.html' % lower(video.__class__.__name__))
return tpl.render({'video': video})
| [
"apacheco@ladiaria.com.uy"
] | apacheco@ladiaria.com.uy |
d4acfecc03cbaee58f18dace5a929be206713d9f | 53fab060fa262e5d5026e0807d93c75fb81e67b9 | /backup/user_248/ch76_2020_04_12_20_47_41_177110.py | 048734f0d57c078395eafd03c2ef252a0ed91c86 | [] | no_license | gabriellaec/desoft-analise-exercicios | b77c6999424c5ce7e44086a12589a0ad43d6adca | 01940ab0897aa6005764fc220b900e4d6161d36b | refs/heads/main | 2023-01-31T17:19:42.050628 | 2020-12-16T05:21:31 | 2020-12-16T05:21:31 | 306,735,108 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 189 | py | def aniversariantes_de_setembro(dicionario):
dicionario2={}
for i in dicionario:
if dicionario[i][2:4]=='09':
dicionario2[i]=dicionario[i]
return dicionario2 | [
"you@example.com"
] | you@example.com |
1fec1f837a84009fd62bc40d7396ed04597bd931 | 76b7eb861bdaf84dc5cb7e8945de95c7605ae4bf | /campfin/efilings/management/commands/process_skede_lines.py | 815f82f0150ea0446563f6152d1ada3d1e64e1c2 | [] | no_license | jsfenfen/paper_fec | 60e4587d41b183be9a2a714d326bbcbe69657aa0 | f05d37ef78a22e2f0c071408914e6667f4d4b988 | refs/heads/master | 2018-12-28T04:20:16.589236 | 2015-09-01T06:28:07 | 2015-09-01T06:28:07 | 27,084,007 | 5 | 1 | null | 2015-08-19T16:19:43 | 2014-11-24T16:27:23 | Python | UTF-8 | Python | false | false | 4,917 | py | from django.core.management.base import BaseCommand, CommandError
from datetime import date
from fec_alerts.models import new_filing
from formdata.models import SkedE
from summary_data.models import Candidate_Overlay
from reconciliation.fec_reconciler import match_by_name, run_fec_query
from add_committees_to_skede import attach_committee_to_skedeline
from shared_utils.cycle_utils import get_cycle_from_date
def set_data_from_self(skedeline):
name = None
if skedeline.candidate_middle_name:
name = "%s, %s %s" % (skedeline.candidate_last_name, skedeline.candidate_first_name, skedeline.candidate_middle_name)
else:
name = "%s, %s" % (skedeline.candidate_last_name, skedeline.candidate_first_name)
skedeline.candidate_district_checked = skedeline.candidate_district
skedeline.candidate_office_checked = skedeline.candidate_office
skedeline.candidate_state_checked = skedeline.candidate_state
skedeline.candidate_name_checked = name
skedeline.support_oppose_checked = skedeline.support_oppose_code
skedeline.save()
def set_data_from_candidate_id(skedeline, candidate_id):
cycle_date = skedeline.effective_date
THIS_CYCLE = None
if cycle_date:
THIS_CYCLE = get_cycle_from_date(cycle_date)
try:
this_candidate = Candidate_Overlay.objects.get(fec_id=candidate_id, cycle=(THIS_CYCLE))
skedeline.candidate_id_checked = this_candidate.fec_id
skedeline.candidate_checked = this_candidate
skedeline.candidate_district_checked = this_candidate.office_district
skedeline.district_checked = this_candidate.district
skedeline.candidate_office_checked = this_candidate.office
skedeline.candidate_party_checked = this_candidate.party
skedeline.candidate_state_checked = this_candidate.state
skedeline.candidate_name_checked = this_candidate.name
skedeline.support_oppose_checked = skedeline.support_oppose_code
skedeline.save()
return True
except Candidate_Overlay.DoesNotExist:
print "Missing candidate overlay for %s filing %s" % (candidate_id, skedeline.filing_number)
return False
def fuzzy_match_candidate(skedeline):
state = skedeline.candidate_state
name_to_check = "%s, %s" % (skedeline.candidate_last_name, skedeline.candidate_first_name)
office = skedeline.candidate_office
state = skedeline.candidate_state
cycle_date = skedeline.effective_date
THIS_CYCLE = None
if cycle_date:
THIS_CYCLE = get_cycle_from_date(cycle_date)
result = run_fec_query(name_to_check, state=state, office=office, cycle=THIS_CYCLE, fuzzy=True)
if result:
if result[0]['match']:
print "Fuzzy matching matched %s, %s, %s to %s with id %s" % (name_to_check, state, office, result[0]['name'], result[0]['id'])
return set_data_from_candidate_id(skedeline, result[0]['id'])
print "Fuzzy matching couldn't match %s, %s, %s" % (name_to_check, state, office)
return False
def attach_ie_target(skedeline):
candidate_id = skedeline.candidate_id_number
# If there's a candidate id, enter the data from the overlay
if candidate_id:
result = set_data_from_candidate_id(skedeline, candidate_id)
if result:
return True
else:
# if we're still here, try a fuzzy match
fuzzy_match_result = fuzzy_match_candidate(skedeline)
if fuzzy_match_result:
return True
# fall back on data that's already there.
set_data_from_self(skedeline)
return False
class Command(BaseCommand):
help = "Set the name and details of the candidate targetted"
requires_model_validation = False
def handle(self, *args, **options):
filings_to_process = new_filing.objects.filter(data_is_processed=True, body_rows_superceded=True).exclude(ie_rows_processed=True).order_by('filing_number')
for this_filing in filings_to_process:
lines_present = this_filing.lines_present
has_sked_E = False
try:
lines_present['E']
if int(lines_present['E']) > 0:
has_sked_E = True
except KeyError:
continue
if has_sked_E:
#print "processing %s " % (this_filing.filing_number)
#print lines_present, lines_present['E']
skedelines = SkedE.objects.filter(filing_number=this_filing.filing_number)
for skede in skedelines:
attach_committee_to_skedeline(skede)
attach_ie_target(skede)
# mark that we've been processed.
this_filing.ie_rows_processed=True
this_filing.save() | [
"jfenton@sunlightfoundation.com"
] | jfenton@sunlightfoundation.com |
2ed31607d5a3eb7e3e91fa0eba51db90b0af3268 | a81c1492783e7cafcaf7da5f0402d2d283b7ce37 | /google/ads/google_ads/v6/services/transports/campaign_bid_modifier_service_grpc_transport.py | 9c627ba77a44921e7a3dd9698c767432afb6e484 | [
"Apache-2.0"
] | permissive | VincentFritzsche/google-ads-python | 6650cf426b34392d1f58fb912cb3fc25b848e766 | 969eff5b6c3cec59d21191fa178cffb6270074c3 | refs/heads/master | 2023-03-19T17:23:26.959021 | 2021-03-18T18:18:38 | 2021-03-18T18:18:38 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,322 | py | # -*- coding: utf-8 -*-
#
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import google.api_core.grpc_helpers
from google.ads.google_ads.v6.proto.services import campaign_bid_modifier_service_pb2_grpc
class CampaignBidModifierServiceGrpcTransport(object):
"""gRPC transport class providing stubs for
google.ads.googleads.v6.services CampaignBidModifierService API.
The transport provides access to the raw gRPC stubs,
which can be used to take advantage of advanced
features of gRPC.
"""
# The scopes needed to make gRPC calls to all of the methods defined
# in this service.
_OAUTH_SCOPES = (
'https://www.googleapis.com/auth/adwords',
)
def __init__(self, channel=None, credentials=None,
address='googleads.googleapis.com:443'):
"""Instantiate the transport class.
Args:
channel (grpc.Channel): A ``Channel`` instance through
which to make calls. This argument is mutually exclusive
with ``credentials``; providing both will raise an exception.
credentials (google.auth.credentials.Credentials): The
authorization credentials to attach to requests. These
credentials identify this application to the service. If none
are specified, the client will attempt to ascertain the
credentials from the environment.
address (str): The address where the service is hosted.
"""
# If both `channel` and `credentials` are specified, raise an
# exception (channels come with credentials baked in already).
if channel is not None and credentials is not None:
raise ValueError(
'The `channel` and `credentials` arguments are mutually '
'exclusive.',
)
# Create the channel.
if channel is None:
channel = self.create_channel(
address=address,
credentials=credentials,
options={
'grpc.max_send_message_length': -1,
'grpc.max_receive_message_length': -1,
}.items(),
)
self._channel = channel
# gRPC uses objects called "stubs" that are bound to the
# channel and provide a basic method for each RPC.
self._stubs = {
'campaign_bid_modifier_service_stub': campaign_bid_modifier_service_pb2_grpc.CampaignBidModifierServiceStub(channel),
}
@classmethod
def create_channel(
cls,
address='googleads.googleapis.com:443',
credentials=None,
**kwargs):
"""Create and return a gRPC channel object.
Args:
address (str): The host for the channel to use.
credentials (~.Credentials): The
authorization credentials to attach to requests. These
credentials identify this application to the service. If
none are specified, the client will attempt to ascertain
the credentials from the environment.
kwargs (dict): Keyword arguments, which are passed to the
channel creation.
Returns:
grpc.Channel: A gRPC channel object.
"""
return google.api_core.grpc_helpers.create_channel(
address,
credentials=credentials,
scopes=cls._OAUTH_SCOPES,
**kwargs
)
@property
def channel(self):
"""The gRPC channel used by the transport.
Returns:
grpc.Channel: A gRPC channel object.
"""
return self._channel
@property
def get_campaign_bid_modifier(self):
"""Return the gRPC stub for :meth:`CampaignBidModifierServiceClient.get_campaign_bid_modifier`.
Returns the requested campaign bid modifier in full detail.
Returns:
Callable: A callable which accepts the appropriate
deserialized request object and returns a
deserialized response object.
"""
return self._stubs['campaign_bid_modifier_service_stub'].GetCampaignBidModifier
@property
def mutate_campaign_bid_modifiers(self):
"""Return the gRPC stub for :meth:`CampaignBidModifierServiceClient.mutate_campaign_bid_modifiers`.
Creates, updates, or removes campaign bid modifiers.
Operation statuses are returned.
Returns:
Callable: A callable which accepts the appropriate
deserialized request object and returns a
deserialized response object.
"""
return self._stubs['campaign_bid_modifier_service_stub'].MutateCampaignBidModifiers | [
"noreply@github.com"
] | VincentFritzsche.noreply@github.com |
db3f0149082015ad2e316bc5ffa3e4203d75ed58 | 9edaf93c833ba90ae9a903aa3c44c407a7e55198 | /netex/models/tram_submode_enumeration.py | 21e98e1f17bfaf27cc43a6cf15270a4c5b5075be | [] | no_license | tefra/xsdata-samples | c50aab4828b8c7c4448dbdab9c67d1ebc519e292 | ef027fe02e6a075d8ed676c86a80e9647d944571 | refs/heads/main | 2023-08-14T10:31:12.152696 | 2023-07-25T18:01:22 | 2023-07-25T18:01:22 | 222,543,692 | 6 | 1 | null | 2023-06-25T07:21:04 | 2019-11-18T21:00:37 | Python | UTF-8 | Python | false | false | 355 | py | from enum import Enum
__NAMESPACE__ = "http://www.netex.org.uk/netex"
class TramSubmodeEnumeration(Enum):
UNKNOWN = "unknown"
UNDEFINED = "undefined"
CITY_TRAM = "cityTram"
LOCAL_TRAM = "localTram"
REGIONAL_TRAM = "regionalTram"
SIGHTSEEING_TRAM = "sightseeingTram"
SHUTTLE_TRAM = "shuttleTram"
TRAIN_TRAM = "trainTram"
| [
"chris@komposta.net"
] | chris@komposta.net |
47cc84638073c784a1f807c048912002bf7587f6 | 780b01976dad99c7c2ed948b8473aa4e2d0404ba | /scripts/alphas_archive/zs_callspread/alpha_ichimokucloud_long_bullish_dec13.py | 61e52350abd7d1776b0b7786ea4b29869a6c59b1 | [] | no_license | trendmanagement/tmqrexo_alexveden | a8ad699c2c3df4ce283346d287aff4364059a351 | 4d92e2ee2bc97ea2fcf075382d4a5f80ce3d72e4 | refs/heads/master | 2021-03-16T08:38:00.518593 | 2019-01-23T08:30:18 | 2019-01-23T08:30:18 | 56,336,692 | 1 | 1 | null | 2019-01-22T14:21:03 | 2016-04-15T17:05:53 | Python | UTF-8 | Python | false | false | 1,425 | py | #
#
# Automatically generated file
# Created at: 2016-12-16 11:13:41.634827
#
from backtester.strategy import OptParam
from backtester.swarms.rebalancing import SwarmRebalance
from backtester.strategy import OptParamArray
from backtester.swarms.rankingclasses import RankerBestWithCorrel
from backtester.costs import CostsManagerEXOFixed
from strategies.strategy_ichimokucloud import StrategyIchimokuCloud
STRATEGY_NAME = StrategyIchimokuCloud.name
STRATEGY_SUFFIX = "_Bullish_Dec13"
STRATEGY_CONTEXT = {
'strategy': {
'class': StrategyIchimokuCloud,
'opt_params': [
OptParamArray('Direction', [1]),
OptParam('conversion_line_period', 9, 5, 5, 13),
OptParam('base_line_period', 26, 26, 26, 13),
OptParam('leading_spans_lookahead_period', 26, 26, 26, 10),
OptParam('leading_span_b_period', 52, 13, 13, 10),
OptParamArray('RulesIndex', [0]),
OptParam('MedianPeriod', 5, 45, 45, 10),
],
'exo_name': 'ZS_CallSpread',
},
'costs': {
'context': {
'costs_options': 3.0,
'costs_futures': 3.0,
},
'manager': CostsManagerEXOFixed,
},
'swarm': {
'rebalance_time_function': SwarmRebalance.every_friday,
'members_count': 1,
'ranking_class': RankerBestWithCorrel(window_size=-1, correl_threshold=-0.5),
},
}
| [
"i@alexveden.com"
] | i@alexveden.com |
c3259cc6b10289095af347d864f6e9ffaaad2ed2 | c3feebac5afce89b0261168286cc5052c20a89b7 | /gui/imageViewWidget.py | 6a4a745cec118d0a796e5de7fd5846cca2c62e7f | [] | no_license | iHaD/meShaderEd | 6252337ba8d152f89854186b468ff3ce226a254e | f305ae7aaf669317eb0470af18ee82b4b62a3e7d | refs/heads/master | 2021-01-18T02:31:06.974282 | 2014-07-14T11:05:52 | 2014-07-14T11:05:52 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,902 | py | #===============================================================================
# imageViewWidget.py
#
#
#
#===============================================================================
import os, sys
from PyQt4 import QtCore, QtGui
from PyQt4.QtCore import QDir, QString, QModelIndex
from PyQt4.QtGui import QFileSystemModel
from PyQt4.QtGui import QFileIconProvider
from ui_imageViewWidget import Ui_imageViewWidget
import gui.ui_settings as UI
from core.node import Node
from core.nodeLibrary import NodeLibrary
#
# ImageViewWidget
#
class ImageViewWidget ( QtGui.QWidget ) :
#
# __init__
#
def __init__ ( self ) :
#
QtGui.QWidget.__init__ ( self )
# This is always the same
self.ui = Ui_imageViewWidget ()
self.ui.setupUi ( self )
self.ui.selector.setMinimumSize ( QtCore.QSize ( UI.COMBO_WIDTH, UI.COMBO_HEIGHT ) )
self.ui.selector.setMaximumSize ( QtCore.QSize( UI.MAX, UI.COMBO_HEIGHT ) )
self.imageNodes = []
#self.ui.treeView.setDragEnabled ( True )
#self.ui.treeView.setRootIsDecorated( True )
QtCore.QObject.connect ( self.ui.imageArea, QtCore.SIGNAL ( 'mouseDoubleClickEvent' ), self.updateViewer )
QtCore.QObject.connect ( self.ui.selector, QtCore.SIGNAL ( 'currentIndexChanged(int)' ), self.onViewerChanged )
#QtCore.QObject.connect( self.ui, QtCore.SIGNAL( 'paramChanged()' ), self.onParamChanged )
#self.updateGui()
#self.emit( QtCore.SIGNAL( 'onGfxNodeParamChanged(QObject,QObject)' ), self, param.name )
#
# currentImageNode
#
def currentImageNode ( self ) :
gfxNode = None
idx = self.ui.selector.currentIndex ()
if len ( self.imageNodes ) > 0 :
gfxNode = self.imageNodes [ idx ]
return gfxNode
#
# addViewer
#
def addViewer ( self, gfxNode ) :
#
self.imageNodes.append ( gfxNode )
self.ui.selector.addItem ( gfxNode.node.label )
#
# removeAllViewers
#
def removeAllViewers ( self ) :
#
self.imageNodes = []
self.ui.selector.clear()
#
# removeViewer
#
def removeViewer ( self, gfxNode ) :
#
for i in range ( 0, len ( self.imageNodes ) ) :
if gfxNode == self.imageNodes [ i ] :
self.imageNodes.pop ( i )
self.ui.selector.removeItem ( i )
#QtCore.QObject.disconnect ( gfxNode.node, QtCore.SIGNAL( 'onNodeParamChanged(QObject,QObject)' ), self.onNodeParamChanged )
break
#
# onViewerChanged
#
def onViewerChanged ( self, idx ) :
#
if len ( self.imageNodes ) > 0 :
print ">> ImageViewWidget.onViewerChanged to %s" % self.imageNodes [ idx ].node.label
#QtCore.QObject.connect( self.imageNodes[ idx ].node, QtCore.SIGNAL( 'onNodeParamChanged(QObject,QObject)' ), self.onNodeParamChanged )
self.updateViewer ( compute = False )
#
# updateViewer
#
def updateViewer ( self, compute = True ) :
#
print ">> ImageViewWidget.updateViewer"
RenderViewMode = False
idx = self.ui.selector.currentIndex ()
if len ( self.imageNodes ) > 0 :
gfxNode = self.imageNodes [ idx ]
print ">> ImageViewWidget.getImageName on %s" % gfxNode.node.label
imageInputParam = gfxNode.node.getInputParamByName ( 'image' )
if imageInputParam is not None :
if gfxNode.node.isInputParamLinked ( imageInputParam ):
link = gfxNode.node.inputLinks [ imageInputParam ]
displayParam = link.srcNode.getInputParamByName ( 'DisplayDriver' )
if displayParam is not None :
print '>> Display driver = %s' % displayParam.value
if displayParam.value != 'tiff' :
RenderViewMode = True
if compute :
imageName = gfxNode.node.computeNode ()
else :
imageName = gfxNode.node.imageName
print ">> ImageViewWidget: imageName = %s" % imageName
if not RenderViewMode :
self.ui.imageArea.setImage ( imageName )
#imageParam = None
#for param in gfxNode.node.inputParams :
# if param.name == 'image' :
# imageParam = param
# break
#if imageParam is not None :
# print ">> ImageViewWidget: image = %s" % imageParam.value
# self.ui.imageArea.setImage ( imageParam.value )
#
# autoUpdate
#
def autoUpdate ( self ) : return self.ui.chk_auto.isChecked ()
#
# onNodeParamChanged
#
def onNodeParamChanged ( self, node, param ) :
#
print ">> ImageViewWidget.onNodeParamChanged %s %s" % ( node.label, param.name )
if node == self.currentImageNode().node :
self.updateViewer ()
#
# onNodeLabelChanged
#
def onNodeLabelChanged ( self, gfxNode, newLabel ) :
#
print ">> ImageViewWidget.onNodeLabelChanged %s %s" % ( gfxNode.node.label, newLabel )
i = 0
for i in range ( len ( self.imageNodes ) ) :
if gfxNode == self.imageNodes [ i ] :
self.ui.selector.setItemText ( i, newLabel )
break
i += 1
| [
"Yuri.Meshalkin@gmail.com"
] | Yuri.Meshalkin@gmail.com |
bb775d1214a866c4b577069f0c4dc8e59ea5672e | 90386753276ced3360e76f5551d25f6618613a23 | /Python 100例/44.py | 4a489a499ad6870560b2f7f32b7732f19f290c0b | [] | no_license | yflfly/funny-python | 5d69dbcafd1c98c5e4046b85f8678e4bcf53870c | ff42b84b46152234e3bc824ae8016f354af450c4 | refs/heads/master | 2023-03-09T16:58:27.116227 | 2021-03-01T14:07:46 | 2021-03-01T14:07:46 | 280,602,099 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 731 | py | #!/usr/bin/python
# -*- coding: UTF-8 -*-
'''
两个 3 行 3 列的矩阵,实现其对应位置的数据相加,并返回一个新矩阵:
X = [[12,7,3],
[4 ,5,6],
[7 ,8,9]]
Y = [[5,8,1],
[6,7,3],
[4,5,9]]
程序分析:创建一个新的 3 行 3 列的矩阵,使用 for 迭代并取出 X 和 Y 矩阵中对应位置的值,相加后放到新矩阵的对应位置中。
'''
X = [[12, 7, 3],
[4, 5, 6],
[7, 8, 9]]
Y = [[5, 8, 1],
[6, 7, 3],
[4, 5, 9]]
result = [[0, 0, 0],
[0, 0, 0],
[0, 0, 0]]
# 迭代输出行
for i in range(len(X)):
# 迭代输出列
for j in range(len(X[0])):
result[i][j] = X[i][j] + Y[i][j]
for r in result:
print(r) | [
"yangfengling@inttech.cn"
] | yangfengling@inttech.cn |
7aff24db643aa477df397dca2c7229896579646e | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p03032/s079897721.py | f427a2d0ce6513e4da77d12dc104cf67a5636e74 | [] | no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 625 | py | import sys
input = sys.stdin.readline
def main():
N, K = map(int, input().split())
V = list(map(int, input().split()))
ans = -float("inf")
for t in range(min(N, K) + 1):
s = K - t
for l in range(t + 1):
r = t - l
gem = V[:l]
gem += V[-r:] if r != 0 else []
gem.sort()
value = sum(gem)
for i in range(min(s, t)):
if gem[i] < 0:
value -= gem[i]
else:
break
ans = max(ans, value)
print(ans)
if __name__ == "__main__":
main()
| [
"66529651+Aastha2104@users.noreply.github.com"
] | 66529651+Aastha2104@users.noreply.github.com |
3a96c3ae0d08fa37f2a0225887bc6118277e5fdc | 4eee7b0e53818bd21ca009d742ac8391202620ba | /home/three/num_nd.py | e73b8254580d1b8166605ff2b113fb765d64cf50 | [] | no_license | mysqlf/python | e7c44bafee5abefc1356da9fb123fe3d6b3d2e7c | e8aacf30e046d71681a93a5f333de72e48410ebf | refs/heads/master | 2020-05-21T13:33:28.289668 | 2017-11-09T02:13:48 | 2017-11-09T02:13:48 | 61,173,293 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 658 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# @author Greedywolf
print(round(1.23, 1))
# 1.2
print(round(11.27, -1))
# 10.0
print(round(16.27, -1))
# 20.0
print(round(-1.27, -1))
#-0.0
tmpr = round(1.25361, 3)
print(tmpr)
# 1.254
tmp = 1.25361
tmpf = format(tmp, '0.3f')
print(tmpf)
print(tmp)
a = 1627731
# 传给 round() 函数的 ndigits 参数可以是负数,这种情况下,
# 舍入运算会作用在十位、百位、千位等上面
print(round(a, -1))
# 1627730
print(round(a, -2))
# 1627700
print(round(a, -4))
# 1630000
a = 2.1
b = 4.2
c = a + b
print(c)
# 6.300000000000001
print(round(c, 2))
# 6.3
# 这个与输出一定宽度的
| [
"1154505909@qq.com"
] | 1154505909@qq.com |
342c27c478778c9c198081a0a88745f5c731d310 | c61b87703a2e385815cf4abc7cf62221fe6f0d70 | /build/lib/tugalinhas/util.py | 139f04d0127d749bd90670555bc2e5a9db0d1052 | [] | no_license | griselgrisel/pytuga | 39f89c24826e46685da34ec912e56c735d4b786e | e9596ac81a0c563130462ac71bfe96dd01120001 | refs/heads/master | 2020-12-28T20:41:48.815642 | 2015-11-11T20:15:30 | 2015-11-11T20:15:30 | 45,982,808 | 0 | 0 | null | 2015-11-11T13:17:41 | 2015-11-11T13:17:41 | null | UTF-8 | Python | false | false | 7,838 | py | '''A random collection of utility functions'''
import os
import math
from random import randrange
from math import copysign
from PyQt4 import QtSvg, QtGui, QtCore
from tugalinhas import SHARED_ART_PATH, NODEFAULT
#
# Custom errors
#
class TooManyPens(RuntimeError):
pass
#
# Custom functions
#
def as_qpoint(pt):
'''enforce that pt is an instance of QPointF'''
if isinstance(pt, QtCore.QPointF):
return pt
else:
return QtCore.QPointF(*pt)
def sign(x):
'return 1 if x is positive, -1 if negative, or zero'
return copysign(1, x)
def plist(f):
    '''Given a function object, return a list of
    [(arg name, default value or NODEFAULT), ...] covering every
    positional argument of *f*.
    '''
    code = f.__code__
    defaults = f.__defaults__ or ()
    n_args = code.co_argcount
    # Index of the first argument that carries a default value.
    first_default = n_args - len(defaults)
    pairs = []
    for idx, name in enumerate(code.co_varnames[:n_args]):
        if idx >= first_default:
            value = defaults[idx - first_default]
        else:
            value = NODEFAULT
        pairs.append((name, value))
    return pairs
class SvgRenderer(object):
    'factory for svg renderer objects'
    def __init__(self, app):
        # app: presumably the QApplication instance -- it is passed as the
        # Qt parent of every renderer created by getrend().
        self.app = app
    def getrend(self, filepath=None):
        '''Return a handle to the shared SVG renderer for the given svg file.
        If no filepath is given, return the renderer for the default svg file.
        '''
        if filepath is None:
            # NOTE(review): os.path.join with a single argument returns
            # SHARED_ART_PATH unchanged -- a filename component looks
            # missing here; confirm against callers.
            filepath = os.path.join(SHARED_ART_PATH)
        return QtSvg.QSvgRenderer(filepath, self.app)
def choose_color(r=None, g=None, b=None, a=None):
    '''Normalize input to a tuple of (r, g, b, a).

    Accepts, via *r*:
      - the strings 'random', 'rlight', 'rmedium', 'rdark', 'ralpha'
        for randomly generated colors;
      - a 3- or 4-element sequence of components;
      - a string convertible to int (packed RGBA) or a Qt color name;
    or explicit r, g, b (and optional a) numeric components.
    Returns (None, None, None, None) when no color is given.
    Raises ValueError for out-of-range components / unknown names and
    TypeError for partially specified components.
    '''
    if a is None:
        a = 255
    elif not (0 <= a <= 255):
        raise ValueError('Alpha value must be between 0 and 255')
    # Random colors
    if r == 'random':
        r, g, b = [randrange(256) for _ in range(3)]
    elif r == 'rlight':
        r, g, b = [randrange(200, 256) for _ in range(3)]
    elif r == 'rmedium':
        r, g, b = [randrange(100, 200) for _ in range(3)]
    elif r == 'rdark':
        r, g, b = [randrange(100) for _ in range(3)]
    elif r == 'ralpha':
        # Random color with a random semi-transparent alpha.
        r, g, b = [randrange(256) for _ in range(3)]
        a = randrange(100, 200)
    # Null colors (shouldn't raise an error?)
    elif r is g is b is None:
        return None, None, None, None
    # From RGB components
    elif g is not None and b is not None:
        if not (0 <= r <= 255 and 0 <= g <= 255 and 0 <= b <= 255):
            raise ValueError('Color components must be between 0 and 255')
        # Round-trip through QColor to normalize the component values.
        c = QtGui.QColor.fromRgb(r, g, b, a)
        r, g, b, a = c.red(), c.green(), c.blue(), c.alpha()
    # From a tuple or sequence
    elif r is not None:
        try:
            if len(r) == 4:
                rr, gg, bb, aa = r
                rr, gg, bb, aa = int(rr), int(gg), int(bb), int(aa)
            elif len(r) == 3:
                rr, gg, bb = r
                rr, gg, bb = int(rr), int(gg), int(bb)
                aa = 255
            else:
                raise ValueError
        except ValueError:
            # Not an unpackable sequence of numbers: try a packed-int
            # string, then a named Qt color.
            # NOTE(review): a plain int argument raises TypeError at
            # len(r) above and never reaches this branch -- confirm
            # whether integer input was meant to be supported.
            try:
                ci = int(r)
                c = QtGui.QColor.fromRgba(ci)
            except ValueError:
                if not QtGui.QColor.isValidColor(r):
                    raise ValueError
                c = QtGui.QColor(r)
            r, g, b, a = c.red(), c.green(), c.blue(), c.alpha()
        else:
            r, g, b, a = rr, gg, bb, aa
    # Bad input...
    elif r is None or g is None or b is None:
        raise TypeError
    return r, g, b, a
def nudge_color(color, r=None, g=None, b=None, a=None):
    """Shift the RGB(A) components of *color* and return a new RGBA tuple.

    *color* is a 3- or 4-element sequence; a missing alpha defaults
    to 255.  Each of *r*, *g*, *b*, *a* may be either:

    - a number, added to (subtracted from, if negative) the component, or
    - a percentage string such as ``'75%'``, multiplied into the
      component to scale it up or down.

    Results are clamped to 0..255 but *not* rounded to integers, which
    allows smooth accumulated variations; the tugalinhas ``color()``
    function converts the values to ints when they are finally used, so
    keep the precise values separately if you need them.

    Raises ValueError if *color* does not have 3 or 4 elements.

    >>> nudge_color((100, 100, 100), g=15)
    (100, 115, 100, 255)
    >>> nudge_color((100, 100, 100), r=-12.5)
    (87.5, 100, 100, 255)
    >>> color = nudge_color((100, 100, 100), b='75%')
    >>> color
    (100, 100, 75.0, 255)
    >>> nudge_color(color, b='75%')
    (100, 100, 56.25, 255)
    >>> nudge_color((100, 100, 100), r=50, g='105%', b=-10)
    (150, 105.0, 90, 255)
    """
    if len(color) == 3:
        rc, gc, bc = color
        ac = 255
    elif len(color) == 4:
        rc, gc, bc, ac = color
    else:
        raise ValueError

    def _nudge(component, delta):
        # delta is None (leave unchanged), a numeric offset, or a
        # '<percent>%' string; the result is always clamped to 0..255
        # (clamping applies even to untouched components, as before).
        if delta is not None:
            try:
                component += delta
            except TypeError:
                component *= (float(delta[:-1]) / 100.0)
        return max(0, min(component, 255))

    return (_nudge(rc, r), _nudge(gc, g), _nudge(bc, b), _nudge(ac, a))
def docfrom(function, decorated=None):
    """Copy *function*'s docstring onto *decorated* and return it.

    Works both as a direct call -- ``docfrom(src, target)`` -- and as a
    decorator factory:

    >>> @docfrom(sum)
    ... def my_sum(args):
    ...     return sum(args, 0.0)
    """
    if decorated is None:
        # Decorator-factory form: defer the copy until the target is known.
        return lambda func: docfrom(function, func)
    decorated.__doc__ = function.__doc__
    return decorated
#
# From Python's turtle module
#
class Vec2D(tuple):
    """A 2D vector stored as an (x, y) tuple, with vector arithmetic.

    For vectors a, b and scalar k:
        a + b           vector addition
        a - b           vector subtraction
        a * b           inner (dot) product
        k * a, a * k    scalar multiplication
        abs(a)          Euclidean length
        a.rotate(angle) counterclockwise rotation by *angle* degrees
    """
    def __new__(cls, x, y):
        return tuple.__new__(cls, (x, y))

    def __add__(self, other):
        ax, ay = self
        return Vec2D(ax + other[0], ay + other[1])

    def __mul__(self, other):
        if isinstance(other, Vec2D):
            # vector * vector -> dot product
            return self[0] * other[0] + self[1] * other[1]
        # vector * scalar
        return Vec2D(self[0] * other, self[1] * other)

    def __rmul__(self, other):
        # scalar * vector; other operand types fall through (returns None,
        # matching the original behavior).
        if isinstance(other, (int, float)):
            return Vec2D(self[0] * other, self[1] * other)

    def __sub__(self, other):
        ax, ay = self
        return Vec2D(ax - other[0], ay - other[1])

    def __neg__(self):
        return Vec2D(-self[0], -self[1])

    def __abs__(self):
        return (self[0] ** 2 + self[1] ** 2) ** 0.5

    def rotate(self, angle):
        """Rotate self counterclockwise by *angle* degrees."""
        angle = angle * math.pi / 180.0
        c, s = math.cos(angle), math.sin(angle)
        x, y = self
        # (x, y) rotated = x*(c, s) + y*(-s, c)
        return Vec2D(x * c - y * s, y * c + x * s)

    def __getnewargs__(self):
        # Support copy/pickle of the tuple subclass.
        return (self[0], self[1])

    def __repr__(self):
        return "(%.2f,%.2f)" % self
| [
"fabiomacedomendes@gmail.com"
] | fabiomacedomendes@gmail.com |
2ee0e724dff3317c1dfc707c560983a8e24f7cb5 | f4434c85e3814b6347f8f8099c081ed4af5678a5 | /sdk/communication/azure-communication-administration/samples/phone_number_capabilities_sample.py | 44b9dcb1a8796731fbe8b24846fef0450db96aee | [
"LicenseRef-scancode-generic-cla",
"MIT",
"LGPL-2.1-or-later"
] | permissive | yunhaoling/azure-sdk-for-python | 5da12a174a37672ac6ed8e3c1f863cb77010a506 | c4eb0ca1aadb76ad892114230473034830116362 | refs/heads/master | 2022-06-11T01:17:39.636461 | 2020-12-08T17:42:08 | 2020-12-08T17:42:08 | 177,675,796 | 1 | 0 | MIT | 2020-03-31T20:35:17 | 2019-03-25T22:43:40 | Python | UTF-8 | Python | false | false | 2,811 | py | # coding: utf-8
# -------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
# --------------------------------------------------------------------------
"""
FILE: phone_number_capabilities_sample.py
DESCRIPTION:
This sample demonstrates how to get number capabilities via a connection string, capabilities update id and phone number for capabilities.
USAGE:
python phone_number_capabilities_sample.py
Set the environment variables with your own values before running the sample:
1) AZURE_COMMUNICATION_SERVICE_CONNECTION_STRING - The endpoint of your Azure Communication Service
2) AZURE_COMMUNICATION_SERVICE_PHONENUMBERS_CAPABILITIES_ID - The capabilities id you want to get
3) AZURE_COMMUNICATION_SERVICE_PHONENUMBERS_PHONENUMBER_FOR_CAPABILITIES - The phone number you want to update capabilities to
"""
import os
from azure.communication.administration import (
PhoneNumberAdministrationClient,
NumberUpdateCapabilities
)
connection_str = os.getenv('AZURE_COMMUNICATION_SERVICE_CONNECTION_STRING')
phone_number_administration_client = PhoneNumberAdministrationClient.from_connection_string(connection_str)
capabilities_id = os.getenv('AZURE_COMMUNICATION_SERVICE_PHONENUMBERS_CAPABILITIES_ID', "capabilities-id")
phonenumber_for_capabilities = os.getenv('AZURE_COMMUNICATION_SERVICE_PHONENUMBERS_PHONENUMBER_FOR_CAPABILITIES', "+17771234567")
def list_all_phone_numbers():
    """Print every phone number owned by the configured ACS resource."""
    # [START list_all_phone_numbers]
    list_all_phone_numbers_response = phone_number_administration_client.list_all_phone_numbers()
    # [END list_all_phone_numbers]
    # The response is iterated lazily, page by page.
    print('list_all_phone_numbers_response:')
    for phone_number in list_all_phone_numbers_response:
        print(phone_number)
def get_capabilities_update():
    """Fetch and print a capabilities update by its id (capabilities_id)."""
    # [START get_capabilities_update]
    capabilities_response = phone_number_administration_client.get_capabilities_update(
        capabilities_update_id=capabilities_id
    )
    # [END get_capabilities_update]
    print('capabilities_response:')
    print(capabilities_response)
def update_capabilities():
    """Request the InboundCalling capability for phonenumber_for_capabilities."""
    # [START update_capabilities]
    update = NumberUpdateCapabilities(add=iter(["InboundCalling"]))
    # Map of phone number -> requested capability changes.
    phone_number_capabilities_update = {
        phonenumber_for_capabilities: update
    }
    capabilities_response = phone_number_administration_client.update_capabilities(
        phone_number_capabilities_update=phone_number_capabilities_update
    )
    # [END update_capabilities]
    print('capabilities_response:')
    print(capabilities_response)
if __name__ == '__main__':
list_all_phone_numbers()
get_capabilities_update()
update_capabilities()
| [
"noreply@github.com"
] | yunhaoling.noreply@github.com |
22623047d1de9e48c8ead3eabedeac514d5c8f48 | f4c4e131ce63ce795822d1ff27c9294e4b00887d | /modules/common.py | 6acd27d318860603203386fe30940b0eaa62dcb4 | [] | no_license | shundev/ruscorpora_tagging | d77196b75808743a41f5ecfb3ee5ef204db72c44 | b6d41d1a33ad25cf6a1f24e6fe94c0816e378fa0 | refs/heads/master | 2021-05-30T17:18:37.065460 | 2015-10-20T13:41:37 | 2015-10-20T13:41:37 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 450 | py | # All rights belong to Non-commercial Partnership "Russian National Corpus"
# http://ruscorpora.ru
# editor marks inside a word
editor_brackets = ur'\[\]\<\>'
def quotetext(s):
    """Escape *s* for safe inclusion in XML/HTML text content.

    Replaces the XML metacharacters with character entities, ampersand
    first so the entity markers themselves are not re-escaped.  None or
    an empty string yields u"".

    (The escaped replacement strings were entity-decoded away when this
    file was extracted, turning the replaces into no-ops; restored here.)
    """
    if not s:
        return u""
    return s.replace(u'&', u'&amp;').replace(u'<', u'&lt;').replace(u'>', u'&gt;')
def quoteattr(s):
    """Escape *s* for use inside a quoted XML attribute value.

    Builds on quotetext() (&, <, >) and additionally escapes both quote
    characters and the whitespace controls \\n, \\r, \\t as character
    references so they survive attribute-value normalization.

    NOTE: the replacement entities were lost to entity-decoding when this
    file was extracted; restored per XML 1.0 attribute-value escaping.
    """
    return (quotetext(s)
            .replace(u"'", u'&apos;')
            .replace(u'"', u'&quot;')
            .replace(u'\n', u'&#10;')
            .replace(u'\r', u'&#13;')
            .replace(u'\t', u'&#9;'))
"ishalyminov@gmail.com"
] | ishalyminov@gmail.com |
5852301d44f5755d54598d0a6b389ab9759a8f16 | 636ba2700eaf3a151b73144b510f38c75ab1919d | /Kaggle2/02_image.py | 6fbe71238c67a7659f02cd543a3d79a6841ab069 | [] | no_license | Taerimmm/ML | 17997f388e18c28dfd9de83af98a6d4bebe7e1f0 | 6147cede81ebcc95f21adebf75731fbbb11edfab | refs/heads/master | 2023-06-10T14:26:45.335219 | 2021-07-05T15:30:47 | 2021-07-05T15:30:47 | 324,874,959 | 3 | 0 | null | null | null | null | UTF-8 | Python | false | false | 199 | py | import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
from PIL import Image
import tifffile as tiff
import cv2
import os
from tqdm.notebook import tqdm
import zipfile
import rasterio | [
"xofla7560@naver.com"
] | xofla7560@naver.com |
7095b4a9a2e2976486cba6762f2b417aa67ed27e | 910d4dd8e56e9437cf09dd8b9c61167673140a1f | /0521/경로찾기.py | 812e5313e58abf8a17e66ba6912e44000ace33d7 | [] | no_license | nopasanadamindy/Algorithms | 10825b212395680401b200a37ab4fde9085bc61f | 44b82d2f129c4cc6e811b651c0202a18719689cb | refs/heads/master | 2022-09-28T11:39:54.630487 | 2020-05-29T09:49:56 | 2020-05-29T09:49:56 | 237,923,602 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 693 | py | import sys
sys.stdin = open('경로찾기.txt')
def dfs(v):
    """Flood the graph from vertex *v*; return a 0/1 visited list.

    Reads the module globals G (NxN 0/1 adjacency matrix) and N.  The
    returned list has 1 at index w iff w is reachable from v via at
    least one edge; note that v itself is only marked when it lies on a
    cycle, which is the required semantics for this reachability
    ("path finding") problem.

    Despite the name, the traversal pushes *all* unvisited neighbours
    each round before popping, so the order is only loosely depth-first.
    """
    stack = []
    visited = [0 for _ in range(N)]
    while 1 :
        for w in range(len(G)):
            if G[v][w] == 1 and visited[w] == 0:
                stack.append(w)
                visited[w] = 1
        if len(stack) == 0:
            return visited
        else:
            v = stack.pop()
for i in range(len(a)):
print(*a[i])
T = int(input())
for test_case in range(1, T+1):
N = int(input())
G = []
result = []
for i in range(N):
temp = list(map(int, input().split()))
G.append(temp)
for i in range(N):
result.append(dfs(i))
prin(result) | [
"iuui21@snu.ac.kr"
] | iuui21@snu.ac.kr |
9738d62381adf8b93b9b3f7c29b65d7326b5ba7e | 312dc11233a147e01b01ad42166eca7e8ebf3c66 | /testing/parabolic.py | a0ecc42c218bbf69486a64e488db627605a33d55 | [] | no_license | byronwasti/GuitarTuner | 622f64f21f913f537fffe8c0fd04970fac99af75 | 1a748c436b01b399cc57d24070bddfb61d2f61f8 | refs/heads/master | 2016-09-05T12:24:45.663439 | 2015-04-21T17:50:45 | 2015-04-21T17:50:45 | 32,682,388 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,670 | py | # -*- coding: utf-8 -*-
#
from __future__ import division
from numpy import polyfit, arange
def parabolic(f, x):
    """Quadratic interpolation for estimating the true position of an
    inter-sample maximum when nearby samples are known.

    f is a vector and x is an index for that vector.

    Returns (vx, vy), the coordinates of the vertex of the parabola that
    passes through f[x] and its two neighbours.

    Example:
    Defining a vector f with a local maximum at index 3 (= 6), find local
    maximum if points 2, 3, and 4 actually defined a parabola.

    In [3]: f = [2, 3, 1, 6, 4, 2, 3, 1]

    In [4]: parabolic(f, argmax(f))
    Out[4]: (3.2142857142857144, 6.1607142857142856)
    """
    left, mid, right = f[x - 1], f[x], f[x + 1]
    # Vertex offset from x, from the closed-form parabola fit.
    xv = 0.5 * (left - right) / (left - 2 * mid + right) + x
    yv = mid - 0.25 * (left - right) * (xv - x)
    return (xv, yv)
def parabolic_polyfit(f, x, n):
    """Use the built-in polyfit() function to find the peak of a parabola.

    f is a vector and x is an index for that vector.
    n is the number of samples of the curve used to fit the parabola
    (a symmetric window of n//2 points on either side of x).
    Returns (vx, vy), the vertex of the fitted parabola.
    """
    half = n // 2
    xs = arange(x - half, x + half + 1)
    a, b, c = polyfit(xs, f[x - half:x + half + 1], 2)
    vertex_x = -0.5 * b / a
    vertex_y = a * vertex_x ** 2 + b * vertex_x + c
    return (vertex_x, vertex_y)
if __name__=="__main__":
    from numpy import argmax
    import matplotlib.pyplot as plt

    # Demo: compare the discrete argmax with the parabolic estimate.
    y = [2, 1, 4, 8, 11, 10, 7, 3, 1, 1]
    xm, ym = argmax(y), y[argmax(y)]
    xp, yp = parabolic(y, argmax(y))

    plot = plt.plot(y)
    # NOTE(review): pyplot.hold() was deprecated in matplotlib 2.0 and
    # removed in 3.0 -- this line raises on modern matplotlib.
    plt.hold(True)
    plt.plot(xm, ym, 'o', color='silver')
    plt.plot(xp, yp, 'o', color='blue')
    plt.title('silver = max, blue = estimated max')
"byron.wasti@gmail.com"
] | byron.wasti@gmail.com |
8adf40e302cbe5a0093ac6c8e47055eef1a47754 | 2d445c21e58e80841b0ac99cc678137812f0a670 | /lib/python/adjacency_matrix_graph_bfs.py | 8b98b321ead9a21fae4e993e7057258d2c35ff22 | [] | no_license | kajyuuen/programming_contest | 65c9bc7b7101d7b5fb35cd0cf3fdc6b4213fd0f6 | d2b29edf655f64d018947773a29c9a7e3c4a3456 | refs/heads/master | 2020-04-23T09:51:04.554152 | 2020-03-06T08:09:29 | 2020-03-06T08:09:29 | 171,083,409 | 2 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,074 | py | from collections import deque
INF = float("inf")
def bfs(G, i, visited=None):
    """Breadth-first search over the adjacency matrix *G* from vertex *i*.

    G[u][v] holds an edge weight, float("inf") for "no edge" and 0 on
    the diagonal.  Returns the list of vertices reachable from *i*
    (including *i*) in first-visit order.

    *visited* may be a pre-existing list to append to.  The previous
    default of a mutable ``[]`` literal was shared between calls, so
    every call after the first silently reused (and returned) the first
    call's visit list; ``None`` restores per-call state.
    """
    INF = float("inf")  # same sentinel value as the module-level constant
    if visited is None:
        visited = []
    if len(visited) == 0:
        visited.append(i)
    queue = deque()
    queue.append(i)
    while queue:
        u = queue.popleft()
        for v in range(len(G)):
            if (G[u][v] != INF) and (G[u][v] != 0) and (v not in visited):
                visited.append(v)
                queue.append(v)
    return visited
if __name__ == '__main__':
    # One-letter input helpers (competitive-programming style).
    def i_inpl(): return int(input())
    def s_inpl(): return map(int,input().split())
    def l_inpl(): return list(map(int, input().split()))

    # Sample input format:
    # https://atcoder.jp/contests/abc016/tasks/abc016_3
    N, M = s_inpl()

    # Adjacency matrix G; absent edges are INF.
    G = [[INF] * N for _ in range(N)]
    # Cost of staying at a vertex is 0.
    for i in range(N):
        G[i][i] = 0
    for _ in range(M):
        a, b = s_inpl()
        a, b = a-1, b-1
        G[a][b] = 1
        G[b][a] = 1

    # Enumerate, for each vertex, the vertices reachable from it.
    for i in range(N):
        print(bfs(G, i, []))
"kajyuuen@gmail.com"
] | kajyuuen@gmail.com |
c2dba4555aa853c5706f495c56459e3efea80e93 | 799a0af9c05deabe5d5250a10e480ec15ae0216e | /Xpath_test/xpath_test_2.py | a097fbf67eebc5c9494a6c7e014742b599bcfbbe | [
"MIT"
] | permissive | waws520waws/waws_spider | 9b2be28834c08166463fe265e0f5c37a874369c8 | c6a5988121f32619a5c5134c09fdfd556c696fe7 | refs/heads/master | 2021-03-20T06:01:22.041937 | 2020-03-28T02:49:16 | 2020-03-28T02:49:16 | 247,183,308 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 733 | py | # encoding:utf-8
import lxml
"""
lxml.etree.HTML() 处理文本字符串
lxml.etree.parse() 处理的是文件内容
"""
import lxml.etree
html = lxml.etree.parse("1.html") # 处理文件
print(html)
print(type(html))
print(lxml.etree.tostring(html))
"""
报错:
lxml.etree.XMLSyntaxError: Opening and ending tag mismatch: meta line 4 and head, line 6, column 8
这个主要是标签不匹配的原因,将html中的meta标签去掉即可
"""
"""
知识点:lxml.etree.parse(html_file_path,解析器),使用tostring()得到的数据是bytes类型的,decode解码查看
from lxml import etree
html = etree.parse('./test.html', etree.HTMLParser())
result = etree.tostring(html)
print(result.decode('utf-8'))
""" | [
"16601203140@163.com"
] | 16601203140@163.com |
9623ef12cdf511e3619a20585d60b7650ee1a19b | 7a1b08c64b29522d4bbb913475275c1bc8ad61a4 | /diag/doc_diag4/diag_read.py | a8fe6e0e0eca9a9bd598badc98c0dfb99f4dd8a9 | [
"MIT"
] | permissive | erichilarysmithsr/time-track | 8f84d4cc92cebaedce550b3741982d204e734a6c | dc0a7b63c937d561309f9b1c84af65fb581a8e18 | refs/heads/master | 2023-03-27T08:07:46.717221 | 2021-03-30T16:45:50 | 2021-03-30T16:45:50 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,214 | py | #!/usr/bin/python3
# -*- coding: utf-8 -*-
from tkinter import *
from tkinter import messagebox
import os
import subprocess
fen=Tk()
fen.title("Diagnostics and ATCD")
fen.configure(background='cyan')
# To place side by side labelo + entrylab
top = Frame(fen, bg='cyan')
bottom = Frame(fen, bg='cyan')
top.pack(side=TOP)
bottom.pack(side=BOTTOM, fill=BOTH, expand=YES)
labelo=Label(fen, text="Diagnostics and ATCD for : ",
font='Arial 18 bold', fg='navy', bg='cyan')
labelo.pack(in_=top, side=LEFT, padx=5, pady=20)
with open('./newpatient/entryfile4.txt', 'r') as filename:
line1=filename.readline()
entrytext=StringVar()
entrytext.set(line1)
entryName=Entry(fen, textvariable=entrytext)
entryName.pack(in_=top, side=LEFT, padx=10, pady=20)
labelallergy=Label(fen, text="Allergy",
font='Arial 18 bold', fg='coral', bg='cyan')
labelallergy.pack(padx=5, pady=10)
with open('./allergy/allergyfile4.txt', 'r') as filename:
lineA1=filename.readline()
lineA2=filename.readline()
lineA3=filename.readline()
lineA4=filename.readline()
lineA5=filename.readline()
lineA6=filename.readline()
lineA7=filename.readline()
entrytext=StringVar()
entrytext.set(lineA1 + ', ' + lineA3 + ', ' + lineA5 + ', ' + lineA7)
entryName=Entry(fen, textvariable=entrytext, width=60)
entryName.pack(padx=10, pady=10)
def importationFile(fichier, encodage="Utf-8"):
    """Read *fichier* with the given encoding and append each of its
    lines to the global textBox widget.

    Uses a context manager so the file handle is released even when the
    read raises (the previous explicit open()/close() leaked on error).
    """
    with open(fichier, 'r', encoding=encodage) as file:
        content = file.readlines()
    for li in content:
        textBox.insert(END, li)
# Read-only text area showing the diagnosis recap for the patient.
textBox=Text(fen, height=15, width=60, font=18, relief=SUNKEN)
textBox.pack(padx=30, pady=30)
buttonClose=Button(fen, text="Quit", fg='white', width=10, bd=3,
    bg='navy', activebackground='dark turquoise', activeforeground='navy',
    highlightbackground='grey17', command=quit)
buttonClose.pack(side='right', padx=10, pady=10)
# Load the recap file only if it exists and is non-empty; warn the user
# in a dialog instead of crashing when it is missing.
try:
    if os.path.getsize('./diag/doc_diag4/diagrecap4.txt'):
        importationFile('./diag/doc_diag4/diagrecap4.txt',
            encodage="Utf-8")
except FileNotFoundError as err_file:
    print("+ File not found !", err_file)
    messagebox.showwarning("WARNING", "File does not exist or "
        "file not found !")
fen.mainloop()
| [
"philogenie@protonmail.com"
] | philogenie@protonmail.com |
82b7d1c5796f6c1174bf3088591602482f1c4054 | fd21d6384ba36aa83d0c9f05f889bdbf8912551a | /a10sdk/core/network/network_vlan_global.py | 0777512fc463e065ad6d609790980c39208eecad | [
"Apache-2.0"
] | permissive | 0xtobit/a10sdk-python | 32a364684d98c1d56538aaa4ccb0e3a5a87ecd00 | 1ea4886eea3a1609b2ac1f81e7326758d3124dba | refs/heads/master | 2021-01-18T03:08:58.576707 | 2014-12-10T00:31:52 | 2014-12-10T00:31:52 | 34,410,031 | 0 | 0 | null | 2015-04-22T19:05:12 | 2015-04-22T19:05:12 | null | UTF-8 | Python | false | false | 1,242 | py | from a10sdk.common.A10BaseClass import A10BaseClass
class VlanGlobal(A10BaseClass):
    """Class Description::
    Configure global options for vlan.

    Class vlan-global supports CRUD Operations and inherits from `common/A10BaseClass`.
    This class is the `"PARENT"` class for this module.`

    :param l3_vlan_fwd_disable: {"default": 0, "optional": true, "type": "number", "description": "Disable L3 forwarding between VLANs", "format": "flag"}
    :param enable_def_vlan_l2_forwarding: {"default": 0, "optional": true, "type": "number", "description": "Enable layer 2 forwarding on default vlan", "format": "flag"}
    :param DeviceProxy: The device proxy for REST operations and session handling. Refer to `common/device_proxy.py`

    URL for this object::
    `https://<Hostname|Ip address>//axapi/v3/network/vlan-global`.
    """
    def __init__(self, **kwargs):
        self.ERROR_MSG = ""
        self.required=[]
        # REST resource key and endpoint used by the A10BaseClass CRUD layer.
        self.b_key = "vlan-global"
        self.a10_url="/axapi/v3/network/vlan-global"
        self.DeviceProxy = ""
        self.l3_vlan_fwd_disable = ""
        self.enable_def_vlan_l2_forwarding = ""

        # Any field above may be overridden via keyword arguments (the
        # generated a10sdk classes take field values as kwargs).
        for keys, value in kwargs.items():
            setattr(self,keys, value)
"doug@parksidesoftware.com"
] | doug@parksidesoftware.com |
c3798284253165b3bbdb13b0f739fe26d1a2a201 | 495cbf24ca6db5702b023d5476de91f881f477bf | /bulletin_board/bboard/views.py | 833e6fa25452d5b70d450d57bc2b891a16c6cb8e | [] | no_license | Nikola1001/bulletin_board_django | 29b08368848137628534c3c305c8890632708021 | 32a6e89cc2257329aa314deec58e05531c52c9a0 | refs/heads/master | 2022-12-10T13:18:28.205733 | 2020-09-13T09:46:57 | 2020-09-13T09:46:57 | 295,108,347 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 977 | py | from django.shortcuts import render
from django.http import HttpResponse
from django.views.generic.edit import CreateView
from .models import Bb, Rubric
from .forms import BbForm
def index(request):
    """Render the main bulletin-board page with all ads and all rubrics."""
    bbs = Bb.objects.all()
    rubrics = Rubric.objects.all()
    context = {'bbs': bbs, 'rubrics': rubrics}
    return render(request, 'bboard/index.html', context)
def by_rubric(request, rubric_id):
    """Render the ads belonging to one rubric, plus the rubric list."""
    bbs = Bb.objects.filter(rubric = rubric_id)
    rubrics = Rubric.objects.all()
    # NOTE(review): an unknown rubric_id raises Rubric.DoesNotExist here
    # (HTTP 500); get_object_or_404 would turn that into a 404.
    current_rubric = Rubric.objects.get(pk=rubric_id)
    context = {'bbs': bbs, 'rubrics': rubrics, 'current_rubric': current_rubric}
    return render(request, 'bboard/by_rubric.html', context)
class BbCreateView(CreateView):
    """Form view for adding a new ad; redirects to the index on success."""
    template_name = 'bboard/create.html'
    form_class = BbForm
    success_url = '/bboard/'

    def get_context_data(self, **kwargs):
        # Extend the default context with all rubrics (used by the template).
        context = super().get_context_data(**kwargs)
        context['rubrics'] = Rubric.objects.all()
        return context
| [
"you@example.com"
] | you@example.com |
0c28571a296e24bae9794871056eb8085fb3f316 | f8ac0d9c2954ee131bb7358441974a6809c7a323 | /tests/compute/lorentz/test_Mt2.py | 140237939a75b259c2d2c83a8923653d292139bf | [
"BSD-3-Clause"
] | permissive | scikit-hep/vector | 38d285deae1ef9b9cae14eec3cf38e9b5c8d35ae | a79ccfb9574421870029506895dcb9c2162ac59d | refs/heads/main | 2023-08-31T10:49:57.311014 | 2023-08-29T17:55:09 | 2023-08-29T17:55:09 | 217,698,030 | 64 | 25 | BSD-3-Clause | 2023-09-12T05:13:17 | 2019-10-26T11:20:23 | Python | UTF-8 | Python | false | false | 4,032 | py | # Copyright (c) 2019-2023, Jonas Eschle, Jim Pivarski, Eduardo Rodrigues, and Henry Schreiner.
#
# Distributed under the 3-clause BSD license, see accompanying file LICENSE
# or https://github.com/scikit-hep/vector for details.
from __future__ import annotations
import pytest
import vector.backends.object
def test_xy_z_t():
    # Mt2 (transverse mass squared) is t**2 - z**2 = 400 - 100 = 300,
    # independent of the azimuthal (x, y) components; the sibling tests
    # below repeat this for every coordinate-system combination.
    vec = vector.backends.object.MomentumObject4D(
        vector.backends.object.AzimuthalObjectXY(3, 4),
        vector.backends.object.LongitudinalObjectZ(10),
        vector.backends.object.TemporalObjectT(20),
    )
    assert vec.Mt2 == pytest.approx(300)
def test_xy_z_tau():
vec = vector.backends.object.MomentumObject4D(
vector.backends.object.AzimuthalObjectXY(3, 4),
vector.backends.object.LongitudinalObjectZ(10),
vector.backends.object.TemporalObjectTau(16.583123951777),
)
assert vec.Mt2 == pytest.approx(300)
def test_xy_theta_t():
vec = vector.backends.object.MomentumObject4D(
vector.backends.object.AzimuthalObjectXY(3, 4),
vector.backends.object.LongitudinalObjectTheta(0.4636476090008061),
vector.backends.object.TemporalObjectT(20),
)
assert vec.Mt2 == pytest.approx(300)
def test_xy_theta_tau():
vec = vector.backends.object.MomentumObject4D(
vector.backends.object.AzimuthalObjectXY(3, 4),
vector.backends.object.LongitudinalObjectTheta(0.4636476090008061),
vector.backends.object.TemporalObjectTau(16.583123951777),
)
assert vec.Mt2 == pytest.approx(300)
def test_xy_eta_t():
vec = vector.backends.object.MomentumObject4D(
vector.backends.object.AzimuthalObjectXY(3, 4),
vector.backends.object.LongitudinalObjectEta(1.4436354751788103),
vector.backends.object.TemporalObjectT(20),
)
assert vec.Mt2 == pytest.approx(300)
def test_xy_eta_tau():
vec = vector.backends.object.MomentumObject4D(
vector.backends.object.AzimuthalObjectXY(3, 4),
vector.backends.object.LongitudinalObjectEta(1.4436354751788103),
vector.backends.object.TemporalObjectTau(16.583123951777),
)
assert vec.Mt2 == pytest.approx(300)
def test_rhophi_z_t():
vec = vector.backends.object.MomentumObject4D(
vector.backends.object.AzimuthalObjectRhoPhi(5, 0),
vector.backends.object.LongitudinalObjectZ(10),
vector.backends.object.TemporalObjectT(20),
)
assert vec.Mt2 == pytest.approx(300)
def test_rhophi_z_tau():
vec = vector.backends.object.MomentumObject4D(
vector.backends.object.AzimuthalObjectRhoPhi(5, 0),
vector.backends.object.LongitudinalObjectZ(10),
vector.backends.object.TemporalObjectTau(16.583123951777),
)
assert vec.Mt2 == pytest.approx(300)
def test_rhophi_theta_t():
vec = vector.backends.object.MomentumObject4D(
vector.backends.object.AzimuthalObjectRhoPhi(5, 0),
vector.backends.object.LongitudinalObjectTheta(0.4636476090008061),
vector.backends.object.TemporalObjectT(20),
)
assert vec.Mt2 == pytest.approx(300)
def test_rhophi_theta_tau():
vec = vector.backends.object.MomentumObject4D(
vector.backends.object.AzimuthalObjectRhoPhi(5, 0),
vector.backends.object.LongitudinalObjectTheta(0.4636476090008061),
vector.backends.object.TemporalObjectTau(16.583123951777),
)
assert vec.Mt2 == pytest.approx(300)
def test_rhophi_eta_t():
vec = vector.backends.object.MomentumObject4D(
vector.backends.object.AzimuthalObjectRhoPhi(5, 0),
vector.backends.object.LongitudinalObjectEta(1.4436354751788103),
vector.backends.object.TemporalObjectT(20),
)
assert vec.Mt2 == pytest.approx(300)
def test_rhophi_eta_tau():
vec = vector.backends.object.MomentumObject4D(
vector.backends.object.AzimuthalObjectRhoPhi(5, 0),
vector.backends.object.LongitudinalObjectEta(1.4436354751788103),
vector.backends.object.TemporalObjectTau(16.583123951777),
)
assert vec.Mt2 == pytest.approx(300)
| [
"noreply@github.com"
] | scikit-hep.noreply@github.com |
89a56284bc3930a3a7e104fbbcddb3349ebd68bb | 4c601eaa346e660c296e270cc2d79aea9a3721fe | /homeassistant/components/monoprice/__init__.py | 9bceff1531c142933a839999f0e1fedc053c9141 | [
"Apache-2.0"
] | permissive | basnijholt/home-assistant | f55110af9ff602274c0a929c7298ef97a0ef282f | ba55b4b8338a2dc0ba3f1d750efea49d86571291 | refs/heads/dev | 2023-01-21T11:53:52.621353 | 2020-08-08T15:03:06 | 2020-08-08T15:03:06 | 220,313,680 | 5 | 1 | Apache-2.0 | 2023-01-13T06:04:49 | 2019-11-07T19:29:54 | Python | UTF-8 | Python | false | false | 2,378 | py | """The Monoprice 6-Zone Amplifier integration."""
import asyncio
import logging
from pymonoprice import get_monoprice
from serial import SerialException
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_PORT
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryNotReady
from .const import (
CONF_NOT_FIRST_RUN,
DOMAIN,
FIRST_RUN,
MONOPRICE_OBJECT,
UNDO_UPDATE_LISTENER,
)
PLATFORMS = ["media_player"]
_LOGGER = logging.getLogger(__name__)
async def async_setup(hass: HomeAssistant, config: dict):
    """Set up the Monoprice 6-Zone Amplifier component.

    Nothing to do at the component level -- all setup happens per
    config entry in async_setup_entry.
    """
    return True
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry):
    """Set up Monoprice 6-Zone Amplifier from a config entry."""
    port = entry.data[CONF_PORT]

    try:
        # get_monoprice opens a serial port (blocking I/O), so run it in
        # the executor to keep the event loop responsive.
        monoprice = await hass.async_add_executor_job(get_monoprice, port)
    except SerialException:
        _LOGGER.error("Error connecting to Monoprice controller at %s", port)
        raise ConfigEntryNotReady

    # double negative to handle absence of value
    first_run = not bool(entry.data.get(CONF_NOT_FIRST_RUN))

    if first_run:
        # Persist the flag so subsequent setups know this is not a first run.
        hass.config_entries.async_update_entry(
            entry, data={**entry.data, CONF_NOT_FIRST_RUN: True}
        )

    undo_listener = entry.add_update_listener(_update_listener)

    # Per-entry state consumed by the media_player platform and unload.
    hass.data.setdefault(DOMAIN, {})[entry.entry_id] = {
        MONOPRICE_OBJECT: monoprice,
        UNDO_UPDATE_LISTENER: undo_listener,
        FIRST_RUN: first_run,
    }

    for component in PLATFORMS:
        hass.async_create_task(
            hass.config_entries.async_forward_entry_setup(entry, component)
        )

    return True
async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry):
    """Unload a config entry."""
    # Unload every platform concurrently; succeed only if all did.
    unload_ok = all(
        await asyncio.gather(
            *[
                hass.config_entries.async_forward_entry_unload(entry, component)
                for component in PLATFORMS
            ]
        )
    )
    if unload_ok:
        # Detach the options-update listener and drop the per-entry data.
        hass.data[DOMAIN][entry.entry_id][UNDO_UPDATE_LISTENER]()
        hass.data[DOMAIN].pop(entry.entry_id)

    return unload_ok
async def _update_listener(hass: HomeAssistant, entry: ConfigEntry):
    """Handle options update by reloading the config entry."""
    await hass.config_entries.async_reload(entry.entry_id)
| [
"noreply@github.com"
] | basnijholt.noreply@github.com |
3b75be4e050249e7eaafd24415499319c3bce4d2 | c236fd1f3d54fa79ac79d5154d31f220cfd63ace | /setup.py | 87bb3b45e3d99a7461e84cc32ae233928061cfd2 | [
"MIT"
] | permissive | mmmika/fcn | 46b6e7959d39f68280ca00626dfb1c6ca44ebcd1 | 876ce009d37ca36f65a3c2128102ac04fca47898 | refs/heads/master | 2020-06-10T05:38:16.261089 | 2016-11-27T16:46:13 | 2016-11-27T16:46:13 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,209 | py | #!/usr/bin/env python
import shlex
import subprocess
import sys
from setuptools import find_packages
from setuptools import setup
version = '5.3.0'

# `python setup.py release` publishes the package: build an sdist,
# upload it to PyPI, then tag and push the release.  sys.exit(0) keeps
# the setup() call below from running in that mode.
if sys.argv[-1] == 'release':
    commands = [
        'python setup.py sdist',
        'twine upload dist/fcn-{0}.tar.gz'.format(version),
        'git tag v{0}'.format(version),
        'git push origin master --tag',
    ]
    for cmd in commands:
        subprocess.call(shlex.split(cmd))
    sys.exit(0)
setup(
name='fcn',
version=version,
packages=find_packages(),
scripts=[
'scripts/fcn_infer.py',
'scripts/fcn_learning_curve.py',
],
install_requires=open('requirements.txt').readlines(),
description='Fully Convolutional Networks',
long_description=open('README.rst').read(),
author='Kentaro Wada',
author_email='www.kentaro.wada@gmail.com',
url='http://github.com/wkentaro/fcn',
license='MIT',
keywords='machine-learning',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: POSIX',
'Topic :: Internet :: WWW/HTTP',
],
)
| [
"www.kentaro.wada@gmail.com"
] | www.kentaro.wada@gmail.com |
2e495fb99b1432842a9429eca1429da37cf0ff2a | 1e65ca80032b1b5a4ab3631044c3d41a9f3dd035 | /01_Jump_to_Python/Chapter07/321.py | 2c4c9f6ab079a7f22ee78e41b397caec449e2e37 | [] | no_license | bj730612/Bigdata | cdd398c56023c67a2e56c36151e9f2bca067a40a | 9bb38e30bb3728b4a4e75bc763fa858029414d4e | refs/heads/master | 2020-03-15T09:27:23.995217 | 2018-10-02T00:07:38 | 2018-10-02T00:07:38 | 132,075,198 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 161 | py | import re
# Greedy '.+' followed by a literal ':' consumes the colon -> prints 'http:'
p = re.compile('.+:')
m = p.search('http://google.com')
print(m.group())
# The lookahead '(?=:)' asserts the colon without consuming it -> 'http'
p = re.compile('.+(?=:)')
m = p.search('http://google.com')
print(m.group())
| [
"USER@test.com"
] | USER@test.com |
65656cd0f8cace0bcc0fb1f5113f197aa36b12b7 | 9743d5fd24822f79c156ad112229e25adb9ed6f6 | /xai/brain/wordbase/otherforms/_benefactions.py | d23b224a84c6e58631f4000da88b4c70a68ff6df | [
"MIT"
] | permissive | cash2one/xai | de7adad1758f50dd6786bf0111e71a903f039b64 | e76f12c9f4dcf3ac1c7c08b0cc8844c0b0a104b6 | refs/heads/master | 2021-01-19T12:33:54.964379 | 2017-01-28T02:00:50 | 2017-01-28T02:00:50 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 242 | py |
#calss header
class _BENEFACTIONS():
def __init__(self,):
self.name = "BENEFACTIONS"
self.definitions = benefaction
self.parents = []
self.childen = []
self.properties = []
self.jsondata = {}
self.basic = ['benefaction']
| [
"xingwang1991@gmail.com"
] | xingwang1991@gmail.com |
e91ddeb3e191e0e42abe3dff6ea872f61d02497f | a17bb550574747585f36ba159c4415d5e42835e7 | /handle/class_list_handle.py | 8c583d55aa8194a70a6d4b3be5275f6174fc0efc | [] | no_license | z1069867141/sjh | 36895969bc472c9608c14fee84f800adf6ff4af3 | 197b3a7fab135f7a277ba4260dcf3ca10bb7c53e | refs/heads/master | 2022-11-15T19:32:16.934370 | 2020-07-09T10:01:49 | 2020-07-09T10:01:49 | 261,260,925 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,673 | py | import os
import sys
sys.path.append(os.path.join(os.path.dirname(__file__),".."))
from base.FindElement import FindElement
from selenium import webdriver
import time
class class_list(object):
def __init__(self,driver):
self.driver = driver
self.file_path = os.path.join(os.getcwd()+"/config/"+"class_element.ini")
self.cl = FindElement(self.driver,file_path=self.file_path,node="class_list")
def click_class_1(self):
self.cl.get_element("class_1").click()
def click_class_2(self):
self.cl.get_element("class_2").click()
def get_sorting_text(self):
try:
time.sleep(2)
text = self.cl.get_element("sorting_text").text
if text == "综合排序":
return True
else:
return False
except:
return False
def click_QR_tabber(self):
try:
self.cl.get_element("QR_code").click()
time.sleep(2)
text = self.cl.get_element("title_text").text
if text == "分 享":
return True
else:
return False
except:
return False
if __name__ == "__main__":
driver = webdriver.Chrome()
# base = class_list(driver)
# driver.get("http://b2bsaas.qianyansoft.com/Sjh/#/pwdlogin?qythc=")
# base.user_base()
# time.sleep(1)
# driver.get("http://b2bsaas.qianyansoft.com/Sjh/#/category")
# a = class_list(driver)
# # time.sleep(2)
# a.click_QR_tabber()
driver.get("http://b2bsaas.qianyansoft.com/Sjh/#/category/goods?gcId=303")
driver.find_element_by_name("销 量").click() | [
"919824370@qq.com"
] | 919824370@qq.com |
bc28a05454e5f28444f62d7d5ce5b39db48ee9f3 | cb9f816c672a55d0e6b0109f368358a6276a11d9 | /noseapp/core/suite/performers/gevent.py | 59ee046682d668d64df451c72af4d48ec6386a7f | [] | no_license | noseapp/noseapp | ef3d361f6a7505d822b05c2dc6d40c662b3ba285 | 7c3e2e38b6b9fe027847a466615f7d72ed4ea334 | refs/heads/master | 2020-05-17T20:12:14.213873 | 2015-10-06T14:47:30 | 2015-10-06T14:47:30 | 30,531,499 | 3 | 2 | null | 2015-10-01T13:44:16 | 2015-02-09T10:54:26 | Python | UTF-8 | Python | false | false | 872 | py | # -*- coding: utf8 -*-
from __future__ import absolute_import
from multiprocessing import cpu_count
from gevent.pool import Pool
from noseapp.core.suite.base import BaseSuite
from noseapp.core.suite.base import SuitePerformer
class GeventSuitePerformer(SuitePerformer):
"""
Run tests with gevent pool
"""
def __call__(self, pool=None):
self_pool = not bool(pool)
size = self.suite.config.options.async_tests
if size <= 0:
size = cpu_count() / 2
pool = pool or Pool(int(round(size)) or 2)
for test in self.suite.tests:
if self.result.shouldStop:
break
if isinstance(test, BaseSuite):
test.run(self.result, pool=pool)
continue
pool.spawn(self.run_one_test, test)
if self_pool:
pool.join()
| [
"mikhail.trifonov@corp.mail.ru"
] | mikhail.trifonov@corp.mail.ru |
49ce32e81a63bc46de29e7eb1cd17f6fe3f229f6 | 2ffd079c34cb07c738f7e5f703764fed68f2c8c0 | /Solutions/Evaluate_Reverse_Polish_Notation.py | 708a3f4dc84222f8e3a3a9ac462cf0dc57d6c3db | [] | no_license | WuIFan/LeetCode | bc96355022c875bdffb39c89a2088457b97d30ab | 689a100ada757bc20334d5f0084587af3039ca7b | refs/heads/master | 2022-05-24T07:13:01.023733 | 2022-04-03T15:26:23 | 2022-04-03T15:26:23 | 202,471,960 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 749 | py | from typing import List
class Solution:
def evalRPN(self, tokens: List[str]) -> int:
nums = []
for t in tokens:
if t.lstrip('-').isnumeric():
nums.append(int(t))
else:
second = nums.pop()
first = nums.pop()
if t == '+':
ans = first + second
elif t == '-':
ans = first - second
elif t == '*':
ans = first * second
else:
neg = first*second < 0
ans = abs(first) // abs(second)
if neg:
ans = -ans
nums.append(ans)
return nums[-1] | [
"denny91002@gmail.com"
] | denny91002@gmail.com |
14a313d6bc063b870815b658c0cc5045efd8eae0 | 038e6e13ad4a81cee5dbbd6ccc322d48330d15d7 | /AnswerCode/083RemoveDuplicatesfromSortedList.py | 6ebe8ac3b56ba15d5779fcf189dd251a77daac8c | [] | no_license | aistoume/Leetcode | ad69dae6d9f41a03c883fc2582d0afd6997f83d6 | d8dc574b611d0e3d42367ccd47a44fd8443b0b27 | refs/heads/master | 2021-01-12T14:27:18.245818 | 2018-11-09T00:21:04 | 2018-11-09T00:21:04 | 70,066,007 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 589 | py | # 83. Remove Duplicates from Sorted List
# Definition for singly-linked list.
class ListNode(object):
def __init__(self, x):
self.val = x
self.next = None
class Solution(object):
def deleteDuplicates(self, head):
if not head: return None
curr = head
while curr.next:
if curr.val==curr.next.val:
curr.next = curr.next.next
else:
curr = curr.next
return head
So = Solution()
L = [1,1,2,3,3]
head = ListNode(L[0])
curr = head
for i in L[1:]:
curr.next = ListNode(i)
curr = curr.next
ans = So.deleteDuplicates(head)
while ans:
print ans.val
ans = ans.next | [
"ais.yb.mo@gmail.com"
] | ais.yb.mo@gmail.com |
51938e7c55f9356b7ec6b93bcda53735d7801af7 | 4cc75836f13b9829afd59eb9b2ac3a5f6b85c543 | /models/final_experiment_scripts/tpc.py | 4caf041f1b06ce575758e8d7cf03393137faa64a | [
"MIT"
] | permissive | TanmDL/eICU-LoS-prediction | 71316bf072b3bd47e61e22df71631f6d7996e583 | eb19fc84c5702595b052f436408570af314418a2 | refs/heads/master | 2022-11-29T04:14:11.929595 | 2020-08-15T10:33:46 | 2020-08-15T10:33:46 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 599 | py | from eICU_preprocessing.split_train_test import create_folder
from models.run_tpc import TPC
from models.initialise_arguments import initialise_tpc_arguments
if __name__=='__main__':
c = initialise_tpc_arguments()
c['mode'] = 'test'
c['exp_name'] = 'TPC'
c['model_type'] = 'tpc'
log_folder_path = create_folder('models/experiments/final', c.exp_name)
tpc = TPC(config=c,
n_epochs=c.n_epochs,
name=c.exp_name,
base_dir=log_folder_path,
explogger_kwargs={'folder_format': '%Y-%m-%d_%H%M%S{run_number}'})
tpc.run() | [
"ecr38@cam.ac.uk"
] | ecr38@cam.ac.uk |
ab9caac8043a3d31ccc062a0bc33b43c65f5d2e0 | d4e219c07379a08f37dff8ed9b889a1c75531e90 | /pasahero/commuters/migrations/0002_allowed_routes.py | 39780cec580f66546b71b54c3365d415dbe520f7 | [] | no_license | SanCampos/anti-covid | c9d306f584d61b9a1e1e1bc5cda4ac7497acee55 | 4f4ae5f63abfb1c59e29ad4bfc8a16a4e88ff6ad | refs/heads/master | 2021-05-22T16:30:55.933727 | 2020-04-01T14:55:14 | 2020-04-01T14:55:14 | 253,004,388 | 1 | 1 | null | 2020-04-04T14:38:33 | 2020-04-04T13:30:27 | null | UTF-8 | Python | false | false | 850 | py | # Generated by Django 2.2.5 on 2020-03-20 10:23
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('transportations', '0015_auto_20200320_0644'),
('commuters', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='Allowed_Routes',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('commuter_id', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='allowed_routes', to='commuters.Commuters')),
('route_id', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='allowed_routes', to='transportations.Routes')),
],
),
]
| [
"benvillabroza1999@gmail.com"
] | benvillabroza1999@gmail.com |
1a3283004646eabd4e8de4243225147a2f252f8c | e5d83ede8521027b05d9b91c43be8cab168610e6 | /0x0A-python-inheritance/2-is_same_class.py | 21c3ccee88f486cb44c6c4084c015a32a21f6fc8 | [] | no_license | Danielo814/holbertonschool-higher_level_programming | 8918c3a6a9c136137761d47c5162b650708dd5cd | 832b692529198bbee44d2733464aedfe650bff7e | refs/heads/master | 2020-03-28T11:09:00.343055 | 2019-02-22T03:33:54 | 2019-02-22T03:33:54 | 148,181,433 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 307 | py | #!/usr/bin/python3
"""
2-is_same_class module that tests if an object is an
instance of the specified class
"""
def is_same_class(obj, a_class):
"""
returns True if obj is instance of a_class, False otherwise
"""
if type(obj) != a_class:
return False
else:
return True
| [
"211@holbertonschool.com"
] | 211@holbertonschool.com |
8a9223c0a0896e8c858df8c838ce1237053174f0 | 163bbb4e0920dedd5941e3edfb2d8706ba75627d | /Code/CodeRecords/2951/60714/272996.py | 9cd79dd531efc7ba42812797b13c7e90adcf7421 | [] | no_license | AdamZhouSE/pythonHomework | a25c120b03a158d60aaa9fdc5fb203b1bb377a19 | ffc5606817a666aa6241cfab27364326f5c066ff | refs/heads/master | 2022-11-24T08:05:22.122011 | 2020-07-28T16:21:24 | 2020-07-28T16:21:24 | 259,576,640 | 2 | 1 | null | null | null | null | UTF-8 | Python | false | false | 480 | py | a = input()
b = input()
ans1 = 0
for i in range(len(a) - 1, -1, -1):
ans1 += int(a[i]) * pow(2, len(a) - i - 1)
ans2 = 0
for i in range(len(b) - 1, -1, -1):
ans2 += int(b[i]) * pow(3, len(b) - i - 1)
flag = True
if ans1 > ans2:
temp = ans1 - ans2
else:
temp = ans2 - ans1
flag = False
while True:
i = 1
temp -= pow(2, i)
if temp % 3 is 0:
if flag:
print(ans2 + temp)
else:
print(ans2 - temp)
break
| [
"1069583789@qq.com"
] | 1069583789@qq.com |
4706b93e3604fba7a00dd32cb2c085a8d838a2bd | f03155acea2660fb04576e3ed60f248b57f43d68 | /migrations/versions/2e1382ecc795_.py | 3c15acd95cf280dafae28d698527e449f60c6cc3 | [] | no_license | hreeder/SlackInSpace | 22ecb413fd31dad8707afd7ae968f895b425e452 | cc44ad0834343f0616f9d5bd5f6820546b105d77 | refs/heads/master | 2020-04-01T23:02:27.078901 | 2015-05-07T13:58:26 | 2015-05-07T13:58:26 | 33,796,690 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,072 | py | """empty message
Revision ID: 2e1382ecc795
Revises: 21ca2aa72535
Create Date: 2015-04-12 13:34:58.152000
"""
# revision identifiers, used by Alembic.
revision = '2e1382ecc795'
down_revision = '21ca2aa72535'
from alembic import op
import sqlalchemy as sa
def upgrade():
### commands auto generated by Alembic - please adjust! ###
op.create_table('team_member',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('team_id', sa.String(length=64), nullable=True),
sa.Column('user_id', sa.Integer(), nullable=True),
sa.Column('email_address', sa.String(length=128), nullable=True),
sa.Column('slack_user_id', sa.String(length=64), nullable=True),
sa.Column('status', sa.Integer(), nullable=True),
sa.ForeignKeyConstraint(['team_id'], ['team.id'], ),
sa.ForeignKeyConstraint(['user_id'], ['user.id'], ),
sa.PrimaryKeyConstraint('id')
)
### end Alembic commands ###
def downgrade():
### commands auto generated by Alembic - please adjust! ###
op.drop_table('team_member')
### end Alembic commands ###
| [
"harry@harryreeder.co.uk"
] | harry@harryreeder.co.uk |
3da62f1e1a192e84a4f600ef64a2e48ec708cb18 | fee9bae88bf4ed22b93e3e9df7806f8c897f5f25 | /manage/cli_utils.py | af579c32d5dec42317b115e3f7262a4a77f93dd8 | [] | no_license | fake-name/xA-Scraper | 4123378c5ed87896fc389a90dfd8836cca7a5923 | 9eae8d1220fd0c7cabc97cef9378e4efe3361061 | refs/heads/master | 2023-01-27T15:33:05.349679 | 2023-01-22T11:39:57 | 2023-01-22T11:39:57 | 23,865,307 | 76 | 12 | null | 2019-06-11T12:38:27 | 2014-09-10T07:39:43 | Python | UTF-8 | Python | false | false | 3,271 | py |
#pylint: disable-msg=F0401, W0142
import logging
import psycopg2
import urllib.parse
import traceback
from xascraper import db
from xascraper import database
from settings import settings
from plugins import JOBS
from plugins import JOBS_DISABLED
from plugins import JOBS_NO_CONF
PLUGINS = {
key : (cls_def, cls_def.pluginName)
for cls_def, dummy_interval, key in JOBS
}
DISABLED_PLUGINS = {
key : (cls_def, cls_def.pluginName)
for cls_def, dummy_interval, key in JOBS_DISABLED
}
UNRUNNABLE_PLUGINS = {
cls_def.pluginShortName : (cls_def, cls_def.pluginName)
for cls_def in JOBS_NO_CONF
}
def print_help():
print()
print("Manager interface")
print("Options")
print()
print(" help")
print(" print this message")
print(" reset-run-state")
print(" reset the run-state monitor flags. This is normally done")
print(" at start by main.py, but if you're using just the CLI fetch")
print(" calls, you can do it manually too.")
print(" reset-run-state [sitename]")
print(" reset the run-state for a specific plugin only.")
print(" reset-last-fetched-times")
print(" Reset the last fetch times for all the artists in the database.")
print(" this means the next fetch will re-walk all artists in the database")
print(" in random(ish) order")
print(" reset-last-fetched-times [sitename]")
print(" Reset the last-fetch time for all the artists associated with a specific site")
print(" 'rss-import'")
print(" Import tumblr feeds from a ttrss database instance.")
print(" 'tumblr-import'")
print(" Import the artists you follow on tumblr to your scraped-artists list.")
print(" 'upgrade-db'")
print(" Make any needed schema changes to the database, if needed.")
print(" 'name-clean'")
print(" Checks and does some cleanup of the artist-names in the database.")
print(" 'db-misrelink-clean'")
print(" Does release sanity checks on item URLs")
print(" fetch [sitename]")
print(" with no sitename, this executes all plugins in sequence.")
print(" With a sitename, executes the named plugin.")
print(" fetch-all")
print(" Executes all plugins in parallel.")
print(" import <sitename> <filename>")
print(" Open a text file <filename>, and import the names from")
print(" it into the monitored names database for site <sitename>.")
print(" The file <filename> must be a simple text file with")
print(" one artist name per-line.")
print(" Note that this does not support pixiv names, due to the ")
print(" different mechanism used for supporting pixiv namelist")
print(" tracking.")
print(" Note: this will call `name-clean` after execution automatically.")
print(" dump [export_path] [sitename]")
print(" Dump the database contents for users from a specific site to [export_path]")
print("")
print("Plugins (sitename -> Human-Readable name)")
print(" Available plugins (will be run by the scheduler):")
for key, tup in PLUGINS.items():
print(" {} -> {}".format(key.ljust(8), tup[1]))
print(" Disabled plugins (can be run manually, will not auto run):")
for key, tup in DISABLED_PLUGINS.items():
print(" {} -> {}".format(key.ljust(8), tup[1]))
print(" Unconfigured plugins (cannot be used):")
for key, tup in UNRUNNABLE_PLUGINS.items():
print(" {} -> {}".format(key.ljust(8), tup[1]))
| [
"something@fake-url.com"
] | something@fake-url.com |
648ef60602eb9eba9c4caeb8a2ac8a960cb43412 | ac5e52a3fc52dde58d208746cddabef2e378119e | /exps-gsn-edf/gsn-edf_ut=3.5_rd=0.5_rw=0.06_rn=4_u=0.075-0.35_p=harmonic-2/sched=RUN_trial=10/sched.py | 6b1fdcf3ee82164a4757179b8107b4b603c7ba07 | [] | no_license | ricardobtxr/experiment-scripts | 1e2abfcd94fb0ef5a56c5d7dffddfe814752eef1 | 7bcebff7ac2f2822423f211f1162cd017a18babb | refs/heads/master | 2023-04-09T02:37:41.466794 | 2021-04-25T03:27:16 | 2021-04-25T03:27:16 | 358,926,457 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 316 | py | -X FMLP -Q 0 -L 5 119 400
-X FMLP -Q 0 -L 5 86 400
-X FMLP -Q 0 -L 5 86 400
-X FMLP -Q 1 -L 2 59 175
-X FMLP -Q 1 -L 2 44 150
-X FMLP -Q 2 -L 1 38 200
-X FMLP -Q 2 -L 1 31 200
-X FMLP -Q 3 -L 1 30 125
-X FMLP -Q 3 -L 1 30 125
29 150
25 100
24 175
18 125
17 175
14 100
12 100
12 100
11 100
| [
"ricardo.btxr@gmail.com"
] | ricardo.btxr@gmail.com |
4ddae7c4756405d61f6100fe0848351c7b1e0d1b | 7426522061b222e8d3336b18ff941bb98ff9626c | /qtoggleserver/core/api/funcs/backup.py | bf69f7ad08788ba2632330bd4b703b90bcaa9ddb | [
"Apache-2.0"
] | permissive | DigitEgal/qtoggleserver | 82833aaeb6f0bdad5f28243f132a639f4b406001 | 54b6ac53742af9529fd349d4fc207b0dc8a38d3b | refs/heads/dev | 2023-05-07T14:49:11.273023 | 2021-04-30T20:40:08 | 2021-04-30T20:40:08 | 360,039,836 | 0 | 0 | Apache-2.0 | 2021-04-21T05:18:08 | 2021-04-21T05:13:07 | null | UTF-8 | Python | false | false | 862 | py |
import logging
from qtoggleserver.conf import settings
from qtoggleserver.core import api as core_api
from qtoggleserver.core.typing import GenericJSONList
from qtoggleserver.system import conf as system_conf
logger = logging.getLogger(__name__)
@core_api.api_call(core_api.ACCESS_LEVEL_ADMIN)
async def get_backup_endpoints(request: core_api.APIRequest) -> GenericJSONList:
endpoints = []
if system_conf.can_write_conf_file():
endpoints.append({
'path': '/system',
'display_name': 'System Configuration',
'restore_method': 'PUT',
'order': 5
})
if settings.frontend.enabled:
endpoints.append({
'path': '/frontend',
'display_name': 'App Configuration',
'restore_method': 'PUT',
'order': 45
})
return endpoints
| [
"ccrisan@gmail.com"
] | ccrisan@gmail.com |
fc19c1f114424cc9ab63e7e87a0966cc3ab775aa | 4a48593a04284ef997f377abee8db61d6332c322 | /python/graph_and_tree/tree_structure/iterative_dfs_left_right_tree_traversal.py | 56f0871fee7900fab3ae3d614ed12845a1c9b031 | [
"MIT"
] | permissive | jeremiedecock/snippets | 8feaed5a8d873d67932ef798e16cb6d2c47609f0 | b90a444041c42d176d096fed14852d20d19adaa7 | refs/heads/master | 2023-08-31T04:28:09.302968 | 2023-08-21T07:22:38 | 2023-08-21T07:22:38 | 36,926,494 | 26 | 9 | MIT | 2023-06-06T02:17:44 | 2015-06-05T10:19:09 | Python | UTF-8 | Python | false | false | 2,119 | py | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
# Copyright (c) 2014 Jérémie DECOCK (http://www.jdhp.org)
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
"""
Depth-first search.
"""
from node import Node as Node
#from node import GraphvizNode as Node
def walk(start_node):
"""The tree traversal function."""
stack = [start_node]
while len(stack) > 0:
# Retrive the last element
node = stack.pop()
# Do something with node value...
print(node.getValue())
# Add child node into the stack
stack.extend(reversed(node.getChildNodes()))
#print([n.getValue() for n in stack])
def test():
r"""Main function
Build the following test tree and traverse it.
1
/|\
2 3 4
/ \
5 6
Top-down (left-right) traversal should print: 1, 2, 5, 6, 3, 4.
"""
# Build the test tree
n5 = Node(5)
n6 = Node(6)
n4 = Node(4)
n3 = Node(3)
n2 = Node(2, [n5, n6])
n1 = Node(1, [n2, n3, n4])
# Traverse the tree
walk(n1)
if __name__ == '__main__':
test()
| [
"jd.jdhp@gmail.com"
] | jd.jdhp@gmail.com |
b9908c2801383dd89cbb0c3a1b75c7acaeba368a | e4cae3759a053ca88a936e87e3329aec203608db | /sdk/communication/azure-communication-identity/tests/test_communication_identity_client_async.py | 0dafc2b5b76c122df008680dc360e1d5e5d4f9fd | [
"LicenseRef-scancode-generic-cla",
"LGPL-2.1-or-later",
"MIT"
] | permissive | a-santamaria/azure-sdk-for-python | c9413858747ccfcec2fbbefd50922c515cb4f634 | 9dec418ad621ac75f217e56e901f15b6624800b0 | refs/heads/master | 2022-05-19T00:01:07.604118 | 2021-02-01T22:52:25 | 2021-02-01T22:52:25 | 202,599,021 | 0 | 0 | MIT | 2019-08-15T19:22:33 | 2019-08-15T19:22:32 | null | UTF-8 | Python | false | false | 6,215 | py | # coding: utf-8
# -------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
# --------------------------------------------------------------------------
import pytest
from azure.core.credentials import AccessToken
from azure.communication.identity.aio import CommunicationIdentityClient
from azure.communication.identity._shared.utils import parse_connection_str
from azure_devtools.scenario_tests import RecordingProcessor
from devtools_testutils import ResourceGroupPreparer
from _shared.helper import URIIdentityReplacer
from _shared.asynctestcase import AsyncCommunicationTestCase
from _shared.testcase import BodyReplacerProcessor
from _shared.communication_service_preparer import CommunicationServicePreparer
from azure.identity import DefaultAzureCredential
class FakeTokenCredential(object):
def __init__(self):
self.token = AccessToken("Fake Token", 0)
def get_token(self, *args):
return self.token
class CommunicationIdentityClientTestAsync(AsyncCommunicationTestCase):
def setUp(self):
super(CommunicationIdentityClientTestAsync, self).setUp()
self.recording_processors.extend([
BodyReplacerProcessor(keys=["id", "token"]),
URIIdentityReplacer()])
@ResourceGroupPreparer(random_name_enabled=True)
@CommunicationServicePreparer()
async def test_create_user_from_managed_identity(self, connection_string):
endpoint, access_key = parse_connection_str(connection_string)
from devtools_testutils import is_live
if not is_live():
credential = FakeTokenCredential()
else:
credential = DefaultAzureCredential()
identity_client = CommunicationIdentityClient(endpoint, credential)
async with identity_client:
user = await identity_client.create_user()
assert user.identifier is not None
@ResourceGroupPreparer(random_name_enabled=True)
@CommunicationServicePreparer()
async def test_create_user(self, connection_string):
identity_client = CommunicationIdentityClient.from_connection_string(connection_string)
async with identity_client:
user = await identity_client.create_user()
assert user.identifier is not None
@ResourceGroupPreparer(random_name_enabled=True)
@CommunicationServicePreparer()
async def test_issue_token_from_managed_identity(self, connection_string):
endpoint, access_key = parse_connection_str(connection_string)
from devtools_testutils import is_live
if not is_live():
credential = FakeTokenCredential()
else:
credential = DefaultAzureCredential()
identity_client = CommunicationIdentityClient(endpoint, credential)
async with identity_client:
user = await identity_client.create_user()
token_response = await identity_client.issue_token(user, scopes=["chat"])
assert user.identifier is not None
assert token_response.token is not None
@ResourceGroupPreparer(random_name_enabled=True)
@CommunicationServicePreparer()
async def test_issue_token(self, connection_string):
identity_client = CommunicationIdentityClient.from_connection_string(connection_string)
async with identity_client:
user = await identity_client.create_user()
token_response = await identity_client.issue_token(user, scopes=["chat"])
assert user.identifier is not None
assert token_response.token is not None
@ResourceGroupPreparer(random_name_enabled=True)
@CommunicationServicePreparer()
async def test_revoke_tokens_from_managed_identity(self, connection_string):
endpoint, access_key = parse_connection_str(connection_string)
from devtools_testutils import is_live
if not is_live():
credential = FakeTokenCredential()
else:
credential = DefaultAzureCredential()
identity_client = CommunicationIdentityClient(endpoint, credential)
async with identity_client:
user = await identity_client.create_user()
token_response = await identity_client.issue_token(user, scopes=["chat"])
await identity_client.revoke_tokens(user)
assert user.identifier is not None
assert token_response.token is not None
@ResourceGroupPreparer(random_name_enabled=True)
@CommunicationServicePreparer()
async def test_revoke_tokens(self, connection_string):
identity_client = CommunicationIdentityClient.from_connection_string(connection_string)
async with identity_client:
user = await identity_client.create_user()
token_response = await identity_client.issue_token(user, scopes=["chat"])
await identity_client.revoke_tokens(user)
assert user.identifier is not None
assert token_response.token is not None
@ResourceGroupPreparer(random_name_enabled=True)
@CommunicationServicePreparer()
async def test_delete_user_from_managed_identity(self, connection_string):
endpoint, access_key = parse_connection_str(connection_string)
from devtools_testutils import is_live
if not is_live():
credential = FakeTokenCredential()
else:
credential = DefaultAzureCredential()
identity_client = CommunicationIdentityClient(endpoint, credential)
async with identity_client:
user = await identity_client.create_user()
await identity_client.delete_user(user)
assert user.identifier is not None
@ResourceGroupPreparer(random_name_enabled=True)
@CommunicationServicePreparer()
async def test_delete_user(self, connection_string):
identity_client = CommunicationIdentityClient.from_connection_string(connection_string)
async with identity_client:
user = await identity_client.create_user()
await identity_client.delete_user(user)
assert user.identifier is not None
| [
"noreply@github.com"
] | a-santamaria.noreply@github.com |
702475d5c06c0afe6130323ed6491fb661057ae9 | 8d472f9facb895dda9e1df81f3bb6c2f81b9c357 | /master/bt5/slapos_wechat/SkinTemplateItem/portal_skins/slapos_wechat/PaymentTransaction_updateWechatPaymentStatus.py | faad8b2197c8500892082735e31629a8166b80c4 | [] | no_license | SlapOS/slapos.core | 852485eed9382685f3df6ba8532f8192bb1389c4 | 369e8d56636e1c59a745e68dc68154abfc5b7840 | refs/heads/master | 2023-08-31T04:42:34.722241 | 2023-08-30T15:13:08 | 2023-08-30T15:13:08 | 1,825,920 | 11 | 4 | null | null | null | null | UTF-8 | Python | false | false | 368 | py | state = context.getSimulationState()
if (state != 'started') or (context.getPaymentMode() != 'wechat'):
return "state not started (%s)" % state
else:
# ???
_, transaction_id = context.PaymentTransaction_getWechatId()
if transaction_id is not None:
# so the payment is registered in wechat
context.PaymentTransaction_createWechatEvent().updateStatus()
| [
"rafael@nexedi.com"
] | rafael@nexedi.com |
57a5992e8ab69bc124ae719a7a981da9a13774a3 | 8b3bc4efea5663b356acbabec231d1d647891805 | /214/Solution.py | cd011e9d4d6cb6afbfd0f914ff3c954c02a9140e | [] | no_license | FawneLu/leetcode | 9a982b97122074d3a8488adec2039b67e709af08 | 03020fb9b721a1c345e32bbe04f9b2189bfc3ac7 | refs/heads/master | 2021-06-18T20:13:34.108057 | 2021-03-03T05:14:13 | 2021-03-03T05:14:13 | 177,454,524 | 2 | 1 | null | null | null | null | UTF-8 | Python | false | false | 268 | py | class Solution:
def shortestPalindrome(self, s: str) -> str:
n = len(s)
t =s[::-1]
if n == 0:
return ""
for i in range(n, 0 ,-1):
if s[:i] == t[n-i:]:
break
return t[:n-i] + s | [
"tracylu1996@gmail.com"
] | tracylu1996@gmail.com |
b86f3709de8bb3479af18ab8d8a462010242978a | 4038af23324241fe74ef0604af395cac6da68b26 | /Hackerrank/minimum_time_required/min_time.py | a746548f3b8f028071988d16ee4c7389c30ffae3 | [] | no_license | narnat/algorithms_and_datastructures | 4fd5b0fa401d3f441c522f61eceecd4f8f17010d | f319f64f1774c92ce2a94cc6106eec68215f573b | refs/heads/master | 2020-09-29T05:38:41.099526 | 2019-12-22T21:12:53 | 2019-12-22T21:12:53 | 226,966,112 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 683 | py | #!/usr/bin/python3
import math
""" Minimum time required problem """
def minTime(machines, goal):
l_bound = math.ceil(min(machines) * goal / len(machines))
r_bound = math.ceil(max(machines) * goal / len(machines))
while l_bound < r_bound:
mid = (l_bound + r_bound) // 2
s = sum(mid // i for i in machines)
if s < goal:
l_bound = mid + 1
else:
r_bound = mid
return r_bound
ans1 = minTime([2, 3], 5)
ans2 = minTime([1, 3, 4], 10)
ans3 = minTime([2, 3, 2], 10)
ans4 = minTime([4, 5, 6], 12)
print(ans1)
print(ans2)
print(ans3)
print(ans4)
assert ans1 == 6 and ans2 == 7 and ans3 == 8 and ans4 == 20 | [
"farruh1996@gmail.com"
] | farruh1996@gmail.com |
1924cf501b9069a7dcfd9aa0c4af61bae2945b6d | 6093dca86097633d337a8a8d13c0f7513dd33db6 | /defining_classes_lecture1/LAB/02. Scope Mess.py | 0afb2155d65faddf8ed0ef0538a74e99e8c08802 | [] | no_license | tony-andreev94/Python-OOP | 7b1e862f793193aae7be5a759314b214eef7a8c6 | 26c2143da12ae93f515d9e2823c0e46e0fffbaad | refs/heads/master | 2023-01-28T10:18:25.901244 | 2020-12-10T19:40:43 | 2020-12-10T19:40:43 | 254,870,076 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 395 | py | # https://judge.softuni.bg/Contests/Practice/Index/1934#1
x = "global"
def outer():
x = "local"
def inner():
nonlocal x
x = "nonlocal"
print("inner:", x)
return x
def change_global():
global x
x = "global: changed!"
print("outer:", x)
inner()
print("outer:", x)
return change_global()
print(x)
outer()
print(x)
| [
"tony.andreev94@outlook.com"
] | tony.andreev94@outlook.com |
590af1c251015ae9d7f6be2d779f44f1e6addb1c | 3fc4cac282465350d9b2983527140fc735a0d273 | /0903/12_updowntest.py | 3621b0dc8af94837fbda6ae7399450f1efa3502e | [] | no_license | Orderlee/SBA_STUDY | 2cfeea54d4a9cbfd0c425e1de56324afcc547b81 | 4642546e7546f896fc8b06e9daba25d27c29e154 | refs/heads/master | 2022-12-25T01:08:05.168970 | 2020-09-27T14:57:23 | 2020-09-27T14:57:23 | 299,050,168 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 805 | py | # 모듈을 사용하고자 할때 import 키워드 사용
import random # random 모듈 : 랜덤한 데이터를 추출하고자 할 때 사용하는 모듈
# for idx in range(1,11):
# answer = random.randint(1,100);
# print(answer)
# answer = 컴퓨터가 기억한 값
answer = random.randint(1,100)
print('정답: %d' % answer)
cnt=0 #시도 횟수
while True:
# su 우리가: 입력한 숫
su = int(input('1부터 100사이의 정수 1개 입력:'))
cnt += 1 #시도 횟수를 1증가
if answer > su:
print('%d보다 큰 수를 입력하세요.' % su)
elif answer < su:
print('%d보다 작은 수를 입력하세요.' % su)
else:
print('정답입니다.')
print('%d번만에 맞췄습니다.' % cnt)
break
print('finished')
| [
"61268230+Orderlee@users.noreply.github.com"
] | 61268230+Orderlee@users.noreply.github.com |
a4e78b137f746516856f31f699789563c337e5e2 | 444a9480bce2035565332d4d4654244c0b5cd47b | /official/recommend/Wide_and_Deep_Multitable/src/config.py | 7c268bc3ca56e954a9f8d298c9e674960db089b2 | [
"Apache-2.0",
"LicenseRef-scancode-unknown-license-reference",
"LicenseRef-scancode-proprietary-license"
] | permissive | mindspore-ai/models | 7ede9c6454e77e995e674628204e1c6e76bd7b27 | eab643f51336dbf7d711f02d27e6516e5affee59 | refs/heads/master | 2023-07-20T01:49:34.614616 | 2023-07-17T11:43:18 | 2023-07-17T11:43:18 | 417,393,380 | 301 | 92 | Apache-2.0 | 2023-05-17T11:22:28 | 2021-10-15T06:38:37 | Python | UTF-8 | Python | false | false | 4,013 | py | # Copyright 2020 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
""" config. """
import argparse
def argparse_init():
"""
argparse_init
"""
parser = argparse.ArgumentParser(description='WideDeep')
parser.add_argument("--data_path", type=str, default="./test_raw_data/") # The location of the input data.
parser.add_argument("--epochs", type=int, default=8) # The number of epochs used to train.
parser.add_argument("--batch_size", type=int, default=131072) # Batch size for training and evaluation
parser.add_argument("--eval_batch_size", type=int, default=131072) # The batch size used for evaluation.
parser.add_argument("--deep_layers_dim", type=int, nargs='+', default=[1024, 512, 256, 128]) # The sizes of hidden layers for MLP
parser.add_argument("--deep_layers_act", type=str, default='relu') # The act of hidden layers for MLP
parser.add_argument("--keep_prob", type=float, default=1.0) # The Embedding size of MF model.
parser.add_argument("--adam_lr", type=float, default=0.003) # The Adam lr
parser.add_argument("--ftrl_lr", type=float, default=0.1) # The ftrl lr.
parser.add_argument("--l2_coef", type=float, default=0.0) # The l2 coefficient.
parser.add_argument("--is_tf_dataset", type=int, default=1) # The l2 coefficient.
parser.add_argument("--dropout_flag", type=int, default=1) # The dropout rate
parser.add_argument("--output_path", type=str, default="./output/") # The location of the output file.
parser.add_argument("--ckpt_path", type=str, default="./") # The location of the checkpoints file.
parser.add_argument("--eval_file_name", type=str, default="eval.log") # Eval output file.
parser.add_argument("--loss_file_name", type=str, default="loss.log") # Loss output file.
return parser
class WideDeepConfig():
    """Hyper-parameter container for the WideDeep model.

    Construction installs sensible defaults; argparse_init() overwrites them
    with values parsed from the command line.
    """
    def __init__(self):
        # Data and training schedule.
        self.data_path = ''
        self.epochs = 200
        self.batch_size = 131072
        self.eval_batch_size = 131072
        # Network structure and weight initialisation.
        self.deep_layers_act = 'relu'
        self.weight_bias_init = ['normal', 'normal']
        self.emb_init = 'normal'
        self.init_args = [-0.01, 0.01]
        self.input_emb_dim = 0
        # Regularisation.
        self.dropout_flag = False
        self.keep_prob = 1.0
        self.l2_coef = 0.0
        # Optimizer learning rates (Adam for the deep part, FTRL for the wide part).
        self.adam_lr = 0.003
        self.ftrl_lr = 0.1
        # Input format and output locations.
        self.is_tf_dataset = True
        self.output_path = "./output/"
        self.eval_file_name = "eval.log"
        self.loss_file_name = "loss.log"
        self.ckpt_path = "./"
    def argparse_init(self):
        """Overwrite the defaults with values parsed from the command line."""
        parser = argparse_init()
        args, _ = parser.parse_known_args()
        # Fields copied verbatim from the parsed namespace.
        for field in ('data_path', 'epochs', 'batch_size', 'eval_batch_size',
                      'deep_layers_act', 'keep_prob', 'l2_coef', 'ftrl_lr',
                      'adam_lr', 'output_path', 'eval_file_name',
                      'loss_file_name', 'ckpt_path'):
            setattr(self, field, getattr(args, field))
        # Flags arrive as 0/1 integers; store them as booleans.
        self.is_tf_dataset = bool(args.is_tf_dataset)
        self.dropout_flag = bool(args.dropout_flag)
        # Initialisation settings are fixed rather than command-line driven.
        self.weight_bias_init = ['normal', 'normal']
        self.emb_init = 'normal'
        self.init_args = [-0.01, 0.01]
| [
"chenhaozhe1@huawei.com"
] | chenhaozhe1@huawei.com |
f078e52815b8d620a3bc948a26f081c1548b83be | 20c4868b88fd20402ef4a6d589d7382122e48e26 | /python/L1PFProducer_cff.py | 315177ef15fdbf97b3a0fecf3b94d7cd6060f38f | [] | no_license | isobelojalvo/phase2Demonstrator | 7134c526da7e47c67b5a32c70fe76e561b66276c | 64c07512c01593d9e324fea33c61957d8e5fe6fd | refs/heads/master | 2021-01-20T09:27:09.928037 | 2018-07-12T12:51:16 | 2018-07-12T12:51:16 | 90,258,126 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 580 | py | import FWCore.ParameterSet.Config as cms
# CMSSW configuration fragment: EDProducer that builds L1 particle-flow
# objects from Phase-2 calorimeter clusters and Level-1 tracker tracks.
L1PFProducer = cms.EDProducer("PFObjectProducer",
                              debug = cms.untracked.bool(False),
                              EoH_cut = cms.untracked.int32(50),  # NOTE(review): presumably an E/H energy-ratio threshold - confirm units/scale
                              HoE_cut = cms.untracked.int32(2),   # NOTE(review): presumably an H/E energy-ratio threshold - confirm units/scale
                              L1Clusters = cms.InputTag("L1CaloClusterProducer","L1Phase2CaloClusters"),
                              L1TrackInputTag = cms.InputTag("TTTracksFromTracklet", "Level1TTTracks"),
                              )
| [
"ojalvo@wisc.edu"
] | ojalvo@wisc.edu |
3c3e9c924344cc2a78174033eb88efdd0652695c | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p03665/s598490894.py | 27c1418aa65f1383e6daecca8d1c52215b4074ec | [] | no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 222 | py | n, p = map(int, input().split())
A = list(map(int, input().split()))
# 0/1-knapsack subset-sum count: dp[s] = number of subsets of A whose sum is s.
# Each element is at most 100, so every subset sum fits in 0..n*100.
U = n*100
dp = [0]*(U+1)
dp[0] = 1
for a in A:
    # Walk sums downward so each element is used at most once per subset.
    for j in reversed(range(U+1)):
        if 0 <= j-a:
            dp[j] += dp[j-a]
# dp[p::2] covers exactly the sums with the same parity as p, so this prints
# the number of subsets whose sum is congruent to p modulo 2.
print(sum(dp[p::2]))
| [
"66529651+Aastha2104@users.noreply.github.com"
] | 66529651+Aastha2104@users.noreply.github.com |
ad70f7d8ca908f1273ae019f830e9ac792c4f170 | 1b30905742e50f8e45494c847c2bacdd43da21e7 | /src/test50_02.py | cc5f2ee905dd75bc496c56fcdfcb1f880495e42a | [] | no_license | choijaehoon1/baekjoon_workbook | e57e30f84fafa3ffcd8da9a2238260eab29f7d9f | 26966e9fc814c2099408a6b96906522f432aa602 | refs/heads/main | 2023-07-20T18:54:36.727559 | 2021-08-20T14:39:54 | 2021-08-20T14:39:54 | 352,350,916 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,745 | py | from collections import deque
import sys
def island(x,y,cnt):
    """BFS flood-fill: label every land cell connected to (x, y) with id cnt.

    Marks visited cells in the global `visit` grid and writes the island id
    into the global `new_board`.
    """
    visit[x][y] = 1
    q = deque()  # local queue; intentionally shadows the global `q` used by bfs()
    q.append([x,y])
    new_board[x][y] = cnt
    while q:
        x,y = q.popleft()
        for k in range(4):
            nx = x + dx[k]
            ny = y + dy[k]
            if 0<=nx<N and 0<=ny<N:
                if visit[nx][ny] == 0 and board[nx][ny] == 1:
                    visit[nx][ny] = 1
                    new_board[nx][ny] = cnt
                    q.append([nx,ny])
def bfs(num):
    """Multi-source BFS over water starting from island `num`.

    The global queue `q` is pre-seeded with all cells of island `num` and the
    global `dist` grid holds the distance travelled through sea cells
    (board == 0). Returns the bridge length as soon as a cell adjacent to a
    different island's land is reached.
    """
    while q:
        x,y = q.popleft()
        for k in range(4):
            nx = x + dx[k]
            ny = y + dy[k]
            if 0<=nx<N and 0<=ny<N:
                if new_board[nx][ny] != num and board[nx][ny] == 1:
                    return dist[x][y] # return as soon as we first touch a different island
                if dist[nx][ny] == -1 and board[nx][ny] == 0:
                    dist[nx][ny] = dist[x][y] + 1
                    q.append([nx,ny])
# 4-direction offsets: up, down, left, right.
dx = [-1,1,0,0]
dy = [0,0,-1,1]
N = int(sys.stdin.readline().rstrip())
new_board = [[0]*N for _ in range(N)]  # island id per cell (0 = sea)
visit = [[0]*N for _ in range(N)]
board = []
for i in range(N):
    board.append(list(map(int,sys.stdin.readline().rstrip().split())))
# Label every connected land mass with its own id 1..cnt.
cnt = 0
for i in range(N):
    for j in range(N):
        if board[i][j] == 1 and visit[i][j] == 0:
            cnt += 1
            island(i,j,cnt)
# print(new_board)
# For each island, BFS outward over water and keep the shortest bridge found.
answer = int(1e9)
for k in range(1,cnt+1):
    dist = [[-1]*N for _ in range(N)]
    q = deque()
    # Seed the queue with all cells of island k (multi-source BFS).
    for i in range(N):
        for j in range(N):
            if new_board[i][j] == k and board[i][j] == 1:
                q.append([i,j])
                dist[i][j] = 0
    tmp = bfs(k)
    answer = min(tmp,answer)
print(answer)
| [
"wogns_20@naver.com"
] | wogns_20@naver.com |
e548e0467e9821fefcefe8959ce76648d0c8d5b6 | 578db86c51d44ebddd0dc7b1738985b3dc69eb74 | /corehq/apps/sms/migrations/0034_auto_20191007_0756_noop.py | 2022476a223562183c944ac937d5515939d06c95 | [
"BSD-3-Clause"
] | permissive | dimagi/commcare-hq | a43c7dd32b5f89c89fd5aa1b1359ab7301f4ff6b | e7391ddae1af1dbf118211ecb52c83fc508aa656 | refs/heads/master | 2023-08-16T22:38:27.853437 | 2023-08-16T19:07:19 | 2023-08-16T19:07:19 | 247,278 | 499 | 203 | BSD-3-Clause | 2023-09-14T19:03:24 | 2009-07-09T17:00:07 | Python | UTF-8 | Python | false | false | 3,062 | py | from django.db import migrations, models
class Migration(migrations.Migration):
    """No-op migration (see the file name suffix `_noop`): it only re-declares
    the `choices` metadata on existing CharFields. Django records `choices`
    changes as AlterField operations even though they do not change the
    database schema."""

    dependencies = [
        ('sms', '0033_starfishbackend'),
    ]
    operations = [
        migrations.AlterField(
            model_name='messagingevent',
            name='content_type',
            field=models.CharField(choices=[('NOP', 'None'), ('SMS', 'SMS Message'), ('CBK', 'SMS Expecting Callback'), ('SVY', 'SMS Survey'), ('IVR', 'IVR Survey'), ('VER', 'Phone Verification'), ('ADH', 'Manually Sent Message'), ('API', 'Message Sent Via API'), ('CHT', 'Message Sent Via Chat'), ('EML', 'Email')], max_length=3),
        ),
        migrations.AlterField(
            model_name='messagingevent',
            name='recipient_type',
            field=models.CharField(choices=[('CAS', 'Case'), ('MOB', 'Mobile Worker'), ('WEB', 'Web User'), ('UGP', 'User Group'), ('CGP', 'Case Group'), ('MUL', 'Multiple Recipients'), ('LOC', 'Location'), ('LC+', 'Location (including child locations)'), ('VLC', 'Multiple Locations'), ('VL+', 'Multiple Locations (including child locations)'), ('UNK', 'Unknown Contact')], db_index=True, max_length=3, null=True),
        ),
        migrations.AlterField(
            model_name='messagingevent',
            name='status',
            field=models.CharField(choices=[('PRG', 'In Progress'), ('CMP', 'Completed'), ('NOT', 'Not Completed'), ('ERR', 'Error')], max_length=3),
        ),
        migrations.AlterField(
            model_name='messagingsubevent',
            name='content_type',
            field=models.CharField(choices=[('NOP', 'None'), ('SMS', 'SMS Message'), ('CBK', 'SMS Expecting Callback'), ('SVY', 'SMS Survey'), ('IVR', 'IVR Survey'), ('VER', 'Phone Verification'), ('ADH', 'Manually Sent Message'), ('API', 'Message Sent Via API'), ('CHT', 'Message Sent Via Chat'), ('EML', 'Email')], max_length=3),
        ),
        migrations.AlterField(
            model_name='messagingsubevent',
            name='recipient_type',
            field=models.CharField(choices=[('CAS', 'Case'), ('MOB', 'Mobile Worker'), ('WEB', 'Web User')], max_length=3),
        ),
        migrations.AlterField(
            model_name='messagingsubevent',
            name='status',
            field=models.CharField(choices=[('PRG', 'In Progress'), ('CMP', 'Completed'), ('NOT', 'Not Completed'), ('ERR', 'Error')], max_length=3),
        ),
        migrations.AlterField(
            model_name='selfregistrationinvitation',
            name='phone_type',
            field=models.CharField(choices=[('android', 'Android'), ('other', 'Other')], max_length=20, null=True),
        ),
        migrations.AlterField(
            model_name='sqlmobilebackend',
            name='backend_type',
            field=models.CharField(choices=[('SMS', 'SMS'), ('IVR', 'IVR')], default='SMS', max_length=3),
        ),
        migrations.AlterField(
            model_name='sqlmobilebackendmapping',
            name='backend_type',
            field=models.CharField(choices=[('SMS', 'SMS'), ('IVR', 'IVR')], max_length=3),
        ),
    ]
| [
"skelly@dimagi.com"
] | skelly@dimagi.com |
d74903f5c33364fcf7a60715f24a8920190c6ec7 | d3efc82dfa61fb82e47c82d52c838b38b076084c | /Autocase_Result/SjShObligationPut/YW_GGQQ_YWFSJHA_GU_082.py | 3a8a856158287639128a7453120a01d573d4397e | [] | no_license | nantongzyg/xtp_test | 58ce9f328f62a3ea5904e6ed907a169ef2df9258 | ca9ab5cee03d7a2f457a95fb0f4762013caa5f9f | refs/heads/master | 2022-11-30T08:57:45.345460 | 2020-07-30T01:43:30 | 2020-07-30T01:43:30 | 280,388,441 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,927 | py | #!/usr/bin/python
# -*- encoding: utf-8 -*-
import sys
import json
sys.path.append("/home/yhl2/workspace/xtp_test/xtp/api")
from xtp_test_case import *
sys.path.append("/home/yhl2/workspace/xtp_test/option/service")
from OptMainService import *
from OptQueryStkPriceQty import *
sys.path.append("/home/yhl2/workspace/xtp_test/service")
from log import *
from CaseParmInsertMysql import *
sys.path.append("/home/yhl2/workspace/xtp_test/option/mysql")
from Opt_SqlData_Transfer import *
sys.path.append("/home/yhl2/workspace/xtp_test/mysql")
from QueryOrderErrorMsg import queryOrderErrorMsg
sys.path.append("/home/yhl2/workspace/xtp_test/utils")
from env_restart import *
# Python 2 only: reload(sys) re-exposes setdefaultencoding so the default
# string encoding can be forced to UTF-8 for the Chinese literals below.
reload(sys)
sys.setdefaultencoding('utf-8')
class YW_GGQQ_YWFSJHA_GU_082(xtp_test_case):
    """Stock-option order test case YW_GGQQ_YWFSJHA_GU_082.

    Scenario (see `title` below): buy-to-close an obligation-side position
    with a market order ("remainder converts to limit") under a funds check
    where available funds are below zero and the order would reduce them
    further; the order is expected to be rejected ('废单').
    """
    def setUp(self):
        # Reset the account's fund assets for this case, restart the SH test
        # environment, and re-login so the trading session is clean.
        sql_transfer = Opt_SqlData_Transfer()
        sql_transfer.transfer_fund_asset('YW_GGQQ_YWFSJHA_GU_082')
        clear_data_and_restart_sh()
        Api.trade.Logout()
        Api.trade.Login()
    def test_YW_GGQQ_YWFSJHA_GU_082(self):
        title = '买平(义务方平仓):市价剩余转限价-验资(可用资金<0且下单导致可用资金减少)'
        # Define the expected outcome of this test case.
        # Possible expected states: initial, unfilled, partially filled, fully
        # filled, partial-cancel reported, partially cancelled, reported and
        # awaiting cancel, cancelled, rejected, cancel-rejected, internal cancel.
        # xtp_ID and cancel_xtpID default to 0 and need not be changed.
        case_goal = {
            '期望状态': '废单',
            'errorID': 11010120,
            'errorMSG': queryOrderErrorMsg(11010120),
            '是否生成报单': '是',
            '是否是撤废': '否',
            'xtp_ID': 0,
            'cancel_xtpID': 0,
        }
        logger.warning(title)
        # Define the order (entrust) parameters ------------------------------
        # Args: ticker, market, security type, security status, trading
        # status, side (B = buy, S = sell), expected state, Api.
        stkparm = QueryStkPriceQty('11002397', '1', '*', '1', '0', 'P', case_goal['期望状态'], Api)
        # If fetching the order parameters failed, fail the test case.
        if stkparm['返回结果'] is False:
            rs = {
                '用例测试结果': stkparm['返回结果'],
                '测试错误原因': '获取下单参数失败,' + stkparm['错误原因'],
            }
            logger.error('查询结果为False,错误原因: {0}'.format(
                json.dumps(rs['测试错误原因'], encoding='UTF-8', ensure_ascii=False)))
            self.assertEqual(rs['用例测试结果'], True)
        else:
            wt_reqs = {
                'business_type':Api.const.XTP_BUSINESS_TYPE['XTP_BUSINESS_TYPE_OPTION'],
                'order_client_id':1,
                'market': Api.const.XTP_MARKET_TYPE['XTP_MKT_SH_A'],
                'ticker': stkparm['证券代码'],
                'side': Api.const.XTP_SIDE_TYPE['XTP_SIDE_BUY'],
                'position_effect':Api.const.XTP_POSITION_EFFECT_TYPE['XTP_POSITION_EFFECT_CLOSE'],
                'price_type': Api.const.XTP_PRICE_TYPE['XTP_PRICE_REVERSE_BEST_LIMIT'],
                'price': stkparm['涨停价'],
                'quantity': 1
            }
            ParmIni(Api, case_goal['期望状态'], wt_reqs['price_type'])
            CaseParmInsertMysql(case_goal, wt_reqs)
            rs = serviceTest(Api, case_goal, wt_reqs)
            if rs['用例测试结果']:
                logger.warning('执行结果为{0}'.format(str(rs['用例测试结果'])))
            else:
                logger.warning('执行结果为{0},{1},{2}'.format(
                    str(rs['用例测试结果']), str(rs['用例错误源']),
                    json.dumps(rs['用例错误原因'], encoding='UTF-8', ensure_ascii=False)))
            self.assertEqual(rs['用例测试结果'], True) # 4
# Allow running this test module directly.
if __name__ == '__main__':
    unittest.main()
| [
"418033945@qq.com"
] | 418033945@qq.com |
6b9fc9a69050a9b0c6db9f8b90649613a52e8654 | 077c91b9d5cb1a6a724da47067483c622ce64be6 | /fuzz_pyretic_mesh_proactive_firewall_no_close_check_loop_mcs/interreplay_53_l_6/openflow_replay_config.py | 451f2b5836e94a2683df01ee2a01405c012fc4bd | [] | no_license | Spencerx/experiments | 0edd16398725f6fd9365ddbb1b773942e4878369 | aaa98b0f67b0d0c0c826b8a1565916bf97ae3179 | refs/heads/master | 2020-04-03T10:11:40.671606 | 2014-06-11T23:55:11 | 2014-06-11T23:55:11 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,550 | py |
from config.experiment_config_lib import ControllerConfig
from sts.topology import *
from sts.control_flow import OpenFlowReplayer
from sts.simulation_state import SimulationConfig
from sts.input_traces.input_logger import InputLogger
# STS simulation setup: a single Pyretic firewall controller driving a
# 3-switch mesh topology; controller processes are killed when STS exits.
simulation_config = SimulationConfig(controller_configs=[ControllerConfig(start_cmd='./pyretic.py -m p0 pyretic.examples.firewall_for_sts_no_close', label='c1', address='127.0.0.1', cwd='../pyretic', kill_cmd='ps aux | grep -e pox -e pyretic | grep -v simulator | cut -c 9-15 | xargs kill -9')],
                     topology_class=MeshTopology,
                     topology_params="num_switches=3",
                     patch_panel_class=BufferedPatchPanel,
                     multiplex_sockets=False,
                     kill_controllers_on_exit=True)
# Replay the recorded OpenFlow events from the MCS interreplay trace.
control_flow = OpenFlowReplayer(simulation_config, "experiments/fuzz_pyretic_mesh_proactive_firewall_no_close_check_loop_mcs/interreplay_53_l_6/events.trace")
# wait_on_deterministic_values=False
# delay_flow_mods=False
# Invariant check: 'InvariantChecker.python_check_loops'
# Bug signature: '{'hs_history': [(x^L) - ([]), (dl_vlan:65535,dl_vlan_pcp:0,dl_type:2054,nw_src:123.123.1.3/32,nw_dst:123.123.3.3/32) - ([]), (dl_vlan:65535,dl_vlan_pcp:0,dl_type:2054,nw_src:123.123.1.3/32,nw_dst:123.123.3.3/32) - ([]), (dl_vlan:65535,dl_vlan_pcp:0,dl_type:2054,nw_src:123.123.1.3/32,nw_dst:123.123.3.3/32) - ([])], 'hdr': (dl_vlan:65535,dl_vlan_pcp:0,dl_type:2054,nw_src:123.123.1.3/32,nw_dst:123.123.3.3/32) - ([]), 'visits': [100004, 200002, 300001, 100001], 'port': 200002}'
| [
"cs@cs.berkeley.edu"
] | cs@cs.berkeley.edu |
f697f4f21e50a268bb9b96f4632268b6cd769f87 | a36eb4685fd050c8e1ecb4a333470724bd76df60 | /Leetcode/Jul20/260720/q3/q3.py | 1f406db0251d7cdda7716651a8d622110954fbbd | [] | no_license | phibzy/Contests | c9cff976909234cfafc51db9d9dde01c26123168 | 24aac4c81f34916945be03ed0b7c916dae4dbbb4 | refs/heads/master | 2023-01-20T06:23:06.837937 | 2020-11-30T06:54:58 | 2020-11-30T06:54:58 | 265,750,701 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 403 | py | #!/usr/bin/python3
"""
@author : Chris Phibbs
@created : Sunday Jul 26, 2020 13:14:31 AEST
@file : q3
"""
# Definition for a binary tree node.
# class TreeNode:
# def __init__(self, val=0, left=None, right=None):
# self.val = val
# self.left = left
# self.right = right
class Solution:
    def countPairs(self, root, distance):
        """Count pairs of leaf nodes whose tree-path length is <= distance.

        Post-order DFS: each call returns the list of edge-distances from the
        current node down to every leaf that could still form a pair; pairs
        are counted where the left and right subtree lists meet.

        Args:
            root: binary-tree node with .left/.right (or None for empty tree).
            distance: maximum allowed path length between two leaves.

        Returns:
            int: number of qualifying leaf pairs.
        """
        good_pairs = 0

        def leaf_depths(node):
            nonlocal good_pairs
            if node is None:
                return []
            if node.left is None and node.right is None:
                return [1]  # a leaf is one edge away from its parent
            left = leaf_depths(node.left)
            right = leaf_depths(node.right)
            # Pairs joining through this node span depth_left + depth_right edges.
            good_pairs += sum(1 for a in left for b in right if a + b <= distance)
            # Bubble depths upward, pruning anything already too deep to pair.
            return [d + 1 for d in left + right if d + 1 <= distance]

        leaf_depths(root)
        return good_pairs
| [
"phibzy@gmail.com"
] | phibzy@gmail.com |
1d71e6128516fc6148c21ca11b9959d94edff31c | 1a166165ab8287d01cbb377a13efdb5eff5dfef0 | /sdk/communication/azure-communication-phonenumbers/test/_shared/testcase.py | fa09dc67deb019afa1bced230916cbee2dc65c51 | [
"MIT",
"LicenseRef-scancode-generic-cla",
"LGPL-2.1-or-later"
] | permissive | manoj0806/azure-sdk-for-python | 7a14b202ff80f528abd068bf50334e91001a9686 | aab999792db1132232b2f297c76800590a901142 | refs/heads/master | 2023-04-19T16:11:31.984930 | 2021-04-29T23:19:49 | 2021-04-29T23:19:49 | 363,025,016 | 1 | 0 | MIT | 2021-04-30T04:23:35 | 2021-04-30T04:23:35 | null | UTF-8 | Python | false | false | 3,389 | py |
# -------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
# --------------------------------------------------------------------------
import re
import os
from devtools_testutils import AzureTestCase
from azure_devtools.scenario_tests import RecordingProcessor, ReplayableTest
from azure_devtools.scenario_tests.utilities import is_text_payload
from azure.communication.phonenumbers._shared.utils import parse_connection_str
class ResponseReplacerProcessor(RecordingProcessor):
    """Scrubs phone-number data out of recorded HTTP responses.

    Replaces phone numbers and ids in the JSON body (and the response URL)
    with a fixed placeholder so test recordings contain no real numbers.
    """
    def __init__(self, keys=None, replacement="sanitized"):
        self._keys = keys if keys else []          # currently unused here; kept for interface parity
        self._replacement = replacement            # placeholder written over sensitive values
    def process_response(self, response):
        import json
        try:
            body = json.loads(response['body']['string'])
            if 'phoneNumbers' in body:
                for item in body["phoneNumbers"]:
                    if isinstance(item, str):
                        # Flat list of number strings: collapse the whole list
                        # to a single placeholder entry and stop scanning.
                        body["phoneNumbers"] = [self._replacement]
                        break
                    # Otherwise each entry is a dict; scrub its sensitive fields.
                    if "phoneNumber" in item:
                        item['phoneNumber'] = self._replacement
                    if "id" in item:
                        item['id'] = self._replacement
            response['body']['string'] = json.dumps(body)
            response['url'] = self._replacement
            return response
        except (KeyError, ValueError, TypeError):
            # Non-JSON or unexpectedly shaped body: return it untouched.
            return response
class BodyReplacerProcessor(RecordingProcessor):
    """Sanitize the sensitive info inside request or response bodies."""
    def __init__(self, keys=None, replacement="sanitized"):
        self._replacement = replacement    # placeholder written over sensitive values
        self._keys = keys if keys else []  # top-level JSON keys to scrub
    def process_request(self, request):
        # Request bodies are bytes: decode, scrub, re-encode.
        if is_text_payload(request) and request.body:
            request.body = self._replace_keys(request.body.decode()).encode()
        return request
    def process_response(self, response):
        # Response bodies are already strings in the recording structure.
        if is_text_payload(response) and response['body']['string']:
            response['body']['string'] = self._replace_keys(response['body']['string'])
        return response
    def _replace_keys(self, body):
        """Replace the configured top-level JSON keys with the placeholder.

        NOTE(review): when json.loads fails, the original string is returned
        unchanged; but a KeyError raised mid-scrub would return the already
        parsed (non-string) object - confirm callers tolerate that.
        """
        import json
        try:
            body = json.loads(body)
            for key in self._keys:
                if key in body:
                    body[key] = self._replacement
        except (KeyError, ValueError):
            return body
        return json.dumps(body)
class CommunicationTestCase(AzureTestCase):
    """Base test case for Azure Communication service tests.

    Adds service-specific headers to the recording filter and resolves the
    connection string (a fake one while replaying recordings).
    """
    # Extra headers stripped from recordings on top of the defaults.
    FILTER_HEADERS = ReplayableTest.FILTER_HEADERS + ['x-azure-ref', 'x-ms-content-sha256', 'location']
    def __init__(self, method_name, *args, **kwargs):
        super(CommunicationTestCase, self).__init__(method_name, *args, **kwargs)
    def setUp(self):
        super(CommunicationTestCase, self).setUp()
        if self.is_playback():
            # Playback never contacts a live resource; use a dummy endpoint/key.
            self.connection_str = "endpoint=https://sanitized.communication.azure.com/;accesskey=fake==="
        else:
            self.connection_str = os.getenv('AZURE_COMMUNICATION_SERVICE_CONNECTION_STRING')
            endpoint, _ = parse_connection_str(self.connection_str)
self.scrubber.register_name_pair(self._resource_name, "sanitized") | [
"noreply@github.com"
] | manoj0806.noreply@github.com |
5bcf0dda20970a9d1a8e0b883d785cd389b0b7f1 | b853c16efafa74a9e1cb076008a17c9d85389fca | /HOME/笔记/待整理笔记/线程/12.23-t/alarm.py | f1cf5c62caf9f6032bd2f21d19cba926cd7085c8 | [] | no_license | Jason0221/backup | 14c48f1adb871b915d6f0ba49a26396e7cf0cd64 | dfd54cbcf7c27b0df6249104747e9a7ceffcb392 | refs/heads/master | 2020-06-03T13:14:39.751679 | 2017-05-15T08:50:38 | 2017-05-15T08:50:38 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 179 | py | #!/usr/bin/python
# Demo of POSIX alarm signals (Python 2 print syntax).
import signal
import time
# Schedule a SIGALRM to be delivered in 5 seconds.
signal.alarm(5)
time.sleep(3)
# Re-arming returns the seconds remaining on the previously scheduled alarm
# (here 5 - 3 = 2), so `num` prints 2.
num = signal.alarm(4)
print num
#signal.pause()
# Loop until the pending SIGALRM's default action terminates the process.
while True:
    time.sleep(1)
    print "wait....."
| [
"jasonlearning@outlook.com"
] | jasonlearning@outlook.com |
566258060889cb72a0fb1766a1d2280c6a668f14 | 3f1ba75a78568754f221988e69c17df20d69aa8d | /day07/03-函数嵌套.py | 2ebe517f1f2fe89e1c2c4612d6eee9c92c05275e | [] | no_license | itkasumy/LNHPython | 8d2a961c6446923cebc4e4bb99ed4631a90cf3d5 | 4a5c0e7991e167b0406c1e56bae73899dd90390b | refs/heads/master | 2020-04-15T10:16:51.981833 | 2019-01-25T08:34:07 | 2019-01-25T08:34:07 | 164,589,222 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 199 | py | def father(name):
    # Each nested def is visible only inside its enclosing function; the
    # calls therefore run outermost to innermost.
    print('from father %s' %name)
    def son():
        print('from son')
        def grandson():
            # Innermost level: defined and callable only inside son().
            print('from grandson...')
        grandson()
    son()
father('ksm')
| [
"18500682038@163.com"
] | 18500682038@163.com |
74de8906c049b86432a83972f2d2a1cd447e69ad | 835db5ec0fc127df1de58a9a3af4a869a1a7cd84 | /assignments/functions/every_other_chr.py | 5720eb6ef06ff3a5f6025e13d3a57aff21b84fc7 | [] | no_license | thorhilduranna/2020-3-T-111-PROG | 3ba097e1b54d68bdd6efbf1d7f90911a9336fa5a | c9758b61256aa6e39a3308e576c8ad0bf2b6d027 | refs/heads/master | 2023-02-09T23:39:22.879653 | 2021-01-07T12:59:19 | 2021-01-07T12:59:19 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 228 | py | def every_other_chr(a_str):
    '''Return a new string containing the characters of a_str at even indices (0, 2, 4, ...).'''
    return a_str[::2]  # step-2 slice starting at index 0
# Demo: read a string from the user and show every other character.
input_str = input("Enter a string: ")
print("Every other character:", every_other_chr(input_str))
| [
"hrafnl@gmail.com"
] | hrafnl@gmail.com |
37f2ba7809ae2039b8567f6cd6be0f8f8ebe2447 | 88eeba6df8382687f36a4765bb298f76465c8e81 | /general/chainerrl/chainerrl/chainerrl/q_function.py | b47c78e34912c52abd9195222df91db7c7c75e7c | [
"MIT"
] | permissive | daniellawson9999/quick_start | db0b6e382efd640754ca1e7800753c94e668423a | 947d61f118433dcd4cb845f27649ebfbc8062ecc | refs/heads/master | 2022-02-23T21:54:16.273530 | 2019-09-27T01:46:41 | 2019-09-27T01:46:41 | 197,873,032 | 0 | 0 | null | 2019-07-20T03:12:34 | 2019-07-20T03:12:31 | null | UTF-8 | Python | false | false | 673 | py | from __future__ import division
from __future__ import unicode_literals
from __future__ import print_function
from __future__ import absolute_import
from builtins import * # NOQA
from future import standard_library
standard_library.install_aliases() # NOQA
from abc import ABCMeta
from abc import abstractmethod
from future.utils import with_metaclass
class StateQFunction(with_metaclass(ABCMeta, object)):
    """Abstract Q-function evaluated from a state alone: Q(s, .)."""
    @abstractmethod
    def __call__(self, x):
        """Compute action values from an observation batch x."""
        raise NotImplementedError()
class StateActionQFunction(with_metaclass(ABCMeta, object)):
    """Abstract Q-function evaluated from a state-action pair: Q(s, a)."""
    @abstractmethod
    def __call__(self, x, a):
        """Compute the value of taking action batch a in observation batch x."""
        raise NotImplementedError()
| [
"daniellawson9999@gmail.com"
] | daniellawson9999@gmail.com |
73f8720174f8e6518ef3716c337664d59c628864 | 82f6a6c50a1fef2d7522a43cc4f60e5ff80b37a8 | /solutions/Ambiguous Coordinates/solution.py | 309717524e6eb2befded94b0bafc3907f4e2069b | [
"MIT"
] | permissive | nilax97/leetcode-solutions | ca0f9545ce70975617738f053e0935fac00b04d4 | d3c12f2b289662d199510e0431e177bbf3cda121 | refs/heads/master | 2023-05-14T02:21:48.893716 | 2021-06-08T13:16:53 | 2021-06-08T13:16:53 | 374,466,870 | 3 | 0 | null | null | null | null | UTF-8 | Python | false | false | 597 | py | class Solution:
def ambiguousCoordinates(self, S: str) -> List[str]:
def make(frag):
N = len(frag)
for d in range(1, N+1):
left = frag[:d]
right = frag[d:]
if ((not left.startswith('0') or left == '0')
and (not right.endswith('0'))):
yield left + ('.' if d != N else '') + right
S = S[1:-1]
return ["({}, {})".format(*cand)
for i in range(1, len(S))
for cand in itertools.product(make(S[:i]), make(S[i:]))]
| [
"agarwal.nilaksh@gmail.com"
] | agarwal.nilaksh@gmail.com |
4291ea8563309410ba811f227c3159ae6c856f88 | cc0e381fde5cc6870770396d990d2bad66a3186c | /PythonExercicios/ex006.py | 653eff93c4c8ecd980c2238d4ae97cf870bd2c40 | [] | no_license | jnthmota/Python-PySpark-Cursos | 2c7fac79867059e0dfe4f0c4b6b6e1d32260530f | 680a4c422e14a26036379f49f0de6b5e73d7e431 | refs/heads/main | 2023-08-15T00:22:59.189649 | 2021-09-12T23:00:39 | 2021-09-12T23:00:39 | 373,610,471 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 332 | py | #06)CRIE UM ALGORITMO QUE LEIA UM NÚMERO E MOSTRE O SEU DOBRO, TRIPLO E RAIZ QUADRADA
a = int(input('Digite um valor: '))
# Double and triple of the input value.
d = a * 2
t = a * 3
#rq = a ** (1/2)
# OR, equivalently, using pow:
rq = pow(a, (1/2)) # pow(base, exponent) == base ** exponent
print('O seu dobro de {} é: {} \n O seu triplo de {} é: {} \n A raiz Quadrada de {} é: {:.4}'.format(a, d, a, t, a, rq))
"jonathan.mota@outlook.com"
] | jonathan.mota@outlook.com |
21e359d18b7e8e9f78fb251a5fb993843cc4ce54 | 762bd83dd4c96a6a5890e21b14252104fcfdc51f | /hw_2/code/conftest.py | 24d17a5bcb26cb484469f373c9bd8b63c1ad7deb | [] | no_license | Batroff/2021-1-MAILRU-SDET-Python-S-Savranskii | 1c1207bd9b22e9f2bd99af40767d6507e63c7380 | 133efd2960ddacc51ec7cba29bd7fce5e29223d9 | refs/heads/main | 2023-06-02T18:06:05.752307 | 2021-06-16T15:23:03 | 2021-06-16T15:23:03 | 349,384,217 | 0 | 0 | null | 2021-06-11T13:14:27 | 2021-03-19T10:28:47 | Python | UTF-8 | Python | false | false | 1,976 | py | import logging
import os
import shutil
import allure
from ui.fixtures import *
def pytest_addoption(parser):
    """Register the suite's custom command-line options."""
    parser.addoption('--url', default='http://www.target.my.com')
    parser.addoption('--browser', default='chrome')
    parser.addoption('--debug_log', action='store_true')
def pytest_configure(config):
    """Create a clean per-session artifacts directory and expose it on config."""
    base_test_dir = os.path.join('tmp', 'tests')
    # Only the xdist master process (the one without `workerinput`) wipes and
    # recreates the directory; workers just reuse it.
    if not hasattr(config, 'workerinput'):
        if os.path.exists(base_test_dir):
            shutil.rmtree(base_test_dir)
        os.makedirs(base_test_dir)
    config.base_test_dir = base_test_dir
@pytest.fixture(scope='session')
def config(request):
    """Collect the custom CLI options into a plain dict for the whole session."""
    url = request.config.getoption('--url')
    browser = request.config.getoption('--browser')
    debug_log = request.config.getoption('--debug_log')
    return {'url': url, 'browser': browser, 'debug_log': debug_log}
@pytest.fixture(scope='session')
def repo_root():
    """Absolute path of the directory containing this conftest."""
    return os.path.abspath(os.path.join(__file__, os.pardir))
@pytest.fixture(scope='function')
def test_dir(request):
    """Create and return a per-test artifacts directory."""
    # Directory name pattern: filename.py-classname-test_name
    test_dir = os.path.join(request.config.base_test_dir, request._pyfuncitem.nodeid.replace('::', '-'))
    os.makedirs(test_dir)
    return test_dir
@pytest.fixture(scope='function', autouse=True)
def logger(test_dir, config):
    """Per-test file logger; attaches the log to the Allure report on teardown."""
    log_formatter = logging.Formatter('%(asctime)s - %(filename)-15s - %(levelname)-6s - %(message)s')
    log_file = os.path.join(test_dir, 'test.log')
    # --debug_log raises verbosity from INFO to DEBUG.
    log_level = logging.DEBUG if config['debug_log'] else logging.INFO
    file_handler = logging.FileHandler(log_file, 'w')
    file_handler.setFormatter(log_formatter)
    file_handler.setLevel(log_level)
    log = logging.getLogger('test')
    log.propagate = False  # keep test output out of the root logger
    log.setLevel(log_level)
    log.addHandler(file_handler)
    yield log
    # Teardown: close handlers so the file is flushed, then attach it to Allure.
    for handler in log.handlers:
        handler.close()
    with open(log_file, 'r') as f:
        allure.attach(f.read(), 'test.log', attachment_type=allure.attachment_type.TEXT)
| [
"savran46@mail.ru"
] | savran46@mail.ru |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.