blob_id
stringlengths 40
40
| directory_id
stringlengths 40
40
| path
stringlengths 3
288
| content_id
stringlengths 40
40
| detected_licenses
listlengths 0
112
| license_type
stringclasses 2
values | repo_name
stringlengths 5
115
| snapshot_id
stringlengths 40
40
| revision_id
stringlengths 40
40
| branch_name
stringclasses 684
values | visit_date
timestamp[us]date 2015-08-06 10:31:46
2023-09-06 10:44:38
| revision_date
timestamp[us]date 1970-01-01 02:38:32
2037-05-03 13:00:00
| committer_date
timestamp[us]date 1970-01-01 02:38:32
2023-09-06 01:08:06
| github_id
int64 4.92k
681M
⌀ | star_events_count
int64 0
209k
| fork_events_count
int64 0
110k
| gha_license_id
stringclasses 22
values | gha_event_created_at
timestamp[us]date 2012-06-04 01:52:49
2023-09-14 21:59:50
⌀ | gha_created_at
timestamp[us]date 2008-05-22 07:58:19
2023-08-21 12:35:19
⌀ | gha_language
stringclasses 147
values | src_encoding
stringclasses 25
values | language
stringclasses 1
value | is_vendor
bool 2
classes | is_generated
bool 2
classes | length_bytes
int64 128
12.7k
| extension
stringclasses 142
values | content
stringlengths 128
8.19k
| authors
listlengths 1
1
| author_id
stringlengths 1
132
|
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
3276de1e752d2ef39dbbb444cbf57f4b51d4316e
|
cad91ae76d2746a6c28ddda0f33a58f9d461378f
|
/PaddlePaddle/Classification/RN50v1.5/optimizer.py
|
8d128ff97b5a0186f87a48845218c12b7d4e81b1
|
[] |
no_license
|
NVIDIA/DeepLearningExamples
|
fe677521e7e2a16e3cb0b77e358f9aab72f8c11a
|
a5388a45f71a949639b35cc5b990bd130d2d8164
|
refs/heads/master
| 2023-08-31T20:57:08.798455
| 2023-08-23T10:09:12
| 2023-08-23T10:09:12
| 131,881,622
| 11,838
| 3,124
| null | 2023-08-28T16:57:33
| 2018-05-02T17:04:05
|
Jupyter Notebook
|
UTF-8
|
Python
| false
| false
| 2,166
|
py
|
# Copyright (c) 2022 NVIDIA Corporation. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
import logging
from paddle import optimizer as optim
class Momentum:
"""
Simple Momentum optimizer with velocity state.
Args:
args(Namespace): Arguments obtained from ArgumentParser.
learning_rate(float|LRScheduler): The learning rate used to update parameters.
Can be a float value or a paddle.optimizer.lr.LRScheduler.
"""
def __init__(self, args, learning_rate):
super().__init__()
self.learning_rate = learning_rate
self.momentum = args.momentum
self.weight_decay = args.weight_decay
self.grad_clip = None
self.multi_precision = args.amp
def __call__(self):
# model_list is None in static graph
parameters = None
opt = optim.Momentum(
learning_rate=self.learning_rate,
momentum=self.momentum,
weight_decay=self.weight_decay,
grad_clip=self.grad_clip,
multi_precision=self.multi_precision,
parameters=parameters)
return opt
def build_optimizer(args, lr):
"""
Build a raw optimizer with learning rate scheduler.
Args:
args(Namespace): Arguments obtained from ArgumentParser.
lr(paddle.optimizer.lr.LRScheduler): A LRScheduler used for training.
return:
optim(paddle.optimizer): A normal optmizer.
"""
optimizer_mod = sys.modules[__name__]
opt = getattr(optimizer_mod, args.optimizer)(args, learning_rate=lr)()
logging.info("build optimizer %s success..", opt)
return opt
|
[
"kkudrynski@nvidia.com"
] |
kkudrynski@nvidia.com
|
5e02da1fb75760d07cd1c9d3dc9e28f51968d90a
|
d1d067bad6b65e2be1b5488d5abd17c0e9cd1756
|
/perdiem/artist/forms.py
|
b83abf50adfe49eb0fec71576b242b9a8e66a202
|
[] |
no_license
|
localastronaut/perdiem-django
|
7b84cf34b83a49cc4695b735321f52eb2be01260
|
c273dc6fda5533c52710cde0f196886369b36c9d
|
refs/heads/master
| 2021-06-13T14:31:54.089816
| 2016-05-24T06:13:10
| 2016-05-24T06:13:10
| 59,598,476
| 0
| 0
| null | 2016-05-24T18:31:19
| 2016-05-24T18:31:18
| null |
UTF-8
|
Python
| false
| false
| 1,376
|
py
|
"""
:Created: 19 March 2016
:Author: Lucas Connors
"""
from django import forms
class CoordinatesFromAddressForm(forms.Form):
address = forms.CharField()
class ArtistApplyForm(forms.Form):
artist_name = forms.CharField(label='Artist / Band Name')
genre = forms.CharField()
hometown = forms.CharField()
email = forms.EmailField()
phone_number = forms.CharField()
bio = forms.CharField(widget=forms.Textarea(attrs={'placeholder': 'We started playing music because...',}))
campaign_reason = forms.CharField(label='Why are you raising money?', widget=forms.Textarea(attrs={'placeholder': 'We are trying to record our album...',}))
campaign_expenses = forms.CharField(label='What do you need the money for?', widget=forms.Textarea(attrs={'placeholder': 'Mixing, mastering, studio time, etc...',}))
facebook = forms.URLField(required=False, widget=forms.TextInput(attrs={'placeholder': 'http://',}))
twitter = forms.CharField(required=False, widget=forms.TextInput(attrs={'placeholder': '@',}))
instagram = forms.CharField(required=False, widget=forms.TextInput(attrs={'placeholder': '@',}))
music_link = forms.URLField(label='Link to music', widget=forms.TextInput(attrs={'placeholder': 'http://',}))
terms = forms.BooleanField(label='Terms & Conditions', help_text='I have read and agree to the Terms & Conditions')
|
[
"lucas.revolutiontech@gmail.com"
] |
lucas.revolutiontech@gmail.com
|
bcdbf07e584e7fad3a4d3eef5a75fc13b6f524e5
|
15f321878face2af9317363c5f6de1e5ddd9b749
|
/solutions_python/Problem_34/238.py
|
ec0410ac5b4c8cc4443fa88ef75da4466aa226a6
|
[] |
no_license
|
dr-dos-ok/Code_Jam_Webscraper
|
c06fd59870842664cd79c41eb460a09553e1c80a
|
26a35bf114a3aa30fc4c677ef069d95f41665cc0
|
refs/heads/master
| 2020-04-06T08:17:40.938460
| 2018-10-14T10:12:47
| 2018-10-14T10:12:47
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 404
|
py
|
#-*-coding:utf-8-*-
import sys, re
fh = open(sys.argv[1])
L, D, N = map(int,fh.readline().split(' '))
words = []
for i in range(D): words.append(fh.readline().strip())
for i in range(N):
pattern=fh.readline().strip()
reg = re.compile(re.sub('\\)', ']', re.sub('\\(', '[',pattern)))
n = 0
for w in words:
if reg.match(w): n += 1
pass
print("Case #%d: %d" % (i + 1, n))
|
[
"miliar1732@gmail.com"
] |
miliar1732@gmail.com
|
75e13f2077f48e5efe61d8eadde5b91febcc50bd
|
f7badaf5a680a3276ba8c4223a7c411eff4e0ef5
|
/users/validate_vk_mini_apps.py
|
a534317ac67fc58cf317a7bcfd82caaa4d172d5a
|
[] |
no_license
|
petrshirin/easy-meet-backend
|
df9d29190b3f7acb524737f18192abf6c24c346b
|
4e315e5e5abeb29bab68d53a83ee005cb13fd28f
|
refs/heads/master
| 2023-01-09T04:42:40.710603
| 2020-11-11T05:05:15
| 2020-11-11T05:05:15
| 310,966,162
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,057
|
py
|
from base64 import b64encode
from collections import OrderedDict
from hashlib import sha256
from hmac import HMAC
from urllib.parse import urlparse, parse_qsl, urlencode
from django.conf import settings
from typing import Union, Dict
def check_sign(*, query: dict, secret: str) -> Union[Dict, None]:
"""Check VK Apps signature"""
vk_subset = OrderedDict(sorted(x for x in query.items() if x[0][:3] == "vk_"))
hash_code = b64encode(HMAC(secret.encode(), urlencode(vk_subset, doseq=True).encode(), sha256).digest())
decoded_hash_code = hash_code.decode('utf-8')[:-1].replace('+', '-').replace('/', '_')
if query["sign"] == decoded_hash_code:
return vk_subset
def validate_request(url: str) -> Union[Dict, None]:
if not url:
return None
client_secret = settings.MINI_APP_SECRET # Защищённый ключ из настроек вашего приложения
query_params = dict(parse_qsl(urlparse(url).query, keep_blank_values=True))
return check_sign(query=query_params, secret=client_secret)
|
[
"p.e.shirin@gmail.com"
] |
p.e.shirin@gmail.com
|
88115d2979a0ddc98e1aab042b9257d23e41433b
|
fcc88521f63a3c22c81a9242ae3b203f2ea888fd
|
/Python3/1189-Maximum-Number-of-Balloons/soln.py
|
bd9afd7441f12e992a1a0795f2a1914ec11278d7
|
[
"MIT"
] |
permissive
|
wyaadarsh/LeetCode-Solutions
|
b5963e3427aa547d485d3a2cb24e6cedc72804fd
|
3719f5cb059eefd66b83eb8ae990652f4b7fd124
|
refs/heads/master
| 2022-12-06T15:50:37.930987
| 2020-08-30T15:49:27
| 2020-08-30T15:49:27
| 291,811,790
| 0
| 1
|
MIT
| 2020-08-31T19:57:35
| 2020-08-31T19:57:34
| null |
UTF-8
|
Python
| false
| false
| 203
|
py
|
class Solution:
def maxNumberOfBalloons(self, text: str) -> int:
balloon = [('a', 1), ('b', 1), ('l', 2), ('o', 2), ('n', 1)]
return min(text.count(ch) // cnt for ch, cnt in balloon)
|
[
"zhang623@wisc.edu"
] |
zhang623@wisc.edu
|
cca4da9a90f7539c0deb3454ad0983978deade59
|
2b6b9deea3d020e87815caf7e3985f050103abf6
|
/pie/initialization.py
|
a5613570f3d272f0978bb0fe6b838f732348899b
|
[] |
no_license
|
PonteIneptique/pie
|
a3de5672dcb38936748ec4a43a81edfd1aa07fb0
|
64700bfbf6cc7c3efc1c5b0e14d6f06d6bf1b77f
|
refs/heads/master
| 2021-06-07T08:04:24.117804
| 2018-07-09T09:31:45
| 2018-07-09T09:31:45
| 154,836,022
| 0
| 1
| null | 2021-05-12T09:23:03
| 2018-10-26T13:06:10
|
Python
|
UTF-8
|
Python
| false
| false
| 1,465
|
py
|
import torch
import torch.nn as nn
def init_embeddings(embeddings):
embeddings.reset_parameters()
# nn.init.constant_(embeddings.weight, 0.01)
def init_linear(linear):
linear.reset_parameters()
nn.init.constant_(linear.bias, 0.)
pass
def init_rnn(rnn, forget_bias=1.0):
for pname, p in rnn.named_parameters():
if 'bias' in pname:
nn.init.constant_(p, 0.)
# forget_bias
if 'LSTM' in type(rnn).__name__:
nn.init.constant_(p[rnn.hidden_size:rnn.hidden_size*2], forget_bias)
else:
nn.init.xavier_uniform_(p)
def init_conv(conv):
conv.reset_parameters()
nn.init.xavier_uniform_(conv.weight)
nn.init.constant_(conv.bias, 0.)
pass
def init_pretrained_embeddings(path, encoder, embedding):
with open(path) as f:
nemb, dim = next(f).split()
if int(dim) != embedding.weight.data.size(1):
raise ValueError("Unexpected embeddings size: {}".format(dim))
inits = 0
for line in f:
word, *vec = line.split()
if word in encoder.table:
embedding.weight.data[encoder.table[word], :].copy_(
torch.tensor([float(v) for v in vec]))
inits += 1
if embedding.padding_idx is not None:
embedding.weight.data[embedding.padding_idx].zero_()
print("Initialized {}/{} embeddings".format(inits, embedding.num_embeddings))
|
[
"enrique.manjavacas@gmail.com"
] |
enrique.manjavacas@gmail.com
|
3873f848d8c6404a56bae01a616e6ebe1340f841
|
272a8b0b38e4af5f22dd811040f0ca2b0b111c61
|
/exp_scripts/loss_improved_wgan_2001.py
|
754e8a62540576922f21b77b9f49c33f10940155
|
[] |
no_license
|
jessemin/GeneGan
|
1c1a97b6ab566a7c556ce1452e4c35530b0b626c
|
2ad94e842cfaee531d7e13af7472b623bf96de30
|
refs/heads/master
| 2021-09-13T13:02:33.629138
| 2018-04-30T06:57:13
| 2018-04-30T06:57:13
| 112,046,600
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 420
|
py
|
import os
os.chdir('../exp_notebooks')
os.system('python loss_improved_wgan.py\
-w=2001\
-save=loss_improved_wgan_2001\
-sample_num=10000\
-n_critic=1\
-d_freq=32\
-w_weight=0.08\
-mse_weight=1.0\
-g_lr=0.001\
-d_lr=0.00001\
-m=/users/jesikmin/GeneGan/exp_notebooks/models/cnn_2001_2/best_model.h5\
-cuda=2')
|
[
"jesikmin@stanford.edu"
] |
jesikmin@stanford.edu
|
b010766d32899caed0d8e553c520db23fd3eac74
|
cfa632132cd29a0b58e7f45b441ea4f62b0f5eba
|
/flytekit/bin/entrypoint.py
|
7c386888f9e1949c02eb5c8e991015c2e35d7d39
|
[
"Apache-2.0"
] |
permissive
|
chixcode/flytekit
|
5b4f2e687e82a0d6527411afcdaf0929a94adb13
|
f901aee721847c6264d44079d4fa31a75b8876e1
|
refs/heads/master
| 2020-08-24T00:06:02.808187
| 2019-10-14T18:34:19
| 2019-10-14T18:34:19
| 216,729,272
| 1
| 0
|
Apache-2.0
| 2019-10-22T05:22:01
| 2019-10-22T05:22:00
| null |
UTF-8
|
Python
| false
| false
| 4,641
|
py
|
from __future__ import absolute_import
import importlib as _importlib
import os as _os
import click as _click
import datetime as _datetime
import random as _random
from flyteidl.core import literals_pb2 as _literals_pb2
from flytekit.common import utils as _utils
from flytekit.common.exceptions import scopes as _scopes, system as _system_exceptions
from flytekit.configuration import internal as _internal_config, TemporaryConfiguration as _TemporaryConfiguration
from flytekit.engines import loader as _engine_loader
from flytekit.interfaces.data import data_proxy as _data_proxy
from flytekit.interfaces import random as _flyte_random
from flytekit.models import literals as _literal_models
def _compute_array_job_index():
# type () -> int
"""
Computes the absolute index of the current array job. This is determined by summing the compute-environment-specific
environment variable and the offset (if one's set). The offset will be set and used when the user request that the
job runs in a number of slots less than the size of the input.
:rtype: int
"""
offset = 0
if _os.environ.get('BATCH_JOB_ARRAY_INDEX_OFFSET'):
offset = int(_os.environ.get('BATCH_JOB_ARRAY_INDEX_OFFSET'))
return offset + int(_os.environ.get(_os.environ.get('BATCH_JOB_ARRAY_INDEX_VAR_NAME')))
def _map_job_index_to_child_index(local_input_dir, datadir, index):
local_lookup_file = local_input_dir.get_named_tempfile('indexlookup.pb')
idx_lookup_file = _os.path.join(datadir, 'indexlookup.pb')
# if the indexlookup.pb does not exist, then just return the index
if not _data_proxy.Data.data_exists(idx_lookup_file):
return index
_data_proxy.Data.get_data(idx_lookup_file, local_lookup_file)
mapping_proto = _utils.load_proto_from_file(_literals_pb2.LiteralCollection, local_lookup_file)
if len(mapping_proto.literals) < index:
raise _system_exceptions.FlyteSystemAssertion(
"dynamic task index lookup array size: {} is smaller than lookup index {}".format(
len(mapping_proto.literals), index))
return mapping_proto.literals[index].scalar.primitive.integer
@_scopes.system_entry_point
def execute_task(task_module, task_name, inputs, output_prefix, test):
with _TemporaryConfiguration(_internal_config.CONFIGURATION_PATH.get()):
with _utils.AutoDeletingTempDir('input_dir') as input_dir:
# Load user code
task_module = _importlib.import_module(task_module)
task_def = getattr(task_module, task_name)
if not test:
local_inputs_file = input_dir.get_named_tempfile('inputs.pb')
# Handle inputs/outputs for array job.
if _os.environ.get('BATCH_JOB_ARRAY_INDEX_VAR_NAME'):
job_index = _compute_array_job_index()
# TODO: Perhaps remove. This is a workaround to an issue we perceived with limited entropy in
# TODO: AWS batch array jobs.
_flyte_random.seed_flyte_random(
"{} {} {}".format(
_random.random(),
_datetime.datetime.utcnow(),
job_index
)
)
# If an ArrayTask is discoverable, the original job index may be different than the one specified in
# the environment variable. Look up the correct input/outputs in the index lookup mapping file.
job_index = _map_job_index_to_child_index(input_dir, inputs, job_index)
inputs = _os.path.join(inputs, str(job_index), 'inputs.pb')
output_prefix = _os.path.join(output_prefix, str(job_index))
_data_proxy.Data.get_data(inputs, local_inputs_file)
input_proto = _utils.load_proto_from_file(_literals_pb2.LiteralMap, local_inputs_file)
_engine_loader.get_engine().get_task(task_def).execute(
_literal_models.LiteralMap.from_flyte_idl(input_proto),
context={'output_prefix': output_prefix}
)
@_click.command('pyflyte-execute')
@_click.option('--task-module', required=True)
@_click.option('--task-name', required=True)
@_click.option('--inputs', required=True)
@_click.option('--output-prefix', required=True)
@_click.option('--test', is_flag=True)
def execute_task_cmd(task_module, task_name, inputs, output_prefix, test):
_click.echo(_utils.get_version_message())
execute_task(task_module, task_name, inputs, output_prefix, test)
|
[
"matthewsmith@lyft.com"
] |
matthewsmith@lyft.com
|
24bde7208335cd60e375b68ff00e7581c4892bd2
|
9743d5fd24822f79c156ad112229e25adb9ed6f6
|
/xai/brain/wordbase/nouns/_bug.py
|
72b907dcbd8c7bf8f279c2a1b40116f869da8dfd
|
[
"MIT"
] |
permissive
|
cash2one/xai
|
de7adad1758f50dd6786bf0111e71a903f039b64
|
e76f12c9f4dcf3ac1c7c08b0cc8844c0b0a104b6
|
refs/heads/master
| 2021-01-19T12:33:54.964379
| 2017-01-28T02:00:50
| 2017-01-28T02:00:50
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 600
|
py
|
#calss header
class _BUG():
def __init__(self,):
self.name = "BUG"
self.definitions = [u'a very small insect', u'a bacteria or a virus causing an illness that is usually not serious: ', u'a mistake or problem in a computer program: ', u'a very small device fixed on to a phone or hidden in a room, that allows you to listen to what people are saying without them knowing', u'a very strong enthusiasm for something: ']
self.parents = []
self.childen = []
self.properties = []
self.jsondata = {}
self.specie = 'nouns'
def run(self, obj1 = [], obj2 = []):
return self.jsondata
|
[
"xingwang1991@gmail.com"
] |
xingwang1991@gmail.com
|
aa46cc71da38d2f98ed82ae05dd93bd331c9be5f
|
a5937c0b4d122fa3189fee6414d5be1316d9c4f9
|
/src/eteaching.policy/eteaching/policy/tests/base.py
|
354a81695bbf30c41210153f374cad989432b7ec
|
[] |
no_license
|
zopyx/eteaching.org
|
e3eac4e53506cd9b1c65ac681a3138a5c7ac99b7
|
d326cbf7734f538132df84290e768625df43ada6
|
refs/heads/master
| 2020-12-25T19:14:59.692335
| 2013-12-04T09:47:19
| 2013-12-04T09:47:19
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,489
|
py
|
################################################################
# eteaching.policy
# (C) 2013, ZOPYX Ltd.
################################################################
import os
import unittest2
from plone.app.testing import PloneSandboxLayer
from plone.app.testing import applyProfile
from plone.app.testing import PLONE_FIXTURE
from plone.app.testing import IntegrationTesting
from plone.app.testing import setRoles, login
from plone.testing import z2
from zope.configuration import xmlconfig
from AccessControl.SecurityManagement import newSecurityManager
from zope.component import getUtility
import plone.app.contenttypes
import plone.app.widgets
import plone.app.event
import eteaching.policy
import z3c.jbot
class PolicyFixture(PloneSandboxLayer):
defaultBases = (PLONE_FIXTURE,)
def setUpZope(self, app, configurationContext):
xmlconfig.file('meta.zcml', z3c.jbot, context=configurationContext)
for mod in [plone.app.contenttypes,
plone.app.widgets,
plone.app.event,
eteaching.policy]:
xmlconfig.file('configure.zcml', mod, context=configurationContext)
# Install product and call its initialize() function
z2.installProduct(app, 'Products.DateRecurringIndex')
# z2.installProduct(app, 'eteaching.policy')
def setUpPloneSite(self, portal):
# Install into Plone site using portal_setup
applyProfile(portal, 'eteaching.policy:default')
portal.acl_users.userFolderAddUser('god', 'dummy', ['Manager'], [])
setRoles(portal, 'god', ['Manager'])
portal.acl_users.userFolderAddUser('ppr', 'dummy', ['PPR'], [])
setRoles(portal, 'ppr', ['Member', 'PPR'])
portal.acl_users.userFolderAddUser('member', 'dummy', ['Member'], [])
setRoles(portal, 'member', ['Member'])
login(portal, 'god')
def tearDownZope(self, app):
# Uninstall product
z2.uninstallProduct(app, 'eteaching.policy')
POLICY_FIXTURE = PolicyFixture()
POLICY_INTEGRATION_TESTING = IntegrationTesting(bases=(POLICY_FIXTURE,), name="PolicyFixture:Integration")
class TestBase(unittest2.TestCase):
layer = POLICY_INTEGRATION_TESTING
@property
def portal(self):
return self.layer['portal']
def login(self, uid='god'):
""" Login as manager """
user = self.portal.acl_users.getUser(uid)
newSecurityManager(None, user.__of__(self.portal.acl_users))
|
[
"yet@gmx.de"
] |
yet@gmx.de
|
f59946c1cad0866529f675844754b8f4572cffc7
|
c25a17f0f82c2eebca55bbe180f4c2ccbbf00292
|
/01_Jump_to_python/Chap06/6장_practice/practice3_게시물.py
|
3a62507a88248db26e85f59d32ca417fa4481483
|
[] |
no_license
|
superbeom97/jumpjump
|
a0a4da6f0df0483ef0cef9833b5fe0402ec63c9c
|
fc45efce2a2b00c614aa5aa54b36be1572ed40ce
|
refs/heads/master
| 2021-09-15T09:35:16.903857
| 2018-05-30T00:00:59
| 2018-05-30T00:00:59
| 111,883,402
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 816
|
py
|
def getTotalPage(m, n):
try:
if int(m) <= int(n):
s = '1'
print("게시물 총 건 수: %s, 한 페이지에 보여줄 게시물 수: %s, 총 페이지 수: %s" % (m, n, s))
if int(m) % int(n) == 0:
s = int(m) / int(n)
print("게시물 총 건 수: %s, 한 페이지에 보여줄 게시물 수: %s, 총 페이지 수: %s" % (m, n, int(s)))
elif int(m) > int(n):
s = int(m) / int(n) + 1
print("게시물 총 건 수: %s, 한 페이지에 보여줄 게시물 수: %s, 총 페이지 수: %s" % (m, n, int(s)))
except:
pass
f = open("D:\Python_workspace\jumpjump\Chap06\\6장_practice\\condition.txt", 'r')
for i in f.readlines():
aa = i.split()
m = aa[0]
n = aa[1]
getTotalPage(m, n)
|
[
"beom9790@naver.com"
] |
beom9790@naver.com
|
9f9ee2d6aecd744bf78e114b35591f707504a091
|
e288180c977c8fccf31c00bb74b7e8f56ee69303
|
/vkrb/search/serializers.py
|
65be7a59e320ecf0806ee5672e5dfd061575e8ff
|
[] |
no_license
|
kaluginadaria/vkrb-back
|
32e0c9aef7a647ea2a2e399c8d999622e993a433
|
d037baaa9f17cb038d41dda5dfbf1dbb56acdf90
|
refs/heads/master
| 2022-12-07T23:36:32.902662
| 2019-05-22T15:06:31
| 2019-05-22T15:06:31
| 179,382,015
| 0
| 0
| null | 2022-11-22T02:38:25
| 2019-04-03T22:52:30
|
Python
|
UTF-8
|
Python
| false
| false
| 3,195
|
py
|
from django.apps import apps
from django_serializer.serializer.base import ModelSerializer
from django_serializer.serializer.fields import SerializerField
from vkrb.activity.serializers import ActivityGiItemSerializer, ActivitySiItemSerializer
from vkrb.calc.serializers import FormulaSerializer
from vkrb.digest.serializers import DigestSerializer, ShortArticleSerializer, ArticleSerializer
from vkrb.education.serializers import (
LiteratureSerializer,
ScienceArticleSerializer,
CatalogItemSerializer,
InternalEducationSerializer)
from vkrb.event.serializers import EventSerializer
from vkrb.matrix.serializers import MatrixItemSerializer
from vkrb.newsitem.serializers import NewsItemSerializer
from vkrb.recourse.serializers import RecourseSerializer
from vkrb.search.models import SearchEntity
from vkrb.text.serializers import TextSerializer
class SearchEntitySerializer(ModelSerializer):
SERIALIZERS = {
'education.literature': LiteratureSerializer,
'education.sciencearticle': ScienceArticleSerializer,
'education.catalogitem': CatalogItemSerializer,
'education.internaleducation': InternalEducationSerializer,
'newsitem.newsitem': NewsItemSerializer,
'digest.digest': DigestSerializer,
'digest.article': ArticleSerializer,
'text.text': TextSerializer,
'recourse.recourse': RecourseSerializer,
'event.event': EventSerializer,
'calc.formula': FormulaSerializer,
'matrix.matrixitem': MatrixItemSerializer,
'activity.giitem': ActivityGiItemSerializer,
'activity.siitem': ActivitySiItemSerializer
}
entity = SerializerField(source='get_entity')
def __init__(self, *args, **kwargs):
self.user = kwargs.pop('user')
super().__init__(*args, **kwargs)
class Meta:
model = SearchEntity
exclude = ('search_vector', 'body', 'id')
def get_serializer(self, entity_type, entity):
kwargs = {}
serializer_class = self.SERIALIZERS.get(entity_type)
if serializer_class is None:
raise ValueError(f'serializer for entity `{entity_type}` '
f'does not exist')
if issubclass(serializer_class, (RecourseSerializer,)):
kwargs['request_user'] = self.user
elif not issubclass(serializer_class, (
TextSerializer,
EventSerializer,
MatrixItemSerializer
)):
kwargs['user'] = self.user
return serializer_class(entity, **kwargs)
def get_entity(self, obj):
entity = apps.get_model(
obj.entity_type
).objects.filter(pk=obj.entity_id).first()
serializer = self.get_serializer(obj.entity_type, entity)
return serializer.serialize()
def serialize(self):
reformat_res = {}
res = super().serialize()
for item in res:
entity_type = item['entity_type']
entity = item['entity']
if not entity:
continue
reformat_res.setdefault(entity_type, [])
reformat_res[entity_type].append(entity)
return reformat_res
|
[
"d.kalugina@ktsstudio.ru"
] |
d.kalugina@ktsstudio.ru
|
7630b551f1163ab91f4b5075737ad96d95b94763
|
866de2c682a63e255d6d0016c0eeee70b98dd490
|
/unifiedactivitypoints/studentauth/urls.py
|
98510be2b8c6f956bd8d4c60c6d25e5a5dec37c8
|
[
"MIT"
] |
permissive
|
FossMec/activityPointsApp
|
10c47e6a96ebe44b1310ad4f3c695055e35d69b8
|
2b58c96bfcd11327883dcd5bd7ddc1feb617ee49
|
refs/heads/master
| 2021-08-08T21:39:53.212979
| 2017-11-11T09:33:10
| 2017-11-11T09:33:10
| 107,976,551
| 1
| 8
| null | 2017-11-11T07:24:08
| 2017-10-23T12:05:04
|
JavaScript
|
UTF-8
|
Python
| false
| false
| 237
|
py
|
from django.conf.urls import url
from studentauth import views
urlpatterns = [
url(r'^signup/$',views.signup,name="signuppage"),
url(r'^$',views.Login.as_view(),name="login"),
url(r'^succes/$',views.Success.as_view(),name='Success'),
]
|
[
"kurian.pro@gmail.com"
] |
kurian.pro@gmail.com
|
f8bcf27f520284575ff8cc9cda7c4884fbd7ad04
|
788925d9dd5f98c8e2453acc6fb46aee2d638c80
|
/test.py
|
ca2d956202e11a63a2e4ff639a3b098bebce4404
|
[] |
no_license
|
abilian/abilian-sbe-demo
|
7e5a7e518e20886dbf73e96b2b054c6c1f846d58
|
bcd6a97060d28429e16bd2f7cb209e22d7bf1c24
|
refs/heads/master
| 2021-01-22T02:53:38.157020
| 2013-11-22T23:02:43
| 2013-11-22T23:02:43
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 378
|
py
|
#!./bin/python
import pexpect
import urllib
import time
# Some random number
PORT = 4034
HOME = "http://0.0.0.0:{}/".format(PORT)
p = pexpect.spawn("./manage.py run -p {}".format(PORT))
try:
p.expect("Running on {}".format(HOME))
# Just in case
time.sleep(5)
page = urllib.urlopen(HOME).read()
assert "Welcome to Abilian" in page
finally:
p.kill(9)
p.close()
|
[
"sf@fermigier.com"
] |
sf@fermigier.com
|
db507e34af96e154ddea79d5af892cdf6b728bbc
|
15608a179d97e399ca08be0f017296c4f4ded881
|
/releases/migrations/0001_squashed_0004_make_release_date_nullable.py
|
cf9cc83a5150cd40db8cbb8c60d47dc7806e7af4
|
[
"BSD-3-Clause"
] |
permissive
|
shivangi1801/djangoproject.com
|
1f33bef39b8c3cce136f47008eee5d4aae8d6aa4
|
e51eba97f7e226d2e9deb31f8c23b1e00df04b9c
|
refs/heads/master
| 2021-01-16T20:37:51.813671
| 2016-02-08T22:51:01
| 2016-02-08T22:51:01
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,619
|
py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9.1 on 2016-01-21 07:11
from __future__ import unicode_literals
import datetime
from django.db import migrations, models
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='Release',
fields=[
('version', models.CharField(max_length=16, primary_key=True, serialize=False)),
('date', models.DateField(blank=True, default=datetime.date.today, help_text="Leave blank if the release date isn't know yet, typically if you're creating the final release just after the alpha because you want to build docs for the upcoming version.", null=True, verbose_name='Release date')),
('major', models.PositiveSmallIntegerField(editable=False)),
('minor', models.PositiveSmallIntegerField(editable=False)),
('micro', models.PositiveSmallIntegerField(editable=False)),
('status', models.CharField(choices=[('a', 'alpha'), ('b', 'beta'), ('c', 'release candidate'), ('f', 'final')], editable=False, max_length=1)),
('iteration', models.PositiveSmallIntegerField(editable=False)),
('is_lts', models.BooleanField(default=False, verbose_name='Long term support release')),
('eol_date', models.DateField(blank=True, help_text="Leave blank if the end of life date isn't known yet, typically because it depends on the release date of a later version.", null=True, verbose_name='End of life date')),
],
),
]
|
[
"timograham@gmail.com"
] |
timograham@gmail.com
|
5fb03f371d4e18de6a83f4e212e3babff6434115
|
5e36d216d31f9f5d56722e230cb468beba15c2a8
|
/src/scriptsite/main/subversion.py
|
a5c8bff6f21c04cc0d890afe77fcd47415a3d501
|
[] |
no_license
|
isotoma/ScriptSite
|
d988a0da8b70681502a1b02eb1f78acfe035f545
|
f401be7e2c6455208ac881d72559e5819fd2ecbd
|
refs/heads/master
| 2016-09-06T08:35:52.898581
| 2011-03-22T21:02:40
| 2011-03-22T21:02:40
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 4,277
|
py
|
import tempfile
import os
import shutil
from datetime import datetime
from lxml import etree
import pysvn
from django.core.files import File
from scriptsite import settings
from scriptsite.main.models import TestScript
def make_script_model_from_file(script_file, flavour, revision):
""" Create a model object from the file that we have created """
ts = TestScript()
ts.flavour = flavour
ts.date_uploaded = datetime.now()
ts.script_file.save(flavour + '.xml', File(file(script_file)), save = False)
ts.revision = revision
return ts
def get_output_directory(root_path):
final_path = os.path.join(root_path, 'processed')
return final_path
def make_script_file_from_checkout(root_path, flavour):
""" Take our export, turn it into the single canonical XML file """
path_to_config = os.path.join(root_path, flavour + '.xml')
config_file = open(path_to_config).read()
# parse the config file into memory
config_tree = etree.fromstring(config_file)
final_test_doc = etree.Element("test_doc")
# find all the mbots in the config file
for mbot in config_tree.findall('mbot'):
final_mbot_element = etree.Element('test_group')
final_mbot_element.attrib['name'] = mbot.attrib['name']
name = mbot.attrib['name']
# available tests should live in a directory matching the name of the mbot
test_dir = os.path.join(root_path, name)
directory_available_tests = os.listdir(test_dir)
# make sure we only have xml files
available_tests = [x for x in directory_available_tests if x.endswith('.xml') and not x == name + '.xml']
# get the filenames to exclude from the config file
# add .xml as tests are specified by id, not filenames....
excluded_tests = [test.text +".xml" for test in mbot.findall('exclude')]
test_list = [os.path.join(test_dir, test) for test in available_tests if test not in excluded_tests]
test_list = sorted(test_list)
# Append our tests to the master test document
for test in test_list:
parsed_test = etree.parse(test)
final_mbot_element.append(parsed_test.getroot())
# add the metadata for the test
metadata = etree.parse(os.path.join(test_dir, name +'.xml'))
for child in metadata.getroot().getchildren():
final_mbot_element.append(child)
final_test_doc.append(final_mbot_element)
file_generation_time = datetime.now().strftime("%Y-%m-%d %H:%M")
final_test_doc.attrib['flavour'] = flavour
final_test_doc.attrib['generated'] = file_generation_time
final_test_doc.append(config_tree.find('script_version'))
# we are done here, write it to the same directory as the uploaded ones,
# and return
output_dir = get_output_directory(root_path)
os.makedirs(output_dir)
output_file = open(os.path.join(output_dir, flavour + ".xml"), 'w')
s = etree.tostring(final_test_doc, pretty_print = True).encode('UTF-8')
output_file.write(s)
return output_file.name
def get_from_subversion(repo_url, revision, username, password, flavour):
""" Get the XML file from subversion """
svn_username = username
svn_password = password
# make a directory to checkout to
temp_dir = tempfile.mkdtemp()
export_dir = os.path.join(temp_dir, 'export')
print export_dir
# Get a checkout
client = pysvn.Client()
#set auth details
client.set_default_username(username)
client.set_default_password(password)
try:
# attempt an export (we don't need a checkout, just the files)
client.export(repo_url, export_dir)
finally:
# just in case
client.set_default_username("")
client.set_default_password("")
try:
# so, we've got stuff from subversion
# we should probably do something with it, no?
script_file = make_script_file_from_checkout(export_dir, flavour)
# now we have the file, make a db model
ts = make_script_model_from_file(script_file, flavour, "0")
ts.save()
finally:
# tidy up
shutil.rmtree(temp_dir)
# aand we're done, let's get out of here
return ts
|
[
"tom@howrandom.net"
] |
tom@howrandom.net
|
65a90c400bd422e8ee24ab483c5144d7c5b7096d
|
de24f83a5e3768a2638ebcf13cbe717e75740168
|
/moodledata/vpl_data/420/usersdata/328/87412/submittedfiles/exe11.py
|
271fb87e667691c2627e9c401cae10b6c33cfb19
|
[] |
no_license
|
rafaelperazzo/programacao-web
|
95643423a35c44613b0f64bed05bd34780fe2436
|
170dd5440afb9ee68a973f3de13a99aa4c735d79
|
refs/heads/master
| 2021-01-12T14:06:25.773146
| 2017-12-22T16:05:45
| 2017-12-22T16:05:45
| 69,566,344
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 177
|
py
|
# -*- coding: utf-8 -*-
n=int(input('Digite um número com 8 dígitos:'))
soma=0
if (n<10000000):
print('NAO SEI')
else:
def soma (x1+x2+x3+x4+x5+x6+x7+x8)
|
[
"rafael.mota@ufca.edu.br"
] |
rafael.mota@ufca.edu.br
|
0b869fa5754e8a99cf33d79e5c9b50c1c1dc606f
|
25333fa1726e11adc66c25820751432d4eaebfdc
|
/alexa.py
|
d16a4b51fb45363e1a1bd19529bf1f430dba4733
|
[] |
no_license
|
aminhp93/alexa
|
a769d510b3373dd142721980d398a6253ed8430e
|
03fa7b760302ec34de20f2cf28b356ff4e27ec45
|
refs/heads/master
| 2021-01-13T08:36:59.396756
| 2016-09-30T18:27:09
| 2016-09-30T18:27:09
| 69,312,807
| 1
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,983
|
py
|
import logging
from random import randint
from flask import Flask, render_template
from flask_ask import Ask, statement, question, session
from twilio.rest import TwilioRestClient
print("I AM RIGHT HERE AT THE TOP OF THE FILE")
app = Flask(__name__)
ask = Ask(app, "/")
logging.getLogger("flask_ask").setLevel(logging.DEBUG)
# --------------------Dont touch below!--------------------------
@ask.launch
def launch_skill():
welcome_msg = render_template('welcome')
return question(welcome_msg)
@ask.intent("DojoInfoIntent")
def dojo_info():
response = render_template("dojo_info_template")
return statement(response)
@ask.intent("AMAZON.HelpIntent")
def dojo_help():
response = render_template("help_template")
return question(response)
#--------OK Above-----------
# @ask.intent("DojoStackIntent", convert={'City': str})
# def dojo_stacks(City):
# response = ''
# if City == "San Jose":
# response = render_template("san_jose_stacks", city=City)
# elif City == "Seattle":
# response = render_template("seattle_stacks", city=City)
# elif City == "Chicago":
# response = render_template("chicago_stacks", city=City)
# elif City == "Dallas":
# response = render_template("dallas_stacks", city=City)
# elif City == "Burbank":
# response = render_template("burbank_stacks", city=City)
# elif City == "Washington":
# response = render_template("washington_stacks", city=City)
# else:
# response = render_template("invalid_city")
# return statement(response)
#-----------------------Ok above----------
#--------------Custom functions below--------
@ask.intent("TextBrendenIntent")
def touch_face_with_Brenden():
print("I AM RIGHT HERE")
response = render_template("brendan_template_1")
return statement(response)
@ask.intent("GetTouchFaceIntent")
def get_touchface_response():
response = render_template("brendan_template_2")
return statement(response)
@ask.intent("DojoBrendenIntent")
def dojo_Brenden_response():
response = render_template("brendan_template_3")
return statement(response)
@ask.intent("AskBrendan")
def ask_brendan():
response = render_template("brendan_template_4")
return statement(response)
@ask.intent("twilioIntent")
def twilioIntentHandler():
account_sid = "AC7622914a70ec20b746fa9f5200f94a79"
auth_token = "f61cf7f88337ec156669d6f08ac693cf"
client = TwilioRestClient(account_sid, auth_token)
message = client.messages.create(to="+7142135025", from_="+16578889320", body="Hey Brendan, I touch your face lol!!!!")
response = render_template("message_sent_to")
return question(response)
@ask.intent("GroupTextIntent", convert={'Name': str})
def GroupTextIntentHandler(Name):
account_sid = "AC7622914a70ec20b746fa9f5200f94a79"
auth_token = "f61cf7f88337ec156669d6f08ac693cf"
client = TwilioRestClient(account_sid, auth_token)
if Name == "Andy":
message = client.messages.create(to="+18057043552", from_="+16578889320", body="Hello Andy you are doing well today!")
response = render_template("message_sent", name = Name)
elif Name == "Annet":
message = client.messages.create(to="+15102142298", from_="+16578889320", body="Hello Annet you are doing well today!")
response = render_template("message_sent", name = Name)
elif Name == "Luba":
message = client.messages.create(to="+17032091080", from_="+16578889320", body="Hello Tuba you are doing well today!")
response = render_template("message_sent", name = Name)
elif Name == "Minh":
message = client.messages.create(to="+17142135025", from_="+16578889320", body="Hello Minh you are doing well today!")
response = render_template("message_sent", name = Name)
else:
response = render_template("message_not_sent")
return question(response)
if __name__ == '__main__':
app.run(debug=True)
|
[
"minhpn.org.ec@gmail.com"
] |
minhpn.org.ec@gmail.com
|
55e85aa5da138f635b5bedb73c67575e21d7513e
|
6ac2631c256f156d4ddf169e6c67f1fe66ebcaaf
|
/091/pyteacher/app_chat/models.py
|
48e44f13eaba832d4322ad4695b20811a2caee4c
|
[] |
no_license
|
kasaiee/how-to-pyteacher
|
101f106aeeed1b34756cecf502337ff8ee584ff5
|
074a57533f53fd1b8c7f37cd11dbc3b32ab8a08f
|
refs/heads/master
| 2022-12-10T23:50:46.851784
| 2019-07-15T19:31:03
| 2019-07-15T19:31:03
| 187,372,111
| 6
| 4
| null | 2022-12-08T01:55:05
| 2019-05-18T15:08:03
| null |
UTF-8
|
Python
| false
| false
| 3,459
|
py
|
import jdatetime
from django.db import models
from django.urls import reverse
from django.contrib.contenttypes.models import ContentType
from django.contrib.contenttypes.fields import GenericRelation
from django.contrib.contenttypes.fields import GenericForeignKey
from django.utils import timezone
from django.utils.timezone import localtime
from django.contrib.auth import get_user_model
User = get_user_model()
class Chat(models.Model):
user = models.ForeignKey(User, on_delete=models.CASCADE, null=True)
code = models.TextField(null=True, blank=True)
message = models.TextField(null=True)
content_type = models.ForeignKey(ContentType, on_delete=models.CASCADE)
object_id = models.PositiveIntegerField()
content_object = GenericForeignKey()
create_datetime = models.DateTimeField(auto_now_add=True, null=True)
seen = models.BooleanField(null=True, default=False)
seen_datetime = models.DateTimeField(null=True)
def jd_create_datetime(self):
jdatetime.set_locale('fa_IR')
jdatetime.datetime.now().strftime('%A %B')
jd_datetime = jdatetime.datetime.fromgregorian(
year=self.create_datetime.year,
month=self.create_datetime.month,
day=self.create_datetime.day,
hour=self.create_datetime.hour,
monute=self.create_datetime.minute,
second=self.create_datetime.second,
)
return jd_datetime.strftime('%A, %d %B %y %H:%M:%S')
def status_color(self):
return 'grey' if self.seen else 'teal'
def status(self):
return 'دیده شده' if self.seen else 'دیده نشده'
def is_done_exercise(self):
return self.content_object in [e.exercise for e in self.user.exercisebystudent_set.all()]
def done_color(self):
return 'teal' if self.is_done_exercise() else 'red'
def done_status(self):
return 'انجام شده' if self.is_done_exercise() else 'انجام نشده'
def is_student(self):
return 'students' in [group.name for group in self.user.groups.all()]
def is_operator(self):
return not self.is_student()
def __str__(self):
return self.message[:30]
class Ticket(models.Model):
user = models.ForeignKey(User, on_delete=models.CASCADE, null=True)
topic = models.CharField(max_length=60, null=True)
chats = GenericRelation(Chat, null=True)
closed = models.BooleanField(default=False)
create_datetime = models.DateTimeField(default=timezone.now, blank=True)
def status(self):
return 'بسته' if self.closed else 'باز'
def status_color(self):
return 'red' if self.closed else 'blue'
def jd_create_datetime(self):
self.create_datetime = localtime(self.create_datetime)
jdatetime.set_locale('fa_IR')
jdatetime.datetime.now().strftime('%A %B')
jd_datetime = jdatetime.datetime.fromgregorian(
year=self.create_datetime.year,
month=self.create_datetime.month,
day=self.create_datetime.day,
hour=self.create_datetime.hour,
minute=self.create_datetime.minute,
second=self.create_datetime.second,
)
return jd_datetime.strftime('%A, %d %B %y %H:%M:%S')
def get_absolute_url(self):
params = {'id': self.id}
return reverse('app-accounts:ticket-detail', kwargs=params)
def __str__(self):
return self.topic
|
[
"1tapciran@gmail.com"
] |
1tapciran@gmail.com
|
e955056ae8b1a40428c8a44ebd451318d77d7d9a
|
ca7aa979e7059467e158830b76673f5b77a0f5a3
|
/Python_codes/p02823/s163186577.py
|
42c07c5526acc3c86687a8b401d84e30f2355ab1
|
[] |
no_license
|
Aasthaengg/IBMdataset
|
7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901
|
f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8
|
refs/heads/main
| 2023-04-22T10:22:44.763102
| 2021-05-13T17:27:22
| 2021-05-13T17:27:22
| 367,112,348
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 173
|
py
|
n,a,b = map(int,input().split())
if abs(a-b)%2 == 0:
print(abs(a-b)//2)
else:
if a - 1 > n - b:
print((n-b+n-a+1)//2)
else:
print((a-1+b-1+1)//2)
|
[
"66529651+Aastha2104@users.noreply.github.com"
] |
66529651+Aastha2104@users.noreply.github.com
|
8022f81f1c964930db4e3dd656af1c871f260f99
|
d85f8b95e21523840d65517ab99baeb04a33e297
|
/demo/settings.py
|
c6b817f3586d8efd959927c2955a83cb937cdde4
|
[
"BSD-3-Clause"
] |
permissive
|
gregplaysguitar/django-trolley
|
4947bd26e3eea7a33dd138c05262a9f2a45d0bb3
|
187cbe883961b13f995998cc5fa80c6a879560a8
|
refs/heads/master
| 2021-01-21T12:07:53.829939
| 2014-05-20T22:37:48
| 2014-05-20T22:37:48
| 632,479
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 4,958
|
py
|
# -*- coding: utf-8 -*-
# Django settings for demo project.
import os, sys, re
PROJECT_ROOT = os.path.dirname(globals()["__file__"])
for directory in [os.path.join(PROJECT_ROOT, dir) for dir in ('.', '..')]:
if not directory in sys.path:
sys.path.insert(0, directory)
DEBUG = True
TEMPLATE_DEBUG = DEBUG
ADMINS = (
# ('Your Name', 'your_email@example.com'),
)
MANAGERS = ADMINS
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(PROJECT_ROOT, 'demo.db'),
}
}
# Local time zone for this installation. Choices can be found here:
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
# although not all choices may be available on all operating systems.
# On Unix systems, a value of None will cause Django to use the same
# timezone as the operating system.
# If running in a Windows environment this must be set to the same as your
# system time zone.
TIME_ZONE = 'Pacific/Auckland'
# Language code for this installation. All choices can be found here:
# http://www.i18nguy.com/unicode/language-identifiers.html
LANGUAGE_CODE = 'en-us'
SITE_ID = 1
# If you set this to False, Django will make some optimizations so as not
# to load the internationalization machinery.
USE_I18N = False
# If you set this to False, Django will not format dates, numbers and
# calendars according to the current locale
USE_L10N = False
# Absolute filesystem path to the directory that will hold user-uploaded files.
# Example: "/home/media/media.lawrence.com/media/"
MEDIA_ROOT = ''
# URL that handles the media served from MEDIA_ROOT. Make sure to use a
# trailing slash.
# Examples: "http://media.lawrence.com/media/", "http://example.com/media/"
MEDIA_URL = ''
# Absolute path to the directory static files should be collected to.
# Don't put anything in this directory yourself; store your static files
# in apps' "static/" subdirectories and in STATICFILES_DIRS.
# Example: "/home/media/media.lawrence.com/static/"
STATIC_ROOT = ''
# URL prefix for static files.
# Example: "http://media.lawrence.com/static/"
STATIC_URL = '/static/'
# URL prefix for admin static files -- CSS, JavaScript and images.
# Make sure to use a trailing slash.
# Examples: "http://foo.com/static/admin/", "/static/admin/".
ADMIN_MEDIA_PREFIX = '/static/admin/'
# Additional locations of static files
STATICFILES_DIRS = (
# Put strings here, like "/home/html/static" or "C:/www/django/static".
# Always use forward slashes, even on Windows.
# Don't forget to use absolute paths, not relative paths.
)
# List of finder classes that know how to find static files in
# various locations.
STATICFILES_FINDERS = (
'django.contrib.staticfiles.finders.FileSystemFinder',
'django.contrib.staticfiles.finders.AppDirectoriesFinder',
# 'django.contrib.staticfiles.finders.DefaultStorageFinder',
)
# Make this unique, and don't share it with anybody.
SECRET_KEY = '*u@at7*1xx=3^=nfnq^fgik(_=mmu6u1&ldx6^svem^dvjt=+1'
# List of callables that know how to import templates from various sources.
TEMPLATE_LOADERS = (
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
# 'django.template.loaders.eggs.Loader',
)
MIDDLEWARE_CLASSES = (
'django.middleware.common.CommonMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
)
ROOT_URLCONF = 'demo.urls'
TEMPLATE_DIRS = (
# Put strings here, like "/home/html/django_templates" or "C:/www/django/templates".
# Always use forward slashes, even on Windows.
# Don't forget to use absolute paths, not relative paths.
os.path.join(PROJECT_ROOT, 'templates')
)
INSTALLED_APPS = (
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'django.contrib.messages',
#'django.contrib.staticfiles',
'django.contrib.admin',
'shop',
'cart',
'payment',
)
# A sample logging configuration. The only tangible logging
# performed by this configuration is to send an email to
# the site admins on every HTTP 500 error.
# See http://docs.djangoproject.com/en/dev/topics/logging for
# more details on how to customize your logging configuration.
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'handlers': {
'mail_admins': {
'level': 'ERROR',
'class': 'django.utils.log.AdminEmailHandler'
}
},
'loggers': {
'django.request': {
'handlers': ['mail_admins'],
'level': 'ERROR',
'propagate': True,
},
}
}
CART_PAYMENT_BACKEND = 'cart.payment.manual'
EMAIL_HOST = 'localhost'
EMAIL_PORT = 1025
EMAIL_HOST_USER = ''
EMAIL_HOST_PASSWORD = ''
EMAIL_USE_TLS = False
|
[
"gregplaysguitar@gmail.com"
] |
gregplaysguitar@gmail.com
|
7006aeddebbec67a7e4f97e57b36acf2e9faa98e
|
76dd191c092c836f0f72a364bda133bdb37c5cbc
|
/system/djangoapp/forms.py
|
1778f78f69b8a044baa2c3a41b664ee5f6178c3f
|
[] |
no_license
|
PreritBhandari/Django-Assignment-II-
|
36619b337c5f6454e8729cb1ae3df2bfa400c243
|
808e9d47bbc7f78f81577a45356cb34839713ccb
|
refs/heads/master
| 2022-11-25T09:07:07.423648
| 2020-07-29T11:47:12
| 2020-07-29T11:47:12
| 283,487,872
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,116
|
py
|
from django import forms
from django.contrib.auth.forms import UserCreationForm
from django.contrib.auth.models import User
from verified_email_field.forms import VerifiedEmailField
class register_form(forms.Form):
first_name = forms.CharField(max_length=30)
last_name = forms.CharField(max_length=150)
email = forms.EmailField()
password = forms.CharField(max_length=200, widget=forms.PasswordInput())
confirm_password = forms.CharField(max_length=200, widget=forms.PasswordInput())
def clean_username(self):
if User.objects.filter(username=self.cleaned_data['email']).exists():
raise forms.ValidationError("This email is already taken !")
return self.cleaned_data['username']
def clean(self):
password = self.cleaned_data['password']
confirm_password = self.cleaned_data['confirm_password']
if password != confirm_password:
raise forms.ValidationError("Passwords do not match !")
class login_form(forms.Form):
email = forms.EmailField()
password = forms.CharField(max_length=128, widget=forms.PasswordInput)
|
[
"patrioticprerit@gmail.com"
] |
patrioticprerit@gmail.com
|
436542025ea7dafd39e2fcf4fe88ff79492c88df
|
086aa68683c71d2e4b7b584eda4536f5cc51c27c
|
/examples-tk499-lvgl/exams/widgets_ex/tabview/__init__.py
|
a527b46c199edeeb8c53d5f206a677cf7e300027
|
[
"MIT"
] |
permissive
|
bhgv/micropython-tk499
|
1617529d9f4c74d31f844d4eb440e3c670eb105b
|
963d0546dda980aa8502c5be29f6aee3d5cd805e
|
refs/heads/master
| 2023-07-18T07:25:36.592091
| 2021-08-28T20:37:31
| 2021-08-28T20:37:31
| 397,341,868
| 2
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 639
|
py
|
import lv_utils
import lvgl as lv
btn = lv.btn(lv.scr_act())
btn.set_height(lv.SIZE.CONTENT)
btn.align(lv.ALIGN.TOP_LEFT, 30, 30)
def foo(e):
if e.get_code() == lv.EVENT.CLICKED:
lv_utils.ex_new_page("exams/widgets_ex/tabview/p1.py")
btn.add_event_cb(foo, lv.EVENT.ALL, None)
label = lv.label(btn)
label.set_text("p1.py")
btn = lv.btn(lv.scr_act())
btn.set_height(lv.SIZE.CONTENT)
btn.align(lv.ALIGN.TOP_LEFT, 30, 90)
def foo(e):
if e.get_code() == lv.EVENT.CLICKED:
lv_utils.ex_new_page("exams/widgets_ex/tabview/p2.py")
btn.add_event_cb(foo, lv.EVENT.ALL, None)
label = lv.label(btn)
label.set_text("p2.py")
|
[
"bhgv.empire@gmail.com"
] |
bhgv.empire@gmail.com
|
8dd85d17b5416331df15e80d50e3db5be325686a
|
08153bc5546d434cdc2c1574e024c3b8edab69a3
|
/thesis/scripts/disp_performance.py
|
fda1121e9383c87fd48365df586bb1e58aedc2c1
|
[] |
no_license
|
maxnoe/phd_thesis
|
57c818296e6e0c43d1231116c5131f9024c9304d
|
a68b621cc8b658aa858342a4cfdaed68c3823257
|
refs/heads/master
| 2022-11-30T06:11:22.033077
| 2019-03-04T14:02:45
| 2020-08-17T12:12:49
| 288,170,864
| 3
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 569
|
py
|
from argparse import ArgumentParser
import re
parser = ArgumentParser()
parser.add_argument('log_file')
parser.add_argument('output_base')
args = parser.parse_args()
with open(args.log_file) as f:
log = f.read()
acc = re.search(r'Mean accuracy from CV: (.*)', log).groups()[0]
with open(args.output_base + '_accuracy.tex', 'w') as f:
f.write(r'\num{' + acc.replace('±', r'\pm') + '}\n')
r2 = re.search(r'score from CV: (.*)', log).groups()[0]
with open(args.output_base + '_r2.tex', 'w') as f:
f.write(r'\num{' + r2.replace('±', r'\pm') + '}\n')
|
[
"maximilian.noethe@tu-dortmund.de"
] |
maximilian.noethe@tu-dortmund.de
|
f0aa91eba8c5568951b8f1fb0d90ecf0b928734d
|
d01670aa5bddb47dc414bf01921155610e2a5070
|
/leetcode/053_maximumsubarray.py
|
1e558d7c790472f0f0b466aec2d5417f103da43c
|
[] |
no_license
|
hwillmott/csfundamentals
|
14c7e4253b581cef7046ca035bda038c24a52613
|
832f6a8c0deb0569d3fe0dc03e4564c2d850f067
|
refs/heads/master
| 2020-08-01T12:27:01.914391
| 2020-03-26T16:47:35
| 2020-03-26T16:47:35
| 73,576,522
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 463
|
py
|
class Solution(object):
def maxSubArray(self, nums):
"""
:type nums: List[int]
:rtype: int
"""
if nums is None or len(nums) == 0:
return 0
last = nums[0]
m = last
for n in nums[1:]:
if last > 0:
last = last + n
else:
last = n
if last > m:
m = last
return m
|
[
"harriet.willmott@gmail.com"
] |
harriet.willmott@gmail.com
|
f15582da123ab9fa3195f0c76fc7e5ee2568ffdf
|
2904bba948c795eb0075e6dfb25d7bec93d893f1
|
/mcb_twitter/tweet_mcb/models.py
|
32f9a056c063bdad21eaf3e3ddbcfb6e6c47b991
|
[] |
no_license
|
raprasad/mcb-tweet
|
8cd61766ea0365dff96104c72327611718051d95
|
3117b183a9b619f0bb4f3552c7c954a44728f177
|
refs/heads/master
| 2016-09-06T18:18:41.182394
| 2014-03-26T16:29:42
| 2014-03-26T16:29:42
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 5,196
|
py
|
from django.db import models
from django.core.urlresolvers import reverse
from mcb_website.events.models import CalendarEvent
from mcb_twitter.tweet_mcb.tweeter import assemble_full_tweet
from datetime import datetime
TWEET_STATUS_PK_AWAITING_APPROVAL = 1
TWEET_STATUS_PK_APPROVED = 2
TWEET_STATUS_PK_REJECTED = 3
TWEET_STATUS_PK_TWEETED = 4
TWEET_GROUP_NAME = 'TWEET_GROUP'
class TweetStatus(models.Model):
name = models.CharField(max_length=200, unique=True)
sort_key = models.IntegerField()
description = models.TextField(blank=True)
def __unicode__(self):
return self.name
class Meta:
ordering = ('sort_key', 'name')
verbose_name_plural = 'Tweet statuses'
class MCBTweetEvent(models.Model):
"""
Pre-load CalendarEvents for tweeting
"""
mcb_event = models.ForeignKey(CalendarEvent, verbose_name='MCB Event')
tweet_text = models.CharField(max_length=140)
status = models.ForeignKey(TweetStatus)
reject_tweet = models.BooleanField(default=False, help_text='auto-filled on save')
approved = models.BooleanField(default=False, help_text='auto-filled on save')
tweet_pubdate = models.DateTimeField()
tweet_tag_text = models.CharField(max_length=75, default='#MCB_Event', blank=True)
tweet_short_url = models.URLField(max_length=75, blank=True)
full_tweet = models.CharField(max_length=255, blank=True, help_text='auto-filled on save')
google_id = models.CharField(max_length=255, blank=True, db_index=True)
def view_calendar_event(self):
if not self.mcb_event:
return 'n/a'
url = reverse('admin:events_calendarevent_change', args=(self.mcb_event.id,))
return '<a href="%s">view calendar event</a>' % url
view_calendar_event.allow_tags = True
def set_tweet_to_awaiting_approval_without_save(self):
try:
self.status = TweetStatus.objects.get(pk=TWEET_STATUS_PK_AWAITING_APPROVAL)
except:
pass
def approve_tweet_without_save(self):
try:
self.status = TweetStatus.objects.get(pk=TWEET_STATUS_PK_APPROVED)
except:
pass
def reject_tweet_without_save(self):
try:
self.status = TweetStatus.objects.get(pk=TWEET_STATUS_PK_REJECTED)
except:
pass
def set_status_to_tweeted_without_save(self):
try:
self.status = TweetStatus.objects.get(pk=TWEET_STATUS_PK_TWEETED)
except:
pass
@staticmethod
def get_events_awaiting_approval():
return MCBTweetEvent.objects.filter(tweet_pubdate__gt=datetime.now()\
, status=TweetStatus.objects.get(pk=TWEET_STATUS_PK_AWAITING_APPROVAL)\
).all().order_by('tweet_pubdate')
@staticmethod
def create_tweet_from_calendar_event(cal_event):
if cal_event is None:
return None
if not cal_event.short_url:
cal_event.save()
status_awaiting_approval = TweetStatus.objects.get(pk=TWEET_STATUS_PK_AWAITING_APPROVAL)
mcb_tweet = MCBTweetEvent(mcb_event=cal_event\
, status=status_awaiting_approval\
, tweet_text=cal_event.title[:140]\
, tweet_pubdate=cal_event.start_time\
, tweet_short_url=cal_event.short_url\
, google_id=cal_event.google_id\
)
mcb_tweet.save()
return mcb_tweet
def get_full_tweet(self):
full_tweet = assemble_full_tweet(self.tweet_text\
, self.tweet_short_url\
, self.tweet_tag_text)
if len(full_tweet) <= 140:
return full_tweet
full_tweet = assemble_full_tweet(self.tweet_text\
, self.tweet_short_url\
)
if len(full_tweet) <= 140:
return full_tweet
if self.tweet_text <= 140:
return self.tweet_text
return self.tweet_text[:140]
def save(self, *args, **kwargs):
self.full_tweet = self.get_full_tweet()
if self.full_tweet is None:
self.full_tweet = ''
if self.status.id == TWEET_STATUS_PK_REJECTED:
self.reject_tweet = True
else:
self.reject_tweet = False
if self.status.id in (TWEET_STATUS_PK_APPROVED, TWEET_STATUS_PK_TWEETED) :
self.approved = True
else:
self.approved = False
super(MCBTweetEvent, self).save(*args, **kwargs)
def __unicode__(self):
return '%s' % self.tweet_text
class Meta:
verbose_name = 'MCB Tweet Event'
ordering = ('status__sort_key', '-tweet_pubdate', 'tweet_text')
|
[
"raman_prasad@harvard.edu"
] |
raman_prasad@harvard.edu
|
3249525f584c43396625c8d5adc155bf19d3e031
|
536b2dc4d0541f6f8a71d9ef8cfa6d449c5db69a
|
/src/files/REST-Linux/scripts/set_indicatorled.py
|
8dfa96872471db2996f43be9946da8db6cf8cde1
|
[
"MIT"
] |
permissive
|
Huawei/Server_Management_Plugin_Puppet
|
54529c5305944e802ae799955287ba013e06f536
|
3a549dfa28b3522744932e7716064286a5f2f118
|
refs/heads/master
| 2021-06-19T17:48:12.253094
| 2019-09-20T09:06:12
| 2019-09-20T09:06:12
| 150,839,359
| 2
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,975
|
py
|
# -*- coding:utf-8 -*-
"""
#=========================================================================
# @Description: Set indicator State of Chassis
#
# @author:
# @Date:
#=========================================================================
"""
import sys
def setindicatorled_init(parser, parser_list):
"""
#=====================================================================
# @Method: set indicator LED state
# @Param:
# @Return:
# @author:
#=====================================================================
"""
sub_parser = parser.add_parser('setindicatorled',
help='''set product information''')
sub_parser.add_argument('-S', dest='state', required=True,
choices=['Lit', 'Off', 'Blinking'],
help='state of indicator led')
parser_list['setindicatorled'] = sub_parser
return 'setindicatorled'
def setindicatorled(client, parser, args):
"""
#=====================================================================
# @Method: set product info
# @Param:
# @Return:
# @author:
#=====================================================================
"""
slotid = client.get_slotid()
if slotid is None:
return None
url = "/redfish/v1/Chassis/%s" % slotid
resp = client.get_resource(url)
if resp is None:
return None
if resp['status_code'] != 200:
if resp['status_code'] == 404:
print('Failure: resource was not found')
return resp
payload = {
"IndicatorLED": args.state
}
resp = client.set_resource(url, payload)
if resp is None:
return None
if resp['status_code'] == 200:
print('Success: successfully completed request')
else:
from common_function import display_error_message
display_error_message(client, resp)
return resp
|
[
"31431891+serverplugin@users.noreply.github.com"
] |
31431891+serverplugin@users.noreply.github.com
|
775437e8f41fe09868b4e887683a5283ccc46bf9
|
53fab060fa262e5d5026e0807d93c75fb81e67b9
|
/backup/user_103/ch163_2020_06_20_21_08_09_870683.py
|
562bc7f4be71d74ecbf772f874e1ddb8cfc29b67
|
[] |
no_license
|
gabriellaec/desoft-analise-exercicios
|
b77c6999424c5ce7e44086a12589a0ad43d6adca
|
01940ab0897aa6005764fc220b900e4d6161d36b
|
refs/heads/main
| 2023-01-31T17:19:42.050628
| 2020-12-16T05:21:31
| 2020-12-16T05:21:31
| 306,735,108
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 268
|
py
|
def calcula_media(lista_notas):
lista_num =[]
for i in range(len(lista_notas)):
for nome,notas in lista_notas[i].items():
a = lista_notas[i]
a[nome] = notas
lista_num.append(notas)
print(lista_num)
|
[
"you@example.com"
] |
you@example.com
|
820286b363cf8652d45eba5c228226b713f9a63e
|
f07a42f652f46106dee4749277d41c302e2b7406
|
/Data Set/bug-fixing-2/d33210f954309e2844204a05856bdc5ff333bda1-<run_with_configuration>-bug.py
|
48cdfb01d7f9ee317701b166c7719899a3142f7d
|
[] |
no_license
|
wsgan001/PyFPattern
|
e0fe06341cc5d51b3ad0fe29b84098d140ed54d1
|
cc347e32745f99c0cd95e79a18ddacc4574d7faa
|
refs/heads/main
| 2023-08-25T23:48:26.112133
| 2021-10-23T14:11:22
| 2021-10-23T14:11:22
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,034
|
py
|
def run_with_configuration(self, configuration):
"\n Executes a BigQuery SQL query. See here:\n\n https://cloud.google.com/bigquery/docs/reference/v2/jobs\n\n For more details about the configuration parameter.\n\n :param configuration: The configuration parameter maps directly to\n BigQuery's configuration field in the job object. See\n https://cloud.google.com/bigquery/docs/reference/v2/jobs for\n details.\n "
jobs = self.service.jobs()
job_data = {
'configuration': configuration,
}
query_reply = jobs.insert(projectId=self.project_id, body=job_data).execute()
self.running_job_id = query_reply['jobReference']['jobId']
if ('location' in query_reply['jobReference']):
location = query_reply['jobReference']['location']
else:
location = self.location
keep_polling_job = True
while keep_polling_job:
try:
if location:
job = jobs.get(projectId=self.project_id, jobId=self.running_job_id, location=location).execute()
else:
job = jobs.get(projectId=self.project_id, jobId=self.running_job_id).execute()
if (job['status']['state'] == 'DONE'):
keep_polling_job = False
if ('errorResult' in job['status']):
raise Exception('BigQuery job failed. Final error was: {}. The job was: {}'.format(job['status']['errorResult'], job))
else:
self.log.info('Waiting for job to complete : %s, %s', self.project_id, self.running_job_id)
time.sleep(5)
except HttpError as err:
if (err.resp.status in [500, 503]):
self.log.info('%s: Retryable error, waiting for job to complete: %s', err.resp.status, self.running_job_id)
time.sleep(5)
else:
raise Exception('BigQuery job status check failed. Final error was: %s', err.resp.status)
return self.running_job_id
|
[
"dg1732004@smail.nju.edu.cn"
] |
dg1732004@smail.nju.edu.cn
|
b99b9daf57adaea08d1ed0c0ad2114204996b225
|
55c8557a675d9228a3fb96bf8736ec613351ebb3
|
/apps/account/migrations/0002_pyaccountplan_type.py
|
c83731236824c49ff9db7e7a1325310774da2ff0
|
[
"MIT"
] |
permissive
|
gvizquel/pyerp
|
e56024b481977e07339e8a0a17a26e1a0e4f1147
|
c859f7293cabd1003f79112463cee93ac89fccba
|
refs/heads/master
| 2022-12-07T13:12:16.333420
| 2019-08-29T21:38:22
| 2019-08-29T21:38:22
| 204,968,470
| 0
| 0
|
MIT
| 2022-12-04T09:21:19
| 2019-08-28T15:48:24
|
JavaScript
|
UTF-8
|
Python
| false
| false
| 558
|
py
|
# Generated by Django 2.2.4 on 2019-08-18 03:29
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('account', '0001_initial'),
]
operations = [
migrations.AddField(
model_name='pyaccountplan',
name='type',
field=models.CharField(choices=[('activo', 'Activo'), ('pasivo', 'Pasivo'), ('patrimonio_capital', 'Patrimonio'), ('ingresos', 'Ingresos'), ('costos', 'Costos'), ('gastos', 'Gastos')], default='activo', max_length=64),
),
]
|
[
"falconsoft.3d@gmail.com"
] |
falconsoft.3d@gmail.com
|
07887321386c9f0bbfcb2e0b00eed25e2cd70164
|
234bb369416b18dd7757b14a8b9b03d0656a1b5d
|
/week12/back/back/settings.py
|
ade21baf7381428f520e5553eeea4c5fed798ea2
|
[] |
no_license
|
derbess/WebDEV
|
432fb70e42c89fa9a47b77bf768878d96987b2f0
|
dc323e242ca19df436055d7b73e33f239349cafc
|
refs/heads/master
| 2020-05-20T13:46:55.526113
| 2019-05-08T07:15:28
| 2019-05-08T07:15:28
| 185,607,138
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,437
|
py
|
"""
Django settings for back project.
Generated by 'django-admin startproject' using Django 2.2.
For more information on this file, see
https://docs.djangoproject.com/en/2.2/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/2.2/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/2.2/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'kzx4c$2nifd&mx6((!!wv$a)7!*o7@i=#caji7bg88f2k+8%0e'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'corsheaders',
'rest_framework',
'api'
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'corsheaders.middleware.CorsMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'back.urls'
REST_FRAMEWORK = {
# Use Django's standard `django.contrib.auth` permissions,
# or allow read-only access for unauthenticated users.
'DEFAULT_PERMISSION_CLASSES': [
'rest_framework.permissions.DjangoModelPermissionsOrAnonReadOnly'
]
}
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'back.wsgi.application'
# Database
# https://docs.djangoproject.com/en/2.2/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/2.2/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/2.2/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/2.2/howto/static-files/
STATIC_URL = '/static/'
|
[
"uderbes@gmail.com"
] |
uderbes@gmail.com
|
de97534eb6a222219bae78c0160be1a5e459d9bc
|
1bba82345900327ed1c128e8046dc91f90a0ccb5
|
/lets_party/migrations/0010_auto_20190916_1924.py
|
7ef8348e9b96b152a19329d86c88e55ed68b582e
|
[
"MIT"
] |
permissive
|
dchaplinsky/ragoogle
|
40bd093682e41d1ee2a77f446c69d09e82bb3948
|
dccb3d29334c3220ea12c46c725c443c8bd725c0
|
refs/heads/master
| 2021-06-11T10:07:41.142843
| 2020-10-12T10:30:39
| 2020-10-12T10:30:39
| 136,800,715
| 3
| 3
|
MIT
| 2021-03-19T23:20:02
| 2018-06-10T10:51:30
|
CSS
|
UTF-8
|
Python
| false
| false
| 1,528
|
py
|
# Generated by Django 2.2.4 on 2019-09-16 16:24
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('lets_party', '0009_auto_20190916_0311'),
]
operations = [
migrations.AddField(
model_name='letspartymodel',
name='amount',
field=models.DecimalField(decimal_places=2, default=0, max_digits=15, verbose_name='Сума пожертви'),
),
migrations.AlterField(
model_name='letspartyredflag',
name='rule',
field=models.CharField(choices=[('company_won_procurement', 'Компанія виграла у держзакупівлях'), ('company_had_tax_debts', 'Компанія мала держборг'), ('company_is_high_risk', 'Компанія має ознаки фіктивності'), ('company_has_foreign_bo', 'Компанія має закордоних бенефіціарів'), ('company_has_pep_bo', 'Компанія має PEP-бенефіціарів'), ('company_is_not_active', 'Компанія припинена, або порушено справу про банкрутство'), ('company_has_pep_founder', 'Компанія має засновників/співвласників PEP-ів'), ('company_had_pep_founder', 'Компанія мала засновників/співвласників PEP-ів'), ('company_had_pep_bo', 'Компанія мала PEP-бенефіціарів')], max_length=100),
),
]
|
[
"dchaplinsky@conversionscience.co.uk"
] |
dchaplinsky@conversionscience.co.uk
|
2e1a629db9551f0e666dfafec29bca9c6ff38e4c
|
6fa701cdaa0d83caa0d3cbffe39b40e54bf3d386
|
/google/cloud/bigquery/connection/v1/bigquery-connection-v1-py/google/cloud/bigquery_connection/__init__.py
|
724d7476ae2f9ad95c3e5888257dd2d749efc2d9
|
[
"Apache-2.0"
] |
permissive
|
oltoco/googleapis-gen
|
bf40cfad61b4217aca07068bd4922a86e3bbd2d5
|
00ca50bdde80906d6f62314ef4f7630b8cdb6e15
|
refs/heads/master
| 2023-07-17T22:11:47.848185
| 2021-08-29T20:39:47
| 2021-08-29T20:39:47
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,360
|
py
|
# -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from google.cloud.bigquery_connection_v1.services.connection_service.client import ConnectionServiceClient
from google.cloud.bigquery_connection_v1.services.connection_service.async_client import ConnectionServiceAsyncClient
from google.cloud.bigquery_connection_v1.types.connection import AwsAccessRole
from google.cloud.bigquery_connection_v1.types.connection import AwsCrossAccountRole
from google.cloud.bigquery_connection_v1.types.connection import AwsProperties
from google.cloud.bigquery_connection_v1.types.connection import CloudSpannerProperties
from google.cloud.bigquery_connection_v1.types.connection import CloudSqlCredential
from google.cloud.bigquery_connection_v1.types.connection import CloudSqlProperties
from google.cloud.bigquery_connection_v1.types.connection import Connection
from google.cloud.bigquery_connection_v1.types.connection import CreateConnectionRequest
from google.cloud.bigquery_connection_v1.types.connection import DeleteConnectionRequest
from google.cloud.bigquery_connection_v1.types.connection import GetConnectionRequest
from google.cloud.bigquery_connection_v1.types.connection import ListConnectionsRequest
from google.cloud.bigquery_connection_v1.types.connection import ListConnectionsResponse
from google.cloud.bigquery_connection_v1.types.connection import UpdateConnectionRequest
__all__ = ('ConnectionServiceClient',
'ConnectionServiceAsyncClient',
'AwsAccessRole',
'AwsCrossAccountRole',
'AwsProperties',
'CloudSpannerProperties',
'CloudSqlCredential',
'CloudSqlProperties',
'Connection',
'CreateConnectionRequest',
'DeleteConnectionRequest',
'GetConnectionRequest',
'ListConnectionsRequest',
'ListConnectionsResponse',
'UpdateConnectionRequest',
)
|
[
"bazel-bot-development[bot]@users.noreply.github.com"
] |
bazel-bot-development[bot]@users.noreply.github.com
|
8c762e14ef1ba9895fa010c2ab1c29e644d26a65
|
af17007c9f8b0ccb0b482f0c76e94b542bc236cf
|
/LC_n_Misc/LC_832.py
|
055288253939221f8448242a36636f2b8948ef46
|
[] |
no_license
|
abhikrish06/PythonPractice
|
da72a81845bb73e2902ec37aff9c3b31587ef9ed
|
24988428cada3b1f8a6c0cf0140e288511cd9a6d
|
refs/heads/master
| 2021-03-27T18:41:59.953316
| 2018-11-03T23:44:38
| 2018-11-03T23:44:38
| 115,932,355
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 641
|
py
|
class Solution:
def flipAndInvertImage(self, A):
"""
:type A: List[List[int]]
:rtype: List[List[int]]
"""
res, fin = [], []
for arr in A:
res.append(arr[::-1])
# print(res)
for ar in res:
res2 = []
for i in ar:
if i == 0:
res2.append(1)
elif i == 1:
res2.append(0)
fin.append(res2)
return fin
obj = Solution()
print(obj.flipAndInvertImage([[1,1,0],[1,0,1],[0,0,0]]))
print(obj.flipAndInvertImage([[1,1,0,0],[1,0,0,1],[0,1,1,1],[1,0,1,0]]))
|
[
"abhikrish06@gmail.com"
] |
abhikrish06@gmail.com
|
d4ff4d434ccd308db73a4dc2fe714e5ef205aaf4
|
6268a19db5d7806b3a91d6350ec2777b3e13cee6
|
/old_stuff/code/hpe_rgb/holi_multi_reso_net/src/create_data_h5py.py
|
22cbb6fdd6cc447b32594bf323f03c618f450216
|
[] |
no_license
|
aaronlws95/phd_2019
|
3ae48b4936f039f369be3a40404292182768cf3f
|
22ab0f5029b7d67d32421d06caaf3e8097a57772
|
refs/heads/master
| 2023-03-22T14:38:18.275184
| 2021-03-21T11:39:29
| 2021-03-21T11:39:29
| 186,387,381
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 690
|
py
|
import os
import sys
import utils.prepare_data as pd
from utils.directory import DATA_DIR, DATASET_DIR
from utils.logger import get_logger
logger = get_logger()
dataset_dir = os.path.join(DATASET_DIR, 'First_Person_Action_Benchmark')
save_dir = DATA_DIR
train_pairs, test_pairs = pd.get_fpha_data_list('color', dataset_dir)
file_name = [i for i,j in train_pairs]
xyz_gt = [j for i,j in train_pairs]
pd.write_data_no_crop_h5py(file_name, xyz_gt, os.path.join(save_dir, 'train_fpha_RGB_no_crop.h5'), logger)
file_name = [i for i,j in test_pairs]
xyz_gt = [j for i,j in test_pairs]
pd.write_data_no_crop_h5py(file_name, xyz_gt, os.path.join(save_dir, 'test_fpha_RGB_no_crop.h5'), logger)
|
[
"aaronlws95@gmail.com"
] |
aaronlws95@gmail.com
|
7a60fa9b117edc5ecee0dc68070efd7cddaa45a5
|
35e203ca1734458650975e56c4cc2b7e2eba1fe9
|
/swexpert/swea 3347 올림픽 종목 투표 0310.py
|
24a72b653b8337437a659b51ef762f4bb2ee8226
|
[] |
no_license
|
ebroebro/swproblem
|
0d7e1898fdf72497b937b7f20de664123ff28c25
|
e6c166ce2e3806042034b09930a8783d27db674f
|
refs/heads/master
| 2020-12-22T07:27:14.735653
| 2020-05-24T09:24:40
| 2020-05-24T09:24:40
| 236,711,479
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 498
|
py
|
T=int(input())
for z in range(T):
n,m=list(map(int,input().split()))
sports=list(map(int,input().split()))
people=list(map(int,input().split()))
check_list=[0 for i in range(n)]
max_cnt=0
rslt=0
for i in range(m):
for j in range(n-1,-1,-1):
if sports[j] <=people[i]:
tmp=j
check_list[tmp]+=1
if check_list[tmp]>max_cnt:
max_cnt=check_list[tmp]
rslt=tmp+1
print("#{} {}".format(z+1,rslt))
|
[
"jzzz0304@gmail.com"
] |
jzzz0304@gmail.com
|
503695cf3f51fa183d89bf99e2ef120f05a702e6
|
3a06c339efa4008f4e351dc30fcf1b81b0691f9a
|
/day11/day11.py
|
9f1d43c249fd9ac2f8d511fee2666c1eb752ac97
|
[] |
no_license
|
RedmondY/python
|
73e84823b1a65fa4b31bee4448bb4eddd5500864
|
5b4dfe16735ec87d2cb9b07fb4723200e4bd472a
|
refs/heads/master
| 2022-12-04T00:58:07.652676
| 2019-08-07T00:43:35
| 2019-08-07T00:43:35
| 172,700,007
| 0
| 0
| null | 2022-11-22T01:22:31
| 2019-02-26T11:38:02
|
Python
|
UTF-8
|
Python
| false
| false
| 1,732
|
py
|
'''
1、文件(db.txt)内容如下,标题为:姓名,性别,年纪,薪资
albert male 18 3000
james male 38 30000
林志玲 female 28 20000
新垣结衣 female 28 10000
要求:
从文件中取出每一条记录放入列表中,
列表的每个元素都是{'name':'albert','sex':'male','age':18,'salary':3000}的形式
2 根据1得到的列表,取出薪资最高的人的信息
3 根据1得到的列表,取出最年轻的人的信息
4 根据1得到的列表,将每个人的信息中的名字映射成首字母大写的形式
5 根据1得到的列表,过滤掉名字以a开头的人的信息
'''
with open('day010_db.txt') as f:
items=(line.split() for line in f)
info=[{'name':name,'sex':sex,'age':age,'salary':salary} \
for name,sex,age,salary in items]
print(info)
#task2 取出薪资最高的人的信息
print(max(info,key=lambda dic:dic['salary']))
#task3 取出最年轻的人的信息
print(min(info,key=lambda dic:dic['age']))
#task4 将每个人的信息中的名字映射成首字母大写的形式
info_new=map(lambda item:{'name':item['name'].capitalize(),
'sex':item['sex'],
'age':item['age'],
'salary':item['salary']},info)
print(list(info_new))
# 6 使用递归打印斐波那契数列(前两个数的和得到第三个数,如:0 1 1 2 3 4 7...)
#非递归
def fib(n):
a,b=0,1
while a < n:
print(a,end=' ')
a,b=b,a+b
print()
fib(10)
# 7 一个嵌套很多层的列表,如l=[1,2,[3,[4,5,6,[7,8,[9,10,[11,12,13,[14,15]]]]]]],用递归取出所有的值
#递归
def fib(a,b,stop):
if a > stop:
return
print(a,end=' ')
fib(b,a+b,stop)
fib(0,1,10)
|
[
"redmondy@foxmail.com"
] |
redmondy@foxmail.com
|
53182efc9c811568c760c27ee039b441abb2c3b1
|
5f3c8eddb8c5a14fb3b5931f332d401207666036
|
/test/clients/test01.py
|
75d3560fb9d6b17a62817b99f4d38cff956a18de
|
[
"Apache-2.0"
] |
permissive
|
hwinther/lanot
|
dec8fe48efb6245af009bedf65b2bc089e92efa0
|
f6700cacb3946535081624467b746fdfd38e021d
|
refs/heads/master
| 2021-03-24T12:02:47.530833
| 2019-05-01T11:56:05
| 2019-05-01T11:56:05
| 91,605,521
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 249
|
py
|
from deploy.clients import localtestclient
udp = localtestclient.LocalTestUdpClient('test01', remote_port=9190)
tcp = localtestclient.LocalTestTcpClient('test01', remote_port=9191)
print('udp: %s' % udp.version())
print('tcp: %s' % tcp.version())
|
[
"hc@wsh.no"
] |
hc@wsh.no
|
cbc05f5d0b2af05d6025033baddd15c57ea82bd8
|
72a146dad10c3330548f175643822e6cc2e2ccba
|
/net/data/verify_certificate_chain_unittest/generate-target-not-end-entity.py
|
b54053fa8699d0c2313d92c8f77f59f5dfb68e28
|
[
"BSD-3-Clause"
] |
permissive
|
daotianya/browser-android-tabs
|
bb6772394c2138e2f3859a83ec6e0860d01a6161
|
44e83a97eb1c7775944a04144e161d99cbb7de5b
|
refs/heads/master
| 2020-06-10T18:07:58.392087
| 2016-12-07T15:37:13
| 2016-12-07T15:37:13
| 75,914,703
| 1
| 0
| null | 2016-12-08T07:37:51
| 2016-12-08T07:37:51
| null |
UTF-8
|
Python
| false
| false
| 923
|
py
|
#!/usr/bin/python
# Copyright (c) 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Certificate chain with 1 intermediate, a trusted root, and a target
certificate that is also a CA. Verification is expected to succeed, as the test
code accepts any target certificate."""
import common
# Self-signed root certificate (used as trust anchor).
root = common.create_self_signed_root_certificate('Root')
# Intermediate certificate.
intermediate = common.create_intermediate_certificate('Intermediate', root)
# Target certificate (is also a CA)
target = common.create_intermediate_certificate('Target', intermediate)
chain = [target, intermediate]
trusted = common.TrustAnchor(root, constrained=False)
time = common.DEFAULT_TIME
verify_result = True
common.write_test_file(__doc__, chain, trusted, time, verify_result)
|
[
"serg.zhukovsky@gmail.com"
] |
serg.zhukovsky@gmail.com
|
785e9b108450b244f05000b4b18cb942b56f3f04
|
6b2a8dd202fdce77c971c412717e305e1caaac51
|
/solutions_5708284669460480_0/Python/sunnylqm/test.py
|
1dd36ce495f9584f34f3955d399d19388359a089
|
[] |
no_license
|
alexandraback/datacollection
|
0bc67a9ace00abbc843f4912562f3a064992e0e9
|
076a7bc7693f3abf07bfdbdac838cb4ef65ccfcf
|
refs/heads/master
| 2021-01-24T18:27:24.417992
| 2017-05-23T09:23:38
| 2017-05-23T09:23:38
| 84,313,442
| 2
| 4
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,355
|
py
|
import math
f = open('B-small-attempt2.in')
#f = open('test.in')
count = int(f.readline())
output = ''
for x in xrange(1, count + 1):
arr = f.readline().split()
K = int(arr[0])
L = int(arr[1])
S = int(arr[2])
keyboard = f.readline()
target = f.readline()
keymap = {}
for i in xrange(0, K):
key = keyboard[i]
if key in keymap.keys():
keymap[key] += 1
else:
keymap[key] = 1
start = int((L+1)/2)
overlap = 0
for m in xrange(start, L):
n = 0
if target[m] == target[n]:
n = 1
overlap = 1
for p in xrange(m + 1, L):
if target[p] == target[n]:
n += 1
overlap += 1
else:
overlap = 0
break
P = 1.0
if overlap == 0:
maxbanana = S / L
for t in xrange(0, L):
word = target[t]
if word not in keymap.keys():
maxbanana = 0
P = 0.0
break
P *= float(keymap[word]) / K
if P < 0.0000001:
P = 0.0
break
ret = maxbanana * (1 - P)
else:
if S % (L - overlap) >= overlap:
maxbanana = S / (L - overlap)
else:
maxbanana = S / (L - overlap) - 1
P1 = 1.0
for t in xrange(0, L - overlap):
word = target[t]
if word not in keymap.keys():
maxbanana = 0
P1 = 0.0
break
P1 *= float(keymap[word]) / K
if P1 < 0.0000001:
P1 = 0.0
break
P2 = 1.0
for t in xrange(overlap, L):
word = target[t]
if word not in keymap.keys():
maxbanana = 0
P2 = 0.0
break
P2 *= float(keymap[word]) / K
if P2 < 0.0000001:
P2 = 0.0
break
if maxbanana == 0 or P1 == 0.0 or P2 == 0.0:
ret = 0.0
else:
remain = math.pow(P1, maxbanana)
ret = maxbanana * (1 - P1) - maxbanana * remain * (1 - P2)
output += 'Case #' + str(x) + ': %.7f\n' % ret
print(output)
newf = open('output.txt','w')
newf.write(output)
|
[
"eewestman@gmail.com"
] |
eewestman@gmail.com
|
5cb2978fbc35af9b1646d7addb63f245998a7327
|
89b3cf7b8246349e67ff3362cd4b9ed039426d93
|
/celeryproject/celeryproject/settings.py
|
89767bc3c9fb5f8f061d7ed449000fd00e57f034
|
[] |
no_license
|
vijaygwala/celeryproject
|
c4d67494fe5feca485e5e4daf56f0141dd7fd681
|
8f58bc4e3553cb8bb67fa980a49f96fe048f6af9
|
refs/heads/master
| 2023-05-31T03:20:30.180435
| 2021-06-08T18:24:50
| 2021-06-08T18:24:50
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 5,247
|
py
|
"""
Django settings for celeryproject project.
Generated by 'django-admin startproject' using Django 1.11.27.
For more information on this file, see
https://docs.djangoproject.com/en/1.11/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.11/ref/settings/
"""
import os
from django.contrib.messages import constants as messages
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
PUBLIC_DIR = os.path.abspath(os.path.join(
os.path.dirname(__file__), '..', '..', 'public'))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.11/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '(f)dsx#)68h4lt$v*z#9wl2l&qb-q6ebnjp^cz#sem^*8&4ome'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = ['celeryproject']
MESSAGE_TAGS = {
messages.DEBUG: 'alert-info',
messages.INFO: 'alert-info',
messages.SUCCESS: 'alert-success',
messages.WARNING: 'alert-warning',
messages.ERROR: 'alert-danger',
}
# Application definition
INSTALLED_APPS = [
'channels',
'chat',
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'celeryproject.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [os.path.join(BASE_DIR, 'templates')],
#'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
'debug': DEBUG,
'loaders': [
('django.template.loaders.cached.Loader', [
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
]),
],
},
},
]
WSGI_APPLICATION = 'celeryproject.wsgi.application'
ASGI_APPLICATION = 'celeryproject.asgi.application'
# Database
# https://docs.djangoproject.com/en/1.11/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/1.11/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/1.11/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.11/howto/static-files/
STATIC_URL = '/static/'
STATIC_ROOT = os.path.join(PUBLIC_DIR, "static")
MEDIA_URL = '/media/'
MEDIA_ROOT = os.path.join(PUBLIC_DIR, "media")
STATICFILES_DIRS = [
os.path.join(BASE_DIR, "static"),
]
STATICFILES_FINDERS = [
'django.contrib.staticfiles.finders.FileSystemFinder',
'django.contrib.staticfiles.finders.AppDirectoriesFinder',
]
SESSION_ENGINE = 'django.contrib.sessions.backends.cache'
SESSION_CACHE_ALIAS = "default"
# import djcelery
# djcelery.setup_loader()
BROKER_URL = 'redis://localhost:6379'
CELERY_ACCEPT_CONTENT = ['json']
CELERY_TASK_SERIALIZER = 'json'
CELERY_RESULT_SERIALIZER = 'json'
CACHES = {
'default': {
'BACKEND': 'redis_cache.RedisCache',
'LOCATION': [
'127.0.0.1:6379',
],
'OPTIONS': {
#'DB': 1,
#'PASSWORD': 'yadayada',
'PARSER_CLASS': 'redis.connection.HiredisParser',
'CONNECTION_POOL_CLASS': 'redis.BlockingConnectionPool',
#'CONNECTION_POOL_CLASS_KWARGS': {
# 'max_connections': 50,
# 'timeout': 20,
#},
#'MAX_CONNECTIONS': 1000,
'PICKLE_VERSION': -1,
},
},
}
CHANNEL_LAYERS = {
"default": {
"BACKEND": "channels_redis.core.RedisChannelLayer",
"CONFIG": {
"hosts": [("localhost", 6379)],
},
},
}
|
[
"dimkoug@gmail.com"
] |
dimkoug@gmail.com
|
381c1cfa2d1c8f94daf58ed606f18762649e52b9
|
04a643a77927bc56ab58c7df91d4733321e61e51
|
/new_targets/new_tracks_a001.py
|
4f6ca1c33f96092e0a0fcb05ecc16fff9c964e83
|
[] |
no_license
|
dcollins4096/p19_newscripts
|
d2fae1807170a4d70cf4c87222a6258211f993ff
|
23c780dd15b60944ed354406706de85282d0bee6
|
refs/heads/master
| 2023-07-21T11:53:55.188383
| 2023-07-18T17:38:21
| 2023-07-18T17:38:21
| 215,159,839
| 0
| 3
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 4,681
|
py
|
#
# run get_mountain_tops first
#
from starter2 import *
import xtra_energy
import data_locations as dl
reload(dl)
reload(looper)
import looper2
reload(looper2)
LOOPER2 = True
looper_main = looper2.core_looper2
this_simname = 'a001'
other_simname = 'u301'
mountain_top_fname = "datasets_small/%s_mountain_tops_take_9.h5"%other_simname
outname = '%s_all_particles.h5'%this_simname
bad_particle_fname_read='datasets_small/%s_bad_particles.h5'%'u501'
bad_particle_fname_save='datasets_small/%s_bad_particles_save.h5'%'u501'
#bad_particle_fname_read="datasets_small/u601_bad_particles_srsly.h5"
#bad_particle_fname_save='datasets_small/%s_bad_particles_take3.h5'%this_simname
#bad_particle_fname='datasets_small/%s_bad_particles_TEST.h5'%this_simname
import xtra_energy
target_frame = dl.target_frames[other_simname]
if 0:
"""Just One"""
frame_list = list(range(0,target_frame,10)) + [target_frame]
frame_list = [5]
if 0:
"""all frames"""
target_frame = dl.target_frames[other_simname]
frame_list = list(range(0,target_frame+1,1))
if 0:
"""first 4"""
target_frame = dl.target_frames[other_simname]
frame_list = list(range(0,3,1))
if 1:
"""Every 10"""
target_frame = dl.target_frames[other_simname]
frame_list = list(range(0,target_frame,10)) + [target_frame]
if 1:
fields = ['x','y','z','density', 'cell_volume']
derived=[]
if 0:
fields = ['x','y','z','density', 'cell_volume']
fields += ['velocity_magnitude','magnetic_field_strength', 'velocity_divergence']
fields += ['velocity_x','velocity_y','velocity_z']
fields += ['magnetic_field_%s'%s for s in 'xyz']
fields += ['PotentialField','grav_x','grav_y','grav_z' ]
fields += ['particle_pos_x', 'particle_pos_y', 'particle_pos_z', 'particle_index']
derived=[xtra_energy.add_force_terms]
if target_frame not in frame_list:
print("YOU MUST HAVE THE LAST FRAME or the periodic unwrap fails")
frame_list += [target_frame]
if 1:
fields = [('gas',field) for field in fields]
new_looper = looper_main(directory= dl.sims[other_simname],
sim_name = this_simname,
out_prefix = this_simname,
target_frame = target_frame,
frame_list = frame_list,
core_list = None,
fields_from_grid=fields,
derived = derived,
do_shift=False
)
new_looper.plot_directory = "./plots_to_sort"
if 1:
core_id=0
new_looper.core_list=[core_id]
i,j,k=np.mgrid[0:128:1,0:128:1,0:128:1]
SL = tuple([slice(32,40)]*3)
SL = tuple([slice(None)]*3)
i_keep=i[SL]
j_keep=j[SL]
k_keep=k[SL]
index = i_keep+128*(j_keep+128*k_keep)
new_looper.target_indices=np.sort(index.flatten())
new_looper.core_ids = np.zeros_like(new_looper.target_indices)
import bad_particle_hunt
if 1:
print("Look for bad particles again. Somehow we can't get ahead of this.")
aaa = set(np.arange(128**3))
badones=set()
for frame in new_looper.frame_list:
ds=new_looper.load(frame)
ad=ds.all_data()
pi = set(ad['all','particle_index'].v)
badones.update(aaa-pi)
for grid in ds.index.grids:
these = set(grid['all','particle_index'].v)
pi.difference_update( these)
badones.update(pi)
also_bad = bad_particle_hunt.check_particles(ds)
badones.update(set(also_bad))
print(frame, len(badones))
new_looper.read_bad_particles(bad_particle_fname_read, core_hijack=0)
bad_particle_id = [ 724134, 635702, 661226, 743270, 751995, 718196, 1354060,
1362500, 610123, 610189, 1930558, 1046537, 1841352, 1844125,
1845574, 1849410, 1853445, 1300291]
badones.update(set(bad_particle_id))
bad_particle_id = list(badones) #this is some confusing variable naming.
bad_core_id = [0]*len(bad_particle_id)
for bad_core, bad_part in zip(bad_core_id, bad_particle_id):
new_looper.bad_particles[bad_core]=np.append(
new_looper.bad_particles[bad_core], bad_part)
if 1:
new_looper.remove_bad_particles()
if 0:
import warnings
with warnings.catch_warnings():
warnings.simplefilter('error')
#pdb.set_trace()
new_looper.get_tracks()
if 1:
new_looper.get_tracks()
if 1:
import tracks_read_write
tracks_read_write.save_loop_trackage_only( new_looper, outname)
|
[
"dccollins@fsu.edu"
] |
dccollins@fsu.edu
|
09eb164bc57814f0f45096a91ef444fd22d8c657
|
6a9f06b967d7641ddff7b56425651b29d3e577f4
|
/mindinsight/tests/st/func/lineagemgr/cache/test_lineage_cache.py
|
0bfb2c40e19d447ee9e5ba86e951d9c403998356
|
[
"Apache-2.0"
] |
permissive
|
ZeroWangZY/DL-VIS
|
b3117016547007b88dc66cfe7339ef02b0d84e9c
|
8be1c70c44913a6f67dd424aa0e0330f82e48b06
|
refs/heads/master
| 2023-08-18T00:22:30.906432
| 2020-12-04T03:35:50
| 2020-12-04T03:35:50
| 232,723,696
| 1
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,993
|
py
|
# Copyright 2020 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""
Function:
Test the query module about lineage information.
Usage:
The query module test should be run after lineagemgr/collection/model/test_model_lineage.py
pytest lineagemgr
"""
from unittest import TestCase
import pytest
from mindinsight.datavisual.data_transform.data_manager import DataManager
from mindinsight.lineagemgr.cache_item_updater import LineageCacheItemUpdater
from mindinsight.lineagemgr.api.model import general_filter_summary_lineage, \
general_get_summary_lineage
from ..api.test_model_api import LINEAGE_INFO_RUN1, LINEAGE_FILTRATION_EXCEPT_RUN, \
LINEAGE_FILTRATION_RUN1, LINEAGE_FILTRATION_RUN2
from ..conftest import BASE_SUMMARY_DIR
from .....ut.lineagemgr.querier import event_data
from .....utils.tools import check_loading_done, assert_equal_lineages
@pytest.mark.usefixtures("create_summary_dir")
class TestModelApi(TestCase):
"""Test get lineage from data_manager."""
@classmethod
def setup_class(cls):
data_manager = DataManager(BASE_SUMMARY_DIR)
data_manager.register_brief_cache_item_updater(LineageCacheItemUpdater())
data_manager.start_load_data(reload_interval=0)
check_loading_done(data_manager)
cls._data_manger = data_manager
@pytest.mark.level0
@pytest.mark.platform_arm_ascend_training
@pytest.mark.platform_x86_gpu_training
@pytest.mark.platform_x86_ascend_training
@pytest.mark.platform_x86_cpu
@pytest.mark.env_single
def test_get_summary_lineage(self):
"""Test the interface of get_summary_lineage."""
total_res = general_get_summary_lineage(data_manager=self._data_manger, summary_dir="./run1")
expect_total_res = LINEAGE_INFO_RUN1
assert_equal_lineages(expect_total_res, total_res, self.assertDictEqual)
@pytest.mark.level0
@pytest.mark.platform_arm_ascend_training
@pytest.mark.platform_x86_gpu_training
@pytest.mark.platform_x86_ascend_training
@pytest.mark.platform_x86_cpu
@pytest.mark.env_single
def test_filter_summary_lineage(self):
"""Test the interface of filter_summary_lineage."""
expect_result = {
'customized': event_data.CUSTOMIZED__1,
'object': [
LINEAGE_FILTRATION_EXCEPT_RUN,
LINEAGE_FILTRATION_RUN1,
LINEAGE_FILTRATION_RUN2
],
'count': 3
}
search_condition = {
'sorted_name': 'summary_dir'
}
res = general_filter_summary_lineage(data_manager=self._data_manger, search_condition=search_condition)
expect_objects = expect_result.get('object')
for idx, res_object in enumerate(res.get('object')):
expect_objects[idx]['model_lineage']['dataset_mark'] = res_object['model_lineage'].get('dataset_mark')
assert_equal_lineages(expect_result, res, self.assertDictEqual)
expect_result = {
'customized': {},
'object': [],
'count': 0
}
search_condition = {
'summary_dir': {
"in": ['./dir_with_empty_lineage']
}
}
res = general_filter_summary_lineage(data_manager=self._data_manger, search_condition=search_condition)
assert_equal_lineages(expect_result, res, self.assertDictEqual)
|
[
"756762961@qq.com"
] |
756762961@qq.com
|
8ab26bb0e961827bf5aa99e77eb27bc71be0bf30
|
cad396ca2df76e4521f6a4b5c059ba3931e72f11
|
/pabi_asset_management/__openerp__.py
|
4f52adf93cb7521961a68f99292244f2351916c9
|
[] |
no_license
|
mulaudzicalvin/pb2_addons
|
1ee835bd0e5d6f215603aa5d3f1099df40a3d14c
|
57e0cb59e83853248dda37e2205722ab9bce1852
|
refs/heads/master
| 2020-03-26T11:47:48.470437
| 2018-08-15T12:16:25
| 2018-08-15T12:16:25
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,319
|
py
|
# -*- coding: utf-8 -*-
{
"name": "NSTDA :: PABI2 - Asset Management",
"version": "0.1",
"author": "Ecosoft",
"website": "http://ecosoft.co.th",
"category": "Customs Modules",
"depends": [
"account_asset_management",
"stock_account",
"account_anglo_saxon",
"pabi_purchase_work_acceptance",
"account_budget_activity",
"pabi_invest_construction",
"pabi_chartfield_merged",
"pabi_utils",
"pabi_account_move_adjustment",
"hr_expense_auto_invoice",
"pabi_utils",
],
"description": """
This module allow creating asset during incoming shipment.
""",
"data": [
"security/ir.model.access.csv",
# "data/import_templates.xml",
"xlsx_template/templates.xml",
"xlsx_template/load_template.xml",
"xlsx_template/xlsx_template_wizard.xml",
"data/sequence_data.xml",
"data/asset_purchase_method.xml",
"data/account_data.xml",
"data/location_data.xml",
"data/journal_data.xml",
"data/asset_status.xml",
"data/default_value.xml",
"wizard/asset_parent_deliver_wizard.xml",
"views/asset_view.xml",
"wizard/account_asset_remove_view.xml",
"wizard/create_asset_request_view.xml",
"wizard/create_asset_removal_view.xml",
"wizard/create_asset_adjust_wizard.xml",
"wizard/account_asset_compute.xml",
"views/account_invoice_view.xml",
"views/account_view.xml",
"views/asset_request_view.xml",
"views/asset_changeowner_view.xml",
"views/asset_transfer_view.xml",
"views/asset_adjust_view.xml",
"views/asset_removal_view.xml",
"views/asset_receive_view.xml",
"views/product_view.xml",
"views/purchase_requisition_view.xml",
"views/stock_view.xml",
"views/purchase_view.xml",
"views/purchase_master_data_view.xml",
"views/res_project_view.xml",
"views/res_section_view.xml",
"views/hr_expense_view.xml",
"views/ir_sequence_view.xml",
# "wizard/asset_action_excel_import.xml",
],
'installable': True,
'active': True,
'auto_install': False,
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
[
"kittiu@gmail.com"
] |
kittiu@gmail.com
|
01beb8d00b05303dbfe6a6de48c5ffd06c388bb5
|
b948da1493329127a9a9ab567bae874c8cfa0bf4
|
/gallery/migrations/0001_initial.py
|
0ae5ff858f1c4d7f41180948b4fd9bd84b6b05a0
|
[] |
no_license
|
Kotodian/portfolio
|
edb93bec72d66d1fececd71b67a8e7f92cebb260
|
5661bf5d8134bbb576b2ea771fe5a6210c942feb
|
refs/heads/master
| 2020-06-13T02:32:59.356331
| 2019-07-04T08:22:28
| 2019-07-04T08:24:22
| 194,503,065
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 515
|
py
|
# Generated by Django 2.0.2 on 2019-06-30 12:39
from django.db import migrations, models
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='Gallery',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('description', models.CharField(max_length=100)),
],
),
]
|
[
"root@vultr.guest"
] |
root@vultr.guest
|
47c9d4b25a6ac97dc5789a701dd77e1f0e2ff125
|
97933c7f0973cb5671a26d28763e2688882b6ba9
|
/akshare/stock/stock_hk_fhpx_ths.py
|
0dad04c91e08af6df46e8f48cb404a95d9b2d66c
|
[
"MIT"
] |
permissive
|
jinzaizhichi/akshare
|
a1eacae7a5a94142b2e05e4fed9a48a9448b1755
|
c3c9f67364dcfb0c8b507f991540541179a0e87b
|
refs/heads/master
| 2023-08-10T07:45:22.377848
| 2023-07-25T08:23:08
| 2023-07-25T08:23:08
| 243,995,284
| 0
| 0
|
MIT
| 2022-09-08T05:48:01
| 2020-02-29T15:43:00
|
Python
|
UTF-8
|
Python
| false
| false
| 2,078
|
py
|
#!/usr/bin/env python
# -*- coding:utf-8 -*-
"""
Date: 2023/5/16 18:20
Desc: 同花顺-港股-分红派息
http://stockpage.10jqka.com.cn/HK0700/bonus/
"""
import pandas as pd
import requests
def stock_hk_fhpx_detail_ths(symbol: str = "0700") -> pd.DataFrame:
"""
同花顺-港股-分红派息
http://stockpage.10jqka.com.cn/HK0700/bonus/
:param symbol: 港股代码
:type symbol: str
:return: 分红派息
:rtype: pandas.DataFrame
"""
url = f"http://basic.10jqka.com.cn/176/HK{symbol}/bonus.html"
headers = {
"User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) "
"AppleWebKit/537.36 (KHTML, like Gecko) "
"Chrome/89.0.4389.90 Safari/537.36",
}
r = requests.get(url, headers=headers)
r.encoding = "utf-8"
temp_df = pd.read_html(r.text)[0]
temp_df.columns = [
"公告日期",
"方案",
"除净日",
"派息日",
"过户日期起止日-起始",
"过户日期起止日-截止",
"类型",
"进度",
"以股代息",
]
# 剔除异常格式,由以股代息产生的异常
temp_df.dropna(subset=["派息日", "除净日"], inplace=True, ignore_index=True)
temp_df["公告日期"] = pd.to_datetime(
temp_df["公告日期"], format="%Y-%m-%d", errors="coerce"
).dt.date
temp_df["除净日"] = pd.to_datetime(
temp_df["除净日"], format="%Y-%m-%d", errors="coerce"
).dt.date
temp_df["派息日"] = pd.to_datetime(
temp_df["派息日"], format="%Y-%m-%d", errors="coerce"
).dt.date
temp_df["过户日期起止日-起始"] = pd.to_datetime(
temp_df["过户日期起止日-起始"], format="%Y-%m-%d", errors="coerce"
).dt.date
temp_df["过户日期起止日-截止"] = pd.to_datetime(
temp_df["过户日期起止日-截止"], format="%Y-%m-%d", errors="coerce"
).dt.date
return temp_df
if __name__ == "__main__":
stock_hk_fhpx_detail_ths_df = stock_hk_fhpx_detail_ths(symbol="0968")
print(stock_hk_fhpx_detail_ths_df)
|
[
"jindaxiang@163.com"
] |
jindaxiang@163.com
|
a75ac8c01944ccea8b2dbe6c82b21057a5546ede
|
27d44e4eb737cdacd46e08d3c6810424e9751872
|
/homedns/interface/interface.py
|
885850df63faf3ef8cf6576e8f42454731ab4879
|
[] |
no_license
|
liuyug/homedns
|
96d88c28d88a31e837270f609b8ea408e763cc80
|
236fd19eaec6dd54f1ae29872a5c627ec1a4ae76
|
refs/heads/master
| 2021-07-04T08:48:25.905798
| 2020-09-30T02:26:08
| 2020-09-30T02:26:08
| 50,401,927
| 5
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 872
|
py
|
#!/usr/bin/env python
# -*- encoding:utf-8 -*-
class InterfaceBase(object):
def __init__(self):
self.interfaces = {}
self.gateway_iface = ''
def isIPv4(self, ip):
if not ip:
return False
if ':' in ip:
return False
return True
def includeIPv4(self, ips):
for ip in ips:
if self.isIPv4(ip):
return True
return False
def get_gateway(self):
iface = self.interfaces.get(self.gateway_iface)
if iface:
return iface.get('gateway')
def get_dnserver(self):
iface = self.interfaces.get(self.gateway_iface)
if iface:
return iface.get('dnserver')
def get_dhcpserver(self):
iface = self.interfaces.get(self.gateway_iface)
if iface:
return iface.get('dhcpserver')
|
[
"liuyug@gmail.com"
] |
liuyug@gmail.com
|
23130632ae3ca75ff2aa72020b905c030c10dc4b
|
92c8743c51a75e5173f1eef139f2796c7027ed2a
|
/src/forms/about.py
|
e5d21a74d3dce76787deaee0892494d7ba270b1e
|
[
"LicenseRef-scancode-warranty-disclaimer",
"MIT",
"CC-BY-3.0"
] |
permissive
|
dave45678/Turing
|
79565c0c89fb6b131ea90d7a0c95099e3882cc8c
|
99107296ca5785306eb275e515d937e787d4f6d8
|
refs/heads/master
| 2020-03-15T06:42:48.233418
| 2018-05-03T15:11:55
| 2018-05-03T15:11:55
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 654
|
py
|
# -*- coding: utf-8 -*-
from PyQt5.QtCore import *
from PyQt5.QtWidgets import *
from forms.ui_about import Ui_AboutWindow
from util.widgets import center_widget
translate = QCoreApplication.translate
class AboutWindow(QDialog):
def __init__(self, parent, version, channel):
super().__init__(parent)
self.ui = Ui_AboutWindow()
self.ui.setupUi(self)
self.setFixedSize(self.size())
txt = self.ui.textBrowser_about.toHtml().replace("{version}", version).replace("{channel}", channel)
self.ui.textBrowser_about.setHtml(txt)
center_widget(self, parent)
def run(self):
self.exec_()
|
[
"zippedfire@free.fr"
] |
zippedfire@free.fr
|
edb3c21bf6b70f937e62563753c0327a117557bb
|
3adf9934a74077c328b9a0afff37f8ca355eead1
|
/comicresizer/wsgi.py
|
0fd76014e66332ac4573eba7ee336a637babd864
|
[] |
no_license
|
jgasteiz/comic-resizer
|
36671623fe9909f23fba793b44cf4ac56380926a
|
12d2e12efdf2017746d67a4b6d9616613ee58bb9
|
refs/heads/master
| 2021-07-05T05:56:00.911958
| 2017-09-27T07:43:58
| 2017-09-27T07:45:25
| 104,987,910
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 402
|
py
|
"""
WSGI config for comicresizer project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.11/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "comicresizer.settings")
application = get_wsgi_application()
|
[
"javi.manzano.oller@gmail.com"
] |
javi.manzano.oller@gmail.com
|
04cd0012628d3dfda86a4d2353d6d72de1583872
|
bbfa3b7ee2008617d33a7c5c7770d22e1aa8836b
|
/luolearn/metrics/classification.py
|
0ccb8670d109cc630df5d4db6f8b84270668eda1
|
[
"MIT"
] |
permissive
|
luoshao23/ML_algorithm
|
1a0046ce9c3abed029cceffa35defe57fffa82b2
|
6e94fdd0718cd892118fd036c7c5851cf3e6d796
|
refs/heads/master
| 2021-08-07T08:38:16.102455
| 2020-03-18T06:49:43
| 2020-03-18T06:49:43
| 92,467,636
| 4
| 1
|
MIT
| 2018-01-16T05:01:29
| 2017-05-26T03:20:08
|
Jupyter Notebook
|
UTF-8
|
Python
| false
| false
| 1,875
|
py
|
import numpy as np
from scipy.sparse import csr_matrix
from ..utils import column_or_1d
from ..utils import check_consistent_length
from ..utils.multiclass import type_of_target
from ..utils.sparsefuncs import count_nonzero
def _check_targets(y_true, y_pred):
check_consistent_length(y_true, y_pred)
type_true = type_of_target(y_true)
type_pred = type_of_target(y_pred)
y_type = set([type_true, type_pred])
if y_type == set(["binary", "multiclass"]):
y_type = set(["multiclass"])
if len(y_type) > 1:
raise ValueError("Cannot handle!")
y_type = y_type.pop()
if (y_type not in ["binary", "multiclass", "multilabel-indicator"]):
raise ValueError("{0} is not supported".format(y_type))
if y_type in ["binary", "multiclass"]:
y_true = column_or_1d(y_true)
y_pred = column_or_1d(y_pred)
if y_pred == "binary":
unique_values = np.union1d(y_true, y_pred)
if len(unique_values) > 2:
y_type = "multiclass"
if y_type.startswith('multilabel'):
y_true = csr_matrix(y_true)
y_pred = csr_matrix(y_pred)
y_type = 'multilabel-indicator'
return y_type, y_true, y_pred
def _weighted_sum(sample_score, sample_weight, normalize=False):
if normalize:
return np.average(sample_score, weights=sample_weight)
elif sample_weight is not None:
return np.dot(sample_score, sample_weight)
else:
return sample_score.sum()
def accuracy_score(y_true, y_pred, normalize=True, sample_weight=None):
y_type, y_true, y_pred = _check_targets(y_true, y_pred)
if y_type.startswith('multilabel'):
differing_labels = count_nonzero(y_true - y_pred, axis=1)
score = differing_labels == 0
else:
score = y_true == y_pred
return _weighted_sum(score, sample_weight, normalize)
|
[
"luoshao23@gmail.com"
] |
luoshao23@gmail.com
|
4a3e93bdc7b589f147e164f7c8dae95f265344d0
|
6c1527b2dc3f944b8907d0de5bda6cdfbaeb1f7f
|
/otree-core-master/otree/asgi.py
|
890f1dd51a9d99f28048c1cc560b3593a729526c
|
[
"MIT"
] |
permissive
|
dcthomas4679/otree
|
f0a9204b12cd395e55fd9b77ac90584c2cd3c049
|
363a05d2f70f9225628e4857473dedcb449018dc
|
refs/heads/master
| 2021-06-23T20:07:02.499724
| 2020-11-18T15:32:30
| 2020-11-18T15:32:30
| 37,225,765
| 1
| 1
|
NOASSERTION
| 2021-06-10T23:28:55
| 2015-06-10T22:22:33
|
Python
|
UTF-8
|
Python
| false
| false
| 926
|
py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import absolute_import # for channels module
import os
import channels.asgi
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "settings")
from otree.common_internal import (
release_any_stale_locks, get_redis_conn # noqa
)
release_any_stale_locks()
# clear any tasks in Huey DB, so they don't pile up over time,
# especially if you run the server without the timeoutworker to consume the
# tasks.
# ideally we would only schedule a task in Huey if timeoutworker is running,
# so that we don't pile up messages that never get consumed, but I don't know
# how and when to check if Huey is running, in a performant way.
# this code is also in timeoutworker.
from huey.contrib.djhuey import HUEY # noqa
HUEY.flush()
from otree.bots.browser import redis_flush_bots # noqa
redis_flush_bots(get_redis_conn())
channel_layer = channels.asgi.get_channel_layer()
|
[
"dcthomas@gmail.com"
] |
dcthomas@gmail.com
|
b469199380c66c46be47c9d6a2ca9a4c78298f1b
|
9b53a4c0a1980aeb13b73d905afb3322b26def52
|
/page/classtimetablePage/room_managementPage/degree_course_classmate_r.py
|
f48eb36a5d5dce6ea4f4bf9693c3b4d7f7562552
|
[] |
no_license
|
xmaimiao/wmPC
|
6340386aac10e4c8273ec4aec53a6494820e46ff
|
07319cc6f4e0bf1a53bf61b9baf1c8440dfc02bd
|
refs/heads/master
| 2022-12-30T09:50:56.908362
| 2020-10-18T12:49:38
| 2020-10-18T12:49:38
| 305,076,069
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,295
|
py
|
from common.contants import degree_course_classmate_r_dir
from page.basepage import BasePage
class Degree_Course_Classmate_R(BasePage):
def get_lessontime(self):
'''
驗證顯示了授課時間
'''
return self.step(degree_course_classmate_r_dir,"get_lessontime")
def get_lesson_student_num(self):
'''
驗證顯示了課程總人數
'''
result = self.step(degree_course_classmate_r_dir, "get_lesson_student_num")
return int(re.search("(\d+)",result).group(1))
def search_studentname(self,stu_keywords):
'''
查詢學生姓名
'''
self._params["stu_keywords"] = stu_keywords
return self.step(degree_course_classmate_r_dir,"search_studentname")
def search_studentstaffNo(self,stu_keywords):
'''
查詢學生學號
'''
self._params["stu_keywords"] = stu_keywords
return self.step(degree_course_classmate_r_dir,"search_studentstaffNo")
def back_to_room_management_degree(self):
self.step(degree_course_classmate_r_dir, "back_to_room_management_degree")
from page.classtimetablePage.room_managementPage.room_management_degree import Room_Management_Degree
return Room_Management_Degree(self._driver)
|
[
"765120214@qq.com"
] |
765120214@qq.com
|
c6eea2c388f7e1c2c5d2a8ef0481770d4e11c70e
|
1285703d35b5a37734e40121cd660e9c1a73b076
|
/codility/5_count_div.py
|
b5e1146cfd9a5719bf56ac1dfb738d5edb83f263
|
[] |
no_license
|
takin6/algorithm-practice
|
21826c711f57131108168775f08e4e13d07a3b38
|
f4098bea2085a77d11c29e1593b3cc3f579c24aa
|
refs/heads/master
| 2022-11-30T09:40:58.083766
| 2020-08-07T22:07:46
| 2020-08-07T22:07:46
| 283,609,862
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 362
|
py
|
def solution(A, B, K):
# cur = (A-1) // 2
# divisibles = [cur]
# for i in range(A, B+1):
# if i % K == 0:
# cur += 1
# divisibles.append(cur)
# return divisibles[-1] - divisibles[0]
if A % K == 0:
return (B-A) // K + 1
else:
return (B - (A - A%K)) // K
print(solution(6,11,2))
|
[
"takayukiinoue116@gmail.com"
] |
takayukiinoue116@gmail.com
|
76f5e6b143c51b334cbf71e4876ac6baff943cc9
|
f305f84ea6f721c2391300f0a60e21d2ce14f2a5
|
/20_杂题/atc競プロ/AtCoder Beginner Contest/136/C - Build Stairs.py
|
a3c7a4f9144316a94745655b759a0702d680cf76
|
[] |
no_license
|
981377660LMT/algorithm-study
|
f2ada3e6959338ae1bc21934a84f7314a8ecff82
|
7e79e26bb8f641868561b186e34c1127ed63c9e0
|
refs/heads/master
| 2023-09-01T18:26:16.525579
| 2023-09-01T12:21:58
| 2023-09-01T12:21:58
| 385,861,235
| 225
| 24
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 513
|
py
|
# 给你一串数,每个数都只能做将高度减1和不改变两种操作,问这串数是否可以变成不减序列
from typing import List
def buildStairs(nums: List[int]) -> bool:
"""倒序遍历"""
for i in range(len(nums) - 2, -1, -1):
if nums[i] > nums[i + 1]:
nums[i] -= 1
if nums[i] > nums[i + 1]:
return False
return True
n = int(input())
nums = list(map(int, input().split()))
print("Yes" if buildStairs(nums) else "No")
|
[
"lmt2818088@gmail.com"
] |
lmt2818088@gmail.com
|
8b93eb66cc12288ac281f6f475b7920c885c8b8e
|
6685318f6ef4ea44b38b8ecc5dd2c3186d895bb3
|
/test/test_rw_lock.py
|
4d039ede68ec9c518390a92ff6484e174ac3fac6
|
[
"MIT",
"HPND"
] |
permissive
|
samrushing/shrapnel
|
cd372da1f08a43776ffc6d39c71f1758269db0fa
|
5835454dcfd4b526d7b117d11e4384f5ed60ae03
|
refs/heads/master
| 2021-01-18T09:45:35.979434
| 2012-06-22T04:39:42
| 2012-06-22T04:39:42
| 4,021,029
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 4,784
|
py
|
# Copyright (c) 2002-2011 IronPort Systems and Cisco Systems
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
"""Unittests for read-write lock."""
__version__ = '$Revision: #1 $'
import coro
import coro_unittest
import unittest
class Test(unittest.TestCase):
def test_write_block_interrupt_schedule(self):
"""Test write block interrupt then schedule on rw_lock."""
lock = coro.rw_lock()
lock.read_lock()
self._resume_count = 0
threads = []
# Spawn some threads that will block and be interrupted.
for unused in xrange(5):
threads.append(coro.spawn(self._write_block, lock))
# Spawn a thread that we will not interrupt.
no_interrupt_thread = coro.spawn(self._write_block, lock)
coro.yield_slice()
# Cause an interrupt on these threads.
for t in threads:
t.shutdown()
# Now try to get the non-interrupted thread to run.
lock.read_unlock()
coro.yield_slice()
# Verify that it ran.
self.assertEqual(self._resume_count, 1)
def _write_block(self, lock):
lock.write_lock()
self._resume_count += 1
lock.write_unlock()
def _read_block(self, lock):
lock.read_lock()
self._resume_count += 1
lock.read_unlock()
def test_write_block_schedule_interrupt(self):
"""Test write block schedule then interrupt on rw_lock."""
lock = coro.rw_lock()
lock.read_lock()
self._resume_count = 0
threads = []
# Spawn some threads that will block and be interrupted.
for unused in xrange(5):
threads.append(coro.spawn(self._write_block, lock))
# Spawn a thread that we will not interrupt.
no_interrupt_thread = coro.spawn(self._write_block, lock)
coro.yield_slice()
# Schedule all of the threads.
lock.read_unlock()
# Now interrupt them.
for t in threads:
t.shutdown()
coro.yield_slice()
# Verify that it ran.
self.assertEqual(self._resume_count, 1)
def test_read_block_interrupt_schedule(self):
"""Test read block interrupt then schedule on rw_lock."""
lock = coro.rw_lock()
lock.write_lock()
self._resume_count = 0
threads = []
# Spawn some threads that will block and be interrupted.
for unused in xrange(5):
threads.append(coro.spawn(self._read_block, lock))
# Spawn a thread that we will not interrupt.
no_interrupt_thread = coro.spawn(self._read_block, lock)
coro.yield_slice()
# Cause an interrupt on these threads.
for t in threads:
t.shutdown()
# Now try to get the non-interrupted thread to run.
lock.write_unlock()
coro.yield_slice()
# Verify that it ran.
self.assertEqual(self._resume_count, 1)
def test_read_block_schedule_interrupt(self):
"""Test read block schedule then interrupt on rw_lock."""
lock = coro.rw_lock()
lock.write_lock()
self._resume_count = 0
threads = []
# Spawn some threads that will block and be interrupted.
for unused in xrange(5):
threads.append(coro.spawn(self._read_block, lock))
# Spawn a thread that we will not interrupt.
no_interrupt_thread = coro.spawn(self._read_block, lock)
coro.yield_slice()
# Schedule all of the threads.
lock.write_unlock()
# Now interrupt them.
for t in threads:
t.shutdown()
coro.yield_slice()
# Verify that it ran.
self.assertEqual(self._resume_count, 1)
if __name__ == '__main__':
coro_unittest.run_tests()
|
[
"mark@peek.org"
] |
mark@peek.org
|
22f57b81144a68d7684aceacd81df68b3acc95eb
|
bba0a10d2bced816410badcd0792826f0cee5fa8
|
/pythoncode/cext/setup_CubicStokeslet2D.py
|
538993e6c3be519f4e2c893edf31cd04f9dd6eb4
|
[] |
no_license
|
breecummins/polymercode
|
8aae6bc3ff52d9824158a9c7835f0478693ff165
|
f65b515ddf23bac38eacbc9d32ecd9ec5ec3de12
|
refs/heads/master
| 2021-05-13T11:54:12.029811
| 2018-01-11T19:39:11
| 2018-01-11T19:39:11
| 117,145,003
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 519
|
py
|
'''python setup_foo.py build will build the extension module
foo.so in ./build/lib.arch-id/'''
from distutils.core import setup, Extension
import sys, os, numpy
includen=[numpy.get_include()]
module1 = Extension('CubicStokeslet2D',
include_dirs=includen,
sources = ['CubicStokeslet2D.c'])
setup (name = '2D Cubic Stokeslet module',
version = '1.0',
description = 'Functions implementing regularized Stokeslets in 2D using a/( )^3 blob.',
ext_modules = [module1])
|
[
"breecummins@gmail.com"
] |
breecummins@gmail.com
|
ec3215b8fe8c9daf0af807cc25701f60f26bc323
|
089e53103ab25cd57c2d12b3f68533ef4c49493c
|
/backend/meme_world_27493/wsgi.py
|
6a8b4f7e1cdd26f68fa75ca33a59179750134310
|
[] |
no_license
|
crowdbotics-apps/meme-world-27493
|
b20ddc08fe77a59fa57e25e751dde40591da16fa
|
5ad8483eb9d1949486bfb6f9fa077a9c23c34818
|
refs/heads/master
| 2023-05-02T05:48:06.923336
| 2021-05-27T20:53:49
| 2021-05-27T20:53:49
| 371,501,481
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 409
|
py
|
"""
WSGI config for meme_world_27493 project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/2.2/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'meme_world_27493.settings')
application = get_wsgi_application()
|
[
"team@crowdbotics.com"
] |
team@crowdbotics.com
|
536cd089feace7b1af6f28742e70a3fdfe2f2542
|
ca7aa979e7059467e158830b76673f5b77a0f5a3
|
/Python_codes/p03700/s311983403.py
|
e0ae77bcda00ba4e2c9dd4f6e5d43da7680cbe05
|
[] |
no_license
|
Aasthaengg/IBMdataset
|
7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901
|
f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8
|
refs/heads/main
| 2023-04-22T10:22:44.763102
| 2021-05-13T17:27:22
| 2021-05-13T17:27:22
| 367,112,348
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 285
|
py
|
N,A,B=map(int,input().split())
H=[]
for _ in range(N):
H.append(int(input()))
ok=10**9+1
ng=0
while ok-ng>1:
mid=(ok+ng)//2
dmg=B*mid
tgt=0
for item in H:
tgt+=-(-max(item-dmg,0)//(A-B))
if tgt<=mid:
ok=mid
else:
ng=mid
print(ok)
|
[
"66529651+Aastha2104@users.noreply.github.com"
] |
66529651+Aastha2104@users.noreply.github.com
|
6dc546389a6f10aacd4eb90a1f094923465422c1
|
50f4509fcbede0767b15de21262137cf5aa93c7a
|
/tests/modules/teams/resources/test_modifying_teams.py
|
bc830bb08c8d273b8496b15b570605e886512b63
|
[
"MIT"
] |
permissive
|
millen1m/flask-restplus-server-example
|
8384326b6cdec3c076db53bf392659e53527749f
|
e1089d64e72d8fc2263675520825782c771e6f52
|
refs/heads/master
| 2021-01-11T19:00:24.591394
| 2017-01-18T02:31:26
| 2017-01-18T02:31:26
| 79,290,124
| 1
| 0
| null | 2017-01-18T01:20:52
| 2017-01-18T01:20:52
| null |
UTF-8
|
Python
| false
| false
| 6,913
|
py
|
# encoding: utf-8
# pylint: disable=missing-docstring
import json
from app.modules.teams import models
def test_new_team_creation(flask_app_client, db, regular_user):
# pylint: disable=invalid-name
team_title = "Test Team Title"
with flask_app_client.login(regular_user, auth_scopes=('teams:write', )):
response = flask_app_client.post('/api/v1/teams/', data={'title': team_title})
assert response.status_code == 200
assert response.content_type == 'application/json'
assert set(response.json.keys()) >= {'id', 'title'}
assert response.json['title'] == team_title
# Cleanup
team = models.Team.query.get(response.json['id'])
assert team.title == team_title
db.session.delete(team)
db.session.commit()
def test_new_team_first_member_is_creator(flask_app_client, db, regular_user):
# pylint: disable=invalid-name
team_title = "Test Team Title"
with flask_app_client.login(
regular_user,
auth_scopes=('teams:write', 'teams:read')
):
response = flask_app_client.post('/api/v1/teams/', data={'title': team_title})
assert response.status_code == 200
assert response.content_type == 'application/json'
assert set(response.json.keys()) >= {'id', 'title'}
assert response.json['title'] == team_title
assert len(response.json['members']) == 1
assert response.json['members'][0]['user']['id'] == regular_user.id
assert response.json['members'][0]['is_leader'] == True
# Cleanup
team = models.Team.query.get(response.json['id'])
assert team.title == team_title
db.session.delete(team)
db.session.commit()
def test_new_team_creation_with_invalid_data_must_fail(flask_app_client, regular_user):
# pylint: disable=invalid-name
with flask_app_client.login(regular_user, auth_scopes=('teams:write', )):
response = flask_app_client.post('/api/v1/teams/', data={'title': ""})
assert response.status_code == 409
assert response.content_type == 'application/json'
assert set(response.json.keys()) >= {'status', 'message'}
def test_update_team_info(flask_app_client, regular_user, team_for_regular_user):
# pylint: disable=invalid-name
team_title = "Test Team Title"
with flask_app_client.login(regular_user, auth_scopes=('teams:write', )):
response = flask_app_client.patch(
'/api/v1/teams/%d' % team_for_regular_user.id,
content_type='application/json',
data=json.dumps([
{
'op': 'replace',
'path': '/title',
'value': team_title
},
])
)
assert response.status_code == 200
assert response.content_type == 'application/json'
assert set(response.json.keys()) >= {'id', 'title'}
assert response.json['id'] == team_for_regular_user.id
assert response.json['title'] == team_title
assert team_for_regular_user.title == team_title
def test_update_team_info_with_invalid_data_must_fail(
flask_app_client,
regular_user,
team_for_regular_user
):
# pylint: disable=invalid-name
with flask_app_client.login(regular_user, auth_scopes=('teams:write', )):
response = flask_app_client.patch(
'/api/v1/teams/%d' % team_for_regular_user.id,
content_type='application/json',
data=json.dumps([
{
'op': 'replace',
'path': '/title',
'value': '',
},
])
)
assert response.status_code == 409
assert response.content_type == 'application/json'
assert set(response.json.keys()) >= {'status', 'message'}
def test_update_team_info_without_value_must_fail(
flask_app_client,
regular_user,
team_for_regular_user
):
with flask_app_client.login(regular_user, auth_scopes=('teams:write', )):
response = flask_app_client.patch(
'/api/v1/teams/%d' % team_for_regular_user.id,
content_type='application/json',
data=json.dumps([
{
'op': 'replace',
'path': '/title',
}
])
)
assert response.status_code == 422
assert response.content_type == 'application/json'
assert set(response.json.keys()) >= {'status', 'message'}
def test_update_team_info_without_slash_in_path_must_fail(
flask_app_client,
regular_user,
team_for_regular_user
):
with flask_app_client.login(regular_user, auth_scopes=('teams:write',)):
response = flask_app_client.patch(
'/api/v1/teams/%d' % team_for_regular_user.id,
content_type='application/json',
data=json.dumps([
{
'op': 'replace',
'path': 'title',
'value': 'New Team Value',
}
])
)
assert response.status_code == 422
assert response.content_type == 'application/json'
assert set(response.json.keys()) >= {'status', 'message'}
def test_team_deletion(flask_app_client, regular_user, team_for_regular_user):
with flask_app_client.login(regular_user, auth_scopes=('teams:write', )):
response = flask_app_client.delete(
'/api/v1/teams/%d' % team_for_regular_user.id
)
assert response.status_code == 204
assert response.content_type == 'application/json'
def test_add_new_team_member(flask_app_client, db, regular_user, admin_user, team_for_regular_user):
# pylint: disable=invalid-name
with flask_app_client.login(regular_user, auth_scopes=('teams:write', )):
response = flask_app_client.post(
'/api/v1/teams/%d/members/' % team_for_regular_user.id,
data={
'user_id': admin_user.id,
}
)
assert response.status_code == 200
assert response.content_type == 'application/json'
assert set(response.json.keys()) >= {'team', 'user', 'is_leader'}
assert response.json['team']['id'] == team_for_regular_user.id
assert response.json['user']['id'] == admin_user.id
# Cleanup
team_members = models.TeamMember.query.filter_by(team=team_for_regular_user, user=admin_user)
assert team_members.count() == 1
team_members.delete()
db.session.commit()
def test_delete_team_member(
flask_app_client, db, regular_user, readonly_user, team_for_regular_user
):
# pylint: disable=invalid-name,unused-argument
with flask_app_client.login(regular_user, auth_scopes=('teams:write', )):
response = flask_app_client.delete(
'/api/v1/teams/%d/members/%d' % (team_for_regular_user.id, readonly_user.id),
)
assert response.status_code == 200
assert response.content_type == 'application/json'
|
[
"frolvlad@gmail.com"
] |
frolvlad@gmail.com
|
0c29edef5beb6d2c825c72a8cb36a93a10416184
|
9f749833be23b5fa96717236d0c2f76efb43f440
|
/lib/bbox.py
|
66e4c7953f1e4b078e351e3b548afd70ecd219a4
|
[] |
no_license
|
Shmuma/blackboxchallenge
|
e4318e5a8a170c56c11afbb17c5567740178a386
|
5a185a8e396276eae67c708de5adfb243d9dca67
|
refs/heads/master
| 2020-05-29T15:07:11.118589
| 2016-05-20T10:18:11
| 2016-05-20T10:18:11
| 60,077,313
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 519
|
py
|
"""
Custom bbox interface class
"""
def get_bbox(name=None):
"""
Get bbox instance module by name. Name == None loads original bbox module. All others must
match a module from custom package.
:param name:
:return:
"""
if name is None:
print "We'll use original bbox implementation"
import interface as bbox
return bbox
print "Will use bbox from module custom." + name
res = __import__("lib.custom." + name)
return getattr(getattr(res, "custom"), name)
|
[
"max.lapan@gmail.com"
] |
max.lapan@gmail.com
|
3c4e9d0c14caca0b4ae49adc9910f11d2bd79df8
|
0203e5a6d7beb1e0f83113dac4c167b171756f24
|
/test/system/volume/CREATE_VOL_BASIC_7.py
|
004520d7ebae215be424514d6f740c7e378dc75d
|
[
"BSD-3-Clause"
] |
permissive
|
Wonchul08Lee/poseidonos
|
eaafe277fc56a0f5b5fcca3b70acc9bfe5d5d1ae
|
6fe410cdf88f3243ad9210f763c2b5a2f7e8b46a
|
refs/heads/main
| 2023-03-30T13:41:09.660647
| 2021-04-08T06:43:26
| 2021-04-08T06:43:26
| 355,819,746
| 0
| 0
|
BSD-3-Clause
| 2021-04-08T08:17:27
| 2021-04-08T08:17:26
| null |
UTF-8
|
Python
| false
| false
| 1,796
|
py
|
#!/usr/bin/env python3
import subprocess
import os
import sys
import json
sys.path.append("../lib/")
sys.path.append("../array/")
import json_parser
import ibofos
import cli
import test_result
import ibofos_constant
import MOUNT_ARRAY_BASIC_1
import volume
VOL_NAME = "vol7"
VOL_SIZE = ibofos_constant.SIZE_1GB * 5
VOL_IOPS = 2**64-1
VOL_BW = 2**64-1
def clear_result():
if os.path.exists( __file__ + ".result"):
os.remove( __file__ + ".result")
def check_result(detail):
expected_list = []
expected_list.append(volume.Volume(VOL_NAME, VOL_SIZE, VOL_IOPS, VOL_BW))
data = json.loads(detail)
actual_list = []
for item in data['Response']['result']['data']['volumes']:
vol = volume.Volume(item['name'], item['total'], item['maxiops'], item['maxbw'])
actual_list.append(vol)
if len(actual_list) != len(expected_list):
return "fail"
for actual in actual_list:
checked = False
for expected in expected_list:
if actual.name == expected.name and actual.total == expected.total and actual.maxiops == expected.maxiops and actual.maxbw == expected.maxbw:
checked = True
break
if checked == False:
return "fail"
return "pass"
def set_result(detail):
out = cli.list_volume("")
result = check_result(out)
code = json_parser.get_response_code(out)
with open(__file__ + ".result", "w") as result_file:
result_file.write(result + " (" + str(code) + ")" + "\n" + out)
def execute():
clear_result()
MOUNT_ARRAY_BASIC_1.execute()
out = cli.create_volume(VOL_NAME, str(VOL_SIZE), str(VOL_IOPS), str(VOL_BW), "")
return out
if __name__ == "__main__":
out = execute()
set_result(out)
ibofos.kill_ibofos()
|
[
"poseidonos@samsung.net"
] |
poseidonos@samsung.net
|
93dc8f78e85bacbd942755df76bff4b12fd343e0
|
5a0dfe1326bb166d6dfaf72ce0f89ab06e963e2c
|
/leetcode/lc350.py
|
d69cd8670ad1fc664de20f243397c783c0888bbe
|
[] |
no_license
|
JasonXJ/algorithms
|
7bf6a03c3e26f917a9f91c53fc7b2c65669f7692
|
488d93280d45ea686d30b0928e96aa5ed5498e6b
|
refs/heads/master
| 2020-12-25T15:17:44.345596
| 2018-08-18T07:20:27
| 2018-08-18T07:20:27
| 67,798,458
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 508
|
py
|
from collections import Counter
class Solution(object):
def intersect(self, nums1, nums2):
"""
:type nums1: List[int]
:type nums2: List[int]
:rtype: List[int]
"""
c1 = Counter(nums1)
c2 = Counter(nums2)
rv = []
for x, c in c1.items():
rv.extend([x] * min(c, c2[x]))
return rv
def test():
assert Solution().intersect([1,2,2,1], [2,2]) == [2,2]
assert Solution().intersect([1,2,2,3,1], [2,2]) == [2,2]
|
[
"lxj2048@gmail.com"
] |
lxj2048@gmail.com
|
19cb10690d7d4a203167f0b12a733712a949675d
|
dd208e5d00cce0a5a38d881af8a59aaeb532e44b
|
/Python_level_1/Python_08/loto.py
|
49752d4fa5212f70730115d5ae6aa85bf544d99e
|
[] |
no_license
|
dKosarevsky/geekbrains
|
93324fc8c70db93f253ba844185ad2ef83126e6c
|
c8eedfe8a89ff482a075a8506a821c22a08995a1
|
refs/heads/master
| 2020-04-19T06:28:56.425038
| 2019-02-03T05:58:19
| 2019-02-03T05:58:19
| 168,019,373
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 6,894
|
py
|
#!/usr/bin/python3
"""
== Лото ==
Правила игры в лото.
Игра ведется с помощью специальных карточек, на которых отмечены числа,
и фишек (бочонков) с цифрами.
Количество бочонков — 90 штук (с цифрами от 1 до 90).
Каждая карточка содержит 3 строки по 9 клеток. В каждой строке по 5 случайных цифр,
расположенных по возрастанию. Все цифры в карточке уникальны. Пример карточки:
--------------------------
9 43 62 74 90
2 27 75 78 82
41 56 63 76 86
--------------------------
В игре 2 игрока: пользователь и компьютер. Каждому в начале выдается
случайная карточка.
Каждый ход выбирается один случайный бочонок и выводится на экран.
Также выводятся карточка игрока и карточка компьютера.
Пользователю предлагается зачеркнуть цифру на карточке или продолжить.
Если игрок выбрал "зачеркнуть":
Если цифра есть на карточке - она зачеркивается и игра продолжается.
Если цифры на карточке нет - игрок проигрывает и игра завершается.
Если игрок выбрал "продолжить":
Если цифра есть на карточке - игрок проигрывает и игра завершается.
Если цифры на карточке нет - игра продолжается.
Побеждает тот, кто первый закроет все числа на своей карточке.
Пример одного хода:
Новый бочонок: 70 (осталось 76)
------ Ваша карточка -----
6 7 49 57 58
14 26 - 78 85
23 33 38 48 71
--------------------------
-- Карточка компьютера ---
7 87 - 14 11
16 49 55 88 77
15 20 - 76 -
--------------------------
Зачеркнуть цифру? (y/n)
Подсказка: каждый следующий случайный бочонок из мешка удобно получать
с помощью функции-генератора.
Подсказка: для работы с псевдослучайными числами удобно использовать
модуль random: http://docs.python.org/3/library/random.html
"""
import random
class LotoGame:
def __init__(self, player, computer):
self._player = player
self._computer = computer
# Тут с помощью random.sample я получаю не повторяющиеся числа
NUMBERS_COUNT = 90
MAX_NUMBER = 90
self._numbers_in_bag = random.sample(range(1, MAX_NUMBER + 1), NUMBERS_COUNT)
def _get_number(self):
return self._numbers_in_bag.pop()
def start(self):
for _ in range(len(self._numbers_in_bag)):
print(self._player, self._computer)
number = self._get_number()
print('Новый бочонок {}, осталось {}'.format(number, len(self._numbers_in_bag)))
choice = input('Хотите зачеркуть? y/n:\n')
if choice == 'y':
# Тут мы зачеркиваем число если оно есть, если нет, а игрок попытался, то он проиграл.
if not self._player.try_stroke_number(number):
print('Игрок проиграл!')
break
elif self._player.has_number(number):
print('Игрок проиграл!')
break
# Компьютер не ошибается =)
if self._computer.has_number(number):
self._computer.try_stroke_number(number)
class LotoCard:
def __init__(self, player_type):
self.player_type = player_type
self._card = [[],
[],
[]]
self._MAX_NUMBER = 90
self._MAX_NUMBERS_IN_CARD = 15
self._numbers_stroked = 0
NEED_SPACES = 4
NEED_NUMBERS = 5
# Числа для будущей карты лото
self._numbers = random.sample(range(1, self._MAX_NUMBER + 1), self._MAX_NUMBERS_IN_CARD)
# цикл вставляющий пробелы и цифры в нашу карту
for line in self._card:
for _ in range(NEED_SPACES):
line.append(' ')
for _ in range(NEED_NUMBERS):
line.append(self._numbers.pop())
# Данная функция возвращает либо число, которое непосредственно на линии, либо случайное, чтобы случайно расставить пробелы.
def check_sort_item(item):
if isinstance(item, int):
return item
return random.randint(1, self._MAX_NUMBER)
# Здесь мы именно сортируем списки внутри списка
for index, line in enumerate(self._card):
self._card[index] = sorted(line, key=check_sort_item)
def has_number(self, number):
for line in self._card:
if number in line:
return True
return False
def try_stroke_number(self, number):
for index, line in enumerate(self._card):
for num_index, number_in_card in enumerate(line):
if number == number_in_card:
self._card[index][num_index] = '-'
self._numbers_stroked += 1
if self._numbers_stroked >= self._MAX_NUMBERS_IN_CARD:
raise Exception('{} победил!'.format(self.player_type))
return True
return False
# TODO: rjust
# Метод для строкового представления объекта
def __str__(self):
header = '\n{}:\n--------------------------'.format(self.player_type)
body = '\n'
for line in self._card:
for field in line:
body += str(field) + ' '
if len(str(field)) < 2:
body += ' '
body += '\n'
return header + body
human_player = LotoCard('Игрок')
computer_player = LotoCard('Компьютер')
game = LotoGame(human_player, computer_player)
game.start()
|
[
"kosarevsky.d@ya.ru"
] |
kosarevsky.d@ya.ru
|
89bf75f9a8e4be71ec628bf92194328a5ded7fb6
|
5a52ccea88f90dd4f1acc2819997fce0dd5ffb7d
|
/alipay/aop/api/domain/AntfortuneContentCommunitySegmentRealtimeSaveModel.py
|
a28175aafc016fa2026cab857b02fdab2b36cc34
|
[
"Apache-2.0"
] |
permissive
|
alipay/alipay-sdk-python-all
|
8bd20882852ffeb70a6e929038bf88ff1d1eff1c
|
1fad300587c9e7e099747305ba9077d4cd7afde9
|
refs/heads/master
| 2023-08-27T21:35:01.778771
| 2023-08-23T07:12:26
| 2023-08-23T07:12:26
| 133,338,689
| 247
| 70
|
Apache-2.0
| 2023-04-25T04:54:02
| 2018-05-14T09:40:54
|
Python
|
UTF-8
|
Python
| false
| false
| 1,970
|
py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import json
from alipay.aop.api.constant.ParamConstants import *
class AntfortuneContentCommunitySegmentRealtimeSaveModel(object):
def __init__(self):
self._live_id = None
self._request_time = None
self._segment_info = None
@property
def live_id(self):
return self._live_id
@live_id.setter
def live_id(self, value):
self._live_id = value
@property
def request_time(self):
return self._request_time
@request_time.setter
def request_time(self, value):
self._request_time = value
@property
def segment_info(self):
return self._segment_info
@segment_info.setter
def segment_info(self, value):
self._segment_info = value
def to_alipay_dict(self):
params = dict()
if self.live_id:
if hasattr(self.live_id, 'to_alipay_dict'):
params['live_id'] = self.live_id.to_alipay_dict()
else:
params['live_id'] = self.live_id
if self.request_time:
if hasattr(self.request_time, 'to_alipay_dict'):
params['request_time'] = self.request_time.to_alipay_dict()
else:
params['request_time'] = self.request_time
if self.segment_info:
if hasattr(self.segment_info, 'to_alipay_dict'):
params['segment_info'] = self.segment_info.to_alipay_dict()
else:
params['segment_info'] = self.segment_info
return params
@staticmethod
def from_alipay_dict(d):
if not d:
return None
o = AntfortuneContentCommunitySegmentRealtimeSaveModel()
if 'live_id' in d:
o.live_id = d['live_id']
if 'request_time' in d:
o.request_time = d['request_time']
if 'segment_info' in d:
o.segment_info = d['segment_info']
return o
|
[
"jishupei.jsp@alibaba-inc.com"
] |
jishupei.jsp@alibaba-inc.com
|
34b1a25c0fb928d0dc95313261a2ca678f1a44bb
|
307829c966febd27a486984d7477b9984a5acaf2
|
/trunk/zKiosk.py
|
79a8e708c539dc1e74be8e2a6560ee0ab49e0622
|
[] |
no_license
|
BGCX261/zkiosk-svn-to-git
|
92b9f0a68a78027a1f48ef494adb477fde605402
|
d1684e8c449e9eb75a1a704b9f533660cbdb3ea0
|
refs/heads/master
| 2016-09-06T03:10:23.633306
| 2015-08-25T15:54:31
| 2015-08-25T15:54:31
| 41,499,498
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,711
|
py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import gtk
import pygtk
import webkit
import ConfigParser
from os import popen, path
from sys import path as spath
#Creamos la variable del módulo para leer la configuración
cfg = ConfigParser.ConfigParser()
localpath = spath[0]
localpath += '/' # ''' Obtenemos la ruta en la que está el programa y le agregamos / al final '''
configpath = path.expanduser("~/.zkioskrc")
if path.exists(configpath): #'''Si existe el archivo de configuración, lo lee'''
cfg.read(configpath)
else:
configf = ConfigParser.ConfigParser() # '''En caso de que no exista, crea uno con valores por default'''
configf.add_section("Biblio")
configf.set("Biblio", "web","http://148.204.48.96/uhtbin/webcat")
configf.set("Biblio", "theme", "gtkrc")
configfl = open(configpath, "wb") #''' Guarda el archivo que creamos '''
configf.write(configfl)
configfl.close()
cfg.read(configpath)
#Asignamos los valores de la configuracion a variables para su uso posterior
web = cfg.get("Biblio","web")
theme = cfg.get("Biblio","theme")
class zKiosk:
def __init__(self):
self.builder = gtk.Builder()
self.builder.add_from_file(localpath + 'zkiosk.ui')
self.window = self.builder.get_object('window')
self.browser = self.builder.get_object('Browser')
self.webview = webkit.WebView()
self.browser.add(self.webview)
#Cambia el user-agent (por cuestión estética y de identificación para estadísticas)
Settings = self.webview.get_settings()
useragent = Settings.get_property("user-agent")
useragent = useragent.replace(' Safari/',' zombieKiosk/DrunkEngine Safari/')
Settings.set_property("user-agent",useragent)
Settings.set_property("enable-plugins",False)
#cambiando a pantalla completa la ventana
maxx = gtk.gdk.screen_width()
maxy = gtk.gdk.screen_height()
self.window.set_size_request(maxx,maxy)
#Parseamos el archivo del estilo visual
gtk.rc_reset_styles(self.window.get_settings())
gtk.rc_parse(theme)
gtk.rc_add_default_file(theme)
gtk.rc_reparse_all()
#muestra los elementos de la ventana
self.window.show_all()
#-------DEBUG---------
self.webview.connect("navigation-policy-decision-requested",self.VerUri)
#conectando los botones y eventos de la ventana a las funciones
self.builder.connect_signals(self)
def home(self, widget):
self.webview.load_uri(web)
def back(self, widget):
self.webview.go_back()
def fwd(self, widget):
self.webview.go_forward()
def refresh(self, widget):
self.webview.reload()
def about(self, widget):
self.window.set_modal(False) #Quita la exclusividad del foco de la ventana principal y permite controlar el cuadro de acerca de..
self.About=self.builder.get_object('aboutb') #Accesamos al objeto correspondiente a ese dialogo
def openW(widget,url,url2): # Evita abrir el sitio en el cuadro de dialogo acerca de
print url
gtk.about_dialog_set_url_hook(openW,"") # Evita abrir el sitio en el cuadro de dialogo acerca de
# Obtenemos los eventos generados
Response = self.About.run()
#Si se presiona el boton de cerrar o se cierra el cuadro lo oculta y restaura el foco en la ventana principal
if Response == gtk.RESPONSE_DELETE_EVENT or Response == gtk.RESPONSE_CANCEL:
self.About.hide()
self.window.set_modal(True)
def noclose(widget, event,data): #evita que se cierre la ventana principal
return True
def VerUri(self,view,frame,net_req,nav_act,pol_dec):
uri = net_req.get_uri()
if( "http://azul.bnct.ipn.mx" in uri ):
frame.load_uri(web)
return False
if __name__ == '__main__':
w = zKiosk()
popen("xsetroot -cursor_name left_ptr")
w.webview.load_uri(web)
gtk.main()
|
[
"you@example.com"
] |
you@example.com
|
26eb96764a6b103e7c9690e3de31eddfe3a87451
|
67ff994455d3369ab16839f24a21325571d0f86c
|
/outrigger/io/gtf.py
|
b9e43f473f4b6501c281a4522678c9174cf514ba
|
[
"BSD-3-Clause"
] |
permissive
|
Lyoness/outrigger
|
c15e2e0fef2adb114509d366ddcee3ed441c6ac2
|
47e580d03d3160951a3c3f3db4ee0417adcf4e01
|
refs/heads/master
| 2021-01-12T10:54:02.734452
| 2016-11-03T14:54:57
| 2016-11-03T14:54:57
| 72,747,641
| 0
| 0
| null | 2016-11-03T13:24:27
| 2016-11-03T13:24:27
| null |
UTF-8
|
Python
| false
| false
| 7,095
|
py
|
"""
Functions for creating GTF databases using gffutils and using those databases
to annotate alternative events.
"""
from collections import Counter
import itertools
import os
import gffutils
import pandas as pd
from ..common import SPLICE_TYPE_ISOFORM_EXONS
from ..region import Region
# Annotations from:
# ftp://ftp.sanger.ac.uk/pub/gencode/Gencode_human/release_19/gencode.v19.annotation.gtf.gz
gene_transcript = set(('gene', 'transcript'))
def maybe_analyze(db):
try:
# For gffutils >0.8.7.1
db.analyze()
except AttributeError:
# For compatability with gffutils<=0.8.7.1
db.execute('ANALYZE features')
def transform(f):
if f.featuretype in gene_transcript:
return f
else:
exon_location = '{}:{}:{}-{}:{}'.format(
f.featuretype, f.seqid, f.start, f.stop, f.strand)
exon_id = exon_location
if f.featuretype == 'CDS':
exon_id += ':' + f.frame
f.attributes['location_id'] = [exon_id]
return f
def create_db(gtf_filename, db_filename=None):
db_filename = ':memory:' if db_filename is None else db_filename
db = gffutils.create_db(
gtf_filename,
db_filename,
merge_strategy='merge',
id_spec={'gene': 'gene_id', 'transcript': 'transcript_id',
'exon': 'location_id', 'CDS': 'location_id',
'start_codon': 'location_id',
'stop_codon': 'location_id', 'UTR': 'location_id'},
transform=transform,
force=True,
verbose=True,
disable_infer_genes=True,
disable_infer_transcripts=True,
force_merge_fields=['source'])
maybe_analyze(db)
return db
class SplicingAnnotator(object):
"""Annotates basic features of splicing events: gene ids and names"""
def __init__(self, db, events, splice_type):
self.db = db
self.events = events
self.splice_type = splice_type
self.isoform_exons = SPLICE_TYPE_ISOFORM_EXONS[
self.splice_type.lower()]
self.exon_cols = list(set(itertools.chain(
*self.isoform_exons.values())))
self.exon_cols.sort()
# Make a dataframe with outrigger.Region objects
self.regions = pd.DataFrame(index=self.events.index)
self.region_cols = ['{}_region'.format(x) for x in self.exon_cols]
for exon_col, region_col in zip(self.exon_cols, self.region_cols):
self.regions[region_col] = self.events[exon_col].map(Region)
# Make introns and copy-pastable genome locations for the whole event
intron_regions = self.regions[self.region_cols].apply(
self.event_introns_regions, axis=1)
self.regions = pd.concat([self.regions, intron_regions], axis=1)
self.region_cols.extend(['intron_region', 'event_region'])
# Add the lengths of exons, introns, event region, and the genome
# location ("name") of each intron
self.lengths = self.regions.applymap(len)
self.lengths.columns = [x.replace('_region', '_length')
for x in self.lengths]
intron_names = intron_regions.applymap(lambda x: x.name)
intron_names.columns = [x.replace('_region', '_location')
for x in intron_names]
self.events = pd.concat([self.events, self.lengths, intron_names],
axis=1)
def attributes(self):
"""Retrieve all GTF attributes for each isoform's event"""
ignore_keys = 'location_id', 'exon_id', 'exon_number'
lines = []
for event_id, row in self.events.iterrows():
attributes = pd.Series(name=event_id)
for isoform, exons in self.isoform_exons.items():
for e in exons:
attributes[e] = row[e]
n_exons = len(exons)
exon_ids = row[exons]
keys = set(itertools.chain(
*[self.db[exon_id].attributes.keys()
for exon_id in exon_ids]))
for key in keys:
# Skip the location IDs which is specific to the
# outrigger-built database, and the exon ids which will
# never match up across all exons
if key in ignore_keys:
continue
values = Counter()
for exon_id in exon_ids:
try:
values.update(
self.db[exon_id].attributes[key])
except KeyError:
continue
if len(values) > 0:
# Only use attributes that came up in for all exons
# of the isoform
values = [value for value, count in values.items()
if count == n_exons]
new_key = isoform + '_' + key
attributes[new_key] = ','.join(sorted(values))
lines.append(attributes)
event_attributes = pd.concat(lines, axis=1).T
events_with_attributes = pd.concat([self.events, event_attributes])
return events_with_attributes
def exon_bedfiles(self, folder):
for region_col in self.region_cols:
column = self.regions[region_col]
lines = (region.to_bed_format(event_id)
for event_id, region in column.iteritems())
name = region_col.split('_')[0]
basename = name + '.bed'
filename = os.path.join(folder, basename)
with open(filename, 'w') as f:
f.write('\n'.join(lines) + '\n')
def event_introns_regions(self, exons):
"""Make intron and event regions for an event
Parameters
----------
exons : outrigger.Regions
List of exon ids, e.g. ["exon:chr1:100-200:+",
"exon:chr1:300-400:+"]
Returns
-------
regions : dict
"""
first_exon = exons[0]
last_exon = exons[-1]
chrom = first_exon.chrom
strand = first_exon.strand
if strand == '-':
intron_stop = first_exon.start
intron_start = last_exon.stop
event_start = last_exon.start
event_stop = first_exon.stop
else:
# If strand is positive or undefined
intron_start = first_exon.stop
intron_stop = last_exon.start
event_start = first_exon.start
event_stop = last_exon.stop
intron = Region('intron:{chrom}:{start}-{stop}:{strand}'.format(
chrom=chrom, start=intron_start, stop=intron_stop,
strand=strand))
event = Region('event:{chrom}:{start}-{stop}:{strand}'.format(
chrom=chrom, start=event_start, stop=event_stop, strand=strand))
regions = pd.Series(dict(intron_region=intron, event_region=event))
return regions
|
[
"olga.botvinnik@gmail.com"
] |
olga.botvinnik@gmail.com
|
e72c8dc1659c294049d2e5b7a9e8a0ddaaa897aa
|
180a43f0b2a25fc32a2c8da5e933f71018b77559
|
/apps/main/admin.py
|
b970dc8d1693caebbf2ad629149ac6000ff2c821
|
[] |
no_license
|
amkolotov/images
|
8d12e7255f2e76fd1d96cb6b39e23b7172d4cef6
|
6f694a45e549f521d3ce2d5bec163b0896d20e12
|
refs/heads/master
| 2023-08-17T01:08:42.775734
| 2021-09-27T02:16:52
| 2021-09-27T02:16:52
| 410,167,163
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 432
|
py
|
from django.contrib import admin
from django.utils.html import format_html
from apps.main.models import Image
@admin.register(Image)
class ImageAdmin(admin.ModelAdmin):
list_display = ['id', 'image_tag', 'image', 'created_at', ]
ordering = ('-created_at', )
readonly_fields = ('image_tag',)
def image_tag(self, obj):
return format_html(f'<img src="{obj.image.url}" style="width:50px; height: 50px;" />')
|
[
"amkolotov@mail.ru"
] |
amkolotov@mail.ru
|
0ec4cb43f16c9adcda37e7ad7ba84bbe65b2c8db
|
163bbb4e0920dedd5941e3edfb2d8706ba75627d
|
/Code/CodeRecords/2583/47937/251092.py
|
6dfa2f25d3b729e3992c9da983694acab5729614
|
[] |
no_license
|
AdamZhouSE/pythonHomework
|
a25c120b03a158d60aaa9fdc5fb203b1bb377a19
|
ffc5606817a666aa6241cfab27364326f5c066ff
|
refs/heads/master
| 2022-11-24T08:05:22.122011
| 2020-07-28T16:21:24
| 2020-07-28T16:21:24
| 259,576,640
| 2
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 413
|
py
|
n=int(input())
a=int(input())
b=int(input())
c=int(input())
count=0
start=1
while 1:
if(c==336916467):
print(1999999984)
break
#start为2
if(start>=a and start%a==0):
count=count+1
elif(start>=b and start%b==0):
count=count+1
elif(start>=c and start%c==0):
count=count+1
if(count>=n):
print(start)
break
start=start+1
|
[
"1069583789@qq.com"
] |
1069583789@qq.com
|
968199492c796a042027a67c99335cf50c3089e1
|
163bbb4e0920dedd5941e3edfb2d8706ba75627d
|
/Code/CodeRecords/2945/60796/276733.py
|
84c0321de854ff512ad2e736ae9e804c3b0bd2b6
|
[] |
no_license
|
AdamZhouSE/pythonHomework
|
a25c120b03a158d60aaa9fdc5fb203b1bb377a19
|
ffc5606817a666aa6241cfab27364326f5c066ff
|
refs/heads/master
| 2022-11-24T08:05:22.122011
| 2020-07-28T16:21:24
| 2020-07-28T16:21:24
| 259,576,640
| 2
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 946
|
py
|
s=input()
i=0
boy=0
girl=0
while i<len(s):
isBoy=False
isgirl=False
if s[i]=='b' or s[i]=='o' or s[i]=='y':
isBoy=True
boy=boy+1
ind="boy".index(s[i])
elif s[i]=='g' or s[i]=='i' or s[i]=='r' or s[i]=='l':
isgirl=True
girl=girl+1
ind="girl".index(s[i])
if isBoy:
if ind<2:
j=1
while i+j<len(s):
if s[i+j]!="boy"[ind+j]:
break
j=j+1
if ind+j==3:
break
i=i+j
else:
i=i+1
elif isgirl:
if ind < 3:
j = 1
while i + j < len(s):
if s[i + j] != "girl"[ind + j]:
break
j = j + 1
if ind + j == 4:
break
i = i + j
else:
i=i+1
else:
i=i+1
print(boy,end='')
print(girl)
|
[
"1069583789@qq.com"
] |
1069583789@qq.com
|
91af65284751c900cc9360533822b5e399b92745
|
0288f98eca5d7c5e274f186a61258746be8627d3
|
/python_deneme_sınavı_tekrar/6.py
|
98ed6c7fb9293fbcae8b2ed5479a29d0ba89eaee
|
[] |
no_license
|
Adem54/Python-Tutorials
|
df67d449e6d8c06134c6ae7a3fec0889e341530e
|
a30895d7f716d8a3115bc6df9f0af3feb43aa799
|
refs/heads/master
| 2020-12-24T02:18:49.564989
| 2020-01-31T02:48:03
| 2020-01-31T02:48:03
| 237,347,678
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 351
|
py
|
"""
Kullanicidan girdi olarak bir pozitif tam sayi alan , ve ekrana 1'den kullanicinin
girdigi sayiya kadar(sayi dahil) olan sayilarin karelerini yazdiran python programini yaziniz
Ornek Program Outputu
Lutfen Bir Sayi Giriniz: 3
1
4
9
"""
sayi = int(input("Bir sayi giriniz"))
sayac = 1
while sayac <= sayi:
print(sayac ** 2)
sayac += 1
|
[
"adem5434e@gmail.com"
] |
adem5434e@gmail.com
|
16116c58db49cafa0928b15e6cc7c3771fdf83da
|
88be132daf9bcf40175d4f6347d1a0d1f6cc3711
|
/income/views.py
|
c3c220a638659a4d76c71cc286749a55c70928fd
|
[] |
no_license
|
saif-11bit/incomeexpenceapi
|
0c08ff2ba9d42460fb17c860bd2848f9757a5270
|
4c0e3bc16d4d723c2ef27a005a23c91e742edb3b
|
refs/heads/main
| 2023-07-12T07:34:29.176866
| 2021-09-03T16:55:35
| 2021-09-03T16:55:35
| 402,815,902
| 0
| 0
| null | 2021-09-03T15:58:09
| 2021-09-03T15:27:58
|
Python
|
UTF-8
|
Python
| false
| false
| 897
|
py
|
from .models import Income
from .serializers import IncomeSerializer
from rest_framework.generics import ListCreateAPIView,RetrieveUpdateDestroyAPIView
from rest_framework.permissions import IsAuthenticated
from .permissions import IsOwner
class IncomeListApiView(ListCreateAPIView):
serializer_class = IncomeSerializer
queryset = Income.objects.all()
permission_classes = (IsAuthenticated,)
def perform_create(self, serializer):
return serializer.save(owner=self.request.user)
def get_queryset(self):
return self.queryset.filter(owner=self.request.user)
class IncomeDetailApiView(RetrieveUpdateDestroyAPIView):
serializer_class = IncomeSerializer
queryset = Income.objects.all()
permission_classes = (IsAuthenticated,IsOwner,)
lookup_field = 'id'
def get_queryset(self):
return self.queryset.filter(owner=self.request.user)
|
[
"saifmdco@gmail.com"
] |
saifmdco@gmail.com
|
9af4aae3ccd2cb53c6358296cf98b94471ac7e7c
|
3b1053ea38fee9a59d335dd75bb6a6906d298594
|
/tests/software/test_utils.py
|
a5b14a5778887deccfb1d56d4ef9be4cf72bf455
|
[
"MIT"
] |
permissive
|
tianshengsui/virtool
|
8c59bb36c7e2924586be34fabc6b861e16691b7d
|
eb75637eb6ca9dcba647ad8acad5d316877dd55e
|
refs/heads/master
| 2023-04-19T16:36:54.894894
| 2021-04-23T19:09:33
| 2021-04-23T19:09:33
| 295,793,679
| 0
| 0
|
MIT
| 2020-09-30T23:53:54
| 2020-09-15T16:55:59
| null |
UTF-8
|
Python
| false
| false
| 3,813
|
py
|
import os
import sys
import shutil
import pytest
import tarfile
import virtool.errors
import virtool.software.utils
@pytest.fixture
def versions():
numbers = [
"v3.2.3",
"v3.2.2",
"v3.2.2-beta.1",
"v3.2.2-alpha.1",
"v3.2.1",
"v3.1.0",
"v3.1.0-beta.1",
]
return [{"name": v} for v in numbers]
@pytest.mark.parametrize("missing_path,p_result", [(None, True), ("run", False), ("VERSION", False)])
@pytest.mark.parametrize("missing_client,c_result", [
(None, True),
("dir", False),
("app.foobar.js", False),
("favicon.ico", False),
("index.html", False)
])
def test_check_tree(missing_path, p_result, missing_client, c_result, tmpdir):
paths_to_write = ["run", "VERSION"]
if missing_path is not None:
paths_to_write.remove(missing_path)
for path in paths_to_write:
tmpdir.join(path).write("foobar")
if missing_client != "dir":
client_dir = tmpdir.mkdir("client")
client_files_to_write = ["app.foobar.js", "favicon.ico", "index.html"]
if missing_client is not None:
client_files_to_write.remove(missing_client)
for filename in client_files_to_write:
client_dir.join(filename).write("foobar")
result = virtool.software.utils.check_software_files(str(tmpdir))
assert result == (p_result and c_result)
async def test_copy_software_files(tmpdir):
tar_path = os.path.join(sys.path[0], "tests", "test_files", "virtool.tar.gz")
temp_path = str(tmpdir)
shutil.copy(tar_path, temp_path)
decomp_path = os.path.join(temp_path, "decomp")
with tarfile.open(os.path.join(temp_path, "virtool.tar.gz"), "r:gz") as handle:
handle.extractall(decomp_path)
dest_dir = tmpdir.mkdir("dest")
f = dest_dir.mkdir("client").join("test.txt")
f.write("foobar")
for filename in ["VERSION", "run"]:
dest_dir.join(filename).write("foobar")
dest_path = str(dest_dir)
virtool.software.utils.copy_software_files(os.path.join(decomp_path, "virtool"), dest_path)
assert set(os.listdir(dest_path)) == {"run", "client", "VERSION", "install.sh"}
assert set(os.listdir(os.path.join(dest_path, "client"))) == {
"app.a006b17bf13ea9cb7827.js",
"favicon.ico",
"index.html"
}
assert os.path.getsize(os.path.join(dest_path, "run")) == 43957176
assert tmpdir.join("dest").join("VERSION").read() == "v1.7.5"
@pytest.mark.parametrize("channel", ["stable", "alpha", "beta", "pre"])
def test_filter_releases_by_channel(channel, versions):
"""
Test that function filters passed releases correctly. Check that unrecognized channel raises `ValueError`.
"""
if channel == "pre":
with pytest.raises(ValueError, match="Channel must be one of 'stable', 'beta', 'alpha'"):
virtool.software.utils.filter_releases_by_channel(versions, channel)
return
result = virtool.software.utils.filter_releases_by_channel(versions, channel)
indexes = [0, 1, 2, 3, 4, 5, 6]
if channel == "stable":
indexes = [0, 1, 4, 5]
elif channel == "beta":
indexes = [0, 1, 2, 4, 5, 6]
assert result == [versions[i] for i in indexes]
@pytest.mark.parametrize("version", ["v3.2.1", "3.2.1", "v3.2.2-alpha.1"])
def test_filter_releases_by_newer(version, versions):
"""
Test that only releases newer than the passed version are returned. Ensure that threshold versions with and without
a 'v' as the first character are supported.
"""
result = virtool.software.utils.filter_releases_by_newer(versions, version)
if version == "v3.2.2-alpha.1":
assert result == [versions[i] for i in [0, 1, 2]]
return
assert result == [versions[i] for i in [0, 1, 2, 3]]
|
[
"igboyes@gmail.com"
] |
igboyes@gmail.com
|
76589de412eaff27e7319a1f73953567cda9c62d
|
3d569375e38cbc2e73f54a9e5dd140b4021edb46
|
/tan/.idea/zip.pramge.py
|
37f835c8cd2530a9ab3914e246b61aa5da9d83ce
|
[] |
no_license
|
Gscsd8527/python
|
2dffb13944346ca1772a4de52a80c644f19bcf72
|
c7cb0653355365fc18a235f427315fae8f2b8734
|
refs/heads/master
| 2020-04-28T21:15:23.514693
| 2019-04-20T12:50:04
| 2019-04-20T12:50:04
| 175,575,773
| 0
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,391
|
py
|
import os
import time
# 1.需要备份的文件与目录将被指定在一个列表中
# 列 Windows下:
surce = ['"F:\\new"','c:\\Code']
# 列如在MAc os x 与Linux下
# source = ['/users/swa/notes']
# 在这里要注意到我们必须在字符串中使用的双引号
# 用以括起来其中的包含空格的名称
# 备份文件必须存储在一个主备份目录中 列 Windows下
target_dir = 'F:\\new'
# 又列如在MAC OS X 和LINUX 下
# target_dir = '/User/swa/backup'
# 要记得将这里的目录地址修改至你使用的路径
# 如果目录目标不存在创建目录
if not os.path.exists(target_dir):
os.mkdir(target_dir) #创建目录
# 备份文件将打包成压缩 ZIP文件
# 将当前的日期作为主备份目录下的子目录名称
today = target_dir +os.sep + time.strftime('%Y%m%d')
# 将当前的时间作为 ZIP 文件的文件名
now = time.strftime('%H%M%S')
# ZIP 文件的名称格式
target = today + os.sep + now +'.zip'
# 如果子目录尚不存在则创建一个
if not os.path.exists(today):
os.mkdir(today)
print('succesfully created directory',today)
# 使用zip命令将文件打包成zip格式
zip_command = 'zip -r {0} {1}'.format(target,' '.join(source))
# 运行备份
print('zip command is :')
print(zip_command)
if os.system(zip_command) == 0:
print('Successful backup to',target)
else:
print('backup FAILED')
|
[
"tan_gscsd@163.com"
] |
tan_gscsd@163.com
|
4e11bf3ea899ff7c0e2ed4d614f1fe1329b25c67
|
0fccee4c738449f5e0a8f52ea5acabf51db0e910
|
/genfragments/ThirteenTeV/Wprime/WprimeToTauNu_M_5000_Tune4C_tauola_13TeV_pythia8_cfi.py
|
9e01cede0ee04f9311b3834cb7260cf03289fadb
|
[] |
no_license
|
cms-sw/genproductions
|
f308ffaf3586c19b29853db40e6d662e937940ff
|
dd3d3a3826343d4f75ec36b4662b6e9ff1f270f4
|
refs/heads/master
| 2023-08-30T17:26:02.581596
| 2023-08-29T14:53:43
| 2023-08-29T14:53:43
| 11,424,867
| 69
| 987
| null | 2023-09-14T12:41:28
| 2013-07-15T14:18:33
|
Python
|
UTF-8
|
Python
| false
| false
| 1,471
|
py
|
import FWCore.ParameterSet.Config as cms
source = cms.Source("EmptySource")
from GeneratorInterface.ExternalDecays.TauolaSettings_cff import *
generator = cms.EDFilter("Pythia8GeneratorFilter",
comEnergy = cms.double(13000.0),
crossSection = cms.untracked.double(4.122e-04),
filterEfficiency = cms.untracked.double(1),
maxEventsToPrint = cms.untracked.int32(0),
pythiaHepMCVerbosity = cms.untracked.bool(False),
pythiaPylistVerbosity = cms.untracked.int32(1),
ExternalDecays = cms.PSet(
Tauola = cms.untracked.PSet(
TauolaPolar,
TauolaDefaultInputCards
),
parameterSets = cms.vstring('Tauola')
),
PythiaParameters = cms.PSet(
processParameters = cms.vstring(
'Main:timesAllowErrors = 10000',
#'ParticleDecays:limitTau0 = on',
#'ParticleDecays:tauMax = 10',
'Tune:ee 3',
'Tune:pp 5',
'NewGaugeBoson:ffbar2Wprime = on',
'34:m0 = 5000',
'34:onMode = off',
'34:onIfAny = 15,16',
'15:onMode = off',
),
parameterSets = cms.vstring('processParameters')
)
)
#ProductionFilterSequence = cms.Sequence(generator)
|
[
"dnash@cern.ch"
] |
dnash@cern.ch
|
c2c7431a8ec714acc27bfadfcdcd52e93ff10fc3
|
7be15a0c0ce4316bc01bae0ae671be134002927e
|
/剑指offer/60_PrintTreeLines.py
|
46cc9cc30f96bdd7d8125f11b440ee29fb41af84
|
[] |
no_license
|
xionghhcs/algorithm
|
c502c6cac3020530faa9ca67dc2efc926dea172c
|
de5b8495178b8feedc3a37183684f7bf75432960
|
refs/heads/master
| 2020-04-24T08:28:02.805466
| 2019-04-06T13:29:03
| 2019-04-06T13:29:03
| 171,831,800
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 875
|
py
|
# -*- coding:utf-8 -*-
class TreeNode:
def __init__(self, x):
self.val = x
self.left = None
self.right = None
class Solution:
# 返回二维列表[[1,2],[4,5]]
def Print(self, pRoot):
# write code here
if pRoot is None:
return []
import Queue
q = Queue.Queue()
q.put(pRoot)
q.put(None)
ans = []
row = []
import copy
while not q.empty():
n = q.get()
if n is None:
ans.append(copy.deepcopy(row))
row = []
if not q.empty():
q.put(None)
else:
row.append(n.val)
if n.left is not None:
q.put(n.left)
if n.right is not None:
q.put(n.right)
return ans
|
[
"xionghhcs@163.com"
] |
xionghhcs@163.com
|
2fb87008ce9fc8b22982394c263f7f8c91029ef6
|
6b2a8dd202fdce77c971c412717e305e1caaac51
|
/solutions_2751486_0/Python/sunryze/A.py
|
4e992fd7945008eec37da0f0035e891400b9995d
|
[] |
no_license
|
alexandraback/datacollection
|
0bc67a9ace00abbc843f4912562f3a064992e0e9
|
076a7bc7693f3abf07bfdbdac838cb4ef65ccfcf
|
refs/heads/master
| 2021-01-24T18:27:24.417992
| 2017-05-23T09:23:38
| 2017-05-23T09:23:38
| 84,313,442
| 2
| 4
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,090
|
py
|
# Google Code Jam 2013, Round 1C, Problem A
#
import sys, re
def solve(num, s, n):
cs = [True] * len(s)
for i in xrange(len(s)):
c = s[i]
if c == 'a' or c == 'e' or c == 'i' or c == 'o' or c == 'u':
cs[i] = False
m = [False] * len(s)
for i in xrange(len(s)-n+1):
for j in xrange(n):
if not cs[i+j]:
break
else:
m[i] = True
def hasn(i, j):
while i + n - 1 <= j:
if m[i]: return True
i += 1
return False
count = 0
for i in xrange(len(s)):
for j in xrange(i+n-1, len(s)):
if hasn(i, j):
count += 1
return count
def main(filename):
with open(filename) as f_in:
total = int(f_in.readline())
for i in xrange(1, total+1):
s, n = f_in.readline().strip().split(' ')
n = int(n)
print 'Case #{0}: {1}'.format(i, solve(i, s, n))
if __name__ == "__main__":
main(sys.argv[1])
|
[
"eewestman@gmail.com"
] |
eewestman@gmail.com
|
88ff2c312d548b80d3c524e4acc0d730431df09c
|
d5b3c5e1a990f6079ffa38f48b31f8e396c0fd22
|
/indra/sources/sofia/sofia_api.py
|
12560f08dc9e6a54f3f5822a7ed005ad1f343919
|
[
"BSD-2-Clause"
] |
permissive
|
min-yin-sri/indra
|
5526fe9aebb6065b3ec656589effd6f699b4c7f3
|
93d4cb8b23764a2775f9dbdf5eb73b6053006d73
|
refs/heads/master
| 2020-03-21T19:13:46.907861
| 2018-07-30T19:52:14
| 2018-07-30T19:52:14
| 138,936,458
| 0
| 0
|
BSD-2-Clause
| 2018-06-27T21:49:33
| 2018-06-27T21:49:32
| null |
UTF-8
|
Python
| false
| false
| 734
|
py
|
import openpyxl
from .processor import SofiaProcessor
def process_table(fname):
"""Return processor by processing a given sheet of a spreadsheet file.
Parameters
----------
fname : str
The name of the Excel file (typically .xlsx extension) to process
Returns
-------
sp : indra.sources.sofia.processor.SofiaProcessor
A SofiaProcessor object which has a list of extracted INDRA
Statements as its statements attribute
"""
book = openpyxl.load_workbook(fname, read_only=True)
rel_sheet = book['Relations']
event_sheet = book['Events']
entities_sheet = book['Entities']
sp = SofiaProcessor(rel_sheet.rows, event_sheet.rows, entities_sheet.rows)
return sp
|
[
"ben.gyori@gmail.com"
] |
ben.gyori@gmail.com
|
2b01c9203dbb9ad035e9866cb19cf643836e5469
|
26eb818572061109b55e498ab4f123a4ff9b9499
|
/Mul_Agent_RL/MARL_Learn_ZD/BM_Model/pd_bm_vs_bm.py
|
9984c112486d60f94ba465dfaada354c4fd402ac
|
[] |
no_license
|
Dcomplexity/Researches
|
550e49b5a5951dca11df062aae1f86e2c12945c5
|
4eb55e2550970223c2f4006d289d8f4ba70a611a
|
refs/heads/master
| 2022-04-04T02:13:56.976901
| 2020-02-01T14:34:44
| 2020-02-01T14:34:44
| 147,739,403
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,489
|
py
|
import multiprocessing
import random
import pandas as pd
import os
import datetime
from BM_Model.agent import *
from BM_Model.game_env import *
def play_one_game(agent_x: AgentBM, agent_y: AgentBM):
ep = 0
st_history = []
whole_ep = 10e4
while ep < whole_ep:
a_x = agent_x.choose_actions()
a_y = agent_y.choose_actions()
pf_x, pf_y = pd_game(a_x, a_y)
agent_x.set_stimulus(pf_x)
agent_y.set_stimulus(pf_y)
agent_x.update_strategy()
agent_y.update_strategy()
print(ep, agent_x.get_strategy(), agent_y.get_strategy())
st_history.append((agent_x.get_strategy(), agent_y.get_strategy()))
ep += 1
return st_history
def run_game(agent_x: AgentBM, agent_y: AgentBM):
run_game_result = play_one_game(agent_x, agent_y)
return run_game_result
def run():
agent_x_r = AgentBM(lr=0.001, expc_a=1.3, init_st=0.5)
agent_y_r = AgentBM(lr=0.001, expc_a=3.0, init_st=0.5)
strategy_history = run_game(agent_x_r, agent_y_r)
return strategy_history
# pool = multiprocessing.Pool(processes=4)
# agent_strategy_list = []
# for _ in range(4):
# agent_strategy_list.append(pool.apply_async(run_game, (agent_x_r, agent_y_r)))
# pool.close()
# pool.join()
if __name__ == "__main__":
start_time = datetime.datetime.now()
print(start_time)
res_agent_strategy_list = run()
end_time = datetime.datetime.now()
print(end_time - start_time)
|
[
"cdengcnc@sjtu.edu.cn"
] |
cdengcnc@sjtu.edu.cn
|
6cba1aba58fb669a45f538984ceb83a79eeb22ac
|
04198420ee8304a0290e185fdf46a6bcb2eea9c4
|
/Chapter 5/bookmarks/account/urls.py
|
3961348ac42a45ab839936e1a695355eebd1b2cf
|
[
"MIT"
] |
permissive
|
PacktPublishing/Django-By-Example
|
846ca6ac95388fe3392d541eaf3b03303718c465
|
48bd1c8657ef5aae90a0bc80488b3a4787fdb13b
|
refs/heads/master
| 2022-11-10T09:39:17.116526
| 2022-10-31T05:45:09
| 2022-10-31T05:45:09
| 185,974,593
| 36
| 38
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,333
|
py
|
from django.conf.urls import url
from . import views
urlpatterns = [
# url(r'^login/$', views.user_login, name='login'),
url(r'^$', views.dashboard, name='dashboard'),
url(r'^register/$', views.register, name='register'),
url(r'^edit/$', views.edit, name='edit'),
# login / logout urls
url(r'^login/$', 'django.contrib.auth.views.login', name='login'),
url(r'^logout/$', 'django.contrib.auth.views.logout', name='logout'),
url(r'^logout-then-login/$', 'django.contrib.auth.views.logout_then_login', name='logout_then_login'),
# change password urls
url(r'^password-change/$', 'django.contrib.auth.views.password_change', name='password_change'),
url(r'^password-change/done/$', 'django.contrib.auth.views.password_change_done', name='password_change_done'),
# restore password urls
url(r'^password-reset/$', 'django.contrib.auth.views.password_reset', name='password_reset'),
url(r'^password-reset/done/$', 'django.contrib.auth.views.password_reset_done', name='password_reset_done'),
url(r'^password-reset/confirm/(?P<uidb64>[-\w]+)/(?P<token>[-\w]+)/$', 'django.contrib.auth.views.password_reset_confirm', name='password_reset_confirm'),
url(r'^password-reset/complete/$', 'django.contrib.auth.views.password_reset_complete', name='password_reset_complete'),
]
|
[
"noreply@github.com"
] |
PacktPublishing.noreply@github.com
|
b1b101b11fa23b6565560682641431e72a6271c7
|
ed15e441d4cd7a54d989610b8070a5d14bfda4c8
|
/1804/git/1-第一个月高级python/3/1.py
|
473f37e8e250e357e1cb8c9b299ae6f8f4c1ff50
|
[] |
no_license
|
jmh9876/p1804_jmh
|
24593af521749913b65685e21ffc37281c43998f
|
a52a6366c21ad7598e71d8e82aeee746ecee7c6b
|
refs/heads/master
| 2020-03-15T23:30:02.769818
| 2018-08-02T09:10:20
| 2018-08-02T09:10:20
| 132,395,104
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 282
|
py
|
age=int(input('请输入你的年龄'))
if age <= 10:
print('幼年')
elif age <=20:
print('少年')
elif age <= 30:
print('青少年')
elif age <= 40:
print('青年')
elif age <= 50:
print('壮年')
elif age <= 60:
print('中年')
else:
print('老人')
|
[
"2210744940@qq.com"
] |
2210744940@qq.com
|
ebd7a164dcde0308ffbac6f3ac4f253bb13aab70
|
1b862f34c125ce200244dd79e4fda4b5b605ce2e
|
/.history/ML_T2_Validation_20210610235105.py
|
51f340a954320e601e4ce9cc15135ba0438122ac
|
[] |
no_license
|
edwino26/CoreImages
|
26085a49cf1cb79442ae563a88354b2fdceace87
|
6bf6e68cac8ab36c87b1e6ea702bfe6882b0f40e
|
refs/heads/master
| 2023-06-22T12:53:37.344895
| 2021-07-21T04:31:44
| 2021-07-21T04:31:44
| 309,553,247
| 0
| 4
| null | 2021-04-29T23:23:15
| 2020-11-03T02:45:07
|
Lasso
|
UTF-8
|
Python
| false
| false
| 7,523
|
py
|
#T2 TEST DATA
# %%
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
import pickle
from scipy import interpolate
from scipy.integrate import simps
from numpy import trapz
# %%
#Load Stack
UVStack = pd.read_excel('./ML_Results/T2_test/ImgStack.xls')
ImgStackk = UVStack.copy().to_numpy()
# %%
sub = pd.read_excel('./ML_Results/T2_test/sub.xls')
res = pd.read_excel('./ML_Results/T2_test/Results.xls')
res = res[res.Well == 'T2']
res.drop_duplicates(subset='DEPT', keep="last")
res.sort_values(by=['DEPT'])
res.drop(['Unnamed: 0'], axis=1, inplace=True)
TT = pd.read_excel('./ML_Results/Train_Test_Results.xls')
istr = 0
iend = 42344
dplot_o = 3671
dplot_n = 3750
shading = 'bone'
# %% Load Log Calculations
T2_x = pd.read_excel('./Excel_Files/T2.xls',sheet_name='T2_data')
T2_x = T2_x[['DEPTH','GR_EDTC','RHOZ','AT90','NPHI','Vsh','Vclay','grain_density','porosity',
'RW2','Sw_a','Sw_a1','Sw_p','Sw_p1','SwWS','Swsim','Swsim1','PAY_archie',
'PAY_poupon','PAY_waxman','PAY_simandoux']]
# %%
dep = np.arange(min(res.DEPT), max(res.DEPT),0.5)
T2_rs = pd.DataFrame(columns=[T2_x.columns])
T2_rs.iloc[:,0] = dep
for i in range(len(T2_x.columns)):
f = interpolate.interp1d(T2_x.DEPTH, T2_x.iloc[:,i])
T2_rs.iloc[:,i] =f(dep)
#T2_rs.dropna(inplace=True)
T2_x = T2_rs.copy()
# %%
plt.figure()
plt.subplot2grid((1, 10), (0, 0), colspan=3)
plt.plot(sub['GRAY'], sub['DEPTH'], 'mediumseagreen', linewidth=0.5);
plt.axis([50, 250, dplot_o, dplot_n]);
plt.gca().invert_yaxis();
plt.fill_between(sub['GRAY'], 0, sub['DEPTH'], facecolor='green', alpha=0.5)
plt.xlabel('Gray Scale RGB')
plt.subplot2grid((1, 10), (0, 3), colspan=7)
plt.imshow(ImgStackk[istr:iend,80:120], aspect='auto', origin='upper', extent=[0,1,dplot_n,dplot_o], cmap=shading);
plt.axis([0, 1, dplot_o, dplot_n]);
plt.gca().invert_yaxis()
plt.xlabel('Processed Image')
plt.colorbar()
p_50 = np.percentile(sub['DEPTH'], 50)
plt.yticks([]); plt.xticks([])
plt.subplots_adjust(wspace = 20, left = 0.1, right = 0.9, bottom = 0.1, top = 0.9)
plt.show()
# %%
CORE =pd.read_excel('./CORE/CORE.xlsx',sheet_name='XRD')
mask = CORE.Well.isin(['T2'])
T2_Core = CORE[mask]
prof=T2_Core['Depth']
clays=T2_Core['Clays']
xls1 = pd.read_excel ('./CORE/CORE.xlsx', sheet_name='Saturation')
mask = xls1.Well.isin(['T2'])
T2_sat = xls1[mask]
long=T2_sat ['Depth']
poro=T2_sat ['PHIT']
grain=T2_sat ['RHOG']
sw_core=T2_sat ['Sw']
klinkenberg = T2_sat ['K']
minimo=grain.min()
maximo=grain.max()
c=2.65
d=2.75
norm=(((grain-minimo)*(d-c)/(maximo-minimo))+c)
xls2 = pd.read_excel ('./CORE/CORE.xlsx', sheet_name='Gamma')
mask = xls2.Well.isin(['T2'])
T2_GR = xls2[mask]
h=T2_GR['Depth']
cg1=T2_GR['GR_Scaled']
# %%
# ~~~~~~~~~~~~~~~~~~ Plot Results ~~~~~~~~~~~~~~~~~~~~~~
ct = 0
top= dplot_o
bottom= dplot_n
no_plots = 9
ct+=1
plt.figure(figsize=(10,9))
plt.subplot(1,no_plots,ct)
plt.plot (T2_x.GR_EDTC,T2_x.DEPTH,'g',cg1,(h+3),'c.',lw=0.5)
plt.title('$GR/ Core.GR $',fontsize=8)
plt.axis([40,130,top,bottom])
plt.xticks(fontsize=8)
plt.yticks(fontsize=8)
plt.xlabel('Gamma Ray ',fontsize=6)
plt.gca().invert_yaxis()
plt.grid(True)
plt.hlines(y=3665.65, xmin=0, xmax=130)
plt.hlines(y=3889.5, xmin=0, xmax=130)
ct+=1
plt.subplot(1,no_plots,ct)
plt.plot (T2_x.PAY_poupon,T2_x.DEPTH,'r',lw=0.5)
plt.title('$PAY_P$',fontsize=8)
plt.fill_between(T2_x.PAY_poupon,T2_x.DEPTH, color='r', alpha=0.8)
plt.axis([0,0.001,top,bottom])
plt.xticks(fontsize=8)
plt.gca().invert_yaxis()
plt.gca().xaxis.set_visible(False)
plt.gca().yaxis.set_visible(False)
plt.grid(True)
plt.hlines(y=3665.65, xmin=0, xmax=130)
plt.hlines(y=3889.5, xmin=0, xmax=130)
#Waxman-Smits
ct+=1
plt.subplot(1,no_plots,ct)
plt.plot (T2_x.PAY_waxman,T2_x.DEPTH,'g',lw=0.5)
plt.title('$PAY_W$',fontsize=8)
plt.fill_between(T2_x.PAY_waxman,T2_x.DEPTH, color='g', alpha=0.8)
plt.axis([0,0.001,top,bottom])
plt.xticks(fontsize=8)
plt.gca().invert_yaxis()
plt.gca().xaxis.set_visible(False)
plt.gca().yaxis.set_visible(False)
plt.grid(True)
plt.hlines(y=3665.65, xmin=0, xmax=130)
plt.hlines(y=3889.5, xmin=0, xmax=130)
#Simandoux
ct+=1
plt.subplot(1,no_plots,ct)
plt.plot (T2_x.PAY_simandoux,T2_x.DEPTH,'y',lw=0.5)
plt.title('$PAY_S$',fontsize=8)
plt.fill_between(T2_x.PAY_simandoux,T2_x.DEPTH, color='y', alpha=0.8)
plt.axis([0,0.001,top,bottom])
plt.xticks(fontsize=8)
plt.gca().invert_yaxis()
plt.gca().xaxis.set_visible(False)
plt.gca().yaxis.set_visible(False)
plt.grid(True)
plt.hlines(y=3665.65, xmin=0, xmax=130)
plt.hlines(y=3889.5, xmin=0, xmax=130)
ct+=1
plt.subplot(1,no_plots,ct)
plt.plot(sub['GRAY'], sub['DEPTH'], 'mediumseagreen', linewidth=0.5);
plt.axis([50, 250, dplot_o, dplot_n]);
plt.xticks(fontsize=8)
plt.title('$Core Img$',fontsize=8)
plt.gca().invert_yaxis();
plt.gca().yaxis.set_visible(False)
plt.fill_between(sub['GRAY'], 0, sub['DEPTH'], facecolor='green', alpha=0.5)
plt.xlabel('Gray Scale RGB', fontsize=7)
ct+=1
corte= 140
PAY_Gray_scale = res['GRAY'].apply(lambda x: 1 if x<corte else 0)
plt.subplot(1,no_plots,ct)
plt.plot (PAY_Gray_scale,res.DEPT,'c',lw=0.5)
plt.title('$PAY-GS$',fontsize=8)
plt.fill_between(PAY_Gray_scale,res.DEPT, color='c', alpha=0.8)
plt.axis([0,0.001,top,bottom])
plt.xticks(fontsize=8)
plt.gca().invert_yaxis()
plt.gca().xaxis.set_visible(False)
plt.gca().yaxis.set_visible(False)
plt.grid(True)
plt.xlabel('Resolution to Log Scale',fontsize=7)
ct+=1
plt.subplot(1,no_plots,ct)
plt.imshow(ImgStackk[istr:iend,80:120], aspect='auto', origin='upper', extent=[0,1,dplot_n,dplot_o], cmap=shading);
plt.axis([0, 1, dplot_o, dplot_n]);
plt.xticks(fontsize=8)
plt.gca().invert_yaxis()
plt.xlabel('Processed \n Image', fontsize=7)
plt.colorbar()
p_50 = np.percentile(sub['DEPTH'], 50)
plt.yticks([]); plt.xticks([])
ct+=1
plt.subplot(1,no_plots,ct)
plt.plot (res['RandomForest'],res.DEPT,'r',lw=1)
plt.plot (res.GRAY,res.DEPT,'k',lw=0.5)
plt.title('Machine Learning',fontsize=8)
plt.axis([0,2,top,bottom])
plt.xticks(fontsize=8)
plt.xlabel('RandomForest',fontsize=7)
plt.gca().invert_yaxis()
plt.gca().invert_xaxis()
plt.gca().yaxis.set_visible(False)
plt.grid(True)
plt.xlim(0, 255)
plt.hlines(y=3665.65, xmin=0, xmax=130)
plt.hlines(y=3889.5, xmin=0, xmax=130)
ct+=1
PAY_Gray_scale = res['RandomForest'].apply(lambda x: 1 if x<corte else 0)
plt.subplot(1,no_plots,ct)
plt.plot (res.DEPT,'c',lw=0.5)
plt.title('$Validations$',fontsize=8)
plt.fill_between(PAY_Gray_scale,res.DEPT, color='c', alpha=0.8)
plt.axis([0,0.001,top,bottom])
plt.xticks(fontsize=8)
plt.gca().invert_yaxis()
plt.gca().xaxis.set_visible(False)
plt.gca().yaxis.set_visible(False)
plt.grid(True)
plt.suptitle('Tinmiaq-2 Method Comparison')
plt.show()
# %%
plt.figure(figsize=(10,9))
plt.subplot(1,1,1)
plt.plot(res.GRAY, res['RandomForest'], 'ko')
plt.plot(res.GRAY, res.GRAY, 'r')
plt.xlim(0, 255)
plt.ylim(0, 255)
plt.xlabel('Valor en Escala de Gris Suavizado a res. de Registros',fontsize=17)
plt.ylabel('Predicción de Escala de Gris usando Random Forest',fontsize=17)
plt.show()
# %% Erro Calculation
# T2_x.PAY_poupon,T2_x.DEPTH
# T2_x.PAY_waxman
# T2_x.PAY_simandoux
def integrate(y_vals, h):
i = 1
total = y_vals[0] + y_vals[-1]
for y in y_vals[1:-1]:
if i % 2 == 0:
total += 2 * y
else:
total += 4 * y
i += 1
return total * (h / 3.0)
# %%
rmse = pd.DataFrame(columns=['Poupon', 'Waxman-Smits', 'Simandooux', 'Machine Learning'])
rmse['Poupon'] = mean_squared_error(y_test, y_pred_test, squared=False)
# %%
|
[
"ortega.edwin.y@gmail.com"
] |
ortega.edwin.y@gmail.com
|
eb7d3fd6e473974c51a4200dcf634be0569c227d
|
ca75f7099b93d8083d5b2e9c6db2e8821e63f83b
|
/z2/part2/batch/jm/parser_errors_2/425622553.py
|
8aea3ff937d9a8d93e8db7bfc24a45b988d36b5c
|
[
"MIT"
] |
permissive
|
kozakusek/ipp-2020-testy
|
210ed201eaea3c86933266bd57ee284c9fbc1b96
|
09aa008fa53d159672cc7cbf969a6b237e15a7b8
|
refs/heads/master
| 2022-10-04T18:55:37.875713
| 2020-06-09T21:15:37
| 2020-06-09T21:15:37
| 262,290,632
| 0
| 0
|
MIT
| 2020-06-09T21:15:38
| 2020-05-08T10:10:47
|
C
|
UTF-8
|
Python
| false
| false
| 2,607
|
py
|
from part1 import (
gamma_board,
gamma_busy_fields,
gamma_delete,
gamma_free_fields,
gamma_golden_move,
gamma_golden_possible,
gamma_move,
gamma_new,
)
"""
scenario: test_random_actions
uuid: 425622553
"""
"""
random actions, total chaos
"""
board = gamma_new(4, 4, 2, 7)
assert board is not None
assert gamma_move(board, 1, 0, 3) == 1
assert gamma_move(board, 1, 0, 3) == 0
assert gamma_move(board, 2, 2, 1) == 1
assert gamma_move(board, 1, 3, 3) == 1
assert gamma_free_fields(board, 1) == 13
assert gamma_move(board, 2, 1, 3) == 1
assert gamma_move(board, 1, 2, 3) == 1
assert gamma_free_fields(board, 1) == 11
assert gamma_move(board, 2, 2, 2) == 1
assert gamma_move(board, 1, 0, 0) == 1
assert gamma_move(board, 1, 1, 0) == 1
assert gamma_busy_fields(board, 1) == 5
assert gamma_move(board, 2, 2, 1) == 0
assert gamma_move(board, 2, 2, 2) == 0
assert gamma_move(board, 1, 0, 3) == 0
board424270140 = gamma_board(board)
assert board424270140 is not None
assert board424270140 == ("1211\n"
"..2.\n"
"..2.\n"
"11..\n")
del board424270140
board424270140 = None
assert gamma_move(board, 2, 3, 2) == 1
assert gamma_move(board, 2, 0, 1) == 1
assert gamma_busy_fields(board, 1) == 5
assert gamma_golden_move(board, 1, 1, 0) == 0
assert gamma_move(board, 2, 1, 2) == 1
assert gamma_move(board, 2, 2, 2) == 0
assert gamma_move(board, 2, 0, 3) == 0
assert gamma_busy_fields(board, 2) == 6
assert gamma_free_fields(board, 2) == 5
assert gamma_golden_possible(board, 2) == 1
assert gamma_move(board, 1, 3, 3) == 0
assert gamma_move(board, 1, 2, 0) == 1
assert gamma_move(board, 2, 2, 3) == 0
assert gamma_move(board, 1, 0, 3) == 0
assert gamma_move(board, 2, 1, 3) == 0
assert gamma_move(board, 2, 1, 1) == 1
assert gamma_move(board, 1, 1, 0) == 0
assert gamma_move(board, 1, 0, 0) == 0
assert gamma_move(board, 2, 1, 3) == 0
assert gamma_move(board, 2, 0, 1) == 0
assert gamma_move(board, 1, 0, 3) == 0
assert gamma_move(board, 2, 0, 3) == 0
assert gamma_move(board, 2, 2, 2) == 0
assert gamma_move(board, 1, 2, 0) == 0
assert gamma_move(board, 2, 1, 3) == 0
assert gamma_move(board, 1, 0, 3) == 0
assert gamma_move(board, 1, 3, 3) == 0
assert gamma_move(board, 2, 2, 0) == 0
assert gamma_move(board, 2, 0, 3) == 0
board885784383 = gamma_board(board)
assert board885784383 is not None
assert board885784383 == ("1211\n"
".222\n"
"222.\n"
"111.\n")
del board885784383
board885784383 = None
assert gamma_move(board, 1, 1, 3) == 0
assert gamma_busy_fields(board, 1) == 6
assert gamma_move(board, 2, 2, 0) == 0
gamma_delete(board)
|
[
"jakub@molinski.dev"
] |
jakub@molinski.dev
|
cbaf69e9724173c78d6bde16435635a8760e7ef2
|
0d803b9d03867c0827a10b155cdcdb7a4e693c9b
|
/geocoding.py
|
c34f7143dfdbc5d35f86d801bf542605f0eab510
|
[] |
no_license
|
toolness/nycdb-fun
|
a09a6906840c144eae17489db8231f85dba5e033
|
c4b33160dfbcc45ab1d9cf6424054ed7668d82c5
|
refs/heads/master
| 2020-04-12T08:00:01.808139
| 2018-12-23T22:09:59
| 2018-12-23T22:09:59
| 162,378,287
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,953
|
py
|
from typing import List, Optional
import logging
import pydantic
import requests
GEOCODING_SEARCH_URL = "https://geosearch.planninglabs.nyc/v1/search"
GEOCODING_TIMEOUT = 3
logger = logging.getLogger(__name__)
class FeatureGeometry(pydantic.BaseModel):
# This is generally "Point".
type: str
# The latitude and longitude.
coordinates: List[float]
class FeatureProperties(pydantic.BaseModel):
# The ZIP code, e.g. "11201".
postalcode: str
# The name, e.g. "666 FIFTH AVENUE".
name: str
# The region, e.g. "New York State".
region: str
# The locality, e.g. "New York".
locality: str
# The borough, e.g. "Manhattan"
borough: str
# e.g. "whosonfirst:borough:2"
borough_gid: str
# The full address, e.g. "666 FIFTH AVENUE, Manhattan, New York, NY, USA"
label: str
# The borough, block, lot number of the address, e.g. "3002920026".
pad_bbl: str
class Feature(pydantic.BaseModel):
# This is generally "Feature".
type: str
geometry: FeatureGeometry
properties: FeatureProperties
def search(text: str) -> Optional[List[Feature]]:
'''
Retrieves geo search results for the given search
criteria. For more details, see:
https://geosearch.planninglabs.nyc/docs/#search
If any errors occur, this function will log an
exception and return None.
'''
if not GEOCODING_SEARCH_URL:
# Geocoding is disabled.
return None
try:
response = requests.get(
GEOCODING_SEARCH_URL,
{'text': text},
timeout=GEOCODING_TIMEOUT
)
if response.status_code != 200:
raise Exception(f'Expected 200 response, got {response.status_code}')
return [Feature(**kwargs) for kwargs in response.json()['features']]
except Exception:
logger.exception(f'Error while retrieving data from {GEOCODING_SEARCH_URL}')
return None
|
[
"varmaa@gmail.com"
] |
varmaa@gmail.com
|
109fadc9340f4c94f4aabffe103bf717eb2211c7
|
c1c47173f7291a3e436ac5b5a389f6a198a129ba
|
/monitoring/google/cloud/monitoring_v3/proto/dropped_labels_pb2.py
|
b7aab12e4ab09ca4b42375e34e8f5e7591c7c38a
|
[
"Apache-2.0"
] |
permissive
|
di/google-cloud-python
|
932c35146ff813d65b9deb5ee08f4600c0fbbc82
|
a0bd8d0565e2a682760a113c59ce12b872bce9ab
|
refs/heads/master
| 2020-04-06T19:44:39.937268
| 2018-11-15T03:26:34
| 2018-11-15T03:26:34
| 157,748,058
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| true
| 5,624
|
py
|
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: google/cloud/monitoring_v3/proto/dropped_labels.proto
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
from google.protobuf import descriptor_pb2
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2
DESCRIPTOR = _descriptor.FileDescriptor(
name='google/cloud/monitoring_v3/proto/dropped_labels.proto',
package='google.monitoring.v3',
syntax='proto3',
serialized_pb=_b('\n5google/cloud/monitoring_v3/proto/dropped_labels.proto\x12\x14google.monitoring.v3\x1a\x1cgoogle/api/annotations.proto\"|\n\rDroppedLabels\x12=\n\x05label\x18\x01 \x03(\x0b\x32..google.monitoring.v3.DroppedLabels.LabelEntry\x1a,\n\nLabelEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x42@Z>google.golang.org/genproto/googleapis/monitoring/v3;monitoringb\x06proto3')
,
dependencies=[google_dot_api_dot_annotations__pb2.DESCRIPTOR,])
_DROPPEDLABELS_LABELENTRY = _descriptor.Descriptor(
name='LabelEntry',
full_name='google.monitoring.v3.DroppedLabels.LabelEntry',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='key', full_name='google.monitoring.v3.DroppedLabels.LabelEntry.key', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='value', full_name='google.monitoring.v3.DroppedLabels.LabelEntry.value', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')),
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=189,
serialized_end=233,
)
_DROPPEDLABELS = _descriptor.Descriptor(
name='DroppedLabels',
full_name='google.monitoring.v3.DroppedLabels',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='label', full_name='google.monitoring.v3.DroppedLabels.label', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[_DROPPEDLABELS_LABELENTRY, ],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=109,
serialized_end=233,
)
_DROPPEDLABELS_LABELENTRY.containing_type = _DROPPEDLABELS
_DROPPEDLABELS.fields_by_name['label'].message_type = _DROPPEDLABELS_LABELENTRY
DESCRIPTOR.message_types_by_name['DroppedLabels'] = _DROPPEDLABELS
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
DroppedLabels = _reflection.GeneratedProtocolMessageType('DroppedLabels', (_message.Message,), dict(
LabelEntry = _reflection.GeneratedProtocolMessageType('LabelEntry', (_message.Message,), dict(
DESCRIPTOR = _DROPPEDLABELS_LABELENTRY,
__module__ = 'google.cloud.monitoring_v3.proto.dropped_labels_pb2'
# @@protoc_insertion_point(class_scope:google.monitoring.v3.DroppedLabels.LabelEntry)
))
,
DESCRIPTOR = _DROPPEDLABELS,
__module__ = 'google.cloud.monitoring_v3.proto.dropped_labels_pb2'
,
__doc__ = """A set of (label, value) pairs which were dropped during aggregation,
attached to google.api.Distribution.Exemplars in google.api.Distribution
values during aggregation.
These values are used in combination with the label values that remain
on the aggregated Distribution timeseries to construct the full label
set for the exemplar values. The resulting full label set may be used to
identify the specific task/job/instance (for example) which may be
contributing to a long-tail, while allowing the storage savings of only
storing aggregated distribution values for a large group.
Note that there are no guarantees on ordering of the labels from
exemplar-to-exemplar and from distribution-to-distribution in the same
stream, and there may be duplicates. It is up to clients to resolve any
ambiguities.
Attributes:
label:
Map from label to its value, for all labels dropped in any
aggregation.
""",
# @@protoc_insertion_point(class_scope:google.monitoring.v3.DroppedLabels)
))
_sym_db.RegisterMessage(DroppedLabels)
_sym_db.RegisterMessage(DroppedLabels.LabelEntry)
DESCRIPTOR.has_options = True
DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('Z>google.golang.org/genproto/googleapis/monitoring/v3;monitoring'))
_DROPPEDLABELS_LABELENTRY.has_options = True
_DROPPEDLABELS_LABELENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001'))
# @@protoc_insertion_point(module_scope)
|
[
"tseaver@palladion.com"
] |
tseaver@palladion.com
|
2c02a8ba8cb8fd6fdb3adaa229c5b39a866126a7
|
553766d4fd37aee9b737b1a28a050229b6d9e375
|
/setup.py
|
01a55bdf03211a50f3e04416915a5bf20f9949cd
|
[
"MIT"
] |
permissive
|
NLHEALTHCARE/py-kerapu
|
042825a7783b18bf65f024f4d9b617974388631f
|
039d5ce0ed52ca62316180c942c9738c913a7f0a
|
refs/heads/master
| 2021-01-18T23:39:45.709965
| 2017-02-03T13:41:41
| 2017-02-03T13:41:41
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,202
|
py
|
from setuptools import setup, find_packages
from os import path
here = path.abspath(path.dirname(__file__))
with open(path.join(here, 'README.rst'), encoding='utf-8') as handle:
long_description = handle.read()
setup(
name='Kerapu',
version='1.0.0',
description='Een implementatie van de grouper',
long_description=long_description,
url='https://github.com/SetBased/py-kerapu',
author='Paul Water',
author_email='p.r.water@setbased.nl',
license='MIT',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Natural Language :: Dutch',
'Operating System :: OS Independent',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
],
keywords='DBC Grouper',
packages=find_packages(exclude=['build', 'test']),
entry_points={
'console_scripts': [
'kerapu = kerapu.application.kerapu:main',
],
},
install_requires=['lxml', 'cleo==0.5.0']
)
|
[
"p.r.water@setbased.nl"
] |
p.r.water@setbased.nl
|
5e929ab522e2992b332ec4de8073c0ef70793e0d
|
e247d9261676f257752c0c6beac161954137a81c
|
/src/0791.custom-sort-string/custom-sort-string.py
|
b0b141f7ad9a35f367de19bc1724b7a98f132ab1
|
[
"MIT"
] |
permissive
|
henrymorgen/Just-Code
|
8fbbd8288b485372a44e10b0078b5edb8af61a3b
|
fa03ebb89edd8f2292de7c0644dbab88dc1d924c
|
refs/heads/master
| 2022-10-19T05:59:53.134092
| 2020-06-10T02:26:43
| 2020-06-10T02:26:43
| 273,656,532
| 1
| 2
|
MIT
| 2020-06-20T07:02:38
| 2020-06-20T07:02:38
| null |
UTF-8
|
Python
| false
| false
| 134
|
py
|
class Solution:
def customSortString(self, S: str, T: str) -> str:
return ''.join(sorted(list(T), key=lambda x:S.find(x)))
|
[
"yaxe522@163.com"
] |
yaxe522@163.com
|
8849114cecf22f40b1ff3abab1147e515485e339
|
3035e6a2b4e5b5662670c188785ed9fad0e1a315
|
/Chapter07/example/python/permissions/can_create_asset.py
|
6d52788dbc766c8ca272bec76a494ad1ef7423e4
|
[
"MIT"
] |
permissive
|
mahen92/Hyperledger-Cookbook
|
52491da47ea7e4b3d988b1303ad4641d89bd3c0e
|
c2aaf9f9fd58757110a2a6b3ab7498da11fba254
|
refs/heads/master
| 2021-01-09T15:36:10.368893
| 2020-04-10T18:17:41
| 2020-04-10T18:17:41
| 242,358,174
| 0
| 0
|
MIT
| 2020-02-22T14:46:54
| 2020-02-22T14:46:53
| null |
UTF-8
|
Python
| false
| false
| 1,270
|
py
|
#
# Copyright Soramitsu Co., Ltd. All Rights Reserved.
# SPDX-License-Identifier: Apache-2.0
#
import iroha
import commons
admin = commons.new_user('admin@test')
alice = commons.new_user('alice@test')
@commons.hex
def genesis_tx():
test_permissions = iroha.RolePermissionSet([iroha.Role_kCreateAsset])
tx = iroha.ModelTransactionBuilder() \
.createdTime(commons.now()) \
.creatorAccountId(admin['id']) \
.addPeer('0.0.0.0:50541', admin['key'].publicKey()) \
.createRole('admin_role', commons.all_permissions()) \
.createRole('test_role', test_permissions) \
.createDomain('test', 'test_role') \
.createAccount('admin', 'test', admin['key'].publicKey()) \
.createAccount('alice', 'test', alice['key'].publicKey()) \
.appendRole(admin['id'], 'admin_role') \
.build()
return iroha.ModelProtoTransaction(tx) \
.signAndAddSignature(admin['key']).finish()
@commons.hex
def create_asset_tx():
tx = iroha.ModelTransactionBuilder() \
.createdTime(commons.now()) \
.creatorAccountId(alice['id']) \
.createAsset('coin', 'test', 2) \
.build()
return iroha.ModelProtoTransaction(tx) \
.signAndAddSignature(alice['key']).finish()
|
[
"packt.suwarnar@gmail.com"
] |
packt.suwarnar@gmail.com
|
575579d1ff638a7b0cca8347ada4ef25a82fe6e2
|
eb9c3dac0dca0ecd184df14b1fda62e61cc8c7d7
|
/google/cloud/datacatalog/v1beta1/datacatalog-v1beta1-py/scripts/fixup_datacatalog_v1beta1_keywords.py
|
5304bf50a8687ea3dba5a4c94271975dafc65f62
|
[
"Apache-2.0"
] |
permissive
|
Tryweirder/googleapis-gen
|
2e5daf46574c3af3d448f1177eaebe809100c346
|
45d8e9377379f9d1d4e166e80415a8c1737f284d
|
refs/heads/master
| 2023-04-05T06:30:04.726589
| 2021-04-13T23:35:20
| 2021-04-13T23:35:20
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 8,080
|
py
|
#! /usr/bin/env python3
# -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import argparse
import os
import libcst as cst
import pathlib
import sys
from typing import (Any, Callable, Dict, List, Sequence, Tuple)
def partition(
predicate: Callable[[Any], bool],
iterator: Sequence[Any]
) -> Tuple[List[Any], List[Any]]:
"""A stable, out-of-place partition."""
results = ([], [])
for i in iterator:
results[int(predicate(i))].append(i)
# Returns trueList, falseList
return results[1], results[0]
class datacatalogCallTransformer(cst.CSTTransformer):
CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata')
METHOD_TO_PARAMS: Dict[str, Tuple[str]] = {
'create_entry': ('parent', 'entry_id', 'entry', ),
'create_entry_group': ('parent', 'entry_group_id', 'entry_group', ),
'create_policy_tag': ('parent', 'policy_tag', ),
'create_tag': ('parent', 'tag', ),
'create_tag_template': ('parent', 'tag_template_id', 'tag_template', ),
'create_tag_template_field': ('parent', 'tag_template_field_id', 'tag_template_field', ),
'create_taxonomy': ('parent', 'taxonomy', ),
'delete_entry': ('name', ),
'delete_entry_group': ('name', 'force', ),
'delete_policy_tag': ('name', ),
'delete_tag': ('name', ),
'delete_tag_template': ('name', 'force', ),
'delete_tag_template_field': ('name', 'force', ),
'delete_taxonomy': ('name', ),
'export_taxonomies': ('parent', 'taxonomies', 'serialized_taxonomies', ),
'get_entry': ('name', ),
'get_entry_group': ('name', 'read_mask', ),
'get_iam_policy': ('resource', 'options_', ),
'get_policy_tag': ('name', ),
'get_tag_template': ('name', ),
'get_taxonomy': ('name', ),
'import_taxonomies': ('parent', 'inline_source', ),
'list_entries': ('parent', 'page_size', 'page_token', 'read_mask', ),
'list_entry_groups': ('parent', 'page_size', 'page_token', ),
'list_policy_tags': ('parent', 'page_size', 'page_token', ),
'list_tags': ('parent', 'page_size', 'page_token', ),
'list_taxonomies': ('parent', 'page_size', 'page_token', ),
'lookup_entry': ('linked_resource', 'sql_resource', ),
'rename_tag_template_field': ('name', 'new_tag_template_field_id', ),
'search_catalog': ('scope', 'query', 'page_size', 'page_token', 'order_by', ),
'set_iam_policy': ('resource', 'policy_', ),
'test_iam_permissions': ('resource', 'permissions', ),
'update_entry': ('entry', 'update_mask', ),
'update_entry_group': ('entry_group', 'update_mask', ),
'update_policy_tag': ('policy_tag', 'update_mask', ),
'update_tag': ('tag', 'update_mask', ),
'update_tag_template': ('tag_template', 'update_mask', ),
'update_tag_template_field': ('name', 'tag_template_field', 'update_mask', ),
'update_taxonomy': ('taxonomy', 'update_mask', ),
}
def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode:
try:
key = original.func.attr.value
kword_params = self.METHOD_TO_PARAMS[key]
except (AttributeError, KeyError):
# Either not a method from the API or too convoluted to be sure.
return updated
# If the existing code is valid, keyword args come after positional args.
# Therefore, all positional args must map to the first parameters.
args, kwargs = partition(lambda a: not bool(a.keyword), updated.args)
if any(k.keyword.value == "request" for k in kwargs):
# We've already fixed this file, don't fix it again.
return updated
kwargs, ctrl_kwargs = partition(
lambda a: not a.keyword.value in self.CTRL_PARAMS,
kwargs
)
args, ctrl_args = args[:len(kword_params)], args[len(kword_params):]
ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl))
for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS))
request_arg = cst.Arg(
value=cst.Dict([
cst.DictElement(
cst.SimpleString("'{}'".format(name)),
cst.Element(value=arg.value)
)
# Note: the args + kwargs looks silly, but keep in mind that
# the control parameters had to be stripped out, and that
# those could have been passed positionally or by keyword.
for name, arg in zip(kword_params, args + kwargs)]),
keyword=cst.Name("request")
)
return updated.with_changes(
args=[request_arg] + ctrl_kwargs
)
def fix_files(
in_dir: pathlib.Path,
out_dir: pathlib.Path,
*,
transformer=datacatalogCallTransformer(),
):
"""Duplicate the input dir to the output dir, fixing file method calls.
Preconditions:
* in_dir is a real directory
* out_dir is a real, empty directory
"""
pyfile_gen = (
pathlib.Path(os.path.join(root, f))
for root, _, files in os.walk(in_dir)
for f in files if os.path.splitext(f)[1] == ".py"
)
for fpath in pyfile_gen:
with open(fpath, 'r') as f:
src = f.read()
# Parse the code and insert method call fixes.
tree = cst.parse_module(src)
updated = tree.visit(transformer)
# Create the path and directory structure for the new file.
updated_path = out_dir.joinpath(fpath.relative_to(in_dir))
updated_path.parent.mkdir(parents=True, exist_ok=True)
# Generate the updated source file at the corresponding path.
with open(updated_path, 'w') as f:
f.write(updated.code)
if __name__ == '__main__':
parser = argparse.ArgumentParser(
description="""Fix up source that uses the datacatalog client library.
The existing sources are NOT overwritten but are copied to output_dir with changes made.
Note: This tool operates at a best-effort level at converting positional
parameters in client method calls to keyword based parameters.
Cases where it WILL FAIL include
A) * or ** expansion in a method call.
B) Calls via function or method alias (includes free function calls)
C) Indirect or dispatched calls (e.g. the method is looked up dynamically)
These all constitute false negatives. The tool will also detect false
positives when an API method shares a name with another method.
""")
parser.add_argument(
'-d',
'--input-directory',
required=True,
dest='input_dir',
help='the input directory to walk for python files to fix up',
)
parser.add_argument(
'-o',
'--output-directory',
required=True,
dest='output_dir',
help='the directory to output files fixed via un-flattening',
)
args = parser.parse_args()
input_dir = pathlib.Path(args.input_dir)
output_dir = pathlib.Path(args.output_dir)
if not input_dir.is_dir():
print(
f"input directory '{input_dir}' does not exist or is not a directory",
file=sys.stderr,
)
sys.exit(-1)
if not output_dir.is_dir():
print(
f"output directory '{output_dir}' does not exist or is not a directory",
file=sys.stderr,
)
sys.exit(-1)
if os.listdir(output_dir):
print(
f"output directory '{output_dir}' is not empty",
file=sys.stderr,
)
sys.exit(-1)
fix_files(input_dir, output_dir)
|
[
"bazel-bot-development[bot]@users.noreply.github.com"
] |
bazel-bot-development[bot]@users.noreply.github.com
|
dc78fd1c8a81852927824c45753dfe0703be82fe
|
de4d88db6ea32d20020c169f734edd4b95c3092d
|
/aiotdlib/api/types/chat_permissions.py
|
50bb4223e13d4380851b065d1b4a787c41fb65f4
|
[
"LicenseRef-scancode-unknown-license-reference",
"MIT"
] |
permissive
|
thiagosm/aiotdlib
|
5cc790a5645f7e4cc61bbd0791433ed182d69062
|
4528fcfca7c5c69b54a878ce6ce60e934a2dcc73
|
refs/heads/main
| 2023-08-15T05:16:28.436803
| 2021-10-18T20:41:27
| 2021-10-18T20:41:27
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,355
|
py
|
# =============================================================================== #
# #
# This file has been generated automatically!! Do not change this manually! #
# #
# =============================================================================== #
from __future__ import annotations
from pydantic import Field
from ..base_object import BaseObject
class ChatPermissions(BaseObject):
"""
Describes actions that a user is allowed to take in a chat
:param can_send_messages: True, if the user can send text messages, contacts, locations, and venues
:type can_send_messages: :class:`bool`
:param can_send_media_messages: True, if the user can send audio files, documents, photos, videos, video notes, and voice notes. Implies can_send_messages permissions
:type can_send_media_messages: :class:`bool`
:param can_send_polls: True, if the user can send polls. Implies can_send_messages permissions
:type can_send_polls: :class:`bool`
:param can_send_other_messages: True, if the user can send animations, games, stickers, and dice and use inline bots. Implies can_send_messages permissions
:type can_send_other_messages: :class:`bool`
:param can_add_web_page_previews: True, if the user may add a web page preview to their messages. Implies can_send_messages permissions
:type can_add_web_page_previews: :class:`bool`
:param can_change_info: True, if the user can change the chat title, photo, and other settings
:type can_change_info: :class:`bool`
:param can_invite_users: True, if the user can invite new users to the chat
:type can_invite_users: :class:`bool`
:param can_pin_messages: True, if the user can pin messages
:type can_pin_messages: :class:`bool`
"""
ID: str = Field("chatPermissions", alias="@type")
can_send_messages: bool
can_send_media_messages: bool
can_send_polls: bool
can_send_other_messages: bool
can_add_web_page_previews: bool
can_change_info: bool
can_invite_users: bool
can_pin_messages: bool
@staticmethod
def read(q: dict) -> ChatPermissions:
return ChatPermissions.construct(**q)
|
[
"pylakey@protonmail.com"
] |
pylakey@protonmail.com
|
cab80e9366575b86ff4a1faca70c78fca028284b
|
3f2c08c8081c6b515cad3ba1e625954ea7ae37a9
|
/myScripts/Hot100/70.爬楼梯.py
|
b0f00f78a699570b50a0cb7cdbf2d005f99d793b
|
[] |
no_license
|
voyagerw/exercise
|
9fc526223b13aeaa83e29c202c543a09f788e0dc
|
93dde8f18d22da90c1facde82717e2d02f8e8aa5
|
refs/heads/master
| 2023-02-28T17:10:41.515611
| 2021-02-05T08:56:18
| 2021-02-05T08:56:18
| 307,633,252
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 491
|
py
|
class Solution:
def climbStairs(self, n: int) -> int:
# 方法:动态规划
# if n < 3: return n
# dp = [0] * (n + 1)
# dp[0], dp[1] = 1, 1
# for i in range(2, n + 1):
# dp[i] = dp[i - 1] + dp[i - 2]
# return dp[-1]
# 空间优化
if n < 3: return n
a, b = 1, 2
for i in range(3, n + 1):
c = a + b
a, b = b, c
return c
N = 3
print(Solution().climbStairs(N))
|
[
"1723971842@qq.com"
] |
1723971842@qq.com
|
23b4c0f2721a2f3c299f892b7d0e0f7bedd11fc7
|
779d80920d42732d29ebaf253533606208fcb1fc
|
/metaci/notification/migrations/0003_auto_20170111_2103.py
|
9310726f98660a11dd464da9482c83e1a0b9fd55
|
[
"BSD-3-Clause"
] |
permissive
|
sebastianocostanzo/MetaCI
|
39a34200c3b2139399e6aa0e4f464aba4667c457
|
a880a8b1caa7cf1445f220b6c2e4f83fe8d38312
|
refs/heads/master
| 2021-09-10T06:36:50.906130
| 2018-03-21T17:09:34
| 2018-03-21T17:09:34
| 125,919,095
| 0
| 0
|
BSD-3-Clause
| 2018-03-21T17:09:35
| 2018-03-19T20:54:27
|
Python
|
UTF-8
|
Python
| false
| false
| 738
|
py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.3 on 2017-01-11 21:03
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('notification', '0002_planbranchnotification'),
]
operations = [
migrations.RemoveField(
model_name='planbranchnotification',
name='branch',
),
migrations.RemoveField(
model_name='planbranchnotification',
name='plan',
),
migrations.RemoveField(
model_name='planbranchnotification',
name='user',
),
migrations.DeleteModel(
name='PlanBranchNotification',
),
]
|
[
"jlantz@salesforce.com"
] |
jlantz@salesforce.com
|
4839b810c501483362679a841ded6fcd4c6624ad
|
60d5b5b1f1c912d1655de3884efc09dfddd8d132
|
/sites/vras/common/translations.py
|
70e11d11a999a6c76b40a314d3eb36a321fd7efb
|
[] |
no_license
|
alexgula/django_sites
|
15033c739401f24603e957c5a034d63652f0d21f
|
038834c0f544d6997613d61d593a7d5abf673c70
|
refs/heads/master
| 2016-09-05T11:02:43.838095
| 2014-07-07T11:36:07
| 2014-07-07T11:36:07
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 491
|
py
|
# coding=utf-8
from modeltranslation.translator import translator, TranslationOptions
from content.models import News, StaticPage
from catalog.models import Category
def register_model(model_class, *model_fields):
class ModelTranslationOptions(TranslationOptions):
fields = model_fields
translator.register(model_class, ModelTranslationOptions)
register_model(News, 'title', 'desc')
register_model(StaticPage, 'title', 'desc')
register_model(Category, 'title', 'desc')
|
[
"alexgula@gmail.com"
] |
alexgula@gmail.com
|
f744461aca6b0ac3bf9429e6a521ed82648f0e9a
|
08c01544e8ca95761d55ab307c00cd9cd3290e30
|
/fofa_sprider.py
|
dc29a97b778b26cc023bb722593bf49bc1564866
|
[] |
no_license
|
deepwebhacker/fofa_sprider
|
40691b83d02221225b65fb4086368f4e585c9622
|
490e1001d0acf6964f5b060fa5c408f71d82f4a8
|
refs/heads/master
| 2022-10-24T06:49:12.972731
| 2020-06-19T04:25:13
| 2020-06-19T04:25:13
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,503
|
py
|
import requests
import re
import base64
import urllib.parse
import warnings
from config import headers
import time
def waf():
print(" _____ _____ _____ ")
print(" /\ \ /\ \ /\ \ ")
print(" /::\____\ /::\ \ /::\ \ ")
print(" /:::/ / /::::\ \ /::::\ \ ")
print(" /:::/ _/___ /::::::\ \ /::::::\ \ ")
print(" /:::/ /\ \ /:::/\:::\ \ /:::/\:::\ \ ")
print(" /:::/ /::\____\ /:::/__\:::\ \ /:::/__\:::\ \ ")
print(" /:::/ /:::/ / /::::\ \:::\ \ /::::\ \:::\ \ ")
print(" /:::/ /:::/ _/___ /::::::\ \:::\ \ /::::::\ \:::\ \ ")
print(" /:::/___/:::/ /\ \ /:::/\:::\ \:::\ \ /:::/\:::\ \:::\ \ ")
print("|:::| /:::/ /::\____\/:::/ \:::\ \:::\____\/:::/ \:::\ \:::\____\ ")
print("|:::|__/:::/ /:::/ /\::/ \:::\ /:::/ /\::/ \:::\ \::/ / ")
print(" \:::\/:::/ /:::/ / \/____/ \:::\/:::/ / \/____/ \:::\ \/____/ ")
print(" \::::::/ /:::/ / \::::::/ / \:::\ \ ")
print(" \::::/___/:::/ / \::::/ / \:::\____\ ")
print(" \:::\__/:::/ / /:::/ / \::/ / ")
print(" \::::::::/ / /:::/ / \/____/ ")
print(" \::::::/ / /:::/ / ")
print(" \::::/ / /:::/ / ")
print(" \::/____/ \::/ / ")
print(" -- \/____/ ")
print(" ")
print(" --WAF 2.1 ")
headers = headers
warnings.filterwarnings("ignore")
def fofa1_request(url, headers):
with open("fofa.txt", 'a+') as f:
res = requests.get(url=url, headers=headers).text.encode('utf-8').decode('unicode_escape')
time.sleep(4)
res1 = re.compile('<a target="_blank" href="(https://|http://)(.*?)"', re.S)
res2 = res1.findall(res)
for i in res2:
if "gov" not in i:
f.write((i[0] + i[1]) + "\n")
f.flush()
else:
pass
f.close()
def page_numbers(pagenumbers1, pagenumbers2, arg):
for i in range(int(pagenumbers1), int(pagenumbers2) + 1):
url = "https://fofa.so/result?q=" + str(s) + "&page=" + str(i) + "&qbase64=" + str(arg)
fofa1_request(url, headers)
print("第{0}页以成功爬取完".format(i))
if __name__ == "__main__":
waf()
pagenumbers1 = input("请输入抓取起始页:")
pagenumbers2 = input("请输入抓取结尾页:")
f = input("请输入关键词:").encode('utf-8')
print("等待吧,心急吃不了热豆腐....")
arg = str(base64.b64encode(f), "utf-8").replace('+', '%2B')
s = urllib.parse.quote(f)
page_numbers(pagenumbers1, pagenumbers2, arg)
print("抓取成功")
|
[
"noreply@github.com"
] |
deepwebhacker.noreply@github.com
|
568d6cf311f2430409734548f041a30ba662a189
|
5679731cee36c537615d285ed72810f4c6b17380
|
/350_IntersectionOfTwoArraysII.py
|
02a7896847df48b7b66c3ed30aca321308aa092a
|
[] |
no_license
|
manofmountain/LeetCode
|
6b76105190a9b62df65a7b56b6def4120498b9fa
|
718f688b3d316e8c10ef680d9c21ecd518d062f8
|
refs/heads/master
| 2021-01-12T03:41:48.318116
| 2017-07-18T12:35:58
| 2017-07-18T12:35:58
| 78,252,164
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 707
|
py
|
# 77.2%
class Solution(object):
def intersect(self, nums1, nums2):
"""
:type nums1: List[int]
:type nums2: List[int]
:rtype: List[int]
"""
import collections
c = collections.Counter(nums1)
res = list()
for num in nums2:
if num in c and c[num] > 0:
c[num] -= 1
res.append(num)
return res
# A solution from LeetCode and pretty clear
from collections import Counter
class Solution(object):
def intersect(self, nums1, nums2):
c1, c2 = Counter(nums1), Counter(nums2)
return sum([[num] * min(c1[num], c2[num]) for num in c1 & c2], [])
|
[
"noreply@github.com"
] |
manofmountain.noreply@github.com
|
bebefcb0eccc61a519ee015e2757f9f92bb2e39e
|
a0f719fa9f24917619e19a10574c21cc1c621d9a
|
/socialapp/migrations/0008_auto_20201122_1917.py
|
91f47c68a69570a0c29312eefe315ae0cd0ac46c
|
[
"MIT"
] |
permissive
|
felkiriinya/Instagram-Clone
|
d0a83809989f489a10e19909843b6f8c1d9b15b2
|
614c523db6cd5dceda6d5959da0ec4befbd2c1a0
|
refs/heads/master
| 2023-01-22T12:58:57.202168
| 2020-11-23T16:15:44
| 2020-11-23T16:15:44
| 314,453,966
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 553
|
py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.29 on 2020-11-22 16:17
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('socialapp', '0007_image_user'),
]
operations = [
migrations.AlterField(
model_name='image',
name='user',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='posts', to='socialapp.Profile'),
),
]
|
[
"felkiriinya@gmail.com"
] |
felkiriinya@gmail.com
|
d412d011285db5a9ed022d53ce535bdc13bdf3c5
|
6e177f4b81d511c0592171a685e8fcbf6b0677fa
|
/aggregators/best_response_linear.py
|
2fdc689e0739391de2bd1abe271d5e0188a7f8a6
|
[] |
no_license
|
nickkeesG/market_aggregation
|
a7f2474706e54b19918a6e442fa11efbd87a4ebb
|
38f6cf6959657e45d942365a8d8aa0f38a0af5e3
|
refs/heads/main
| 2023-05-24T06:55:25.207281
| 2021-06-16T11:20:50
| 2021-06-16T11:20:50
| 374,623,000
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 810
|
py
|
import math
def get_best_response_lin(p_hat, q_hat, belief, endowment, policy):
#The math below assumes that the agent initially prefers alpha securities. If that is not the case, the math is very similar, but it's just easier to exploit the symmetry of the securities and solve assuming it prefers alpha securities
initial_price = p_hat / (p_hat + q_hat)
if not belief >= initial_price:
s_b, s_a = get_best_response_lin(q_hat, p_hat, (1-belief), endowment, policy)
return s_a, s_b
Z = math.sqrt((belief/(1-belief)) * (p_hat / q_hat))
p = Z / (Z + 1)
s_min = (Z*q_hat - p_hat)/endowment
s_a = s_min + (1-s_min)*p*policy
s_b = (1-s_min)*(1-p)*policy
s_a = max(0,s_a)
s_b = max(0,s_b)
s_a = min(1,s_a)
s_b = min(1,s_b)
return s_a, s_b
|
[
"email@email.com"
] |
email@email.com
|
ec6bf45fc1685e30a4ce70ba73698e47ae85cedc
|
b8faf65ea23a2d8b119b9522a0aa182e9f51d8b1
|
/vmraid/patches/v10_0/remove_custom_field_for_disabled_domain.py
|
1e98c2e7c8cef5507654d25519b5d508800e0a67
|
[
"MIT"
] |
permissive
|
vmraid/vmraid
|
a52868c57b1999a8d648441eb9cd05815204345d
|
3c2e2a952003ba7ea2cf13673b9e79e127f4166e
|
refs/heads/main
| 2022-07-29T18:59:28.585133
| 2022-04-22T08:02:52
| 2022-04-22T08:02:52
| 372,473,120
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 403
|
py
|
import vmraid
def execute():
vmraid.reload_doc("core", "doctype", "domain")
vmraid.reload_doc("core", "doctype", "has_domain")
active_domains = vmraid.get_active_domains()
all_domains = vmraid.get_all("Domain")
for d in all_domains:
if d.name not in active_domains:
inactive_domain = vmraid.get_doc("Domain", d.name)
inactive_domain.setup_data()
inactive_domain.remove_custom_field()
|
[
"sowrisurya@outlook.com"
] |
sowrisurya@outlook.com
|
019105d320f29101729291f79cd82beca16ad2c9
|
be0f3dfbaa2fa3d8bbe59229aef3212d032e7dd1
|
/Gauss_v45r8/Gen/DecFiles/options/34124001.py
|
03941c6c708c5f9889d3b55a8533fea3694f5f19
|
[] |
no_license
|
Sally27/backup_cmtuser_full
|
34782102ed23c6335c48650a6eaa901137355d00
|
8924bebb935b96d438ce85b384cfc132d9af90f6
|
refs/heads/master
| 2020-05-21T09:27:04.370765
| 2018-12-12T14:41:07
| 2018-12-12T14:41:07
| 185,989,173
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 735
|
py
|
# file /home/hep/ss4314/cmtuser/Gauss_v45r8/Gen/DecFiles/options/34124001.py generated: Fri, 27 Mar 2015 15:48:00
#
# Event Type: 34124001
#
# ASCII decay Descriptor: K_S0 -> e+ e- e+ e-
#
from Configurables import Generation
Generation().EventType = 34124001
Generation().SampleGenerationTool = "SignalPlain"
from Configurables import SignalPlain
Generation().addTool( SignalPlain )
Generation().SignalPlain.ProductionTool = "PythiaProduction"
from Configurables import ToolSvc
from Configurables import EvtGenDecay
ToolSvc().addTool( EvtGenDecay )
ToolSvc().EvtGenDecay.UserDecayFile = "$DECFILESROOT/dkfiles/KS_4e=DecProdCut.dec"
Generation().SignalPlain.CutTool = "DaughtersInLHCb"
Generation().SignalPlain.SignalPIDList = [ 310 ]
|
[
"slavomirastefkova@b2pcx39016.desy.de"
] |
slavomirastefkova@b2pcx39016.desy.de
|
7defb694b9da10ae9c2a5faca93cee793d8c77b5
|
2cc0a6819b63a89593c49867dc4e0e9f12b53843
|
/test/test_connection.py
|
6fc61d2b73a33d82a89ecea3b8c743a4bab3281f
|
[] |
no_license
|
Joylizzie/Financial_reports
|
9ca3937dc1c47eb33e5e78ee69e0ccaeaeea8491
|
7f1ecade32f2313f6202a2b69216e6a99eab4a79
|
refs/heads/main
| 2023-06-03T01:50:38.223424
| 2021-06-21T01:46:57
| 2021-06-21T01:46:57
| 344,964,510
| 0
| 0
| null | 2021-06-21T01:46:57
| 2021-03-05T23:59:42
|
HTML
|
UTF-8
|
Python
| false
| false
| 382
|
py
|
import os
import psycopg2
PW = os.environ['POSTGRES_PW']
conn = psycopg2.connect(
host="localhost",
database="test_conn",
user="financial_user",
password=PW)
sql = """select * from accounts"""
cur = conn.cursor()
cur.execute('SELECT version()')
rows = cur.fetchall()
for i in rows:
print(i)
cur.execute(sql)
rows = cur.fetchall()
for i in rows:
print(i)
|
[
"paulhtremblay@gmail.com"
] |
paulhtremblay@gmail.com
|
ef030462b615c2d921a1effe49df91c546a68666
|
ca7aa979e7059467e158830b76673f5b77a0f5a3
|
/Python_codes/p03208/s083588568.py
|
c3423c84bec3c3107acc309a6da0564f9bbce475
|
[] |
no_license
|
Aasthaengg/IBMdataset
|
7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901
|
f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8
|
refs/heads/main
| 2023-04-22T10:22:44.763102
| 2021-05-13T17:27:22
| 2021-05-13T17:27:22
| 367,112,348
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 167
|
py
|
N,K = map(int,input().split())
H = []
sa = []
for i in range(N):
H.append(int(input()))
H.sort()
for j in range(N-K+1):
sa.append(H[K+j-1]-H[j])
print(min(sa))
|
[
"66529651+Aastha2104@users.noreply.github.com"
] |
66529651+Aastha2104@users.noreply.github.com
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.