Dataset schema (column: type, with observed lengths/ranges; ⌀ = contains nulls):

- blob_id: string (length 40)
- directory_id: string (length 40)
- path: string (length 3 to 288)
- content_id: string (length 40)
- detected_licenses: list (0 to 112 items)
- license_type: string (2 distinct values)
- repo_name: string (length 5 to 115)
- snapshot_id: string (length 40)
- revision_id: string (length 40)
- branch_name: string (684 distinct values)
- visit_date: timestamp[us] (2015-08-06 10:31:46 to 2023-09-06 10:44:38)
- revision_date: timestamp[us] (1970-01-01 02:38:32 to 2037-05-03 13:00:00)
- committer_date: timestamp[us] (1970-01-01 02:38:32 to 2023-09-06 01:08:06)
- github_id: int64 (4.92k to 681M, ⌀)
- star_events_count: int64 (0 to 209k)
- fork_events_count: int64 (0 to 110k)
- gha_license_id: string (22 distinct values)
- gha_event_created_at: timestamp[us] (2012-06-04 01:52:49 to 2023-09-14 21:59:50, ⌀)
- gha_created_at: timestamp[us] (2008-05-22 07:58:19 to 2023-08-21 12:35:19, ⌀)
- gha_language: string (147 distinct values)
- src_encoding: string (25 distinct values)
- language: string (1 distinct value)
- is_vendor: bool (2 values)
- is_generated: bool (2 values)
- length_bytes: int64 (128 to 12.7k)
- extension: string (142 distinct values)
- content: string (length 128 to 8.19k)
- authors: list (1 item)
- author_id: string (length 1 to 132)
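Records with this schema can be inspected programmatically. Below is a minimal sketch, assuming the table is published as a Hugging Face dataset; `org/dataset-name` is a placeholder id, not the actual dataset id.

```python
# Minimal sketch, assuming the table above is available as a Hugging Face dataset;
# "org/dataset-name" is a placeholder id, not the real one.
from datasets import load_dataset

ds = load_dataset("org/dataset-name", split="train", streaming=True)

for row in ds.take(2):
    # Field names follow the schema listed above.
    print(row["repo_name"], row["path"], row["license_type"], row["length_bytes"])
    print(row["detected_licenses"], row["gha_language"], row["extension"])
    print(row["content"][:200])  # first 200 characters of the source file
```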
27bb58ea731197cadd74122bc2818fb29224ab4e
|
90d9610ab5a878966868858b1b261cb6be5f6f97
|
/test/parser_test/log_line_test.py
|
e817f891780930044bfe3b69da50f507a998eeec
|
[] |
no_license
|
torbjoernk/pfasst_py
|
fd4c68cd63592feca8c811b9d994c66a470b541c
|
9309734a41a17ff0e617a242d1c8ebfd75a89698
|
refs/heads/master
| 2021-01-10T01:55:54.256722
| 2015-11-04T13:07:22
| 2015-11-04T13:07:22
| 45,114,179
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,411
|
py
|
# coding=utf-8
"""
.. moduleauthor:: Torbjörn Klatt <t.klatt@fz-juelich.de>
"""
import datetime
import unittest
from pfasst_py.parser.log_line import LogLine
class LogLineTest(unittest.TestCase):
def setUp(self):
self.msg_normal = "04.11.2015 13:51:15,37 [PFASST , INFO , MPI 0] PFASST Prediction step"
self.msg_no_text = "04.11.2015 13:51:15,37 [PFASST , INFO , MPI 0] "
self.msg_no_mpi = "04.11.2015 13:51:15,37 [SDC , INFO ] PFASST Prediction step"
self.msg_no_mpi_no_text = "04.11.2015 13:51:15,37 [SDC , INFO ] "
def test_emits_a_warning_for_wrongly_formatted_log_lines(self):
with self.assertLogs('pfasst_py', level='WARNING') as cptr:
LogLine('not a log line')
self.assertRegex('\n'.join(cptr.output), "Log line could not be parsed")
def test_parse_mpi_line_with_message(self):
obj = LogLine(self.msg_normal)
self.assertEqual(obj.timestamp.value, datetime.datetime(2015, 11, 4, 13, 51, 15, 370000))
self.assertEqual(obj.logger.value, 'PFASST')
self.assertEqual(obj.level.value, 'INFO')
self.assertEqual(obj.rank.value, '0')
self.assertEqual(obj.message.value, 'PFASST Prediction step')
def test_parse_mpi_line_without_message(self):
obj = LogLine(self.msg_no_text)
self.assertEqual(obj.timestamp.value, datetime.datetime(2015, 11, 4, 13, 51, 15, 370000))
self.assertEqual(obj.logger.value, 'PFASST')
self.assertEqual(obj.level.value, 'INFO')
self.assertEqual(obj.rank.value, '0')
self.assertEqual(obj.message.value, '')
def test_parse_non_mpi_line_with_message(self):
obj = LogLine(self.msg_no_mpi)
self.assertEqual(obj.timestamp.value, datetime.datetime(2015, 11, 4, 13, 51, 15, 370000))
self.assertEqual(obj.logger.value, 'SDC')
self.assertEqual(obj.level.value, 'INFO')
self.assertIsNone(obj.rank)
self.assertEqual(obj.message.value, 'PFASST Prediction step')
def test_parse_non_mpi_line_without_message(self):
obj = LogLine(self.msg_no_mpi_no_text)
self.assertEqual(obj.timestamp.value, datetime.datetime(2015, 11, 4, 13, 51, 15, 370000))
self.assertEqual(obj.logger.value, 'SDC')
self.assertEqual(obj.level.value, 'INFO')
self.assertIsNone(obj.rank)
self.assertEqual(obj.message.value, '')
|
[
"t.klatt@fz-juelich.de"
] |
t.klatt@fz-juelich.de
|
5631ee24c00d9fdae2e324a445b848c7cf580bf8
|
6a898e59343d0b3ea4f9580f489ef76d888b2b7e
|
/ecommerce/migrations/0010_stock_first_quantity.py
|
61dafb7c5d1ec221bb519674e006100650c3d362
|
[] |
no_license
|
oujri/ecommerce
|
4b08b0316671e24206e810a38728d71c77fdc396
|
3fd8095dd2ed771a6951ed7fff08ca11ef0b94a1
|
refs/heads/master
| 2020-03-19T03:39:36.002373
| 2018-06-01T22:09:31
| 2018-06-01T22:09:31
| 135,749,651
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 392
|
py
|
# Generated by Django 2.0.4 on 2018-05-05 00:27
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('ecommerce', '0009_auto_20180505_0126'),
]
operations = [
migrations.AddField(
model_name='stock',
name='first_quantity',
field=models.IntegerField(default=0),
),
]
|
[
"anass.urh@outlook.fr"
] |
anass.urh@outlook.fr
|
ce29c2926f8ae832ab67ad03400bf494c196277d
|
2e682fd72e3feaa70e3f7bf2a3b83c50d783ec02
|
/PyTorch/contrib/cv/semantic_segmentation/PSPNet/mmcv_replace/version.py
|
b67226251d80062997136a3247b76992815b9730
|
[
"Apache-2.0",
"BSD-2-Clause",
"MIT",
"BSD-3-Clause",
"LicenseRef-scancode-generic-cla",
"LicenseRef-scancode-unknown-license-reference",
"GPL-1.0-or-later"
] |
permissive
|
Ascend/ModelZoo-PyTorch
|
4c89414b9e2582cef9926d4670108a090c839d2d
|
92acc188d3a0f634de58463b6676e70df83ef808
|
refs/heads/master
| 2023-07-19T12:40:00.512853
| 2023-07-17T02:48:18
| 2023-07-17T02:48:18
| 483,502,469
| 23
| 6
|
Apache-2.0
| 2022-10-15T09:29:12
| 2022-04-20T04:11:18
|
Python
|
UTF-8
|
Python
| false
| false
| 833
|
py
|
# Copyright (c) Open-MMLab. All rights reserved.
__version__ = '1.2.7'
def parse_version_info(version_str):
"""Parse a version string into a tuple.
Args:
version_str (str): The version string.
Returns:
tuple[int | str]: The version info, e.g., "1.3.0" is parsed into
(1, 3, 0), and "2.0.0rc1" is parsed into (2, 0, 0, 'rc1').
"""
version_info = []
for x in version_str.split('.'):
if x.isdigit():
version_info.append(int(x))
elif x.find('rc') != -1:
patch_version = x.split('rc')
version_info.append(int(patch_version[0]))
version_info.append(f'rc{patch_version[1]}')
return tuple(version_info)
version_info = parse_version_info(__version__)
__all__ = ['__version__', 'version_info', 'parse_version_info']
|
[
"wangjiangben@huawei.com"
] |
wangjiangben@huawei.com
|
1da3ef21d2bb5e19ae2f2df7650686cf980d2119
|
bfa81e9ebd6c394fb7ff27afd063bca6e5cf48c1
|
/signal_example/apps.py
|
daa5ee116228405237999efc5ff5f8cee079ea3a
|
[
"MIT"
] |
permissive
|
bluebamus/django_miscellaneous_book
|
b0dea3b856323304faca1d41edb8f70f2b8c6455
|
22e0851b3a07aeef94bb723b334f036ed5c17f72
|
refs/heads/main
| 2023-07-09T17:15:56.206762
| 2021-08-11T10:36:47
| 2021-08-11T10:36:49
| 382,425,361
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 159
|
py
|
from django.apps import AppConfig
class SignalExampleConfig(AppConfig):
default_auto_field = 'django.db.models.BigAutoField'
name = 'signal_example'
|
[
"bluebamus@naver.com"
] |
bluebamus@naver.com
|
ee2c193886ab22514a4005b23ceb595186d0e4bc
|
c68ca71a3ac9f62063e866ad3fe31be9e265835f
|
/Pycharm Lab04/grammar.py
|
fddb105697c460343abfa84e8a7495296a7b1c14
|
[] |
no_license
|
Victor-Alexandru/Formal-Languages-and-Compiler-Design
|
971453887c2a83da434f7d622867fd6fd0615592
|
d36eaf96d200183165c3ebd3a2240e34fcf39e7d
|
refs/heads/master
| 2020-08-08T04:54:25.659844
| 2020-01-13T13:57:05
| 2020-01-13T13:57:05
| 213,719,498
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,867
|
py
|
import re
import tokenize
#
# A grammar is defined as G = (N, E, P, S) where:
#
# N = set of non-terminals
# E = set of terminals
# P = set of productions
# S = starting symbol
class Grammar:
@staticmethod
def parseLine(line):
equalPos = line.index('=')
rhs = line[equalPos + 1:].strip('\n').strip(' ')[1:-1]
return [symbol.strip() for symbol in rhs.split(',')]
@staticmethod
def fromFile(fileName):
with open(fileName) as file:
N = Grammar.parseLine(file.readline())
E = Grammar.parseLine(file.readline())
S = file.readline().split('=')[1].strip()
P = Grammar.parseRules([line.strip('\n').strip(' ').strip(',') for line in file][1:-1])
return Grammar(N, E, P, S)
@staticmethod
def parseRules(rules):
result = []
for rule in rules:
lhs, rhs = rule.split('->')
lhs = lhs.strip()
rhs = [value.strip() for value in rhs.split('|')]
for value in rhs:
result.append((lhs, value.split()))
return result
def __init__(self, N, E, P, S):
self.N = N
self.E = E
self.P = P
self.S = S
def isNonTerminal(self, value):
return value in self.N
def isTerminal(self, value):
return value in self.E
def isRegular(self):
usedInRhs = dict()
notAllowedInRhs = list()
for rule in self.P:
lhs, rhs = rule
hasTerminal = False
hasNonTerminal = False
for char in rhs:
if self.isNonTerminal(char):
usedInRhs[char] = True
hasNonTerminal = True
elif self.isTerminal(char):
if hasNonTerminal:
return False
hasTerminal = True
if char == 'E':
notAllowedInRhs.append(lhs)
if hasNonTerminal and not hasTerminal:
return False
for char in notAllowedInRhs:
if char in usedInRhs:
return False
return True
def getProductionsFor(self, nonTerminal):
if not self.isNonTerminal(nonTerminal):
raise Exception('Can only show productions for non-terminals')
return [prod for prod in self.P if prod[0] == nonTerminal]
def showProductionsFor(self, nonTerminal):
productions = self.getProductionsFor(nonTerminal)
print(', '.join([' -> '.join([prod[0], ' '.join(prod[1])]) for prod in productions]))
def __str__(self):
return 'N = { ' + ', '.join(self.N) + ' }\n' \
+ 'E = { ' + ', '.join(self.E) + ' }\n' \
+ 'P = { ' + ', '.join([' -> '.join([prod[0], ' '.join(prod[1])]) for prod in self.P]) + ' }\n' \
+ 'S = ' + str(self.S) + '\n'
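As a small illustration of the structure described in the comments above, here is a usage sketch that builds a hypothetical grammar in code rather than reading it with `fromFile`; the productions use the same `(lhs, [rhs symbols])` tuples that `parseRules` returns.

```python
# Hypothetical right-linear grammar over terminals {a, b} with start symbol S.
N = ['S', 'A']
E = ['a', 'b']
P = [('S', ['a', 'A']), ('A', ['b', 'A']), ('A', ['a'])]
g = Grammar(N, E, P, 'S')

print(g)                    # N, E, P and S in the __str__ format above
print(g.isRegular())        # True: each rule has at most one non-terminal, at the end
g.showProductionsFor('A')   # A -> b A, A -> a
```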
|
[
"victor21cuciureanu@gmail.com"
] |
victor21cuciureanu@gmail.com
|
43504cf4ff2a416c1a81d72cfea563b923656089
|
792cceb9b573fdfff969404ded00448549e4aee7
|
/overextends/templatetags/overextends_tags.py
|
590b9e38862dbb754b27974876a34a6e959eeb85
|
[
"BSD-2-Clause"
] |
permissive
|
stefanw/django-overextends
|
bdc604bbe84664a844f76d10818630d554925834
|
be53aaab7d7f9260c58ab22bcba1cf782c6224f3
|
refs/heads/master
| 2020-12-30T19:11:22.963138
| 2012-09-01T04:39:32
| 2012-09-01T04:39:32
| 12,434,593
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 5,730
|
py
|
import os
from django import template
from django.template import Template, TemplateSyntaxError, TemplateDoesNotExist
from django.template.loader_tags import ExtendsNode
from django.template.loader import find_template_loader
register = template.Library()
class OverExtendsNode(ExtendsNode):
"""
Allows the template ``foo/bar.html`` to extend ``foo/bar.html``,
given that there is another version of it that can be loaded. This
allows templates to be created in a project that extend their app
template counterparts, or even app templates that extend other app
templates with the same relative name/path.
We use our own version of ``find_template``, which uses an explicit
list of template directories to search for the template, based on
the directories that the known template loaders
(``app_directories`` and ``filesystem``) use. This list gets stored
in the template context, and each time a template is found, its
absolute path gets removed from the list, so that subsequent
searches for the same relative name/path can find parent templates
in other directories, which allows circular inheritance to occur.
Django's ``app_directories``, ``filesystem``, and ``cached``
loaders are supported. The ``eggs`` loader, and any loader that
implements ``load_template_source`` with a source string returned,
should also theoretically work.
"""
def find_template(self, name, context, peeking=False):
"""
Replacement for Django's ``find_template`` that uses the current
template context to keep track of which template directories it
has used when finding a template. This allows multiple templates
with the same relative name/path to be discovered, so that
circular template inheritance can occur.
"""
# These imports want settings, which aren't available when this
# module is imported to ``add_to_builtins``, so do them here.
from django.template.loaders.app_directories import app_template_dirs
from django.conf import settings
# Store a dictionary in the template context mapping template
# names to the lists of template directories available to
# search for that template. Each time a template is loaded, its
# origin directory is removed from its directories list.
context_name = "OVEREXTENDS_DIRS"
if context_name not in context:
context[context_name] = {}
if name not in context[context_name]:
all_dirs = list(settings.TEMPLATE_DIRS) + list(app_template_dirs)
# os.path.abspath is needed under uWSGI, and also ensures we
# have consistent path separators across different OSes.
context[context_name][name] = map(os.path.abspath, all_dirs)
# Build a list of template loaders to use. For loaders that wrap
# other loaders like the ``cached`` template loader, unwind its
# internal loaders and add those instead.
loaders = []
for loader_name in settings.TEMPLATE_LOADERS:
loader = find_template_loader(loader_name)
loaders.extend(getattr(loader, "loaders", [loader]))
# Go through the loaders and try to find the template. When
# found, remove its absolute path from the context dict so
# that it won't be used again when the same relative name/path
# is requested.
for loader in loaders:
dirs = context[context_name][name]
try:
source, path = loader.load_template_source(name, dirs)
except TemplateDoesNotExist:
pass
else:
# Only remove the absolute path for the initial call in
# get_parent, and not when we're peeking during the
# second call.
if not peeking:
remove_path = os.path.abspath(path[:-len(name) - 1])
context[context_name][name].remove(remove_path)
return Template(source)
raise TemplateDoesNotExist(name)
def get_parent(self, context):
"""
Load the parent template using our own ``find_template``, which
will cause its absolute path to not be used again. Then peek at
the first node, and if its parent arg is the same as the
current parent arg, we know circular inheritance is going to
occur, in which case we try and find the template again, with
the absolute directory removed from the search list.
"""
parent = self.parent_name.resolve(context)
# If parent is a template object, just return it.
if hasattr(parent, "render"):
return parent
template = self.find_template(parent, context)
if (isinstance(template.nodelist[0], ExtendsNode) and
template.nodelist[0].parent_name.resolve(context) == parent):
return self.find_template(parent, context, peeking=True)
return template
@register.tag
def overextends(parser, token):
"""
Extended version of Django's ``extends`` tag that allows circular
inheritance to occur, eg a template can both be overridden and
extended at once.
"""
bits = token.split_contents()
if len(bits) != 2:
raise TemplateSyntaxError("'%s' takes one argument" % bits[0])
parent_name = parser.compile_filter(bits[1])
nodelist = parser.parse()
if nodelist.get_nodes_by_type(ExtendsNode):
raise TemplateSyntaxError("'%s' cannot appear more than once "
"in the same template" % bits[0])
return OverExtendsNode(nodelist, parent_name, None)
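For context, a brief usage sketch: the tag name and the `overextends_tags` library come from the registrations above, while installing the package as a Django app called `overextends` is an assumption about packaging, not something stated in this file.

```python
# settings.py fragment (sketch). Assumption: the package installs as the Django
# app "overextends", so its template-tag library can be loaded in templates.
INSTALLED_APPS = [
    # ... project and third-party apps ...
    "overextends",
]

# A project-level templates/foo/bar.html would then both override and extend the
# app template of the same relative name (the circular inheritance described in
# the OverExtendsNode docstring above):
#
#   {% load overextends_tags %}
#   {% overextends "foo/bar.html" %}
#   {% block content %}
#     {{ block.super }}
#     ... project-specific additions ...
#   {% endblock %}
```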
|
[
"steve@jupo.org"
] |
steve@jupo.org
|
ef3750605dc1ceb4f6039d5fb4f9de3f419c88ba
|
58119a43f98d2d6387af04521ea6e577a12c3aed
|
/core/admin.py
|
66d903c333e89d7d2c21040e1fc567afddea8c5e
|
[] |
no_license
|
raysandeep/handly-backend
|
21178b9580c3592d98c4b80bac7cbdcf704d6dba
|
512767881ad0f04fb7870b8fa31241817aab1fe2
|
refs/heads/master
| 2022-12-24T22:34:59.807195
| 2020-09-29T22:17:43
| 2020-09-29T22:17:43
| 267,828,315
| 0
| 0
| null | 2020-05-29T10:16:06
| 2020-05-29T10:16:04
| null |
UTF-8
|
Python
| false
| false
| 302
|
py
|
from django.contrib import admin
from .models import (
Collections,
OutputFiles,
HandwritingInputLogger,
InputFile
)
# Register your models here.
admin.site.register(Collections)
admin.site.register(OutputFiles)
admin.site.register(HandwritingInputLogger)
admin.site.register(InputFile)
|
[
"rayanuthalas@gmail.com"
] |
rayanuthalas@gmail.com
|
1682bd0197cf6a3cb8c7dbd041d629157afe7f2d
|
317d199d36556ecf5da06c660cb5cb655a86ea09
|
/Challenges/lisas_workbook/test_lisas_worbook.py
|
cc41dc87422cb6364030a186007bf9a595fbeac5
|
[] |
no_license
|
baubrun/Challenges-PY
|
e109126a64a20128202e03c2ed359c179f523dcd
|
e2ca45cbca264f5790ce303807e25810a5d8d977
|
refs/heads/master
| 2022-12-17T03:24:43.308680
| 2020-09-14T12:37:24
| 2020-09-14T12:37:24
| 262,485,260
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 225
|
py
|
import pytest
from lisas_workbook import workbook
@pytest.mark.parametrize("n, k, arr, result",
[
(5, 3, [4,2,6,1,10], 4)
]
)
def test_workbook(n, k, arr, result):
assert workbook(n, k, arr) == result
|
[
"baubelf@gmail.com"
] |
baubelf@gmail.com
|
4c7cc1da3d3db48d74d8801ee92a4e8e292c1dc1
|
538ca338d88598c1c671c502d03b94781fbebdff
|
/thriftworker/utils/imports.py
|
09ebd16a067c8e7e23e570bf62ed703035ec4809
|
[] |
no_license
|
gdeetotdom/thriftworker
|
e12c94ac05eebe1b28f157415d3d6072ecf77876
|
c9b7554845a7f76de2e2076568e2fb779cacdfff
|
refs/heads/master
| 2021-01-10T19:22:53.608418
| 2013-11-11T08:18:59
| 2013-11-11T08:18:59
| 5,068,927
| 1
| 1
| null | 2013-11-11T08:19:00
| 2012-07-16T15:03:37
|
C
|
UTF-8
|
Python
| false
| false
| 3,550
|
py
|
from __future__ import absolute_import
import os
import sys
import imp
import importlib
from contextlib import contextmanager
import six
def symbol_by_name(name, aliases={}, imp=None, package=None,
sep='.', default=None, **kwargs):
"""Get symbol by qualified name.
The name should be the full dot-separated path to the class::
modulename.ClassName
Example::
celery.concurrency.processes.TaskPool
^- class name
or using ':' to separate module and symbol::
celery.concurrency.processes:TaskPool
If `aliases` is provided, a dict containing short name/long name
mappings, the name is looked up in the aliases first.
Examples:
>>> symbol_by_name("celery.concurrency.processes.TaskPool")
<class 'celery.concurrency.processes.TaskPool'>
>>> symbol_by_name("default", {
... "default": "celery.concurrency.processes.TaskPool"})
<class 'celery.concurrency.processes.TaskPool'>
# Does not try to look up non-string names.
>>> from celery.concurrency.processes import TaskPool
>>> symbol_by_name(TaskPool) is TaskPool
True
"""
if imp is None:
imp = importlib.import_module
if not isinstance(name, six.string_types):
return name # already a class
name = aliases.get(name) or name
sep = ':' if ':' in name else sep
module_name, _, cls_name = name.rpartition(sep)
if not module_name:
cls_name, module_name = None, package if package else cls_name
try:
try:
module = imp(module_name, package=package, **kwargs)
except ValueError as exc:
exc = ValueError("Couldn't import %r: %s" % (name, exc))
six.reraise(ValueError, exc, sys.exc_info()[2])
return getattr(module, cls_name) if cls_name else module
except (ImportError, AttributeError):
if default is None:
raise
return default
def instantiate(name, *args, **kwargs):
"""Instantiate class by name.
See :func:`symbol_by_name`.
"""
return symbol_by_name(name)(*args, **kwargs)
def qualname(obj):
if isinstance(obj, six.string_types):
return obj
if not hasattr(obj, '__name__') and hasattr(obj, '__class__'):
return qualname(obj.__class__)
return '.'.join([obj.__module__, obj.__name__])
def get_real_module(name):
"""Get the real Python module, regardless of any monkeypatching"""
fp, pathname, description = imp.find_module(name)
imp.acquire_lock()
try:
_realmodule = imp.load_module('_real_{0}'.format(name), fp, pathname,
description)
return _realmodule
finally:
imp.release_lock()
if fp:
fp.close()
@contextmanager
def cwd_in_path():
cwd = os.getcwd()
if cwd in sys.path:
yield
else:
sys.path.insert(0, cwd)
try:
yield cwd
finally:
try:
sys.path.remove(cwd)
except ValueError: # pragma: no cover
pass
def import_from_cwd(module, imp=None, package=None):
"""Import module, but make sure it finds modules
located in the current directory.
Modules located in the current directory has
precedence over modules located in `sys.path`.
"""
if imp is None:
imp = importlib.import_module
with cwd_in_path():
return imp(module, package=package)
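A tiny usage sketch with standard-library targets only, so it runs without celery (and assuming a Python version where the `imp` module imported above is still available); the import path follows the file location `/thriftworker/utils/imports.py`.

```python
from thriftworker.utils.imports import symbol_by_name, instantiate, qualname

cls = symbol_by_name("collections:OrderedDict")    # ':' splits module and symbol
print(cls)                                         # <class 'collections.OrderedDict'>

od = instantiate("collections.OrderedDict", a=1)   # look up the class, then call it
print(qualname(od))                                # collections.OrderedDict
```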
|
[
"lipindd@gdeetotdom.ru"
] |
lipindd@gdeetotdom.ru
|
52e850e88c68b715b9fff51b8ed2477e68d341f2
|
79140b67cac1f5c8e3eb3ab3e7ad65a3a98866e8
|
/test/dnacode.py
|
a712a7e8dceed824582a9d407ad1ef22a93bd53f
|
[] |
no_license
|
dlovemore/bible
|
63c1eceed4a919f7a6d2dfb76b6b084d05c49612
|
2594a2414a66c0abedd1278fef805415a8793f28
|
refs/heads/master
| 2021-01-03T07:17:45.527017
| 2020-05-16T17:54:18
| 2020-05-16T17:54:18
| 239,975,858
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 4,549
|
py
|
# >>> from dna import *
# >>> (Genesis[1]-Genesis[4]).midv()
# Genesis 2:22 And the rib, which the LORD God had taken from man, made he a woman, and brought her unto the man.
# Genesis 2:23 And Adam said, This is now bone of my bones, and flesh of my flesh: she shall be called Woman, because she was taken out of Man.
# >>>
# >>> b.book(5)
# Deuteronomy 1:1-34:12 (959 verses)
# >>> b.book(5)[5].vn()
# 5055
# >>> tell(ssum,osum,'אלהימ')
# א ל ה י מ
# 1+30+5+10+40=86
# 1+12+5+10+13=41
# >>> tell(ssum,osum,'King')
# K i n g
# 20+9+50+7=86
# 11+9+14+7=41
# >>> tell(osum,ssum,'ברא')
# ב ר א
# 2+ 20+1= 23
# 2+200+1=203
# >>> tell('cre ate d')
# cre ate d
# 26+ 26+4=56
# >>> tell('God')
# G o d
# 7+15+4=26
# >>> tell(ssum,'LORD JEHOVAH')
# LORD JEHOVAH
# 184 + 492 =676
# >>> osum('God')**2
# 676
# >>> AY=AV+W+X+Y
# >>> tell(ssum,'את')
# א ת
# 1+400=401
# >>> tell(ssum,'King')
# K i n g
# 20+9+50+7=86
# >>> AY
# 3088286401
# >>> bin(AY)
# '0b10111000000100111000001011000001'
# >>> 23<<27|39<<15|11<<6|1
# 3088286401
# >>> tell('Ch ri st')
# Ch ri st
# 11+27+39=77
# >>> Isaiah[41:4]
# Isaiah 41:4 Who hath wrought and done it, calling the generations from the beginning? I the LORD, the first, and with the last; I am he.
# >>> b/'was'/'and is'/'to come'
# 2 Samuel 7:19 And this was yet a small thing in thy sight, O Lord GOD; but thou hast spoken also of thy servant's house for a great while to come. And is this the manner of man, O Lord GOD?
# Revelation 4:8 And the four beasts had each of them six wings about him; and they were full of eyes within: and they rest not day and night, saying, Holy, holy, holy, LORD God Almighty, which was, and is, and is to come.
# >>> bin(975)
# '0b1111001111'
# >>> b/'ladder'
# Genesis 28:12 And he dreamed, and behold a ladder set up on the earth, and the top of it reached to heaven: and behold the angels of God ascending and descending on it.
# >>> _.tell()
# And he dreamed, and behold a ladder set up on the earth, and the top of it reached to heaven: and behold the angels of God ascending and descending on it.
# 19+13+ 50 + 19+ 46 +1+ 44 + 44+37+29+ 33+ 52 + 19+ 33+ 51+21+29+ 44 +35+ 55 + 19+ 46 + 33+ 58 +21+ 26+ 76 + 19+ 84 +29+ 29=1114
# >>> base(22,AV+W+X+Y)
# [1, 5, 5, 5, 8, 9, 0, 13]
# >>> int('1555890d',22)
# 3088286401
# >>> base(12,AV+W+X+Y)
# [7, 2, 2, 3, 1, 6, 9, 4, 1]
# >>> base(23,AV+W+X+Y)
# [20, 19, 18, 19, 18, 11, 18]
# >>> int('KJIJIBI',23)
# 3088286401
# >>>
# >>> Ecclesiastes[7:27]
# Ecclesiastes 7:27 Behold, this have I found, saith the preacher, counting one by one, to find out the account:
# >>> Genesis/'divide'/'light'
# Genesis 1:4,14,18 (3 verses)
# >>> p(_)
# Genesis 1
# 4 And God saw the light, that it was good: and God divided the light from the darkness.
# 14 And God said, Let there be lights in the firmament of the heaven to divide the day from the night; and let them be for signs, and for seasons, and for days, and years:
# 18 And to rule over the day and over the night, and to divide the light from the darkness: and God saw that it was good.
# >>>
# >>> AY-2**32
# -1206680895
# >>> AX=AV+W+X
# >>> AX
# 3031058986
# >>> 55055**2
# 3031053025
# >>> AX-55055**2
# 5961
# >>> pf(_)
# Counter({3: 1, 1987: 1})
# >>> math.sqrt(.05414)
# 0.23268003782017915
# >>> nF(414)
# (14, 377, -37, 414, 196, 610, 15)
# >>>
### >>> b/40000
### Joshua 4:13;Judges 5:8;2 Samuel 10:18;1 Kings 4:26;1 Chronicles 12:36;19:18 (6 verses)
### >>> p(_)
### Joshua 4:13 About forty thousand prepared for war passed over before the LORD unto battle, to the plains of Jericho.
### Judges 5:8 They chose new gods; then was war in the gates: was there a shield or spear seen among forty thousand in Israel?
### 2 Samuel 10:18 And the Syrians fled before Israel; and David slew the men of seven hundred chariots of the Syrians, and forty thousand horsemen, and smote Shobach the captain of their host, who died there.
### 1 Kings 4:26 And Solomon had forty thousand stalls of horses for his chariots, and twelve thousand horsemen.
### 1 Chronicles 12:36 And of Asher, such as went forth to battle, expert in war, forty thousand.
### 1 Chronicles 19:18 But the Syrians fled before Israel; and David slew of the Syrians seven thousand men which fought in chariots, and forty thousand footmen, and killed Shophach the captain of the host.
# >>> math.sqrt(40)
# 6.324555320336759
# >>> math.sqrt(22)
# 4.69041575982343
# >>> math.sqrt(14)
# 3.7416573867739413
# >>> math.sqrt(2)
# 1.4142135623730951
# >>>
|
[
"davidlovemore@gmail.com"
] |
davidlovemore@gmail.com
|
d0dc44857bee01a251fbea954848bda531caf3e6
|
857da13a653162cc8c83a7d53a254e2caa97836d
|
/tensorpac/io.py
|
2e92c2212e409ac58f510be095da81bbd48b6f8a
|
[
"BSD-3-Clause"
] |
permissive
|
EtienneCmb/tensorpac
|
18a5e844826b7f63796276ec435d9dc43c440e40
|
ac9058fd375d423757943810f613d63785fab85f
|
refs/heads/master
| 2023-05-22T16:54:09.656836
| 2023-02-08T09:42:45
| 2023-02-08T09:42:45
| 93,316,276
| 70
| 18
|
BSD-3-Clause
| 2023-03-08T22:12:03
| 2017-06-04T13:47:18
|
Python
|
UTF-8
|
Python
| false
| false
| 5,546
|
py
|
"""Define tensorpac logger.
See :
https://stackoverflow.com/questions/384076/how-can-i-color-python-logging-output
"""
import logging
import sys
import re
BLACK, RED, GREEN, YELLOW, BLUE, MAGENTA, CYAN, WHITE = range(8)
RESET_SEQ = "\033[0m"
COLOR_SEQ = "\033[1;%dm"
BOLD_SEQ = "\033[1m"
COLORS = {
'DEBUG': GREEN,
'PROFILER': MAGENTA,
'INFO': WHITE,
'WARNING': YELLOW,
'ERROR': RED,
'CRITICAL': RED,
}
FORMAT = {'compact': "$BOLD%(levelname)s | %(message)s",
'spacy': "$BOLD%(levelname)-19s$RESET | %(message)s",
'tensorpac': "$BOLD%(name)s-%(levelname)-19s$RESET | %(message)s",
'print': "%(message)s",
}
def formatter_message(message, use_color=True):
"""Format the message."""
return message.replace("$RESET", RESET_SEQ).replace("$BOLD", BOLD_SEQ)
class _Formatter(logging.Formatter):
"""Formatter."""
def __init__(self, format_type='compact'):
logging.Formatter.__init__(self, FORMAT[format_type])
self._format_type = format_type
def format(self, record):
name = record.levelname
msg = record.getMessage()
# If * in msg, set it in RED :
if '*' in msg:
regexp = r'\*.*?\*'
re_search = re.search(regexp, msg).group()
to_color = COLOR_SEQ % (30 + RED) + re_search + COLOR_SEQ % (
30 + WHITE) + RESET_SEQ
msg_color = re.sub(regexp, to_color, msg)
msg_color += RESET_SEQ
record.msg = msg_color
# Set level color :
levelname_color = COLOR_SEQ % (30 + COLORS[name]) + name + RESET_SEQ
record.levelname = levelname_color
if record.levelno == 20:
logging.Formatter.__init__(self, FORMAT['print'])
else:
logging.Formatter.__init__(self, FORMAT[self._format_type])
return formatter_message(logging.Formatter.format(self, record))
class _StreamHandler(logging.StreamHandler):
"""Stream handler allowing matching and recording."""
def __init__(self):
logging.StreamHandler.__init__(self, sys.stderr)
self.setFormatter(_lf)
self._str_pattern = None
self.emit = self._tensorpac_emit
def _tensorpac_emit(self, record, *args):
msg = record.getMessage()
test = self._match_pattern(record, msg)
if test:
record.msg = test
return logging.StreamHandler.emit(self, record)
else:
return
def _match_pattern(self, record, message):
if isinstance(self._str_pattern, str):
if re.search(self._str_pattern, message):
sub = '*{}*'.format(self._str_pattern)
return re.sub(self._str_pattern, sub, message)
else:
return ''
else:
return message
logger = logging.getLogger('tensorpac')
_lf = _Formatter()
_lh = _StreamHandler() # needs _lf to exist
logger.addHandler(_lh)
PROFILER_LEVEL_NUM = 1
logging.addLevelName(PROFILER_LEVEL_NUM, "PROFILER")
def profiler_fcn(self, message, *args, **kws):
# Yes, logger takes its '*args' as 'args'.
if self.isEnabledFor(PROFILER_LEVEL_NUM):
self._log(PROFILER_LEVEL_NUM, message, args, **kws)
logging.Logger.profiler = profiler_fcn
LOGGING_TYPES = dict(DEBUG=logging.DEBUG, INFO=logging.INFO,
WARNING=logging.WARNING, ERROR=logging.ERROR,
CRITICAL=logging.CRITICAL, PROFILER=PROFILER_LEVEL_NUM)
def set_log_level(verbose=None, match=None):
"""Convenience function for setting the logging level.
This function comes from the PySurfer package. See :
https://github.com/nipy/PySurfer/blob/master/surfer/utils.py
Parameters
----------
verbose : bool, str, int, or None
The verbosity of messages to print. If a str, it can be either
PROFILER, DEBUG, INFO, WARNING, ERROR, or CRITICAL.
match : string | None
Filter logs using a string pattern.
"""
# if verbose is None:
# verbose = "INFO"
logger = logging.getLogger('tensorpac')
if isinstance(verbose, bool):
verbose = 'INFO' if verbose else 'WARNING'
if isinstance(verbose, str):
if (verbose.upper() in LOGGING_TYPES):
verbose = verbose.upper()
verbose = LOGGING_TYPES[verbose]
logger.setLevel(verbose)
else:
raise ValueError("verbose must be in "
"%s" % ', '.join(LOGGING_TYPES))
if isinstance(match, str):
_lh._str_pattern = match
def progress_bar(value, endvalue, bar_length=20, pre_st=None):
"""Progress bar."""
percent = float(value) / endvalue
arrow = '-' * int(round(percent * bar_length) - 1) + '>'
spaces = ' ' * (bar_length - len(arrow))
pre_st = '' if not isinstance(pre_st, str) else pre_st
sys.stdout.write("\r{0} [{1}] {2}%".format(pre_st, arrow + spaces,
int(round(percent * 100))))
sys.stdout.flush()
def is_pandas_installed():
"""Test if pandas is installed."""
try:
import pandas # noqa
except ImportError:
raise IOError("pandas not installed. See https://pandas.pydata.org/"
"pandas-docs/stable/install.html")
def is_statsmodels_installed():
"""Test if statsmodels is installed."""
try:
import statsmodels # noqa
except ImportError:
raise IOError("statsmodels not installed. See http://www.statsmodels."
"org/stable/install.html")
|
[
"e.combrisson@gmail.com"
] |
e.combrisson@gmail.com
|
a5aaebd396700872fe251036dd8234a37d473ff0
|
c2b777fdd5b92aa4cbd25594b1ea877d6b280fc7
|
/Max_number_of_zeroes.py
|
19b4743d441dd8a5da8e493cf03a6223269ea584
|
[] |
no_license
|
pasbahar/python-practice
|
2baa09c474fa5744a11dabcc75507f03cd75c6a5
|
23bcd774becaa720588feb7ba3cf6ea04aafcf86
|
refs/heads/master
| 2020-12-04T05:50:40.382790
| 2020-02-27T17:25:23
| 2020-02-27T17:25:23
| 231,641,289
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,119
|
py
|
'''Given an array of N values. Print the number which has maximum number of zeroes. If there are no zeroes then print -1.
Note: If there are multiple numbers with same (max) number of zeroes then print the Maximum number among them.
Input:
The first line of input contains an integer T denoting the number of test cases. T testcases follow. Each testcase contains two lines of input. The first line consists of an integer N. The next line consists of N spaced integers.
Output:
For each testcase, print the number with maximum number of zeroes.
Constraints:
1 <= T <= 100
1 <= N <= 10^7
1 <= A[i] <= 10^100
Example:
Input:
1
5
10 20 3000 9999 200
Output:
3000
Explanation:
Testcase1: 3000 has the maximum number of zeroes, so we print it.'''
for i in range(int(input())):
n=int(input())
l=list(map(str,input().split()))
max_c=0
res='-1'
for x in l:
count=0
for j in x:
if j=='0':
count+=1
if max_c<count:
max_c=count
res=x
elif max_c==count and max_c!=0:
if int(x)>int(res):
res=x
print(res)
|
[
"pasbahar@gmail.com"
] |
pasbahar@gmail.com
|
72f8e492f29ea42a868c04f17d46f40e698fae2b
|
b0c8e0cafa4a8916faab3cce65756ae91426c43f
|
/study/Python/Week3/BOJ_4811_강의현.py
|
19de46f493609fe6e4c68e9e3a1f45dc0ea0fe68
|
[] |
no_license
|
Rurril/IT-DA-3rd
|
b3e3ec3c2a5efbc75b76b84e9002c27a0ba4a1c4
|
9985e237cb1b90e9609656d534e0ed164723e281
|
refs/heads/master
| 2022-07-22T15:26:39.085369
| 2021-11-23T13:30:06
| 2021-11-23T13:30:06
| 288,980,334
| 3
| 29
| null | 2020-11-05T10:25:30
| 2020-08-20T10:49:17
|
Java
|
UTF-8
|
Python
| false
| false
| 451
|
py
|
# Pills (BOJ 4811)
import sys
def pill(W,H):
if dp[W][H]>0:
return dp[W][H]
if W==0:
return 1
if W>0 and H==0:
dp[W][H]+=pill(W-1,H+1)
elif W>0 and H>0:
dp[W][H]+=pill(W-1,H+1)
dp[W][H]+=pill(W,H-1)
return dp[W][H]
while True:
n=int(sys.stdin.readline())
dp=[[0 for _ in range(31)] for _ in range(31)]
if n==0:
break
else:
print(pill(n,0))
|
[
"riverkeh@naver.com"
] |
riverkeh@naver.com
|
f997ff5c8dc2cf41a469ffa1bd14cc67aa74d335
|
7c5fb33929116bb77b438de3ead93b3978b5af71
|
/alf/examples/mbrl_pendulum.py
|
068e609c739320a78caa5b8f2e8628c80e621db8
|
[
"Apache-2.0"
] |
permissive
|
HorizonRobotics/alf
|
d6dac891322a81ccb7e2a9749139627b1eda28cb
|
b00ff2fa5e660de31020338ba340263183fbeaa4
|
refs/heads/pytorch
| 2023-08-21T18:51:41.370566
| 2023-08-16T00:07:22
| 2023-08-16T00:07:22
| 178,459,453
| 288
| 57
|
Apache-2.0
| 2023-09-14T20:40:20
| 2019-03-29T18:44:07
|
Python
|
UTF-8
|
Python
| false
| false
| 1,653
|
py
|
# Copyright (c) 2020 Horizon Robotics and ALF Contributors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import torch
import alf
# implement the respective reward functions for desired environments here
@alf.configurable
def reward_function_for_pendulum(obs, action):
"""Function for computing reward for gym Pendulum environment. It takes
as input:
(1) observation (Tensor of shape [batch_size, observation_dim])
(2) action (Tensor of shape [batch_size, num_actions])
and returns a reward Tensor of shape [batch_size].
"""
def _observation_cost(obs):
c_theta, s_theta, d_theta = obs[..., :1], obs[..., 1:2], obs[..., 2:3]
theta = torch.atan2(s_theta, c_theta)
cost = theta**2 + 0.1 * d_theta**2
cost = torch.sum(cost, dim=1)
cost = torch.where(
torch.isnan(cost), 1e6 * torch.ones_like(cost), cost)
return cost
def _action_cost(action):
return 0.001 * torch.sum(action**2, dim=-1)
cost = _observation_cost(obs) + _action_cost(action)
# negative cost as reward
reward = -cost
return reward
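A quick shape check, as a sketch: the import path follows the file location, Pendulum observations are `[cos(theta), sin(theta), theta_dot]`, and actions are 1-dimensional, matching the slicing and docstring above.

```python
# Verify the documented shapes: obs [B, 3], action [B, 1] -> reward [B].
import torch
from alf.examples.mbrl_pendulum import reward_function_for_pendulum

obs = torch.randn(4, 3)      # batch of 4 observations
action = torch.randn(4, 1)   # batch of 4 single-dimensional torques
reward = reward_function_for_pendulum(obs, action)
print(reward.shape)          # torch.Size([4])
```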
|
[
"noreply@github.com"
] |
HorizonRobotics.noreply@github.com
|
673f2d850b90b20a67a00755f8a9e59b28f7324f
|
91d1a6968b90d9d461e9a2ece12b465486e3ccc2
|
/sagemaker_write_2/human-task-ui_create.py
|
eebb4a0b6530ac76c1e2b8f1be0de382851e94ff
|
[] |
no_license
|
lxtxl/aws_cli
|
c31fc994c9a4296d6bac851e680d5adbf7e93481
|
aaf35df1b7509abf5601d3f09ff1fece482facda
|
refs/heads/master
| 2023-02-06T09:00:33.088379
| 2020-12-27T13:38:45
| 2020-12-27T13:38:45
| 318,686,394
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,166
|
py
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
import os
import sys
sys.path.append(os.path.dirname(os.path.abspath(os.path.dirname(__file__))))
from common.execute_command import write_two_parameter
# url : https://awscli.amazonaws.com/v2/documentation/api/latest/reference/sagemaker/create-human-task-ui.html
if __name__ == '__main__':
"""
delete-human-task-ui : https://awscli.amazonaws.com/v2/documentation/api/latest/reference/sagemaker/delete-human-task-ui.html
describe-human-task-ui : https://awscli.amazonaws.com/v2/documentation/api/latest/reference/sagemaker/describe-human-task-ui.html
list-human-task-uis : https://awscli.amazonaws.com/v2/documentation/api/latest/reference/sagemaker/list-human-task-uis.html
"""
parameter_display_string = """
# human-task-ui-name : The name of the user interface you are creating.
# ui-template :
"""
add_option_dict = {}
add_option_dict["parameter_display_string"] = parameter_display_string
# ex: add_option_dict["no_value_parameter_list"] = "--single-parameter"
write_two_parameter("sagemaker", "create-human-task-ui", "human-task-ui-name", "ui-template", add_option_dict)
|
[
"hcseo77@gmail.com"
] |
hcseo77@gmail.com
|
c78f086df8711a95739b5f59b62912beb82f0281
|
8574853abe4cfe95b5e03e0b41cf23d1ed865509
|
/pyqtgraph/ui_mainWindow.py
|
b5cadc315fd621cf5e8b4cdbaa0e70b6d0df7e37
|
[] |
no_license
|
neutrons/GUI_Tools
|
7ae8b90aad2cc1dc129d75618fc820c1c362dcda
|
34932a86545b9d52b2fa63f01c7950aebde54d78
|
refs/heads/master
| 2021-01-17T04:48:06.202029
| 2016-11-09T18:31:57
| 2016-11-09T18:31:57
| 28,240,513
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 4,394
|
py
|
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'ui_mainWindow.ui'
#
# Created by: PyQt4 UI code generator 4.11.4
#
# WARNING! All changes made in this file will be lost!
from PyQt4 import QtCore, QtGui
try:
_fromUtf8 = QtCore.QString.fromUtf8
except AttributeError:
def _fromUtf8(s):
return s
try:
_encoding = QtGui.QApplication.UnicodeUTF8
def _translate(context, text, disambig):
return QtGui.QApplication.translate(context, text, disambig, _encoding)
except AttributeError:
def _translate(context, text, disambig):
return QtGui.QApplication.translate(context, text, disambig)
class Ui_MainWindow(object):
def setupUi(self, MainWindow):
MainWindow.setObjectName(_fromUtf8("MainWindow"))
MainWindow.resize(1535, 845)
MainWindow.setMinimumSize(QtCore.QSize(300, 0))
self.centralwidget = QtGui.QWidget(MainWindow)
self.centralwidget.setMinimumSize(QtCore.QSize(0, 0))
self.centralwidget.setObjectName(_fromUtf8("centralwidget"))
self.verticalLayout = QtGui.QVBoxLayout(self.centralwidget)
self.verticalLayout.setObjectName(_fromUtf8("verticalLayout"))
self.widget_1 = QtGui.QWidget(self.centralwidget)
self.widget_1.setObjectName(_fromUtf8("widget_1"))
self.verticalLayout.addWidget(self.widget_1)
self.widget_2 = QtGui.QWidget(self.centralwidget)
self.widget_2.setObjectName(_fromUtf8("widget_2"))
self.verticalLayout.addWidget(self.widget_2)
MainWindow.setCentralWidget(self.centralwidget)
self.menubar = QtGui.QMenuBar(MainWindow)
self.menubar.setGeometry(QtCore.QRect(0, 0, 1535, 22))
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Minimum, QtGui.QSizePolicy.Minimum)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.menubar.sizePolicy().hasHeightForWidth())
self.menubar.setSizePolicy(sizePolicy)
self.menubar.setObjectName(_fromUtf8("menubar"))
MainWindow.setMenuBar(self.menubar)
self.action1_Data = QtGui.QAction(MainWindow)
self.action1_Data.setObjectName(_fromUtf8("action1_Data"))
self.action2_Normalization = QtGui.QAction(MainWindow)
self.action2_Normalization.setEnabled(True)
self.action2_Normalization.setObjectName(_fromUtf8("action2_Normalization"))
self.action3_Binning = QtGui.QAction(MainWindow)
self.action3_Binning.setObjectName(_fromUtf8("action3_Binning"))
self.action4_Fitting = QtGui.QAction(MainWindow)
self.action4_Fitting.setObjectName(_fromUtf8("action4_Fitting"))
self.action5_Results = QtGui.QAction(MainWindow)
self.action5_Results.setObjectName(_fromUtf8("action5_Results"))
self.actionAbout = QtGui.QAction(MainWindow)
self.actionAbout.setObjectName(_fromUtf8("actionAbout"))
self.action1_Raw_Data = QtGui.QAction(MainWindow)
self.action1_Raw_Data.setObjectName(_fromUtf8("action1_Raw_Data"))
self.action2_Normalization_2 = QtGui.QAction(MainWindow)
self.action2_Normalization_2.setObjectName(_fromUtf8("action2_Normalization_2"))
self.action3_Normalized_Data = QtGui.QAction(MainWindow)
self.action3_Normalized_Data.setObjectName(_fromUtf8("action3_Normalized_Data"))
self.retranslateUi(MainWindow)
QtCore.QMetaObject.connectSlotsByName(MainWindow)
def retranslateUi(self, MainWindow):
MainWindow.setWindowTitle(_translate("MainWindow", "MainWindow", None))
self.action1_Data.setText(_translate("MainWindow", "1. Data", None))
self.action2_Normalization.setText(_translate("MainWindow", "2. Normalization", None))
self.action3_Binning.setText(_translate("MainWindow", "4. Binning", None))
self.action4_Fitting.setText(_translate("MainWindow", "5. Fitting", None))
self.action5_Results.setText(_translate("MainWindow", "6. Strain Mapping", None))
self.actionAbout.setText(_translate("MainWindow", "About ...", None))
self.action1_Raw_Data.setText(_translate("MainWindow", "1. Raw Data", None))
self.action2_Normalization_2.setText(_translate("MainWindow", "2. Normalization", None))
self.action3_Normalized_Data.setText(_translate("MainWindow", "3. Normalized Data", None))
|
[
"bilheuxjm@ornl.gov"
] |
bilheuxjm@ornl.gov
|
c4c4ad976f5a937b2721e07f86ceec98b86e7c4c
|
d82d8bce58a64e579e8a5e5d9e3fbc2b5274ea0a
|
/code/compile_list.py
|
ad8e5e99242fcfbfab31f7d23f459ca752478fb6
|
[] |
no_license
|
annayqho/IcBL-master-list
|
2f58ad8fa092296d0c999dcca8b7700cb8f41ef7
|
c9d3884411fbdcb6f607ac76af72e5625a4266ba
|
refs/heads/master
| 2021-07-19T19:22:06.205001
| 2020-05-14T23:50:26
| 2020-05-14T23:50:26
| 163,516,921
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 7,091
|
py
|
""" Compile a list of Ic-BL SNe """
import numpy as np
import requests
from astropy.table import Table
from astropy.time import Time
from astropy.coordinates import SkyCoord,Distance
from astropy.cosmology import Planck15
from astropy.io import ascii
DATA_DIR = "/Users/annaho/Dropbox/Projects/Research/IcBL/data"
def todeg(ra, dec):
""" convert XX:XX:XX to decimal degrees """
radeg = []
decdeg = []
for ii,raval in enumerate(ra):
hh = raval.split(":")[0]
mm = raval.split(":")[1]
ss = raval.split(":")[2]
radegval = hh+"h"+mm+"m"+ss+"s"
dd = dec[ii].split(":")[0]
mm = dec[ii].split(":")[1]
ss = dec[ii].split(":")[2]
decdegval = dd+"d"+mm+"m"+ss+"s"
c = SkyCoord(radegval, decdegval, frame='icrs')
radeg.append(c.ra.deg)
decdeg.append(c.dec.deg)
return np.array(radeg), np.array(decdeg)
def opensn():
"""
Automatically grab all of the Ic-BL SNe from the open SN catalog """
print("Connecting to the open SN catalog...")
server = "https://api.sne.space/catalog"
r = requests.get(server, params={'claimedtype': 'Ic BL', 'format': 'json'})
dat = r.json()
# Retrieve the data you want
nsn = len(dat.keys())
print("Found %s claimed Ic-BL SNe on the open SN catalog" %nsn)
return dat
def tns():
""" Run this to automatically grab all of the Ic-BL SNe from TNS """
print("Connecting to TNS server...")
server = "https://wis-tns.weizmann.ac.il/search"
r = requests.get(server, params={'objtype': 7, 'format': 'csv'})
alldat = r.text.split('\n')
# Header
header = np.array(alldat[0].split('","'))
# Data
dat = alldat[1:]
# According to the formatting, you want to group things that live together
# in double quotation marks. So, the real split between items is ",", not ,
for ii,row in enumerate(dat):
dat[ii] = np.array(dat[ii].split('","'))
dat = np.array(dat)
# Retrieve the data you want
nsn = dat.shape[0]
print("Found %s Ic-BL SNe on TNS" %nsn)
name = dat[:,np.where(header=='Name')[0][0]]
ra = dat[:,np.where(header=='RA')[0][0]]
dec = dat[:,np.where(header=='DEC')[0][0]]
radeg, decdeg = todeg(ra,dec)
z = dat[:,np.where(header=='Redshift')[0][0]]
date = dat[:,np.where(header=='Discovery Date (UT)')[0][0]]
ref = ['TNS'] * nsn
return name, date, radeg, decdeg, z, ref
def ptf():
""" the PTF/iPTF sample of 34 Ic-BL SNe
I copied the table directly from the .tex file downloaded from the arXiv,
then ran the following commands
%s/\\//g
%s/ //g
%s/\*//g
%s/xx//g
I also removed the commented-out lines
In this paper, they give estimated explosion epochs (with a typical
uncertainty of 2 days) for all of the SNe observed before
and after r maximum brightness.
A lot of them don't have an estimated explosion epoch, though.
So what I should do is use the estimate for the ones that have it,
and for the ones that don't have it, just report discovery date
as I found it on the marshal.
"""
# Discovery dates on the Marshal, for the ones that aren't in Table 2
# 27 out of 34 leaves 7
disc = {}
disc['PTF09sk'] = 2455002.74571
disc['PTF10cs'] = 2455203.74537
disc['PTF12grr'] = 2456117.84878
disc['iPTF14bfu'] = Time('2014-06-06T03:11:51.86').jd
disc['iPTF15dld'] = 2457318.82184
disc['iPTF16coi'] = 2457625.72566
disc['iPTF17axg'] = 2457784.97286
dat = Table.read(
"%s/taddia2018.dat" %DATA_DIR,
delimiter='&', format='ascii.fast_no_header')
# file with explosion epochs
dat_expl = Table.read(
"%s/taddia2018_t2.dat" %DATA_DIR,
delimiter='&', format='ascii.fast_no_header')
name_expl = dat_expl['col1']
texpl_tab = dat_expl['col8']  # explosion epochs from Table 2
name = dat['col1']
texpl = []
for n in name:
try:
ind = np.where(name_expl==n)[0][0]
texpl.append(texpl_tab[ind])
except IndexError:
texpl.append(disc[n])
ra = dat['col2']
dec = dat['col3']
radeg, decdeg = todeg(ra, dec)
z = dat['col5']
ref = ['T18']*len(name)
return list(name), texpl, list(radeg), list(decdeg), list(z), ref
def ztf():
""" The list of Ic-BL discovered in ZTF """
dat = Table.read(
"%s/ztf.dat" %DATA_DIR,
delimiter='&', format='ascii.fast_no_header')
name = dat['col1']
date = dat['col3']
ra = dat['col5']
dec = dat['col6']
radeg, decdeg = todeg(ra, dec)
z = dat['col7']
ref = ['ZTF']*len(name)
return list(name), list(date), list(radeg), list(decdeg), list(z), ref
def add(name, disc, ra, dec, redshift, ref, n, di, r, d, z, re):
c = SkyCoord(ra, dec, unit='deg')
cadd = SkyCoord(r, d, unit='deg')
nadd = 0
for ii,val in enumerate(cadd):
dist = c.separation(val).arcsec
nopos = False
noname = False
# Is the position in there already?
if sum(dist <= 2) == 0:
nopos = True
# Is the name in there already?
if n[ii] not in name:
noname = True
if np.logical_and(nopos, noname):
name.append(n[ii])
disc.append(di[ii])
ra.append(r[ii])
dec.append(d[ii])
redshift.append(z[ii])
ref.append(re[ii])
nadd += 1
else:
print("%s is a duplicate, not adding" %n[ii])
print("added %s events" %str(nadd))
return name, disc, ra, dec, redshift, ref
if __name__=="__main__":
dat = opensn()
names = np.array(list(dat.keys()))
nsn = len(names)
ra = []
dec = []
for key,val in dat.items():
if len(val['ra']) > 0:
ra.append(val['ra'][0]['value'])
dec.append(val['dec'][0]['value'])
ra,dec = todeg(ra,dec)
opensnpos = SkyCoord(ra, dec, unit='deg')
# Question 1: are there any Ic-BL on TNS that are not on openSN?
name, date, radeg, decdeg, z, ref = tns()
name = np.array([val.replace(" ", "") for val in name])
missing = np.setdiff1d(name,names)
if len(missing) > 0:
print("There are TNS Ic-BL SNe missing from OpenSN")
print(missing)
else:
print("All TNS Ic-BL SNe are on OpenSN")
# Question 2: are there any Ic-BL from other papers that are not on openSN?
# Yes, a whole bunch from PTF and ZTF.
name, date, radeg, decdeg, z, ref = ztf()
name = np.array(name)
print(np.setdiff1d(name,names))
# compare positions, since some of these only have ZTF names...
ptfpos = SkyCoord(radeg, decdeg, unit='deg')
for ii,val in enumerate(ptfpos):
if min(val.separation(opensnpos).arcsec) < 1:
print("%s already in openSN" %name[ii])
else:
print("%s not in openSN" %name[ii])
# # Name, Expl./Disc. Date, RA, Dec, Redshift, Reference
# ascii.write(
# [names], 'all_icbl.html', names=['Name'], delimiter=',',
# overwrite=True, format='html')
|
[
"annayqho@gmail.com"
] |
annayqho@gmail.com
|
c36c23782d5de4c8d32ff18fa5c495be5c8bbb9e
|
f576f0ea3725d54bd2551883901b25b863fe6688
|
/sdk/eventhub/azure-mgmt-eventhub/generated_samples/private_link_resources_get.py
|
09d9cb10ccc2b1df74331b0170522555e172219b
|
[
"LicenseRef-scancode-generic-cla",
"MIT",
"LGPL-2.1-or-later"
] |
permissive
|
Azure/azure-sdk-for-python
|
02e3838e53a33d8ba27e9bcc22bd84e790e4ca7c
|
c2ca191e736bb06bfbbbc9493e8325763ba990bb
|
refs/heads/main
| 2023-09-06T09:30:13.135012
| 2023-09-06T01:08:06
| 2023-09-06T01:08:06
| 4,127,088
| 4,046
| 2,755
|
MIT
| 2023-09-14T21:48:49
| 2012-04-24T16:46:12
|
Python
|
UTF-8
|
Python
| false
| false
| 1,568
|
py
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from azure.identity import DefaultAzureCredential
from azure.mgmt.eventhub import EventHubManagementClient
"""
# PREREQUISITES
pip install azure-identity
pip install azure-mgmt-eventhub
# USAGE
python private_link_resources_get.py
Before running the sample, please set the values of the client ID, tenant ID and client secret
of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID,
AZURE_CLIENT_SECRET. For more info about how to get the value, please see:
https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal
"""
def main():
client = EventHubManagementClient(
credential=DefaultAzureCredential(),
subscription_id="subID",
)
response = client.private_link_resources.get(
resource_group_name="ArunMonocle",
namespace_name="sdk-Namespace-2924",
)
print(response)
# x-ms-original-file: specification/eventhub/resource-manager/Microsoft.EventHub/stable/2021-11-01/examples/NameSpaces/PrivateLinkResourcesGet.json
if __name__ == "__main__":
main()
|
[
"noreply@github.com"
] |
Azure.noreply@github.com
|
c3f9cd65dcb33f02e8541e83d77676f71f7713de
|
764e24bf9e8a4d68b3f06eb0e0b9e6c9140e72ba
|
/Deportes/Evento/form.py
|
c244a680e74e5c3e074d3387faf9f55f1eb7b198
|
[] |
no_license
|
efnaranjo6/Deportes
|
27200d30f1f86364ed2d37f76342f63b657a5788
|
71f22790a8d059516aa35ac325cc9b284979712e
|
refs/heads/master
| 2021-03-28T00:09:42.513669
| 2020-04-27T16:45:41
| 2020-04-27T16:45:41
| 247,819,791
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 511
|
py
|
from django import forms
from Evento.models import Evento
class Eventoform(forms.ModelForm):
class Meta:
model = Evento
fields = ['nombre']
labels = {'nombre': 'ingrese el nombre'}
widgets = {'nombre': forms.TextInput()}
def __init__(self,*args, **kwargs):
super().__init__(*args, **kwargs)
for field in iter(self.fields):
self.fields[field].widget.attrs.update({
'class':'form-control'})
|
[
"efnaranjo6@misena.edu.co"
] |
efnaranjo6@misena.edu.co
|
17568aee9cbf26702497d31f6ef8bf6bdd3b0a1c
|
cf197880ad947b1706ae2ca19fa7010cc2dd12b8
|
/Algorithms/Chapter2_Fundamentals2/04_Character_Art.py
|
b26daab9076f8155875eeae2d5c2f620deedb91e
|
[] |
no_license
|
KevinMichaelCamp/Python-HardWay
|
9b8adb5be31729da8f52c956b4d0638a79715013
|
25f21f4fb8934edb13649fea3d5d15033332a7eb
|
refs/heads/master
| 2020-03-26T12:59:15.938322
| 2020-01-02T01:27:37
| 2020-01-02T01:27:37
| 144,917,453
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 575
|
py
|
# From star art - do the same but allow for character to be passed in as argument
def drawLeftStars(num, char):
text = ""
text += char * num
text += " " * (75 - num)
return text
def drawRightStars(num, char):
text = ""
text += " " * (75 - num)
text += char * num
return text
def drawCenteredStars(num, char):
text = ""
text += " " * ((75 - num)//2)
text += char * num
text += " " * ((75 - num)//2)
return text
# Test Cases
print(drawLeftStars(35, "%"))
print(drawRightStars(35, "@"))
print(drawCenteredStars(35, "!"))
|
[
"kevinmichaelcamp@gmail.com"
] |
kevinmichaelcamp@gmail.com
|
59f7e0dc619c13fdb4d809743f30f6d88e72df7c
|
78d35bb7876a3460d4398e1cb3554b06e36c720a
|
/sdk/network/azure-mgmt-network/azure/mgmt/network/v2020_08_01/aio/operations/_express_route_ports_locations_operations.py
|
4f8fd8e9b5f9c43e377a4b1f5bd034b2cdb2c574
|
[
"MIT",
"LicenseRef-scancode-generic-cla",
"LGPL-2.1-or-later"
] |
permissive
|
catchsrinivas/azure-sdk-for-python
|
e35f59b60318a31b3c940a7a3a07b61b28118aa5
|
596227a7738a5342274486e30489239d539b11d1
|
refs/heads/main
| 2023-08-27T09:08:07.986249
| 2021-11-11T11:13:35
| 2021-11-11T11:13:35
| 427,045,896
| 0
| 0
|
MIT
| 2021-11-11T15:14:31
| 2021-11-11T15:14:31
| null |
UTF-8
|
Python
| false
| false
| 7,901
|
py
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar
import warnings
from azure.core.async_paging import AsyncItemPaged, AsyncList
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
from azure.mgmt.core.exceptions import ARMErrorFormat
from ... import models as _models
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class ExpressRoutePortsLocationsOperations:
"""ExpressRoutePortsLocationsOperations async operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.network.v2020_08_01.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer) -> None:
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
def list(
self,
**kwargs: Any
) -> AsyncIterable["_models.ExpressRoutePortsLocationListResult"]:
"""Retrieves all ExpressRoutePort peering locations. Does not return available bandwidths for each
location. Available bandwidths can only be obtained when retrieving a specific peering
location.
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either ExpressRoutePortsLocationListResult or the result of cls(response)
:rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.network.v2020_08_01.models.ExpressRoutePortsLocationListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.ExpressRoutePortsLocationListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-08-01"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize('ExpressRoutePortsLocationListResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
list.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Network/ExpressRoutePortsLocations'} # type: ignore
async def get(
self,
location_name: str,
**kwargs: Any
) -> "_models.ExpressRoutePortsLocation":
"""Retrieves a single ExpressRoutePort peering location, including the list of available
bandwidths available at said peering location.
:param location_name: Name of the requested ExpressRoutePort peering location.
:type location_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: ExpressRoutePortsLocation, or the result of cls(response)
:rtype: ~azure.mgmt.network.v2020_08_01.models.ExpressRoutePortsLocation
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.ExpressRoutePortsLocation"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-08-01"
accept = "application/json"
# Construct URL
url = self.get.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'locationName': self._serialize.url("location_name", location_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('ExpressRoutePortsLocation', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Network/ExpressRoutePortsLocations/{locationName}'} # type: ignore
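# Illustrative usage sketch (application code, not part of this generated module). It assumes
# the multi-API async client `azure.mgmt.network.aio.NetworkManagementClient` exposes this
# operation group as `express_route_ports_locations`; the angle-bracket values are placeholders.
import asyncio
from azure.identity.aio import DefaultAzureCredential
from azure.mgmt.network.aio import NetworkManagementClient

async def main():
    async with DefaultAzureCredential() as credential:
        async with NetworkManagementClient(credential, "<subscription-id>") as client:
            # list() pages through all ExpressRoutePort peering locations.
            async for location in client.express_route_ports_locations.list():
                print(location.name)
            # get() returns a single peering location, including its available bandwidths.
            one = await client.express_route_ports_locations.get("<location-name>")
            print(one.id)

asyncio.run(main())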
|
[
"noreply@github.com"
] |
catchsrinivas.noreply@github.com
|
dca9a6c7ed4c4e8ab5c5da27e4c28c00529a590e
|
0111fa900ae348d9f06a4976567c7f3ab8b82be3
|
/python/arbolBin.py
|
fbb13627c7728861f95e3ab72b6201555ab997d2
|
[] |
no_license
|
apdaza/universidad-ejercicios
|
4e790f84637f8753a166a5318d21217161cbe0bc
|
64a02fd7b63e2cd899a91297f652e2351ccbce2f
|
refs/heads/master
| 2021-09-11T04:59:17.252072
| 2021-09-04T13:56:04
| 2021-09-04T13:56:04
| 86,726,523
| 6
| 4
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,849
|
py
|
class Nodo():
def __init__(self,val,izq=None,der=None):
self.valor = val
self.izquierda = izq
self.derecha = der
def inorden(arbol):
if arbol == None:
return []
else:
return inorden(arbol.izquierda)+[arbol.valor]+inorden(arbol.derecha)
def preorden(arbol):
if arbol == None:
return []
else:
return [arbol.valor]+preorden(arbol.izquierda)+preorden(arbol.derecha)
def postorden(arbol):
if arbol == None:
return []
else:
return postorden(arbol.izquierda)+postorden(arbol.derecha)+[arbol.valor]
def evaluar(arbol):
if arbol.valor == '+':
return evaluar(arbol.izquierda) + evaluar(arbol.derecha)
elif arbol.valor == '-':
return evaluar(arbol.izquierda) - evaluar(arbol.derecha)
elif arbol.valor == '*':
return evaluar(arbol.izquierda) * evaluar(arbol.derecha)
elif arbol.valor == '/':
return evaluar(arbol.izquierda) / evaluar(arbol.derecha)
else:
return int(arbol.valor)
def suma(arbol):
if arbol == None:
return 0
else:
return suma(arbol.izquierda)+suma(arbol.derecha)+arbol.valor
def insertar(arbol, valor):
if arbol == None:
return Nodo(valor)
else:
if valor >= arbol.valor:
return Nodo(arbol.valor, arbol.izquierda, insertar(arbol.derecha, valor))
else:
return Nodo(arbol.valor, insertar(arbol.izquierda, valor), arbol.derecha)
def insertarLista(arbol, lista):
if lista==[]:
return arbol
else:
if arbol == None:
return insertarLista(Nodo(lista[0]), lista[1:])
else:
return insertarLista(insertar(arbol, lista[0]), lista[1:])
def imprimeArbolSangrado(arbol, nivel=0):
if arbol == None:
return
else:
imprimeArbolSangrado(arbol.izquierda, nivel+1)
        print(' ' * nivel + str(arbol.valor))
imprimeArbolSangrado(arbol.derecha, nivel+1)
def buscarEnArbol(valor, arbol):
if arbol == None:
return False
elif arbol.valor == valor:
return True
elif valor < arbol.valor:
return buscarEnArbol(valor, arbol.izquierda)
else:
return buscarEnArbol(valor, arbol.derecha)
def contarElementos(arbol):
if arbol == None:
return 0
else:
return 1 + contarElementos(arbol.derecha) + contarElementos(arbol.izquierda)
a = Nodo(15, Nodo(10, Nodo(4)), Nodo(25))
b = Nodo('+', Nodo('-', Nodo('14'),Nodo('4')), Nodo('25'))
print(inorden(a))
print(preorden(a))
print(postorden(a))
print(suma(a))
print(inorden(insertar(a, 67)))
print(inorden(insertarLista(a, [23, 2, 17, 20])))
imprimeArbolSangrado(a, 0)
print(inorden(b))
print(preorden(b))
print(postorden(b))
print(evaluar(b))
print(buscarEnArbol(10, a))
print(buscarEnArbol(110, a))
print(contarElementos(a))
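# Extra illustrative example using only the functions above: 3 * (2 + 5) = 21.
c = Nodo('*', Nodo('3'), Nodo('+', Nodo('2'), Nodo('5')))
print(inorden(c))  # ['3', '*', '2', '+', '5']
print(evaluar(c))  # 21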
|
[
"apdaza@gmail.com"
] |
apdaza@gmail.com
|
e329ad301e71e357fe4efbf573cac5c78ea6436d
|
83c92bdbab514a9630d74d24f91cbd77ec7e33f6
|
/{{cookiecutter.repo_name}}/{{cookiecutter.repo_name}}/settings/production.py
|
f8de424d0459cfbb0f1e441344801a9075bcb14b
|
[] |
no_license
|
dnmellen/cookiecutter-simple-django-sqlite
|
2874f890858aac2bac923d22b03c702c93a5ee0f
|
51b5128b6335acc464b572fec34f5c75427d97d0
|
refs/heads/master
| 2021-01-23T23:51:56.699451
| 2013-09-26T10:34:46
| 2013-09-26T10:34:46
| 13,118,893
| 4
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 155
|
py
|
from .base import *
DEBUG = True
TEMPLATE_DEBUG = DEBUG
ADMINS = (
('{{cookiecutter.author_name}}', '{{cookiecutter.email}}'),
)
MANAGERS = ADMINS
|
[
"marcofucci@gmail.com"
] |
marcofucci@gmail.com
|
900763796b0f0666b1b4561b359127ee4227b144
|
799f7938856a320423625c6a6a3881eacdd0e039
|
/lldb/test/API/lang/cpp/class-template-type-parameter-pack/TestClassTemplateTypeParameterPack.py
|
88beac18e891abbb7f08e342f38989c7c35f5851
|
[
"NCSA",
"Apache-2.0",
"LLVM-exception"
] |
permissive
|
shabalind/llvm-project
|
3b90d1d8f140efe1b4f32390f68218c02c95d474
|
d06e94031bcdfa43512bf7b0cdfd4b4bad3ca4e1
|
refs/heads/main
| 2022-10-18T04:13:17.818838
| 2021-02-04T13:06:43
| 2021-02-04T14:23:33
| 237,532,515
| 0
| 0
|
Apache-2.0
| 2020-01-31T23:17:24
| 2020-01-31T23:17:23
| null |
UTF-8
|
Python
| false
| false
| 4,146
|
py
|
import lldb
from lldbsuite.test.decorators import *
from lldbsuite.test.lldbtest import *
from lldbsuite.test import lldbutil
class TestCaseClassTemplateTypeParameterPack(TestBase):
mydir = TestBase.compute_mydir(__file__)
@expectedFailureAll(oslist=["windows"]) # Fails to read memory from target.
@no_debug_info_test
def test(self):
self.build()
self.dbg.CreateTarget(self.getBuildArtifact("a.out"))
self.expect_expr("emptyTypePack", result_type="TypePack<>",
result_children=[ValueCheck(name="a", type="int")])
self.expect_expr("oneElemTypePack", result_type="TypePack<int>",
result_children=[ValueCheck(name="a", type="int")])
self.expect_expr("twoElemTypePack", result_type="TypePack<int, float>",
result_children=[ValueCheck(name="a", type="int")])
self.expect_expr("emptyAnonTypePack", result_type="AnonTypePack<>",
result_children=[ValueCheck(name="b", type="int")])
self.expect_expr("oneElemAnonTypePack", result_type="AnonTypePack<int>",
result_children=[ValueCheck(name="b", type="int")])
self.expect_expr("twoElemAnonTypePack", result_type="AnonTypePack<int, float>",
result_children=[ValueCheck(name="b", type="int")])
self.expect_expr("emptyAnonTypePackAfterTypeParam", result_type="AnonTypePackAfterTypeParam<int>",
result_children=[ValueCheck(name="c", type="int")])
self.expect_expr("oneElemAnonTypePackAfterTypeParam", result_type="AnonTypePackAfterTypeParam<int, float>",
result_children=[ValueCheck(name="c", type="int")])
self.expect_expr("emptyAnonTypePackAfterAnonTypeParam", result_type="AnonTypePackAfterAnonTypeParam<int>",
result_children=[ValueCheck(name="d", type="float")])
self.expect_expr("oneElemAnonTypePackAfterAnonTypeParam", result_type="AnonTypePackAfterAnonTypeParam<int, float>",
result_children=[ValueCheck(name="d", type="float")])
self.expect_expr("emptyTypePackAfterAnonTypeParam", result_type="TypePackAfterAnonTypeParam<int>",
result_children=[ValueCheck(name="e", type="int")])
self.expect_expr("oneElemTypePackAfterAnonTypeParam", result_type="TypePackAfterAnonTypeParam<int, float>",
result_children=[ValueCheck(name="e", type="int")])
self.expect_expr("emptyTypePackAfterTypeParam", result_type="TypePackAfterTypeParam<int>",
result_children=[ValueCheck(name="f", type="int")])
self.expect_expr("oneElemTypePackAfterTypeParam", result_type="TypePackAfterTypeParam<int, float>",
result_children=[ValueCheck(name="f", type="int")])
self.expect_expr("emptyAnonTypePackAfterNonTypeParam", result_type="AnonTypePackAfterNonTypeParam<1>",
result_children=[ValueCheck(name="g", type="int")])
self.expect_expr("oneElemAnonTypePackAfterNonTypeParam", result_type="AnonTypePackAfterNonTypeParam<1, int>",
result_children=[ValueCheck(name="g", type="int")])
self.expect_expr("emptyAnonTypePackAfterAnonNonTypeParam", result_type="AnonTypePackAfterAnonNonTypeParam<1>",
result_children=[ValueCheck(name="h", type="float")])
self.expect_expr("oneElemAnonTypePackAfterAnonNonTypeParam", result_type="AnonTypePackAfterAnonNonTypeParam<1, int>",
result_children=[ValueCheck(name="h", type="float")])
self.expect_expr("emptyTypePackAfterAnonNonTypeParam", result_type="TypePackAfterAnonNonTypeParam<1>",
result_children=[ValueCheck(name="i", type="int")])
self.expect_expr("oneElemTypePackAfterAnonNonTypeParam", result_type="TypePackAfterAnonNonTypeParam<1, int>",
result_children=[ValueCheck(name="i", type="int")])
self.expect_expr("emptyTypePackAfterNonTypeParam", result_type="TypePackAfterNonTypeParam<1>",
result_children=[ValueCheck(name="j", type="int")])
self.expect_expr("oneElemTypePackAfterNonTypeParam", result_type="TypePackAfterNonTypeParam<1, int>",
result_children=[ValueCheck(name="j", type="int")])
|
[
"teemperor@gmail.com"
] |
teemperor@gmail.com
|
dc244dc98111c4d6ce264ef5fb440adca03a7d4b
|
15f321878face2af9317363c5f6de1e5ddd9b749
|
/solutions_python/Problem_199/3413.py
|
2e81c2d19b8f1f7dccafdf9a831f4d28676309e6
|
[] |
no_license
|
dr-dos-ok/Code_Jam_Webscraper
|
c06fd59870842664cd79c41eb460a09553e1c80a
|
26a35bf114a3aa30fc4c677ef069d95f41665cc0
|
refs/heads/master
| 2020-04-06T08:17:40.938460
| 2018-10-14T10:12:47
| 2018-10-14T10:12:47
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,232
|
py
|
import sys
def switcher(string , num):
final_string = ''
if len(string) < num:
return False
for element in string:
if element == '-' and num > 0:
final_string = final_string + '+'
elif element == '+' and num > 0:
final_string = final_string + '-'
else:
final_string = final_string + element
num = num -1
print(final_string)
return final_string
def plus_detonation(string):
return_string = string
print (string)
for element in string:
if element == '+':
string = string [1:]
print (string)
return_string = string
elif element == '-':
return return_string
return return_string
print (plus_detonation('+---'))
def solver(string, num):
temp_string = string
print (string)
temp_string = plus_detonation(temp_string)
counter = 0
print (temp_string)
if temp_string == '':
return counter
while temp_string != '':
temp_string = switcher(temp_string, num)
counter += 1
if temp_string == False:
return 'IMPOSSIBLE'
else:
temp_string = plus_detonation(temp_string)
return counter
input_file = sys.argv[1] + '.in'
output_file = sys.argv[1] + '.out'
def inputer(input_file):
output_list = []
with open (input_file) as fin:
finx = fin.read().split('\n')
biglist = [line.strip().split(' ') for line in finx]
biglist = biglist[1:-1]
return biglist
biglist = inputer(input_file)
return_list = []
for element in biglist:
test_string = element[0]
test_num = int(element[1])
return_list.append(solver(test_string, test_num))
def outputer(output_file, return_list):
with open (output_file, 'w') as out:
x = 1
for element in return_list:
if element == 'IMPOSSIBLE':
out.write('Case #%d: %s \n' %(x, element))
else:
out.write('Case #%d: %d \n' %(x, element))
x += 1
outputer(output_file, return_list)
|
[
"miliar1732@gmail.com"
] |
miliar1732@gmail.com
|
bbb9ab49c765200446265a8b66843165bf662912
|
f07a42f652f46106dee4749277d41c302e2b7406
|
/Data Set/bug-fixing-5/c8697a4864f95672c58598eff207548b0bcc63e5-<__init__>-bug.py
|
f1f08670f92375243709b5a78775622ba949683e
|
[] |
no_license
|
wsgan001/PyFPattern
|
e0fe06341cc5d51b3ad0fe29b84098d140ed54d1
|
cc347e32745f99c0cd95e79a18ddacc4574d7faa
|
refs/heads/main
| 2023-08-25T23:48:26.112133
| 2021-10-23T14:11:22
| 2021-10-23T14:11:22
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 776
|
py
|
def __init__(self, argument_spec):
self.spec = argument_spec
self.module = None
self.init_module()
self.interface = self.module.params['interface']
self.mode = self.module.params['mode']
self.state = self.module.params['state']
self.access_vlan = self.module.params['access_vlan']
self.native_vlan = self.module.params['native_vlan']
self.trunk_vlans = self.module.params['trunk_vlans']
self.host = self.module.params['host']
self.username = self.module.params['username']
self.port = self.module.params['port']
self.changed = False
self.updates_cmd = list()
self.results = dict()
self.proposed = dict()
self.existing = dict()
self.end_state = dict()
self.intf_info = dict()
self.intf_type = None
|
[
"dg1732004@smail.nju.edu.cn"
] |
dg1732004@smail.nju.edu.cn
|
3eae3203390b548511b0f67aaa046b3793f0e674
|
caace044baf7a6f2b0bda65ae361eed06bddfc3c
|
/dailyQuestion/2020/2020-05/05-30/python/solution_mono_stack.py
|
c291e66a6dccdffb17a62db905e6b0ac18ddf515
|
[
"Apache-2.0"
] |
permissive
|
russellgao/algorithm
|
fd6126e89c40d7d351c53bbd5fde690c9be899ef
|
ad5e724d20a8492b8eba03fc0f24e4ff5964b3ea
|
refs/heads/master
| 2023-03-28T03:00:02.370660
| 2021-03-28T10:56:38
| 2021-03-28T10:56:38
| 259,038,372
| 3
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 808
|
py
|
# Monotonic stack
def largestRectangleArea(heights: [int]) -> int:
n = len(heights)
left, right = [0] * n, [0] * n
mono_stack = []
for i in range(n):
while mono_stack and heights[mono_stack[-1]] >= heights[i]:
mono_stack.pop()
left[i] = mono_stack[-1] if mono_stack else -1
mono_stack.append(i)
mono_stack = []
for i in range(n - 1, -1, -1):
while mono_stack and heights[mono_stack[-1]] >= heights[i]:
mono_stack.pop()
right[i] = mono_stack[-1] if mono_stack else n
mono_stack.append(i)
result = max((right[i] - left[i] - 1) * heights[i] for i in range(n)) if n > 0 else 0
return result
if __name__ == "__main__":
heights = [2, 1, 5, 6, 2, 3]
result = largestRectangleArea(heights)
print(result)
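# Brute-force O(n^2) cross-check (illustrative addition): expand every window, track its minimum
# bar, and compare the best area with the monotonic-stack answer above.
def largest_rectangle_bruteforce(heights):
    best = 0
    for i in range(len(heights)):
        min_h = heights[i]
        for j in range(i, len(heights)):
            min_h = min(min_h, heights[j])         # smallest bar inside heights[i..j]
            best = max(best, min_h * (j - i + 1))  # rectangle spanning that window
    return best

assert largestRectangleArea([2, 1, 5, 6, 2, 3]) == largest_rectangle_bruteforce([2, 1, 5, 6, 2, 3]) == 10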
|
[
"gaoweizong@hd123.com"
] |
gaoweizong@hd123.com
|
d65b720c7fe482af48ba1cb8dacd00f00571ed21
|
dfffc423a768475e680f954e3eea6c944df965d5
|
/main.py
|
72aba8f9dea35dccd8ba3daf532a91ab0b87e329
|
[] |
no_license
|
ssfdust/wechatheadless
|
13948ea86cfd7cb71d6cdea74fc4931b4d111843
|
544a8a74c5adcabb35b0828b52de49df43f222e1
|
refs/heads/master
| 2023-05-10T05:29:29.290177
| 2021-06-01T02:57:34
| 2021-06-01T02:57:34
| 372,156,803
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,436
|
py
|
from selenium import webdriver
from pathlib import Path
from selenium.webdriver.firefox.options import Options
from time import sleep
from selenium.common.exceptions import NoSuchElementException
import os
url_prefix = os.environ.get("INJECTOR_URL_PREFIX", "https://127.0.0.1")
injectjs = f"""
var script = document.createElement('script')
script.src = '{url_prefix}/injector.js'
document.getElementsByTagName('head')[0].appendChild(script)
"""
options = Options()
options.headless = True
# profile_path = Path(__file__).parent / "ffprofile"
geckodriver_path = str(Path(__file__).parent / "bin/geckodriver")
driver = webdriver.Firefox(options=options, executable_path=geckodriver_path)
driver.get("https://wx.qq.com")
sleep(8)
element = driver.find_element_by_xpath("/html/body/div[1]/div[2]/div[1]/img")
element.screenshot("./qrcode.png")
print("生成qrcode.png")
while True:
try:
driver.find_element_by_xpath("/html/body/div[1]/div/div[1]/div[1]/div[1]/img")
os.remove("./qrcode.png")
print("删除qrcode.png")
break
except NoSuchElementException:
print("not login")
sleep(2)
def load(webdriver):
webdriver.execute_script(injectjs)
sleep(2)
webdriver.execute_script("injector.run()")
def reload_(webdriver):
webdriver.refresh()
sleep(6)
load(webdriver)
load(driver)
while True:
sleep(7200)
print("刷新页面")
reload_(driver)
|
[
"ssfdust@gmail.com"
] |
ssfdust@gmail.com
|
1c81599ad7026475eacd7d54f0fd2ea3bc926b78
|
6d8a42478faa8eedb7efcc60aeeeb3d34beb0fab
|
/lottery/models.py
|
adaf021fcbdd7f4206fbfd81fd8b4c07efa1f61a
|
[] |
no_license
|
zjleifeng/lottery
|
7509c45d39e71f3e47c4ffed18723335888a3c96
|
bb6e7ba9acff0659a82d431bee17a2c6f7c65731
|
refs/heads/master
| 2020-04-11T11:00:44.328193
| 2018-07-05T15:47:14
| 2018-07-05T15:47:14
| 77,206,353
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,407
|
py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Time : 2016/12/8 12:35
# @Author : eric
# @Site :
# @File : models.py
# @Software: PyCharm
from django.db import models
# Create your models here.
class Person(models.Model):
name=models.CharField(max_length=50,verbose_name=u'姓名')
tel=models.BigIntegerField(verbose_name=u'手机号码')
num=models.CharField(max_length=50,default=100,verbose_name=u'奖券号码')
isWin=models.IntegerField(default=0,verbose_name=u'是否中奖')
mWin=models.IntegerField(default=0)
cWin=models.IntegerField(default=1)
class Meta:
verbose_name_plural=u'抽奖人员信息'
def __unicode__(self):
return self.name
__str__=__unicode__
class Result(models.Model):
uid=models.IntegerField()
name=models.CharField(max_length=50,verbose_name=u'中奖人姓名')
tel=models.CharField(max_length=50,verbose_name=u'中奖人电话')
num=models.CharField(max_length=50,default=100,verbose_name=u'奖券号码')
createtime=models.DateTimeField(auto_now_add=True,verbose_name=u'中奖时间')
awardname=models.CharField(max_length=50,verbose_name=u'奖项名称')
isdel=models.IntegerField(default=0,verbose_name=u'是否被删除1是0否')
class Meta:
verbose_name_plural=u'中奖人员信息'
def __unicode__(self):
return self.name
__str__=__unicode__
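# Illustrative ORM sketch (not part of the original module): assumes a configured Django project
# with this lottery app installed and migrated, e.g. run inside `manage.py shell`.
winner = Person.objects.create(name='eric', tel=13800000000, num='101', isWin=1)
print(Person.objects.filter(isWin=1).count())
print(Result.objects.filter(isdel=0).order_by('-createtime'))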
|
[
"zj@duetin.com"
] |
zj@duetin.com
|
8b9d544b0d8141bf0019d1b34f8dda0b58913596
|
83686519904694228204a481aa949ded7b4f65f8
|
/Python/python_stack/Django/JDsTruckTracker/apps/truck_tracker/urls.py
|
3580bb4c4738300c450ae5a810b1e56a2aebd98a
|
[] |
no_license
|
andres121985/DojoAssignments
|
5621a9253ab5113a5038f62474582a59fec11a27
|
62f525e8574628c72e01df5725a2495bdeb5d3a9
|
refs/heads/master
| 2021-01-23T12:44:48.501795
| 2017-06-02T18:40:33
| 2017-06-02T18:40:33
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 521
|
py
|
from django.conf.urls import url
from . import views
urlpatterns = [
url(r'^$', views.index),
url(r'^trucks$', views.trucks),
url(r'^addtruck$', views.add_truck),
url(r'^add$', views.add),
url(r'^delete$', views.delete),
url(r'^logout$', views.logout),
url(r'^login$', views.login),
url(r'^register$', views.register),
url(r'^search$', views.search),
url(r'^category/(?P<id>\d+)$', views.category),
url(r'^category/(?P<id>\d+)/(?P<truck_id>\d+)$', views.specific_truck),
]
|
[
"rd@rubenduran.net"
] |
rd@rubenduran.net
|
bea3b00a62b7195cd1722b1ab7beea5807b6cd01
|
ea94fc6a5c16f3266a7a48b005b3f10fef92c845
|
/src/maths/nodes/ListNode.py
|
90819c6904b829984f52466b69ed61fda4235ed3
|
[
"MIT"
] |
permissive
|
Grahack/Turing
|
ea340ee3f3db33a27bdf4a053f34f4c9bec525c3
|
fad060121d45b91f82af8952dd9f64e5d7bd9ed2
|
refs/heads/master
| 2020-03-17T00:49:45.176435
| 2018-05-11T20:07:11
| 2018-05-11T20:07:11
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 774
|
py
|
# -*- coding: utf-8 -*-
import util.html
from util.math import proper_str
from .AstNode import *
class ListNode(AstNode):
"""Identifier node
value -- value (list of AstNode)"""
value = None
def __init__(self, value: List[AstNode]):
super().__init__(True)
self.value = value
def __str__(self):
return "[List %s]" % self.value
def __repr__(self):
return "ListNode(%r)" % self.value
def code(self, bb=False) -> str:
return (util.html.sanitize("[%s]") if bb else "[%s]") % proper_str([node.code(bb) for node in self.value])[1:-1]
def python(self) -> str:
return "list([%s])" % ", ".join(x.python() for x in self.value)
def children(self) -> List["AstNode"]:
return self.value
|
[
"zippedfire@free.fr"
] |
zippedfire@free.fr
|
2269e3f344eb0175a3534a568e2586a9132d6e67
|
2db5bf5832ddb99e93bb949ace1fad1fde847319
|
/beginLearn/AndrewNg/exercise/c2/L_layer_network.py
|
64efd67d5e58e163209f4e2f658bd28b8d10c20c
|
[] |
no_license
|
RoderickAdriance/PythonDemo
|
2d92b9aa66fcd77b6f797e865df77fbc8c2bcd14
|
98b124fecd3a972d7bc46661c6a7de8787b8e761
|
refs/heads/master
| 2020-04-06T17:36:46.000133
| 2018-11-15T07:07:03
| 2018-11-15T07:07:03
| 157,666,809
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,408
|
py
|
import time
import numpy as np
import h5py
import matplotlib.pyplot as plt
import scipy
from PIL import Image
from scipy import ndimage
from exercise.c6.dnn_app_utils_v2 import *
def L_layer_model(X, Y, layers_dims, learning_rate=0.0075, num_iterations=3000, print_cost=False):
"""
Implements a L-layer neural network: [LINEAR->RELU]*(L-1)->LINEAR->SIGMOID.
Arguments:
X -- data, numpy array of shape (number of examples, num_px * num_px * 3)
Y -- true "label" vector (containing 0 if cat, 1 if non-cat), of shape (1, number of examples)
layers_dims -- list containing the input size and each layer size, of length (number of layers + 1).
learning_rate -- learning rate of the gradient descent update rule
num_iterations -- number of iterations of the optimization loop
print_cost -- if True, it prints the cost every 100 steps
Returns:
parameters -- parameters learnt by the model. They can then be used to predict.
"""
costs = []
parameters = initialize_parameters_deep(layers_dims)
for i in range(0, num_iterations):
        # Forward pass: AL holds the final predictions, caches hold each layer's Z and the parameters W and b
        # AL looks like [0.5, 0.8, 0.3, ...]
        AL, caches = L_model_forward(X, parameters)
        # Compute the cost
        cost = compute_cost(AL, Y)
        # Backpropagation
        grads = L_model_backward(AL, Y, caches)
        # The source snippet is truncated here; a standard completion of this loop (assuming the
        # usual update_parameters helper from dnn_app_utils_v2) would be:
        parameters = update_parameters(parameters, grads, learning_rate)
        if print_cost and i % 100 == 0:
            print("Cost after iteration %i: %f" % (i, cost))
            costs.append(cost)
    return parameters
|
[
"1371553306@qq.com"
] |
1371553306@qq.com
|
fe8925b066e6f71c81f76db9c4cfec33be3c51fa
|
f22778fb83b4723dd3dbc26834a8475c4b8c442c
|
/nana_utils.py
|
d10f46774c1b1861c6af2babc1c15319ef167d3d
|
[] |
no_license
|
innovationgarage/deprecated-PortPollution-AISreduction-dask
|
070b3011b505de74b2126b83fa93a7bb0eace84c
|
d66cfa96529f54a6f12faa39560beab10d7ef5a8
|
refs/heads/master
| 2020-04-01T15:10:53.598911
| 2018-10-25T07:38:47
| 2018-10-25T07:38:47
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 456
|
py
|
import dask.distributed, distributed.client, dask.bag, daskutils.io.msgpack, daskutils.base, os.path, uuid, msgpack, daskutils.sort
#client = dask.distributed.Client('ymslanda.innovationgarage.tech:8786')
data = [uuid.uuid4().hex for a in range(0, 100000)]
s = daskutils.sort.MergeSort("/tmp/")
res = s.sort(dask.bag.from_sequence(data, npartitions=4))
res = res.compute()
assert len(res) == len(data)
assert res == sorted(res)
assert res == sorted(data)
|
[
"egil.moller@freecode.no"
] |
egil.moller@freecode.no
|
339a75ee64661331f4a5fd9250006afbdd1e8c4c
|
1c390cd4fd3605046914767485b49a929198b470
|
/leetcode/partition-equal-subset-sum.py
|
6cceb12902e683d1ba9985b592a5f627bf4f9821
|
[] |
no_license
|
wwwwodddd/Zukunft
|
f87fe736b53506f69ab18db674311dd60de04a43
|
03ffffee9a76e99f6e00bba6dbae91abc6994a34
|
refs/heads/master
| 2023-01-24T06:14:35.691292
| 2023-01-21T15:42:32
| 2023-01-21T15:42:32
| 163,685,977
| 7
| 8
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 309
|
py
|
from typing import List

class Solution:
def canPartition(self, a: List[int]) -> bool:
s = sum(a)
if s % 2:
return False
f = [0 for i in range(s + 1)]
f[0] = 1
for i in a:
for j in range(i, s + 1)[::-1]:
f[j] |= f[j - i]
return f[s // 2] == 1
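# Illustrative checks for the subset-sum DP above.
s = Solution()
print(s.canPartition([1, 5, 11, 5]))  # True: {11} vs {1, 5, 5}
print(s.canPartition([1, 2, 3, 5]))   # False: the total (11) is odd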
|
[
"wwwwodddd@gmail.com"
] |
wwwwodddd@gmail.com
|
ef7a98380f2dc3cb9f4e993171957dd70c6761f7
|
d41d18d3ea6edd2ec478b500386375a8693f1392
|
/plotly/validators/scatter3d/error_x/_value.py
|
db1f336f929396b215dbe5a268a8760d26d12b9a
|
[
"MIT"
] |
permissive
|
miladrux/plotly.py
|
38921dd6618650d03be9891d6078e771ffccc99a
|
dbb79e43e2cc6c5762251537d24bad1dab930fff
|
refs/heads/master
| 2020-03-27T01:46:57.497871
| 2018-08-20T22:37:38
| 2018-08-20T22:37:38
| 145,742,203
| 1
| 0
|
MIT
| 2018-08-22T17:37:07
| 2018-08-22T17:37:07
| null |
UTF-8
|
Python
| false
| false
| 434
|
py
|
import _plotly_utils.basevalidators
class ValueValidator(_plotly_utils.basevalidators.NumberValidator):
def __init__(
self, plotly_name='value', parent_name='scatter3d.error_x', **kwargs
):
super(ValueValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
edit_type='calc',
min=0,
role='info',
**kwargs
)
|
[
"adam.kulidjian@gmail.com"
] |
adam.kulidjian@gmail.com
|
e000f3f97c731836cb809f51e139ba47ba132579
|
ddf002d1084d5c63842a6f42471f890a449966ee
|
/basics/Python/Recursion/recursion_factorial.py
|
09684087ef55e781859db5daf3d5512e54a8269a
|
[] |
no_license
|
RaghavJindal2000/Python
|
0ab3f198cbc5559bdf46ac259c7136356f7f09aa
|
8e5c646585cff28ba3ad9bd6c384bcb5537d671a
|
refs/heads/master
| 2023-01-01T23:56:02.073029
| 2020-10-18T19:30:01
| 2020-10-18T19:30:01
| 263,262,452
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 167
|
py
|
def fact(n):
if(n<=0):
return 1
else:
return n*fact(n-1)
n=int(input("Enter the Number : "))
print("Factorial of ",n," is = ",fact(n))
input()
|
[
"40332753+RaghavJindal2000@users.noreply.github.com"
] |
40332753+RaghavJindal2000@users.noreply.github.com
|
f928f4e34606749623dbabda74342d7c8d15021e
|
56fd2d92b8327cfb7d8f95b89c52e1700343b726
|
/examples/notebooks/rebalance_etfs/strategy.py
|
d5636616438b0a536a14bc605986acb7f768e20f
|
[
"MIT"
] |
permissive
|
stjordanis/Odin
|
fecb640ccf4f2e6eb139389d25cbe37da334cdb6
|
e2e9d638c68947d24f1260d35a3527dd84c2523f
|
refs/heads/master
| 2020-04-15T09:13:17.850126
| 2017-02-09T00:25:55
| 2017-02-09T00:25:55
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,193
|
py
|
import pandas as pd
from odin.strategy import AbstractStrategy
from odin.strategy.templates import BuyAndHoldStrategy
from odin.utilities.mixins.strategy_mixins import (
LongStrategyMixin,
TotalSellProportionMixin,
AlwaysBuyIndicatorMixin,
NeverSellIndicatorMixin,
DefaultPriorityMixin,
DefaultFeaturesMixin,
)
class BuyAndHoldSpyderStrategy(BuyAndHoldStrategy):
def buy_indicator(self, feats):
return feats.name in ("SPY", )
class RebalanceETFStrategy(
LongStrategyMixin,
TotalSellProportionMixin,
AlwaysBuyIndicatorMixin,
NeverSellIndicatorMixin,
DefaultPriorityMixin,
DefaultFeaturesMixin,
):
def compute_buy_proportion(self, feats):
"""Implementation of abstract base class method."""
if feats.name == "SPY":
return 0.6
elif feats.name == "AGG":
return 0.4
def exit_indicator(self, feats):
"""Implementation of abstract base class method."""
symbol = feats.name
pos = self.portfolio.portfolio_handler.filled_positions[symbol]
date = self.portfolio.data_handler.current_date
return pos.compute_holding_period(date).days > 63
|
[
"jamesbrofos@gmail.com"
] |
jamesbrofos@gmail.com
|
8ad8780e0d55bc33de20a87a14f2635eba86d737
|
dfcd2ab21b4b87b15a908cae58650eb8f4e34539
|
/accounts/views.py
|
542b21c365142be1d526953ce0c17e0b83451680
|
[] |
no_license
|
janak29292/esnayproductions
|
29a73eb064165e80a4729cf4b26095cb6dfb09f1
|
762dfa02630668ed97f86760c3b20824cf89d6b2
|
refs/heads/master
| 2023-02-27T23:10:44.697353
| 2021-02-06T09:44:04
| 2021-02-06T09:44:04
| 325,578,763
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,932
|
py
|
from django.http import Http404
from django.shortcuts import render
# Create your views here.
from accounts.models import Team, Portfolio, PortfolioCategory, Blog
def index(request, *args, **kwargs):
team_players = Team.objects.all()
portfolios = Portfolio.objects.filter(active=True)[:11]
categories = PortfolioCategory.objects.all()
blogs = Blog.objects.all().order_by('-id')[:10]
context = {
"team": team_players,
"portfolios": portfolios,
"categories": categories,
"blogs": blogs
}
return render(request, 'index.html', context=context)
def about(request, *args, **kwargs):
return render(request, 'about.html')
def team(request, *args, **kwargs):
team_players = Team.objects.all()
context = {"team": team_players}
return render(request, 'team.html', context=context)
def portfolio(request, *args, **kwargs):
portfolios = Portfolio.objects.filter(active=True)[:50]
categories = PortfolioCategory.objects.all()
context = {
"portfolios": portfolios,
"categories": categories
}
return render(request, 'portfolio-four-columns.html', context=context)
def portfolio_detail(request, *args, **kwargs):
pk = kwargs.get('pk')
try:
instance = Portfolio.objects.get(id=pk)
except Portfolio.DoesNotExist:
raise Http404
context = {
"portfolio": instance
}
return render(request, 'portfolio-single-item.html', context=context)
def blog(request, *args, **kwargs):
blogs = Blog.objects.all().order_by('-id')
context = {
"blogs": blogs
}
return render(request, 'blog-fullwidth.html', context=context)
def blog_detail(request, *args, **kwargs):
return render(request, 'blog-single-post.html')
def contact(request, *args, **kwargs):
return render(request, 'contact.html')
|
[
"doon.29292@gmail.com"
] |
doon.29292@gmail.com
|
bef0ca1abde4a21dfb30146a13f94cc01fc77e1e
|
1c40a5b1e7ffb7ffed2cfe831c1686aa4af82284
|
/omm/analysis/aes/aes_excel_bio.py
|
f4ccead5ecbb61b5ce170fb185f60b30a91a0d48
|
[] |
no_license
|
rhodges/oregon-marinemap
|
3c4bb3c13f15ec5f2efd405b0006c8a85d3b42b0
|
4a3797f507a48fd158be5f751fa0ca8c24a47cb7
|
refs/heads/master
| 2016-09-05T17:49:44.702600
| 2011-11-04T15:56:18
| 2011-11-04T15:56:18
| 32,354,262
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 646
|
py
|
import xlwt
from analysis.excel.utils import major_heading_style
from analysis.excel.biology import bio_terrestrial_headers, bio_terrestrial_data, bio_intertidal_headers, bio_intertidal_data, bio_subtidal_headers, bio_subtidal_data
def populate_bio_sheet(ws, context):
bio_header(ws, context)
bio_terrestrial_headers(ws)
bio_terrestrial_data(ws, context)
bio_intertidal_headers(ws)
bio_intertidal_data(ws, context)
bio_subtidal_headers(ws, context)
bio_subtidal_data(ws, context)
def bio_header(ws, context):
ws.write(0, 0, "Energy Site Biology Report for %s" % context['aes'].name, major_heading_style)
|
[
"sfletche@gmail.com"
] |
sfletche@gmail.com
|
1001396875ee79948e767bf2febdfa60c88bc214
|
f8b36ef6debb317e7213346b11df932eb3b6af35
|
/winner's code/2nd_code.py
|
10725b67993d69c10ebc395d4d75d088bba13d60
|
[] |
no_license
|
YoungriKIM/lotte
|
ec767eb5eaa8f13f38115f1c30d25ed5bac7ef2b
|
e3ceda9918210ee418269d1586ec200ce9d9ca33
|
refs/heads/main
| 2023-03-30T15:44:00.387934
| 2021-03-29T13:16:44
| 2021-03-29T13:16:44
| 348,300,060
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 303
|
py
|
# Lotte Data Communication Vision AI Competition - Public LB 2nd place Solution
# https://dev-hunmin.tistory.com/entry/%EB%A1%AF%EB%8D%B0%EC%A0%95%EB%B3%B4%ED%86%B5%EC%8B%A0-Vision-AI-%EA%B2%BD%EC%A7%84%EB%8C%80%ED%9A%8C-Public-LB-2nd-place-Solution
# GitHub
# https://github.com/hunmin-hub/LotteVisionAI
|
[
"lemontleo0311@gmail.com"
] |
lemontleo0311@gmail.com
|
e5c47acc73444f8227a478fe41b8fe625e1f7218
|
8a03b8459902d1bf0806f8d3387fb962bb57cf58
|
/Testscripts/click_on_crcreports.py
|
79a662d8d429268a4b00cb60ea7b5510f66d6cd5
|
[] |
no_license
|
chetandg123/cQube
|
f95a0e86b1e98cb418de209ad26ae2ba463cfcbc
|
a862a1cdf46faaaff5cad49d78c4e5f0454a6407
|
refs/heads/master
| 2022-07-18T12:43:06.839896
| 2020-05-22T13:23:52
| 2020-05-22T13:23:52
| 258,089,042
| 0
| 0
| null | 2020-05-08T16:28:26
| 2020-04-23T03:55:52
|
HTML
|
UTF-8
|
Python
| false
| false
| 1,033
|
py
|
import time
import unittest
from selenium import webdriver
from selenium.webdriver import ActionChains
from selenium.webdriver.support.select import Select
# script to click on crc reports
from Data.Paramters import Data
class CRC(unittest.TestCase):
def setUp(self):
self.driver = webdriver.Chrome(Data.Path)
self.driver.maximize_window()
self.driver.implicitly_wait(10)
self.driver.get(Data.URL)
self.driver.find_element_by_xpath(Data.email).send_keys(Data.username)
self.driver.find_element_by_xpath(Data.pwd).send_keys(Data.password)
self.driver.find_element_by_xpath(Data.loginbtn).click()
time.sleep(10)
def test_crcreports(self):
self.driver.find_element_by_xpath(Data.Dashboard).click()
time.sleep(5)
self.driver.find_element_by_xpath(Data.crc).click()
print(self.driver.title)
def tearDown(self):
time.sleep(5)
self.driver.close()
if __name__ == "__main__":
unittest.main()
|
[
"chetan.goudar@tibilsolutions.com"
] |
chetan.goudar@tibilsolutions.com
|
463fb4daf35270c14d78f343872184d855614ad0
|
6f562994c50361ceebe1e806653ff329ce2ea06c
|
/Code/mar_14/page2.py
|
e0e722b06740c8badb19c5b166e00c69b04025a5
|
[] |
no_license
|
ManishaHingne/PythonML
|
aaef74ca0cab84e7dca0e1e829768e737b4cc7af
|
17623a0699d3f7719c73fbe8e9f0bef7b69e8aec
|
refs/heads/master
| 2020-05-21T15:35:09.785203
| 2019-05-11T06:21:09
| 2019-05-11T06:21:09
| 186,094,739
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 142
|
py
|
import matplotlib.pyplot as plt
x = [1, 2, 3, 4, 5]
y = [20, 29, 21, 22, 21]
plt.plot(x, y)
plt.savefig('./images/chart1.png')
# plt.show()
|
[
"you@example.com"
] |
you@example.com
|
c5962280fb2ab8ba6cb8738531e6d1ec77fdffce
|
06b2eed882d8d7034fb7c57b648d5aa37d7f617b
|
/pycharmproject/django-rest/auth_ob/ob1/ob1/wsgi.py
|
c71ef1ec2bd6bbd96df7e830582e77a0b3fb725d
|
[] |
no_license
|
1974410167/python_projects
|
558e2e7a4ea66b083ebd6d2f808b725e1bd153d6
|
81a97cbf41de12bdc3dbc46edf2931075ac4f758
|
refs/heads/main
| 2023-06-22T09:41:22.038620
| 2023-06-09T15:09:44
| 2023-06-09T15:09:44
| 355,036,534
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 383
|
py
|
"""
WSGI config for ob1 project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/3.0/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'ob1.settings')
application = get_wsgi_application()
|
[
"1974410167@qq.com"
] |
1974410167@qq.com
|
429e24ce3134c6a853d16b512de97706843ab40a
|
9d8acc20d2ee1d1957849dfb71c22e0dae2d8c5c
|
/baomoicrawl/venv/Lib/site-packages/scrapy/utils/http.py
|
6e20649f5181a54d6d1787f641e3ac42ad182e66
|
[] |
no_license
|
thuy4tbn99/TranTruongThuy_17021178_Nhom4_Crawler
|
b0fdedee2942a12d9f64dfed93f43802dc5ab340
|
87c8c07433466bbc43a24ea089f75baeb467c356
|
refs/heads/master
| 2022-11-27T21:36:33.917491
| 2020-08-10T23:24:42
| 2020-08-10T23:24:42
| 286,583,216
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 956
|
py
|
"""
Transitional module for moving to the w3lib library.
For new code, always import from w3lib.http instead of this module
"""
import warnings
from scrapy.exceptions import ScrapyDeprecationWarning
from scrapy.utils.decorators import deprecated
from w3lib.http import * # noqa: F401
warnings.warn("Module `scrapy.utils.http` is deprecated, "
"Please import from `w3lib.http` instead.",
ScrapyDeprecationWarning, stacklevel=2)
@deprecated
def decode_chunked_transfer(chunked_body):
"""Parsed body received with chunked transfer encoding, and return the
decoded body.
For more info see:
https://en.wikipedia.org/wiki/Chunked_transfer_encoding
"""
body, h, t = '', '', chunked_body
while t:
h, t = t.split('\r\n', 1)
if h == '0':
break
size = int(h, 16)
body += t[:size]
t = t[size + 2:]
return body
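# Worked example: chunk sizes are hex, each chunk is "<size>\r\n<data>\r\n", and a "0" chunk ends
# the body. (Calling the function also emits the module's deprecation warning.)
print(decode_chunked_transfer('4\r\nWiki\r\n5\r\npedia\r\n0\r\n\r\n'))  # Wikipedia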
|
[
"thuy4tbn99@gmail.com"
] |
thuy4tbn99@gmail.com
|
d24bb4ef7f0abf71c58f7e83c21133ec8c481125
|
98c6ea9c884152e8340605a706efefbea6170be5
|
/examples/data/Assignment_4/lxxjin012/ndom.py
|
3ddda13fca45dc51450355becc52d34ba11c8df0
|
[] |
no_license
|
MrHamdulay/csc3-capstone
|
479d659e1dcd28040e83ebd9e3374d0ccc0c6817
|
6f0fa0fa1555ceb1b0fb33f25e9694e68b6a53d2
|
refs/heads/master
| 2021-03-12T21:55:57.781339
| 2014-09-22T02:22:22
| 2014-09-22T02:22:22
| 22,372,174
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 850
|
py
|
def ndom_to_decimal (a):
x=str(a)
if len(str(a))==1:
digit1=int(str(a)[0])*1
        number=digit1
elif len(str(a))==2:
digit2=int(str(a)[0])*6
digit3=int(str(a)[1])*1
number=digit2+digit3
elif len(str(a))==3:
digit4=int(str(a)[0])*36
digit5=int(str(a)[1])*6
digit6=int(str(a)[2])*1
number=digit4+digit5+digit6
return number
def decimal_to_ndom (a):
digit1=a//36
b=a-(digit1*36)
digit2=b//6
c=b-(digit2*6)
number=(digit1*100)+(digit2*10)+(c)
return number
def ndom_add (a, b):
add=ndom_to_decimal(a)+ndom_to_decimal(b)
ndom=decimal_to_ndom (add)
return ndom
def ndom_multiply(a,b):
multiply=(ndom_to_decimal(a))*(ndom_to_decimal(b))
ndom=decimal_to_ndom (multiply)
return ndom
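# Worked examples (illustrative): ndom digits are weighted 36, 6 and 1.
print(ndom_to_decimal(123))  # 1*36 + 2*6 + 3 = 51
print(decimal_to_ndom(51))   # 123
print(ndom_add(101, 4))      # 37 + 4 = 41  -> 105 in ndom
print(ndom_multiply(12, 3))  # 8 * 3 = 24   -> 40 in ndom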
|
[
"jarr2000@gmail.com"
] |
jarr2000@gmail.com
|
1bbd9872d8797ac523e3882012ec1954661bbb15
|
82b946da326148a3c1c1f687f96c0da165bb2c15
|
/sdk/python/pulumi_azure_native/synapse/v20210601preview/list_integration_runtime_auth_key.py
|
6ba5abeaa2f156a92731038bf8807a01933558a9
|
[
"BSD-3-Clause",
"Apache-2.0"
] |
permissive
|
morrell/pulumi-azure-native
|
3916e978382366607f3df0a669f24cb16293ff5e
|
cd3ba4b9cb08c5e1df7674c1c71695b80e443f08
|
refs/heads/master
| 2023-06-20T19:37:05.414924
| 2021-07-19T20:57:53
| 2021-07-19T20:57:53
| 387,815,163
| 0
| 0
|
Apache-2.0
| 2021-07-20T14:18:29
| 2021-07-20T14:18:28
| null |
UTF-8
|
Python
| false
| false
| 3,096
|
py
|
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from ... import _utilities
__all__ = [
'ListIntegrationRuntimeAuthKeyResult',
'AwaitableListIntegrationRuntimeAuthKeyResult',
'list_integration_runtime_auth_key',
]
@pulumi.output_type
class ListIntegrationRuntimeAuthKeyResult:
"""
The integration runtime authentication keys.
"""
def __init__(__self__, auth_key1=None, auth_key2=None):
if auth_key1 and not isinstance(auth_key1, str):
raise TypeError("Expected argument 'auth_key1' to be a str")
pulumi.set(__self__, "auth_key1", auth_key1)
if auth_key2 and not isinstance(auth_key2, str):
raise TypeError("Expected argument 'auth_key2' to be a str")
pulumi.set(__self__, "auth_key2", auth_key2)
@property
@pulumi.getter(name="authKey1")
def auth_key1(self) -> Optional[str]:
"""
The primary integration runtime authentication key.
"""
return pulumi.get(self, "auth_key1")
@property
@pulumi.getter(name="authKey2")
def auth_key2(self) -> Optional[str]:
"""
The secondary integration runtime authentication key.
"""
return pulumi.get(self, "auth_key2")
class AwaitableListIntegrationRuntimeAuthKeyResult(ListIntegrationRuntimeAuthKeyResult):
# pylint: disable=using-constant-test
def __await__(self):
if False:
yield self
return ListIntegrationRuntimeAuthKeyResult(
auth_key1=self.auth_key1,
auth_key2=self.auth_key2)
def list_integration_runtime_auth_key(integration_runtime_name: Optional[str] = None,
resource_group_name: Optional[str] = None,
workspace_name: Optional[str] = None,
opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableListIntegrationRuntimeAuthKeyResult:
"""
The integration runtime authentication keys.
:param str integration_runtime_name: Integration runtime name
:param str resource_group_name: The name of the resource group. The name is case insensitive.
:param str workspace_name: The name of the workspace.
"""
__args__ = dict()
__args__['integrationRuntimeName'] = integration_runtime_name
__args__['resourceGroupName'] = resource_group_name
__args__['workspaceName'] = workspace_name
if opts is None:
opts = pulumi.InvokeOptions()
if opts.version is None:
opts.version = _utilities.get_version()
__ret__ = pulumi.runtime.invoke('azure-native:synapse/v20210601preview:listIntegrationRuntimeAuthKey', __args__, opts=opts, typ=ListIntegrationRuntimeAuthKeyResult).value
return AwaitableListIntegrationRuntimeAuthKeyResult(
auth_key1=__ret__.auth_key1,
auth_key2=__ret__.auth_key2)
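# Illustrative usage sketch -- this belongs in a Pulumi program (e.g. __main__.py), not in this
# generated module; the import path is inferred from this file's location and the resource names
# are placeholders.
import pulumi
from pulumi_azure_native.synapse.v20210601preview import list_integration_runtime_auth_key

keys = list_integration_runtime_auth_key(
    integration_runtime_name="example-ir",
    resource_group_name="example-rg",
    workspace_name="example-workspace")
pulumi.export("primaryAuthKey", keys.auth_key1)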
|
[
"noreply@github.com"
] |
morrell.noreply@github.com
|
dea54118e83a7cbf3499e2efc569d2e35c8355b5
|
680bd46e8eae20e78a425f766432711a47235374
|
/models/site_monitor_checkpoint.py
|
a48c4844c14f12ba8c06fcc5feafa3d14d250fd4
|
[
"Apache-2.0"
] |
permissive
|
ILMostro/lm-sdk-python
|
9f45217d64c0fc49caf2f4b279a124c2efe3d24d
|
40da5812ab4d50dd1c6c3c68f7ea13c4d8f4fb49
|
refs/heads/master
| 2022-02-01T16:51:12.810483
| 2019-07-16T17:54:11
| 2019-07-16T17:54:11
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 7,061
|
py
|
# coding: utf-8
"""
LogicMonitor REST API
LogicMonitor is a SaaS-based performance monitoring platform that provides full visibility into complex, hybrid infrastructures, offering granular performance monitoring and actionable data and insights. logicmonitor_sdk enables you to manage your LogicMonitor account programmatically. # noqa: E501
OpenAPI spec version: 1.0.0
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
import pprint
import re # noqa: F401
import six
class SiteMonitorCheckpoint(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'description': 'str',
'display_prio': 'int',
'geo_info': 'str',
'id': 'int',
'is_enabled_in_root': 'bool',
'name': 'str'
}
attribute_map = {
'description': 'description',
'display_prio': 'displayPrio',
'geo_info': 'geoInfo',
'id': 'id',
'is_enabled_in_root': 'isEnabledInRoot',
'name': 'name'
}
def __init__(self, description=None, display_prio=None, geo_info=None, id=None, is_enabled_in_root=None, name=None): # noqa: E501
"""SiteMonitorCheckpoint - a model defined in Swagger""" # noqa: E501
self._description = None
self._display_prio = None
self._geo_info = None
self._id = None
self._is_enabled_in_root = None
self._name = None
self.discriminator = None
self.description = description
if display_prio is not None:
self.display_prio = display_prio
if geo_info is not None:
self.geo_info = geo_info
if id is not None:
self.id = id
if is_enabled_in_root is not None:
self.is_enabled_in_root = is_enabled_in_root
if name is not None:
self.name = name
@property
def description(self):
"""Gets the description of this SiteMonitorCheckpoint. # noqa: E501
:return: The description of this SiteMonitorCheckpoint. # noqa: E501
:rtype: str
"""
return self._description
@description.setter
def description(self, description):
"""Sets the description of this SiteMonitorCheckpoint.
:param description: The description of this SiteMonitorCheckpoint. # noqa: E501
:type: str
"""
if description is None:
raise ValueError("Invalid value for `description`, must not be `None`") # noqa: E501
self._description = description
@property
def display_prio(self):
"""Gets the display_prio of this SiteMonitorCheckpoint. # noqa: E501
:return: The display_prio of this SiteMonitorCheckpoint. # noqa: E501
:rtype: int
"""
return self._display_prio
@display_prio.setter
def display_prio(self, display_prio):
"""Sets the display_prio of this SiteMonitorCheckpoint.
:param display_prio: The display_prio of this SiteMonitorCheckpoint. # noqa: E501
:type: int
"""
self._display_prio = display_prio
@property
def geo_info(self):
"""Gets the geo_info of this SiteMonitorCheckpoint. # noqa: E501
:return: The geo_info of this SiteMonitorCheckpoint. # noqa: E501
:rtype: str
"""
return self._geo_info
@geo_info.setter
def geo_info(self, geo_info):
"""Sets the geo_info of this SiteMonitorCheckpoint.
:param geo_info: The geo_info of this SiteMonitorCheckpoint. # noqa: E501
:type: str
"""
self._geo_info = geo_info
@property
def id(self):
"""Gets the id of this SiteMonitorCheckpoint. # noqa: E501
:return: The id of this SiteMonitorCheckpoint. # noqa: E501
:rtype: int
"""
return self._id
@id.setter
def id(self, id):
"""Sets the id of this SiteMonitorCheckpoint.
:param id: The id of this SiteMonitorCheckpoint. # noqa: E501
:type: int
"""
self._id = id
@property
def is_enabled_in_root(self):
"""Gets the is_enabled_in_root of this SiteMonitorCheckpoint. # noqa: E501
:return: The is_enabled_in_root of this SiteMonitorCheckpoint. # noqa: E501
:rtype: bool
"""
return self._is_enabled_in_root
@is_enabled_in_root.setter
def is_enabled_in_root(self, is_enabled_in_root):
"""Sets the is_enabled_in_root of this SiteMonitorCheckpoint.
:param is_enabled_in_root: The is_enabled_in_root of this SiteMonitorCheckpoint. # noqa: E501
:type: bool
"""
self._is_enabled_in_root = is_enabled_in_root
@property
def name(self):
"""Gets the name of this SiteMonitorCheckpoint. # noqa: E501
:return: The name of this SiteMonitorCheckpoint. # noqa: E501
:rtype: str
"""
return self._name
@name.setter
def name(self, name):
"""Sets the name of this SiteMonitorCheckpoint.
:param name: The name of this SiteMonitorCheckpoint. # noqa: E501
:type: str
"""
self._name = name
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
if issubclass(SiteMonitorCheckpoint, dict):
for key, value in self.items():
result[key] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, SiteMonitorCheckpoint):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
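# Minimal usage sketch for the generated model above; the field values are made up for illustration.
checkpoint = SiteMonitorCheckpoint(
    description="US West probe",
    display_prio=1,
    geo_info="Oregon, US",
    id=42,
    is_enabled_in_root=True,
    name="us-west-1")
print(checkpoint.to_dict())
print(checkpoint == SiteMonitorCheckpoint(description="US West probe"))  # False: other fields differ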
|
[
"bamboo@build01.us-west-1.logicmonitor.net"
] |
bamboo@build01.us-west-1.logicmonitor.net
|
c751ad79e8fae529b101b6f67f960895117723a7
|
890c8b8e90e516a5a3880eca9b2d217662fe7d84
|
/armulator/armv6/opcodes/arm_instruction_set/arm_data_processing_and_miscellaneous_instructions/arm_extra_load_store_instructions/ldrsb_literal_a1.py
|
5b2ac9460c52668437168816a8f42ea9fd81d9d8
|
[
"MIT"
] |
permissive
|
doronz88/armulator
|
b864135996f876c7857b79a314d4aa06cc19c549
|
0294feac2785c8947e5943ac0c34f941ee4b5fff
|
refs/heads/master
| 2022-11-05T08:14:42.405335
| 2020-06-18T23:53:17
| 2020-06-18T23:53:17
| 273,363,061
| 2
| 0
| null | 2020-06-18T23:51:03
| 2020-06-18T23:51:02
| null |
UTF-8
|
Python
| false
| false
| 825
|
py
|
from armulator.armv6.opcodes.abstract_opcodes.ldrsb_literal import LdrsbLiteral
from armulator.armv6.opcodes.opcode import Opcode
class LdrsbLiteralA1(LdrsbLiteral, Opcode):
def __init__(self, instruction, add, imm32, t):
Opcode.__init__(self, instruction)
LdrsbLiteral.__init__(self, add, imm32, t)
def is_pc_changing_opcode(self):
return False
@staticmethod
def from_bitarray(instr, processor):
w = instr[10]
p = instr[7]
imm4_l = instr[-4:]
imm4_h = instr[20:24]
rt = instr[16:20]
add = instr[8]
imm32 = "0b000000000000000000000000" + imm4_h + imm4_l
if p == w or rt.uint == 15:
print "unpredictable"
else:
return LdrsbLiteralA1(instr, **{"add": add, "imm32": imm32, "t": rt.uint})
|
[
"matan1008@gmail.com"
] |
matan1008@gmail.com
|
eab2c83f1f6b77cae668f340fa4e8d50c06049c8
|
263fb97b6ab614f1e25533ba30b37ef94c0a2e81
|
/parser/team08/Tytus_SQLPARSER_G8/Instrucciones/FunctionBinaryString/GetByte.py
|
e8dcc14bd94144d729a4212ee31d0bef1bb7f35c
|
[
"MIT"
] |
permissive
|
ElbaAlvarez/tytus
|
f77e57df2338270b331172e270e2e44a94b68d2e
|
795e660f3005d4fac5cb68af92b178ba1318e96e
|
refs/heads/main
| 2023-02-12T08:55:32.347236
| 2020-12-24T14:00:06
| 2020-12-24T14:00:06
| 320,481,467
| 0
| 0
|
MIT
| 2020-12-11T05:57:41
| 2020-12-11T05:57:41
| null |
UTF-8
|
Python
| false
| false
| 459
|
py
|
from Instrucciones.TablaSimbolos.Instruccion import Instruccion
class GetByte(Instruccion):
def __init__(self, valor, tipo, linea, columna):
Instruccion.__init__(self,tipo,linea,columna)
self.valor = valor
def ejecutar(self, tabla, arbol):
super().ejecutar(tabla,arbol)
bytes(self.valor, 'utf-8')
return bytes(self.valor,'utf-8')
instruccion = GetByte("hola mundo",None, 1,2)
instruccion.ejecutar(None,None)
|
[
"michikatrins@gmail.com"
] |
michikatrins@gmail.com
|
58b4e869b8fbf743ba16b0be22137d18b172cdf6
|
7d43ba52d958537905cfdde46cc194a97c45dc56
|
/ProgrammingParadigms/OOP/SOLID/solid_workbook.py
|
361c598c444742f3305a9a384d65f599a2f7e871
|
[] |
no_license
|
Koshmatova/workbook
|
3e4d1f698a01f2be65c1abc83ee251ebc8a6bbcd
|
902695e8e660689a1730c23790dbdc51737085c9
|
refs/heads/master
| 2023-05-01T02:30:46.868027
| 2021-05-10T03:49:08
| 2021-05-10T03:49:08
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 6,380
|
py
|
STRUCTURAL PARADIGM
Dijkstra -> ANY algorithm can be expressed through 3 ways of selecting commands:
    linear execution
    branching on a condition
    executing a loop while a condition holds
#he recommended using only these
Niklaus Wirth -> Algorithms + Data Structures = Programs
the ability to store a subroutine in a variable BELONGS even to asm
ENCAPSULATION
# protecting the invariant
# Any program entity with non-trivial state must be turned into a closed system that can only be moved from one correct state to another (so that it cannot be broken)
* everything concerning an obj sits inside a single architectural boundary (data and behavior packed into one component)
#everything concerning a class is packed into one module
#clear boundaries between classes
* a clear separation of interface and implementation
* EVERY obj must have its own API - one that leaves no need to dig into the implementation or to use the obj in an unsuitable way
#ALL CLASSES have an interface
* data hiding helps with this
code cannot cross a boundary it does not know about, nor reach data it has no access to
HIDING THE IMPLEMENTATION IN RELATIONAL DATABASES
#] given a DB used by several programs whose implementations we have no access to
we create
a set of stored procedures, grouped into an Interface schema
for each program we create a user and grant access only to that schema
#now the entity with non-trivial behavior is closed off behind an interface
ALTERNATIVES FOR HIDING IN PYTHON
* _
* Document only the interface; EVERYTHING that HAS NO docs is implementation
* Separate the interface via code conventions
__all__
* Make the code conventions strict
# automatic checks -> a violation counts as an error and breaks the build
#the base class defines a fx that must be common to ∀ derived classes
objects provide interfaces. if an object provides an interface -> the interface specifies the object's behavior.
classes implement interfaces. if a class implements an interface -> its instances provide that interface
Instances provide the interfaces their classes implement, & can directly provide additional interfaces not implemented in the class.
classes usually do not provide the interfaces they implement
#this can be generalized to factories - you can create a callable that produces objects providing interfaces ~ the factory implements the interfaces.
POLYMORPHISM
# Stroustrup -> one interface - many implementations
# the user of the interface knows nothing about the implementation
including whether it has changed
POLYMORPHISM BEYOND OOP
# Erlang HAS the behaviour feature
# code is split into modules; a module name can be used as a variable ->
# calling a fx from a module:
foobar:function()
or
Module = foobar
Module:function()
# a behaviour is needed to guarantee that a module HAS certain fx
# a module that uses other modules states its requirements for those module variables via behaviour_info; in turn, the modules commit to implementing that behaviour with a behaviour declaration
#e.g.:
# the gen_server module lets you create a server in a separate process that handles requests from other processes; gen_server CONTAINS ALL the request-handling logic for other processes
# but the actual handling of requests is done by an implementation of the gen_server behaviour, and as long as other modules implement it correctly (even with empty stubs) gen_server does not care how those requests are processed; moreover, the handling module can be swapped on the fly
INHERITANCE
#lets you combine code reuse with polymorphism
SINGLE RESPONSIBILITY
# where and how should the boundaries between classes lie (interface, implementation
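A small Python sketch of the hiding alternatives listed above (an illustrative addition, not part of the original notes): a leading underscore marks implementation details, and __all__ advertises the public interface.
# payments.py -- interface vs. implementation, following the conventions above
__all__ = ['charge']              # only `charge` is the documented interface

def charge(account, amount):
    """Public entry point: moves an account from one correct state to another."""
    _validate(amount)
    account['balance'] -= amount
    return account

def _validate(amount):            # leading underscore: implementation detail, no docs
    if amount <= 0:
        raise ValueError('amount must be positive')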
|
[
"mkone112@gmail.com"
] |
mkone112@gmail.com
|
ee0a8b4383627724adac95c5b9fe794045bbd923
|
2c9db62ddaffd77c097b3da4990021270912ea40
|
/백준/1966.py
|
757df2ca0f3b4c3cc1fd65161b5b06dc6f61a950
|
[] |
no_license
|
nahyun119/algorithm
|
9ae120fbe047819a74e06fc6879f55405bc9ea71
|
40e291305a4108266073d489e712787df1dbae4b
|
refs/heads/master
| 2023-08-27T23:18:44.133748
| 2021-10-03T11:32:12
| 2021-10-03T11:32:12
| 326,661,962
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 971
|
py
|
import sys
import heapq  # heapq is a min heap by default; to get a max heap, multiply priorities by -1.
from collections import deque
input = sys.stdin.readline
result = []
def solve():
global result
n, m = map(int, input().split())
documents = list(map(int, input().split()))
q = deque()
answer = []
for i in range(n):
q.append((documents[i], i))
count = 1
while True:
max_value = max(q)
priority, index = q.popleft()
#print(max_value, priority, index)
if priority < max_value[0]:
q.append((priority, index))
else:
if index == m:
break
            count += 1  # count only the documents that actually get printed
result.append(count)
#print(count)
def main():
global result
T = int(input())
for _ in range(T):
solve()
for r in result:
print(r)
if __name__ == "__main__":
main()
|
[
"nahyun858@gmail.com"
] |
nahyun858@gmail.com
|
615be1b30f756944ef2e0597b47f152d45a6918d
|
11ca0c393c854fa7212e783a34269f9dae84e8c7
|
/Python/785. 判断二分图.py
|
0ac82063c14ffcf55dc1f21b8d60715c5a8a6092
|
[] |
no_license
|
VictoriqueCQ/LeetCode
|
dc84d81163eed26fa9dbc2114bba0b5c2ea881f4
|
a77b3ead157f97f5d9599badb4d4c5da69de44ba
|
refs/heads/master
| 2021-06-05T06:40:24.659909
| 2021-03-31T08:31:51
| 2021-03-31T08:31:51
| 97,978,957
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,622
|
py
|
from typing import List
import collections
class Solution:
def isBipartite(self, graph: List[List[int]]) -> bool:
n = len(graph)
UNCOLORED, RED, GREEN = 0, 1, 2
color = [UNCOLORED] * n
for i in range(n):
if color[i] == UNCOLORED:
q = collections.deque([i])
color[i] = RED
while q:
node = q.popleft()
cNei = (GREEN if color[node] == RED else RED)
for neighbor in graph[node]:
if color[neighbor] == UNCOLORED:
q.append(neighbor)
color[neighbor] = cNei
elif color[neighbor] != cNei:
return False
return True
class Solution1:
def isBipartite(self, graph: List[List[int]]) -> bool:
# dfs time O(E+V), space O(V)
n = len(graph)
visited = [0] * n
stack = []
for i in range(n):
if visited[i] == 0:
stack.append(i)
visited[i] = 1
while stack:
cur = stack.pop()
for neighbor in graph[cur]:
if visited[neighbor] == 0:
stack.append(neighbor)
visited[neighbor] = -visited[cur]
else:
if visited[neighbor] != -visited[cur]:
return False
return True
s = Solution1()
print(s.isBipartite([[1,2,3],[0,2],[0,1,3],[0,2]]))
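# One more illustrative check: a 4-cycle (0-1-2-3-0) is 2-colorable, so both variants return True.
four_cycle = [[1, 3], [0, 2], [1, 3], [0, 2]]
print(Solution().isBipartite(four_cycle))   # True
print(Solution1().isBipartite(four_cycle))  # True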
|
[
"1997Victorique0317"
] |
1997Victorique0317
|
54455505d3762eae077685337d9117b9749a5e0a
|
a7a115b000cd40be9378174777da4f1b56b99de0
|
/web_crawl_book/demo4.py
|
18d7d8d1d1b889b512a3291e57c3fc15f15cb7d1
|
[] |
no_license
|
fireinrain/python_spider
|
316f7cc230989223e6177c5ba2443eba9b54a52a
|
364273278efa6629ec7d79f86c2ce54555ff7691
|
refs/heads/master
| 2022-06-26T20:38:56.462771
| 2017-06-27T00:53:42
| 2017-06-27T00:53:42
| 60,531,584
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,024
|
py
|
#! /usr/bin/python3
# _encoding:utf-8_
# Written by liuzhaoyang
# wcontact:liu575563079@gmail.com
from urllib.request import urlopen
from bs4 import BeautifulSoup
import re
import random
import datetime
import lxml
# get a list of all internal links found on the page
def get_inter_links(bsobj,include_url):
    inter_links = []
    # find every link whose href starts with "/" (or contains the site url)
    for link in bsobj.findAll("a",href=re.compile("^(/|.*"+include_url+")")):
        if link.attrs['href'] is not None:
            inner_link = link.attrs['href']
            if inner_link not in inter_links:
                inter_links.append(inner_link)
    return inter_links
# get a list of all external links found on the page
def get_external_links(bsobj,external_url):
external_links = []
for link in bsobj.findAll("a",href=re.compile("^(http|www)((?!"+external_url+").)*$")):
if link.attrs['href'] is not None:
inner_link = link.attrs['href']
if inner_link not in external_links:
external_links.append(inner_link)
return external_links
# Split an address into its parts
def split_address(address):
address_parts = address.replace("http://","").split("/")
return address_parts
# Pick a random external link reachable from the page
def get_random_external_link(start_page):
html = urlopen(start_page)
bsobj = BeautifulSoup(html.read(),"lxml")
# print(html.read())
    external_links = get_external_links(bsobj,split_address(start_page)[0])
    if len(external_links) == 0:
        # No external links on this page: follow a random internal link and try again
        inter_links = get_inter_links(bsobj,split_address(start_page)[0])
        return get_random_external_link(random.choice(inter_links))
else:
return random.choice(external_links)
def follow_external_only(start_site):
    external_link = get_random_external_link(start_site)
    print("Random external link: "+external_link)
follow_external_only(external_link)
if __name__ == "__main__":
# strs = "http://www.baidu.com/music"
# sss = split_address(strs)
# print(sss)
# get_random_external_link(strs)
follow_external_only("http://oreilly.com")
|
[
"575563079@qq.com"
] |
575563079@qq.com
|
7f7037fa21290dcb8418f1c9c2eeb83c713b4296
|
40d404e424489bb48c3673dee6664de64dbab39c
|
/myWebsite/.venv/lib/python3.8/site-packages/zope/annotation/attribute.py
|
5c009ce2b71fbbf979a61e014fc06a8c534ed00c
|
[] |
no_license
|
alyasamba/me
|
07c9f5f27aa16f768e0432780ac8f6f5ab6afbd1
|
978053c867181bad8eb316a0920ba290a7b1ceae
|
refs/heads/main
| 2023-01-28T09:57:46.616285
| 2020-12-02T02:31:25
| 2020-12-02T02:31:25
| 315,935,399
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,466
|
py
|
##############################################################################
#
# Copyright (c) 2001, 2002 Zope Foundation and Contributors.
# All Rights Reserved.
#
# This software is subject to the provisions of the Zope Public License,
# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution.
# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
# FOR A PARTICULAR PURPOSE.
#
##############################################################################
"""Attribute Annotations implementation"""
import logging
try:
from collections.abc import MutableMapping as DictMixin
except ImportError:
# Python 2
from collections import MutableMapping as DictMixin
try:
from BTrees.OOBTree import OOBTree as _STORAGE
except ImportError: # pragma: no cover
logging.getLogger(__name__).warning(
'BTrees not available: falling back to dict for attribute storage')
_STORAGE = dict
from zope import component, interface
from zope.annotation import interfaces
_EMPTY_STORAGE = _STORAGE()
@interface.implementer(interfaces.IAnnotations)
@component.adapter(interfaces.IAttributeAnnotatable)
class AttributeAnnotations(DictMixin):
"""Store annotations on an object
Store annotations in the `__annotations__` attribute on a
`IAttributeAnnotatable` object.
"""
# Yes, there's a lot of repetition of the `getattr` call,
# but that turns out to be the most efficient for the ways
# instances are typically used without sacrificing any semantics.
# See https://github.com/zopefoundation/zope.annotation/issues/8
# for a discussion of alternatives (which included functools.partial,
# a closure, capturing the annotations in __init__, and versions
# with getattr and exceptions).
def __init__(self, obj, context=None):
self.obj = obj
@property
def __parent__(self):
return self.obj
def __bool__(self):
return bool(getattr(self.obj, '__annotations__', 0))
__nonzero__ = __bool__
def get(self, key, default=None):
"""See zope.annotation.interfaces.IAnnotations"""
annotations = getattr(self.obj, '__annotations__', _EMPTY_STORAGE)
return annotations.get(key, default)
def __getitem__(self, key):
annotations = getattr(self.obj, '__annotations__', _EMPTY_STORAGE)
return annotations[key]
def keys(self):
annotations = getattr(self.obj, '__annotations__', _EMPTY_STORAGE)
return annotations.keys()
def __iter__(self):
annotations = getattr(self.obj, '__annotations__', _EMPTY_STORAGE)
return iter(annotations)
def __len__(self):
annotations = getattr(self.obj, '__annotations__', _EMPTY_STORAGE)
return len(annotations)
def __setitem__(self, key, value):
"""See zope.annotation.interfaces.IAnnotations"""
try:
annotations = self.obj.__annotations__
except AttributeError:
annotations = self.obj.__annotations__ = _STORAGE()
annotations[key] = value
def __delitem__(self, key):
"""See zope.app.interfaces.annotation.IAnnotations"""
try:
annotation = self.obj.__annotations__
except AttributeError:
raise KeyError(key)
del annotation[key]
|
[
"sopesherifabdoulah@gmail.com"
] |
sopesherifabdoulah@gmail.com
|
98a489879ec8c353e34f3c02e3732f79b32f4943
|
12a012ace19a14fc2c4ce2daec7a9df94cd5d925
|
/[1차] 비밀지도.py
|
00fedf556e4d6eaf4e149b313258d139fa9b2ee1
|
[] |
no_license
|
newfull5/Programmers
|
a0a25fd72c0a8a7932122cb72e65b28ecd29ff71
|
b880a8043427f6aa7dc72caa3e46b1f6584a8962
|
refs/heads/master
| 2022-12-28T13:46:52.215347
| 2022-12-12T13:50:53
| 2022-12-12T13:50:53
| 211,209,943
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,852
|
py
|
'''
def solution(n, arr1, arr2):
answer1 = []
answer2 = []
answer = []
for arr in arr1:
if len(bin(arr)[2:]) != n:
answer1.append((n - len(bin(arr)[2:]))*'0' + bin(arr)[2:])
else:
answer1.append(bin(arr)[2:])
for arr in arr2:
if len(bin(arr)[2:]) != n:
answer2.append((n - len(bin(arr)[2:]))*'0' + bin(arr)[2:])
else:
answer2.append(bin(arr)[2:])
for i in range(0, n):
temp = ''
for j in range(0,n):
if answer1[i][j] == '1' or answer2[i][j] == '1':
temp += '#'
else:
temp += ' '
answer.append(temp)
return answer
'''
"""
# 2020.02.26
# Nothing has changed from my solution 20 days ago. Is there really only one way to solve this problem, or have I just not improved?
def solution(n, arr1, arr2):
ar1 = []
ar2 = []
answer = []
for num in arr1:
if len(bin(num)[2:]) != n:
ar1.append('0'*(n - len(bin(num)[2:])) + bin(num)[2:])
else:
ar1.append(bin(num)[2:])
for num in arr2:
if len(bin(num)[2:]) != n:
ar2.append('0'*(n - len(bin(num)[2:])) + bin(num)[2:])
else:
ar2.append(bin(num)[2:])
for i in range(0, n):
string = ''
for j in range(0, n):
if ar1[i][j] == '1' or ar2[i][j] == '1':
string += '#'
else:
string += ' '
answer.append(string)
return answer
"""
#2022.11.12
def _geunsub(string, n):
string = string[2:]
    string = string.zfill(n)  # pad to exactly n digits (a fixed '00000' prefix is not enough for larger n)
string = string[-n:]
return string.replace('1', '#').replace('0', ' ')
def solution(n, arr1, arr2):
return [_geunsub(bin(a | b), n) for a,b, in zip(arr1, arr2)]
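# --- Worked example (added note, not part of the original file) ---
# solution(2, [1, 2], [2, 0]):
#   row 0: bin(1 | 2) = '0b11' -> padded '11' -> '##'
#   row 1: bin(2 | 0) = '0b10' -> padded '10' -> '# '
# so the call returns ['##', '# '].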
|
[
"noreply@github.com"
] |
newfull5.noreply@github.com
|
445f3536583b311826de4e39ab680a8d10b37ae2
|
800b5166148d4e3cd03825d7d20e2900fbc6c789
|
/report_form/migrations/0040_poorpeopledataform_offpoor_year.py
|
a92d794dfc6987387dfa5eac23d67a0145623cc5
|
[] |
no_license
|
JiSuPiaoYi/dawufupin
|
4ffc979a93502eb576776673c98aaeb16021827e
|
57756a501436fabe9b27ebca2e80e60932da30dc
|
refs/heads/master
| 2020-04-07T11:37:35.728108
| 2018-11-20T09:09:50
| 2018-11-20T09:09:50
| 158,334,010
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 509
|
py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.5 on 2018-09-24 11:38
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('report_form', '0039_auto_20180924_0918'),
]
operations = [
migrations.AddField(
model_name='poorpeopledataform',
name='offpoor_year',
field=models.CharField(blank=True, db_column='offpoor_year', max_length=20),
),
]
|
[
"360510132@qq.com"
] |
360510132@qq.com
|
747fccee4d243a477338db4e3c8eb9374a0b38f5
|
53181572c4b22df4b569a9901bcd5347a3459499
|
/tuit_200202/py200306/review_input.py
|
15b13efd525e58aa765a21602912b88ef25afaaa
|
[] |
no_license
|
edu-athensoft/ceit4101python_student
|
80ef067b77421fce76d04f778d5c6de8b12f676c
|
33cfa438c062d45e8d246b853e93d3c14b92ff2d
|
refs/heads/master
| 2020-07-30T01:04:21.084384
| 2020-07-27T02:21:57
| 2020-07-27T02:21:57
| 210,027,310
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 185
|
py
|
"""
input
"""
# input()
# input("Prompt:")
my_input = input("Prompt:")
print(type(my_input))
num1 = float(input("Enter a floating number:"))
num2 = int(input("Enter an integer:"))
|
[
"lada314@gmail.com"
] |
lada314@gmail.com
|
5f9451699991e4faaa4b152a2074561f28165aa0
|
9dfb3372a1e4516d970a6e9d0a9fd8360580eae7
|
/game/grafics/idea.py
|
c5d531d74ba20540c8a4411ceb550c78e140ef81
|
[] |
no_license
|
clambering-goat/cameron_pyton
|
d1cd0e7b04da14e7ba4f89dcb4d973f297a4626c
|
df0b0365b86e75cfcfc2c1fc21608f1536a3b79f
|
refs/heads/master
| 2021-07-14T20:37:37.021401
| 2019-02-28T07:52:11
| 2019-02-28T07:52:11
| 137,251,669
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,026
|
py
|
import pygame
from math import sin,cos,radians,atan,degrees,atanh
pygame.init()
y_size,x_size=500,500
screen = pygame.display.set_mode((y_size,x_size))
done = False
point_to_point_at=pygame.mouse.get_pos()
def distamnce(x,y,x2,y2):
    # Euclidean distance between (x, y) and (x2, y2)
    x_main=x-x2
    y_main=y-y2
    c=(x_main**2+y_main**2)**0.5
    print(c)
distance_apart=50
count=0
distance=20
pointion1=250,250
pointion2=pointion1[0]+(distance_apart*(3**0.5)),pointion1[1]+(distance_apart/2)
pointion3=pointion1[0]+(distance_apart*(3**0.5)),pointion1[1]-(distance_apart/2)
orgin=pointion1[0],pointion1[1]+((distance_apart*(3**0.5))/2)
while not done:
for event in pygame.event.get():
if event.type == pygame.QUIT:
done = True
screen.fill((255, 255, 255))
#angle=angle+1
point_to_point_at=pygame.mouse.get_pos()
pygame.draw.line(screen, (0, 0, 255), orgin,(point_to_point_at[0],point_to_point_at[1]),5)
pygame.display.flip()
pygame.time.wait(20)
|
[
"camerondrain@gmail.com"
] |
camerondrain@gmail.com
|
d0dfa176c55af006ba3041061a0a878a1418a113
|
3eb877ab6d9aba74c63acfc7d9dfe83fe77195ba
|
/google-cloud-sdk/lib/surface/compute/target_https_proxies/update.py
|
f648093f60593faf8dd500347de7363742e32be1
|
[
"LicenseRef-scancode-unknown-license-reference",
"Apache-2.0"
] |
permissive
|
Gilolume/HeuApplication
|
cd65267e6171277fc50f31a582b6ff6634758209
|
e48c68ba9bc4f952b7bd5a0ba47f4c810ed56812
|
refs/heads/master
| 2022-11-25T06:18:47.892562
| 2017-11-24T09:21:16
| 2017-11-24T09:21:16
| 104,208,662
| 0
| 1
| null | 2020-07-25T12:32:09
| 2017-09-20T11:47:10
|
Python
|
UTF-8
|
Python
| false
| false
| 7,762
|
py
|
# Copyright 2014 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Command for updating target HTTPS proxies."""
from googlecloudsdk.api_lib.compute import base_classes
from googlecloudsdk.api_lib.compute import target_proxies_utils
from googlecloudsdk.calliope import base
from googlecloudsdk.calliope import exceptions
from googlecloudsdk.command_lib.compute.ssl_certificates import (
flags as ssl_certificates_flags)
from googlecloudsdk.command_lib.compute.target_https_proxies import flags
from googlecloudsdk.command_lib.compute.url_maps import flags as url_map_flags
from googlecloudsdk.core import log
@base.ReleaseTracks(base.ReleaseTrack.GA, base.ReleaseTrack.BETA)
class UpdateGA(base.SilentCommand):
"""Update a target HTTPS proxy.
*{command}* is used to change the SSL certificate and/or URL map of
existing target HTTPS proxies. A target HTTPS proxy is referenced
by one or more forwarding rules which
define which packets the proxy is responsible for routing. The
target HTTPS proxy in turn points to a URL map that defines the rules
for routing the requests. The URL map's job is to map URLs to
backend services which handle the actual requests. The target
HTTPS proxy also points to at most 10 SSL certificates used for
server-side authentication.
"""
SSL_CERTIFICATE_ARG = None
SSL_CERTIFICATES_ARG = None
TARGET_HTTPS_PROXY_ARG = None
URL_MAP_ARG = None
@classmethod
def Args(cls, parser):
certs = parser.add_mutually_exclusive_group()
cls.SSL_CERTIFICATE_ARG = (
ssl_certificates_flags.SslCertificateArgumentForOtherResource(
'target HTTPS proxy', required=False))
cls.SSL_CERTIFICATE_ARG.AddArgument(parser, mutex_group=certs)
cls.SSL_CERTIFICATES_ARG = (
ssl_certificates_flags.SslCertificatesArgumentForOtherResource(
'target HTTPS proxy', required=False))
cls.SSL_CERTIFICATES_ARG.AddArgument(
parser, mutex_group=certs, cust_metavar='SSL_CERTIFICATE')
cls.TARGET_HTTPS_PROXY_ARG = flags.TargetHttpsProxyArgument()
cls.TARGET_HTTPS_PROXY_ARG.AddArgument(parser, operation_type='update')
cls.URL_MAP_ARG = url_map_flags.UrlMapArgumentForTargetProxy(
required=False, proxy_type='HTTPS')
cls.URL_MAP_ARG.AddArgument(parser)
@property
def service(self):
return self.compute.targetHttpsProxies
@property
def method(self):
pass
@property
def resource_type(self):
return 'targetHttpProxies'
def _CreateRequestsWithCertRefs(self, args, ssl_cert_refs,
quic_override=None):
holder = base_classes.ComputeApiHolder(self.ReleaseTrack())
client = holder.client
requests = []
target_https_proxy_ref = self.TARGET_HTTPS_PROXY_ARG.ResolveAsResource(
args, holder.resources)
if ssl_cert_refs:
requests.append(
(client.apitools_client.targetHttpsProxies, 'SetSslCertificates',
client.messages.ComputeTargetHttpsProxiesSetSslCertificatesRequest(
project=target_https_proxy_ref.project,
targetHttpsProxy=target_https_proxy_ref.Name(),
targetHttpsProxiesSetSslCertificatesRequest=(
client.messages.TargetHttpsProxiesSetSslCertificatesRequest(
sslCertificates=[
ref.SelfLink() for ref in ssl_cert_refs
])))))
if args.url_map:
url_map_ref = self.URL_MAP_ARG.ResolveAsResource(args, holder.resources)
requests.append(
(client.apitools_client.targetHttpsProxies, 'SetUrlMap',
client.messages.ComputeTargetHttpsProxiesSetUrlMapRequest(
project=target_https_proxy_ref.project,
targetHttpsProxy=target_https_proxy_ref.Name(),
urlMapReference=client.messages.UrlMapReference(
urlMap=url_map_ref.SelfLink()))))
if quic_override:
requests.append(
(client.apitools_client.targetHttpsProxies, 'SetQuicOverride',
client.messages.ComputeTargetHttpsProxiesSetQuicOverrideRequest(
project=target_https_proxy_ref.project,
targetHttpsProxy=target_https_proxy_ref.Name(),
targetHttpsProxiesSetQuicOverrideRequest=(
client.messages.TargetHttpsProxiesSetQuicOverrideRequest(
quicOverride=quic_override)))))
return client.MakeRequests(requests)
def _GetSslCertificatesList(self, args):
holder = base_classes.ComputeApiHolder(self.ReleaseTrack())
if args.ssl_certificate:
log.warn(
'The --ssl-certificate flag is deprecated and will be removed soon. '
'Use equivalent --ssl-certificates %s flag.', args.ssl_certificate)
return [
self.SSL_CERTIFICATE_ARG.ResolveAsResource(args, holder.resources)
]
if args.ssl_certificates:
return self.SSL_CERTIFICATES_ARG.ResolveAsResource(args, holder.resources)
return []
def _CheckMissingArgument(self, args):
if not (args.IsSpecified('ssl_certificates') or
args.IsSpecified('ssl_certificate') or args.IsSpecified('url_map')):
raise exceptions.ToolException(
'You must specify at least one of [--ssl-certificates] or '
'[--url-map].')
def Run(self, args):
self._CheckMissingArgument(args)
ssl_certificate_refs = self._GetSslCertificatesList(args)
return self._CreateRequestsWithCertRefs(args, ssl_certificate_refs)
@base.ReleaseTracks(base.ReleaseTrack.ALPHA)
class UpdateAlpha(UpdateGA):
"""Update a target HTTPS proxy.
*{command}* is used to change the SSL certificate and/or URL map of
existing target HTTPS proxies. A target HTTPS proxy is referenced
by one or more forwarding rules which
define which packets the proxy is responsible for routing. The
target HTTPS proxy in turn points to a URL map that defines the rules
for routing the requests. The URL map's job is to map URLs to
backend services which handle the actual requests. The target
HTTPS proxy also points to at most 10 SSL certificates used for
server-side authentication.
"""
@classmethod
def Args(cls, parser):
super(UpdateAlpha, cls).Args(parser)
target_proxies_utils.AddQuicOverrideUpdateArgs(parser)
def _CheckMissingArgument(self, args):
if not (args.IsSpecified('ssl_certificates') or
args.IsSpecified('ssl_certificate') or
args.IsSpecified('url_map') or args.IsSpecified('quic_override')):
raise exceptions.ToolException(
'You must specify at least one of [--ssl-certificates], '
'[--url-map] or [--quic-override].')
def Run(self, args):
self._CheckMissingArgument(args)
holder = base_classes.ComputeApiHolder(self.ReleaseTrack())
messages = holder.client.messages
quic_override = (messages.TargetHttpsProxiesSetQuicOverrideRequest.
QuicOverrideValueValuesEnum(args.quic_override)
) if args.IsSpecified('quic_override') else None
ssl_certificate_refs = self._GetSslCertificatesList(args)
return self._CreateRequestsWithCertRefs(args, ssl_certificate_refs,
quic_override)
|
[
"jeremy.debelleix@gmail.com"
] |
jeremy.debelleix@gmail.com
|
df3a234e51d43397b86be585b6914f5ef896cd45
|
832852c679816673f708860929a36a20ca8d3e32
|
/Configurations/ggH_SF/Full2016_nAODv4/maxDNN/comb_2j_dymvaOptim.py
|
ddfa9667d553dc87552a9171f6eafc15ee4d708f
|
[] |
no_license
|
UniMiBAnalyses/PlotsConfigurations
|
c4ec7376e2757b838930dfb2615e1dc99a64e542
|
578fe518cfc608169d3418bcb63a8342d3a24390
|
refs/heads/master
| 2023-08-31T17:57:45.396325
| 2022-09-01T10:13:14
| 2022-09-01T10:13:14
| 172,092,793
| 0
| 13
| null | 2023-04-27T10:26:52
| 2019-02-22T15:52:44
|
Python
|
UTF-8
|
Python
| false
| false
| 2,545
|
py
|
# Combinations to use
#comb = {}
optim={}
##optim['dymva0p82'] = ' && dymva_dnn_2j > 0.82 '
##optim['dymva0p83'] = ' && dymva_dnn_2j > 0.83 '
##optim['dymva0p84'] = ' && dymva_dnn_2j > 0.84 '
#optim['dymva0p845'] = ' && dymva_dnn_2j > 0.845 '
#optim['dymva0p85'] = ' && dymva_dnn_2j > 0.85 '
optim['dymva0p855'] = ' && dymva_dnn_2j > 0.855 '
##optim['dymva0p86'] = ' && dymva_dnn_2j > 0.86 '
optim['dymva0p865'] = ' && dymva_dnn_2j > 0.865 '
##optim['dymva0p87'] = ' && dymva_dnn_2j > 0.87 '
optim['dymva0p875'] = ' && dymva_dnn_2j > 0.875 '
##optim['dymva0p88'] = ' && dymva_dnn_2j > 0.88 '
optim['dymva0p885'] = ' && dymva_dnn_2j > 0.885 '
##optim['dymva0p89'] = ' && dymva_dnn_2j > 0.89 '
##optim['dymva0p90'] = ' && dymva_dnn_2j > 0.90 '
##optim['dymva0p91'] = ' && dymva_dnn_2j > 0.91 '
#optim['dymva0p92'] = ' && dymva_dnn_2j > 0.92 '
##optim['dymva0p925'] = ' && dymva_dnn_2j > 0.925 '
#optim['dymva0p93'] = ' && dymva_dnn_2j > 0.93 '
#optim['dymva0p94'] = ' && dymva_dnn_2j > 0.94 '
#optim['dymva0p945'] = ' && dymva_dnn_2j > 0.945 '
##optim['dymva0p95'] = ' && dymva_dnn_2j > 0.95 '
#optim['dymva0p955'] = ' && dymva_dnn_2j > 0.955 '
#optim['dymva0p96'] = ' && dymva_dnn_2j > 0.96 '
#optim['dymva0p965'] = ' && dymva_dnn_2j > 0.965 '
#optim['dymva0p97'] = ' && dymva_dnn_2j > 0.97 '
##optim['dymva0p975'] = ' && dymva_dnn_2j > 0.975 '
optim['dymva0p98'] = ' && dymva_dnn_2j > 0.98 '
optim['dymva0p985'] = ' && dymva_dnn_2j > 0.985 '
optim['dymva0p99'] = ' && dymva_dnn_2j > 0.99 '
##optim['dymva0p995'] = ' && dymva_dnn_2j > 0.995 '
for iCut in optim:
combs['hww2l2v_13TeV_2jee_'+iCut] = {
'hww2l2v_13TeV_2jee_'+iCut : 'events' ,
'hww2l2v_13TeV_WW_2jee_'+iCut : 'events' ,
'hww2l2v_13TeV_top_2jee_'+iCut : 'events' ,
}
combs['hww2l2v_13TeV_2jmm_'+iCut] = {
'hww2l2v_13TeV_2jmm_'+iCut : 'events' ,
'hww2l2v_13TeV_WW_2jmm_'+iCut : 'events' ,
'hww2l2v_13TeV_top_2jmm_'+iCut : 'events' ,
}
combs['hww2l2v_13TeV_2jsf_'+iCut] = {
'hww2l2v_13TeV_2jee_'+iCut : 'events' ,
'hww2l2v_13TeV_WW_2jee_'+iCut : 'events' ,
'hww2l2v_13TeV_top_2jee_'+iCut : 'events' ,
'hww2l2v_13TeV_2jmm_'+iCut : 'events' ,
'hww2l2v_13TeV_WW_2jmm_'+iCut : 'events' ,
'hww2l2v_13TeV_top_2jmm_'+iCut : 'events' ,
}
|
[
"davide.di.croce@cern.ch"
] |
davide.di.croce@cern.ch
|
060fce54f0cf1197aa859bd4e882a86f97d1a248
|
34599596e145555fde0d4264a1d222f951f49051
|
/pcat2py/class/20fab7f2-5cc5-11e4-af55-00155d01fe08.py
|
65e06e5eb7d8192e2b4c7cd3f1cdffd123911b50
|
[
"MIT"
] |
permissive
|
phnomcobra/PCAT2PY
|
dc2fcbee142ce442e53da08476bfe4e68619346d
|
937c3b365cdc5ac69b78f59070be0a21bdb53db0
|
refs/heads/master
| 2021-01-11T02:23:30.669168
| 2018-02-13T17:04:03
| 2018-02-13T17:04:03
| 70,970,520
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,393
|
py
|
#!/usr/bin/python
################################################################################
# 20fab7f2-5cc5-11e4-af55-00155d01fe08
#
# Justin Dierking
# justindierking@hardbitsolutions.com
# phnomcobra@gmail.com
#
# 10/24/2014 Original Construction
################################################################################
class Finding:
def __init__(self):
self.output = []
self.is_compliant = False
self.uuid = "20fab7f2-5cc5-11e4-af55-00155d01fe08"
def check(self, cli):
# Initialize Compliance
self.is_compliant = False
# Get Registry DWORD
dword = cli.get_reg_dword(r'HKLM:\Software\Policies\Microsoft\Windows\GameUX', 'DownloadGameInfo')
# Output Lines
self.output = [r'HKLM:\Software\Policies\Microsoft\Windows\GameUX', ('DownloadGameInfo=' + str(dword))]
if dword == 0:
self.is_compliant = True
return self.is_compliant
def fix(self, cli):
cli.powershell(r"New-Item -path 'HKLM:\Software\Policies\Microsoft'")
cli.powershell(r"New-Item -path 'HKLM:\Software\Policies\Microsoft\Windows'")
cli.powershell(r"New-Item -path 'HKLM:\Software\Policies\Microsoft\Windows\GameUX'")
cli.powershell(r"Set-ItemProperty -path 'HKLM:\Software\Policies\Microsoft\Windows\GameUX' -name 'DownloadGameInfo' -value 0 -Type DWord")
|
[
"phnomcobra@gmail.com"
] |
phnomcobra@gmail.com
|
90780ca6d62b27da11a6b61560c01714020dd63d
|
d1ed040025811cce2533a7e55737eadc00bd15a9
|
/shop/widgets.py
|
84e21750c6450f92f338c5ecc50bc5985e6503d3
|
[] |
no_license
|
Code-Institute-Submissions/guillermo
|
0dfa6aecab3e325c5bd30356557b710da6851deb
|
c6def675ac78e88fd20c86f59498d9251cf302a7
|
refs/heads/master
| 2023-01-15T11:38:35.306745
| 2020-11-10T21:09:41
| 2020-11-10T21:09:41
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 442
|
py
|
from django.forms.widgets import ClearableFileInput
from django.utils.translation import gettext_lazy as _
class CustomClearableFileInput(ClearableFileInput):
"""
Shows thumbnail of current image and checkbox to remove it.
"""
clear_checkbox_label = _("Remove")
initial_text = _("Current Image")
input_text = _("")
template_name = (
"shop/custom_widget_templates/custom_clearable_file_input.html"
)
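# --- Hypothetical usage sketch (added, not part of the original file) ---
# A ModelForm can opt into this widget per field; the template referenced by
# `template_name` then renders the thumbnail plus the "Remove" checkbox.
# The form, model and field names below are illustrative assumptions only:
#
#   from django import forms
#   from .widgets import CustomClearableFileInput
#
#   class ProductForm(forms.ModelForm):
#       class Meta:
#           model = Product  # assumed model with an ImageField named "image"
#           fields = "__all__"
#           widgets = {"image": CustomClearableFileInput()}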
|
[
"brachetta@me.com"
] |
brachetta@me.com
|
a99e6f4b0b71c257b8c163c1c1cf93f0172adcd4
|
07570ec33eb49effd9ed6af73214bac1b607038f
|
/client/swagger_client/models/domain_list.py
|
4c04cf18e0a3344618f96b8efcd04ad9b132fceb
|
[
"MIT"
] |
permissive
|
kakwa/certascale
|
d9998a66ba6a239ba5b5e537f12dabdd5876996c
|
0df8da0f518506500117152fd0e28ee3286949af
|
refs/heads/master
| 2020-03-29T09:24:32.794060
| 2019-01-30T14:06:10
| 2019-01-30T14:06:10
| 149,756,729
| 0
| 2
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,538
|
py
|
# coding: utf-8
"""
certascale API
Certascale API documentation # noqa: E501
OpenAPI spec version: 1.0.0
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
import pprint
import re # noqa: F401
import six
from swagger_client.models.domain import Domain # noqa: F401,E501
class DomainList(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'list': 'list[Domain]',
'next_id': 'int'
}
attribute_map = {
'list': 'list',
'next_id': 'next_id'
}
def __init__(self, list=None, next_id=None): # noqa: E501
"""DomainList - a model defined in Swagger""" # noqa: E501
self._list = None
self._next_id = None
self.discriminator = None
if list is not None:
self.list = list
if next_id is not None:
self.next_id = next_id
@property
def list(self):
"""Gets the list of this DomainList. # noqa: E501
:return: The list of this DomainList. # noqa: E501
:rtype: list[Domain]
"""
return self._list
@list.setter
def list(self, list):
"""Sets the list of this DomainList.
:param list: The list of this DomainList. # noqa: E501
:type: list[Domain]
"""
self._list = list
@property
def next_id(self):
"""Gets the next_id of this DomainList. # noqa: E501
:return: The next_id of this DomainList. # noqa: E501
:rtype: int
"""
return self._next_id
@next_id.setter
def next_id(self, next_id):
"""Sets the next_id of this DomainList.
:param next_id: The next_id of this DomainList. # noqa: E501
:type: int
"""
self._next_id = next_id
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, DomainList):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
|
[
"carpentier.pf@gmail.com"
] |
carpentier.pf@gmail.com
|
c85f6d190b977c7efe18e3b3eafff96dd5697bcc
|
32ce121ca829a50fd4786b2c1470c25ccb980487
|
/examples/subscribe_1.py
|
9d17f9d0bf71c69ef3763322b770cc44aad7bbeb
|
[
"MIT"
] |
permissive
|
py-zoid/harmony
|
5aa42b0665a8624627a3ed2d7271847f2a3df7b6
|
8a94b253c36302ee6d52fd2a0748e6b91879bbef
|
refs/heads/main
| 2023-06-23T08:15:28.610600
| 2021-05-30T01:40:04
| 2021-05-30T01:40:04
| 387,935,695
| 3
| 0
|
MIT
| 2021-07-20T23:16:23
| 2021-07-20T23:16:22
| null |
UTF-8
|
Python
| false
| false
| 736
|
py
|
#!/usr/bin/python3
from python_graphql_client import GraphqlClient
from json import dumps
import asyncio
def prettyPrint(data):
print(dumps(data, sort_keys=True, indent=2))
try:
client = GraphqlClient(endpoint="ws://localhost:7000/v1/graphql")
query = """
subscription {
newPendingTx {
from
to
nonce
gasPrice
queuedFor
pendingFor
pool
}
}
"""
print('Listening for any new tx, entering pending pool')
asyncio.run(client.subscribe(query=query, handle=prettyPrint))
except Exception as e:
print(e)
except KeyboardInterrupt:
print('\nStopping')
|
[
"anjanroy@yandex.com"
] |
anjanroy@yandex.com
|
0b393a21f7951461e0b7dc197f6ee0790223b2a5
|
2bdedcda705f6dcf45a1e9a090377f892bcb58bb
|
/src/main/output/head_father/thing_end/question_water_right/fact.py
|
d42798930b1c8ad06369f29d10e59c00e1537245
|
[] |
no_license
|
matkosoric/GenericNameTesting
|
860a22af1098dda9ea9e24a1fc681bb728aa2d69
|
03f4a38229c28bc6d83258e5a84fce4b189d5f00
|
refs/heads/master
| 2021-01-08T22:35:20.022350
| 2020-02-21T11:28:21
| 2020-02-21T11:28:21
| 242,123,053
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 4,391
|
py
|
using CategoriesPOC.TranslatorService;
using System;
using System.Net;
using System.Net.Http;
using System.Threading.Tasks;
namespace CategoriesPOC.Helpers
{
public static class TranslatorHelper
{
private const string SubscriptionKey = "ec5892dd4dbc7efdd4227cd0291300f5"; //Enter here the Key from your Microsoft Translator Text subscription on http://portal.azure.com
public static Task<string> Translate(string word, string lang="")
{
var translatorService = new TranslatorService.LanguageServiceClient();
var authTokenSource = new AzureAuthToken(SubscriptionKey);
var token = string.Empty;
lang = string.IsNullOrEmpty(lang) ? DetectLanguage(word).Result : lang;
if (lang == "en") return Task.FromResult<string>(word);
try
{
token = authTokenSource.GetAccessToken();
return translatorService.TranslateAsync(token, word, lang, "en", "text/plain", "general", string.Empty);
}
catch (HttpRequestException)
{
switch (authTokenSource.RequestStatusCode)
{
case HttpStatusCode.Unauthorized:
Console.WriteLine("Request to token service is not authorized (401). Check that the Azure subscription key is valid.");
break;
case HttpStatusCode.Forbidden:
Console.WriteLine("Request to token service is not authorized (403). For accounts in the free-tier, check that the account quota is not exceeded.");
break;
}
throw;
}
//Console.WriteLine("Translated to French: {0}", translatorService.Translate(token, "Hello World", "en", "fr", "text/plain", "general", string.Empty));
}
public static Task<GetTranslationsResponse> GetTranslations(string word, string lang = "")
{
var translatorService = new TranslatorService.LanguageServiceClient();
var authTokenSource = new AzureAuthToken(SubscriptionKey);
var token = string.Empty;
lang = string.IsNullOrEmpty(lang) ? DetectLanguage(word).Result : lang;
try
{
token = authTokenSource.GetAccessToken();
var options = new TranslateOptions();
return translatorService.GetTranslationsAsync(token, word, lang, "en", 20, options);
}
catch (HttpRequestException)
{
switch (authTokenSource.RequestStatusCode)
{
case HttpStatusCode.Unauthorized:
Console.WriteLine("Request to token service is not authorized (401). Check that the Azure subscription key is valid.");
break;
case HttpStatusCode.Forbidden:
Console.WriteLine("Request to token service is not authorized (403). For accounts in the free-tier, check that the account quota is not exceeded.");
break;
}
throw;
}
}
public static Task<string> DetectLanguage(string str)
{
var translatorService = new TranslatorService.LanguageServiceClient();
var authTokenSource = new AzureAuthToken(SubscriptionKey);
var token = string.Empty;
try
{
token = authTokenSource.GetAccessToken();
return translatorService.DetectAsync(token, str);
}
catch (HttpRequestException)
{
switch (authTokenSource.RequestStatusCode)
{
case HttpStatusCode.Unauthorized:
Console.WriteLine("Request to token service is not authorized (401). Check that the Azure subscription key is valid.");
break;
case HttpStatusCode.Forbidden:
Console.WriteLine("Request to token service is not authorized (403). For accounts in the free-tier, check that the account quota is not exceeded.");
break;
}
throw;
}
//translatorService.Detect(token, str);
}
}
}
|
[
"soric.matko@gmail.com"
] |
soric.matko@gmail.com
|
c82533b14aad2bc70cb7f0d32c0a011ac1ba5058
|
98810fbf90a42028915a88bfac9fb8cb8681008e
|
/azure-devops/azext_devops/devops_sdk/v6_0/token_administration/__init__.py
|
d0c5658b8f53ee15939375e036f993b970fc95b2
|
[
"MIT",
"BSD-3-Clause",
"LicenseRef-scancode-free-unknown",
"LicenseRef-scancode-unicode",
"LGPL-3.0-only",
"LicenseRef-scancode-warranty-disclaimer",
"PSF-2.0",
"PostgreSQL",
"LicenseRef-scancode-python-cwi",
"LGPL-2.1-or-later",
"LicenseRef-scancode-proprietary-license",
"LGPL-2.1-only",
"CC-BY-4.0",
"Python-2.0",
"MPL-1.1",
"OpenSSL",
"LicenseRef-scancode-other-copyleft",
"LicenseRef-scancode-unknown-license-reference",
"MPL-1.0",
"ISC",
"GPL-2.0-only",
"Apache-2.0",
"LicenseRef-scancode-public-domain",
"BSD-2-Clause",
"GPL-1.0-or-later",
"MPL-2.0"
] |
permissive
|
Azure/azure-devops-cli-extension
|
ba87357a8243e1318f100791fc32acbb59448d05
|
bd34a6fd0658a15dadf6c09c7f6217ca5ffa662b
|
refs/heads/master
| 2023-08-29T10:56:54.228674
| 2023-07-17T04:37:06
| 2023-07-17T04:37:06
| 107,708,057
| 419
| 208
|
MIT
| 2023-08-02T02:10:10
| 2017-10-20T17:39:11
|
Python
|
UTF-8
|
Python
| false
| false
| 815
|
py
|
# --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
# Generated file, DO NOT EDIT
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------------------------
from .models import *
from .token_administration_client import TokenAdministrationClient
__all__ = [
'SessionToken',
'TokenAdministrationRevocation',
'TokenAdminPagedSessionTokens',
'TokenAdminRevocation',
'TokenAdministrationClient'
]
|
[
"noreply@github.com"
] |
Azure.noreply@github.com
|
d9891b24891a2da8e8a76e6058e6a19b83a963c5
|
ef5f369a8fb3978dbb57cdab2c0f83880fa43c36
|
/amatino/tests/primary/entity.py
|
c47f450533c5f4325bdca50830a582096037333a
|
[
"MIT"
] |
permissive
|
pypi-buildability-project/amatino-python
|
c8a93c849d9e97ea907d411511a0c732ee51b29e
|
9178e0883b735f882729c19a7a68df68b49e057b
|
refs/heads/master
| 2022-07-19T12:24:06.587840
| 2020-05-21T05:28:08
| 2020-05-21T05:28:08
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,034
|
py
|
"""
Amatino API Python Bindings
Entity Test Module
Author: hugh@amatino.io
"""
from amatino.entity import Entity
from amatino.tests.ancillary.session import SessionTest
from amatino import Session
class EntityTest(SessionTest):
"""
Test the Entity primary object
"""
def __init__(self, name='Create, retrieve, update an Entity') -> None:
self.entity = None
super().__init__(name)
self.create_session()
if not isinstance(self.session, Session):
raise RuntimeError(
'Session creation failed, consider running Session tests'
)
return
def create_entity(self) -> Entity:
entity = Entity.create(
self.session,
'Test Entity',
None,
None
)
self.entity = entity
return entity
def execute(self) -> None:
assert self.session is not None
try:
entity = self.create_entity()
except Exception as error:
self.record_failure(error)
return
assert isinstance(self.entity, Entity)
try:
entity = Entity.retrieve(
self.session,
entity.id_
)
except Exception as error:
self.record_failure(error)
return
if entity.id_ != self.entity.id_:
self.record_failure('Entity ids do not match')
return
new_name = 'Updated Entity Name'
try:
updated_entity = entity.update(new_name)
except Exception as error:
self.record_failure(error)
return
if updated_entity.name != new_name:
self.record_failure('Entity name not updated: ' + str(entity.name))
return
listed_entities = Entity.retrieve_list(
session=self.session
)
assert isinstance(listed_entities, list)
assert len(listed_entities) > 0
self.record_success()
return
|
[
"hugh.jeremy@gmail.com"
] |
hugh.jeremy@gmail.com
|
29eb2562ec4c47d302e9848afa25bb9fe02ea5ef
|
380848070205bf5cb119071eb2b32e98caca0253
|
/two by two/convo.py
|
e7e591289ce9079002f4202d804241a043f07220
|
[] |
no_license
|
qynglang/Algorithm-intelligence
|
a3b3720ec8f2475457875d38cdde858c1805e910
|
8e410b87cea6abd18a8bcd45ed89cb5f436748b3
|
refs/heads/master
| 2020-07-03T03:30:53.923930
| 2019-09-17T09:25:21
| 2019-09-17T09:25:21
| 201,769,566
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,434
|
py
|
# Create some wrappers for simplicity
import tensorflow as tf
def conv2d(x, W, strides=1):
# Conv2D wrapper, with bias and relu activation
x = tf.nn.conv2d(x, W, strides=[1, strides, strides, 1], padding='SAME')
#x = tf.nn.bias_add(x, b)
return tf.nn.relu(x)
def maxpool2d(x, k=2):
# MaxPool2D wrapper
return tf.nn.max_pool(x, ksize=[1, k, k, 1], strides=[1, k, k, 1],
padding='SAME')
# Create model
def conv_net(x, weights, biases, dropout):
    # Input is a 1-D vector of 2000 features (40*50 pixels), not the usual MNIST 784
    # Reshape to match picture format [Height x Width x Channel]
    # Tensor input becomes 4-D: [Batch Size, Height, Width, Channel]
x = tf.reshape(x, shape=[-1, 40, 50, 1])
# Convolution Layer
conv1 = conv2d(x, weights['wc1'])
# Max Pooling (down-sampling)
conv1 = maxpool2d(conv1, k=3)
# Convolution Layer
conv2 = conv2d(conv1, weights['wc2'])
# Max Pooling (down-sampling)
conv2 = maxpool2d(conv2, k=3)
# Fully connected layer
# Reshape conv2 output to fit fully connected layer input
fc1 = tf.reshape(conv2, [-1, weights['wd1'].get_shape().as_list()[0]])
fc1 = tf.add(tf.matmul(fc1, weights['wd1']), biases['bd1'])
fc1 = tf.nn.relu(fc1)
# Apply Dropout
fc1 = tf.nn.dropout(fc1, dropout)
# Output, class prediction
out = tf.add(tf.matmul(fc1, weights['out']), biases['out'])
return out
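# --- Added usage sketch (assumptions, not part of the original file) ---
# Assuming TensorFlow 1.x graph mode: with 'SAME' padding, the two k=3 max-pools
# shrink a 40x50 input to 14x17 and then 5x6, so conv2 flattens to 5*6*64 = 1920
# features. The filter counts (32, 64), the 1024-unit FC layer and n_classes below
# are illustrative choices, not taken from the original project.
n_classes = 10  # assumed number of output classes
weights = {
    'wc1': tf.Variable(tf.random_normal([5, 5, 1, 32])),
    'wc2': tf.Variable(tf.random_normal([5, 5, 32, 64])),
    'wd1': tf.Variable(tf.random_normal([5 * 6 * 64, 1024])),
    'out': tf.Variable(tf.random_normal([1024, n_classes])),
}
biases = {
    'bd1': tf.Variable(tf.random_normal([1024])),
    'out': tf.Variable(tf.random_normal([n_classes])),
}
x = tf.placeholder(tf.float32, [None, 40 * 50])
logits = conv_net(x, weights, biases, dropout=0.75)  # dropout acts as keep_prob in TF1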
|
[
"noreply@github.com"
] |
qynglang.noreply@github.com
|
1e4bee103070178cb11759b33a9988d636e01631
|
bf26ed0b9ef5a6d846df05a748dcc7d4799f1164
|
/chapter-2/bhp.py
|
030e9f74b45e2cd6fb2c75dd983a94d776a09543
|
[] |
no_license
|
cornh0lio/blackhat-python
|
41cd694c845c982ff3384a3620017e64a799afe8
|
b1373b759435cc50a53ce7b05bca906523c924b9
|
refs/heads/master
| 2021-06-15T20:04:46.897711
| 2017-01-16T15:46:38
| 2017-01-16T15:46:38
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 5,874
|
py
|
import sys
import socket
import getopt
import threading
import subprocess
# define some global variables
listen = False
command = False
upload = False
execute = ""
target = ""
upload_destination = ""
port = 0
# Print the help for the tool
def usage():
print "BHP Net Tool"
print
print "Usage: bhp.py -t <target> -p <port>"
print "-l --listen - listen on [host]:[port] for incoming connections"
print "-e --execute=<file_to_run> - execute the given file upon receiving a connection"
print "-c --command - initialize a command shell"
print "-u --upload=<destination> - upon receiving a connection upolad a file and write it to [destination]"
print
print
print "Examples:"
print "bhp.py -t 192.168.0.1 -p 5555 -l -c"
print "bhp.py -t 192.168.0.1 -p 5555 -l -u=c:\\target.exe"
print "bhp.py -t 192.168.0.1 -p 5555 -l -e=\"cat /etc/passwd\""
print "echo 'ABCDEFGHI' | ./bhp.py -t 192.168.11.12 -p 135"
sys.exit(0)
def main():
global listen
global port
global execute
global command
global upload_destination
global target
if not len(sys.argv[1:]):
usage()
# read the commandline options
try:
        opts, args = getopt.getopt(sys.argv[1:],"hle:t:p:cu:", ["help","listen","execute=","target=","port=","command","upload="])
except getopt.GetoptError as err:
print str(err)
usage()
for o,a in opts:
if o in ("-h","--help"):
usage()
elif o in ("-l","--listen"):
listen = True
elif o in ("-e","--execute"):
execute = a
elif o in ("-c","--command"):
command = True
elif o in ("-t","--target"):
target = a
elif o in ("-p","--port"):
port = int(a)
else:
assert False, "Unhandled Option"
# are we going to listen or just send data from stdin?
if not listen and len(target) and port > 0:
# read in the buffer from the commandline
# this will block, so send CTRL-D if not sending input
# to stdin
buffer = sys.stdin.read()
#send data off
client_sender(buffer)
# we are going to listen and potentially
# upload things, execute commands, and drop a shell back
# depending on our command line options above
if listen:
server_loop()
def client_sender(buffer):
client = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
try:
# connect to our target host
client.connect((target,port))
if len(buffer):
client.send(buffer)
while True:
# now wait for data back
recv_len = 1
response = ""
while recv_len:
data = client.recv(4096)
recv_len = len(data)
response += data
if recv_len < 4096:
break
print response
# wait for more input
buffer = raw_input("")
buffer += "\n"
# send it off
client.send(buffer)
except:
print "[*] Exception! Exiting!."
client.close()
def server_loop():
global target
# if no target is defined, we listen on all interfaces
if not len(target):
target = "0.0.0.0"
server = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
server.bind((target,port))
server.listen(5)
# this is the while where we manage incoming connections
while True:
client_socket, addr = server.accept()
        # spin off a thread to handle our new client
client_thread = threading.Thread(target=client_handler,args=(client_socket,))
client_thread.start()
def run_command(command):
# trim the newline
command = command.rstrip()
# run the command and get the output back
try:
output = subprocess.check_output(command, stderr=subprocess.STDOUT, shell=True)
except:
output = "Failed to execute command.\r\n"
# send the output back to the client
return output
def client_handler(client_socket):
global upload
global execute
global command
# check for upload
if len(upload_destination):
# read in all of the bytes and write to our destination
file_buffer = ""
# keep reading data until none is available
while True:
data = client_socket.recv(1024)
if not data:
break
else:
file_buffer += data
# now we take these bytes and try to write them out
try:
# We open a file descriptor in write + binary mode
file_descriptor = open(upload_destination, "wb")
file_descriptor.write(file_buffer)
file_descriptor.close()
# send ack to the client to confirm that we wrote the file
client_socket.send("Successfully saved the file to %s\r\n" % upload_destination)
except:
client_socket.send("Failed to save the file to %s\r\n" % upload_destination)
if len(execute):
output = run_command(execute)
client_socket.send(output)
# now we go into another loop if a command shell is requested
if command:
while True:
# show a simple prompt
client_socket.send("<BHP:#> ")
# now we receive until we see a linefeed
cmd_buffer = ""
while "\n" not in cmd_buffer:
cmd_buffer += client_socket.recv(1024)
# get back the command output
response = run_command(cmd_buffer)
# send back the response
client_socket.send(response)
main()
|
[
"you@example.com"
] |
you@example.com
|
3a446d64643255b8eed4cfce2ad8f4db60a1e0f3
|
48d0cfbe1ba313740a94ef75f25e685bbc8aa7f6
|
/santa/content/tests/base.py
|
cc92b8a7faf8d2364a79342c3604ce91a1dbb1af
|
[] |
no_license
|
taito-zz/santa.content
|
72995e455b3ceec7842fc5923a607ba5963268cd
|
dd497f48918212c61bd429e1e7130a9b1c4620f5
|
refs/heads/master
| 2021-05-27T14:58:47.513815
| 2012-10-30T19:10:14
| 2012-10-30T19:10:14
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,326
|
py
|
from plone.app.testing import FunctionalTesting
from plone.app.testing import IntegrationTesting
from plone.app.testing import PLONE_FIXTURE
from plone.app.testing import PloneSandboxLayer
from plone.testing import z2
import unittest2 as unittest
class SantaContentLayer(PloneSandboxLayer):
defaultBases = (PLONE_FIXTURE,)
def setUpZope(self, app, configurationContext):
"""Set up Zope."""
# Load ZCML
import santa.content
self.loadZCML(package=santa.content)
z2.installProduct(app, 'santa.content')
def setUpPloneSite(self, portal):
"""Set up Plone."""
# Install into Plone site using portal_setup
self.applyProfile(portal, 'santa.content:default')
def tearDownZope(self, app):
"""Tear down Zope."""
z2.uninstallProduct(app, 'santa.content')
FIXTURE = SantaContentLayer()
INTEGRATION_TESTING = IntegrationTesting(
bases=(FIXTURE,), name="SantaContentLayer:Integration")
FUNCTIONAL_TESTING = FunctionalTesting(
bases=(FIXTURE,), name="SantaContentLayer:Functional")
class IntegrationTestCase(unittest.TestCase):
"""Base class for integration tests."""
layer = INTEGRATION_TESTING
class FunctionalTestCase(unittest.TestCase):
"""Base class for functional tests."""
layer = FUNCTIONAL_TESTING
|
[
"taito.horiuchi@gmail.com"
] |
taito.horiuchi@gmail.com
|
e9583dfd136ae69e44da411101e8d5ef314a7351
|
e446c2c600fbe6e279acf05eac3079643b4c3cf3
|
/14_3_21_algorithms_data_structures/recursion.py
|
cc1a8f708ffee28156c8cb439c8770e67c427f73
|
[] |
no_license
|
solomoniosif/SDA_Python_Exercises
|
2208298240c7788a2ddd93adb68870d5d5265683
|
691cd5328bbec8fa53f6a6f26bc8071d3e70ef58
|
refs/heads/master
| 2023-03-28T15:02:49.689022
| 2021-04-03T09:53:26
| 2021-04-03T09:53:26
| 328,112,039
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 979
|
py
|
from timer import time_execution
# import sys
#
#
# sys.setrecursionlimit(10 ** 6)
@time_execution
def recursive_factorial(n):
def factorial(n):
if n == 0:
return 1
return n * factorial(n - 1)
return factorial(n)
@time_execution
def iterative_factorial(n):
if n < 0:
return 0
elif n == 0 or n == 1:
return 1
else:
fact = 1
while n > 1:
fact *= n
n -= 1
return fact
# print(f"5! = {recursive_factorial(777)}")
# print(f"5! = {iterative_factorial(777)}")
@time_execution
def recursive_fibonacci(n):
def inner(n):
if n in [0, 1]:
return n
return inner(n - 1) + inner(n - 2)
return inner(n)
@time_execution
def iterative_fibonacci(n):
    i = 0
    b = 1
    a = 0
    c = 0  # fib(0) = 0, and this avoids a NameError if the loop never runs
while i < n:
c = b + a
b = a
a = c
i += 1
return c
print(recursive_fibonacci(32))
print(iterative_fibonacci(32))
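# --- Added sketch (not part of the original file): memoized recursion ---
# functools.lru_cache keeps the recursive definition but caches sub-results,
# avoiding the exponential blow-up that recursive_fibonacci(32) pays above.
from functools import lru_cache

@lru_cache(maxsize=None)
def memoized_fibonacci(n):
    if n in (0, 1):
        return n
    return memoized_fibonacci(n - 1) + memoized_fibonacci(n - 2)

print(memoized_fibonacci(32))  # 2178309, matching the values printed above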
|
[
"solomoniosif@gmail.com"
] |
solomoniosif@gmail.com
|
e811ec107a083b1f682d0ad79cbf097409f2116a
|
b22588340d7925b614a735bbbde1b351ad657ffc
|
/athena/Control/PerformanceMonitoring/PerfMonTests/python/IoTestsLib.py
|
2a4ec4de377d084a030efa749b09c1a7a575b8d4
|
[] |
no_license
|
rushioda/PIXELVALID_athena
|
90befe12042c1249cbb3655dde1428bb9b9a42ce
|
22df23187ef85e9c3120122c8375ea0e7d8ea440
|
refs/heads/master
| 2020-12-14T22:01:15.365949
| 2020-01-19T03:59:35
| 2020-01-19T03:59:35
| 234,836,993
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 6,626
|
py
|
# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
## @file PerfMonTests.IoTestsLib
## @date April 2009
__author__ = "Sebastien Binet <binet@cern.ch>"
__version__ = "$Revision: 1.1 $"
__doc__ = """
a set of simple minded functions to test ROOT I/O (from python)
"""
from array import array as carray
import random
# set some dummy seed, for reproducibility
random.seed(20080910) # first LHC startup :)
from os import sysconf
_pagesz = sysconf('SC_PAGE_SIZE') / 1024 # in kb
_py_dtype_to_root = {
'i' : 'I',
'f' : 'F',
}
"""translates the usual python 'dtype' codes to the ROOT/CINT ones
"""
from PyUtils.Decorators import forking
def pymon():
"""returns (cpu[ms], vmem[kb], rss[kb])
"""
from resource import getrusage, RUSAGE_SELF
from string import split as ssplit
cpu = getrusage(RUSAGE_SELF)
mem = open('/proc/self/statm','r')
cpu = (cpu.ru_utime+cpu.ru_stime) * 1e3 # in milliseconds
mem = ssplit(mem.readlines()[0])
vmem = int(mem[0])*_pagesz
rss = int(mem[1])*_pagesz
return cpu,vmem,rss
def comp_delta(d, verbose=False):
assert 'start' in d
assert 'stop' in d
assert len(d['start']) == 3
assert len(d['stop']) == 3
if verbose:
print repr(d)
delta = { 'cpu' : d['stop'][0] - d['start'][0],
'vmem': d['stop'][1] - d['start'][1],
'rss' : d['stop'][2] - d['start'][2],
'nbytes': -1
}
if 'nbytes' in d:
delta['nbytes'] = d['nbytes']
print "==> cpu: %(cpu)8.3f ms vmem: %(vmem)i kB rss: %(rss)i kB nbytes: %(nbytes)i kB" % delta
return delta
def import_ROOT():
import sys
# for ROOT...
if not '-b' in sys.argv:
sys.argv.insert(1, '-b')
import ROOT
return ROOT
ROOT = import_ROOT()
@forking
def io_test1_write(fname, nevts=1000, sz=1000, dtype='i'):
"""testing writing 1000 evts with arrays of 1000- integers
"""
f = ROOT.TFile.Open(fname, 'RECREATE')
t = ROOT.TTree('t', 't')
nevts= nevts
imax = sz
data = carray(dtype, imax*[ 0 ] )
#t.Branch( 'mynum', n, 'mynum/I' )
t.Branch( 'i', data, 'data[%d]/%s'%(imax, _py_dtype_to_root[dtype]) )
from random import randint
fill = t.Fill
for i in xrange(nevts):
for j in xrange(sz):
data[j] = randint(0, sz)
fill()
f.Write()
f.Close()
return
@forking
def io_test1_read(fname, verbose=False):
f = ROOT.TFile.Open(fname, 'READ')
t = f.Get('t')
assert t, "could not find tree 't'"
nevts = t.GetEntries()
if verbose:
print "::: reading [%s] (%i events) [sz=%s kB]" % (fname, nevts,
f.GetSize()/1024)
tot_bytes = 0
get_entry = t.GetEntry
start = pymon()
for ievt in xrange(nevts):
# copy next entry into memory and verify
nb = get_entry(ievt)
if nb <= 0:
continue
tot_bytes += nb
# use the values directly from the tree
data = getattr(t, 'data')
sz = len(data)
assert sz > 0
#print "::: ievt [%3i] : #data = %s" % (ievt, sz)
stop = pymon()
del t
f.Close()
return {'start' : start,
'stop' : stop,
'nbytes': tot_bytes/1024}
@forking
def io_test2_write(fname, nevts=1000, sz=1000, dtype='i'):
"""testing writing 1000 evts with arrays of (variable length) 1000- ints
"""
f = ROOT.TFile.Open(fname, 'RECREATE')
t = ROOT.TTree('t', 't')
nevts= nevts
imax = sz
n = carray( 'i', [ 0 ] )
data = carray( dtype, imax*[ 0 ] )
t.Branch( 'sz', n, 'sz/I' )
t.Branch( 'data', data, 'data[sz]/%s'%_py_dtype_to_root[dtype])
from random import randint
fill = t.Fill
for i in xrange(nevts):
jmax = randint(1, sz)
n[0] = jmax
for j in xrange(jmax):
data[j] = randint(0, sz)
fill()
f.Write()
f.Close()
return
@forking
def io_test2_read(fname, verbose=False):
f = ROOT.TFile.Open(fname, 'READ')
t = f.Get('t')
assert t, "could not find tree 't'"
nevts = t.GetEntries()
if verbose:
print "::: reading [%s] (%i events) [sz=%s kB]" % (fname, nevts,
f.GetSize()/1024)
tot_bytes = 0
get_entry = t.GetEntry
start = pymon()
for ievt in xrange(nevts):
# copy next entry into memory and verify
nb = get_entry(ievt)
if nb <= 0:
continue
tot_bytes += nb
# use the values directly from the tree
data = getattr(t, 'data')
sz = len(data)
assert sz > 0
#print "::: ievt [%3i] : #data = %s" % (ievt, sz)
stop = pymon()
del t
f.Close()
return {'start' : start,
'stop' : stop,
'nbytes': tot_bytes/1024}
### tests ---------------------------------------------------------------------
if __name__ == "__main__":
# FIXME: use 'nose' instead... for automatical test discovery
print "::: running all tests..."
nreads = 10 # nbr of times to repeat each 'read' test
mon_data = {}
# -----
# io_test1
# -----
# io_test1 - ints
fname = '/tmp/out_test1_ints.root'
w = io_test1_write(fname=fname,
nevts=100000, sz=1000,
dtype='i')
mon_data['io_test1-ints'] = []
for _ in xrange(nreads):
mon_data['io_test1-ints'].append(comp_delta(io_test1_read(fname=fname)))
# io_test1 - floats
fname = '/tmp/out_test1_flts.root'
w = io_test1_write(fname=fname,
nevts=100000, sz=1000,
dtype='f')
mon_data['io_test1-flts'] = []
for _ in xrange(nreads):
mon_data['io_test1-flts'].append(comp_delta(io_test1_read(fname=fname)))
# -----
# io_test2
# -----
# io_test2 - ints
fname = '/tmp/out_test2_ints.root'
w = io_test2_write(fname=fname,
nevts=100000, sz=1000,
dtype='i')
mon_data['io_test2-ints'] = []
for _ in xrange(nreads):
mon_data['io_test2-ints'].append(comp_delta(io_test2_read(fname=fname)))
# io_test2 - floats
fname = '/tmp/out_test2_floats.root'
w = io_test2_write(fname=fname,
nevts=100000, sz=1000,
dtype='f')
mon_data['io_test2-flts'] = []
for _ in xrange(nreads):
mon_data['io_test2-flts'].append(comp_delta(io_test2_read(fname=fname)))
print mon_data
|
[
"rushioda@lxplus754.cern.ch"
] |
rushioda@lxplus754.cern.ch
|
96dc1b0790b37b38c91a4371bce1044a9a8221dc
|
cb95d669749407510b9dd87518bea60d10cd478d
|
/migration/change_uq.py
|
38a176fff51b7662fedf44cea3ac89921c8ccc94
|
[] |
no_license
|
patarapolw/zhlib
|
465af0898912afe57ea99595bde6faf562124851
|
66b61c2a607eb0bff2cfe7f51c45789d865db044
|
refs/heads/master
| 2020-04-02T03:45:57.039084
| 2018-11-01T02:57:37
| 2018-11-01T02:57:37
| 153,982,936
| 4
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 295
|
py
|
from playhouse.migrate import SqliteMigrator, migrate
from zhlib import zh
if __name__ == '__main__':
migrator = SqliteMigrator(zh.database)
migrate(
migrator.drop_index('sentence', 'sentence_chinese'),
migrator.add_index('sentence', ('sentence', 'pinyin'), True)
)
|
[
"patarapolw@gmail.com"
] |
patarapolw@gmail.com
|
02daa1468251ba4567e1b5a2cf22a54aae0bebef
|
4e29395020ce78f435e75e0b3f1e09b227f6f4d8
|
/ataraxia/inference/ocr/recognition/crann/src/crannRec/recurrent.py
|
63481a0bd9fe0a199e952dd6ae3f352fa5fef01b
|
[] |
no_license
|
luoyangustc/argus
|
8b332d94af331a2594f5b1715ef74a4dd98041ad
|
2ad0df5d7355c3b81484f6625b82530b38b248f3
|
refs/heads/master
| 2020-05-25T21:57:37.815370
| 2019-05-22T09:42:40
| 2019-05-22T09:42:40
| 188,005,059
| 5
| 3
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,461
|
py
|
#coding:UTF-8
import torch.nn as nn
import time
class CompositeLSTM(nn.Module):
def __init__(self, nIn, nHidden, nOut, multi_gpu=False):
super(CompositeLSTM, self).__init__()
self.rnn = nn.LSTM(nIn, nHidden, bidirectional=True)
self.embedding = nn.Linear(nHidden * 2, nOut)
self.multi_gpu = multi_gpu
initrange = 0.08
print("Initializing Bidirectional LSTM...")
for weight in self.rnn.parameters():
weight.data.uniform_(-initrange, initrange)
def forward(self, input):
if self.multi_gpu:
self.rnn.flatten_parameters()
start = time.time()
recurrent, _ = self.rnn(input)
print('Recurrent Net cost: {:.3f}'.format(time.time() - start))
T, b, h = recurrent.size()
t_rec = recurrent.view(T*b, h)
output = self.embedding(t_rec)
output = output.view(T, b, -1)
return output
class MLayerLSTM(nn.Module):
def __init__(self, nIn, nHidden, nLayer, nClass, dropout, multi_gpu=False):
super(MLayerLSTM, self).__init__()
self.rnn = nn.LSTM(nIn, nHidden, nLayer, dropout=dropout, bidirectional=True)
self.embedding = nn.Linear(nHidden * 2, nClass)
self.multi_gpu = multi_gpu
initrange = 0.08
print("Initializing Bidirectional LSTM...")
for weight in self.rnn.parameters():
weight.data.uniform_(-initrange, initrange)
def forward(self, input):
if self.multi_gpu:
self.rnn.flatten_parameters()
recurrent, _ = self.rnn(input)
T, b, h = recurrent.size()
t_rec = recurrent.view(T*b, h)
output = self.embedding(t_rec)
output = output.view(T, b, -1)
return output
def compositelstm(rnn_conf, n_class):
in_dim = rnn_conf['n_In']
n_hidden = rnn_conf['n_Hidden']
multi_gpu = rnn_conf['multi_gpu']
model = nn.Sequential(
CompositeLSTM(in_dim, n_hidden, n_hidden, multi_gpu),
CompositeLSTM(n_hidden, n_hidden, n_class, multi_gpu)
)
return model
def lstm_2layer(rnn_conf, n_class):
in_dim = rnn_conf['n_In']
n_hidden = rnn_conf['n_Hidden']
n_layer = rnn_conf['n_Layer']
dropout = rnn_conf['dropout']
multi_gpu = rnn_conf['multi_gpu']
model = MLayerLSTM(in_dim, n_hidden, n_layer, n_class, dropout, multi_gpu)
return model
#TODO Implement Seq2Seq model
#class Seq2Seq(nn.Module):
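# --- Hypothetical usage sketch (added, not part of the original file) ---
# The config keys below are the ones these factory functions read; the sizes
# and class count are assumptions, not values from the original project.
#
#   rnn_conf = {'n_In': 512, 'n_Hidden': 256, 'n_Layer': 2, 'dropout': 0.3, 'multi_gpu': False}
#   model = lstm_2layer(rnn_conf, n_class=37)
#   out = model(torch.randn(26, 4, 512))   # (T, batch, n_In) -> (T, batch, n_class)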
|
[
"luoyang@qiniu.com"
] |
luoyang@qiniu.com
|
0b3ce2bb646fbb0331575ede06a06288df241849
|
5864e86954a221d52d4fa83a607c71bacf201c5a
|
/eveclientqatools/explosions.py
|
b7df83a42b9b24a0ca2895a5e0776b27306a98bb
|
[] |
no_license
|
connoryang/1v1dec
|
e9a2303a01e5a26bf14159112b112be81a6560fd
|
404f2cebf13b311e754d45206008918881496370
|
refs/heads/master
| 2021-05-04T02:34:59.627529
| 2016-10-19T08:56:26
| 2016-10-19T08:56:26
| 71,334,417
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 6,958
|
py
|
#Embedded file name: e:\jenkins\workspace\client_SERENITY\branches\release\SERENITY\packages\eveclientqatools\explosions.py
import uicontrols
import carbonui.const as uiconst
import evetypes
import util
from carbonui.primitives.gridcontainer import GridContainer
from eve.client.script.ui.control.eveCombo import Combo
from eve.client.script.ui.control.buttons import ButtonIcon
from carbonui.primitives.container import Container
from evegraphics.explosions.spaceObjectExplosionManager import SpaceObjectExplosionManager
from evegraphics.fsd.explosionBuckets import GetExplosionBucketIDByTypeID, GetExplosionRaces
SEARCH_DISTANCE = 1000000
class ExplosionDebugger(object):
def __init__(self):
self.name = 'Explosions'
self.windowID = 'Explosions_ ' + self.name
self._sceneManager = sm.GetService('sceneManager')
self._michelle = sm.GetService('michelle')
self.scroll = None
self.selectedBallsToExplosionBucketID = {}
self.ballIDToExplosion = {}
self.explosionBucketsUsedWhenWindowOpened = False
def GetBall(self, ballID = None):
if ballID is None:
ballID = self.shipId
return sm.GetService('michelle').GetBall(ballID)
def _OnClose(self):
SpaceObjectExplosionManager.USE_EXPLOSION_BUCKETS = self.explosionBucketsUsedWhenWindowOpened
def ShowUI(self):
self.explosionBucketsUsedWhenWindowOpened = SpaceObjectExplosionManager.USE_EXPLOSION_BUCKETS
wnd = uicontrols.Window.Open(windowID=self.windowID)
wnd.SetTopparentHeight(0)
wnd.SetMinSize([500, 250])
wnd.SetCaption(self.name)
wnd._OnClose = self._OnClose
main = wnd.GetMainArea()
bottomCont = Container(name='bottomCont', parent=main, align=uiconst.TOBOTTOM, height=30, width=50, padBottom=10)
explosionSelectionContainer = Container(name='explosionSelectionCont', parent=main, align=uiconst.TOBOTTOM, height=30, padTop=10, padBottom=10)
explosionContainer = Container(name='explosionContainer', parent=main, align=uiconst.TOALL, padBottom=10)
self.scroll = uicontrols.Scroll(parent=explosionContainer)
self.scroll.sr.id = 'explosionDebugScroll'
self.scroll.OnSelectionChange = self.OnSelectionChange
self.explosionCombo = Combo(name='myCombo', parent=explosionSelectionContainer, label='Set explosion to selected items', options=[('Random', None)], callback=self.OnExplosionSelected, align=uiconst.TOTOP, padRight=12, padLeft=12)
buttonGrid = GridContainer(name='buttonGrid', parent=bottomCont, align=uiconst.CENTER, width=150, height=20, lines=1, columns=3)
ButtonIcon(name='Play', parent=buttonGrid, align=uiconst.TORIGHT, width=20, height=20, iconSize=24, padRight=15, texturePath='res:/UI/Texture/Icons/play.png', func=self.Explode, hint='Play Explosions (the exploding ships will not survive)')
ButtonIcon(name='Refresh', parent=buttonGrid, align=uiconst.CENTER, width=20, height=20, iconSize=24, texturePath='res:/UI/Texture/Icons/replay.png', func=self.UpdateTable, hint='Update table')
ButtonIcon(name='ClearWrecks', parent=buttonGrid, align=uiconst.TOLEFT, width=20, height=20, iconSize=32, padLeft=15, texturePath='res:/UI/Texture/Icons/44_32_37.png', func=self.ClearWrecks, hint='Clear wrecks')
self.UpdateTable()
def UpdateTable(self):
layout = '%s<t>%s<t>%s<t>%s<t>%s<t>%s'
headers = ['distance (m)',
'itemID',
'Type Name',
'Group Name',
'Explosion Bucket ID',
'Selected Explosion']
content = []
ballpark = sm.GetService('michelle').GetBallpark()
balls = ballpark.GetBallsInRange(session.shipid, SEARCH_DISTANCE)
selectedEntries = []
for ballID in balls:
ball = sm.GetService('michelle').GetBall(ballID)
if not hasattr(ball, 'typeData') or getattr(ball, 'exploded', False):
continue
typeID = ball.typeData['typeID']
explosionBucketID = GetExplosionBucketIDByTypeID(typeID)
if explosionBucketID is None:
continue
typeName = evetypes.GetName(typeID)
groupName = evetypes.GetGroupName(typeID)
explosionRes = 'Random'
dist = util.FmtAmt(ballpark.DistanceBetween(session.shipid, ballID))
info = (dist,
ballID,
typeName,
groupName,
explosionBucketID,
explosionRes)
label = layout % info
entry = uicontrols.ScrollEntryNode(decoClass=uicontrols.SE_GenericCore, label=label)
if ballID in self.selectedBallsToExplosionBucketID:
selectedEntries.append(entry)
content.append(entry)
self.scroll.Load(contentList=content, headers=headers, fixedEntryHeight=18)
self.scroll.SelectNodes(selectedEntries)
def OnSelectionChange(self, selection):
self.selectedBallsToExplosionBucketID = {}
for item in selection:
itemInfo = item.label.split('<t>')
itemID = int(itemInfo[1])
explosionBucketID = int(itemInfo[4])
self.selectedBallsToExplosionBucketID[itemID] = explosionBucketID
explosionBuckets = set(self.selectedBallsToExplosionBucketID.values())
options = [('Random', None)]
for explosionBucketID in explosionBuckets:
for race, explosions in GetExplosionRaces(int(explosionBucketID)).iteritems():
for explosion in explosions:
options.append((explosion.filePath, explosion))
self.explosionCombo.LoadOptions(options)
def OnExplosionSelected(self, combobox, key, value):
selectedBalls = self.selectedBallsToExplosionBucketID.keys()
for ballID in selectedBalls:
            if value is None:
                self.ballIDToExplosion.pop(ballID, None)
            else:
                self.ballIDToExplosion[ballID] = value
for row in self.scroll.sr.nodes:
if not row.get('selected', 0):
continue
label = row.label
splitLabel = label.split('<t>')
            splitLabel[5] = value.filePath if value is not None else 'Random'
row.label = '<t>'.join(splitLabel)
self.scroll.ReloadNodes()
def Explode(self):
SpaceObjectExplosionManager.USE_EXPLOSION_BUCKETS = True
for ballID, explosion in self.ballIDToExplosion.iteritems():
SpaceObjectExplosionManager.SetPreferredExplosion(ballID, explosion)
for ballID in self.selectedBallsToExplosionBucketID:
sm.GetService('slash').SlashCmd('/kill %s' % ballID)
if ballID in self.ballIDToExplosion:
del self.ballIDToExplosion[ballID]
self.selectedBallsToExplosionBucketID = {}
def ClearWrecks(self):
sm.GetService('slash').SlashCmd('/unspawn range=%s only=groupWreck' % SEARCH_DISTANCE)
|
[
"le02005@163.com"
] |
le02005@163.com
|
9bb2e4a7ed40ed97b5149b0f6f1e2ac1f704ad6b
|
63d3a6255f2677f9d92205d62163b9d22a74c5c7
|
/modules/dynadb/migrations/0063_auto_20161221_1826.py
|
c58f76604e77a21a599a46e02764f5ddf4cef3f0
|
[
"Apache-2.0"
] |
permissive
|
GPCRmd/GPCRmd
|
9204f39b1bfbc800b13512b316e05e54ddd8af23
|
47d7a4e71025b70e15a0f752760873249932c54e
|
refs/heads/main
| 2023-09-04T11:13:44.285629
| 2023-08-29T13:43:01
| 2023-08-29T13:43:01
| 260,036,875
| 3
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 850
|
py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9.5 on 2016-12-21 17:26
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('dynadb', '0062_merge'),
]
operations = [
migrations.AlterField(
model_name='dyndbmodel',
name='type',
field=models.SmallIntegerField(choices=[(0, 'Apoform'), (1, 'Complex')], default=0),
),
migrations.AlterField(
model_name='dyndbsubmissionmolecule',
name='type',
field=models.SmallIntegerField(blank=True, choices=[(0, 'Orthosteric ligand'), (1, 'Allosteric ligand'), (2, 'Crystallographic waters'), (3, 'Crystallographic lipids'), (4, 'Crystallographic ions'), (5, 'Other')], default=0, null=True),
),
]
|
[
"adrian.garcia.recio@gmail.com"
] |
adrian.garcia.recio@gmail.com
|
c5ae45a375095336c401e1f966e0b4e474d46e8a
|
0b793bce2da8c3d09b7956c0672ddbffd46feaed
|
/atcoder/corp/codefes2016_qc_c.py
|
06989a728b75a41345f62c26b25d84e5a15ae4aa
|
[
"MIT"
] |
permissive
|
knuu/competitive-programming
|
c6c4e08fb231937d988bdc5a60a8ad6b31b97616
|
16bc68fdaedd6f96ae24310d697585ca8836ab6e
|
refs/heads/master
| 2021-01-17T09:39:02.647688
| 2020-11-07T03:17:22
| 2020-11-07T03:17:22
| 27,886,732
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 737
|
py
|
N = int(input())
A = [int(x) for x in input().split()]
B = [int(x) for x in input().split()]
INF = 10**9
record_A = []
record_B = []
record_A.append((A[0], A[0]))
for i in range(1, N):
if A[i] == A[i-1]:
record_A.append((1, record_A[-1][1]))
else:
record_A.append((A[i], A[i]))
record_B.append((B[-1], B[-1]))
for i in reversed(range(N-1)):
if B[i] == B[i+1]:
record_B.append((1, record_B[-1][1]))
else:
record_B.append((B[i], B[i]))
ans = 1
mod = 10**9 + 7
for (lb_a, ub_a), (lb_b, ub_b) in zip(record_A, reversed(record_B)):
lb, ub = max(lb_a, lb_b), min(ub_a, ub_b)
if ub - lb < 0:
print(0)
break
ans *= ub - lb + 1
ans %= mod
else:
print(ans)
|
[
"premier3next@gmail.com"
] |
premier3next@gmail.com
|
d9e648577a84d88311e187435c4adda1b002de3f
|
35fa8925e63f2b0f62ef6bfc1ff4e03cf42bd923
|
/tests/models/test_category.py
|
03d5ee1020c13013d8c46e00d4cfc63d278f2993
|
[
"Apache-2.0"
] |
permissive
|
TheLabbingProject/django_analyses
|
9e6f8b9bd2a84e8efe6dda6a15de6a3ecdf48ec1
|
5642579660fd09dde4a23bf02ec98a7ec264bceb
|
refs/heads/master
| 2023-02-26T07:53:53.142552
| 2023-02-17T08:12:17
| 2023-02-17T08:12:17
| 225,623,958
| 1
| 2
|
Apache-2.0
| 2023-02-17T08:12:18
| 2019-12-03T13:15:29
|
Python
|
UTF-8
|
Python
| false
| false
| 3,594
|
py
|
from django.test import TestCase
from django_analyses.models.category import Category
from tests.factories.category import CategoryFactory
class CategoryTestCase(TestCase):
"""
Tests for the :class:`~django_analyses.models.category.Category` model.
"""
def setUp(self):
"""
Adds the created instances to the tests' contexts.
For more information see unittest's :meth:`~unittest.TestCase.setUp` method.
"""
self.category = CategoryFactory()
##########
# Meta #
##########
def test_verbose_name_plural(self):
"""
Validate the `verbose name plural`_ of the
:class:`~django_analyses.models.category.Category` model.
.. _verbose name plural: https://docs.djangoproject.com/en/2.2/ref/models/options/#verbose-name-plural
"""
self.assertEqual(Category._meta.verbose_name_plural, "Categories")
def test_ordering(self):
"""
Validate the `ordering`_ of the
:class:`~django_analyses.models.category.Category` model.
.. _ordering: https://docs.djangoproject.com/en/2.2/ref/models/options/#ordering
"""
self.assertTupleEqual(Category._meta.ordering, ("title",))
##########
# Fields #
##########
# title
def test_title_max_length(self):
"""
Validate the max_length of the *title* field.
"""
field = self.category._meta.get_field("title")
self.assertEqual(field.max_length, 255)
def test_title_is_unique(self):
"""
Validates that the *title* field is unique.
"""
field = self.category._meta.get_field("title")
self.assertTrue(field.unique)
def test_title_blank_and_null(self):
"""
Validates that the *title* field may not be blank or null.
"""
field = self.category._meta.get_field("title")
self.assertFalse(field.blank)
self.assertFalse(field.null)
# description
def test_description_is_not_unique(self):
"""
Validates that the *description* field is not set to unique.
"""
field = self.category._meta.get_field("description")
self.assertFalse(field.unique)
def test_description_blank_and_null(self):
"""
Validates that the *description* field may be blank or null.
"""
field = self.category._meta.get_field("description")
self.assertTrue(field.blank)
self.assertTrue(field.null)
# parent
def test_parent_is_nullable(self):
"""
Validates that the *parent* field is nullable.
"""
field = self.category._meta.get_field("parent")
self.assertTrue(field.null)
def test_creation_with_parent_category(self):
"""
Tests creating a category with an existing category as the parent.
"""
new_category = CategoryFactory(parent=self.category)
self.assertEqual(new_category.parent, self.category)
    def test_setting_a_parent_category(self):
"""
Tests setting a parent category.
"""
parent = CategoryFactory()
self.category.parent = parent
self.category.save()
self.assertEqual(self.category.parent, parent)
###########
# Methods #
###########
def test_string(self):
"""
Validate the string output of the
        :class:`~django_analyses.models.category.Category` model.
"""
self.assertEqual(str(self.category), self.category.title)
|
[
"z.baratz@gmail.com"
] |
z.baratz@gmail.com
|
6845f29a5c09f0a2ad3e965b6e8a97e5f2963dbc
|
c2fd9c421b225862633f74f99a7a0dad635c5c67
|
/tree/RangeSumofBST.py
|
0b06571e618c824e7fd428daebeaebde12112bc8
|
[] |
no_license
|
yuhangxiaocs/LeetCodePy
|
3751881dbd78b581a1d75beea737aed28765988b
|
31012a004ba14ddfb468a91925d86bc2dfb60dd4
|
refs/heads/master
| 2020-12-20T19:36:55.421295
| 2020-11-24T17:01:15
| 2020-11-24T17:01:15
| 236,190,313
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,401
|
py
|
# Definition for a binary tree node.
class TreeNode(object):
def __init__(self, x):
self.val = x
self.left = None
self.right = None
def construct(self, t):
return 0
class Solution(object):
    # Exploit the BST property: prune subtrees whose values fall outside [L, R]
def rangeSumBST(self, root, L, R):
"""
:type root: TreeNode
:type L: int
:type R: int
:rtype: int
"""
if root == None:
return 0
if root.val < L:
return self.rangeSumBST(root.right, L, R)
elif root.val > R:
return self.rangeSumBST(root.left, L, R)
else:
return root.val + self.rangeSumBST(root.right, L, R) + self.rangeSumBST(root.left, L, R)
    # Simulate the recursion with an explicit stack to avoid recursive-call overhead
    # In Python, a list's append and pop operations make a convenient stack
def rangeSumBST2(self, root, L, R):
stack = []
stack.append(root)
rangeSum = 0
while (len(stack) > 0):
node = stack.pop()
if node == None:
continue
if node.val < L:
stack.append(node.right)
elif node.val > R:
stack.append(node.left)
else:
rangeSum += node.val
stack.append(node.left)
stack.append(node.right)
return rangeSum
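# Minimal usage sketch (not part of the original solution): the tree
# [10, 5, 15, 3, 7, None, 18] queried over the range [7, 15] sums to 32
# with either implementation.
if __name__ == '__main__':
    root = TreeNode(10)
    root.left = TreeNode(5)
    root.right = TreeNode(15)
    root.left.left = TreeNode(3)
    root.left.right = TreeNode(7)
    root.right.right = TreeNode(18)
    sol = Solution()
    print(sol.rangeSumBST(root, 7, 15))   # 32
    print(sol.rangeSumBST2(root, 7, 15))  # 32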
|
[
"1248618975@qq.com"
] |
1248618975@qq.com
|
372952efec21a12b8261f6363b873755ecc62eed
|
3ba0de5f13f6eae9434cd09964a9d69a6dbda636
|
/mako/lib/MemoryConfiguration.py
|
25db0ef519aed8814320b45d63efa19b9cfe7b46
|
[] |
no_license
|
fantastic001/Mako
|
513f43f4170896a807c4e297573e19125dc2066c
|
eb51f163b127f9c273ff9179d6ed55092fed369f
|
refs/heads/master
| 2022-01-18T20:10:33.141618
| 2022-01-02T12:30:03
| 2022-01-02T12:30:03
| 85,867,290
| 8
| 3
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 250
|
py
|
from . import Configuration
class MemoryConfiguration(Configuration):
    def __init__(self, data=None):
        # Avoid a shared mutable default argument across instances
        self.data = data if data is not None else {}
def open(self) -> dict:
return self.data
def save(self, params: dict):
self.data = params
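# Usage sketch (assumed): the configuration simply round-trips a dict held in memory.
# conf = MemoryConfiguration({"debug": True})
# conf.save({"debug": False})
# conf.open()  # -> {"debug": False}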
|
[
"stefan@lugons.org"
] |
stefan@lugons.org
|
51e65b7efa8aca4a4d89a8d1aaa1076f921df158
|
7455dcf23ca3c8d74abcb4ef223bf0506ccb1eb9
|
/PMD/map-pipeline/src/main/python/run.py
|
e6986ed77cda45243f56ac59bf06dfbec808a5cb
|
[] |
no_license
|
ResearchSoftwareInstitute/Duke-TIC
|
2e2ca9cadd52d672b5614aa6d661afb0ab0bf25d
|
f481103adc68b883cf70c101901f296b031954aa
|
refs/heads/master
| 2020-04-05T02:13:10.849193
| 2019-01-15T16:32:05
| 2019-01-15T16:32:05
| 156,468,435
| 0
| 1
| null | 2018-11-21T17:10:10
| 2018-11-07T00:41:49
|
Scala
|
UTF-8
|
Python
| false
| false
| 149
|
py
|
from utils import submit
import sys
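# Usage sketch (assumed from the argv handling below):
#   python run.py <host> <cache_dir> [extra args forwarded to tic.Transform]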
host = sys.argv[1]
cache_dir = sys.argv[2]
args = sys.argv[3:]
submit(host, cache_dir, "tic.Transform", *args)
|
[
"xuh@cs.unc.edu"
] |
xuh@cs.unc.edu
|
09d87b4f24a30478585165a9e590a4f858680692
|
2293c76c3d18e2fcd44ded90bd40113d26285663
|
/pyeccodes/defs/grib2/local/1098/centres_table.py
|
a1437b6b11a2846a97eca02ab304aaac8681e911
|
[
"Apache-2.0"
] |
permissive
|
ecmwf/pyeccodes
|
b1f121dbddf68d176a03805ed5144ba0b37ac211
|
dce2c72d3adcc0cb801731366be53327ce13a00b
|
refs/heads/master
| 2022-04-23T10:37:40.524078
| 2020-04-18T06:30:29
| 2020-04-18T06:30:29
| 255,554,540
| 9
| 3
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 867
|
py
|
def load(h):
return ({'abbr': 'eggr', 'code': 0, 'title': 'UK Met Office - UK'},
{'abbr': 'aemet', 'code': 1, 'title': 'AEMET- Spain HIRLAM'},
{'abbr': 'arpasim', 'code': 2, 'title': 'ARPA-SIM - Italy COSMO'},
{'abbr': 'metno', 'code': 3, 'title': 'Met.NO'},
{'abbr': 'zamg', 'code': 4, 'title': 'ZAMG / Austria'},
{'abbr': 'dwd', 'code': 5, 'title': 'DWD - Germany SRNWP'},
{'abbr': 'dnmi', 'code': 6, 'title': 'DNMI/Univ Oslo - Norway HIRLAM ALADIN'},
{'abbr': 'meteofrance', 'code': 7, 'title': 'Meteo-France / France'},
{'abbr': 'dmi', 'code': 8, 'title': 'DMI'},
{'abbr': 'hungary', 'code': 9, 'title': 'Hungary'},
{'abbr': 'czech', 'code': 10, 'title': 'Czech Republic'},
{'abbr': 'croatia', 'code': 11, 'title': 'Croatia'})
|
[
"baudouin.raoult@ecmwf.int"
] |
baudouin.raoult@ecmwf.int
|
817aa994789d584285af1b87544401eee6f12db6
|
f6f5db03e5f0fc43bf466730650fc2923d438189
|
/feedjack_wp_export/migrations/0005_auto__chg_field_taxonomyterm_term_name__chg_field_export_url.py
|
e20a4935fbbe851552188c40ef257e8fc19951be
|
[
"WTFPL"
] |
permissive
|
mk-fg/feedjack-wordpress-export
|
bd7e97adf5793067e909d7eaf14804eafaee5beb
|
72f034872d65cb0d10ff097a13627f7b86b13843
|
refs/heads/master
| 2023-08-23T03:55:01.381404
| 2012-08-29T11:04:32
| 2012-08-29T11:04:32
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 6,070
|
py
|
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Changing field 'TaxonomyTerm.term_name'
db.alter_column('feedjack_wp_export_taxonomyterm', 'term_name', self.gf('django.db.models.fields.CharField')(max_length=255))
# Changing field 'Export.url'
db.alter_column('feedjack_wp_export_export', 'url', self.gf('django.db.models.fields.CharField')(max_length=255))
def backwards(self, orm):
# Changing field 'TaxonomyTerm.term_name'
db.alter_column('feedjack_wp_export_taxonomyterm', 'term_name', self.gf('django.db.models.fields.CharField')(max_length=254))
# Changing field 'Export.url'
db.alter_column('feedjack_wp_export_export', 'url', self.gf('django.db.models.fields.CharField')(max_length=255))
models = {
'feedjack.feed': {
'Meta': {'ordering': "('name', 'feed_url')", 'object_name': 'Feed'},
'etag': ('django.db.models.fields.CharField', [], {'max_length': '127', 'blank': 'True'}),
'feed_url': ('django.db.models.fields.URLField', [], {'unique': 'True', 'max_length': '200'}),
'filters': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "'feeds'", 'blank': 'True', 'to': "orm['feedjack.Filter']"}),
'filters_logic': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '0'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'immutable': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'last_checked': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'last_modified': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'link': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'shortname': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
'skip_errors': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'tagline': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '200', 'blank': 'True'})
},
'feedjack.filter': {
'Meta': {'object_name': 'Filter'},
'base': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'filters'", 'to': "orm['feedjack.FilterBase']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'parameter': ('django.db.models.fields.CharField', [], {'max_length': '512', 'null': 'True', 'blank': 'True'})
},
'feedjack.filterbase': {
'Meta': {'object_name': 'FilterBase'},
'crossref': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'crossref_rebuild': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '0'}),
'crossref_span': ('django.db.models.fields.PositiveSmallIntegerField', [], {'null': 'True', 'blank': 'True'}),
'crossref_timeline': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '0'}),
'handler_name': ('django.db.models.fields.CharField', [], {'max_length': '256', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '64'})
},
'feedjack_wp_export.export': {
'Meta': {'ordering': "('url', 'blog_id', 'username')", 'unique_together': "(('url', 'blog_id'),)", 'object_name': 'Export'},
'blog_id': ('django.db.models.fields.PositiveIntegerField', [], {}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '63'}),
'url': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'username': ('django.db.models.fields.CharField', [], {'max_length': '63'})
},
'feedjack_wp_export.exportsubscriber': {
'Meta': {'ordering': "('export', '-is_active', 'feed')", 'object_name': 'ExportSubscriber'},
'export': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'subscriber_set'", 'to': "orm['feedjack_wp_export.Export']"}),
'feed': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'exports'", 'to': "orm['feedjack.Feed']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'processors': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'taxonomies': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': "orm['feedjack_wp_export.TaxonomyTerm']", 'null': 'True', 'blank': 'True'})
},
'feedjack_wp_export.taxonomyterm': {
'Meta': {'ordering': "('taxonomy', 'term_name', 'term_id')", 'unique_together': "(('taxonomy', 'term_name'), ('taxonomy', 'term_id'))", 'object_name': 'TaxonomyTerm'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'taxonomy': ('django.db.models.fields.CharField', [], {'max_length': '63'}),
'term_id': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}),
'term_name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'})
}
}
complete_apps = ['feedjack_wp_export']
|
[
"mk.fraggod@gmail.com"
] |
mk.fraggod@gmail.com
|
2ee8846e5a2086e11df153514d9ed5676a0b0ba3
|
d5ad13232e3f1ced55f6956bc4cbda87925c8085
|
/RNAseqMSMS/2-sv/2-split-mapped-sv/2-type.py
|
194578ce6452976b1ac7d6adbf8c5f41fddece1f
|
[] |
no_license
|
arvin580/SIBS
|
c0ba9a8a41f59cb333517c286f7d80300b9501a2
|
0cc2378bf62359ec068336ea4de16d081d0f58a4
|
refs/heads/master
| 2021-01-23T21:57:35.658443
| 2015-04-09T23:11:34
| 2015-04-09T23:11:34
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,415
|
py
|
import sys
import os
files = os.listdir('.')
ouFile1 = open('split-mapped-translocation','w')
ouFile2 = open('split-mapped-inversion','w')
ouFile3 = open('split-mapped-duplication','w')
ouFile4 = open('split-mapped-deletion','w')
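# Classification implemented by the branches below: hits on different chromosomes are
# written as translocations; hits with opposite orientations as inversions; hits with
# the same orientation are split into duplications or deletions by comparing the order
# of the reference midpoints with the order of the query midpoints.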
for f in files:
if f[-12:] =='not-splicing':
inFile = open(f)
while True:
line1 = inFile.readline()
line2 = inFile.readline()
if line1:
fields = line1.split()
ch1 = fields[3]
ch2 = fields[15]
pos1 = float(fields[10])
pos2 = float(fields[11])
pos3 = float(fields[22])
pos4 = float(fields[23])
qpos1 = float(fields[8])
qpos2 = float(fields[9])
qpos3 = float(fields[20])
qpos4 = float(fields[21])
mid1 = (pos1+pos2)/2
mid2 = (pos3+pos4)/2
qmid1 = (qpos1+qpos2)/2
qmid2 = (qpos3+qpos4)/2
if ch1 != ch2:
ouFile1.write(line1)
ouFile1.write(line2)
elif (pos1 - pos2)*(pos3-pos4) < 0:
ouFile2.write(line1)
ouFile2.write(line2)
else:
if (pos1 - pos2) < 0 and (pos3 - pos4) <0 :
if (mid1 - mid2)*(qmid1 - qmid2) < 0:
ouFile3.write(line1)
ouFile3.write(line2)
else:
ouFile4.write(line1)
ouFile4.write(line2)
elif (pos1 -pos2) >0 and (pos3 - pos4) > 0:
if (mid1 - mid2)*(qmid1 - qmid2) > 0:
ouFile3.write(line1)
ouFile3.write(line2)
else:
ouFile4.write(line1)
ouFile4.write(line2)
else:
ouFile4.write(line1)
ouFile4.write(line2)
#elif (mid1 - mid2)*(qmid1 - qmid2) < 0:
# ouFile3.write(line1)
# ouFile3.write(line2)
# print(str(mid1)+'\t'+str(mid2)+'\t'+str(qmid1)+'\t'+str(qmid2))
else:
break
inFile.close()
|
[
"sunahnice@gmail.com"
] |
sunahnice@gmail.com
|
1fcd177241175f152741cc56ddfb300b6eea02db
|
179d8aae260d20443e6e87613cff55d42587bc16
|
/examples/oneflow2onnx/models/test_resnet50.py
|
a1c5ff6baefa1dda15f6499ddd4777b30db9293f
|
[] |
no_license
|
666DZY666/oneflow_convert_tools
|
3b1f9d6ebaf154d7218236c332c6f9613b89a860
|
bb38c52954facbfe977e09c7e4706b7563a7b50c
|
refs/heads/main
| 2023-06-04T10:16:08.786531
| 2021-06-24T08:38:24
| 2021-06-24T08:38:24
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 5,920
|
py
|
"""
Copyright 2020 The OneFlow Authors. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import oneflow as flow
import oneflow.typing as tp
import onnx
import onnxruntime as ort
import numpy as np
from oneflow_onnx.oneflow2onnx.util import convert_to_onnx_and_check
BLOCK_COUNTS = [3, 4, 6, 3]
BLOCK_FILTERS = [256, 512, 1024, 2048]
BLOCK_FILTERS_INNER = [64, 128, 256, 512]
g_trainable = False
def _conv2d(
name,
input,
filters,
kernel_size,
strides=1,
padding="SAME",
data_format="NCHW",
dilations=1,
trainable=True,
# weight_initializer=flow.variance_scaling_initializer(data_format="NCHW"),
weight_initializer=flow.variance_scaling_initializer(
2, "fan_in", "random_normal", data_format="NCHW"
),
weight_regularizer=flow.regularizers.l2(1.0 / 32768),
):
weight = flow.get_variable(
name + "-weight",
shape=(filters, input.shape[1], kernel_size, kernel_size),
dtype=input.dtype,
initializer=weight_initializer,
regularizer=weight_regularizer,
model_name="weight",
trainable=trainable,
)
return flow.nn.conv2d(
input, weight, strides, padding, data_format, dilations, name=name
)
def _batch_norm(inputs, name=None, trainable=True):
return flow.layers.batch_normalization(
inputs=inputs,
axis=1,
momentum=0.9, # 97,
epsilon=1.001e-5,
center=True,
scale=True,
trainable=trainable,
training=trainable,
name=name,
)
def conv2d_affine(input, name, filters, kernel_size, strides, activation=None):
# input data_format must be NCHW, cannot check now
padding = "SAME" if strides > 1 or kernel_size > 1 else "VALID"
output = _conv2d(
name, input, filters, kernel_size, strides, padding, trainable=g_trainable
)
output = _batch_norm(output, name + "_bn", trainable=g_trainable)
if activation == "Relu":
output = flow.math.relu(output)
return output
def bottleneck_transformation(input, block_name, filters, filters_inner, strides):
a = conv2d_affine(
input, block_name + "_branch2a", filters_inner, 1, 1, activation="Relu",
)
b = conv2d_affine(
a, block_name + "_branch2b", filters_inner, 3, strides, activation="Relu",
)
c = conv2d_affine(b, block_name + "_branch2c", filters, 1, 1)
return c
def residual_block(input, block_name, filters, filters_inner, strides_init):
if strides_init != 1 or block_name == "res2_0":
shortcut = conv2d_affine(
input, block_name + "_branch1", filters, 1, strides_init
)
else:
shortcut = input
bottleneck = bottleneck_transformation(
input, block_name, filters, filters_inner, strides_init
)
return flow.math.relu(bottleneck + shortcut)
def residual_stage(input, stage_name, counts, filters, filters_inner, stride_init=2):
output = input
for i in range(counts):
block_name = "%s_%d" % (stage_name, i)
output = residual_block(
output, block_name, filters, filters_inner, stride_init if i == 0 else 1,
)
return output
def resnet_conv_x_body(input, on_stage_end=lambda x: x):
output = input
for i, (counts, filters, filters_inner) in enumerate(
zip(BLOCK_COUNTS, BLOCK_FILTERS, BLOCK_FILTERS_INNER)
):
stage_name = "res%d" % (i + 2)
output = residual_stage(
output, stage_name, counts, filters, filters_inner, 1 if i == 0 else 2,
)
on_stage_end(output)
return output
def resnet_stem(input):
conv1 = _conv2d("conv1", input, 1, 1, 2)
tmp = _batch_norm(conv1, "conv1_bn", trainable=g_trainable)
conv1_bn = flow.math.relu(tmp)
pool1 = flow.nn.max_pool2d(
conv1_bn, ksize=3, strides=2, padding="VALID", data_format="NCHW", name="pool1",
)
return pool1
def resnet50(images, trainable=True, need_transpose=False):
# note: images.shape = (N C H W) in cc's new dataloader, transpose is not needed anymore
if need_transpose:
images = flow.transpose(images, name="transpose", perm=[0, 3, 1, 2])
with flow.scope.namespace("Resnet"):
stem = resnet_stem(images)
body = resnet_conv_x_body(stem, lambda x: x)
pool5 = flow.nn.avg_pool2d(
body, ksize=7, strides=1, padding="VALID", data_format="NCHW", name="pool5",
)
fc1001 = flow.layers.dense(
flow.reshape(pool5, (pool5.shape[0], -1)),
units=1000,
use_bias=True,
kernel_initializer=flow.variance_scaling_initializer(
2, "fan_in", "random_normal"
),
# kernel_initializer=flow.xavier_uniform_initializer(),
bias_initializer=flow.random_uniform_initializer(),
kernel_regularizer=flow.regularizers.l2(1.0 / 32768),
trainable=trainable,
name="fc1001",
)
return fc1001
def test_resnet50():
@flow.global_function()
def InferenceNet(images: tp.Numpy.Placeholder((1, 3, 224, 224))):
logits = resnet50(images)
predictions = flow.nn.softmax(logits)
return predictions
convert_to_onnx_and_check(InferenceNet, flow_weight_dir=None, onnx_model_path="/tmp")
|
[
"1182563586@qq.com"
] |
1182563586@qq.com
|
f7af2abc696098cdcf7342806fe9a1fca0e927f0
|
9a7a7e43902b6bc5a9e96933da8814acf3f318a3
|
/Python3接口测试/Demo/requests_basic_demo.py
|
eae7986e0055a59d4e3ea0bcc34b73ba0340f15e
|
[] |
no_license
|
liuchangfu/python_script
|
9684d512f4bb09f37585e3fc56329be2ea8d6eb5
|
73f0e71364fc2271626e0deff54b4079ad92390c
|
refs/heads/master
| 2020-03-15T16:05:47.624545
| 2018-06-08T10:44:17
| 2018-06-08T10:44:17
| 132,226,941
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 915
|
py
|
#-*- coding:utf-8 -*-
__author__ = "苦叶子"
# Import the requests module
import requests
if __name__ == "__main__":
print("开源优测 - requests基本示例")
    # Send an HTTP GET request to fetch the GitHub API index
r = requests.get("https://api.github.com")
    # Response status code
status_code = r.status_code
    # Full response headers
headers = r.headers
    # Value of the content-type response header
content_type = r.headers["content-type"]
    # Encoding of the response body
code = r.encoding
    # Response body as text
text = r.text
    # If the response is JSON, we can get its parsed content
json_data = r.json()
    # Print all of the values collected above
    print("Status code: ", status_code)
    print("Response headers: ", headers)
    print("content-type: ", content_type)
    print("Encoding:", code)
    print("Text content: ", text)
    print("JSON content: ", json_data)
|
[
"shift_1220@163.com"
] |
shift_1220@163.com
|
b574c638e632c2c9acb969482d20a6e3aff555da
|
f3b233e5053e28fa95c549017bd75a30456eb50c
|
/p38a_input/L3FN/3FN-2S_MD_NVT_rerun/set_1ns_equi_1.py
|
b69dbe2d0723c7e0f6d2cdc6d7d1ae094c03f431
|
[] |
no_license
|
AnguseZhang/Input_TI
|
ddf2ed40ff1c0aa24eea3275b83d4d405b50b820
|
50ada0833890be9e261c967d00948f998313cb60
|
refs/heads/master
| 2021-05-25T15:02:38.858785
| 2020-02-18T16:57:04
| 2020-02-18T16:57:04
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 928
|
py
|
import os
dir = '/mnt/scratch/songlin3/run/p38a/L3FN/MD_NVT_rerun/ti_one-step/3FN_2S/'
filesdir = dir + 'files/'
temp_equiin = filesdir + 'temp_equi_1.in'
temp_pbs = filesdir + 'temp_1ns_equi_1.pbs'
lambd = [ 0.00922, 0.04794, 0.11505, 0.20634, 0.31608, 0.43738, 0.56262, 0.68392, 0.79366, 0.88495, 0.95206, 0.99078]
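# For each lambda window: recreate a clean working directory, instantiate the
# equilibration input and PBS script from the templates by substituting the lambda
# value, copy the merged topology and the equilibrated restart file, and submit
# the job with qsub.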
for j in lambd:
os.system("rm -r %6.5f" %(j))
os.system("mkdir %6.5f" %(j))
os.chdir("%6.5f" %(j))
os.system("rm *")
workdir = dir + "%6.5f" %(j) + '/'
#equiin
eqin = workdir + "%6.5f_equi_1.in" %(j)
os.system("cp %s %s" %(temp_equiin, eqin))
os.system("sed -i 's/XXX/%6.5f/g' %s" %(j, eqin))
#PBS
pbs = workdir + "%6.5f_1ns_equi_1.pbs" %(j)
os.system("cp %s %s" %(temp_pbs, pbs))
os.system("sed -i 's/XXX/%6.5f/g' %s" %(j, pbs))
#top
os.system("cp ../3FN-2S_merged.prmtop .")
os.system("cp ../0.5_equi_0.rst .")
#submit pbs
os.system("qsub %s" %(pbs))
os.chdir(dir)
|
[
"songlin3@msu.edu"
] |
songlin3@msu.edu
|
7fe2b984bb64556c73259340aa07d9b479af10c0
|
781e2692049e87a4256320c76e82a19be257a05d
|
/assignments/python/wc/src/475.py
|
7044ba9d04fd4df419719828541451ec5195f793
|
[] |
no_license
|
itsolutionscorp/AutoStyle-Clustering
|
54bde86fe6dbad35b568b38cfcb14c5ffaab51b0
|
be0e2f635a7558f56c61bc0b36c6146b01d1e6e6
|
refs/heads/master
| 2020-12-11T07:27:19.291038
| 2016-03-16T03:18:00
| 2016-03-16T03:18:42
| 59,454,921
| 4
| 0
| null | 2016-05-23T05:40:56
| 2016-05-23T05:40:56
| null |
UTF-8
|
Python
| false
| false
| 133
|
py
|
def word_count(phrase):
words = {}
for word in phrase.split():
words[word] = words.get(word, 0) + 1
return words
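# Usage sketch (assumed): counts whitespace-separated words.
# word_count("one fish two fish")  # -> {'one': 1, 'fish': 2, 'two': 1}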
|
[
"rrc@berkeley.edu"
] |
rrc@berkeley.edu
|
01cf0d870aefe802fe4b97ed4766e1610c28530b
|
75dcb56e318688499bdab789262839e7f58bd4f6
|
/_algorithms_challenges/leetcode/LeetcodePythonProject/leetcode_0651_0700/LeetCode668_KthSmallestNumberInMultiplicationTable.py
|
3d0c274f6424ca868adade8603128f21123179a1
|
[] |
no_license
|
syurskyi/Algorithms_and_Data_Structure
|
9a1f358577e51e89c862d0f93f373b7f20ddd261
|
929dde1723fb2f54870c8a9badc80fc23e8400d3
|
refs/heads/master
| 2023-02-22T17:55:55.453535
| 2022-12-23T03:15:00
| 2022-12-23T03:15:00
| 226,243,987
| 4
| 1
| null | 2023-02-07T21:01:45
| 2019-12-06T04:14:10
|
Jupyter Notebook
|
UTF-8
|
Python
| false
| false
| 1,148
|
py
|
'''
Created on Oct 11, 2017
@author: MT
'''
class Solution(object):
def findKthNumber(self, m, n, k):
"""
:type m: int
:type n: int
:type k: int
:rtype: int
"""
low, high = 1, m*n+1
while low < high:
mid = (low+high)//2
c = self.count(mid, m, n)
if c >= k:
high = mid
else:
low = mid+1
return high
def count(self, val, m, n):
count = 0
for i in range(1, m+1):
tmp = min(val//i, n)
count += tmp
return count
def test(self):
testCases = [
[
3,
3,
5,
],
[
2,
3,
6,
],
]
for m, n, k in testCases:
print('m: %s' % m)
print('n: %s' % n)
print('k: %s' % k)
result = self.findKthNumber(m, n, k)
print('result: %s' % result)
print('-='*30+'-')
if __name__ == '__main__':
Solution().test()
|
[
"sergejyurskyj@yahoo.com"
] |
sergejyurskyj@yahoo.com
|
e2b246ef45c75445029b5451c1379c4957530865
|
e70276d10c1161e8594a9d03ca8d89f9491f5a90
|
/example1.py
|
1959895a822248e6aa892ea4fd2d1cfdcc7685bb
|
[] |
no_license
|
py-yyc/twisted-postgres
|
655f177c26d3503524eeb82e9d5ce0dc2cb4da18
|
d45ad294d969ea60698021c4e63463596437a01c
|
refs/heads/master
| 2021-01-23T07:34:30.234497
| 2017-03-28T20:36:29
| 2017-03-28T20:36:29
| 86,429,271
| 0
| 0
| null | 2017-03-28T14:27:07
| 2017-03-28T07:35:21
|
JavaScript
|
UTF-8
|
Python
| false
| false
| 1,193
|
py
|
## <h1>txpostgres</h1>
from twisted.internet import defer, task
from txpostgres import txpostgres
_create_table = '''
DROP TABLE IF EXISTS todo;
CREATE TABLE todo
(
id SERIAL,
todo VARCHAR(254) NOT NULL,
created_at TIMESTAMP NOT NULL,
PRIMARY KEY (id)
);
'''
@task.react
@defer.inlineCallbacks
def main(reactor):
connections = []
for x in range(25):
conn = txpostgres.Connection()
db = yield conn.connect('dbname=postgres')
connections.append(db)
yield connections[0].runOperation(_create_table)
# a 'real' generator, round-robin all connections
def connection_generator():
while True:
for c in connections:
yield c
connect = connection_generator()
    dl = []  # Deferreds for the pending INSERT operations
for item in range(1000):
db = next(connect)
d = db.runOperation(
'INSERT INTO todo (todo, created_at) '
'VALUES (%s, NOW());', [item],
)
dl.append(d)
start = reactor.seconds()
yield defer.DeferredList(dl)
diff = reactor.seconds() - start
print("Took {}s".format(diff))
## show-output
|
[
"meejah@meejah.ca"
] |
meejah@meejah.ca
|
79495020056d56275a8f266ff7a23318b987552b
|
2bb90b620f86d0d49f19f01593e1a4cc3c2e7ba8
|
/pardus/tags/2007/applications/network/grsync/actions.py
|
69e6713e63b0e0168a7da300e514053089af4597
|
[] |
no_license
|
aligulle1/kuller
|
bda0d59ce8400aa3c7ba9c7e19589f27313492f7
|
7f98de19be27d7a517fe19a37c814748f7e18ba6
|
refs/heads/master
| 2021-01-20T02:22:09.451356
| 2013-07-23T17:57:58
| 2013-07-23T17:57:58
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 952
|
py
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright 2006 TUBITAK/UEKAE
# Licensed under the GNU General Public License, version 2.
# See the file http://www.gnu.org/copyleft/gpl.txt.
from pisi.actionsapi import autotools
from pisi.actionsapi import pisitools
from pisi.actionsapi import get
def setup():
autotools.autoconf() # for turkish patch
autotools.configure()
def build():
autotools.make()
def install():
autotools.rawInstall("DESTDIR=%(DESTINATION)s \
INSTALLDIR=%(DESTINATION)s/usr/bin \
MANDIR=%(DESTINATION)s/usr/share/man/man1 \
INCLUDEDIR=%(DESTINATION)s/usr/include \
LOCALEDIR=%(DESTINATION)s/usr/share/locale \
PKGCONFIGDIR=%(DESTINATION)s/usr/lib/pkgconfig" % {'DESTINATION': get.installDIR()})
pisitools.dodoc("AUTHORS", "COPYING", "README", "Changelog", "INSTALL", "NEWS")
|
[
"yusuf.aydemir@istanbul.com"
] |
yusuf.aydemir@istanbul.com
|
8768675fb9f7b9b350c1e7cacfd9cfc4b8ef5d8a
|
de5be7e4d9e20bbfda3ce8697afc3433a3ccf55d
|
/python_tutorial/excercise_13_oops_concept/multilevel_inhwrentance.py
|
9af603e09b2240ba940161d99b7718d5cf32ef4c
|
[] |
no_license
|
poojataksande9211/python_data
|
42a88e0a0395f383d4375000a3d01b894bd38e62
|
64c952d622abfa77f2fdfd737c210014fce153c5
|
refs/heads/main
| 2023-04-16T10:24:27.213764
| 2021-04-27T16:34:32
| 2021-04-27T16:34:32
| 360,673,774
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 977
|
py
|
# Multilevel inheritance example
class Phone:#base class/parent class
def __init__(self,model_name,brand,price):
self.model_name=model_name
self.brand=brand
self._price=max(price,0)
def full_name(self):
return f"{self.model_name} {self.brand}"
    def calling_no(self, phone_no):
return f"calling from {phone_no}"
class Smartphone(Phone):#derived class/child class
def __init__(self,model_name,brand,price,ram,internal_memory,rear_camera):
Phone.__init__(self,model_name,brand,price)
self.ram=ram
self.internal_memory=internal_memory
self.rear_camera=rear_camera
class flagshipPhone(Smartphone):
def __init__(self,model_name,brand,price,ram,internal_memory,rear_camera,front_camera):
Smartphone.__init__(self,model_name,brand,price,ram,internal_memory,rear_camera)
self.front_camera=front_camera
p1=flagshipPhone("1+","67ytr",78000,"16gb","4gb","7mp","9mp")
print(p1.full_name())
|
[
"amitganvir6@gmail.com"
] |
amitganvir6@gmail.com
|
a34d0ddd7df3516c21f514127949bf7cbd07ebc1
|
f0d713996eb095bcdc701f3fab0a8110b8541cbb
|
/oyS6TX4NXzpbfjL4a_12.py
|
db54f2431410716bc482bd2f3768b6388bb56f68
|
[] |
no_license
|
daniel-reich/turbo-robot
|
feda6c0523bb83ab8954b6d06302bfec5b16ebdf
|
a7a25c63097674c0a81675eed7e6b763785f1c41
|
refs/heads/main
| 2023-03-26T01:55:14.210264
| 2021-03-23T16:08:01
| 2021-03-23T16:08:01
| 350,773,815
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,659
|
py
|
"""
This challenge is based on the game Scrabble. Each word you play is scored
based on the point value of each tile/letter (see first table), as well as
additional points conferred by any special squares your tiles land on (see
second table).
Create a function that takes a list representing a row of squares in a
Scrabble board, and a string representing the word to be played. The list will
consist of `-` representing normal squares, and "DL", "TL", "DW" representing
special squares. Return the index of the list where the first letter of the
word should be placed to maximise the score of the word to be played. Return
the lowest index, if several exist.
Letter| Points
---|---
A| 1
B| 3
C| 3
D| 2
E| 1
F| 4
G| 2
H| 4
I| 1
J| 8
K| 5
L| 2
M| 3
N| 1
O| 1
P| 3
Q| 10
R| 1
S| 1
T| 1
U| 1
V| 4
W| 4
X| 8
Y| 4
Z| 10
Special Square| Meaning
---|---
DL| Double letter score - doubles the point value of a letter placed on the
square
TL| Triple letter score - triples the point value of a letter placed on the
square
DW| Double word score - doubles the score of an entire word if one of its
letters is placed on the square
### Examples
best_start(["-","DW","-","-","-","TL","-","-","-","TL","-","-","-","DW","-"], "quiz") ➞ 0
# Doubling the entire word maximises the score. Starting at
# indices 1,10, and 11 have the same effect, but the function
# should return the lowest index.
best_start(["-","DW","-","-","-","TL","-","-","-","TL","-","-","-","DW","-"], "quit") ➞ 5
# Tripling the first letter alone gives a higher score than
# doubling the entire word, as the other 3 letters have
# low point-values.
best_start(["-","DW","-","-","-","TL","-","-","-","TL","-","-","-","DW","-"], "quart") ➞ 9
# Tripling the first (high-scoring) letter, and doubling the word.
best_start(["-","DW","-","-","-","TL","-","-","-","TL","-","-","-","DW","-"], "quartz") ➞ 0
# Tripling the last (high-scoring) letter, and doubling the word.
# Index 9 has the same effect (tripling the first letter, doubling
# the word), but 0 is the lower index.
### Notes
N/A
"""
def best_start(lst, word):
points = [1,3,3,2,1,4,2,4,1,8,5,2,3,1,1,3,10,1,1,1,1,4,4,8,4,10]
lst2 = []
add = ['-','DL','TL']
for i in range(16-len(word)):
p = 0
multiple = 1
for j in range(len(word)):
if lst[i+j] == 'DW':
p += points[ord(word[j].lower())-97]
multiple *= 2
else:
p += (add.index(lst[i+j])+1)*points[ord(word[j].lower())-97]
lst2.append(multiple*p)
return lst2.index(max(lst2))
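# Minimal usage sketch (assumed; expected values mirror the examples in the docstring above):
if __name__ == "__main__":
    row = ["-","DW","-","-","-","TL","-","-","-","TL","-","-","-","DW","-"]
    print(best_start(row, "quiz"))    # 0
    print(best_start(row, "quit"))    # 5
    print(best_start(row, "quart"))   # 9
    print(best_start(row, "quartz"))  # 0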
|
[
"daniel.reich@danielreichs-MacBook-Pro.local"
] |
daniel.reich@danielreichs-MacBook-Pro.local
|
a36ea33f2fdd065e7a8deca00f0cebbf46407cdc
|
f03bd5bd7873c5cc33b4ef5199f219539f3a340e
|
/CAAPR/CAAPR_AstroMagic/PTS/pts/modeling/misc/geometryplotter.py
|
ba2446591d26f513d77abd0a0603b2eed23ff4a1
|
[
"GPL-1.0-or-later",
"AGPL-3.0-only",
"AGPL-3.0-or-later",
"LicenseRef-scancode-other-copyleft",
"LicenseRef-scancode-philippe-de-muyter",
"MIT"
] |
permissive
|
Stargrazer82301/CAAPR
|
5f8a7033b16792f23abd5d07021b53b9228a5db4
|
62b2339beb2eb956565e1605d44d92f934361ad7
|
refs/heads/master
| 2022-08-29T02:53:33.658022
| 2022-08-05T19:06:46
| 2022-08-05T19:06:46
| 49,977,601
| 8
| 1
|
MIT
| 2022-08-05T19:06:47
| 2016-01-19T19:32:42
|
Python
|
UTF-8
|
Python
| false
| false
| 5,856
|
py
|
#!/usr/bin/env python
# -*- coding: utf8 -*-
# *****************************************************************
# ** PTS -- Python Toolkit for working with SKIRT **
# ** © Astronomical Observatory, Ghent University **
# *****************************************************************
## \package pts.modeling.misc.geometryplotter Contains the GeometryPlotter class.
# -----------------------------------------------------------------
# Ensure Python 3 compatibility
from __future__ import absolute_import, division, print_function
# Import standard modules
from textwrap import wrap
import matplotlib.pyplot as plt
from matplotlib.patches import Ellipse as plt_Ellipse
from collections import OrderedDict
# Import the relevant PTS classes and modules
from ...core.tools.logging import log
from ..basics.models import SersicModel, ExponentialDiskModel, DeprojectionModel
# -----------------------------------------------------------------
pretty_colors = ["r", "dodgerblue", "purple", "darkorange", "lawngreen", "yellow", "darkblue", "teal", "darkgreen", "lightcoral", "crimson", "saddlebrown"]
# -----------------------------------------------------------------
class GeometryPlotter(object):
"""
This class...
"""
def __init__(self):
"""
The constructor ...
:return:
"""
# Call the constructor of the base class
super(GeometryPlotter, self).__init__()
# -- Attributes --
# The geometries
self.geometries = OrderedDict()
# The patches
self.patches = OrderedDict()
# The figure
self._figure = None
self._min_x = None
self._max_x = None
self._min_y = None
self._max_y = None
# Properties
self.title = None
self.format = None
self.transparent = False
# -----------------------------------------------------------------
def add_geometry(self, geometry, label):
"""
This function ...
:param geometry:
:param label:
:return:
"""
self.geometries[label] = geometry
# -----------------------------------------------------------------
def run(self, path):
"""
This function ...
:param path:
:return:
"""
# Create matplotlib patches from the geometries
self.create_patches()
# Plot
self.plot(path)
# -----------------------------------------------------------------
def create_patches(self):
"""
This function ...
:return:
"""
colors = iter(pretty_colors)
# Loop over the geometries
for label in self.geometries:
geometry = self.geometries[label]
x_center = 0.0
y_center = 0.0
major = None # 2 * major axis radius
minor = None # 2 * minor axis radius
angle = None # in degrees
if isinstance(geometry, SersicModel):
major = 2.0 * geometry.effective_radius.to("pc").value
minor = geometry.flattening * major
angle = geometry.tilt.to("deg").value
elif isinstance(geometry, ExponentialDiskModel):
major = 2.0 * geometry.radial_scale.to("pc").value
minor = 2.0 * geometry.axial_scale.to("pc").value
angle = geometry.tilt.to("deg").value
elif isinstance(geometry, DeprojectionModel):
minor = 2.0 * geometry.scale_height.to("pc").value
major = 0.3 * (geometry.pixelscale * geometry.x_size).to("pc").value
angle = 0.0
if self._min_x is None or 0.5*major > abs(self._min_x): self._min_x = - 0.5*major
if self._max_x is None or 0.5*major > self._max_x: self._max_x = 0.5*major
if self._min_y is None or 0.5*minor > abs(self._min_y): self._min_y = - 0.5*minor
if self._max_y is None or 0.5*minor > self._max_y: self._max_y = 0.5*minor
# Create the patch
color = next(colors)
ell = plt_Ellipse((x_center, y_center), major, minor, angle, edgecolor='none', facecolor=color, lw=3, alpha=0.7)
# Add the patch
self.patches[label] = ell
# -----------------------------------------------------------------
def plot(self, path):
"""
This function ...
:param path:
:return:
"""
# Inform the user
log.info("Plotting ...")
# Create the figure
self._figure = plt.figure()
ax = self._figure.add_subplot(111, aspect='equal')
for label in self.patches:
ax.add_patch(self.patches[label])
# TODO: add text for label
#plt.grid('on')
ax.set_xlim(self._min_x, self._max_x)
ax.set_ylim(self._min_y, self._max_y)
# Set the title
if self.title is not None: self._figure.suptitle("\n".join(wrap(self.title, 60)))
# Finish
self.finish_plot(path)
# -----------------------------------------------------------------
def finish_plot(self, path):
"""
This function ...
:param path:
:return:
"""
# Debugging
if type(path).__name__ == "BytesIO":
log.debug("Saving the SED plot to a buffer ...")
elif path is None: log.debug("Showing the SED plot ...")
else: log.debug("Saving the SED plot to " + str(path) + " ...")
# Save the figure
if path is not None: plt.savefig(path, bbox_inches='tight', pad_inches=0.25, transparent=self.transparent, format=self.format)
else: plt.show()
plt.close()
# -----------------------------------------------------------------
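# Usage sketch (assumed, using only the methods defined above):
# plotter = GeometryPlotter()
# plotter.add_geometry(bulge_model, "bulge")   # e.g. a SersicModel instance (hypothetical variable)
# plotter.add_geometry(disk_model, "disk")     # e.g. an ExponentialDiskModel instance (hypothetical variable)
# plotter.run("geometries.pdf")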
|
[
"cjrc88@gmail.com"
] |
cjrc88@gmail.com
|
7ee7efe6546c3a50ec69004cb842ff4254183a01
|
5201e237c0d58cdfdbc2fdf8103f9141161eb9f8
|
/itkBSplineTransformInitializerPython.pyi
|
6e847fd18da0e9fedf867511dbcf47d39fe173e9
|
[] |
no_license
|
hjmjohnson/itk-stubs
|
704f5b92a755e55b81d02fcad62a366143e125f3
|
771951d007ae425b758e088eae6f9e4ca0e4afb1
|
refs/heads/main
| 2023-01-22T05:50:33.649088
| 2020-12-04T01:31:09
| 2020-12-04T01:35:06
| 318,368,028
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 4,449
|
pyi
|
import itk.itkRGBPixelPython
from typing import Any
class _SwigNonDynamicMeta(type):
__setattr__: Any = ...
def itkBSplineTransformInitializerBSTD23IF2_New(): ...
class itkBSplineTransformInitializerBSTD23IF2(itk.ITKCommonBasePython.itkObject):
thisown: Any = ...
def __init__(self, *args: Any, **kwargs: Any) -> None: ...
__New_orig__: Any = ...
Clone: Any = ...
GetTransform: Any = ...
SetTransform: Any = ...
GetImage: Any = ...
SetImage: Any = ...
GetTransformDomainMeshSize: Any = ...
SetTransformDomainMeshSize: Any = ...
InitializeTransform: Any = ...
__swig_destroy__: Any = ...
cast: Any = ...
def New(*args: Any, **kargs: Any): ...
New: Any = ...
itkBSplineTransformInitializerBSTD23IF2___New_orig__: Any
itkBSplineTransformInitializerBSTD23IF2_cast: Any
def itkBSplineTransformInitializerBSTD23ISS2_New(): ...
class itkBSplineTransformInitializerBSTD23ISS2(itk.ITKCommonBasePython.itkObject):
thisown: Any = ...
def __init__(self, *args: Any, **kwargs: Any) -> None: ...
__New_orig__: Any = ...
Clone: Any = ...
GetTransform: Any = ...
SetTransform: Any = ...
GetImage: Any = ...
SetImage: Any = ...
GetTransformDomainMeshSize: Any = ...
SetTransformDomainMeshSize: Any = ...
InitializeTransform: Any = ...
__swig_destroy__: Any = ...
cast: Any = ...
def New(*args: Any, **kargs: Any): ...
New: Any = ...
itkBSplineTransformInitializerBSTD23ISS2___New_orig__: Any
itkBSplineTransformInitializerBSTD23ISS2_cast: Any
def itkBSplineTransformInitializerBSTD23IUC2_New(): ...
class itkBSplineTransformInitializerBSTD23IUC2(itk.ITKCommonBasePython.itkObject):
thisown: Any = ...
def __init__(self, *args: Any, **kwargs: Any) -> None: ...
__New_orig__: Any = ...
Clone: Any = ...
GetTransform: Any = ...
SetTransform: Any = ...
GetImage: Any = ...
SetImage: Any = ...
GetTransformDomainMeshSize: Any = ...
SetTransformDomainMeshSize: Any = ...
InitializeTransform: Any = ...
__swig_destroy__: Any = ...
cast: Any = ...
def New(*args: Any, **kargs: Any): ...
New: Any = ...
itkBSplineTransformInitializerBSTD23IUC2___New_orig__: Any
itkBSplineTransformInitializerBSTD23IUC2_cast: Any
def itkBSplineTransformInitializerBSTD33IF3_New(): ...
class itkBSplineTransformInitializerBSTD33IF3(itk.ITKCommonBasePython.itkObject):
thisown: Any = ...
def __init__(self, *args: Any, **kwargs: Any) -> None: ...
__New_orig__: Any = ...
Clone: Any = ...
GetTransform: Any = ...
SetTransform: Any = ...
GetImage: Any = ...
SetImage: Any = ...
GetTransformDomainMeshSize: Any = ...
SetTransformDomainMeshSize: Any = ...
InitializeTransform: Any = ...
__swig_destroy__: Any = ...
cast: Any = ...
def New(*args: Any, **kargs: Any): ...
New: Any = ...
itkBSplineTransformInitializerBSTD33IF3___New_orig__: Any
itkBSplineTransformInitializerBSTD33IF3_cast: Any
def itkBSplineTransformInitializerBSTD33ISS3_New(): ...
class itkBSplineTransformInitializerBSTD33ISS3(itk.ITKCommonBasePython.itkObject):
thisown: Any = ...
def __init__(self, *args: Any, **kwargs: Any) -> None: ...
__New_orig__: Any = ...
Clone: Any = ...
GetTransform: Any = ...
SetTransform: Any = ...
GetImage: Any = ...
SetImage: Any = ...
GetTransformDomainMeshSize: Any = ...
SetTransformDomainMeshSize: Any = ...
InitializeTransform: Any = ...
__swig_destroy__: Any = ...
cast: Any = ...
def New(*args: Any, **kargs: Any): ...
New: Any = ...
itkBSplineTransformInitializerBSTD33ISS3___New_orig__: Any
itkBSplineTransformInitializerBSTD33ISS3_cast: Any
def itkBSplineTransformInitializerBSTD33IUC3_New(): ...
class itkBSplineTransformInitializerBSTD33IUC3(itk.ITKCommonBasePython.itkObject):
thisown: Any = ...
def __init__(self, *args: Any, **kwargs: Any) -> None: ...
__New_orig__: Any = ...
Clone: Any = ...
GetTransform: Any = ...
SetTransform: Any = ...
GetImage: Any = ...
SetImage: Any = ...
GetTransformDomainMeshSize: Any = ...
SetTransformDomainMeshSize: Any = ...
InitializeTransform: Any = ...
__swig_destroy__: Any = ...
cast: Any = ...
def New(*args: Any, **kargs: Any): ...
New: Any = ...
itkBSplineTransformInitializerBSTD33IUC3___New_orig__: Any
itkBSplineTransformInitializerBSTD33IUC3_cast: Any
|
[
"hans-johnson@uiowa.edu"
] |
hans-johnson@uiowa.edu
|
9b892bd0533c2466c006109d413052b477861b4f
|
a1cbf221a6befed3891d75c69e2a546effd2499d
|
/authentication/Newapi/views.py
|
72124092bf1628bb4daabd0e9eaef153619b13da
|
[] |
no_license
|
Coder339/V-django-newCRM
|
9a93efbb252ba814241076ece17088af8dd15935
|
2182266204f54d301b7c087a99627d441e00fe54
|
refs/heads/master
| 2022-12-24T15:12:47.081949
| 2020-08-24T12:15:13
| 2020-08-24T12:15:13
| 247,274,031
| 0
| 2
| null | 2022-12-08T04:19:35
| 2020-03-14T12:39:13
|
Python
|
UTF-8
|
Python
| false
| false
| 4,182
|
py
|
import jwt
from django.conf import settings
from django.contrib import messages
from django.forms.models import model_to_dict
from django.http import Http404
from django.http import HttpResponseRedirect
from django.shortcuts import render, get_object_or_404
from django.views import View
from rest_framework import authentication, generics, mixins, permissions, status
from .serializer import *
from rest_framework.exceptions import NotFound
from rest_framework.permissions import (IsAuthenticated,
IsAuthenticatedOrReadOnly)
from rest_framework.response import Response
from rest_framework.views import APIView
# from authentication.models import (User, UserProfile, UserDevices)
from authentication.models import (User)
# from authentication.permissions import (
# IsClientAdmin,
# IsProfileOwner,
# IsOwnerOrAdmin)
from authentication.renderer import UserJSONRenderer, ClientJSONRenderer
from django.core.exceptions import ObjectDoesNotExist
from .serializer import (RegistrationSerializer, LoginSerializer)
from django.core.cache import cache
# from utils import BaseUtils
from utils.permissions import IsViewerOrReadOnly, IsReviewer, IsAdmin
# from utils.emailer import Emailer
from utils.util import generateOTP
# from utils.models import BaseAbstractModel
class RegistrationAPIView(generics.GenericAPIView):
"""Register new users."""
serializer_class = RegistrationSerializer
renderer_classes = (UserJSONRenderer,)
def post(self, request):
serializer = self.serializer_class(data=request.data)
serializer.is_valid(raise_exception=True)
serializer.save()
user_data = serializer.data
message = [
request,
user_data["email"]
]
response = {
"data": {
"user": dict(user_data),
"message": "Account created successfully",
"status": "success"
}
}
return Response(response, status=status.HTTP_201_CREATED)
class LoginAPIView(generics.GenericAPIView):
"""login a user via email"""
serializer_class = LoginSerializer
renderer_classes = (UserJSONRenderer,)
def post(self, request):
print('now here', request.data)
serializer = self.serializer_class(data=request.data)
serializer.is_valid(raise_exception=True)
user_data = serializer.data
response = {
"data": {
"user": dict(user_data),
"message": "You have successfully logged in",
"status": "success"
}
}
return Response(response, status=status.HTTP_200_OK)
class UserListCreateView(mixins.ListModelMixin,
mixins.CreateModelMixin,
generics.GenericAPIView):
queryset = User.objects.all()
serializer_class = UserSerializer
permission_classes = []
# authentication_classes = [SessionAuthentication]
lookup_field = 'pk'
def get(self,request,*args,**kwargs):
return self.list(request,*args,**kwargs)
def post(self,request,*args,**kwargs):
return self.create(request,*args,**kwargs)
# def perform_create(self,serializer):
# user = self.request.user
# serializer.save(user=user)
class UserDetailView(mixins.RetrieveModelMixin,
mixins.UpdateModelMixin,
mixins.DestroyModelMixin,
generics.GenericAPIView):
queryset = User.objects.all()
serializer_class = UserSerializer
permission_classes = []
# authentication_classes = [SessionAuthentication]
lookup_field = 'pk'
def get(self, request, *args, **kwargs):
return self.retrieve(request, *args, **kwargs)
def put(self, request, *args, **kwargs):
return self.update(request, *args, **kwargs)
def delete(self, request, *args, **kwargs):
return self.destroy(request, *args, **kwargs)
|
[
"amanpreet.leanvia@gmail.com"
] |
amanpreet.leanvia@gmail.com
|
961e3e5d418160b3651c41f909de28656b25a0da
|
c41497aef2158cbe051eea3c80889847e03a34ce
|
/scrap/migrations/0005_auto_20200523_1841.py
|
4ba0ce822d98aaf70bb48b358fe27b048504c0b9
|
[] |
no_license
|
NicolleLouis/scrap_freelance
|
27e4570b2d2804783879927f4c29d7ff4804acd9
|
f9d0e750651e4ff4def2d39427c4918ac057aa9d
|
refs/heads/master
| 2022-08-27T14:22:38.047438
| 2020-05-28T12:44:26
| 2020-05-28T12:44:26
| 251,595,293
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 4,220
|
py
|
# Generated by Django 3.0.4 on 2020-05-23 18:41
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('scrap', '0004_auto_20200523_1246'),
]
operations = [
migrations.AddField(
model_name='bike',
name='amortisseur',
field=models.TextField(blank=True, null=True),
),
migrations.AddField(
model_name='bike',
name='boitier_de_pedalier',
field=models.TextField(blank=True, null=True),
),
migrations.AddField(
model_name='bike',
name='cadre',
field=models.TextField(blank=True, null=True),
),
migrations.AddField(
model_name='bike',
name='cassette',
field=models.TextField(blank=True, null=True),
),
migrations.AddField(
model_name='bike',
name='chaine',
field=models.TextField(blank=True, null=True),
),
migrations.AddField(
model_name='bike',
name='cintre',
field=models.TextField(blank=True, null=True),
),
migrations.AddField(
model_name='bike',
name='coloris',
field=models.TextField(blank=True, null=True),
),
migrations.AddField(
model_name='bike',
name='derailleur_arriere',
field=models.TextField(blank=True, null=True),
),
migrations.AddField(
model_name='bike',
name='derailleur_avant',
field=models.TextField(blank=True, null=True),
),
migrations.AddField(
model_name='bike',
name='extras',
field=models.TextField(blank=True, null=True),
),
migrations.AddField(
model_name='bike',
name='fourche',
field=models.TextField(blank=True, null=True),
),
migrations.AddField(
model_name='bike',
name='freins',
field=models.TextField(blank=True, null=True),
),
migrations.AddField(
model_name='bike',
name='jantes',
field=models.TextField(blank=True, null=True),
),
migrations.AddField(
model_name='bike',
name='leviers_de_frein',
field=models.TextField(blank=True, null=True),
),
migrations.AddField(
model_name='bike',
name='manettes',
field=models.TextField(blank=True, null=True),
),
migrations.AddField(
model_name='bike',
name='moyeux',
field=models.TextField(blank=True, null=True),
),
migrations.AddField(
model_name='bike',
name='pedales',
field=models.TextField(blank=True, null=True),
),
migrations.AddField(
model_name='bike',
name='pedalier',
field=models.TextField(blank=True, null=True),
),
migrations.AddField(
model_name='bike',
name='pneus',
field=models.TextField(blank=True, null=True),
),
migrations.AddField(
model_name='bike',
name='poids',
field=models.TextField(blank=True, null=True),
),
migrations.AddField(
model_name='bike',
name='potence',
field=models.TextField(blank=True, null=True),
),
migrations.AddField(
model_name='bike',
name='rayons',
field=models.TextField(blank=True, null=True),
),
migrations.AddField(
model_name='bike',
name='selle',
field=models.TextField(blank=True, null=True),
),
migrations.AddField(
model_name='bike',
name='tailles',
field=models.TextField(blank=True, null=True),
),
migrations.AddField(
model_name='bike',
name='tige_de_selle',
field=models.TextField(blank=True, null=True),
),
]
|
[
"louisxnicolle@gmail.com"
] |
louisxnicolle@gmail.com
|
blob_id: a71e544bf006f57c8759ee695e014662dc59ea3f
directory_id: 3d19e1a316de4d6d96471c64332fff7acfaf1308
path: /Users/A/awalias/european_league_tables_by_year.py
content_id: 4e206a1c8aa5872e5562db75e75110a4b1e8aa0f
detected_licenses: [] | license_type: no_license
repo_name: BerilBBJ/scraperwiki-scraper-vault
snapshot_id: 4e98837ac3b1cc3a3edb01b8954ed00f341c8fcc
revision_id: 65ea6a943cc348a9caf3782b900b36446f7e137d
branch_name: refs/heads/master
visit_date: 2021-12-02T23:55:58.481210 | revision_date: 2013-09-30T17:02:59 | committer_date: 2013-09-30T17:02:59
github_id: null | star_events_count: 0 | fork_events_count: 0
gha_license_id: null | gha_event_created_at: null | gha_created_at: null | gha_language: null
src_encoding: UTF-8 | language: Python | is_vendor: false | is_generated: false
length_bytes: 3,892 | extension: py
content:
# Scraper for Premier League, Bundesliga, and Serie A league tables between 2009-2012
# Extra countries can be added (france) or Divisions (england2, germany2 etc.) in countries list
# Ant Wilson 2013
import scraperwiki
import lxml.html

countries = ['england', 'germany', 'italy']


class EOS_Table(object):
    """class representing the league table at the end of the season"""
    fields = ["Position",
              "Team",
              "Matches played",
              "Matches won",
              "Draws",
              "Matches lost",
              "Goals For",
              "Goals Against",
              "Goal Difference",
              "Points",
              "League",
              "Year"]

    def is_ascii(self, s):
        return all(ord(c) < 128 for c in s)

    # when initialised, entity will parse for selectors and save resulting dict
    def __init__(self, element, year, league):
        row = element.cssselect("tr")
        for el in row:
            td = el.cssselect("td")
            store = {}
            if self.is_ascii(td[0].text_content()):
                for i in range(0, 10):
                    store[self.fields[i]] = td[i].text_content().strip()
                store[self.fields[10]] = league
                store[self.fields[11]] = year
                store['Key'] = store['Team'] + '-' + str(store['Year'])
                scraperwiki.sqlite.save(unique_keys=["Key"], data=store)


# main. Grabs league table for each combination of country-year. Leagues/Countries set at top of file.
for country in countries:
    for year in range(2009, 2013):
        html = scraperwiki.scrape("http://www.soccerstats.com/latest.asp?league=%s_%s" % (country, year))
        root = lxml.html.fromstring(html)
        for element in root.cssselect("table.stat"):
            EOS_Table(element, year, country)
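As a follow-up, a small sketch of how the saved rows could be read back. This assumes the classic ScraperWiki library's default swdata table name, which the file above never states:

# Hypothetical read-back of the rows saved by EOS_Table; assumes the default
# ScraperWiki SQLite table name "swdata".
import scraperwiki

rows = scraperwiki.sqlite.select("* from swdata where League = 'england' and Year = 2012")
for row in rows:
    print(row['Position'], row['Team'], row['Points'])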
authors: ["pallih@kaninka.net"] | author_id: pallih@kaninka.net
blob_id: 7f7c5b5bf16c39cf0d28d88b300d7c8220fd855c
directory_id: 8f5f0c3ef83fdd482387973149738f6178477a42
path: /medium/algos/combination_sum_iii.py
content_id: 9fb25611bcb7124aaf1677d2282f38179e15f76f
detected_licenses: [] | license_type: no_license
repo_name: nicokuzak/leetcode
snapshot_id: 79a5771ad83786cc7dbfd790f8fffcf1ce58794e
revision_id: 39b0235dc429a97a7cba0689d44641a6af6d7a32
branch_name: refs/heads/main
visit_date: 2023-04-06T21:02:09.553185 | revision_date: 2021-04-14T22:21:20 | committer_date: 2021-04-14T22:21:20
github_id: 336,847,511 | star_events_count: 0 | fork_events_count: 0
gha_license_id: null | gha_event_created_at: null | gha_created_at: null | gha_language: null
src_encoding: UTF-8 | language: Python | is_vendor: false | is_generated: false
length_bytes: 1,529 | extension: py
content:
"""Find all valid combinations of k numbers that sum up to n such that the following conditions are true:
Only numbers 1 through 9 are used.
Each number is used at most once.
Return a list of all possible valid combinations. The list must not contain the same combination twice, and the combinations may be returned in any order.
Example 1:
Input: k = 3, n = 7
Output: [[1,2,4]]
Explanation:
1 + 2 + 4 = 7
There are no other valid combinations.
Example 2:
Input: k = 3, n = 9
Output: [[1,2,6],[1,3,5],[2,3,4]]
Explanation:
1 + 2 + 6 = 9
1 + 3 + 5 = 9
2 + 3 + 4 = 9
There are no other valid combinations.
Example 3:
Input: k = 4, n = 1
Output: []
Explanation: There are no valid combinations. [1,2,1] is not valid because 1 is used twice.
Example 4:
Input: k = 3, n = 2
Output: []
Explanation: There are no valid combinations.
Example 5:
Input: k = 9, n = 45
Output: [[1,2,3,4,5,6,7,8,9]]
Explanation:
1 + 2 + 3 + 4 + 5 + 6 + 7 + 8 + 9 = 45
There are no other valid combinations.
Constraints:
2 <= k <= 9
1 <= n <= 60"""
class Solution:
def combinationSum3(self, k: int, n: int) -> List[List[int]]:
res = []
def dfs(cur, k, n, nxt):
if len(cur) == k:
if sum(cur) == n:
res.append(cur)
return
for j in range(len(nxt)):
dfs(cur[:]+[nxt[j]], k, n, nxt[j+1:])
for i in range(1, 10):
dfs([i], k, n, [num for num in range(i+1,10)])
return res
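A quick usage check of the class above, using input/output pairs taken from the problem statement; the __main__ guard is added here only for illustration:

# Illustrative sanity check; expected outputs come from the examples in the docstring.
if __name__ == "__main__":
    s = Solution()
    print(s.combinationSum3(3, 7))   # [[1, 2, 4]]
    print(s.combinationSum3(3, 9))   # [[1, 2, 6], [1, 3, 5], [2, 3, 4]]
    print(s.combinationSum3(4, 1))   # []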
authors: ["nicokuzak95@gmail.com"] | author_id: nicokuzak95@gmail.com
blob_id: 67b9ba8f95fe5eb0985c03d506574f1bc41c9344
directory_id: 3c1639bccf3fc0abc9c82c00ab92ac3f25cf105e
path: /book/section-8-函数/02-函数的实参和形参(位置参数).py
content_id: f1632e35aef8b467e9d0352a5e544321e51d7496
detected_licenses: ["Apache-2.0"] | license_type: permissive
repo_name: LiuJunb/PythonStudy
snapshot_id: 783318a64496c2db41442ad66e0cc9253b392734
revision_id: 3386b9e3ccb398bfcfcd1a3402182811f9bb37ca
branch_name: refs/heads/master
visit_date: 2022-12-11T05:22:53.725166 | revision_date: 2018-11-15T01:34:37 | committer_date: 2018-11-15T01:34:37
github_id: 143,956,065 | star_events_count: 1 | fork_events_count: 0
gha_license_id: Apache-2.0 | gha_event_created_at: 2022-11-22T01:58:23 | gha_created_at: 2018-08-08T03:26:26 | gha_language: JavaScript
src_encoding: UTF-8 | language: Python | is_vendor: false | is_generated: false
length_bytes: 861 | extension: py
content:
# 1. Define a function (with two formal parameters)
def get_animal(animal_name, animal_type):
    """Get the animal's name and type"""
    print('name: ' + animal_name + ' --> type: ' + animal_type)


get_animal('🐱', 'animal')  # pass two positional arguments
get_animal('animal', '🐱')  # pass two positional arguments (swapped order)

# 2. Keyword arguments (avoid bugs caused by passing arguments in the wrong order)
# get_animal()  # get_animal() missing 2 required positional arguments: 'animal_name' and 'animal_type'
get_animal(animal_type='animal', animal_name='🐶')
get_animal(animal_name='🐷', animal_type='animal')


# 3. Default parameter values
def get_animal_info(animal_name='🐒', animal_type='animal'):
    """Get the animal's name and type"""
    print('name: ' + animal_name + ' --> type: ' + animal_type)


print('---------------')
get_animal_info()
get_animal_info('🐭')
get_animal_info(animal_type='Animal')
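For reference, the console output these calls should produce, assuming the separator print sits at module level as reconstructed above:

# Expected output:
# name: 🐱 --> type: animal
# name: animal --> type: 🐱
# name: 🐶 --> type: animal
# name: 🐷 --> type: animal
# ---------------
# name: 🐒 --> type: animal
# name: 🐭 --> type: animal
# name: 🐒 --> type: Animal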
authors: ["liujun@520it.com"] | author_id: liujun@520it.com