Column schema (dtype and observed range for each field):

| Column | dtype | Range / values |
|---|---|---|
| blob_id | string | length 40 |
| directory_id | string | length 40 |
| path | string | length 3–288 |
| content_id | string | length 40 |
| detected_licenses | list | length 0–112 |
| license_type | string | 2 classes |
| repo_name | string | length 5–115 |
| snapshot_id | string | length 40 |
| revision_id | string | length 40 |
| branch_name | string | 684 classes |
| visit_date | timestamp[us] | 2015-08-06 10:31:46 – 2023-09-06 10:44:38 |
| revision_date | timestamp[us] | 1970-01-01 02:38:32 – 2037-05-03 13:00:00 |
| committer_date | timestamp[us] | 1970-01-01 02:38:32 – 2023-09-06 01:08:06 |
| github_id | int64 | 4.92k – 681M (nullable ⌀) |
| star_events_count | int64 | 0 – 209k |
| fork_events_count | int64 | 0 – 110k |
| gha_license_id | string | 22 classes |
| gha_event_created_at | timestamp[us] | 2012-06-04 01:52:49 – 2023-09-14 21:59:50 (nullable ⌀) |
| gha_created_at | timestamp[us] | 2008-05-22 07:58:19 – 2023-08-21 12:35:19 (nullable ⌀) |
| gha_language | string | 147 classes |
| src_encoding | string | 25 classes |
| language | string | 1 class |
| is_vendor | bool | 2 classes |
| is_generated | bool | 2 classes |
| length_bytes | int64 | 128 – 12.7k |
| extension | string | 142 classes |
| content | string | length 128 – 8.19k |
| authors | list | length 1 |
| author_id | string | length 1–132 |
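The records below conform to this schema. As a quick orientation, here is a minimal, hedged sketch of iterating such a dataset with the Hugging Face `datasets` library; the dataset path used here is a placeholder, not the actual repository name.

```python
from datasets import load_dataset

# Placeholder dataset path -- substitute the real dataset repository.
ds = load_dataset("some-org/python-source-files", split="train", streaming=True)

for record in ds:
    # Each record pairs repository metadata with one source file.
    print(record["repo_name"], record["path"], record["license_type"])
    print(record["content"][:200])  # first 200 characters of the file
    break
```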
0266bfd3dabb66fbc32785187d7dd62dcd182a82
|
5399dd4580ea3f528753bc8b52a981743d62f8bb
|
/ML/m39_pickle.py
|
957e4037bd4d833cbf848e3087f487fa8f253ce3
|
[] |
no_license
|
iwillbeaprogramer/Study
|
3ac7c118ffe3981d78b4ad263cb62432eae13970
|
3bfe571da5bbfc545b994e5878e217f9306bde14
|
refs/heads/main
| 2023-05-07T16:31:05.564973
| 2021-05-27T14:50:00
| 2021-05-27T14:50:00
| 324,044,441
| 8
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,161
|
py
|
# eval_set
from xgboost import XGBClassifier,XGBRegressor
from sklearn.datasets import load_boston, load_breast_cancer
from sklearn.model_selection import train_test_split
import numpy as np
from sklearn.metrics import r2_score,accuracy_score,mean_squared_error
datasets = load_boston()
x = datasets.data
y = datasets.target
x_train,x_test,y_train,y_test = train_test_split(x,y,test_size=0.15,shuffle=True,random_state = 66)
model = XGBRegressor(n_estimators = 1000,learning_rate = 0.01,n_jobs=8)
model.fit(x_train,y_train,verbose=1,eval_metric = ['rmse','logloss'],eval_set = [(x_train,y_train),(x_test,y_test)],early_stopping_rounds=10)
aaa = model.score(x_test,y_test)
print(aaa)
y_pred = model.predict(x_test)
r2 = r2_score(y_test,y_pred)
rmse = mean_squared_error(y_test,y_pred)**0.5
print("r2 : ",r2)
print("rmse : ",rmse)
result = model.evals_result()
print(result)
# Save the trained model
import pickle
# pickle.dump(model, open("../data/xgb_save/m39.pickle.dat","wb"))
# print("save complete")
print("loading")
# (assumes the dump above was run previously so the pickle file exists)
model2 = pickle.load(open('../data/xgb_save/m39.pickle.dat','rb'))
print('loaded')
r22 = model2.score(x_test,y_test)  # score the reloaded model, not the in-memory one
print('r22 : ',r22)
|
[
"wisixicidi@gmail.com"
] |
wisixicidi@gmail.com
|
2816391722086df3dfeffc573cf0446551c2149b
|
4bed9030031fc99f6ea3d5267bd9e773f54320f8
|
/sparse/repos/MaayanLab/clustergrammer-widget/setup.py
|
0dd34fb3172e27c6f17267a1c21facac86ecae9f
|
[
"BSD-3-Clause"
] |
permissive
|
yuvipanda/mybinder.org-analytics
|
c5f4b939541d29727bc8d3c023b4d140de756f69
|
7b654e3e21dea790505c626d688aa15640ea5808
|
refs/heads/master
| 2021-06-13T05:49:12.447172
| 2018-12-22T21:48:12
| 2018-12-22T21:48:12
| 162,839,358
| 1
| 1
|
BSD-3-Clause
| 2021-06-10T21:05:50
| 2018-12-22T20:01:52
|
Jupyter Notebook
|
UTF-8
|
Python
| false
| false
| 5,515
|
py
|
from __future__ import print_function
from setuptools import setup, find_packages, Command
from setuptools.command.sdist import sdist
from setuptools.command.build_py import build_py
from setuptools.command.egg_info import egg_info
from subprocess import check_call
import os
import sys
import platform
here = os.path.dirname(os.path.abspath(__file__))
node_root = os.path.join(here, 'js')
is_repo = os.path.exists(os.path.join(here, '.git'))
npm_path = os.pathsep.join([
os.path.join(node_root, 'node_modules', '.bin'),
os.environ.get('PATH', os.defpath),
])
from distutils import log
log.set_verbosity(log.DEBUG)
log.info('setup.py entered')
log.info('$PATH=%s' % os.environ['PATH'])
LONG_DESCRIPTION = 'clustergrammer_widget'
def js_prerelease(command, strict=False):
"""decorator for building minified js/css prior to another command"""
class DecoratedCommand(command):
def run(self):
jsdeps = self.distribution.get_command_obj('jsdeps')
if not is_repo and all(os.path.exists(t) for t in jsdeps.targets):
# sdist, nothing to do
command.run(self)
return
try:
self.distribution.run_command('jsdeps')
except Exception as e:
missing = [t for t in jsdeps.targets if not os.path.exists(t)]
if strict or missing:
log.warn('rebuilding js and css failed')
if missing:
log.error('missing files: %s' % missing)
raise e
else:
log.warn('rebuilding js and css failed (not a problem)')
log.warn(str(e))
command.run(self)
update_package_data(self.distribution)
return DecoratedCommand
def update_package_data(distribution):
"""update package_data to catch changes during setup"""
build_py = distribution.get_command_obj('build_py')
# distribution.package_data = find_package_data()
# re-init build_py options which load package_data
build_py.finalize_options()
class NPM(Command):
description = 'install package.json dependencies using npm'
user_options = []
node_modules = os.path.join(node_root, 'node_modules')
targets = [
os.path.join(here, 'clustergrammer_widget', 'static', 'extension.js'),
os.path.join(here, 'clustergrammer_widget', 'static', 'index.js')
]
def initialize_options(self):
pass
def finalize_options(self):
pass
def has_npm(self):
try:
check_call(['npm', '--version'])
return True
except:
return False
def should_run_npm_install(self):
package_json = os.path.join(node_root, 'package.json')
node_modules_exists = os.path.exists(self.node_modules)
return self.has_npm()
def run(self):
has_npm = self.has_npm()
if not has_npm:
log.error("`npm` unavailable. If you're running this command using sudo, make sure `npm` is available to sudo")
env = os.environ.copy()
env['PATH'] = npm_path
if self.should_run_npm_install():
log.info("Installing build dependencies with npm. This may take a while...")
check_call(['npm', 'install'], cwd=node_root, stdout=sys.stdout, stderr=sys.stderr)
os.utime(self.node_modules, None)
for t in self.targets:
if not os.path.exists(t):
msg = 'Missing file: %s' % t
if not has_npm:
msg += '\nnpm is required to build a development version of widgetsnbextension'
raise ValueError(msg)
# update package data in case this created new files
update_package_data(self.distribution)
version_ns = {}
with open(os.path.join(here, 'clustergrammer_widget', '_version.py')) as f:
exec(f.read(), {}, version_ns)
setup_args = {
'name': 'clustergrammer_widget',
'version': version_ns['__version__'],
'description': 'clustergrammer_widget',
'long_description': LONG_DESCRIPTION,
'include_package_data': True,
'data_files': [
('share/jupyter/nbextensions/clustergrammer_widget', [
'clustergrammer_widget/static/extension.js',
'clustergrammer_widget/static/index.js',
'clustergrammer_widget/static/index.js.map',
]),
],
'install_requires': [
'ipywidgets>=5.1.5',
],
'packages': find_packages(),
'zip_safe': False,
'cmdclass': {
'build_py': js_prerelease(build_py),
'egg_info': js_prerelease(egg_info),
'sdist': js_prerelease(sdist, strict=True),
'jsdeps': NPM,
},
'author': 'Nicolas Fernandez',
'author_email': 'nickfloresfernandez@gmail.com',
'url': 'http://jupyter.org',
'keywords': [
'ipython',
'jupyter',
'widgets',
],
'classifiers': [
'Development Status :: 4 - Beta',
'Framework :: IPython',
'Intended Audience :: Developers',
'Intended Audience :: Science/Research',
'Topic :: Multimedia :: Graphics',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
],
}
setup(**setup_args)
|
[
"yuvipanda@gmail.com"
] |
yuvipanda@gmail.com
|
73c6fbe202adb37cfaa60701dbf155423a36249b
|
a775bb0ef2347a91aa1e6236f0e6eae6512a84ad
|
/src/robosub2019/armer.py
|
cbf0ec2d09da56ca925e8bb07392e777ba257059
|
[] |
no_license
|
Tartan-AUV/tartan-sub
|
66376e163681bd7dac47c62e41669c0c842badc0
|
0a05156a887cdd6467813a358973cc23e6e55227
|
refs/heads/master
| 2020-04-06T16:45:04.396871
| 2019-10-19T17:46:37
| 2019-10-19T17:46:37
| 157,632,649
| 0
| 4
| null | 2019-11-03T20:26:47
| 2018-11-15T01:10:48
|
Python
|
UTF-8
|
Python
| false
| false
| 855
|
py
|
#!/usr/bin/env python
import rospy
from std_msgs.msg import Bool
class Armer(object):
def __init__(self, run_config):
self.config = run_config
self.pub = rospy.Publisher(self.config.arming_topic, Bool, queue_size=1)
self.sub = rospy.Subscriber(self.config.arming_topic, Bool, self.callback)
self.armed = False
self.rate = rospy.Rate(1) # 1Hz
def arm(self):
while not rospy.is_shutdown() and not self.armed:
msg = Bool()
msg.data = True
self.pub.publish(msg)
self.rate.sleep()
return
def callback(self, msg):
self.armed = msg.data
def disarm(self):
while not rospy.is_shutdown():
msg = Bool()
msg.data = False
self.pub.publish(msg)
self.rate.sleep()
return
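A hypothetical usage sketch of the `Armer` class above; the real `run_config` comes from this project's own config objects, so the namedtuple and topic name here are stand-ins.

```python
# Hypothetical usage; requires a running ROS master.
from collections import namedtuple
import rospy

FakeConfig = namedtuple('FakeConfig', ['arming_topic'])  # stand-in for the project's run_config

rospy.init_node('armer_demo')
armer = Armer(FakeConfig(arming_topic='/arming'))
armer.arm()  # publishes True at 1 Hz until the callback reports the vehicle as armed
```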
|
[
"you@example.com"
] |
you@example.com
|
0b330a3afbbf55128aa6e962b16c14c7c7eaf126
|
99b0631baa2fd9ab2455d848b47febf581916272
|
/study_code/learn_nonlocal.py
|
108dc21fa3015ab9051ced4645bbf529a466aeb5
|
[] |
no_license
|
seceast/PyProjects
|
a934e366cb619f2610d75b9a0fb47d818814a4de
|
7be7193b4126ce920a3d3ffa4ef5d8743b3fa7d1
|
refs/heads/master
| 2023-03-07T22:23:21.229489
| 2021-02-25T05:37:58
| 2021-02-25T05:37:58
| 265,480,151
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 349
|
py
|
"""
-*- coding: utf-8 -*-
@author: yangyd
@file: learn_nonlocal.py
@time: 2019/10/14 0014 14:33
"""
def outner():
b = 10
def inner():
# 声明外部函数局部变量,不声明可以使用b但是无法修改b
nonlocal b
print(f'old_var = {b}')
b = 20
print(f'new_bar = {b}')
inner()
outner()
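For contrast, a small sketch (not part of the original file) of what happens without the `nonlocal` declaration: the assignment makes `b` local to `inner`, so reading it before the assignment fails.

```python
def outer_without_nonlocal():
    b = 10
    def inner():
        print(b)  # UnboundLocalError: b is assigned below, so Python treats it as local here
        b = 20
    inner()

# Calling outer_without_nonlocal() raises UnboundLocalError.
```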
|
[
"yangyadong25@163.com"
] |
yangyadong25@163.com
|
10dc28f9618ba9669fbf73bb52f0188b41ca1653
|
4bc2d855558ccb962991f997e9779919031687dd
|
/capstone/causalmodel/migrations/0026_suggestedintervention_is_priority.py
|
fa89dff57a9a21ae61b0079b3a2bd148a737e427
|
[] |
no_license
|
jmblontoc/Likha-Capstone
|
80081e44b7ad6457eb776432e623c6db8b7a17e2
|
e1c32911b58cd1419c8e1a554ac32210456d201d
|
refs/heads/master
| 2022-12-10T03:26:32.946638
| 2018-12-09T04:33:10
| 2018-12-09T04:33:10
| 134,726,142
| 0
| 1
| null | 2022-11-25T23:52:42
| 2018-05-24T14:21:36
|
Python
|
UTF-8
|
Python
| false
| false
| 413
|
py
|
# Generated by Django 2.1b1 on 2018-11-01 11:25
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('causalmodel', '0025_suggestedintervention'),
]
operations = [
migrations.AddField(
model_name='suggestedintervention',
name='is_priority',
field=models.BooleanField(default=True),
),
]
|
[
"37819032+jmblontoc@users.noreply.github.com"
] |
37819032+jmblontoc@users.noreply.github.com
|
3f52cb7f354134071ca7b9aa0ee2b810466b0d7b
|
d76cf9484f93c1822c71c87980e6f76a2c0f5786
|
/application/api/superclass/superclasses.py
|
e73ead1271653b8a6b572471b3917a103589597d
|
[] |
no_license
|
chris-hamberg/twitter
|
a02b0278eab609d696a84f39ea771a5936379561
|
b806de4e073a0f4791cde93111c69c4d44b6844a
|
refs/heads/master
| 2022-12-14T19:06:26.221176
| 2019-12-13T21:44:23
| 2019-12-13T21:44:23
| 172,611,261
| 0
| 0
| null | 2022-12-08T04:51:18
| 2019-02-26T01:02:13
|
Python
|
UTF-8
|
Python
| false
| false
| 4,721
|
py
|
try:
from twitter.application.api.superclass.abstract_base_class import AbstractBase
from twitter.application.subprocess.base64encode import base64encode
from twitter.application.subprocess.connection import requests
except ModuleNotFoundError as main:
from application.api.superclass.abstract_base_class import AbstractBase
from application.subprocess.base64encode import base64encode
from application.subprocess.connection import requests
finally:
from requests import get, post
import inspect
# ----------------------------------------------------------------------- #
'''
Special classes for handling edge cases, as they occur in the API definition.
'''
# ----------------------------------------------------------------------- #
class Base64(AbstractBase):
'''
update_profile_image, and update_profile_banner classes inherit from
this class.
'''
def __init__(self):
super().__init__()
self._method = post
self._endpoint = None
self._key = None
#NOTE do the base64 encoding procedure !!!
def __call__(self, **params):
self._data = base64encode(params)
try:
# NOTE direct_messages changed state
###### by adding the content-type
###### field to headers.
###### try to pop content-type.
self._headers.pop('content-type')
except KeyError as good:
pass
return AbstractBase.__call__(self, **params)
def __repr__(self): raise NotImplementedError
class Collision(AbstractBase):
'''
For some reason the API was written to have name collisions.
collections/entries had to be renamed to collections/entries_method,
list/members had to be renamed to list/members_method;
this superclass corrects the URLs and endpoints for those classes.
'''
def __init__(self):
super().__init__()
surrogate = self.__class__.__name__
biological = surrogate.split('_')[0]
self._url = self.url.replace(surrogate, biological)
self._endpoint = self._endpoint.split('_')[0]
def __repr__(self): raise NotImplementedError
class Empty(AbstractBase):
'''
settings, and remove_profile_banner classes inherit from this class.
'''
def __init__(self):
super().__init__()
del self._params, self._data
def __call__(self): #NOTE neither of these subclasses take any arg
return AbstractBase.__call__(self)
def __repr__(self):
return ' PARAMETERS: None'
class Media(AbstractBase):
def __init__(self):
super().__init__()
self._method = post
self._url = self.url.replace('api', 'upload')
self._url = self.url.replace(self.__class__.__name__.lower(),
'upload')
self._endpoint = self.endpoint.replace(self.__class__.__name__.lower(),
'upload')
try:
self._headers.pop('content-type')
except KeyError as good: pass
def __call__(self, **params):
if self.__class__.__name__ != 'upload':
params.update({'command': self.__class__.__name__})
return AbstractBase.__call__(self, **params)
def __repr__(self): raise NotImplementedError
class Numeric(AbstractBase):
'''
retweets, retweet, and unretweet classes inherit from this class.
All other status module classes are children of AbstractBase.
'''
def __init__(self, suffix=None):
# because the retweets class is a special edge case; where
# str.rstrip('.json') removes the trailing 's' from 'retweets',
# we need the following conditional statement to handle that
# deformation.
if not suffix:
suffix = '/{id}.json'
super().__init__()
self._method = post
self._url = self._url.rstrip('.json') + suffix
def __call__(self, **params): #NOTE these endpoints are of a special form.
url = self.url
self._url = self.url.format_map(params)
response = AbstractBase.__call__(self, **params)
self._url = url
return response
def __repr__(self): raise NotImplementedError
class Ternary(AbstractBase):
def __init__(self):
self._method = None
super().__init__()
frame = inspect.currentframe()
fpath = inspect.getouterframes(frame, 2)[9][1]
module = fpath.split('/')[-1].split('.')[0]
self._module = module
url = self.url.split('/')
url.insert(-2, self.parent)
url = '/'.join(url)
self._url = url
self._endpoint = '/'+self.parent+self._endpoint
def __repr__(self): raise NotImplementedError
|
[
"chris.hamberg@programmer.net"
] |
chris.hamberg@programmer.net
|
387a27431bd3d1af9f529f413813a0a29f54f3d5
|
9cd180fc7594eb018c41f0bf0b54548741fd33ba
|
/sdk/python/pulumi_azure_nextgen/logic/v20150801preview/get_integration_account_certificate.py
|
8d5432fc9120d562473d196d12733d96b4a0883b
|
[
"Apache-2.0",
"BSD-3-Clause"
] |
permissive
|
MisinformedDNA/pulumi-azure-nextgen
|
c71971359450d03f13a53645171f621e200fe82d
|
f0022686b655c2b0744a9f47915aadaa183eed3b
|
refs/heads/master
| 2022-12-17T22:27:37.916546
| 2020-09-28T16:03:59
| 2020-09-28T16:03:59
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 6,163
|
py
|
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union
from ... import _utilities, _tables
from . import outputs
__all__ = [
'GetIntegrationAccountCertificateResult',
'AwaitableGetIntegrationAccountCertificateResult',
'get_integration_account_certificate',
]
@pulumi.output_type
class GetIntegrationAccountCertificateResult:
def __init__(__self__, changed_time=None, created_time=None, key=None, location=None, metadata=None, name=None, public_certificate=None, tags=None, type=None):
if changed_time and not isinstance(changed_time, str):
raise TypeError("Expected argument 'changed_time' to be a str")
pulumi.set(__self__, "changed_time", changed_time)
if created_time and not isinstance(created_time, str):
raise TypeError("Expected argument 'created_time' to be a str")
pulumi.set(__self__, "created_time", created_time)
if key and not isinstance(key, dict):
raise TypeError("Expected argument 'key' to be a dict")
pulumi.set(__self__, "key", key)
if location and not isinstance(location, str):
raise TypeError("Expected argument 'location' to be a str")
pulumi.set(__self__, "location", location)
if metadata and not isinstance(metadata, dict):
raise TypeError("Expected argument 'metadata' to be a dict")
pulumi.set(__self__, "metadata", metadata)
if name and not isinstance(name, str):
raise TypeError("Expected argument 'name' to be a str")
pulumi.set(__self__, "name", name)
if public_certificate and not isinstance(public_certificate, str):
raise TypeError("Expected argument 'public_certificate' to be a str")
pulumi.set(__self__, "public_certificate", public_certificate)
if tags and not isinstance(tags, dict):
raise TypeError("Expected argument 'tags' to be a dict")
pulumi.set(__self__, "tags", tags)
if type and not isinstance(type, str):
raise TypeError("Expected argument 'type' to be a str")
pulumi.set(__self__, "type", type)
@property
@pulumi.getter(name="changedTime")
def changed_time(self) -> str:
"""
The changed time.
"""
return pulumi.get(self, "changed_time")
@property
@pulumi.getter(name="createdTime")
def created_time(self) -> str:
"""
The created time.
"""
return pulumi.get(self, "created_time")
@property
@pulumi.getter
def key(self) -> Optional['outputs.KeyVaultKeyReferenceResponse']:
"""
The key details in the key vault.
"""
return pulumi.get(self, "key")
@property
@pulumi.getter
def location(self) -> Optional[str]:
"""
The resource location.
"""
return pulumi.get(self, "location")
@property
@pulumi.getter
def metadata(self) -> Optional[Mapping[str, Any]]:
"""
The metadata.
"""
return pulumi.get(self, "metadata")
@property
@pulumi.getter
def name(self) -> Optional[str]:
"""
The resource name.
"""
return pulumi.get(self, "name")
@property
@pulumi.getter(name="publicCertificate")
def public_certificate(self) -> Optional[str]:
"""
The public certificate.
"""
return pulumi.get(self, "public_certificate")
@property
@pulumi.getter
def tags(self) -> Optional[Mapping[str, str]]:
"""
The resource tags.
"""
return pulumi.get(self, "tags")
@property
@pulumi.getter
def type(self) -> Optional[str]:
"""
The resource type.
"""
return pulumi.get(self, "type")
class AwaitableGetIntegrationAccountCertificateResult(GetIntegrationAccountCertificateResult):
# pylint: disable=using-constant-test
def __await__(self):
if False:
yield self
return GetIntegrationAccountCertificateResult(
changed_time=self.changed_time,
created_time=self.created_time,
key=self.key,
location=self.location,
metadata=self.metadata,
name=self.name,
public_certificate=self.public_certificate,
tags=self.tags,
type=self.type)
def get_integration_account_certificate(certificate_name: Optional[str] = None,
integration_account_name: Optional[str] = None,
resource_group_name: Optional[str] = None,
opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetIntegrationAccountCertificateResult:
"""
Use this data source to access information about an existing resource.
:param str certificate_name: The integration account certificate name.
:param str integration_account_name: The integration account name.
:param str resource_group_name: The resource group name.
"""
__args__ = dict()
__args__['certificateName'] = certificate_name
__args__['integrationAccountName'] = integration_account_name
__args__['resourceGroupName'] = resource_group_name
if opts is None:
opts = pulumi.InvokeOptions()
if opts.version is None:
opts.version = _utilities.get_version()
__ret__ = pulumi.runtime.invoke('azure-nextgen:logic/v20150801preview:getIntegrationAccountCertificate', __args__, opts=opts, typ=GetIntegrationAccountCertificateResult).value
return AwaitableGetIntegrationAccountCertificateResult(
changed_time=__ret__.changed_time,
created_time=__ret__.created_time,
key=__ret__.key,
location=__ret__.location,
metadata=__ret__.metadata,
name=__ret__.name,
public_certificate=__ret__.public_certificate,
tags=__ret__.tags,
type=__ret__.type)
|
[
"public@paulstack.co.uk"
] |
public@paulstack.co.uk
|
bc3c434190c692ff3c0094d20db9185411b09b8e
|
3caa6e43e3da18858a719c51dc9c63eb11014035
|
/build/LawnMowerRobot/hrp/am_loopmap/catkin_generated/pkg.installspace.context.pc.py
|
85b851e7aaef81366a23f89a188a48a8c38bea1b
|
[] |
no_license
|
rh-chen/catkin_ws
|
2ab5050340e7e62f818681d1c86b222f2b867ce9
|
102203a9c5b870862d5c4fcf465f3bf88247a007
|
refs/heads/master
| 2020-06-03T10:28:31.749952
| 2016-06-02T21:43:10
| 2016-06-02T21:43:10
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 487
|
py
|
# generated from catkin/cmake/template/pkg.context.pc.in
CATKIN_PACKAGE_PREFIX = ""
PROJECT_PKG_CONFIG_INCLUDE_DIRS = "/home/agneev/catkin_ws/install/include".split(';') if "/home/agneev/catkin_ws/install/include" != "" else []
PROJECT_CATKIN_DEPENDS = "nav_msgs;roscpp;std_msgs;tf;am_driver".replace(';', ' ')
PKG_CONFIG_LIBRARIES_WITH_PREFIX = "".split(';') if "" != "" else []
PROJECT_NAME = "am_loopmap"
PROJECT_SPACE_DIR = "/home/agneev/catkin_ws/install"
PROJECT_VERSION = "0.0.0"
|
[
"agneev@kth.se"
] |
agneev@kth.se
|
51c49fc3154deee52d0c8f7793fbddac1737f209
|
70bfc97fb0ff98d817f72f27775a7b467ae66c39
|
/firmware/python_modules/sha2017/dashboard/installer.py
|
2bd071a428f17b60fb027c5bec3caf6cb46245ee
|
[
"Apache-2.0"
] |
permissive
|
zakx/ESP32-platform-firmware
|
ae6ab2d023c06598dc6a9010fb4bfcf3a2d6710f
|
3593865602c2aba1f5a066a333ff864d9eaae4a3
|
refs/heads/master
| 2020-07-03T03:19:04.964007
| 2019-08-10T17:05:16
| 2019-08-10T17:05:16
| 201,767,805
| 0
| 0
| null | 2019-08-11T13:24:42
| 2019-08-11T13:24:42
| null |
UTF-8
|
Python
| false
| false
| 5,351
|
py
|
import orientation, dashboard.resources.woezel_repo as woezel_repo, term, easydraw, system, time, gc, ugfx, wifi, uos, json, sys, woezel, display
repo = woezel_repo
orientation.default()
def showMessage(msg, error=False, icon_wifi=False, icon_ok=False):
term.header(True, "Installer")
print(msg)
if error:
easydraw.messageCentered("ERROR\n\n"+msg, True, "/media/alert.png")
elif icon_wifi:
easydraw.messageCentered("PLEASE WAIT\n\n"+msg, True, "/media/wifi.png")
elif icon_ok:
easydraw.messageCentered(msg, True, "/media/ok.png")
else:
easydraw.messageCentered("PLEASE WAIT\n\n"+msg, True, "/media/busy.png")
# Generic actions
def btn_unhandled(pressed):
display.flush(display.FLAG_LUT_FASTEST)
def btn_exit(pressed):
if pressed:
system.launcher()
def btn_update(pressed):
if pressed:
repo.update()
system.start("installer", True)
# Categories list
categories_list = ugfx.List(0,0,ugfx.width(),ugfx.height()-48)
def show_categories(pressed=True):
if not pressed:
return
ugfx.clear(ugfx.WHITE)
#Hide category list
category_list.visible(False)
category_list.enabled(False)
#Show categories list
categories_list.visible(True)
categories_list.enabled(True)
#Input handling
ugfx.input_attach(ugfx.BTN_START, btn_exit)
ugfx.input_attach(ugfx.BTN_SELECT, btn_update)
ugfx.input_attach(ugfx.BTN_A, show_category)
ugfx.input_attach(ugfx.BTN_B, btn_unhandled)
ugfx.input_attach(ugfx.JOY_UP, btn_unhandled)
ugfx.input_attach(ugfx.JOY_DOWN, btn_unhandled)
ugfx.input_attach(ugfx.JOY_LEFT, btn_unhandled)
ugfx.input_attach(ugfx.JOY_RIGHT, btn_unhandled)
#Hint
easydraw.disp_string_right_bottom(0, "START: Exit app")
easydraw.disp_string_right_bottom(1, "A: Open category")
easydraw.disp_string_right_bottom(2, "SELECT: Update repo")
#Flush screen
display.flush(display.FLAG_LUT_NORMAL)
# Category browsing
category_list = ugfx.List(0,0,ugfx.width(),ugfx.height()-48)
def show_category(pressed=True):
if not pressed:
return
ugfx.clear(ugfx.WHITE)
global category
categories_list.visible(False)
categories_list.enabled(False)
slug = repo.categories[categories_list.selected_index()]["slug"]
showMessage("Loading "+slug+"...")
display.drawFill()
#Clean up list
while category_list.count() > 0:
category_list.remove_item(0)
try:
try:
category = repo.getCategory(slug)
except BaseException as e:
print("CAT OPEN ERR", e)
showMessage("Failed to open category "+slug+"!", True)
display.drawFill()
time.sleep(1)
show_categories()
gc.collect()
for package in category:
category_list.add_item("%s rev. %s" % (package["name"], package["revision"]))
category_list.selected_index(0)
category_list.visible(True)
category_list.enabled(True)
#Input handling
ugfx.input_attach(ugfx.BTN_START, btn_exit)
ugfx.input_attach(ugfx.BTN_SELECT, btn_unhandled)
ugfx.input_attach(ugfx.BTN_A, install_app)
ugfx.input_attach(ugfx.BTN_B, show_categories)
ugfx.input_attach(ugfx.JOY_UP, btn_unhandled)
ugfx.input_attach(ugfx.JOY_DOWN, btn_unhandled)
ugfx.input_attach(ugfx.JOY_LEFT, btn_unhandled)
ugfx.input_attach(ugfx.JOY_RIGHT, btn_unhandled)
#Hint
easydraw.disp_string_right_bottom(0, "START: Exit")
easydraw.disp_string_right_bottom(1, "A: Install app")
easydraw.disp_string_right_bottom(2, "B: Back")
#Flush screen
display.flush(display.FLAG_LUT_NORMAL)
except BaseException as e:
sys.print_exception(e)
print("ERROR", e)
showMessage("Internal error", True)
display.drawFill()
time.sleep(1)
show_categories()
# Install application
def install_app(pressed=True):
global category
if pressed:
slug = category[category_list.selected_index()]["slug"]
category = []
gc.collect()
category_list.visible(False)
category_list.enabled(False)
category_list.clear()
#Input handling
ugfx.input_attach(ugfx.BTN_START, btn_unhandled)
ugfx.input_attach(ugfx.BTN_SELECT, btn_unhandled)
ugfx.input_attach(ugfx.BTN_A, btn_unhandled)
ugfx.input_attach(ugfx.BTN_B, btn_unhandled)
ugfx.input_attach(ugfx.JOY_UP, btn_unhandled)
ugfx.input_attach(ugfx.JOY_DOWN, btn_unhandled)
ugfx.input_attach(ugfx.JOY_LEFT, btn_unhandled)
ugfx.input_attach(ugfx.JOY_RIGHT, btn_unhandled)
if not wifi.status():
wifi.connect()
wifi.wait()
if not wifi.status():
showMessage("Unable to connect to WiFi.")
display.drawFill()
time.sleep(2)
show_category()
showMessage("Installing "+slug+"...")
display.drawFill()
try:
woezel.install(slug)
showMessage("OK\n\n"+slug+" has been installed!", False, False, True)
display.drawFill()
time.sleep(2)
show_category()
except woezel.LatestInstalledError:
showMessage("NOTICE\n\nLatest version is already installed.", False, False, True)
display.drawFill()
time.sleep(2)
show_category()
except BaseException as e:
print("WOEZEL ERROR", e)
showMessage("Failed to install "+slug+"!", True)
display.drawFill()
time.sleep(2)
show_category()
#Main application
showMessage("Loading categories...")
display.drawFill()
if not repo.load():
if not repo.update():
if repo.lastUpdate==0:
showMessage("Failed to load repository. Returning to launcher...")
display.drawFill()
system.launcher()
for category in repo.categories:
categories_list.add_item("%s (%d) >" % (category["name"], category["eggs"]))
show_categories()
|
[
"renze@rnplus.nl"
] |
renze@rnplus.nl
|
853dc52871c3cacc341de2c08d13e6f8f092c466
|
91406a8a39d4cf9c9db57d57c13b74ea6a50b31d
|
/backend/theadvrou_19084/settings.py
|
b1326c53deb98ee4d1d5405cc5757874998d7f98
|
[] |
no_license
|
crowdbotics-apps/theadvrou-19084
|
9fb5d3ebb645f946ed70469daf0c8770e8692723
|
ecb1356777441578fafd5816638c4056bdaa8c21
|
refs/heads/master
| 2022-11-25T06:13:56.853550
| 2020-07-22T10:03:51
| 2020-07-22T10:03:51
| 281,493,497
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 5,806
|
py
|
"""
Django settings for theadvrou_19084 project.
Generated by 'django-admin startproject' using Django 2.2.2.
For more information on this file, see
https://docs.djangoproject.com/en/2.2/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/2.2/ref/settings/
"""
import os
import environ
env = environ.Env()
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = env.bool("DEBUG", default=False)
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/2.2/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = env.str("SECRET_KEY")
ALLOWED_HOSTS = env.list("HOST", default=["*"])
SITE_ID = 1
SECURE_PROXY_SSL_HEADER = ("HTTP_X_FORWARDED_PROTO", "https")
SECURE_SSL_REDIRECT = env.bool("SECURE_REDIRECT", default=False)
# Application definition
INSTALLED_APPS = [
"django.contrib.admin",
"django.contrib.auth",
"django.contrib.contenttypes",
"django.contrib.sessions",
"django.contrib.messages",
"django.contrib.staticfiles",
"django.contrib.sites",
"delivery_order",
"driver",
"menu",
"delivery_user_profile",
]
LOCAL_APPS = [
"home",
"users.apps.UsersConfig",
]
THIRD_PARTY_APPS = [
"rest_framework",
"rest_framework.authtoken",
"rest_auth",
"rest_auth.registration",
"bootstrap4",
"allauth",
"allauth.account",
"allauth.socialaccount",
"allauth.socialaccount.providers.google",
"django_extensions",
"drf_yasg",
# start fcm_django push notifications
"fcm_django",
# end fcm_django push notifications
]
INSTALLED_APPS += LOCAL_APPS + THIRD_PARTY_APPS
MIDDLEWARE = [
"django.middleware.security.SecurityMiddleware",
"django.contrib.sessions.middleware.SessionMiddleware",
"django.middleware.common.CommonMiddleware",
"django.middleware.csrf.CsrfViewMiddleware",
"django.contrib.auth.middleware.AuthenticationMiddleware",
"django.contrib.messages.middleware.MessageMiddleware",
"django.middleware.clickjacking.XFrameOptionsMiddleware",
]
ROOT_URLCONF = "theadvrou_19084.urls"
TEMPLATES = [
{
"BACKEND": "django.template.backends.django.DjangoTemplates",
"DIRS": [],
"APP_DIRS": True,
"OPTIONS": {
"context_processors": [
"django.template.context_processors.debug",
"django.template.context_processors.request",
"django.contrib.auth.context_processors.auth",
"django.contrib.messages.context_processors.messages",
],
},
},
]
WSGI_APPLICATION = "theadvrou_19084.wsgi.application"
# Database
# https://docs.djangoproject.com/en/2.2/ref/settings/#databases
DATABASES = {
"default": {
"ENGINE": "django.db.backends.sqlite3",
"NAME": os.path.join(BASE_DIR, "db.sqlite3"),
}
}
if env.str("DATABASE_URL", default=None):
DATABASES = {"default": env.db()}
# Password validation
# https://docs.djangoproject.com/en/2.2/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
"NAME": "django.contrib.auth.password_validation.UserAttributeSimilarityValidator",
},
{"NAME": "django.contrib.auth.password_validation.MinimumLengthValidator",},
{"NAME": "django.contrib.auth.password_validation.CommonPasswordValidator",},
{"NAME": "django.contrib.auth.password_validation.NumericPasswordValidator",},
]
# Internationalization
# https://docs.djangoproject.com/en/2.2/topics/i18n/
LANGUAGE_CODE = "en-us"
TIME_ZONE = "UTC"
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/2.2/howto/static-files/
STATIC_URL = "/static/"
MIDDLEWARE += ["whitenoise.middleware.WhiteNoiseMiddleware"]
AUTHENTICATION_BACKENDS = (
"django.contrib.auth.backends.ModelBackend",
"allauth.account.auth_backends.AuthenticationBackend",
)
STATIC_ROOT = os.path.join(BASE_DIR, "staticfiles")
STATICFILES_DIRS = [os.path.join(BASE_DIR, "static")]
STATICFILES_STORAGE = "whitenoise.storage.CompressedManifestStaticFilesStorage"
# allauth / users
ACCOUNT_EMAIL_REQUIRED = True
ACCOUNT_AUTHENTICATION_METHOD = "email"
ACCOUNT_USERNAME_REQUIRED = False
ACCOUNT_EMAIL_VERIFICATION = "mandatory"
ACCOUNT_CONFIRM_EMAIL_ON_GET = True
ACCOUNT_LOGIN_ON_EMAIL_CONFIRMATION = True
ACCOUNT_UNIQUE_EMAIL = True
LOGIN_REDIRECT_URL = "users:redirect"
ACCOUNT_ADAPTER = "users.adapters.AccountAdapter"
SOCIALACCOUNT_ADAPTER = "users.adapters.SocialAccountAdapter"
ACCOUNT_ALLOW_REGISTRATION = env.bool("ACCOUNT_ALLOW_REGISTRATION", True)
SOCIALACCOUNT_ALLOW_REGISTRATION = env.bool("SOCIALACCOUNT_ALLOW_REGISTRATION", True)
REST_AUTH_SERIALIZERS = {
# Replace password reset serializer to fix 500 error
"PASSWORD_RESET_SERIALIZER": "home.api.v1.serializers.PasswordSerializer",
}
REST_AUTH_REGISTER_SERIALIZERS = {
# Use custom serializer that has no username and matches web signup
"REGISTER_SERIALIZER": "home.api.v1.serializers.SignupSerializer",
}
# Custom user model
AUTH_USER_MODEL = "users.User"
EMAIL_HOST = env.str("EMAIL_HOST", "smtp.sendgrid.net")
EMAIL_HOST_USER = env.str("SENDGRID_USERNAME", "")
EMAIL_HOST_PASSWORD = env.str("SENDGRID_PASSWORD", "")
EMAIL_PORT = 587
EMAIL_USE_TLS = True
# start fcm_django push notifications
FCM_DJANGO_SETTINGS = {"FCM_SERVER_KEY": env.str("FCM_SERVER_KEY", "")}
# end fcm_django push notifications
if DEBUG:
# output email to console instead of sending
EMAIL_BACKEND = "django.core.mail.backends.console.EmailBackend"
|
[
"team@crowdbotics.com"
] |
team@crowdbotics.com
|
38ef0f1184875790dfef82c9ee5d3fb8e86bdc73
|
53bf080b397fb1250fc9be30b5978f7b14276fd8
|
/app/http/controllers/WelcomeController.py
|
3fde2bcb917796a4e3efb76d49a6dcee323fcfbc
|
[] |
no_license
|
afdolriski/masonite-starter
|
0297aea952ba6ca9bf86de8436bc33157af12292
|
1e61170e5ac0b40fba2c3022aaab4a5a8e6fe467
|
refs/heads/master
| 2021-05-12T18:43:34.160655
| 2018-01-10T03:15:27
| 2018-01-10T03:15:27
| 117,072,009
| 1
| 0
| null | 2018-01-11T08:30:10
| 2018-01-11T08:30:09
| null |
UTF-8
|
Python
| false
| false
| 330
|
py
|
''' A Module Description '''
from masonite.view import view
from config import application
class WelcomeController(object):
''' Controller for welcoming the user '''
def __init__(self):
pass
def show(self, request):
''' Show Welcome Template '''
return view('welcome', {'app': application})
|
[
"idmann509@gmail.com"
] |
idmann509@gmail.com
|
a80448101f8374c1c17802dfdd3ad542417d37a9
|
17cbe826892d06dc5aee4e4c2a5747e10933f2d0
|
/allennlp/scripts/train_fixtures.py
|
35f92b290cdb557c4bb1ba9ff665094925d8b8a5
|
[
"Apache-2.0",
"MIT"
] |
permissive
|
rahular/joint-coref-srl
|
3fdd0e37a56e3be894f3da4ceeb030a599ff4388
|
cd85fb4e11af1a1ea400ed657d0a4511c1d6c6be
|
refs/heads/main
| 2023-02-16T21:53:11.721014
| 2021-01-18T15:31:47
| 2021-01-18T15:31:47
| 330,708,579
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,821
|
py
|
#!/usr/bin/env python
import glob
import logging
import os
import re
import shutil
import sys
import tempfile
sys.path.insert(0, os.path.dirname(os.path.abspath(os.path.join(__file__, os.pardir))))
from allennlp.commands.test_install import _get_module_root
from allennlp.commands.train import train_model_from_file, train_model
from allennlp.common import Params
from allennlp.common.util import pushd
from allennlp.training.metrics import EvalbBracketingScorer
logger = logging.getLogger(__name__)
def train_fixture(config_prefix: str) -> None:
config_file = config_prefix + "experiment.json"
serialization_dir = config_prefix + "serialization"
# Train model doesn't like it if we have incomplete serialization
# directories, so remove them if they exist.
if os.path.exists(serialization_dir):
shutil.rmtree(serialization_dir)
# train the model
train_model_from_file(config_file, serialization_dir)
# remove unnecessary files
shutil.rmtree(os.path.join(serialization_dir, "log"))
for filename in glob.glob(os.path.join(serialization_dir, "*")):
if (
filename.endswith(".log")
or filename.endswith(".json")
or re.search(r"epoch_[0-9]+\.th$", filename)
):
os.remove(filename)
def train_fixture_gpu(config_prefix: str) -> None:
config_file = config_prefix + "experiment.json"
serialization_dir = config_prefix + "serialization"
params = Params.from_file(config_file)
params["trainer"]["cuda_device"] = 0
# train this one to a tempdir
tempdir = tempfile.gettempdir()
train_model(params, tempdir)
# now copy back the weights and archived model
shutil.copy(
os.path.join(tempdir, "best.th"), os.path.join(serialization_dir, "best_gpu.th")
)
shutil.copy(
os.path.join(tempdir, "model.tar.gz"),
os.path.join(serialization_dir, "model_gpu.tar.gz"),
)
if __name__ == "__main__":
module_root = _get_module_root().parent
with pushd(module_root, verbose=True):
if len(sys.argv) >= 2 and sys.argv[1].lower() == "gpu":
train_fixture_gpu("allennlp/tests/fixtures/srl/")
else:
models = [
"biaffine_dependency_parser",
"constituency_parser",
"coref",
"decomposable_attention",
"encoder_decoder/composed_seq2seq",
"encoder_decoder/simple_seq2seq",
"encoder_decoder/copynet_seq2seq",
"simple_tagger_with_span_f1",
"srl",
]
for model in models:
if model == "constituency_parser":
EvalbBracketingScorer.compile_evalb()
train_fixture(f"allennlp/tests/fixtures/{model}/")
|
[
"rahul@di.ku.dk"
] |
rahul@di.ku.dk
|
1eb118f5187ab4a3c4a08f6e93c8c1611df906d2
|
7b4cf9df77a8f5f716dece430c9465b1de137a30
|
/src/rubricsampling/short_answer_test.py
|
392581f97870dcfcb6c025aa236fdcdf982e71d0
|
[] |
no_license
|
willcrichton/generative-grading
|
b440074e64c36a1fd982b4331e2d4ea36cbd57e4
|
a36e0c91f778817f8b79d36a06e4b982b1f30245
|
refs/heads/master
| 2020-06-06T16:27:02.103847
| 2019-07-04T21:22:40
| 2019-07-04T21:22:40
| 192,791,584
| 0
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,536
|
py
|
#!/usr/bin/env python
import pickle
import os.path
import sys
import generatorUtils as utils
from pprint import pprint
from tqdm import tqdm
from engine import Engine
from engineTempered import EngineTempered
from engineGuidedInference import EngineGuided
from src.datasets.citizenship_labels import CitizenshipLabels
import torch
from torch.utils.data import DataLoader
import matplotlib.pyplot as plt
import getpass
USER = getpass.getuser()
GRAMMAR_DIR = 'src/rubricsampling/grammars/citizenship13'
EXP_DIR = f'/home/{USER}/generative-grading/experiments/citizenship13_100k/2019-04-12--01_26_27'
class RubricSampler(object):
def create_data_loader(self, dataset, n=None):
if n is None:
n = len(dataset)
out = []
raw_progs = []
for i in range(n):
prog = dataset.raw_inputs[i]
if prog == 'religious freedom':
out.append(dataset[i])
raw_progs.append(prog)
break
return out, raw_progs
# Function: Run
# -------------
# This function compiles and renders samples
# from the Rubric Sample
def run(self):
inf_e = EngineGuided(GRAMMAR_DIR, EXP_DIR)
dataset = CitizenshipLabels(13, split='valid', vocab=inf_e.agent.train_dataset.vocab)
# dataset = CitizenshipLabels(13, split='valid')
N = 50
data, raw_prgs = self.create_data_loader(dataset)
data_loader = DataLoader(data, batch_size=1, shuffle=False)
tqdm_batch = tqdm(data_loader, total=N)
# inf_e = EngineTempered(GRAMMAR_DIR)
time_data = []
uniq_progs = set()
failed = []
num_all = 0
num_correct = 0
for i, data_list in enumerate(tqdm_batch):
program_args = (data_list[0], data_list[2])
label = data_list[3]
program = raw_prgs[i]
# import pdb; pdb.set_trace()  # leftover debugging breakpoint; disabled so the evaluation loop runs through
corr = self.infer_matches(inf_e, program, program_args, label)
if corr:
num_correct += 1
num_all += 1
# pprint(failed)
print(f'Accuracy = {num_correct/num_all}')
def infer_matches(self, inf_e, program, program_args, label, n_lim=4):
all_progs = []
for i in range(n_lim):
new_prog, new_choices = self.guided_sample(inf_e, program_args)
all_progs.append(new_prog)
# import pdb; pdb.set_trace()
# print(program)
# print(new_prog)
# print(label)
# pprint(new_choices)
# input()
# print()
return int(new_choices['correctStrategy']) == label.item()
#####################
# Private Helpers
#####################
def guided_sample(self, inf_e, program_args):
# something that will crash if accessed without setting
initAssignments = 1000000 * torch.ones(1, inf_e.model.num_nodes)
program, labels, decisions, rvOrder, rvAssignments_pred = inf_e.renderProgram(program_args, initAssignments)
# program, labels, decisions, rvOrder, rvAssignments_pred = inf_e.renderProgram()
# print(rvAssignments[0][rvOrders[0][:rvOrders_lengths[0]]])
# print(rvAssignments_pred[0][rvOrders[0][:rvOrders_lengths[0]]])
# input()
return program, decisions
def sample(self, e):
program, labels, decisions, _, _ = e.renderProgram()
return program, decisions
if __name__ == '__main__':
RubricSampler().run()
|
[
"malikali@stanford.edu"
] |
malikali@stanford.edu
|
e1c77a2d0b8f06a83ef2a11dbfb21d51589e4ba3
|
77ab53380f74c33bb3aacee8effc0e186b63c3d6
|
/5180_constrained_subset_sum.py
|
323b9c395febde884a579570cf9771cc9a0c10dc
|
[] |
no_license
|
tabletenniser/leetcode
|
8e3aa1b4df1b79364eb5ca3a97db57e0371250b6
|
d3ebbfe2e4ab87d5b44bc534984dfa453e34efbd
|
refs/heads/master
| 2023-02-23T18:14:31.577455
| 2023-02-06T07:09:54
| 2023-02-06T07:09:54
| 94,496,986
| 2
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,829
|
py
|
'''
Given an integer array nums and an integer k, return the maximum sum of a non-empty subset of that array such that for every two consecutive integers in the subset, nums[i] and nums[j], where i < j, the condition j - i <= k is satisfied.
A subset of an array is obtained by deleting some number of elements (can be zero) from the array, leaving the remaining elements in their original order.
Example 1:
Input: nums = [10,2,-10,5,20], k = 2
Output: 37
Explanation: The subset is [10, 2, 5, 20].
Example 2:
Input: nums = [-1,-2,-3], k = 1
Output: -1
Explanation: The subset must be non-empty, so we choose the largest number.
Example 3:
Input: nums = [10,-2,-10,-5,20], k = 2
Output: 23
Explanation: The subset is [10, -2, -5, 20].
Constraints:
1 <= k <= nums.length <= 10^5
-10^4 <= nums[i] <= 10^4
'''
import sys
sys.setrecursionlimit(100000)
class Solution:
def max_sum(self, nums, l, h, k):
cur_max = -99999999999999999999999999999
for k_i in range(k):
cur_sum = 0
for n in range(l+k_i, h+1, k):
cur_sum += nums[n]
cur_max = max(cur_sum, cur_max)
return cur_max
def constrainedSubsetSum(self, nums, k) -> int:
if max(nums) < 0:
return max(nums)
res = 0
n_start = None
for i in range(len(nums)):
num = nums[i]
if num >= 0:
if n_start:
if n_start != 0:
n_range_sum = self.max_sum(nums, n_start, i - 1, k)
res = max(0, res + n_range_sum)
n_start = None
res += num
else:
if not n_start:
n_start = i
return res
def rec(self, cur_ind, cur_k):
if (cur_ind, cur_k) in self.ht:
return self.ht[(cur_ind, cur_k)]
if cur_ind == 0:
return max(0, self.nums[cur_ind]) if cur_k < self.k else self.nums[cur_ind]
choose = self.rec(cur_ind - 1, 1) + self.nums[cur_ind]
res = choose
if cur_k < self.k and self.nums[cur_ind] < 0:
not_choose = self.rec(cur_ind - 1, cur_k + 1)
res = max(res, not_choose)
# print(cur_ind, cur_k, res)
self.ht[(cur_ind, cur_k)] = res
return res
def constrainedSubsetSum2(self, nums, k) -> int:
if max(nums) < 0:
return max(nums)
self.nums = nums
self.k = k
self.ht = dict()
return self.rec(len(nums)-1, 1)
s = Solution()
# nums = [10,-2,-10,-5,20]
# k = 2
# nums = [-1,-2,-3]
# k = 1
# nums =[-5266,4019,7336,-3681,-5767]
# k = 2
nums = [-8269,3217,-4023,-4138,-683,6455,-3621,9242,4015,-3790]
k = 1
# nums = [(-1)**i * i for i in range(10000)]
# k = 5000
res = s.constrainedSubsetSum(nums, k)
print(res)
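For reference (not part of the original file), the standard O(n) approach to the problem described in the docstring keeps a sliding-window maximum over the DP values with a monotonic deque; a minimal sketch, independent of the classes above:

```python
from collections import deque

def constrained_subset_sum(nums, k):
    # dp[i] = best sum of a valid subset that ends at index i
    dp = list(nums)
    window = deque()  # indices within the last k positions, dp values decreasing
    for i, num in enumerate(nums):
        if window and window[0] < i - k:
            window.popleft()                 # drop indices that fell out of the window
        if window:
            dp[i] = num + max(0, dp[window[0]])
        while window and dp[window[-1]] <= dp[i]:
            window.pop()                     # keep the deque decreasing
        window.append(i)
    return max(dp)

assert constrained_subset_sum([10, 2, -10, 5, 20], 2) == 37
assert constrained_subset_sum([-1, -2, -3], 1) == -1
```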
|
[
"tabletenniser@gmail.com"
] |
tabletenniser@gmail.com
|
51f2b3c70763ec2c0eae169ef31a20b8bdcc1dc2
|
081b33ead95b323e77bdce3717af0a5790e34a1e
|
/backend/apps/league/migrations/0001_initial.py
|
791e1d76b4e75f218d5bd9686461819c8acd89b0
|
[] |
no_license
|
alexmon1989/afliga
|
81ea3b32b18040bb8baa4e8af14a73003fb9a89f
|
661da30c0a5aa6b9975eb7dea9c9a031529d2dbb
|
refs/heads/master
| 2023-02-23T11:12:45.608118
| 2023-02-11T12:12:41
| 2023-02-11T12:12:41
| 105,630,198
| 0
| 0
| null | 2023-02-15T20:50:12
| 2017-10-03T08:36:15
|
Python
|
UTF-8
|
Python
| false
| false
| 7,519
|
py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.5 on 2017-10-26 09:05
from __future__ import unicode_literals
import django.core.validators
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='Event',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('event_time', models.IntegerField(validators=[django.core.validators.MinValueValidator(1), django.core.validators.MaxValueValidator(100)], verbose_name='Минута')),
],
),
migrations.CreateModel(
name='EventType',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('title', models.CharField(max_length=255, verbose_name='Название')),
],
),
migrations.CreateModel(
name='Group',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('title', models.CharField(max_length=255, verbose_name='Название')),
('table', models.TextField(blank=True, null=True, verbose_name='Таблица результатов')),
('created_at', models.DateTimeField(auto_now_add=True, verbose_name='Создано')),
('updated_at', models.DateTimeField(auto_now=True, verbose_name='Обновлено')),
],
),
migrations.CreateModel(
name='Match',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('match_date', models.DateTimeField(blank=True, null=True, verbose_name='Время начала матча')),
('protocol', models.TextField(blank=True, verbose_name='Протокол')),
('created_at', models.DateTimeField(auto_now_add=True, verbose_name='Создано')),
('updated_at', models.DateTimeField(auto_now=True, verbose_name='Обновлено')),
],
),
migrations.CreateModel(
name='Player',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=255, verbose_name='ФИО')),
('birth_date', models.DateField(blank=True, null=True, verbose_name='Дата рождения')),
('biography', models.TextField(blank=True, verbose_name='Биография')),
],
),
migrations.CreateModel(
name='Position',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('title', models.CharField(max_length=255, verbose_name='Название')),
],
),
migrations.CreateModel(
name='Team',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('title', models.CharField(max_length=255, verbose_name='Название')),
('description', models.TextField(blank=True, null=True, verbose_name='Описание')),
('logo', models.ImageField(blank=True, null=True, upload_to='teams', verbose_name='Логотип')),
('created_at', models.DateTimeField(auto_now_add=True, verbose_name='Создано')),
('updated_at', models.DateTimeField(auto_now=True, verbose_name='Обновлено')),
],
),
migrations.CreateModel(
name='Tournament',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('title', models.CharField(max_length=255, verbose_name='Название')),
('description', models.TextField(blank=True, null=True, verbose_name='Описание')),
('created_at', models.DateTimeField(auto_now_add=True, verbose_name='Создано')),
('updated_at', models.DateTimeField(auto_now=True, verbose_name='Обновлено')),
],
),
migrations.CreateModel(
name='TournamentPlayer',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('player', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='league.Player', verbose_name='Игрок')),
('team', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='league.Team', verbose_name='Команда')),
('tournament', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='league.Tournament', verbose_name='Турнир')),
],
),
migrations.AddField(
model_name='tournament',
name='players',
field=models.ManyToManyField(blank=True, through='league.TournamentPlayer', to='league.Player'),
),
migrations.AddField(
model_name='tournament',
name='teams',
field=models.ManyToManyField(blank=True, to='league.Team'),
),
migrations.AddField(
model_name='player',
name='position',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='league.Position', verbose_name='Амплуа'),
),
migrations.AddField(
model_name='match',
name='team_1',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='team_1', to='league.Team', verbose_name='Команда 1'),
),
migrations.AddField(
model_name='match',
name='team_2',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='team_2', to='league.Team', verbose_name='Команда 2'),
),
migrations.AddField(
model_name='group',
name='tournament',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='league.Tournament', verbose_name='Турнир'),
),
migrations.AddField(
model_name='event',
name='event_type',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='league.EventType', verbose_name='Тип события'),
),
migrations.AddField(
model_name='event',
name='match',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='league.Match', verbose_name='Матч'),
),
migrations.AddField(
model_name='event',
name='player',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='league.Player', verbose_name='Игрок'),
),
migrations.AddField(
model_name='event',
name='team',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='league.Team', verbose_name='Команда'),
),
]
|
[
"alex.mon1989@gmail.com"
] |
alex.mon1989@gmail.com
|
43c33309ccced89d49211dc1029a296d823cd9cf
|
d69a933c30e45d2bf97d4e01ca0a7956daebb089
|
/src/rdfextras/sparql2sql/bison/QName.py
|
90844bfe5fe52ca3574aa9b05adf6c9b94e334f9
|
[] |
no_license
|
agarrido/ro-manager
|
d83af466abbf9bb27952f238f4e036810a45e447
|
9229b435d67825ce71bb2a7bd9855119d4f02c58
|
refs/heads/master
| 2021-01-17T11:58:32.133634
| 2011-10-19T12:33:50
| 2011-10-19T12:33:50
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 592
|
py
|
from rdflib.term import Identifier
class QName(Identifier):
__slots__ = ("localname", "prefix")
def __new__(cls,value):
try:
inst = unicode.__new__(cls, value)
except UnicodeDecodeError:
inst = unicode.__new__(cls, value,'utf-8')
inst.prefix,inst.localname = value.split(':')
return inst
class QNamePrefix(Identifier):
def __init__(self,prefix):
super(QNamePrefix,self).__init__(prefix)
# Convenience
# from rdfextras.sparql2sql.bison.QName import QName
# from rdfextras.sparql2sql.bison.QName import QNamePrefix
|
[
"gk-github@ninebynine.org"
] |
gk-github@ninebynine.org
|
1ca683547437674bf5d5f76e7114ff272e379d08
|
3bb301688bfd5b31f762dbe3ffead0a345a09401
|
/main.py
|
7e1582dfd4c9db3de14ebdd93f488c96c0b2f990
|
[] |
no_license
|
wudangqibujie/3.-24
|
5a03ef051dfaf543d788382215610016a4514e44
|
d29a0ed76ef5bb8c2b253f775d2ad0dad0b93f4c
|
refs/heads/master
| 2021-04-15T14:39:11.034821
| 2018-03-24T12:38:13
| 2018-03-24T12:38:13
| 126,597,638
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 554
|
py
|
import master
import redis_or
import slave
from lxml import etree
# a=master.Master_Spider("shenzhen")
# html = a.get_html("https://www.xin.com/beijing/benchi/i3/")
# urls=a.get_detail_url(html)
q = redis_or.Redis_Data()
# for url in urls:
# q.set_into_data("test_car_urls",url)
for i in range(1,11):
url = q.pop_data("test_car_urls")
# print(url)
html = master.Master_Spider("shenzhen").get_html("https://"+url)
print(type(html))
a = slave.Slave_Spisder()
data = a.parse_detail_data(html)
print(data)
|
[
"noreply@github.com"
] |
wudangqibujie.noreply@github.com
|
098a19b480464b71fd30f91f96b12deccd2b1286
|
2e682fd72e3feaa70e3f7bf2a3b83c50d783ec02
|
/PyTorch/built-in/nlp/MT5_ID4146_for_PyTorch/transformers/src/transformers/models/hubert/__init__.py
|
ee06be214ac55b024c5eba4b6ae5f0f737581f55
|
[
"BSD-3-Clause",
"Apache-2.0",
"BSD-2-Clause",
"MIT",
"LicenseRef-scancode-generic-cla",
"LicenseRef-scancode-unknown-license-reference",
"GPL-1.0-or-later"
] |
permissive
|
Ascend/ModelZoo-PyTorch
|
4c89414b9e2582cef9926d4670108a090c839d2d
|
92acc188d3a0f634de58463b6676e70df83ef808
|
refs/heads/master
| 2023-07-19T12:40:00.512853
| 2023-07-17T02:48:18
| 2023-07-17T02:48:18
| 483,502,469
| 23
| 6
|
Apache-2.0
| 2022-10-15T09:29:12
| 2022-04-20T04:11:18
|
Python
|
UTF-8
|
Python
| false
| false
| 2,332
|
py
|
# flake8: noqa
# There's no way to ignore "F401 '...' imported but unused" warnings in this
# module, but to preserve other warnings. So, don't check this module at all.
# Copyright 2020 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import TYPE_CHECKING
from ...file_utils import _LazyModule, is_tf_available, is_torch_available
_import_structure = {
".wav2vec2.feature_extraction_wav2vec2": ["Wav2Vec2FeatureExtractor"],
"configuration_hubert": ["HUBERT_PRETRAINED_CONFIG_ARCHIVE_MAP", "HubertConfig"],
}
if is_torch_available():
_import_structure["modeling_hubert"] = [
"HUBERT_PRETRAINED_MODEL_ARCHIVE_LIST",
"HubertForCTC",
"HubertForSequenceClassification",
"HubertModel",
"HubertPreTrainedModel",
]
if is_tf_available():
_import_structure["modeling_tf_hubert"] = [
"TF_HUBERT_PRETRAINED_MODEL_ARCHIVE_LIST",
"TFHubertForCTC",
"TFHubertModel",
"TFHubertPreTrainedModel",
]
if TYPE_CHECKING:
from ..wav2vec2.feature_extraction_wav2vec2 import Wav2Vec2FeatureExtractor
from .configuration_hubert import HUBERT_PRETRAINED_CONFIG_ARCHIVE_MAP, HubertConfig
if is_torch_available():
from .modeling_hubert import (
HUBERT_PRETRAINED_MODEL_ARCHIVE_LIST,
HubertForCTC,
HubertForSequenceClassification,
HubertModel,
HubertPreTrainedModel,
)
if is_tf_available():
from .modeling_tf_hubert import (
TF_HUBERT_PRETRAINED_MODEL_ARCHIVE_LIST,
TFHubertForCTC,
TFHubertModel,
TFHubertPreTrainedModel,
)
else:
import sys
sys.modules[__name__] = _LazyModule(__name__, globals()["__file__"], _import_structure, module_spec=__spec__)
|
[
"wangjiangben@huawei.com"
] |
wangjiangben@huawei.com
|
a9b1421809818a57810ce42c5b458c0686040d19
|
2760effda15d884af413ca2a35809d03fabea377
|
/lc-94.py
|
d3a1d65ec14db010713ce89de3a52e712e346568
|
[] |
no_license
|
UtsavRaychaudhuri/leetcode
|
31943b98ad89d96d72ee4b6b1d1c8d70429d1e1f
|
77a13580fd6231830558b1cf8c84f8b3b62b99d0
|
refs/heads/master
| 2020-11-27T18:02:23.712639
| 2020-09-29T19:39:49
| 2020-09-29T19:39:49
| 229,552,583
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 546
|
py
|
# Definition for a binary tree node.
# class TreeNode(object):
# def __init__(self, x):
# self.val = x
# self.left = None
# self.right = None
class Solution(object):
def __init__(self):
self.my_list=[]
def inorderTraversal(self, root):
"""
:type root: TreeNode
:rtype: List[int]
"""
if root:
self.inorderTraversal(root.left)
self.my_list.append(root.val)
self.inorderTraversal(root.right)
return self.my_list
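A small usage sketch (not part of the original file), assuming the `TreeNode` class from the comment above:

```python
class TreeNode(object):
    def __init__(self, x):
        self.val = x
        self.left = None
        self.right = None

# Tree [1, null, 2, 3]: 1's right child is 2, and 2's left child is 3.
root = TreeNode(1)
root.right = TreeNode(2)
root.right.left = TreeNode(3)

print(Solution().inorderTraversal(root))  # [1, 3, 2]
```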
|
[
"utsav@pdx.edu"
] |
utsav@pdx.edu
|
e5f75c282a91efceab67452309899a392be1f731
|
de24f83a5e3768a2638ebcf13cbe717e75740168
|
/moodledata/vpl_data/303/usersdata/304/68337/submittedfiles/testes.py
|
ef1b112bb914f1155a9ca8d3cbd194b2db577544
|
[] |
no_license
|
rafaelperazzo/programacao-web
|
95643423a35c44613b0f64bed05bd34780fe2436
|
170dd5440afb9ee68a973f3de13a99aa4c735d79
|
refs/heads/master
| 2021-01-12T14:06:25.773146
| 2017-12-22T16:05:45
| 2017-12-22T16:05:45
| 69,566,344
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,807
|
py
|
# -*- coding: utf-8 -*-
# START HERE BELOW
######## CLASS OF 06/09/2017 ########
"""
idade = int(input('Digite sua idade: '))
print('A idade do indivíduo é',idade,'!')
if int(idade) < 18:
print('Você tem menos de 18 anos')
elif int(idade) == 18:
print('Você tem 18 anos')
else:
print('Você tem mais de 18 anos')
altura = float(input('Digite sua altura: '))
if float(altura) <= 1.50:
print('Você é um smurf!')
elif float(altura) >1.50 < 1.70:
print('Você é normal')
else:
print('Você é um avatar!')
razao = idade/altura
print('Razão idade/altura = %.2f' %(razao))
"""
#QUESTION 01:
"""
print(50*'-')
print('QUESTÃO 01')
print(50*'-')
n1 = float(input('Insira a nota do primeiro bimestre: '))
n2 = float(input('Insira a nota do segundo bimestre: '))
n3 = float(input('Insira a nota do terceiro bimestre: '))
n4 = float(input('Insira a nota do quarto bimestre: '))
media = float((n1+n2+n3+n4)/4)
print('A média do aluno foi de %.2f' %(media))
"""
#QUESTION 02:
"""
print(50*'-')
print('QUESTÃO 02')
print(50*'-')
a = float(input('Insira metros para a conversão: '))
b = (a*100)
print(a,'metros convertido em centímetros = %.0f' %(b))
"""
#QUESTION 03:
"""
print(50*'-')
print('QUESTÃO 03')
print(50*'-')
altura = float(input('Insira a sua altura: '))
peso = float(input('Insira o seu peso: '))
pi = (72.7*altura)-58
print('O seu peso atual é de %.2f quilos e o peso ideal para a sua altura é de %.2f quilos' %(peso,pi))
"""
#QUESTION 04:
"""
print(50*'-')
print('QUESTÃO 04')
print(50*'-')
raio = float(input('Insira o raio do círculo em centímetros: '))
area = (3.14159*raio**2)
print('A área do círculo é de %.2f centímetros' %(area))
"""
x = 1
x + 2
x = x + 1
print (x)
|
[
"rafael.mota@ufca.edu.br"
] |
rafael.mota@ufca.edu.br
|
021870ba7e83cd49d113645f14d918b18cc9157b
|
a9063fd669162d4ce0e1d6cd2e35974274851547
|
/test/test_inline_response20072_plan_base.py
|
a7ee91720c79393327a18f8d2b9bb2083c8bfc00
|
[] |
no_license
|
rootalley/py-zoom-api
|
9d29a8c750e110f7bd9b65ff7301af27e8518a3d
|
bfebf3aa7b714dcac78be7c0affb9050bbce8641
|
refs/heads/master
| 2022-11-07T14:09:59.134600
| 2020-06-20T18:13:50
| 2020-06-20T18:13:50
| 273,760,906
| 1
| 3
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,508
|
py
|
# coding: utf-8
"""
Zoom API
The Zoom API allows developers to safely and securely access information from Zoom. You can use this API to build private services or public applications on the [Zoom App Marketplace](http://marketplace.zoom.us). To learn how to get your credentials and create private/public applications, read our [Authorization Guide](https://marketplace.zoom.us/docs/guides/authorization/credentials). All endpoints are available via `https` and are located at `api.zoom.us/v2/`. For instance you can list all users on an account via `https://api.zoom.us/v2/users/`. # noqa: E501
OpenAPI spec version: 2.0.0
Contact: developersupport@zoom.us
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import unittest
import swagger_client
from models.inline_response20072_plan_base import InlineResponse20072PlanBase # noqa: E501
from swagger_client.rest import ApiException
class TestInlineResponse20072PlanBase(unittest.TestCase):
"""InlineResponse20072PlanBase unit test stubs"""
def setUp(self):
pass
def tearDown(self):
pass
def testInlineResponse20072PlanBase(self):
"""Test InlineResponse20072PlanBase"""
# FIXME: construct object with mandatory attributes with example values
# model = swagger_client.models.inline_response20072_plan_base.InlineResponse20072PlanBase() # noqa: E501
pass
if __name__ == '__main__':
unittest.main()
|
[
"github@rootalley.com"
] |
github@rootalley.com
|
85c0f14b868a22e23e5e3fd51fe3cc1e84759e72
|
5d3a6f5d1731d32479c3cd65748c58eefa614b07
|
/tests/test_models.py
|
8bef9f492bb207d37d8955f5b49e2f5693b413af
|
[] |
permissive
|
timgates42/django-rest-localflavor
|
fde435f0f07775ccf48187f68e7f29ad9d2a793f
|
844f86dbed5be126706b8d65678ed7e1fc9cfed0
|
refs/heads/master
| 2023-03-17T08:04:59.020065
| 2020-12-30T12:25:36
| 2020-12-30T12:25:36
| 246,013,020
| 0
| 0
|
BSD-3-Clause
| 2020-03-09T11:04:03
| 2020-03-09T11:04:02
| null |
UTF-8
|
Python
| false
| false
| 377
|
py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_django-rest-localflavor
------------
Tests for `django-rest-localflavor` models module.
"""
from django.test import TestCase
from rest_localflavor import models
class TestRest_localflavor(TestCase):
def setUp(self):
pass
def test_something(self):
pass
def tearDown(self):
pass
|
[
"me@gilsondev.in"
] |
me@gilsondev.in
|
1644cc8eed28893ea0314b6a735bb23190e45faa
|
8a38bb4e40a78afc69eed06c3d88d45e5995a336
|
/jax/experimental/gda_serialization/serialization.py
|
438f108293d7b1dc0bc9a3f97dc7ce9aad711ed9
|
[
"Apache-2.0",
"LicenseRef-scancode-warranty-disclaimer"
] |
permissive
|
MichaelMarien/jax
|
979a808ab6ea1698d2f03b1e9aeb826f59955543
|
bf3c658114703e955f0b06642c53c6b64c5b2df3
|
refs/heads/master
| 2023-02-13T00:45:04.056207
| 2022-02-15T19:25:26
| 2022-02-15T19:25:26
| 216,413,853
| 0
| 0
|
Apache-2.0
| 2023-02-06T07:02:48
| 2019-10-20T19:13:38
|
Python
|
UTF-8
|
Python
| false
| false
| 4,185
|
py
|
# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""GlobalDeviceArray serialization and deserialization."""
import asyncio
import re
from typing import Callable
import jax
from jax.experimental import global_device_array as gda
from jax.experimental.maps import Mesh
import jax.numpy as jnp
import numpy as np
import tensorstore as ts
async def create_async_gda_from_callback(
global_shape: gda.Shape,
global_mesh: Mesh,
mesh_axes: gda.MeshAxes,
data_callback: Callable[[gda.Index], asyncio.Future],
):
global_idx_rid = gda.get_shard_indices_replica_ids(
global_shape, global_mesh, mesh_axes)
local_devices = global_mesh.local_devices
future_arrays = [data_callback(global_idx_rid[d][0])
for d in local_devices]
# Pause here and come back to `from_async_callback()` when future_arrays are
# ready. device_put cannot happen with future_arrays.
local_arrays = await asyncio.gather(*future_arrays)
dbs = [jax.device_put(array, device)
for array, device in zip(local_arrays, local_devices)]
return gda.GlobalDeviceArray(global_shape, global_mesh, mesh_axes, dbs,
gda._GdaFastPathArgs(global_idx_rid, local_devices))
def _get_metadata(gda):
if gda.dtype == jnp.bfloat16:
# Tensorstore uses 'bfloat16', not '<V2'.
dtype = 'bfloat16'
else:
dtype = np.dtype(gda.dtype).str
return {
'compressor': {
'id': 'gzip'
},
'shape': gda.shape,
'chunks': np.array(np.maximum(1, gda.local_data(0).shape)),
'dtype': dtype,
}
def get_tensorstore_spec(ckpt_path: str):
spec = {'driver': 'zarr', 'kvstore': {}}
if ckpt_path.startswith('gs://'):
m = re.fullmatch('^gs://([^/]*)/(.*)$', ckpt_path, re.DOTALL)
if m is None:
raise ValueError('The ckpt_path should contain the bucket name and the '
f'file path inside the bucket. Got: {ckpt_path}')
gcs_bucket = m.group(1)
path_without_bucket = m.group(2)
spec['kvstore'] = {'driver': 'gcs', 'bucket': gcs_bucket,
'path': path_without_bucket}
else:
spec['kvstore'] = {'driver': 'file', 'path': ckpt_path}
return spec
async def async_serialize(gda_inp: gda.GlobalDeviceArray, tensorstore_spec):
if not tensorstore_spec.get('metadata'):
tensorstore_spec['metadata'] = _get_metadata(gda_inp)
t = await ts.open(
ts.Spec(tensorstore_spec),
create=True,
open=True,
context=ts.Context({'file_io_concurrency': {
'limit': 128
}}))
async def _write_array(shard):
if shard.replica_id == 0:
await t[shard.index].write(shard.data)
future_write_state = jax.tree_util.tree_map(_write_array,
tuple(gda_inp.local_shards))
return await asyncio.gather(*future_write_state)
def run_serialization(gdas, tensorstore_specs):
async def _run_serializer():
future_writer = jax.tree_map(async_serialize, gdas, tensorstore_specs)
return await asyncio.gather(*future_writer)
asyncio.run(_run_serializer())
async def async_deserialize(mesh, mesh_axes, tensorstore_spec):
t = ts.open(ts.Spec(tensorstore_spec), open=True).result()
async def cb(index):
return await t[index].read()
return await create_async_gda_from_callback(t.shape, mesh, mesh_axes, cb)
def run_deserialization(global_meshes, mesh_axes, tensorstore_specs):
async def _run_deserializer():
future_gdas = jax.tree_map(async_deserialize, global_meshes, mesh_axes,
tensorstore_specs)
return await asyncio.gather(*future_gdas)
return asyncio.run(_run_deserializer())
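# Hedged usage sketch (added for illustration; `gda_inp`, `global_mesh` and `mesh_axes`
# are placeholders for an existing GlobalDeviceArray and its mesh, not defined here):
#   spec = get_tensorstore_spec('/tmp/ckpt/arr0')                  # local 'file' kvstore
#   run_serialization([gda_inp], [spec])                           # blocking write
#   restored, = run_deserialization([global_mesh], [mesh_axes], [spec])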
|
[
"no-reply@google.com"
] |
no-reply@google.com
|
1fccb8fb07eda6f838b5b01854800df046dde04d
|
01548099ec20976d31cca7a720102c11c56fc9be
|
/scripts/handle_mysql.py
|
e9b295986da2e42b703e109bceae62db0fa7fd71
|
[] |
no_license
|
OMEN001/Lemon_Api_Test
|
6a27a9a9ccf28623006b465a107d53b17ad30404
|
373c9f1a1f1f3160bbe8edcc4b5740f9779947ae
|
refs/heads/master
| 2023-02-25T22:31:39.824908
| 2021-01-24T14:22:52
| 2021-01-24T14:22:52
| 329,324,658
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,651
|
py
|
# -*- coding: utf-8 -*-
# @Time : BaLiang
# @Author : 86187
import pymysql
import random
from scripts.handle_yaml import do_yaml
class HandleMysql:
def __init__(self):
# create the connection object
self.conn = pymysql.connect(host=do_yaml.read('mysql', 'host'), # mysql server IP or domain name
user=do_yaml.read('mysql', 'user'), # user name
password=do_yaml.read('mysql', 'password'),
db=do_yaml.read('mysql', 'db'), # name of the database to connect to
port=do_yaml.read('mysql', 'port'), # database port, defaults to 3306 (int)
charset='utf8', # database encoding must be 'utf8', not 'utf-8'
# by default results are returned as tuples or lists of nested tuples
# setting cursorclass to DictCursor makes it return dicts or lists of nested dicts instead
cursorclass=pymysql.cursors.DictCursor)
# create the cursor object
self.cursor = self.conn.cursor()
# execute a SQL statement
def run(self,sql,args=None,is_more=False):
# execute the sql through the cursor object
self.cursor.execute(sql,args)
# commit through the connection object
self.conn.commit()
if is_more:
return self.cursor.fetchall()
else:
return self.cursor.fetchone()
@staticmethod
def create_mobile():
# def create_mobile(self): self is not used here, so a static method is used instead (instance methods of the class can still call it)
"""Generate an 11-digit mobile number string."""
# return "183" + "".join(random.randint(10000000,99999999))
return "183" + "".join(random.sample("0123456789",8))
def is_existed_mobile(self, mobile):
"""判断收集好是否被注册"""
sql = do_yaml.read("mysql","sql")
if self.run(sql, args=[mobile]):
return True
else:
return False
def create_not_existed_mobile(self):
"""生成一个不存在的电话号码"""
one_mobile = self.create_mobile()
while True:
if not self.is_existed_mobile(one_mobile):
break
return one_mobile
def close(self):
# close the cursor object
self.cursor.close()
# close the connection object
self.conn.close()
if __name__ == '__main__':
sql = "select max(id) from member;"
do_mysql = HandleMysql()
max_id = do_mysql.run(sql)
print(max_id["max(id)"] + 1)
do_mysql.close()
|
[
"1668317403@qq.com"
] |
1668317403@qq.com
|
5d8433435bfdcf5f87e885ef711187b442bf55db
|
ecd9cbfa2c30e1bc39cf442e3302c4cb3cf1ea03
|
/bin/calculate_phylocsf.py
|
767ee44799121f64903399942d84fbb0d1288a98
|
[] |
no_license
|
lixin856/proteogenomics-analysis-workflow
|
087c84971ab8386ef06d031244da37fd895e82f5
|
0b0a02dfb93cc5806ef9c951d1674d82f454ec29
|
refs/heads/master
| 2023-05-01T17:13:44.293272
| 2021-05-11T07:23:14
| 2021-05-11T07:23:14
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,579
|
py
|
#!/usr/bin/env python3
'''
the script is modified from Mikael Hussius @ SciLifeLab, https://github.com/hussius/gff-phylocsf-human
download the following bigwig files first
# wget https://data.broadinstitute.org/compbio1/PhyloCSFtracks/hg19/latest/PhyloCSF+0.bw
# wget https://data.broadinstitute.org/compbio1/PhyloCSFtracks/hg19/latest/PhyloCSF+1.bw
# wget https://data.broadinstitute.org/compbio1/PhyloCSFtracks/hg19/latest/PhyloCSF+2.bw
# wget https://data.broadinstitute.org/compbio1/PhyloCSFtracks/hg19/latest/PhyloCSF-0.bw
# wget https://data.broadinstitute.org/compbio1/PhyloCSFtracks/hg19/latest/PhyloCSF-1.bw
# wget https://data.broadinstitute.org/compbio1/PhyloCSFtracks/hg19/latest/PhyloCSF-2.bw
'''
import sys
import os
import pyBigWig as pw
import numpy as np
def predict_coding(vec):
coding = "OTHER"
for v in vec:
if not v: continue
if v > 0: coding = "CODING"
return(coding)
if len(sys.argv)<4:
sys.exit("USAGE: python " + sys.argv[0] + "<GFF file> <BigWig file path> <output file>")
infile = sys.argv[1]
bw_file_path = sys.argv[2]
outfile = sys.argv[3]
regs = []
chrom={}
starts={}
ends={}
peptide={}
for line in open(infile):
if not line.startswith("chr"):
continue
fields = line.strip().split()
(chr, start, end, pept) = (fields[0], fields[3], fields[4], fields[8])
if not pept.startswith("Parent="): continue
name = chr+":"+start+"-"+end
chrom[name]=chr
starts[name]=int(start)
ends[name]=int(end)
peptide[name]=pept.split("=")[1]
regs.append(name)
scores = {}
rpathbase = os.path.join(bw_file_path,"PhyloCSF")
for rf in ["+0","+1","+2","+3","-0","-1","-2","-3"]:
rpath = rpathbase + rf + ".bw"
if os.path.isfile(rpath):
sys.stderr.write("Searching PhyloCSF reading frame " + rf + "\n")
bw = pw.open(rpath)
frame_score = {}
count = 0
for r in regs:
count += 1
if(count % 50 ==0): sys.stderr.write('\tProcessed ' + str(count) + " peptides out of " + str(len(regs)) + "\n")
sys.stderr.flush()
score = None # default so a failed lookup does not silently reuse the previous region's score
try:
score = bw.stats(chrom[r], starts[r], ends[r])[0]
except RuntimeError:
pass
frame_score[r] = score
scores[rf] = frame_score
bw.close()
else:
sys.stderr.write("%s doesn't exist \n" % rpath)
output = open(outfile,"w")
output.write("\t".join(["Bare peptide","PhyloCSF+0.score","PhyloCSF+1.score","PhyloCSF+2.score","PhyloCSF-0.score","PhyloCSF-1.score","PhyloCSF-2.score","PhyloCSF_prediction"])+"\n")
pep_scores={}
for r in regs:
scoreList = [scores["+0"][r], scores["+1"][r], scores["+2"][r], scores["-0"][r], scores["-1"][r], scores["-2"][r]]
seq = peptide[r]
if seq not in pep_scores:
pep_scores[seq]=scoreList
else: # this is to consider splice junction peptides which have two regions separated in gff file, we take mean phylocsf score of two regions
for i in range(0,len(scoreList)):
value = scoreList[i]
if value is None and pep_scores[seq][i] is None:
continue
elif None in [value, pep_scores[seq][i]]:
pep_scores[seq][i] = value if value else pep_scores[seq][i]
else:
pep_scores[seq][i] = (pep_scores[seq][i] + value)/2
for seq in pep_scores:
scoreList = pep_scores[seq]
row = [seq]+['NA' if x is None else str(x) for x in scoreList] + [predict_coding(scoreList)]
output.write('\t'.join(row) + '\n')
|
[
"jorrit.boekel@scilifelab.se"
] |
jorrit.boekel@scilifelab.se
|
490c3ff06f2d1f0ec763b0b33cbe461f3ce4c015
|
c5c95aee3c04ab89f1aa3505f45768d15994be53
|
/myScript.spec
|
bac2683370cf6c59b42d84e6187d2d5590501df2
|
[] |
no_license
|
snagavamsi123/Banking-Bot
|
16f55a9cad523ca63fb1bf47a1a802236adcd164
|
9745e51a2c3c8a2cf614d3052a3577fce40a74c0
|
refs/heads/master
| 2023-03-27T18:54:35.585541
| 2021-04-03T11:31:49
| 2021-04-03T11:31:49
| 354,273,629
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 920
|
spec
|
# -*- mode: python ; coding: utf-8 -*-
block_cipher = None
a = Analysis(['myScript.py'],
pathex=['C:\\Projects @tp\\Banking Bot\\BankingBot (1)\\BankingBot'],
binaries=[],
datas=[],
hiddenimports=['pandas._libs.tslibs.timedeltas'],
hookspath=[],
runtime_hooks=[],
excludes=[],
win_no_prefer_redirects=False,
win_private_assemblies=False,
cipher=block_cipher,
noarchive=False)
pyz = PYZ(a.pure, a.zipped_data,
cipher=block_cipher)
exe = EXE(pyz,
a.scripts,
a.binaries,
a.zipfiles,
a.datas,
[],
name='myScript',
debug=False,
bootloader_ignore_signals=False,
strip=False,
upx=True,
upx_exclude=[],
runtime_tmpdir=None,
console=True )
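# Hedged note (added): a spec file like this is normally consumed directly, e.g.
#   pyinstaller myScript.spec
# rather than re-running pyinstaller on myScript.py, so the hiddenimports entry above
# (pandas._libs.tslibs.timedeltas) is actually honoured by the build.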
|
[
"snagavamsi123@gmail.com"
] |
snagavamsi123@gmail.com
|
f9610abf23be1a086309f79e6e949e757fd77148
|
487ce91881032c1de16e35ed8bc187d6034205f7
|
/codes/CodeJamCrawler/16_0_2/Remi05/RevengeOfPancakes.py
|
fd169dd8bcbd41b26d7a608b5f9048070362f40d
|
[] |
no_license
|
DaHuO/Supergraph
|
9cd26d8c5a081803015d93cf5f2674009e92ef7e
|
c88059dc66297af577ad2b8afa4e0ac0ad622915
|
refs/heads/master
| 2021-06-14T16:07:52.405091
| 2016-08-21T13:39:13
| 2016-08-21T13:39:13
| 49,829,508
| 2
| 0
| null | 2021-03-19T21:55:46
| 2016-01-17T18:23:00
|
Python
|
UTF-8
|
Python
| false
| false
| 824
|
py
|
import sys
import itertools
def formatOutput(n, result):
return 'Case #' + str(n) + ': ' + result + '\n'
file = open(sys.argv[1])
output = open('output.txt', 'w')
nTests = int(file.readline())
HAPPY = '+'
SAD = '-'
testNb = 1
for line in itertools.islice(file, 0, nTests+1):
stack = line.replace('\n', '')
nFlips = 0
while SAD in stack:
sPos = stack.index(SAD)
if sPos != 0:
stack = (sPos)*SAD + stack[sPos+1:]
nFlips += 1
if HAPPY in stack:
hPos = stack.index(HAPPY)
stack = (hPos)*HAPPY + stack[hPos+1:]
nFlips += 1
else:
stack = stack.replace(SAD, HAPPY)
nFlips += 1
output.write(formatOutput(testNb, str(nFlips)))
testNb += 1
|
[
"[dhuo@tcd.ie]"
] |
[dhuo@tcd.ie]
|
494970626e046ba086e9f9147e5adae928f73dd6
|
6b2a8dd202fdce77c971c412717e305e1caaac51
|
/solutions_2464487_1/Python/Andrew510/prob1.py
|
c9fc74984a7c57248747efda818c0b13b2a96b5c
|
[] |
no_license
|
alexandraback/datacollection
|
0bc67a9ace00abbc843f4912562f3a064992e0e9
|
076a7bc7693f3abf07bfdbdac838cb4ef65ccfcf
|
refs/heads/master
| 2021-01-24T18:27:24.417992
| 2017-05-23T09:23:38
| 2017-05-23T09:23:38
| 84,313,442
| 2
| 4
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 455
|
py
|
#!/usr/bin/python
import math
import sys
infile = sys.stdin
T = int(infile.readline())
for n in range(T):
r, t = map(int, infile.readline().split())
b = 2*r - 1
xx = int(math.sqrt(b*b + 8*t))
sol = (-b + xx) / 4
#XXX
need = 2*sol*sol + sol*b
#print need, t
while need > t:
sol -= 1
need = 2*sol*sol + sol*b
if sol < 0:
sol = 0
print 'Case #%d: %d' % (n+1, sol)
|
[
"eewestman@gmail.com"
] |
eewestman@gmail.com
|
d103f044e4b88a589318d2a3495451a5c29d7e4e
|
b19f1a0ed3b26f0b5cbc2a0b7db2141edc955901
|
/auto_test/Case_rbm/ftp_check_user/message.py
|
b6bd2eb9ef1d8977d9c1f2fee0b164c6ac7656c4
|
[] |
no_license
|
wangqian0818/auto_test_platform
|
5a1f04cbf6007e6ff3dbb74e838981de53491526
|
64e32099ac2d79fb70d3727b085465aac0e49d3f
|
refs/heads/master
| 2023-04-26T23:39:04.232001
| 2021-05-22T12:23:02
| 2021-05-22T12:23:02
| 368,116,381
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,098
|
py
|
import time
from common import baseinfo
from ftp_check_user import index
datatime = time.strftime('%Y-%m-%d %H:%M:%S',time.localtime(time.time()))
proxy_ip = baseinfo.gwServerIp
ftp_ip = baseinfo.ftp_ip
username = index.username
case2_allow_user = index.case2_allow_user
addftp = {
'AddAgent':{
"MethodName":"AddAgent",
"MessageTime":datatime,
"Sender":"Centre0",
"Content":[{
"InProtocol":"ftp",
"Type":2,
"InPort":8887,
"domain":"all",
"SyncId":87,
"OutAddr":[{"OutPort":21,"OutIp":ftp_ip}],
"InIp":proxy_ip
}]
}}
delftp = {
'DelAgent':{
"MethodName":"DelAgent",
"MessageTime":datatime,
"Sender":"Centre0",
"Content":[{
"InProtocol":"ftp",
"Type":2,
"InPort":8887,
"domain":"all",
"SyncId":87,
"OutAddr":[{"OutPort":21,"OutIp":ftp_ip}],
"InIp":proxy_ip
}]}
}
ftpcheck1 = {'SetFtpCheck':{
"MethodName":"SetFtpCheck",
"MessageTime":datatime,
"Sender":"Centre0",
"Content":[{
"Type":"user","DataCheck":username}
]}
}
ftpcheck2 = {'SetFtpCheck':{
"MethodName":"SetFtpCheck",
"MessageTime":datatime,
"Sender":"Centre0",
"Content":[{
"Type":"user","DataCheck":f'{username};{case2_allow_user}'}
]}
}
|
[
"wangqianjob0818@163.com"
] |
wangqianjob0818@163.com
|
1a8534c747ce64a7d53d310af6a6610ca5a802de
|
b7dd07413c05a13207988535b755b7d28dbc5663
|
/Chapter_11/name_function.py
|
ff49f2a5f8b4c33b8d0e85a0b57ace81dc09e227
|
[] |
no_license
|
GrnTeaLatte/AlienInvasion
|
b671a87cd730c3d4b31a8e8d760d2d02d576cfb3
|
d60e8e65adb79e54a1e1c579825827355a7e85ea
|
refs/heads/main
| 2023-02-26T03:55:26.799446
| 2020-11-03T00:42:06
| 2020-11-03T00:42:06
| 336,111,408
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 222
|
py
|
def get_formatted_name(first, last, middle=''):
"""Generate a neatly formatted full name."""
if middle:
full_name = first + ' ' + middle + ' ' + last
else:
full_name = first + ' ' + last
return full_name.title()
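# Hedged usage sketch (added for illustration; the book normally drives this from a
# separate test_name_function.py with pytest):
if __name__ == '__main__':
print(get_formatted_name('janis', 'joplin')) # Janis Joplin
print(get_formatted_name('wolfgang', 'mozart', 'amadeus')) # Wolfgang Amadeus Mozart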
|
[
"audreyfu14@gmail.com"
] |
audreyfu14@gmail.com
|
7cd000559400fe32194070d58464cca0aa9ce297
|
bdc0b8809d52933c10f8eb77442bd0b4453f28f9
|
/build/nav_msgs/rosidl_generator_py/nav_msgs/msg/_map_meta_data.py
|
df3dc46c9436798500483901a5ad8cffd380adde
|
[] |
no_license
|
ClaytonCalabrese/BuiltRos2Eloquent
|
967f688bbca746097016dbd34563716bd98379e3
|
76bca564bfd73ef73485e5c7c48274889032e408
|
refs/heads/master
| 2021-03-27T22:42:12.976367
| 2020-03-17T14:24:07
| 2020-03-17T14:24:07
| 247,810,969
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 7,517
|
py
|
# generated from rosidl_generator_py/resource/_idl.py.em
# with input from nav_msgs:msg/MapMetaData.idl
# generated code does not contain a copyright notice
# Import statements for member types
import rosidl_parser.definition # noqa: E402, I100
class Metaclass_MapMetaData(type):
"""Metaclass of message 'MapMetaData'."""
_CREATE_ROS_MESSAGE = None
_CONVERT_FROM_PY = None
_CONVERT_TO_PY = None
_DESTROY_ROS_MESSAGE = None
_TYPE_SUPPORT = None
__constants = {
}
@classmethod
def __import_type_support__(cls):
try:
from rosidl_generator_py import import_type_support
module = import_type_support('nav_msgs')
except ImportError:
import logging
import traceback
logger = logging.getLogger(
'nav_msgs.msg.MapMetaData')
logger.debug(
'Failed to import needed modules for type support:\n' +
traceback.format_exc())
else:
cls._CREATE_ROS_MESSAGE = module.create_ros_message_msg__msg__map_meta_data
cls._CONVERT_FROM_PY = module.convert_from_py_msg__msg__map_meta_data
cls._CONVERT_TO_PY = module.convert_to_py_msg__msg__map_meta_data
cls._TYPE_SUPPORT = module.type_support_msg__msg__map_meta_data
cls._DESTROY_ROS_MESSAGE = module.destroy_ros_message_msg__msg__map_meta_data
from builtin_interfaces.msg import Time
if Time.__class__._TYPE_SUPPORT is None:
Time.__class__.__import_type_support__()
from geometry_msgs.msg import Pose
if Pose.__class__._TYPE_SUPPORT is None:
Pose.__class__.__import_type_support__()
@classmethod
def __prepare__(cls, name, bases, **kwargs):
# list constant names here so that they appear in the help text of
# the message class under "Data and other attributes defined here:"
# as well as populate each message instance
return {
}
class MapMetaData(metaclass=Metaclass_MapMetaData):
"""Message class 'MapMetaData'."""
__slots__ = [
'_map_load_time',
'_resolution',
'_width',
'_height',
'_origin',
]
_fields_and_field_types = {
'map_load_time': 'builtin_interfaces/Time',
'resolution': 'float',
'width': 'uint32',
'height': 'uint32',
'origin': 'geometry_msgs/Pose',
}
SLOT_TYPES = (
rosidl_parser.definition.NamespacedType(['builtin_interfaces', 'msg'], 'Time'), # noqa: E501
rosidl_parser.definition.BasicType('float'), # noqa: E501
rosidl_parser.definition.BasicType('uint32'), # noqa: E501
rosidl_parser.definition.BasicType('uint32'), # noqa: E501
rosidl_parser.definition.NamespacedType(['geometry_msgs', 'msg'], 'Pose'), # noqa: E501
)
def __init__(self, **kwargs):
assert all('_' + key in self.__slots__ for key in kwargs.keys()), \
'Invalid arguments passed to constructor: %s' % \
', '.join(sorted(k for k in kwargs.keys() if '_' + k not in self.__slots__))
from builtin_interfaces.msg import Time
self.map_load_time = kwargs.get('map_load_time', Time())
self.resolution = kwargs.get('resolution', float())
self.width = kwargs.get('width', int())
self.height = kwargs.get('height', int())
from geometry_msgs.msg import Pose
self.origin = kwargs.get('origin', Pose())
def __repr__(self):
typename = self.__class__.__module__.split('.')
typename.pop()
typename.append(self.__class__.__name__)
args = []
for s, t in zip(self.__slots__, self.SLOT_TYPES):
field = getattr(self, s)
fieldstr = repr(field)
# We use Python array type for fields that can be directly stored
# in them, and "normal" sequences for everything else. If it is
# a type that we store in an array, strip off the 'array' portion.
if (
isinstance(t, rosidl_parser.definition.AbstractSequence) and
isinstance(t.value_type, rosidl_parser.definition.BasicType) and
t.value_type.typename in ['float', 'double', 'int8', 'uint8', 'int16', 'uint16', 'int32', 'uint32', 'int64', 'uint64']
):
if len(field) == 0:
fieldstr = '[]'
else:
assert fieldstr.startswith('array(')
prefix = "array('X', "
suffix = ')'
fieldstr = fieldstr[len(prefix):-len(suffix)]
args.append(s[1:] + '=' + fieldstr)
return '%s(%s)' % ('.'.join(typename), ', '.join(args))
def __eq__(self, other):
if not isinstance(other, self.__class__):
return False
if self.map_load_time != other.map_load_time:
return False
if self.resolution != other.resolution:
return False
if self.width != other.width:
return False
if self.height != other.height:
return False
if self.origin != other.origin:
return False
return True
@classmethod
def get_fields_and_field_types(cls):
from copy import copy
return copy(cls._fields_and_field_types)
@property
def map_load_time(self):
"""Message field 'map_load_time'."""
return self._map_load_time
@map_load_time.setter
def map_load_time(self, value):
if __debug__:
from builtin_interfaces.msg import Time
assert \
isinstance(value, Time), \
"The 'map_load_time' field must be a sub message of type 'Time'"
self._map_load_time = value
@property
def resolution(self):
"""Message field 'resolution'."""
return self._resolution
@resolution.setter
def resolution(self, value):
if __debug__:
assert \
isinstance(value, float), \
"The 'resolution' field must be of type 'float'"
self._resolution = value
@property
def width(self):
"""Message field 'width'."""
return self._width
@width.setter
def width(self, value):
if __debug__:
assert \
isinstance(value, int), \
"The 'width' field must be of type 'int'"
assert value >= 0 and value < 4294967296, \
"The 'width' field must be an unsigned integer in [0, 4294967295]"
self._width = value
@property
def height(self):
"""Message field 'height'."""
return self._height
@height.setter
def height(self, value):
if __debug__:
assert \
isinstance(value, int), \
"The 'height' field must be of type 'int'"
assert value >= 0 and value < 4294967296, \
"The 'height' field must be an unsigned integer in [0, 4294967295]"
self._height = value
@property
def origin(self):
"""Message field 'origin'."""
return self._origin
@origin.setter
def origin(self, value):
if __debug__:
from geometry_msgs.msg import Pose
assert \
isinstance(value, Pose), \
"The 'origin' field must be a sub message of type 'Pose'"
self._origin = value
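# Hedged usage sketch (added; assumes a sourced ROS 2 environment so the
# builtin_interfaces/geometry_msgs imports above resolve):
#   meta = MapMetaData(resolution=0.05, width=384, height=384)
#   meta.origin.position.x = -9.6
#   print(meta)   # MapMetaData(map_load_time=..., resolution=0.05, width=384, height=384, origin=...)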
|
[
"calabreseclayton@gmail.com"
] |
calabreseclayton@gmail.com
|
075ab43e3a0a41cba1a3ddc5516d4f2be053423a
|
bb2120075f05463160e7ad9a3986f18848733563
|
/tests/test_vec2d.py
|
aabcdfc7d592e0bd89fc791e9d20daec1d79e802
|
[
"MIT"
] |
permissive
|
pablodiegoss/pytaon
|
b443d6a86773c691383aa607b7b1497bcfa7bc3b
|
0c5def531249331871197377338471521cf8de2c
|
refs/heads/master
| 2022-12-31T10:51:13.762691
| 2020-10-07T18:09:24
| 2020-10-07T18:09:24
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 5,504
|
py
|
"""
Test module for the Vec2d class.
Uses the vectors u = <3,4>, v = <1,1>, ii = <1,0> and jj = <0,1> defined in conftest.
"""
import pytest
import random
from math import pi, sqrt
from pytaon import Vec2d
def similar(x, y, tol=1e-6):
return abs(x - y) <= tol
class TestVec2d:
def test_angle(self, u, v):
assert u.angle > v.angle
assert similar(u.angle, pi / 4)
assert similar(u.angle_degrees, 45)
def test_angle_setter(self, u, v):
u.angle = 0.0
assert similar(u.x, 5.0)
assert similar(u.y, 0.0)
u.angle_degrees = 45.0
assert similar(u.x, 2.5 * sqrt(2))
assert similar(u.y, 2.5 * sqrt(2))
u.angle = pi / 4
assert similar(u.x, 2.5 * sqrt(2))
assert similar(u.y, 2.5 * sqrt(2))
def test_length(self, u, v):
assert u.length > v.length
assert u.length == 5
assert u.length_sqrd == 25
def test_length_setter(self, u):
x, y = u
u.length *= 2
assert similar(u.x, 2 * x)
assert similar(u.y, 2 * y)
def test_algebraic_operations(self, u, v):
assert u + v == Vec2d(u.x + v.x, u.y + v.y)
assert u - v == Vec2d(u.x - v.x, u.y - v.y)
assert u * 2 == Vec2d(2 * u.x, 2 * u.y)
assert 2 * u == Vec2d(2 * u.x, 2 * u.y)
assert u / 2 == Vec2d(u.x / 2, u.y / 2)
def test_algebraic_operations_with_tuples(self, u, v):
U, V = map(tuple, (u, v))
assert u + v == u + V == U + v
assert u - v == u - V == U - v
def test_neg_and_pos(self, u):
assert (-u) == u * (-1)
assert (+u) == u * (+1)
assert +u is not u
def test_inplace(self, u):
u_orig = u
u += (1, 1)
assert u == Vec2d(4, 5)
assert u is u_orig
u -= (1, 1)
assert u == Vec2d(3, 4)
assert u is u_orig
u *= 2
assert u == Vec2d(6, 8)
assert u is u_orig
u /= 2
assert u == Vec2d(3, 4)
assert u is u_orig
def test_item_getter(self, u, v):
for u in [u, v]:
assert u[0] == u.x
assert u[1] == u.y
def test_item_setter(self, u):
u[0] = n = random.random()
u[1] = m = random.random()
assert u.x == n
assert u.y == m
def test_item_raises_index_error(self, u):
with pytest.raises(IndexError):
u[2]
with pytest.raises(IndexError):
u[2] = 0.0
def test_cross_product(self, u, v):
V = tuple(v)
assert similar(u.cross(v), -v.cross(u))
assert similar(u.cross(v), -1)
assert u.cross(v) == u.cross(V)
def test_dot_product(self, u, v):
V = tuple(v)
assert u.dot(v) == 7.0
assert u.dot(v) == u.dot(V)
def test_get_angle_between(self, ii, v):
II = tuple(ii)
assert v.get_angle_between(v) == 0.0
assert similar(v.get_angle_between((-1) * v), pi)
assert v.get_angle_degrees_between(v) == 0.0
assert similar(v.get_angle_degrees_between((-1) * v), 180)
assert v.get_angle_between(ii) == v.get_angle_between(II)
assert similar(v.get_angle_between(ii), pi / 4)
assert similar(v.get_angle_degrees_between(ii), 45)
def test_get_distance(self, u, v):
assert similar(u.get_distance(v), sqrt(u.get_dist_sqrd(v)))
assert similar(u.get_distance(v), sqrt(13))
assert similar(u.get_dist_sqrd(v), 13)
def test_get_distance_accepts_tuples(self, u, v):
U, V = map(tuple, (u, v))
assert similar(u.get_distance(v), u.get_distance(V))
assert similar(u.get_dist_sqrd(v), u.get_dist_sqrd(V))
def test_normalized(self, u):
assert similar(u.normalized().length, 1)
assert similar(u.normalized().angle, u.angle)
def test_normalized_return_length(self, u):
angle, length = u.angle, u.length
assert similar(u.normalize_return_length(), length)
assert similar(u.angle, angle)
def test_interpolate_to(self, u, v):
assert similar(u.interpolate_to(v, 0), u)
assert similar(u.interpolate_to(v, 1), v)
assert similar(u.interpolate_to(v, 0.5), (u + v) / 2)
def test_interpolate_to_accept_tuples(self, u, v):
V = tuple(v)
assert similar(u.interpolate_to(v, 0.5), u.interpolate_to(V, 0.5))
def test_perpendicular(self, u):
v = u.perpendicular()
assert similar(u.length, v.length)
assert similar(u.dot(v), 0)
assert similar(u.angle_between(v), pi / 2)
def test_perpendicular_normal(self, u, v):
v = u.perpendicular_normal()
assert similar(v.length, 1)
assert similar(u.dot(v), 0)
assert similar(u.angle_between(v), pi / 2)
def test_projection(self, u, v):
proj = u.projection(v)
assert similar(proj.angle, v.angle)
assert proj.length <= u.length
assert similar(v.length * proj.length, u.dot(v))
assert similar(u.length * v.projection(u).length, u.dot(v))
def test_rotate(self, u):
angle, length = u.angle, u.length
rotation = pi * random.random()
assert u.rotate(rotation) is None
assert similar(u.angle, angle + rotation)
assert similar(u.length, length)
def test_rotated(self, u):
rotation = pi * random.random()
u_ = u.rotated(rotation)
assert similar(u_.angle, u.angle + rotation)
assert similar(u_.length, u.length)
|
[
"fabiomacedomendes@gmail.com"
] |
fabiomacedomendes@gmail.com
|
5e03884088e7112a4fe74cab90f05bd21fd61391
|
be0f3dfbaa2fa3d8bbe59229aef3212d032e7dd1
|
/DaVinciDev_v38r1p1/Phys/StrippingArchive/python/StrippingArchive/Stripping17/StrippingLambdac2PKPiForXSec.py
|
d220bbc13f2a976b45ccebde6e2a9d69ff4e73dd
|
[] |
no_license
|
Sally27/backup_cmtuser_full
|
34782102ed23c6335c48650a6eaa901137355d00
|
8924bebb935b96d438ce85b384cfc132d9af90f6
|
refs/heads/master
| 2020-05-21T09:27:04.370765
| 2018-12-12T14:41:07
| 2018-12-12T14:41:07
| 185,989,173
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 7,182
|
py
|
'''
Lambdac cross-section lines
Adapted to current stripping framework by P. Spradlin.
'''
__author__ = ['Francesca Dordei', 'Francesco Dettori', 'Patrick Spradlin']
__date__ = '2010/07/15'
__version__ = '$Revision: 1.3 $'
__all__ = ( 'StrippingLambdac2PKPiForXSecConf',
'makeLambdac2PKPi',
'default_config' )
from Gaudi.Configuration import *
from StrippingConf.StrippingLine import StrippingLine
from GaudiKernel.SystemOfUnits import MeV, mm, ns
from LHCbKernel.Configuration import *
#from Configurables import FilterDesktop, CombineParticles
from GaudiConfUtils.ConfigurableGenerators import FilterDesktop, CombineParticles
from PhysSelPython.Wrappers import Selection
from StrippingUtils.Utils import LineBuilder
from StandardParticles import StdNoPIDsPions, StdNoPIDsKaons, StdNoPIDsProtons
class StrippingLambdac2PKPiForXSecConf(LineBuilder): # {
__configuration_keys__ = ( 'Daug_All_PT_MIN'
, 'Daug_1of3_PT_MIN'
, 'Daug_P_MIN'
, 'Daug_TRCHI2DOF_MAX'
, 'Daug_BPVIPCHI2_MIN'
, 'Proton_PIDp_MIN'
, 'Pi_PIDK_MAX'
, 'K_PIDK_MIN'
, 'Comb_ADAMASS_WIN'
, 'Comb_ADOCAMAX_MAX'
, 'Lambdac_PT_MIN'
, 'Lambdac_VCHI2VDOF_MAX'
, 'Lambdac_BPVVDCHI2_MIN'
, 'Lambdac_BPVDIRA_MIN'
, 'Lambdac_BPVLTIME_MAX'
, 'Lambdac_BPVLTIME_MIN'
, 'HltFilter'
, 'PrescaleLambdac2PKPi'
, 'PostscaleLambdac2PKPi'
)
def __init__(self, name, config) : # {
LineBuilder.__init__(self, name, config)
lambdac_name = name + 'Lambdac2PKPi'
self.inPions = StdNoPIDsPions
self.inKaons = StdNoPIDsKaons
self.inProtons = StdNoPIDsProtons
self.selLambdac2PKPi = makeLambdac2PKPi( name = lambdac_name
, inputSel = [ self.inPions, self.inKaons, self.inProtons ]
, Daug_All_PT_MIN = config['Daug_All_PT_MIN']
, Daug_1of3_PT_MIN = config['Daug_1of3_PT_MIN']
, Daug_P_MIN = config['Daug_P_MIN']
, Daug_TRCHI2DOF_MAX = config['Daug_TRCHI2DOF_MAX']
, Daug_BPVIPCHI2_MIN = config['Daug_BPVIPCHI2_MIN']
, Proton_PIDp_MIN = config['Proton_PIDp_MIN']
, Pi_PIDK_MAX = config['Pi_PIDK_MAX']
, K_PIDK_MIN = config['K_PIDK_MIN']
, Comb_ADAMASS_WIN = config['Comb_ADAMASS_WIN']
, Comb_ADOCAMAX_MAX = config['Comb_ADOCAMAX_MAX']
, Lambdac_PT_MIN = config['Lambdac_PT_MIN']
, Lambdac_VCHI2VDOF_MAX = config['Lambdac_VCHI2VDOF_MAX']
, Lambdac_BPVVDCHI2_MIN = config['Lambdac_BPVVDCHI2_MIN']
, Lambdac_BPVDIRA_MIN = config['Lambdac_BPVDIRA_MIN']
, Lambdac_BPVLTIME_MAX = config['Lambdac_BPVLTIME_MAX']
, Lambdac_BPVLTIME_MIN = config['Lambdac_BPVLTIME_MIN']
)
self.line_Lambdac2PKPi = StrippingLine( lambdac_name + 'Line',
HLT = config['HltFilter'],
prescale = config['PrescaleLambdac2PKPi'],
postscale = config['PostscaleLambdac2PKPi'],
algos = [ self.selLambdac2PKPi ]
)
self.registerLine(self.line_Lambdac2PKPi)
# }
# }
def makeLambdac2PKPi( name
, inputSel
, Daug_All_PT_MIN
, Daug_1of3_PT_MIN
, Daug_P_MIN
, Daug_TRCHI2DOF_MAX
, Daug_BPVIPCHI2_MIN
, Proton_PIDp_MIN
, Pi_PIDK_MAX
, K_PIDK_MIN
, Comb_ADAMASS_WIN
, Comb_ADOCAMAX_MAX
, Lambdac_PT_MIN
, Lambdac_VCHI2VDOF_MAX
, Lambdac_BPVVDCHI2_MIN
, Lambdac_BPVDIRA_MIN
, Lambdac_BPVLTIME_MAX
, Lambdac_BPVLTIME_MIN
, decDescriptors = [ "[Lambda_c+ -> p+ K- pi+]cc" ]
) : # {
daugCuts = "(PT > %(Daug_All_PT_MIN)s)" \
"& (P > %(Daug_P_MIN)s)" \
"& (TRCHI2DOF < %(Daug_TRCHI2DOF_MAX)s)" \
"& (BPVIPCHI2() > %(Daug_BPVIPCHI2_MIN)s)" % locals()
pCuts = "((PIDp-PIDpi) > %(Proton_PIDp_MIN)s)" % locals()
piCuts = "((PIDK-PIDpi) < %(Pi_PIDK_MAX)s)" % locals()
kCuts = "((PIDK-PIDpi) > %(K_PIDK_MIN)s)" % locals()
combCuts = "(ADAMASS('Lambda_c+') < %(Comb_ADAMASS_WIN)s)" \
"& (AMAXCHILD(PT) > %(Daug_1of3_PT_MIN)s)" \
"& (ADOCAMAX('') < %(Comb_ADOCAMAX_MAX)s)" % locals()
lambdacCuts = "(PT > %(Lambdac_PT_MIN)s)" \
"& (VFASPF(VCHI2/VDOF) < %(Lambdac_VCHI2VDOF_MAX)s)" \
"& (BPVVDCHI2 > %(Lambdac_BPVVDCHI2_MIN)s)" \
"& (BPVDIRA > %(Lambdac_BPVDIRA_MIN)s)" \
"& (BPVLTIME('PropertimeFitter/properTime:PUBLIC') > %(Lambdac_BPVLTIME_MIN)s)" \
"& (BPVLTIME('PropertimeFitter/properTime:PUBLIC') < %(Lambdac_BPVLTIME_MAX)s)" % locals()
_Lambdac = CombineParticles(
DecayDescriptors = decDescriptors
, DaughtersCuts = { "pi+" : daugCuts + '&' + piCuts,
"K+" : daugCuts + '&' + kCuts,
"p+" : daugCuts + '&' + pCuts }
, CombinationCut = combCuts
, MotherCut = lambdacCuts
)
return Selection( name,
Algorithm = _Lambdac,
RequiredSelections = inputSel
)
# }
default_config = { 'Daug_All_PT_MIN' : 400.0 * MeV
, 'Daug_1of3_PT_MIN' : 1200.0 * MeV
, 'Daug_P_MIN' : 3200.0 * MeV
, 'Daug_TRCHI2DOF_MAX' : 10.0
, 'Daug_BPVIPCHI2_MIN' : 0.5
, 'Proton_PIDp_MIN' : 10.0
, 'Pi_PIDK_MAX' : 0.0
, 'K_PIDK_MIN' : 10.0
, 'Comb_ADAMASS_WIN' : 90.0 * MeV
, 'Comb_ADOCAMAX_MAX' : 0.1 * mm
, 'Lambdac_PT_MIN' : 0.0 * MeV
, 'Lambdac_VCHI2VDOF_MAX' : 20.0
, 'Lambdac_BPVVDCHI2_MIN' : 8.0
, 'Lambdac_BPVDIRA_MIN' : 0.9999
, 'Lambdac_BPVLTIME_MAX' : 0.0012 * ns
, 'Lambdac_BPVLTIME_MIN' : 0.0 * ns
, 'HltFilter' : "HLT_PASS_RE('Hlt1MB.*')"
, 'PrescaleLambdac2PKPi' : 1.0
, 'PostscaleLambdac2PKPi' : 1.0
}
|
[
"slavomirastefkova@b2pcx39016.desy.de"
] |
slavomirastefkova@b2pcx39016.desy.de
|
b6b66a4cef930e66539a9b62c88a7eb8347904bc
|
55bcc8b6d5eb2878405f71ad5559cc3c80f5edc9
|
/nlpAnylise/nlpctrTwo.py
|
30735dfca102be6fd889b08ca03eb4fb2bde0a4d
|
[
"Apache-2.0"
] |
permissive
|
chenwangwww/ppython
|
2989026d5ef6d8a733e8b62ef6d4d7bcd2783b38
|
13a2f1193714133701743bfdf1a8add61a29dd4c
|
refs/heads/master
| 2023-05-03T01:15:01.828534
| 2021-05-13T06:13:55
| 2021-05-13T06:13:55
| 343,254,494
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 6,587
|
py
|
# subject
# predicate
# object
# array of the subject's attributes (each with the attribute's adverbial + preposition-object pair) [attStru, ...]
# array of the object's attributes (each with the attribute's adverbial + preposition-object pair)
# array of the predicate's adverbials (adverbial + preposition-object pairs) [{'adv': predicate adverbial, 'pob': preposition-object relation}, ...]
# array of coordinated predicates (each with its object, the object's attribute array, and those attributes' adverbial + preposition-object pairs)
# [[coordinated predicate, object, [attStru, ...]], ...]
# array of coordinated subjects (each with its attribute array and those attributes' adverbial + preposition-object pairs)
# [[coordinated subject, [attStru, ...]], ...]
# array of coordinated objects (each with its attribute array and those attributes' adverbial + preposition-object pairs)
# ---------------------------------------------------------------
# attStru = {'attM': attribute, 'adv': adverbial of the attribute, 'pob': preposition-object relation, 'att': attribute of the attribute}
# masterStru = {'master': subject or object, 'rel': [attStru, ...]}
# advStru = {'adv': adverbial, 'pob': preposition-object relation}
# predStru = {'pred': predicate, 'objs': [masterStru, ...], 'advs': [advStru, ...]}
# coordinated-subject array
# [masterStru, ...]
# coordinated-predicate array
# [predStru, ...]
# ---------------------------------------------------------------
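# Illustrative sketch (added for clarity; the indices are hypothetical): every field of
# the dict returned by abstractSentence() holds 1-based word indices into seg[0], e.g.
#   {'subject': 1, 'pred': 5, 'object': 7,
#    'aps': [(3, 4)],               # (adverbial, preposition-object) pairs of the predicate
#    'attsS': [(2, (None, None))]}  # (attribute, (adverbial, pob)) pairs of the subject
# showWords() then maps these indices back to surface words via indexToWord().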
from ltp import LTP
ltp = LTP()
class NlpCtr(object):
def __init__(self):
self.seg = None
def trans_result(self, depArr):
tempdepArr = depArr[0]
tempArr = []
for item in tempdepArr:
dic = {
'dep': item[0],
'gov': item[1],
'type': item[2],
}
tempArr.append(dic)
return tempArr
def getHED(self, words):
root = None
for word in words:
if word['gov'] == 0 and word['type'] == 'HED':
root = word['dep']
return root
def getWord(self, words, GOV, wType):
res = None
for word in words:
if word['type'] == wType and word['gov'] == GOV:
res = word['dep']
return res
def getWords(self, words, GOV, wType):
res = []
for word in words:
if word['type'] == wType and word['gov'] == GOV:
res.append(word['dep'])
res = res if len(res) > 0 else None
return res
def getSubject(self, words, HED, ADVS):
subject = self.getWord(words, HED, 'SBV')
if subject is None and ADVS is not None:
for adv in ADVS:
if self.indexToWord(adv) == '被':
subject = self.getWord(words, adv, 'POB')
return subject
def getObject(self, words, HED):
vob = self.getWord(words, HED, 'VOB')
fob = self.getWord(words, HED, 'FOB')
return self.get_not_none([vob, fob])
# get the attribute (ATT) info for a word
def getATTInfo(self, words, GOV):
atts = self.getWords(words, GOV, 'ATT')
res = []
if atts is not None:
for a in atts:
adv = self.getWord(words, a, 'ADV')
pob = self.getWord(words, adv, 'POB')
res.append((a, (adv, pob)))
res = res if len(res) > 0 else None
return res
# get the info for coordinated (COO) subjects or objects
def getCOOInfo(self, words, GOV):
res = []
coos = self.getWords(words, GOV, 'COO')
if coos is not None:
for coo in coos:
atts = self.getATTInfo(words, coo)
res.append((coo, atts))
res = res if len(res) > 0 else None
return res
# adverbial + preposition-object (POB) pairs
def getADVPOBS(self, words, ADVS):
res = []
if ADVS is not None:
for adv in ADVS:
pob = self.getWord(words, adv, 'POB')
res.append((adv, pob))
res = res if len(res) > 0 else None
return res
def get_not_none(self, alist):
for a in alist:
if a is not None:
return a
return None
def recuTran(self, source, target):
t = type(source)
if t == list or t == tuple:
for a in source:
subt = type(a)
if subt == list or subt == tuple:
target.append([])
self.recuTran(a, target[-1])
else:
target.append(self.indexToWord(a))
def indexToWord(self, index):
res = None
if index and index <= len(self.seg[0]):
res = self.seg[0][index - 1]
return res
def showWords(self, dic):
items = dic.items()
target = {}
for item in items:
t = type(item[1])
if t == list or t == tuple:
sub = []
self.recuTran(item[1], sub)
target.update({item[0]: sub})
elif item[1] is not None:
sub = self.indexToWord(item[1])
target.update({item[0]: sub})
print(dic)
print(target)
def abstractSentence(self, sentence):
dic = None
self.seg, hidden = ltp.seg([sentence])
dep = ltp.dep(hidden)
pos = ltp.pos(hidden)
words = self.trans_result(dep)
if len(words) > 0:
hed = self.getHED(words)
if hed is not None:
coos = self.getWords(words, hed, 'COO') # coordinated predicates
advs = self.getWords(words, hed, 'ADV') # adverbials of the predicate
aps = self.getADVPOBS(words, advs) # predicate adverbials + preposition-object pairs
subject = self.getSubject(words, hed, advs) # subject
object = self.getObject(words, hed) # object
attsS = self.getATTInfo(words, subject) # attributes of the subject
attsO = self.getATTInfo(words, object) # attributes of the object
coosS = self.getCOOInfo(words, subject) # coordinated subjects
coosO = self.getCOOInfo(words, object) # coordinated objects
dic = {
'subject': subject,
'object': object,
'pred': hed,
'coos': coos,
'advs': advs,
'aps': aps,
'attsS': attsS,
'attsO': attsO,
'coosS': coosS,
'coosO': coosO
}
self.showWords(dic)
return dic
nlpCtr = NlpCtr()
# nlpCtr.abstractSentence('他因为酒驾被交警拘留了。')
# nlpCtr.abstractSentence('学术委员会的每个成员都是博士并且是教授。')
nlpCtr.abstractSentence('小明、小霞,和小刘是三兄弟。')
|
[
"chenwangwww@sina.com"
] |
chenwangwww@sina.com
|
3718b80b3f210bd3e22c1c5a08f559e56bad00df
|
ac5e52a3fc52dde58d208746cddabef2e378119e
|
/exps-sblp/sblp_ut=3.5_rd=0.5_rw=0.06_rn=4_u=0.075-0.325_p=harmonic-2/sched=RUN_trial=65/sched.py
|
c840f1d8aaa51cee48681dfe3e4420cbfb56d305
|
[] |
no_license
|
ricardobtxr/experiment-scripts
|
1e2abfcd94fb0ef5a56c5d7dffddfe814752eef1
|
7bcebff7ac2f2822423f211f1162cd017a18babb
|
refs/heads/master
| 2023-04-09T02:37:41.466794
| 2021-04-25T03:27:16
| 2021-04-25T03:27:16
| 358,926,457
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 401
|
py
|
-S 0 -X RUN -Q 0 -L 4 86 250
-S 0 -X RUN -Q 0 -L 4 83 400
-S 0 -X RUN -Q 0 -L 4 72 400
-S 1 -X RUN -Q 1 -L 3 60 300
-S 1 -X RUN -Q 1 -L 3 58 175
-S 2 -X RUN -Q 2 -L 2 55 200
-S 2 -X RUN -Q 2 -L 2 48 200
-S 3 -X RUN -Q 3 -L 1 37 125
-S 3 -X RUN -Q 3 -L 1 31 125
-S 4 30 125
-S 4 26 175
-S 5 25 250
-S 4 24 200
-S 5 24 250
-S 4 23 100
-S 4 12 100
-S 4 12 100
|
[
"ricardo.btxr@gmail.com"
] |
ricardo.btxr@gmail.com
|
79daec963e10fb204d7c5820ecf5dfa742767c35
|
208bc8b87cb20fc6e57c8c8846cbe947b2eec1f3
|
/pyocd/core/memory_interface.py
|
0bd655b77628999f3cd190bde3c656e1c7f96830
|
[
"Apache-2.0",
"CC-BY-4.0"
] |
permissive
|
canerbulduk/pyOCD
|
28c545f25ef9b2949a1cd49c00faeeda986a26fe
|
a61e8b8dc2050309510d9fe7ca63680aafe06749
|
refs/heads/main
| 2023-08-24T21:10:52.427697
| 2021-11-09T15:13:48
| 2021-11-09T15:13:48
| 426,275,463
| 0
| 0
|
Apache-2.0
| 2021-11-09T15:08:22
| 2021-11-09T15:08:21
| null |
UTF-8
|
Python
| false
| false
| 4,837
|
py
|
# pyOCD debugger
# Copyright (c) 2018-2020 Arm Limited
# SPDX-License-Identifier: Apache-2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from ..utility import conversion
class MemoryInterface(object):
"""! @brief Interface for memory access."""
def write_memory(self, addr, data, transfer_size=32):
"""! @brief Write a single memory location.
By default the transfer size is a word."""
raise NotImplementedError()
def read_memory(self, addr, transfer_size=32, now=True):
"""! @brief Read a memory location.
By default, a word will be read."""
raise NotImplementedError()
def write_memory_block32(self, addr, data):
"""! @brief Write an aligned block of 32-bit words."""
raise NotImplementedError()
def read_memory_block32(self, addr, size):
"""! @brief Read an aligned block of 32-bit words."""
raise NotImplementedError()
def write64(self, addr, value):
"""! @brief Shorthand to write a 64-bit word."""
self.write_memory(addr, value, 64)
def write32(self, addr, value):
"""! @brief Shorthand to write a 32-bit word."""
self.write_memory(addr, value, 32)
def write16(self, addr, value):
"""! @brief Shorthand to write a 16-bit halfword."""
self.write_memory(addr, value, 16)
def write8(self, addr, value):
"""! @brief Shorthand to write a byte."""
self.write_memory(addr, value, 8)
def read64(self, addr, now=True):
"""! @brief Shorthand to read a 64-bit word."""
return self.read_memory(addr, 64, now)
def read32(self, addr, now=True):
"""! @brief Shorthand to read a 32-bit word."""
return self.read_memory(addr, 32, now)
def read16(self, addr, now=True):
"""! @brief Shorthand to read a 16-bit halfword."""
return self.read_memory(addr, 16, now)
def read8(self, addr, now=True):
"""! @brief Shorthand to read a byte."""
return self.read_memory(addr, 8, now)
def read_memory_block8(self, addr, size):
"""! @brief Read a block of unaligned bytes in memory.
@return an array of byte values
"""
res = []
# try to read 8bits data
if (size > 0) and (addr & 0x01):
mem = self.read8(addr)
res.append(mem)
size -= 1
addr += 1
# try to read 16bits data
if (size > 1) and (addr & 0x02):
mem = self.read16(addr)
res.append(mem & 0xff)
res.append((mem >> 8) & 0xff)
size -= 2
addr += 2
# try to read aligned block of 32bits
if (size >= 4):
mem = self.read_memory_block32(addr, size // 4)
res += conversion.u32le_list_to_byte_list(mem)
size -= 4*len(mem)
addr += 4*len(mem)
if (size > 1):
mem = self.read16(addr)
res.append(mem & 0xff)
res.append((mem >> 8) & 0xff)
size -= 2
addr += 2
if (size > 0):
mem = self.read8(addr)
res.append(mem)
return res
def write_memory_block8(self, addr, data):
"""! @brief Write a block of unaligned bytes in memory."""
size = len(data)
idx = 0
#try to write 8 bits data
if (size > 0) and (addr & 0x01):
self.write8(addr, data[idx])
size -= 1
addr += 1
idx += 1
# try to write 16 bits data
if (size > 1) and (addr & 0x02):
self.write16(addr, data[idx] | (data[idx+1] << 8))
size -= 2
addr += 2
idx += 2
# write aligned block of 32 bits
if (size >= 4):
data32 = conversion.byte_list_to_u32le_list(data[idx:idx + (size & ~0x03)])
self.write_memory_block32(addr, data32)
addr += size & ~0x03
idx += size & ~0x03
size -= size & ~0x03
# try to write 16 bits data
if (size > 1):
self.write16(addr, data[idx] | (data[idx+1] << 8))
size -= 2
addr += 2
idx += 2
#try to write 8 bits data
if (size > 0):
self.write8(addr, data[idx])
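# Hedged usage sketch (added for illustration; `target` stands for any concrete object
# that implements read_memory()/write_memory() and the 32-bit block methods, e.g. a
# debug probe's memory AP; it is not defined in this module):
#   target.write_memory_block8(0x20000001, [0xde, 0xad, 0xbe, 0xef, 0x55])
#   target.read_memory_block8(0x20000001, 5)   # -> [0xde, 0xad, 0xbe, 0xef, 0x55]
# The two block helpers above split the unaligned head/tail into 8- and 16-bit accesses
# and use 32-bit block transfers for the aligned middle.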
|
[
"flit@me.com"
] |
flit@me.com
|
75738732f1f444b4a8f55db79e8378fbee80e1dc
|
46577285b990bb2711cc718b99a24f78c53a7da7
|
/파이썬 알고리즘_4기/자물쇠와 열쇠 사본/solution.py
|
2f4518155d216a95b0238d57febf454450496e5c
|
[] |
no_license
|
suwonraison900206/TIL
|
890f02ff768c3a7d6ed647a27ba8da96b0cc06b4
|
2153623238bcc6965ec6983df9e7b207cc5aa361
|
refs/heads/master
| 2022-06-08T09:51:12.289608
| 2022-05-18T12:04:56
| 2022-05-18T12:04:56
| 235,004,266
| 2
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,396
|
py
|
def solution(key, lock):
answer = True
def turn_key(key):
key_lst = []
for i in range(len(key)):
a = []
for j in range(len(key)-1, -1, -1):
a.append(key[j][i])
key_lst.append(a)
return key_lst
lst = [[0] * (len(lock) * 3) for __ in range(len(lock) * 3)]
for i in range(len(lock), (len(lock) * 2)):
for j in range(len(lock), (len(lock) * 2)):
lst[i][j] = lock[i-len(lock)][j - len(lock)]
L = len(lst)
K = len(key)
for cnt in range(4):
key = turn_key(key)
for i in range(0, (L-K+1)):
for j in range(0, (L-K+1)):
flag = True
for q in range(K):
for w in range(K):
lst[i+q][j+w] = lst[i+q][j+w] + key[q][w]
for x in range(len(lock), (len(lock) * 2)):
for y in range(len(lock), (len(lock) * 2)):
if lst[x][y] == 0 or lst[x][y] == 2:
flag = False
if flag == True:
return True
for q in range(K):
for w in range(K):
lst[i+q][j+w] = lst[i+q][j+w] - key[q][w]
return False
key = [[0, 0, 0], [1, 0, 0], [0, 1, 1]]
lock = [[1, 1, 1], [1, 1, 0], [1, 0, 1]]
print(solution(key,lock))
|
[
"suwonraison@gmail.com"
] |
suwonraison@gmail.com
|
bfd99852e69fe7ffaa50887a79dd2443937aae29
|
26f6313772161851b3b28b32a4f8d255499b3974
|
/Books/CrackingtheCodingInterview/1708_CircusTowerLCCI.py
|
e0a049aefcf9028bc196ec5e00e061eeb6f66683
|
[] |
no_license
|
here0009/LeetCode
|
693e634a3096d929e5c842c5c5b989fa388e0fcd
|
f96a2273c6831a8035e1adacfa452f73c599ae16
|
refs/heads/master
| 2023-06-30T19:07:23.645941
| 2021-07-31T03:38:51
| 2021-07-31T03:38:51
| 266,287,834
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,429
|
py
|
"""
A circus is designing a human-tower act in which one person stands on another's shoulders. For practical and aesthetic reasons, each person must be both shorter and lighter than the person below. Given the height and weight of every circus member, write code to compute the maximum number of people that can be stacked.
Example:
Input: height = [65,70,56,75,60,68] weight = [100,150,90,190,95,110]
Output: 6
Explanation: counting from top to bottom, the tallest possible tower has 6 levels: (56,90), (60,95), (65,100), (68,110), (70,150), (75,190)
Constraints:
height.length == weight.length <= 10000
Source: LeetCode (LCCI)
Link: https://leetcode-cn.com/problems/circus-tower-lcci
Copyright belongs to LeetCode; contact them for authorization before commercial reuse and cite the source for non-commercial reuse.
"""
from typing import List
from functools import lru_cache
class Solution:
def bestSeqAtIndex(self, height: List[int], weight: List[int]) -> int:
"""
TLE
"""
@lru_cache(None)
def calc(idx, h, w):
if idx == length:
return 0
res = calc(idx + 1, h, w)
for j in range(idx, length):
h2, w2 = sorted_h_w[j]
if h2 > h and w2 > w:
res = max(res, 1 + calc(j, h2, w2))
return res
length = len(height)
sorted_h_w = sorted(zip(height, weight))
# print(sorted_h_w)
return calc(0, 0, 0)
from typing import List
from bisect import bisect_left, bisect_right
class Solution:
def bestSeqAtIndex(self, height: List[int], weight: List[int]) -> int:
length = len(height)
sorted_h_w = sorted(zip(height, weight), key=lambda x: (x[0], -x[1]))
stack = []
# print(sorted_h_w)
for i in range(length):
_, w = sorted_h_w[i]
idx = bisect_left(stack, w) # bisect_left lets an equal weight replace the existing entry (strict increase required); bisect_right would append duplicates
if idx == len(stack):
if not stack or w > stack[-1]:
stack.append(w)
else:
stack[idx] = w
# print(stack)
return len(stack)
S = Solution()
height = [65,70,56,75,60,68]
weight = [100,150,90,190,95,110]
print(S.bestSeqAtIndex(height, weight))
height =[1,2,3,4]
weight =[4,3,2,1]
print(S.bestSeqAtIndex(height, weight))
nums = [1,3,3,3,4,5,7,10]
for k in range(10):
print(k, bisect_left(nums, k), bisect_right(nums, k))
|
[
"here0009@163.com"
] |
here0009@163.com
|
89673a5966ae7356ef9eff43e2a87a6420672288
|
ded10c2f2f5f91c44ec950237a59225e8486abd8
|
/.history/3/ising2d_microstates_run_20200506001321.py
|
7e09e413f76c69e16147063ce74e71d6947cc7bc
|
[] |
no_license
|
jearistiz/Statistical-Physics-Projects
|
276a86407b32ded4e06b32efb2fadbd8eff8daed
|
d9c5b16a50856e148dc8604d92b6de3ea21fc552
|
refs/heads/master
| 2022-11-05T03:41:23.623050
| 2020-06-28T06:36:05
| 2020-06-28T06:36:05
| 254,909,897
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,809
|
py
|
from ising2d_microstates import *
################################################################################################
# CONTROL PANEL
################################################################################################
# Decide whether to run the matrix squaring algorithm with the Trotter approximation
run_microstates_algorithm = True
# Decide whether to run the internal-energy calculation
run_avg_energy = True
# Decide whether to run the optimization of dx and beta_ini
run_optimization = False
################################################################################################
# GENERAL PARAMETERS FOR THE FIGURES
################################################################################################
# Use LaTeX in figure text and increase the font size
plt.rc('text', usetex=True)
plt.rcParams.update({'font.size':15,'text.latex.unicode':True})
# Get the path for saving files in the same directory where the script is located
script_dir = os.path.dirname(os.path.abspath(__file__))
# or just a list of the list of key value pairs
# list_key_value = [ [k,v] for k, v in dict.items() ]
if run_microstates_algorithm:
L = 4
microstates = ising_microstates(L)
print('All microstates, each in a single row:')
print(pd.DataFrame(microstates),'\n')
neighbours = ising_neighbours(L)
energies = ising_energy(microstates, neighbours, save_data=True)
ising_energy_plot(energies, L, save_plot=True)
microstate_rand_index = 2 ** (L*L) - np.random.randint(1, 2 ** (L*L))
microstate_rand = microstates[microstate_rand_index,:]
print('One random microstate as a 2D grid:')
print(pd.DataFrame(microstate_rand.reshape((L,L))), '\n')
ising_microstate_plot(microstate_rand, save_plot=True)
|
[
"jeaz.git@gmail.com"
] |
jeaz.git@gmail.com
|
103da40cbb391b6592cac3e559ff96bfbd0bcadf
|
dbe1f4110921a08cb13e22ea325d503bd5627195
|
/chuhuo_2.7_clickhouse/bluedon/bdwafd/newscantools/plugins/ChckDeadLinksScript.py
|
624b61e9bc42c1fa2dd55d579de07834790dac83
|
[] |
no_license
|
Hehouhua/waf_branches
|
92dc1b1cbecba20f24ef6c7372dde7caa43f9158
|
ca76f3a1ed8150b423474c9e37aee37841a5ee35
|
refs/heads/main
| 2023-01-07T11:33:31.667688
| 2020-11-03T06:58:33
| 2020-11-03T06:58:33
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,386
|
py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from lib.common import *
import httplib2
import urlparse
import re
def run_url(http,ob,item):
try:
result=[]
url=item['url']
detail=u''
detail=detail.encode('utf8')
parse=urlparse.urlparse(url)
path=parse.path
if path=="" or path=="/":
return result
if item['params'] == "":
return result
#end if
if item['method'] == 'get':
response,content=requestUrl(http,item['url']+"?"+item['params'],ob['task_id'],ob['domain_id'])
if response['status']!='200':
request=getRequest(item['url']+"?"+item['params'],'GET')
response=getResponse(response,"")
result.append(getRecord(ob,item['url']+"?"+item['params'],ob['level'],detail,request,response))
except Exception,e:
logging.getLogger().error("File:ChckDeadLinksScript.py, run_url function :" + str(e) + ",task id:" + ob['task_id'] + ",domain id:" + ob['domain_id'])
write_scan_log(ob['task_id'],ob['domain_id'],"File:ChckDeadLinksScript.py, run_url function :" + str(e)+", url:"+item['url']+"?"+item['params'])
#end try
return result
|
[
"hanson_wong@qq.com"
] |
hanson_wong@qq.com
|
2561a47c4d7afb0c4bf31d06192529c574366797
|
2fa016eeb6d4d4cc61fb0d43aa9f0fd1ad4ef2e3
|
/python/函数/quadratic.py
|
236d535788afe5e6a4ac3ede0dd3b40f57f946f1
|
[] |
no_license
|
juechen-zzz/learngit
|
521e0d2c13d97248f6f8b1f2096f718dc497351b
|
513d3e57f4e0fce72ca4ecd1f30be2d261ee9260
|
refs/heads/master
| 2021-07-04T17:20:58.456812
| 2020-08-27T02:08:05
| 2020-08-27T02:08:05
| 163,482,583
| 8
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 218
|
py
|
# -*- coding: utf-8 -*-
## Solve a quadratic equation a*x^2 + b*x + c = 0
import math;
def quadratic(a,b,c):
m = b*b - 4*a*c;
x1 = (-b + math.sqrt(m))/(2*a);
x2 = (-b - math.sqrt(m))/(2*a);
return x1,x2
r = quadratic(1,6,9);
print(r);
|
[
"240553516@qq.com"
] |
240553516@qq.com
|
f16744f50e18e59f1326b83f02cb864a6192f983
|
d554b1aa8b70fddf81da8988b4aaa43788fede88
|
/5 - Notebooks e Data/1 - Análises numéricas/Arquivos David/Atualizados/logDicas-master/data/2019-1/226/users/4137/codes/1836_2604.py
|
2d39fb44f590d1ffec7a63f648064058d2d0a792
|
[] |
no_license
|
JosephLevinthal/Research-projects
|
a3bc3ca3b09faad16f5cce5949a2279cf14742ba
|
60d5fd6eb864a5181f4321e7a992812f3c2139f9
|
refs/heads/master
| 2022-07-31T06:43:02.686109
| 2020-05-23T00:24:26
| 2020-05-23T00:24:26
| 266,199,309
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 132
|
py
|
from numpy import*
from numpy.linalg import*
n = array(eval(input("Pagamentos:")))
for i in range(shape(n)[0]):
print(max(n[i]))
|
[
"jvlo@icomp.ufam.edu.br"
] |
jvlo@icomp.ufam.edu.br
|
b0cf9febae60c6a97766768835e4a2dd26a5c061
|
e24db52fb64c06e859e36122008fe27d7f2b7a81
|
/model/fixed_length_pqueue.py
|
c1d5e99db2b86d1534e97d1221d61c221a000d05
|
[] |
no_license
|
zhuowangsylu/sklearn-sknnsuite
|
1cd7f46d4f0f54190db8262a35e987cf103e62d4
|
22fe386be643e309d6491e9a408711e3472e396d
|
refs/heads/master
| 2020-12-08T09:20:49.940874
| 2017-06-10T20:24:56
| 2017-06-10T20:24:56
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,097
|
py
|
import itertools
import heapq
class FixedLengthPQueue(object):
def __init__(self, length):
"""
:param length: max length of queue. Should be greater then 0
"""
self.pq = [] # list of entries arranged in a heap
self.entry_finder = {} # mapping of tasks to entries
self.counter = itertools.count() # unique sequence count
self.length = length
def add_task(self, task, priority=0):
"""Add a new task or update the priority of an existing task"""
if len(self.pq) == self.length:
self.pop_task()
count = next(self.counter)
entry = [- priority, count, task]
self.entry_finder[task] = entry
heapq.heappush(self.pq, entry)
def pop_task(self):
"""Remove and return the lowest priority task. Raise KeyError if empty."""
while self.pq:
priority, count, task = heapq.heappop(self.pq)
del self.entry_finder[task]
return task
raise KeyError('pop from an empty priority queue')
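# A short usage sketch (an assumption, not part of the original module). Because
# priorities are stored negated, pop_task() returns the task with the numerically
# highest priority value, and that is also what add_task() evicts once the queue
# already holds `length` items.
if __name__ == '__main__':
    q = FixedLengthPQueue(2)
    q.add_task('a', priority=1)
    q.add_task('b', priority=5)
    q.add_task('c', priority=3)   # full: 'b' (priority 5) is evicted before 'c' is inserted
    print(q.pop_task())           # 'c'
    print(q.pop_task())           # 'a'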
|
[
"vasnetsov93@gmail.com"
] |
vasnetsov93@gmail.com
|
5cb01c05221174a919960232eb3fdbabe308c355
|
fb94faa56d5763607be6566925132f4957d751cf
|
/pyroms_toolbox/pyroms_toolbox/zview.py
|
508a6b3ba23f1910f4321953d47cf1b4eeea0055
|
[
"BSD-3-Clause"
] |
permissive
|
csherwood-usgs/pyroms
|
44e684ec0b20e242cf3743d128332be330209289
|
be5e40a1720561bb18698f08a2c74b1906c73bab
|
refs/heads/master
| 2021-01-24T23:51:00.789744
| 2013-12-30T20:46:15
| 2013-12-30T20:46:15
| 16,179,889
| 0
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 6,825
|
py
|
import numpy as np
import matplotlib.pyplot as plt
from matplotlib import cm, colors
from mpl_toolkits.basemap import Basemap
import pyroms
import pyroms_toolbox
def zview(var, tindex, depth, grid, filename=None, \
cmin=None, cmax=None, clev=None, clb_format='%.2f', \
fill=False, contour=False, d=4, range=None, fts=None, \
title=None, clb=True, pal=None, proj='merc', \
fill_land=False, outfile=None):
"""
map = zview(var, tindex, depth, grid, {optional switch})
optional switch:
- filename if defined, load the variable from file
- cmin set color minimum limit
- cmax set color maximum limit
- clev set the number of color step
- fill use contourf instead of pcolor
    - contour      overlay contour (requires fill=True)
- range set axis limit
- fts set font size (default: 12)
- title add title to the plot
    - clb          add colorbar (default: True)
- pal set color map (default: cm.jet)
- proj set projection type (default: merc)
    - fill_land    fill land masked area with gray (default: True)
- outfile if defined, write figure to file
    plot a constant-z slice of variable var. If filename is provided,
    var must be a string and the variable will be loaded from the file.
    grid can be a grid object or a gridid. In the latter case, the grid
    object corresponding to the provided gridid will be loaded.
If proj is not None, return a Basemap object to be used with quiver
for example.
"""
# get grid
if type(grid).__name__ == 'ROMS_Grid':
grd = grid
else:
grd = pyroms.grid.get_ROMS_grid(grid)
# get variable
if filename == None:
var = var
else:
data = pyroms.io.Dataset(filename)
var = data.variables[var]
Np, Mp, Lp = grd.vgrid.z_r[0,:].shape
if tindex is not -1:
assert len(var.shape) == 4, 'var must be 4D (time plus space).'
K, N, M, L = var.shape
else:
assert len(var.shape) == 3, 'var must be 3D (no time dependency).'
N, M, L = var.shape
# determine where on the C-grid these variable lies
if N == Np and M == Mp and L == Lp:
Cpos='rho'
mask = grd.hgrid.mask_rho
if N == Np and M == Mp and L == Lp-1:
Cpos='u'
mask = grd.hgrid.mask_u
if N == Np and M == Mp-1 and L == Lp:
Cpos='v'
mask = grd.hgrid.mask_v
    # get constant-z slice
if tindex == -1:
var = var[:,:,:]
else:
var = var[tindex,:,:,:]
depth = -abs(depth)
if fill == True:
zslice, lon, lat = pyroms.tools.zslice(var, depth, grd, \
Cpos=Cpos)
else:
zslice, lon, lat = pyroms.tools.zslice(var, depth, grd, \
Cpos=Cpos, vert=True)
# plot
if cmin is None:
cmin = zslice.min()
else:
cmin = float(cmin)
if cmax is None:
cmax = zslice.max()
else:
cmax = float(cmax)
if clev is None:
clev = 100.
else:
clev = float(clev)
dc = (cmax - cmin)/clev ; vc = np.arange(cmin,cmax+dc,dc)
if pal is None:
pal = cm.jet
else:
pal = pal
if fts is None:
fts = 12
else:
fts = fts
#pal.set_over('w', 1.0)
#pal.set_under('w', 1.0)
#pal.set_bad('w', 1.0)
pal_norm = colors.BoundaryNorm(vc,ncolors=256, clip = False)
if range is None:
lon_min = lon.min()
lon_max = lon.max()
lon_0 = (lon_min + lon_max) / 2.
lat_min = lat.min()
lat_max = lat.max()
lat_0 = (lat_min + lat_max) / 2.
else:
lon_min = range[0]
lon_max = range[1]
lon_0 = (lon_min + lon_max) / 2.
lat_min = range[2]
lat_max = range[3]
lat_0 = (lat_min + lat_max) / 2.
# clear figure
#plt.clf()
if proj is not None:
map = Basemap(projection=proj, llcrnrlon=lon_min, llcrnrlat=lat_min, \
urcrnrlon=lon_max, urcrnrlat=lat_max, lat_0=lat_0, lon_0=lon_0, \
resolution='h', area_thresh=5.)
#map = pyroms.utility.get_grid_proj(grd, type=proj)
x, y = map(lon,lat)
if fill_land is True and proj is not None:
# fill land and draw coastlines
map.drawcoastlines()
map.fillcontinents(color='grey')
else:
if proj is not None:
Basemap.pcolor(map, x, y, mask, vmin=-2, cmap=cm.gray)
pyroms_toolbox.plot_coast_line(grd, map)
else:
plt.pcolor(lon, lat, mask, vmin=-2, cmap=cm.gray)
pyroms_toolbox.plot_coast_line(grd)
if fill is True:
if proj is not None:
cf = Basemap.contourf(map, x, y, zslice, vc, cmap = pal, \
norm = pal_norm)
else:
cf = plt.contourf(lon, lat, zslice, vc, cmap = pal, \
norm = pal_norm)
else:
if proj is not None:
cf = Basemap.pcolor(map, x, y, zslice, cmap = pal, norm = pal_norm)
else:
cf = plt.pcolor(lon, lat, zslice, cmap = pal, norm = pal_norm)
if clb is True:
clb = plt.colorbar(cf, fraction=0.075,format=clb_format)
for t in clb.ax.get_yticklabels():
t.set_fontsize(fts)
if contour is True:
if fill is not True:
raise Warning, 'Please run again with fill=True to overlay contour.'
else:
if proj is not None:
Basemap.contour(map, x, y, zslice, vc[::d], colors='k', linewidths=0.5, linestyles='solid')
else:
plt.contour(lon, lat, zslice, vc[::d], colors='k', linewidths=0.5, linestyles='solid')
if proj is None and range is not None:
plt.axis(range)
if title is not None:
plt.title(title, fontsize=fts+4)
if proj is not None:
map.drawmeridians(np.arange(lon_min,lon_max, (lon_max-lon_min)/5.001), \
labels=[0,0,0,1], fmt='%.1f')
map.drawparallels(np.arange(lat_min,lat_max, (lat_max-lat_min)/5.001), \
labels=[1,0,0,0], fmt='%.1f')
if outfile is not None:
if outfile.find('.png') != -1 or outfile.find('.svg') != -1 or \
outfile.find('.eps') != -1:
print 'Write figure to file', outfile
plt.savefig(outfile, dpi=200, facecolor='w', edgecolor='w', \
orientation='portrait')
else:
print 'Unrecognized file extension. Please use .png, .svg or .eps file extension.'
if proj is None:
return
else:
return map
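# A hypothetical call sketch (the grid id, file and variable names below are assumptions,
# not from the original): plot the 'temp' field at 100 m depth from a ROMS history file.
#
#   m = zview('temp', 0, -100, 'MY_GRID', filename='ocean_his.nc',
#             fill=True, title='Temperature at 100 m', outfile='temp_100m.png')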
|
[
"kshedstrom@alaska.edu"
] |
kshedstrom@alaska.edu
|
cf67d8266fd1c7b49d932b7a0d593bdefd4d6ab8
|
a67a987ed078da0a1de2908c8c0e08070dee65b1
|
/genice/lattices/sTprime.py
|
16592d7d2a4fb71ae63cafd4bb23059714c8213e
|
[] |
no_license
|
Python3pkg/GenIce
|
ef1ce7ee2997c10e08dde75ac36050a653cd4fc5
|
1e9458b7bf8e0fd2ad5d0c4f8987cea0ae7ca0b0
|
refs/heads/master
| 2021-01-21T17:31:51.595858
| 2017-05-21T14:09:32
| 2017-05-21T14:09:32
| 91,962,047
| 0
| 0
| null | 2017-05-21T14:09:28
| 2017-05-21T14:09:28
| null |
UTF-8
|
Python
| false
| false
| 1,457
|
py
|
"""
Data source: Smirnov, G. S. & Stegailov, V. V. Toward Determination of the New Hydrogen Hydrate Clathrate Structures. J Phys Chem Lett 4, 3560-3564 (2013).
"""
density = 1.2 #default density
bondlen = 1.2 #bond threshold
celltype = "rect"
cell="""
4.04345643838 3.18400949048 3.18412710497
"""
#estimated by unitcell-designer2.py
coord="relative"
waters="""
0.324999313604 0.241626046227 0.741607546689
0.457688412412 0.491604796867 0.811167153486
0.324997440466 0.741611811736 0.741618888106
0.457662988572 0.991605010596 0.672075052384
0.692311583922 0.991727905597 0.672166183923
0.825002557263 0.741721003892 0.741714422407
0.692337007775 0.491727692099 0.811258277449
0.825000683446 0.241706769677 0.741725809343
0.192314228314 0.311166810957 0.491600600285
0.192335356629 0.67208565913 0.491600986442
0.957685764524 0.672166320486 0.491732720568
0.95766464453 0.311247477429 0.491732335146
0.32499754824 0.241612330348 0.241618912338
0.457662990343 0.491605708657 0.172075024014
0.324999242327 0.741626562519 0.241607525056
0.457688416499 0.991605358409 0.31116718725
0.69233701333 0.991728253805 0.311258313389
0.825000760623 0.741707288042 0.2417257845
0.692311587147 0.491728604283 0.172166142582
0.825002454031 0.241721519946 0.241714444623
0.192335354927 0.172085857455 0.991600896391
0.192314236022 0.811167014397 0.991600717379
0.957664643845 0.811247674634 0.991732448252
0.957685771209 0.172166522813 0.991732627999
"""
|
[
"vitroid@gmail.com"
] |
vitroid@gmail.com
|
e02234ba0c5b8cd9dfb2f91ac9dc4b789beb104e
|
a0e5418d4f79587dd8ea6f9425a84ded3351d139
|
/src/actions/conquer_action.py
|
917e56d82ffd6f263150995370328040c06b611b
|
[] |
no_license
|
thydungeonsean/Shinar_Genesis
|
db99f17c8d6afbf69a4950b46223b586e55c83cf
|
ac21f324c11db7c1a722f029a8186c5dc45c9097
|
refs/heads/master
| 2020-03-15T22:07:48.281124
| 2018-06-07T21:09:18
| 2018-06-07T21:09:18
| 116,487,934
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,425
|
py
|
from military_action import MilitaryAction
from src.enum.actions import CONQUER_ACTION
from map_tools import get_army_movement_options, get_conquered_points, in_player_domain
from src.enum.object_codes import *
class ConquerAction(MilitaryAction):
def __init__(self, state, player):
MilitaryAction.__init__(self, state, player, CONQUER_ACTION)
def place_text(self):
return 'Raise Army'
def select_text(self):
return 'Conquer the land'
# moving action
def compute_valid_move_points(self):
# points in range of selected army
return get_army_movement_options(self.state, self.selected_army, conquer=True)
def activate_effect(self, point):
# if enemy building - attack it
# else
# get conquered points
conquered = get_conquered_points(self.state, point)
# spread dominion to those points
map(self.extend_rule, conquered)
# end action
def extend_rule(self, point):
self.state.map.dominion_map.add_dominion(self.player.player_id, point)
# battle triggering helper methods
def get_win_effect(self, point, defender):
def win_effect():
self.activate_effect(point)
print 'attacker wins'
# if defender is garrison, apply correct building conquer interaction
if defender.is_garrison() and not defender.sallying:
defender.rout()
self.conquer_building(defender)
else:
defender.rout()
# end point
self.complete_action()
return win_effect
def conquer_building(self, garrison):
building = garrison.building
point = building.coord.int_position
if building.obj_code in {TOWER, PALACE}:
building.raze()
elif building.obj_code == GRANARY:
if in_player_domain(self.state, point):
building.capture(self.player)
self.selected_army.form_garrison(building)
else:
building.raze()
elif building.obj_code == ZIGGURAT:
if in_player_domain(self.state, point) and not building.under_construction and\
self.player.can_add_ziggurat():
building.capture(self.player)
self.selected_army.form_garrison(building)
else:
building.raze()
|
[
"marzecsean@gmail.com"
] |
marzecsean@gmail.com
|
33f386fa4ae0cf685ab62da52fc7c8b759b4cd0d
|
48c65330f577d11cedb29fd970aee35788ab72c6
|
/model_flfact_tpv__eg_cupones.py
|
6f28705bd6ef39f7f6e6d8c6c9eafc90d2a08dcc
|
[] |
no_license
|
yeboyebo/elganso_sync
|
309ecbaba3127493abe001cd1704cc7098234baa
|
66f033a0e27a05c1fc6704ec6ba2bd474d204b7e
|
refs/heads/master
| 2023-07-22T00:17:48.201252
| 2023-07-19T07:48:40
| 2023-07-19T07:48:40
| 173,096,155
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 842
|
py
|
# @class_declaration interna_eg_cupones #
import importlib
from YBUTILS.viewREST import helpers
from models.flfact_tpv import models as modelos
class interna_eg_cupones(modelos.mtd_eg_cupones, helpers.MixinConAcciones):
pass
class Meta:
proxy = True
# @class_declaration elganso_sync_eg_cupones #
class elganso_sync_eg_cupones(interna_eg_cupones, helpers.MixinConAcciones):
pass
class Meta:
proxy = True
# @class_declaration eg_cupones #
class eg_cupones(elganso_sync_eg_cupones, helpers.MixinConAcciones):
pass
class Meta:
proxy = True
def getIface(self=None):
return form.iface
definitions = importlib.import_module("models.flfact_tpv.eg_cupones_def")
form = definitions.FormInternalObj()
form._class_init()
form.iface.ctx = form.iface
form.iface.iface = form.iface
|
[
"jesus.yeboyebo@gmail.com"
] |
jesus.yeboyebo@gmail.com
|
cfb35bf471134268b39e963d38b5d44009cbd811
|
f92dff3781ce21b5a1fd18f30ab52a3976d254f5
|
/backend/wallet/api/v1/urls.py
|
4befa8658a0e2aa3f590ee382d837b3ab9016fa4
|
[] |
no_license
|
crowdbotics-apps/test-24800
|
41270507a49b02bc43fc89822b541cd99cb84005
|
6e7d5bb77af12c5d2c6e3f9f46b7d4d39d185267
|
refs/heads/master
| 2023-03-11T22:27:56.763867
| 2021-03-02T07:29:45
| 2021-03-02T07:29:45
| 343,671,693
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 635
|
py
|
from django.urls import path, include
from rest_framework.routers import DefaultRouter
from .viewsets import (
PaymentTransactionViewSet,
TaskerPaymentAccountViewSet,
TaskerWalletViewSet,
PaymentMethodViewSet,
CustomerWalletViewSet,
)
router = DefaultRouter()
router.register("taskerpaymentaccount", TaskerPaymentAccountViewSet)
router.register("customerwallet", CustomerWalletViewSet)
router.register("paymentmethod", PaymentMethodViewSet)
router.register("paymenttransaction", PaymentTransactionViewSet)
router.register("taskerwallet", TaskerWalletViewSet)
urlpatterns = [
path("", include(router.urls)),
]
|
[
"team@crowdbotics.com"
] |
team@crowdbotics.com
|
322a37cf0131836f7a7093c2ddcb5b15b9851a03
|
a50e73d880fcea987cd2ddd4cc059a67cd7e22e0
|
/day06/求区间的整数和函数版.py
|
400f1072c91905899a2f52dd43cd1e51e98234c1
|
[] |
no_license
|
Icecarry/learn
|
31bed60d5b61201d30bfbaaf520e4e0146e10863
|
2af301b92c9143def9b4c278024d6d2d6e21f0b9
|
refs/heads/master
| 2021-04-06T07:45:11.938995
| 2018-03-13T06:40:54
| 2018-03-13T06:40:54
| 124,759,777
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 433
|
py
|
# Write a piece of code that sums all integers between 1 and 100
# def sum1(m, n):
# return summer
# sum1(1, 50)
# user input
num1 = int(input("请输入起始数:"))
num2 = int(input("请输入结束的数:"))
# print("%d - %d 之间的所有整数和为:" % (num1, num2), sum1(num1, num2))
# 运用循环累加
summer = 0
for x in range(num2 + 1):
if x >= num1:
summer += x
else:
continue
print(summer)
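# A possible "function version" (a sketch, not in the original file) matching the
# commented-out sum1 stub above, without the manual loop:
def sum_range(m, n):
    return sum(range(m, n + 1))
print(sum_range(num1, num2))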
|
[
"tyj1035@outlook.com"
] |
tyj1035@outlook.com
|
419c2a5f336d46136d581b7847fb23c29400c2e3
|
fe62d139012bdde8431b1b9c2b36b2a1491c6ad6
|
/temps1series0213plot.py
|
011f2b595599cef85f2ad0777247c5330ad45954
|
[] |
no_license
|
Gattocrucco/locs12
|
923467db0f49b3bf9e45c6a45c548751c972b130
|
fa99528fadc2d8e745486e427319ec67527cf866
|
refs/heads/master
| 2023-03-28T21:40:22.385179
| 2021-04-08T21:52:11
| 2021-04-08T21:52:11
| 356,057,491
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 4,782
|
py
|
import numpy as np
from matplotlib import pyplot as plt
from scipy import interpolate
import textbox
inphotons = 2 # in [3, 7, 10, 15, 20, 30, 40]
idcr = 1 # in [25, 250] cps/pdm
rate = 100 # cps
table = np.load('temps1series0213.npy')
figkw = dict(clear=True, sharex=True, figsize=[9, 7])
figs = []
axs = []
for wname in ['efficiency', 'fakerate', 'effvsrate', 'effvssigma']:
figkw['sharey'] = 'col' if wname == 'effvsrate' else True
fig, ax = plt.subplots(2, 2, num=f'temps1series0213plot-{wname}', **figkw)
figs.append(fig)
axs.append(ax)
axs = np.array(axs)
for ax in axs[0].reshape(-1):
if ax.is_first_col():
ax.set_ylabel('S1 detection efficiency')
if ax.is_last_row():
ax.set_xlabel('Threshold on filter output')
for ax in axs[1].reshape(-1):
if ax.is_first_col():
ax.set_ylabel('Fake rate [cps]')
if ax.is_last_row():
ax.set_xlabel('Threshold on filter output')
for ax in axs[2].reshape(-1):
if ax.is_first_col():
ax.set_ylabel('S1 detection efficiency')
if ax.is_last_row():
ax.set_xlabel('Fake rate [cps]')
for ax in axs[3].reshape(-1):
if ax.is_first_col():
ax.set_ylabel(f'Efficiency at fake rate {rate} cps')
if ax.is_last_row():
ax.set_xlabel('Template $\\sigma$ [ns]')
# the shape of table is over (DCR, VL, nphotons, sigma)
for ivl in range(table.shape[1]):
entries = table[idcr, ivl]
if np.count_nonzero(entries['done']) == 0:
continue
for ifilter, fname in enumerate(['ER', 'NR']):
qax = axs[:, ifilter, ivl]
for ifig, ax in enumerate(qax):
for inph, entry in enumerate(entries):
if not np.any(entry['done']) or ifig != 3:
continue
entry = entry[entry['done']]
nph = entry[0]['parameters']['nphotons']
plotkw = dict(
alpha=(inph + 1) / len(entries),
color='#600',
label=f'{nph}',
linestyle=['-', '--', '-.', ':'][inph % 4],
)
x = entry['parameters']['sigma']
interpkw = dict(assume_sorted=True, copy=False)
y = [
interpolate.interp1d(subent['rate'], subent[fname]['efficiency'], **interpkw)(rate)
for subent in entry
]
ax.plot(x, y, **plotkw)
for isigma, entry in enumerate(entries[inphotons]):
if not entry['done'] or ifig == 3:
continue
sigma = entry['parameters']['sigma']
plotkw = dict(
alpha=(isigma + 1) / len(entries[inphotons]),
color='#600',
label=f'{sigma:.3g}',
linestyle=['-', '--', '-.', ':'][isigma % 4],
)
if ifig == 0:
x = entry['threshold']
y = entry[fname]['effthr']
elif ifig == 1:
x = entry['threshold']
y = entry[fname]['ratethr']
elif ifig == 2:
x = entry['rate']
y = entry[fname]['efficiency']
changepoint = np.flatnonzero(np.diff(y))
start = max(0 , changepoint[ 0] - 1)
end = min(len(y), changepoint[-1] + 3)
sel = slice(start, end)
x = x[sel]
y = y[sel]
ax.plot(x, y, **plotkw)
for ax in qax:
s1type = 'ER' if entries[0, 0]['parameters']['VL'] < 1 else 'NR'
ax.set_title(f'{s1type} S1, {fname} filter')
ax.minorticks_on()
ax.grid(True, which='major', linestyle='--')
ax.grid(True, which='minor', linestyle=':')
for ifig, fax in enumerate(axs):
if ifig == 3:
legendtitle = 'Nphotons'
else:
legendtitle = 'Template $\\sigma$ [ns]'
fax[0, 0].legend(loc='best', fontsize='small', ncol=2, title=legendtitle)
params = table[idcr, 0, inphotons, 0]['parameters']
info = f"""\
DCR = {params['DCR'] * 1e9:.3g} cps/pdm
tres = 10 ns
nevents = 1000"""
if ifig != 3:
info = f"nphotons = {params['nphotons']}\n" + info
infoheight = 'lower' if ifig in [2, 3] else 'upper'
textbox.textbox(fax[0, 1], info, loc=f'{infoheight} right', fontsize='small')
if ifig == 1:
fax[0, 0].set_yscale('log')
if ifig == 2:
fax[0, 0].set_xscale('log')
for fig in figs:
fig.tight_layout()
fig.show()
|
[
"info@giacomopetrillo.com"
] |
info@giacomopetrillo.com
|
4ac44ea7572c0f7f3ccbbb100cda5acbab08db23
|
70e75a0b0ca56fd8318606cc093e13fe3d700227
|
/Programmers/2020_카카오_상반기_공채/src/3.자물쇠와열쇠.py
|
ac042fd66fc62a187dd6c8ce877832ab61ec41db
|
[] |
no_license
|
hanameee/Algorithm
|
ba4632797ff8ea999f37d2578f32c3c00da02b0f
|
bf1203544e1b44d2bbf929fd729e263278260313
|
refs/heads/master
| 2023-02-04T19:22:44.845863
| 2023-01-27T17:05:26
| 2023-01-27T17:05:26
| 220,486,014
| 4
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,486
|
py
|
from copy import deepcopy
def is_solved(graph, m, n):
for i in range(m-1, m-1+n):
for j in range(m-1, m-1+n):
if graph[i][j] != 1:
return False
return True
def rotate(key):
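    # rotate the key 90 degrees clockwise: element (i, j) moves to (j, len(key)-1-i)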
new_key = [[0]*len(key[0]) for _ in range(len(key[0]))]
for i in range(len(key)):
for j in range(len(key)):
new_key[j][len(key)-i-1] = key[i][j]
return new_key
def process(key, graph, m, n):
g_len = len(graph)
for i in range(m-1+n):
for j in range(m-1+n):
g = deepcopy(graph)
            # try applying the key at this offset
for key_i in range(len(key)):
if i+key_i > n+m-1:
break
for key_j in range(len(key)):
g[i+key_i][j+key_j] += key[key_i][key_j]
if is_solved(g, m, n):
return True
return False
def solution(key, lock):
m = len(key)
n = len(lock)
mp = [[0]*(n+2*(m-1)) for _ in range(n+2*(m-1))]
for i in range(m-1, m-1+n):
for j in range(m-1, m-1+n):
mp[i][j] = lock[i-m+1][j-m+1]
rotated_keys = [key]
for i in range(3):
new_key = rotate(key)
rotated_keys.append((new_key))
key = new_key
for key in rotated_keys:
result = process(key, mp, m, n)
if result:
return True
return False
print(solution([[0, 0, 0], [1, 1, 0], [1, 1, 1]],
[[1, 1, 1], [1, 1, 0], [1, 0, 1]]))
|
[
"hanameee@gmail.com"
] |
hanameee@gmail.com
|
8af58098a16f7c4b58e3049f703029d17be1afe1
|
c475cd8531a94ffae69cc92371d41531dbbddb6c
|
/Projects/bullet3-2.89/examples/pybullet/gym/pybullet_data/laikago/laikago.py
|
149c67714cd0441b6c31e290b2f2dca76dd6b712
|
[
"Apache-2.0",
"LicenseRef-scancode-free-unknown",
"Zlib"
] |
permissive
|
WolfireGames/overgrowth
|
72d3dd29cbd7254337265c29f8de3e5c32400114
|
594a2a4f9da0855304ee8cd5335d042f8e954ce1
|
refs/heads/main
| 2023-08-15T19:36:56.156578
| 2023-05-17T08:17:53
| 2023-05-17T08:20:36
| 467,448,492
| 2,264
| 245
|
Apache-2.0
| 2023-05-09T07:29:58
| 2022-03-08T09:38:54
|
C++
|
UTF-8
|
Python
| false
| false
| 3,056
|
py
|
import pybullet as p
import time
p.connect(p.GUI)
plane = p.loadURDF("plane.urdf")
p.setGravity(0,0,-9.8)
p.setTimeStep(1./500)
#p.setDefaultContactERP(0)
#urdfFlags = p.URDF_USE_SELF_COLLISION+p.URDF_USE_SELF_COLLISION_EXCLUDE_ALL_PARENTS
urdfFlags = p.URDF_USE_SELF_COLLISION
quadruped = p.loadURDF("laikago_toes.urdf",[0,0,.5],[0,0.5,0.5,0], flags = urdfFlags,useFixedBase=False)
#enable collision between lower legs
for j in range (p.getNumJoints(quadruped)):
print(p.getJointInfo(quadruped,j))
#2,5,8 and 11 are the lower legs
lower_legs = [2,5,8,11]
for l0 in lower_legs:
for l1 in lower_legs:
if (l1>l0):
enableCollision = 1
print("collision for pair",l0,l1, p.getJointInfo(quadruped,l0)[12],p.getJointInfo(quadruped,l1)[12], "enabled=",enableCollision)
p.setCollisionFilterPair(quadruped, quadruped, 2,5,enableCollision)
jointIds=[]
paramIds=[]
jointOffsets=[]
jointDirections=[-1,1,1,1,1,1,-1,1,1,1,1,1]
jointAngles=[0,0,0,0,0,0,0,0,0,0,0,0]
for i in range (4):
jointOffsets.append(0)
jointOffsets.append(-0.7)
jointOffsets.append(0.7)
maxForceId = p.addUserDebugParameter("maxForce",0,100,20)
for j in range (p.getNumJoints(quadruped)):
p.changeDynamics(quadruped,j,linearDamping=0, angularDamping=0)
info = p.getJointInfo(quadruped,j)
#print(info)
jointName = info[1]
jointType = info[2]
if (jointType==p.JOINT_PRISMATIC or jointType==p.JOINT_REVOLUTE):
jointIds.append(j)
p.getCameraImage(480,320)
p.setRealTimeSimulation(0)
joints=[]
with open("data1.txt","r") as filestream:
for line in filestream:
maxForce = p.readUserDebugParameter(maxForceId)
currentline = line.split(",")
frame = currentline[0]
t = currentline[1]
joints=currentline[2:14]
for j in range (12):
targetPos = float(joints[j])
p.setJointMotorControl2(quadruped,jointIds[j],p.POSITION_CONTROL,jointDirections[j]*targetPos+jointOffsets[j], force=maxForce)
p.stepSimulation()
for lower_leg in lower_legs:
#print("points for ", quadruped, " link: ", lower_leg)
pts = p.getContactPoints(quadruped,-1, lower_leg)
#print("num points=",len(pts))
#for pt in pts:
# print(pt[9])
time.sleep(1./500.)
index = 0
for j in range (p.getNumJoints(quadruped)):
p.changeDynamics(quadruped,j,linearDamping=0, angularDamping=0)
info = p.getJointInfo(quadruped,j)
js = p.getJointState(quadruped,j)
#print(info)
jointName = info[1]
jointType = info[2]
if (jointType==p.JOINT_PRISMATIC or jointType==p.JOINT_REVOLUTE):
paramIds.append(p.addUserDebugParameter(jointName.decode("utf-8"),-4,4,(js[0]-jointOffsets[index])/jointDirections[index]))
index=index+1
p.setRealTimeSimulation(1)
while (1):
for i in range(len(paramIds)):
c = paramIds[i]
targetPos = p.readUserDebugParameter(c)
maxForce = p.readUserDebugParameter(maxForceId)
p.setJointMotorControl2(quadruped,jointIds[i],p.POSITION_CONTROL,jointDirections[i]*targetPos+jointOffsets[i], force=maxForce)
|
[
"max@autious.net"
] |
max@autious.net
|
2bb89f735b7789b3fe3c0e3995cc2bbf484329da
|
986a8c617725cb707dd21c5bd1487dd9d46adaa0
|
/mutation/example_2.py
|
a9d60729aa1d8151f26eee7697bd93132e37abce
|
[] |
no_license
|
tohfaakib/python_playground
|
6ee497cc7011be798e68f74ce331c24bd7a4edab
|
13f2b2e179e2c4f4198ac99965c53e1ddcf436e0
|
refs/heads/master
| 2020-09-16T09:46:24.231034
| 2019-12-24T05:06:24
| 2019-12-24T05:06:24
| 223,732,363
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 392
|
py
|
def add_to(num, target=[]):
target.append(num)
return target
print(add_to(1))
# output: [1]
print(add_to(2))
# output: [1, 2]
print(add_to(3))
# output: [1, 2, 3]
def add_to_2(num, target=None):
if target is None:
target = []
target.append(num)
return target
print(add_to_2(1))
# output: [1]
print(add_to_2(2))
# output: [2]
print(add_to_2(3))
# output: [3]
|
[
"tohfaakib@my.smccd.edu"
] |
tohfaakib@my.smccd.edu
|
1aa3293f0f6dade194eb63b0104f6ef1b168dc27
|
654f400751dfb180a937e0f18f6b722119a5b4f1
|
/tests/unitario/zend_django/parametros/test_parametro_models.py
|
8756743f0a08311b89c4a2e539506ed56101ebbb
|
[] |
no_license
|
imagilex/tereapps
|
fae8bcb18ad4276f09a6ef6887d0c685c7a5522a
|
51d4da8dab1d184cb7dcfe144ac8d2405a179028
|
refs/heads/main
| 2023-07-04T12:19:32.943411
| 2021-08-10T19:41:00
| 2021-08-10T19:41:00
| 343,847,920
| 0
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 6,310
|
py
|
import pytest
from django.contrib.auth.models import User
from django.test import TestCase
from zend_django.models import ParametroSistema
from zend_django.models import ParametroUsuario
from zend_django.models import ParametroUsuarioValor
from zend_django.parametros_models import PARAM_TYPES
from zend_django.parametros_models import PARAM_TYPES_Tuples
from zend_django.parametros_models import get_param_type_to_show
pytestmark = pytest.mark.django_db
class TestParametrosModelsFunctions(TestCase):
def test_get_param_type_to_show(self):
for tipo in PARAM_TYPES_Tuples:
self.assertEqual(get_param_type_to_show(tipo[0]), tipo[1])
class TestParametroSistemaModel(TestCase):
def setUp(self):
self.objs = [
ParametroSistema.objects.get_or_create(
seccion='seccion',
nombre='nombre',
nombre_para_mostrar='mostrar como',
tipo=PARAM_TYPES['CADENA'],
)[0],
ParametroSistema.objects.get_or_create(
seccion='seccion',
nombre='nombre_2',
nombre_para_mostrar='mostrar como',
valor='valor_default',
tipo=PARAM_TYPES['IMAGEN'],
)[0]
]
def test_to_string(self):
self.assertEqual(
f"{self.objs[0]}",
self.objs[0].nombre_para_mostrar)
self.assertEqual(
f"{self.objs[1]}",
f"{self.objs[1].nombre_para_mostrar}: {self.objs[1].valor}")
def test_tipo_txt(self):
for obj in self.objs:
self.assertEqual(obj.tipo_txt, get_param_type_to_show(obj.tipo))
def test_get(self):
for obj in self.objs:
self.assertEqual(
obj.valor, ParametroSistema.get(obj.seccion, obj.nombre))
self.assertIn("no encontrado", ParametroSistema.get(
self.objs[0].seccion, "inexistente"))
self.assertIn("no encontrado", ParametroSistema.get(
"inexistente", self.objs[0].nombre))
self.assertIn("no encontrado", ParametroSistema.get(
"inexistente", "inexistente"))
class TestParametroUsuario(TestCase):
def setUp(self):
self.objs = [
ParametroUsuario.objects.get_or_create(
seccion='seccion',
nombre='nombre',
tipo=PARAM_TYPES['CADENA'],
)[0],
ParametroUsuario.objects.get_or_create(
seccion='seccion',
nombre='nombre_2',
valor_default='valor_default',
tipo=PARAM_TYPES['IMAGEN'],
)[0]
]
self.usrs = [
User.objects.get_or_create(username="testuser")[0],
            User.objects.get_or_create(username="testuser2")[0],
]
self.values = [
ParametroUsuarioValor.objects.get_or_create(
user=self.usrs[0],
parametro=self.objs[0],
valor="Valor"
)[0],
]
def test_to_string(self):
self.assertEqual(
f"{self.objs[0]}",
self.objs[0].nombre)
self.assertEqual(
f"{self.objs[1]}",
f"{self.objs[1].nombre}: {self.objs[1].valor_default}")
def test_tipo_txt(self):
for obj in self.objs:
self.assertEqual(obj.tipo_txt, get_param_type_to_show(obj.tipo))
def test_get_default(self):
for obj in self.objs:
self.assertEqual(
obj.valor_default,
ParametroUsuario.get_default(obj.seccion, obj.nombre))
self.assertRaises(
ParametroUsuario.DoesNotExist, ParametroUsuario.get_default,
self.objs[0].seccion, "inexistente")
self.assertRaises(
ParametroUsuario.DoesNotExist, ParametroUsuario.get_default,
"inexistente", self.objs[0].nombre)
self.assertRaises(
ParametroUsuario.DoesNotExist, ParametroUsuario.get_default,
"inexistente", "inexistente")
def test_get_value(self):
self.assertEqual(ParametroUsuario.get_valor(
self.usrs[0], "seccion", "nombre"), "Valor")
self.assertEqual(ParametroUsuario.get_valor(
self.usrs[0], "seccion", "nombre_2"), "valor_default")
self.assertEqual("", ParametroUsuario.get_valor(
self.usrs[0], "inexistente", "nombre"))
self.assertEqual("", ParametroUsuario.get_valor(
self.usrs[0], "seccion", "inexistente"))
self.assertEqual("", ParametroUsuario.get_valor(
self.usrs[0], "inexistente", "inexistente"))
def test_set_valor(self):
cnt1 = len(ParametroUsuarioValor.objects.all())
self.assertTrue(ParametroUsuario.set_valor(
self.usrs[0], "seccion", "nombre", "Valor"))
self.assertTrue(ParametroUsuario.set_valor(
self.usrs[0], "seccion", "nombre_2", "Valor"))
self.assertFalse(ParametroUsuario.set_valor(
self.usrs[0], "inexistente", "nombre", "Valor"))
self.assertFalse(ParametroUsuario.set_valor(
self.usrs[0], "seccion", "inexistente", "Valor"))
self.assertFalse(ParametroUsuario.set_valor(
self.usrs[0], "inexistente", "inexistente", "Valor"))
cnt2 = len(ParametroUsuarioValor.objects.all())
        self.assertGreaterEqual(cnt2, cnt1)
class TestParametroUsuarioValor(TestCase):
def setUp(self):
self.usrs = [
User.objects.get_or_create(username="testuser")[0],
User.objects.get_or_create(username="testuser2")[0]
]
self.param = ParametroUsuario.objects.get_or_create(
seccion='seccion',
nombre='nombre',
tipo=PARAM_TYPES['CADENA'],
)[0]
self.objs = [
ParametroUsuarioValor.objects.get_or_create(
user=self.usrs[0],
parametro=self.param,
valor="Valor"
)[0],
ParametroUsuarioValor.objects.get_or_create(
user=self.usrs[1],
parametro=self.param,
)[0],
]
def test_to_string(self):
self.assertEqual("Valor", f'{self.objs[0]}')
self.assertEqual("", f'{self.objs[1]}')
|
[
"maths.unam@gmail.com"
] |
maths.unam@gmail.com
|
931b28badc8e29c492e27998419342b19c1db54c
|
0e1e643e864bcb96cf06f14f4cb559b034e114d0
|
/Exps_7_v3/doc3d/I_w_M_to_W_focus_Zok_div/ch032/wiColorJ/Sob_k05_s001_EroM/pyr_Tcrop255_p60_j15/pyr_1s/L5/step09_1side_L5.py
|
7219559e641a143505af65d8a2b9b3627b574ddb
|
[] |
no_license
|
KongBOy/kong_model2
|
33a94a9d2be5b0f28f9d479b3744e1d0e0ebd307
|
1af20b168ffccf0d5293a393a40a9fa9519410b2
|
refs/heads/master
| 2022-10-14T03:09:22.543998
| 2022-10-06T11:33:42
| 2022-10-06T11:33:42
| 242,080,692
| 3
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 6,182
|
py
|
#############################################################################################################################################################################################################
#############################################################################################################################################################################################################
### add kong_model2 to sys.path
import os
from tkinter import S
code_exe_path = os.path.realpath(__file__)    ### path of the step10_b.py currently being executed
code_exe_path_element = code_exe_path.split("\\")    ### split the path to find which level kong_model2 sits at
kong_layer = code_exe_path_element.index("kong_model2")    ### find the level of kong_model2
kong_model2_dir = "\\".join(code_exe_path_element[:kong_layer + 1])    ### locate the kong_model2 dir
import sys    ### add kong_model2 to sys.path
sys.path.append(kong_model2_dir)
# print(__file__.split("\\")[-1])
# print(" code_exe_path:", code_exe_path)
# print(" code_exe_path_element:", code_exe_path_element)
# print(" kong_layer:", kong_layer)
# print(" kong_model2_dir:", kong_model2_dir)
#############################################################################################################################################################################################################
from step08_b_use_G_generate_I_w_M_to_Wx_Wy_Wz_combine import I_w_M_to_W
from step08_b_use_G_generate_0_util import Tight_crop, Color_jit
from step09_c_train_step import Train_step_I_w_M_to_W
from step09_d_KModel_builder_combine_step789 import KModel_builder, MODEL_NAME
color_jit = Color_jit(do_ratio=0.6)
use_what_gen_op = I_w_M_to_W( separate_out=True, focus=True, tight_crop=Tight_crop(pad_size=60, resize=(255, 255), jit_scale= 0) )
use_what_train_step = Train_step_I_w_M_to_W( separate_out=True, focus=True, tight_crop=Tight_crop(pad_size=60, resize=(255, 255), jit_scale= 15), color_jit=color_jit )
use_hid_ch = 32
import time
start_time = time.time()
###############################################################################################################################################################################################
###############################################################################################################################################################################################
########################################################### Block1
### Block1
#########################################################################################
pyramid_1side_1 = [1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1]
pyramid_1side_2 = [1, 1, 0, 0, 0, 0, 0, 0, 0, 1, 1]
pyramid_1side_3 = [1, 1, 1, 0, 0, 0, 0, 0, 1, 1, 1]
pyramid_1side_4 = [1, 1, 1, 1, 0, 0, 0, 1, 1, 1, 1]
pyramid_1side_5 = [1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1]
pyramid_1side_6 = [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1]
#########################################################################################
ch032_pyramid_1side_1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch=use_hid_ch, depth_level=5, out_ch=1, d_amount=3, bottle_divide=True, unet_acti="sigmoid", conv_block_num=pyramid_1side_1, ch_upper_bound= 2 ** 14).set_gen_op( use_what_gen_op ).set_train_step( use_what_train_step )
ch032_pyramid_1side_2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch=use_hid_ch, depth_level=5, out_ch=1, d_amount=3, bottle_divide=True, unet_acti="sigmoid", conv_block_num=pyramid_1side_2, ch_upper_bound= 2 ** 14).set_gen_op( use_what_gen_op ).set_train_step( use_what_train_step )
ch032_pyramid_1side_3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch=use_hid_ch, depth_level=5, out_ch=1, d_amount=3, bottle_divide=True, unet_acti="sigmoid", conv_block_num=pyramid_1side_3, ch_upper_bound= 2 ** 14).set_gen_op( use_what_gen_op ).set_train_step( use_what_train_step )
ch032_pyramid_1side_4 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch=use_hid_ch, depth_level=5, out_ch=1, d_amount=3, bottle_divide=True, unet_acti="sigmoid", conv_block_num=pyramid_1side_4, ch_upper_bound= 2 ** 14).set_gen_op( use_what_gen_op ).set_train_step( use_what_train_step )
ch032_pyramid_1side_5 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch=use_hid_ch, depth_level=5, out_ch=1, d_amount=3, bottle_divide=True, unet_acti="sigmoid", conv_block_num=pyramid_1side_5, ch_upper_bound= 2 ** 14).set_gen_op( use_what_gen_op ).set_train_step( use_what_train_step )
ch032_pyramid_1side_6 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch=use_hid_ch, depth_level=5, out_ch=1, d_amount=3, bottle_divide=True, unet_acti="sigmoid", conv_block_num=pyramid_1side_6, ch_upper_bound= 2 ** 14).set_gen_op( use_what_gen_op ).set_train_step( use_what_train_step )
#########################################################################################
###############################################################################################################################################################################################
if(__name__ == "__main__"):
import numpy as np
print("build_model cost time:", time.time() - start_time)
data = np.zeros(shape=(1, 512, 512, 1))
use_model = ch032_pyramid_1side_4
use_model = use_model.build()
result = use_model.generator(data)
print(result.shape)
from kong_util.tf_model_util import Show_model_weights
Show_model_weights(use_model.generator)
use_model.generator.summary()
print(use_model.model_describe)
|
[
"s89334roy@yahoo.com.tw"
] |
s89334roy@yahoo.com.tw
|
f8ea905f492854fd8ecc472ff5ac65bb0b66c53f
|
2aa5d0ae8f74ebb0026c715f8c871388bed4427c
|
/nurse/tests.py
|
71daa82205d698b13836a4c9030dcb85e07285e1
|
[] |
no_license
|
surajit003/valentis
|
1614c46b77c5827ea187b47284f49d46584d7291
|
53fd97bd17f36ce14b2be28bb08d5b65abde8d82
|
refs/heads/master
| 2022-11-26T23:18:41.982907
| 2020-08-09T10:16:39
| 2020-08-09T10:16:39
| 285,679,287
| 1
| 2
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,319
|
py
|
import unittest
from django.urls import reverse
from django.test import Client
from .models import Nurse
from django.contrib.auth.models import User
from django.contrib.auth.models import Group
from django.contrib.contenttypes.models import ContentType
def create_django_contrib_auth_models_user(**kwargs):
defaults = {}
defaults["username"] = "username"
defaults["email"] = "username@tempurl.com"
defaults.update(**kwargs)
return User.objects.create(**defaults)
def create_django_contrib_auth_models_group(**kwargs):
defaults = {}
defaults["name"] = "group"
defaults.update(**kwargs)
return Group.objects.create(**defaults)
def create_django_contrib_contenttypes_models_contenttype(**kwargs):
defaults = {}
defaults.update(**kwargs)
return ContentType.objects.create(**defaults)
def create_models(**kwargs):
defaults = {}
defaults["systolic"] = "systolic"
defaults["diastolic"] = "diastolic"
defaults["temperature"] = "temperature"
defaults["oxygen_saturation"] = "oxygen_saturation"
defaults["urinalysis"] = "urinalysis"
defaults["heart_rate"] = "heart_rate"
defaults["others"] = "others"
defaults["attending_nurse"] = "attending_nurse"
defaults["patient_no"] = "patient_no"
defaults["first_name"] = "first_name"
defaults["last_name"] = "last_name"
defaults["middle_name"] = "middle_name"
defaults.update(**kwargs)
return Nurse.objects.create(**defaults)
class modelsViewTest(unittest.TestCase):
'''
Tests for Nurse
'''
def setUp(self):
self.client = Client()
def test_list_models(self):
url = reverse('nurse_models_list')
response = self.client.get(url)
self.assertEqual(response.status_code, 200)
def test_create_models(self):
url = reverse('nurse_models_create')
data = {
"systolic": "systolic",
"diastolic": "diastolic",
"temperature": "temperature",
"oxygen_saturation": "oxygen_saturation",
"urinalysis": "urinalysis",
"heart_rate": "heart_rate",
"others": "others",
"attending_nurse": "attending_nurse",
"patient_no": "patient_no",
}
response = self.client.post(url, data=data)
self.assertEqual(response.status_code, 302)
def test_detail_models(self):
models = create_models()
url = reverse('nurse_models_detail', args=[models.slug,])
response = self.client.get(url)
self.assertEqual(response.status_code, 200)
def test_update_models(self):
models = create_models()
data = {
"systolic": "systolic",
"diastolic": "diastolic",
"temperature": "temperature",
"oxygen_saturation": "oxygen_saturation",
"urinalysis": "urinalysis",
"heart_rate": "heart_rate",
"others": "others",
"attending_nurse": "attending_nurse",
"patient_no": "patient_no",
}
url = reverse('nurse_models_update', args=[models.slug,])
response = self.client.post(url, data)
self.assertEqual(response.status_code, 302)
|
[
"surajit@poweredbypeople.io"
] |
surajit@poweredbypeople.io
|
fc593f18e687cb291d60be67c2b8038adda0ff0a
|
c3c7398ec14865ea34c7f03aa5e012ddb19f0d5b
|
/app/models.py
|
e35cb5a4cf60d86793de1766edc457b72c9f70fa
|
[] |
no_license
|
mzm5466/blog
|
0e022f0ce85a0079cb72ffd9f472c7684f94d9fb
|
13625fe7028a0df11a30d7de32751e34d681de00
|
refs/heads/master
| 2021-01-23T16:51:58.296591
| 2018-11-17T06:05:50
| 2018-11-17T06:05:50
| 102,748,039
| 0
| 0
| null | 2018-11-12T23:28:57
| 2017-09-07T14:36:32
|
JavaScript
|
UTF-8
|
Python
| false
| false
| 1,318
|
py
|
#!/usr/bin/python
#-*- coding:utf-8 -*-
from __future__ import unicode_literals
from django.db import models
from DjangoUeditor.models import UEditorField
from django.core.urlresolvers import reverse
KIND_CHOICES=(
('python','python'),
('c++','c++'),
('java','java'),
('javascript','javascript'),
('html','html'),
('css','css'),
('linux','linux'),
)
# Create your models here.
class Moment(models.Model):
title=models.CharField(u'标题',max_length=30,default="请在此输入标题")
shortcontent=models.TextField(u'短梗概',max_length=100,default="请在此输入梗概")
content = UEditorField(u'内容', height=300, width=1000,
default=u'', blank=True, imagePath="",
toolbars='full', filePath='')#models.TextField(u'内容',max_length=3000,default="请在此处输入")
user_name=models.CharField(u'作者',max_length=20,default='匿名')
kind=models.CharField(u'文章类型',max_length=20,choices=KIND_CHOICES,default=KIND_CHOICES[0])
createtime=models.DateTimeField(auto_now=True)
    def __unicode__(self):  # in Python 3, use __str__ instead of __unicode__
return self.title
class Meta:
verbose_name = '博客文章'
verbose_name_plural = '博客文章'
ordering = ['-createtime']
|
[
"you@example.com"
] |
you@example.com
|
cc47cf7d57fa133086e9fd19950f7804b7feb362
|
2f2d2ceb7eb79e1d441ed278a92ea484dee8501e
|
/analysis/python/scattering/gb_scatt_fit.py
|
a4edccd2010cf300912f2ac406c7cc461027e8bc
|
[] |
no_license
|
rtreharne/SZO
|
b20943b1afaa10f0e4fc032a1f2955eda4fd54b5
|
2fb71f85cd3c19f46782c528d7357a1ae5dc49e5
|
refs/heads/master
| 2021-01-10T04:28:13.662690
| 2015-12-17T10:16:28
| 2015-12-17T10:16:28
| 48,166,656
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,927
|
py
|
#! /usr/bin/env python
from numpy import *
from pylab import *
from math import *
from pylab import *
from matplotlib.widgets import Slider
import matplotlib.pyplot as plt
import matplotlib.cm as cm
import matplotlib.mlab as mlab
import tkFileDialog
import numpy as np
import nvsmob
import nelmin
from matplotlib.font_manager import FontProperties
#matplotlib.rc('xtick', labelsize = 15)
#matplotlib.rc('ytick', labelsize = 15, )
class Page:
fig = figure(figsize = (6,5))
#subplots_adjust(bottom=0.35)
global e, m0, e0, hbar, k, T, kT, m
e = 1.602e-19
e0 = 8.85e-12
m0 = 9.11e-31
hbar = 1.055e-34
k = 1.38e-23
T = 300
kT = k*T
m = 0.4*m0
def __init__(self):
self.x = arange(1e19, 1e21, 1e18)
self.ax = subplot(111)
self.xdata, self.ydata, self.greenx, self.greeny= nvsmob.mob()
p = ([50, 1.8, 9.25, 0.54])
p = self.fit(p)
mu_total, mu1, mu2 = self.crunch(p)
self.update(self.x, mu_total, mu1, mu2)
print p
def crunch(self, p):
self.index = 0
mumax = 50
x = self.x
L = p[0]*1e-7
Nt = p[1]*1e14
A = p[2]/e
B = p[3]
mu0 = e*L/sqrt(2*pi*m*kT)
Eb = e**2*Nt**2/(8*8.4*e0*x)
Ef = hbar**2*(3*pi**2*x*1e6)**(2.0/3)/(2*m)
mu1 = mu0*exp(-Eb/kT)
mu2 = (mumax - mu0)/(1+exp(-A*(Ef - (B*e) - Eb)))
mu_total = mu1+mu2
return mu_total, mu1, mu2
def update(self, x, mu_total, mu1, mu2):
sigma = 1e-20
for i in range (0,len(x)):
x[i] = sigma*x[i]
for i in range (0,len(self.xdata)):
self.xdata[i] = sigma*self.xdata[i]
self.greenx[i] = sigma*self.greenx[i]
self.ax.clear()
line3, = self.ax.plot(x, mu_total, linewidth = 3, alpha = 0.75)
data1, = self.ax.plot(self.xdata, self.ydata, 'o', color = 'red', alpha = 0.7, markersize = 8)
data2, = self.ax.plot(self.greenx, self.greeny, '^', color = 'green', alpha = 0.7, markersize = 8)
#self.ax.plot(self.greenx, self.greeny, 'o', color = 'green')
line1, = self.ax.plot(x, mu1, '--', linewidth = 2, color = 'orange')
line2, = self.ax.plot(x, mu2, '-', linewidth = 2, color = 'purple')
line2.set_dashes([8, 4, 2, 4, 2, 4])
#self.ax.set_xscale('log')
self.ax.set_ylim(0, 18)
self.ax.set_ylabel(r'$\mu_e$ (cm$^2$V$^{-1}$s$^{-1}$)', fontsize = 15)
self.ax.set_xlim(0.5, 5)
self.ax.set_xlabel(r'$n_e$ ($\times10^{20}$ cm$^{-3}$)', fontsize = 15)
#self.ax.set_xscale('log')
fontP = FontProperties()
fontP.set_size('large')
leg1 = self.ax.legend((data1,data2,line1, line2,line3), (r'data ($< 0.65\%$ wt. SiO$_{2}$)',r'data ($>0.65\%$ wt. SiO$_{2}$)',r'$\mu_{gb}=\mu_0\exp(-\frac{\phi}{k_BT})$', r'$\mu_t=\frac{\mu_{ii}-\mu_{gb}}{1+\exp[-\alpha(E_f-\beta\phi)]}$',r'$\mu_{eff}=\mu_{gb} + \mu_t$'), 'upper left', prop = fontP,fancybox=False)
leg1.get_frame().set_alpha(0.0)
def func(self, p):
sum = 0
x = self.xdata
y = self.ydata
mumax = 50
L = p[0]*1e-7
Nt = p[1]*1e14
A = p[2]/e
B = p[3]
mu0 = e*L/sqrt(2*pi*m*kT)
Eb, Ef, mu1, mu2, mu_total = [],[],[],[],[]
for i in range (0, len(x)):
Eb.append(e**2*Nt**2/(8*8.4*e0*x[i]))
Ef.append(hbar**2*(3*pi**2*x[i]*1e6)**(2.0/3)/(2*m))
mu1.append(mu0*exp(-Eb[i]/kT))
mu2.append((mumax - mu0)/(1+exp(-A*(Ef[i] - (B*e) - Eb[i]))))
mu_total.append(mu1[i] + mu2[i])
sum += sqrt((mu_total[i]-y[i])**2)
sum = sum/len(x)
return sum
def fit(self, p):
for i in range (0, 5):
result, fx, conv_flag, nfe, res = nelmin.minimize(self.func, p)
p = result
return result
graph = Page()
show()
|
[
"R.Treharne@liverpool.ac.uk"
] |
R.Treharne@liverpool.ac.uk
|
21f24ea6ddca2a3fb1ffdbb15429ef55979c5e7d
|
439f3bbc4b9a84b27052b2d1d5ea166bca2e1498
|
/setup.py
|
dff003eddf7047204a0ae39f94ca37da91cabe76
|
[
"Apache-2.0"
] |
permissive
|
zeroyou/AndroidViewClient
|
dc52c821b11e96decf6066b670165c154101bc81
|
06cefca324d3ab255e4367990a5d1389b0a39d3d
|
refs/heads/master
| 2022-12-25T00:57:29.432351
| 2020-09-20T00:41:47
| 2020-09-20T00:41:47
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,135
|
py
|
#!/usr/bin/env python
from setuptools import setup, find_packages
setup(name='androidviewclient',
version='20.0.0b5',
description='''AndroidViewClient is a 100% pure python library and tools
that simplifies test script creation providing higher level
operations and the ability of obtaining the tree of Views present at
any given moment on the device or emulator screen.
''',
license='Apache',
keywords='android uiautomator viewclient monkeyrunner test automation',
author='Diego Torres Milano',
author_email='dtmilano@gmail.com',
url='https://github.com/dtmilano/AndroidViewClient/',
packages=find_packages('src'),
package_dir={'':'src'},
package_data={'':['*.png']},
include_package_data=True,
scripts=['tools/culebra', 'tools/dump'],
classifiers=['Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: Apache Software License'],
install_requires=['setuptools', 'requests', 'numpy', 'matplotlib', 'culebratester-client >= 2.0.6'],
)
|
[
"dtmilano@gmail.com"
] |
dtmilano@gmail.com
|
a55eb2498c338eae709672405c5607d332a40235
|
c81d7dfef424b088bf2509a1baf406a80384ea5a
|
/venv/Lib/site-packages/whitenoise/middleware.py
|
f359321a7b53c64e2d2620204bde7381490358d6
|
[] |
no_license
|
Goutham2591/OMK_PART2
|
111210d78fc4845481ed55c852b8f2f938918f4a
|
cb54fb21ebf472bffc6ee4f634bf1e68303e113d
|
refs/heads/master
| 2022-12-10T01:43:08.213010
| 2018-04-05T02:09:41
| 2018-04-05T02:09:41
| 124,828,094
| 0
| 1
| null | 2022-12-07T23:43:03
| 2018-03-12T03:20:14
|
Python
|
UTF-8
|
Python
| false
| false
| 1,545
|
py
|
from __future__ import absolute_import
from django.http import FileResponse
from whitenoise.django import DjangoWhiteNoise
class WhiteNoiseMiddleware(DjangoWhiteNoise):
"""
Wrap DjangoWhiteNoise to allow it to function as Django middleware, rather
than WSGI middleware
This functions as both old- and new-style middleware, so can be included in
either MIDDLEWARE or MIDDLEWARE_CLASSES.
"""
def __init__(self, get_response=None):
self.get_response = get_response
# We pass None for `application`
super(WhiteNoiseMiddleware, self).__init__(None)
def __call__(self, request):
response = self.process_request(request)
if response is None:
response = self.get_response(request)
return response
def process_request(self, request):
if self.autorefresh:
static_file = self.find_file(request.path_info)
else:
static_file = self.files.get(request.path_info)
if static_file is not None:
return self.serve(static_file, request)
def serve(self, static_file, request):
response = static_file.get_response(request.method, request.META)
status = int(response.status)
http_response = FileResponse(response.file or (), status=status)
# Remove default content-type
del http_response['content-type']
for key, value in response.headers:
http_response[key] = value
return http_response
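# A minimal settings sketch (an assumption, not part of this module) of how the class is
# typically enabled in a Django project, listed right after SecurityMiddleware:
#
#   MIDDLEWARE = [
#       'django.middleware.security.SecurityMiddleware',
#       'whitenoise.middleware.WhiteNoiseMiddleware',
#       # ... remaining middleware ...
#   ]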
|
[
"amatar@unomaha.edu"
] |
amatar@unomaha.edu
|
34ae4058e8b7b076fbec0cd6c034ebe978798f7f
|
1dacbf90eeb384455ab84a8cf63d16e2c9680a90
|
/pkgs/tornado-4.4.1-py27_0/lib/python2.7/site-packages/tornado/test/tcpserver_test.py
|
c01c04ddfb2baf903a76fbf5dfa182c3c2d21172
|
[
"Apache-2.0",
"BSD-3-Clause",
"LicenseRef-scancode-unknown"
] |
permissive
|
wangyum/Anaconda
|
ac7229b21815dd92b0bd1c8b7ec4e85c013b8994
|
2c9002f16bb5c265e0d14f4a2314c86eeaa35cb6
|
refs/heads/master
| 2022-10-21T15:14:23.464126
| 2022-10-05T12:10:31
| 2022-10-05T12:10:31
| 76,526,728
| 11
| 10
|
Apache-2.0
| 2022-10-05T12:10:32
| 2016-12-15T05:26:12
|
Python
|
UTF-8
|
Python
| false
| false
| 1,361
|
py
|
from __future__ import absolute_import, division, print_function, with_statement
import socket
from tornado import gen
from tornado.iostream import IOStream
from tornado.log import app_log
from tornado.stack_context import NullContext
from tornado.tcpserver import TCPServer
from tornado.testing import AsyncTestCase, ExpectLog, bind_unused_port, gen_test
class TCPServerTest(AsyncTestCase):
@gen_test
def test_handle_stream_coroutine_logging(self):
# handle_stream may be a coroutine and any exception in its
# Future will be logged.
class TestServer(TCPServer):
@gen.coroutine
def handle_stream(self, stream, address):
yield gen.moment
stream.close()
1 / 0
server = client = None
try:
sock, port = bind_unused_port()
with NullContext():
server = TestServer()
server.add_socket(sock)
client = IOStream(socket.socket())
with ExpectLog(app_log, "Exception in callback"):
yield client.connect(('localhost', port))
yield client.read_until_close()
yield gen.moment
finally:
if server is not None:
server.stop()
if client is not None:
client.close()
|
[
"wgyumg@mgail.com"
] |
wgyumg@mgail.com
|
048da83cda17a4a360c09c39781995cf2d3af27f
|
3691259d4be62b60d8d52f38b36d6a24e5fd4536
|
/docs/examples/compute/cloudsigma/create_vlan_subscription.py
|
a29e2394267225a7fe1141d0ed6edefb84091bdc
|
[
"Apache-2.0",
"LicenseRef-scancode-unknown-license-reference"
] |
permissive
|
chenjiang1985/libcloud
|
f385fac278777c2bbfedaf440d353c9ad9eb5c69
|
587212da626dfe0e2936737108bcc49d666cf4b4
|
refs/heads/master
| 2021-07-16T14:29:21.821490
| 2019-11-27T02:20:43
| 2019-11-27T02:20:43
| 222,844,781
| 1
| 2
|
Apache-2.0
| 2020-10-27T22:06:36
| 2019-11-20T03:41:31
|
Python
|
UTF-8
|
Python
| false
| false
| 376
|
py
|
from libcloud.compute.types import Provider
from libcloud.compute.providers import get_driver
cls = get_driver(Provider.CLOUDSIGMA)
driver = cls('username', 'password', region='zrh', api_version='2.0')
subscription = driver.ex_create_subscription(amount=1, period='30 days',
resource='vlan', auto_renew=True)
print(subscription)
|
[
"jacob.cj@alibaba-inc.com"
] |
jacob.cj@alibaba-inc.com
|
58b1d1422474f6027aa3f69edddb42e44fbb2a52
|
ce76b3ef70b885d7c354b6ddb8447d111548e0f1
|
/life_and_point/public_number/go_person_with_week/time/small_fact/tell_high_number.py
|
8f2dcc2a9607cc6373040ba43be5bb60c8ac08a7
|
[] |
no_license
|
JingkaiTang/github-play
|
9bdca4115eee94a7b5e4ae9d3d6052514729ff21
|
51b550425a91a97480714fe9bc63cb5112f6f729
|
refs/heads/master
| 2021-01-20T20:18:21.249162
| 2016-08-19T07:20:12
| 2016-08-19T07:20:12
| 60,834,519
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 191
|
py
|
#! /usr/bin/env python
def thing(str_arg):
way(str_arg)
print('seem_great_time')
def way(str_arg):
print(str_arg)
if __name__ == '__main__':
thing('be_work_to_next_time')
|
[
"jingkaitang@gmail.com"
] |
jingkaitang@gmail.com
|
f83fa49d5475aa107fb54496baa054affa656ec8
|
9743d5fd24822f79c156ad112229e25adb9ed6f6
|
/xai/brain/wordbase/adverbs/_truest.py
|
f2ef775949b9144ce37ca8397bf6342c1da23e39
|
[
"MIT"
] |
permissive
|
cash2one/xai
|
de7adad1758f50dd6786bf0111e71a903f039b64
|
e76f12c9f4dcf3ac1c7c08b0cc8844c0b0a104b6
|
refs/heads/master
| 2021-01-19T12:33:54.964379
| 2017-01-28T02:00:50
| 2017-01-28T02:00:50
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 230
|
py
|
from xai.brain.wordbase.adverbs._true import _TRUE
#calss header
class _TRUEST(_TRUE, ):
def __init__(self,):
_TRUE.__init__(self)
self.name = "TRUEST"
self.specie = 'adverbs'
self.basic = "true"
self.jsondata = {}
|
[
"xingwang1991@gmail.com"
] |
xingwang1991@gmail.com
|
f958909a7a6b280c944a1b5dcfc27981d588a125
|
4f01328f202107399b5676c82be9d8fc246a7cf9
|
/torch2trt_dynamic/converters/new_ones.py
|
b45d45eeac35741c2040f6444c7c52a79a7cbd40
|
[
"MIT"
] |
permissive
|
JasonDu1993/torch2trt_dynamic
|
cccc6570ba68399e902346c06f362f80d38b6239
|
24bbad2ea4977bb88ce2ade433058becc6980c82
|
refs/heads/master
| 2022-12-29T17:10:03.714226
| 2020-10-11T07:24:37
| 2020-10-11T07:24:37
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,706
|
py
|
from torch2trt_dynamic.torch2trt_dynamic import *
@tensorrt_converter('torch.Tensor.new_ones')
def convert_new_ones(ctx):
input = ctx.method_args[0]
size = get_arg(ctx, 'size', pos=1, default=None)
dtype = get_arg(ctx, 'dtype', pos=2, default=input.dtype)
output = ctx.method_return
if isinstance(size, int):
size = (size, )
# check const
is_const = True
for s in size:
if hasattr(s,'_trt'):
is_const = False
break
if is_const:
# create const value
output_trt = trt_(ctx.network, output)
else:
# create fill
trt_size = []
for s in size:
if hasattr(s, '_trt'):
trt_size.append(s._trt)
else:
trt_size.append(trt_(ctx.network, s))
trt_size = ctx.network.add_concatenation(trt_size).get_output(0)
layer = ctx.network.add_fill(size, trt.FillOperation.RANDOM_UNIFORM)
layer.set_input(0, trt_size)
layer.set_input(1, trt_(ctx.network, input.new_tensor(1)))
layer.set_input(2, trt_(ctx.network, input.new_tensor(1)))
output_trt = layer.get_output(0)
data_type = None
if dtype==torch.float32:
data_type = trt.DataType.FLOAT
elif dtype==torch.int32 or dtype==torch.long:
data_type = trt.DataType.INT32
elif dtype==torch.bool:
data_type = trt.DataType.BOOL
else:
print("unsupported convert type:{}".format(dtype))
if data_type is not None:
layer = ctx.network.add_identity(output_trt)
layer.set_output_type(0, data_type)
output_trt = layer.get_output(0)
output._trt = output_trt
|
[
"streetyao@live.com"
] |
streetyao@live.com
|
df5fdd8502bafee22cd6422aadd62f41b6e93175
|
ade047677ca695a8d27ff50645f1afe8cd1463df
|
/Entity.py
|
74b0d2ab5a75cdc154d488c7aee0e35e161aff20
|
[] |
no_license
|
xuzhuo77/WorkSpace-FrameWork
|
677f02eead801fb48e60d0411ea4dc5aa516af57
|
109c691c270020ef20f36f625c31166c49386351
|
refs/heads/master
| 2023-03-06T13:02:28.528231
| 2021-02-21T23:01:08
| 2021-02-21T23:01:08
| 337,577,954
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 947
|
py
|
from sqlalchemy import Column, Integer, String,DateTime
from sqlalchemy.ext.declarative import declarative_base
import datetime
from Utils.UniqueIdUtil import gen_guid
base = declarative_base()
class Entity(base):
__abstract__ = True
# __tablename__ = "Entity"
id = Column(Integer, primary_key=True,autoincrement=True)
version = Column(Integer, nullable=False)
update_time=Column(DateTime, default=datetime.datetime.now, comment='更新时间')
# guid =Column(String(64), default=gen_guid(), primary_key=True)
# delete_flag=Column(Integer)
# creator=Column(String(64))
create_date=Column(DateTime, default=datetime.datetime.now, comment='创建时间')
# def __new__(cls, *args, **kwargs):
# print(kwargs)
# def __init__(self,*args,**kwargs):
# for i,k in kwargs:
# def __dict__(self):
# return str({c.name: getattr(self, c.name, None) for c in self.__table__.columns})
|
[
"403863214@qq.com"
] |
403863214@qq.com
|
b4328f61f077d87793f58c3c84eb3f34d5b7bf26
|
607dc8df19fc5248f6289cdda97857b5d58ca16f
|
/smac/model/gaussian_process/kernels/rbf_kernel.py
|
5bf20765886b3b5320be8d5b80fa8810a4ef6c68
|
[
"BSD-3-Clause",
"LicenseRef-scancode-philippe-de-muyter",
"LicenseRef-scancode-unknown-license-reference"
] |
permissive
|
automl/SMAC3
|
7dce243a33023c52d6819deff966f7b502e90ed0
|
541ee7e0383b491b86d1a23dcff669f2efad616d
|
refs/heads/main
| 2023-08-31T17:36:06.067579
| 2023-08-01T13:02:51
| 2023-08-01T13:02:51
| 65,900,469
| 943
| 259
|
NOASSERTION
| 2023-09-11T02:36:57
| 2016-08-17T10:58:05
|
Python
|
UTF-8
|
Python
| false
| false
| 2,738
|
py
|
from __future__ import annotations
import numpy as np
import scipy.optimize
import scipy.spatial.distance
import scipy.special
import sklearn.gaussian_process.kernels as kernels
from smac.model.gaussian_process.kernels.base_kernels import AbstractKernel
from smac.model.gaussian_process.priors.abstract_prior import AbstractPrior
__copyright__ = "Copyright 2022, automl.org"
__license__ = "3-clause BSD"
class RBFKernel(AbstractKernel, kernels.RBF):
"""RBF kernel implementation."""
def __init__(
self,
length_scale: float | tuple[float, ...] | np.ndarray = 1.0,
length_scale_bounds: tuple[float, float] | list[tuple[float, float]] | np.ndarray = (1e-5, 1e5),
operate_on: np.ndarray | None = None,
has_conditions: bool = False,
prior: AbstractPrior | None = None,
) -> None:
super().__init__(
operate_on=operate_on,
has_conditions=has_conditions,
prior=prior,
length_scale=length_scale,
length_scale_bounds=length_scale_bounds,
)
def _call(
self,
X: np.ndarray,
Y: np.ndarray | None = None,
eval_gradient: bool = False,
active: np.ndarray | None = None,
) -> np.ndarray | tuple[np.ndarray, np.ndarray]:
X = np.atleast_2d(X)
length_scale = kernels._check_length_scale(X, self.length_scale)
if Y is None:
dists = scipy.spatial.distance.pdist(X / length_scale, metric="sqeuclidean")
K = np.exp(-0.5 * dists)
# convert from upper-triangular matrix to square matrix
K = scipy.spatial.distance.squareform(K)
np.fill_diagonal(K, 1)
else:
if eval_gradient:
raise ValueError("Gradient can only be evaluated when Y is None.")
dists = scipy.spatial.distance.cdist(X / length_scale, Y / length_scale, metric="sqeuclidean")
K = np.exp(-0.5 * dists)
if active is not None:
K = K * active
if eval_gradient:
if self.hyperparameter_length_scale.fixed:
# Hyperparameter l kept fixed
return K, np.empty((X.shape[0], X.shape[0], 0))
elif not self.anisotropic or length_scale.shape[0] == 1:
K_gradient = (K * scipy.spatial.distance.squareform(dists))[:, :, np.newaxis]
return K, K_gradient
elif self.anisotropic:
# We need to recompute the pairwise dimension-wise distances
K_gradient = (X[:, np.newaxis, :] - X[np.newaxis, :, :]) ** 2 / (length_scale**2)
K_gradient *= K[..., np.newaxis]
return K, K_gradient
return K
|
[
"noreply@github.com"
] |
automl.noreply@github.com
|
089cc627b0ee56e98b1c995bece77eec11b6c657
|
76e931912629c37beedf7c9b112b53e7de5babd7
|
/1-mouth01/day14/exe01.py
|
e46e565f07c85e20b97d5613bbc0a84c048de75f
|
[
"Apache-2.0"
] |
permissive
|
gary-gggggg/gary
|
c59ac21d8e065f296ff986d11a0e4cbf186a1bc4
|
d8ba30ea4bc2b662a2d6a87d247f813e5680d63e
|
refs/heads/main
| 2023-02-23T06:54:34.500683
| 2021-02-01T10:17:02
| 2021-02-01T10:17:02
| 334,905,744
| 4
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 151
|
py
|
from module_exe import data
import module_exe
from module_exe import MyClass
c1 = MyClass()
c1.func02()
c1.func03()
print(data)
module_exe.func01()
|
[
"673248932@qq.com"
] |
673248932@qq.com
|
81026946ea3022346dd16b919458822b14a2eb72
|
ac2c3e8c278d0aac250d31fd023c645fa3984a1b
|
/saleor/saleor/graphql/core/enums.py
|
8046ea0ca5905d638c87ce50b2abe62ae9f3c1c4
|
[
"BSD-3-Clause",
"CC-BY-4.0"
] |
permissive
|
jonndoe/saleor-test-shop
|
152bc8bef615382a45ca5f4f86f3527398bd1ef9
|
1e83176684f418a96260c276f6a0d72adf7dcbe6
|
refs/heads/master
| 2023-01-21T16:54:36.372313
| 2020-12-02T10:19:13
| 2020-12-02T10:19:13
| 316,514,489
| 1
| 1
|
BSD-3-Clause
| 2020-11-27T23:29:20
| 2020-11-27T13:52:33
|
TypeScript
|
UTF-8
|
Python
| false
| false
| 5,088
|
py
|
import graphene
from ...account import error_codes as account_error_codes
from ...app import error_codes as app_error_codes
from ...checkout import error_codes as checkout_error_codes
from ...core import JobStatus, error_codes as core_error_codes
from ...core.permissions import get_permissions_enum_list
from ...core.weight import WeightUnits
from ...csv import error_codes as csv_error_codes
from ...discount import error_codes as discount_error_codes
from ...giftcard import error_codes as giftcard_error_codes
from ...invoice import error_codes as invoice_error_codes
from ...menu import error_codes as menu_error_codes
from ...order import error_codes as order_error_codes
from ...page import error_codes as page_error_codes
from ...payment import error_codes as payment_error_codes
from ...plugins import error_codes as plugin_error_codes
from ...plugins.vatlayer import TaxRateType as CoreTaxRateType
from ...product import error_codes as product_error_codes
from ...shipping import error_codes as shipping_error_codes
from ...warehouse import error_codes as warehouse_error_codes
from ...webhook import error_codes as webhook_error_codes
from ...wishlist import error_codes as wishlist_error_codes
from .utils import str_to_enum
# FIXME CoreTaxRateType should be removed after we will drop old api fields dedicated
# to taxes
class OrderDirection(graphene.Enum):
ASC = ""
DESC = "-"
@property
def description(self):
# Disable all the no-member violations in this function
# pylint: disable=no-member
if self == OrderDirection.ASC:
return "Specifies an ascending sort order."
if self == OrderDirection.DESC:
return "Specifies a descending sort order."
raise ValueError("Unsupported enum value: %s" % self.value)
class ReportingPeriod(graphene.Enum):
TODAY = "TODAY"
THIS_MONTH = "THIS_MONTH"
def to_enum(enum_cls, *, type_name=None, **options) -> graphene.Enum:
"""Create a Graphene enum from a class containing a set of options.
:param enum_cls:
The class to build the enum from.
:param type_name:
The name of the type. Default is the class name + 'Enum'.
:param options:
- description:
Contains the type description (default is the class's docstring)
- deprecation_reason:
Contains the deprecation reason.
The default is enum_cls.__deprecation_reason__ or None.
:return:
"""
# note this won't work until
# https://github.com/graphql-python/graphene/issues/956 is fixed
deprecation_reason = getattr(enum_cls, "__deprecation_reason__", None)
if deprecation_reason:
options.setdefault("deprecation_reason", deprecation_reason)
type_name = type_name or (enum_cls.__name__ + "Enum")
enum_data = [(str_to_enum(code.upper()), code) for code, name in enum_cls.CHOICES]
return graphene.Enum(type_name, enum_data, **options)
TaxRateType = graphene.Enum(
"TaxRateType", [(str_to_enum(rate[0]), rate[0]) for rate in CoreTaxRateType.CHOICES]
)
JobStatusEnum = to_enum(JobStatus)
PermissionEnum = graphene.Enum("PermissionEnum", get_permissions_enum_list())
WeightUnitsEnum = graphene.Enum(
"WeightUnitsEnum", [(str_to_enum(unit[0]), unit[0]) for unit in WeightUnits.CHOICES]
)
AccountErrorCode = graphene.Enum.from_enum(account_error_codes.AccountErrorCode)
AppErrorCode = graphene.Enum.from_enum(app_error_codes.AppErrorCode)
CheckoutErrorCode = graphene.Enum.from_enum(checkout_error_codes.CheckoutErrorCode)
ExportErrorCode = graphene.Enum.from_enum(csv_error_codes.ExportErrorCode)
DiscountErrorCode = graphene.Enum.from_enum(discount_error_codes.DiscountErrorCode)
PluginErrorCode = graphene.Enum.from_enum(plugin_error_codes.PluginErrorCode)
GiftCardErrorCode = graphene.Enum.from_enum(giftcard_error_codes.GiftCardErrorCode)
MenuErrorCode = graphene.Enum.from_enum(menu_error_codes.MenuErrorCode)
MetadataErrorCode = graphene.Enum.from_enum(core_error_codes.MetadataErrorCode)
OrderErrorCode = graphene.Enum.from_enum(order_error_codes.OrderErrorCode)
InvoiceErrorCode = graphene.Enum.from_enum(invoice_error_codes.InvoiceErrorCode)
PageErrorCode = graphene.Enum.from_enum(page_error_codes.PageErrorCode)
PaymentErrorCode = graphene.Enum.from_enum(payment_error_codes.PaymentErrorCode)
PermissionGroupErrorCode = graphene.Enum.from_enum(
account_error_codes.PermissionGroupErrorCode
)
ProductErrorCode = graphene.Enum.from_enum(product_error_codes.ProductErrorCode)
ShopErrorCode = graphene.Enum.from_enum(core_error_codes.ShopErrorCode)
ShippingErrorCode = graphene.Enum.from_enum(shipping_error_codes.ShippingErrorCode)
StockErrorCode = graphene.Enum.from_enum(warehouse_error_codes.StockErrorCode)
WarehouseErrorCode = graphene.Enum.from_enum(warehouse_error_codes.WarehouseErrorCode)
WebhookErrorCode = graphene.Enum.from_enum(webhook_error_codes.WebhookErrorCode)
WishlistErrorCode = graphene.Enum.from_enum(wishlist_error_codes.WishlistErrorCode)
TranslationErrorCode = graphene.Enum.from_enum(core_error_codes.TranslationErrorCode)
|
[
"testuser@151-248-122-3.cloudvps.regruhosting.ru"
] |
testuser@151-248-122-3.cloudvps.regruhosting.ru
|
9e3b1da5a3fe55cbd4b708c74114095a43a6ea6a
|
f1748434c3a06e6005618afc1c1b259ce2c5b115
|
/Learn/DataStructure/Graph/MinimumSpanningTree/kruskal.py
|
04faf853259776ac7c8a87b5d0e1da488a7ebcec
|
[] |
no_license
|
yue-yue-haha/Algorithm
|
f5175ae0e1339dba98c1bbd1c6b238634ced969c
|
1f7a120d262b80f2b4fc452a33f698ccdd9e1fad
|
refs/heads/master
| 2023-03-12T22:53:32.873956
| 2021-02-19T02:29:14
| 2021-02-19T02:29:14
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 823
|
py
|
from functools import cmp_to_key
from Learn.DataStructure.UnionFindSet.template import UnionFindSet
def kruskal(graph: dict):
tree, n = [], len(graph.get("nodes"))
ufs = UnionFindSet(n + 1)
graph.get("edges").sort(key=cmp_to_key(lambda a, b: a[2] - b[2]))
for start, end, weight in graph.get("edges"):
if len(tree) == n - 1:
break
if not ufs.same(start, end):
ufs.union(start, end)
tree.append((start, end, weight))
return tree
if __name__ == '__main__':
graphData = {
"nodes": [1, 2, 3, 4, 5],
"edges": [(1, 2, 1),
(1, 3, 3),
(1, 4, 5),
(2, 3, 2),
(2, 4, 4),
(2, 5, 6)]
}
res = kruskal(graphData)
print(f"Kruskal = {res}")
|
[
"alex18812649207@gmail.com"
] |
alex18812649207@gmail.com
|
2c99815d97b01dd33b9f52fda6a4d4f27a943fe2
|
d3af72e4c623dffeda95e662d495a95c8f2e317a
|
/scripts/gene_checker/annotations/glimmer_annotation.py
|
53a169406e4c8d9a608689a2828ffe6aab7b0b88
|
[] |
no_license
|
bioinf/bi2014-mycoplasma-genitalium
|
0e2fbf095a461339064ea38f1be4586897f7c2ac
|
bd8eb82bb8d883faeb0492d74deb7a396577b782
|
refs/heads/master
| 2016-09-05T11:34:00.325602
| 2014-12-06T12:37:12
| 2014-12-06T12:37:12
| 24,504,082
| 0
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 649
|
py
|
__author__ = 'nikita_kartashov'
from generic_annotation import GenericAnnotation
class GlimmerAnnotation(GenericAnnotation):
def __init__(self, line):
super(GlimmerAnnotation, self).__init__()
self.__parse_annotation(line.split())
self._length = self.end() - self.start()
def __parse_annotation(self, annotation_list):
self._id = annotation_list[0]
self._start = int(annotation_list[1])
self._end = int(annotation_list[2])
self._forward_chain = annotation_list[3][0] == '+'
def check_annotation(self, code):
return super(GlimmerAnnotation, self).check_annotation(code)
|
[
"snailandmail@gmail.com"
] |
snailandmail@gmail.com
|
795a88f021e67b571a81ee946bc1108a9a4f7243
|
336f11ee8934581f05ab620c5324c601ba864b05
|
/python_unit_testing/Introduction/The possible test results/possible_test_results.py
|
2f19b271168fe43ac4d72e877771ed0bb40c8b2c
|
[] |
no_license
|
ancient-clever/sandbox
|
01adeee2638a23533965cf57ca873a30e7dfad3d
|
87dec3bf8860a67a36154ee5d7c826d919d3111b
|
refs/heads/master
| 2022-05-17T04:49:54.703068
| 2020-01-19T17:44:27
| 2020-01-19T17:44:27
| 206,946,679
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 223
|
py
|
import unittest
class TestPossibleResults(unittest.TestCase):
def test_success(self):
pass
def test_failure(self):
self.assertEqual(True, False)
def test_error(self):
raise Exception
|
[
"ancient-clever@outlook.com"
] |
ancient-clever@outlook.com
|
83dde2b826ec281476013ca779abb5abbedd1de5
|
1617a9a9c92146bcdac89b5efb1ef0d18408160b
|
/contlab7/31/solution.py
|
f68d9662c990a36c62a4e6b0cab797991dbca222
|
[] |
no_license
|
LitRidl/checker-content
|
1b1329b4462b87731e0755ab33480ff063a94a00
|
b5d0456c8d4d28db6e6022e272a95a385f253797
|
refs/heads/master
| 2023-08-17T18:08:07.377680
| 2018-02-04T11:16:34
| 2018-02-04T11:16:34
| 120,077,784
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 759
|
py
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
u'''
Devise an algorithm that encodes a word in the Latin alphabet with a Caesar cipher using a shift of 13 (see the rot13 cipher).
The input word is a sequence of lowercase Latin letters a-z.
Output: the sequence of lowercase Latin letters obtained from the input by applying the rot13 cipher.
Example input: icanhazcheeseburger
'''
from __future__ import print_function
from numpy import base_repr
try:
a = raw_input().strip()
except EOFError:
a = ''
result = a.encode('rot_13')
print('{0}'.format(result))
|
[
"tutkarma@gmail.com"
] |
tutkarma@gmail.com
|
2efbeedfb1feb7576a382150e2e2754a57d664e2
|
003372621424577306aff35de88f7366fcc4baa0
|
/sa_tools_core/libs/qcloud/qcloudsdkbmeip/EipBmBindVpcIpRequest.py
|
65b528a54cc4dd083a6ef835f2228e02f6de0ceb
|
[] |
no_license
|
stoensin/sa-tools-core
|
ab28ca5f7a8d5703952db9e6554b104682507964
|
c0faeef4de8ba677817384d88cb107ad2308c03e
|
refs/heads/master
| 2020-07-11T06:59:33.224305
| 2019-08-22T07:36:46
| 2019-08-22T07:36:46
| 204,472,284
| 1
| 0
| null | 2019-08-26T12:36:18
| 2019-08-26T12:36:18
| null |
UTF-8
|
Python
| false
| false
| 852
|
py
|
# -*- coding: utf-8 -*-
from ..qcloudsdkcore.request import Request
class EipBmBindVpcIpRequest(Request):
def __init__(self):
super(EipBmBindVpcIpRequest, self).__init__(
'bmeip', 'qcloudcliV1', 'EipBmBindVpcIp', 'bmeip.api.qcloud.com')
def get_eipId(self):
return self.get_params().get('eipId')
def set_eipId(self, eipId):
self.add_param('eipId', eipId)
def get_unVpcId(self):
return self.get_params().get('unVpcId')
def set_unVpcId(self, unVpcId):
self.add_param('unVpcId', unVpcId)
def get_vpcId(self):
return self.get_params().get('vpcId')
def set_vpcId(self, vpcId):
self.add_param('vpcId', vpcId)
def get_vpcIp(self):
return self.get_params().get('vpcIp')
def set_vpcIp(self, vpcIp):
self.add_param('vpcIp', vpcIp)
|
[
"tclh123@gmail.com"
] |
tclh123@gmail.com
|
d70c87e261956e3d381b2fac3f1dc1fe02bde460
|
5d304c6ec0f01edee73e3b612f84307060c0da54
|
/add_two_numbers.py
|
d9649cba9d5f094e5a6a7c4bdc848c78665f5ec0
|
[] |
no_license
|
xartisan/leetcode-solutions-in-python
|
cfa06b9e02f7ec0446cf6b71df4ea46caa359adc
|
7e3929a4b5bd0344f93373979c9d1acc4ae192a7
|
refs/heads/master
| 2020-03-14T17:10:07.957089
| 2018-07-29T10:11:01
| 2018-07-29T10:11:01
| 131,713,447
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,340
|
py
|
# Definition for singly-linked list.
class ListNode:
def __init__(self, x, next_node=None):
self.val = x
self.next = next_node
def __repr__(self):
rv = str(self.val)
if self.next is not None:
rv += ' -> ' + repr(self.next)
return rv
class Solution:
def addTwoNumbers(self, l1, l2):
"""
:type l1: ListNode
:type l2: ListNode
:rtype: ListNode
"""
# dummy node
dummy_node = ListNode(0)
cur_node = dummy_node
acc = 0
while l1 is not None or l2 is not None:
s = acc + (l1 or dummy_node).val + (l2 or dummy_node).val
acc = 0
if s >= 10:
s, acc = s - 10, 1
new_node = ListNode(s)
cur_node.next = new_node
cur_node = new_node
if l1 is not None:
l1 = l1.next
if l2 is not None:
l2 = l2.next
if acc == 1:
cur_node.next = ListNode(1)
return dummy_node.next
if __name__ == '__main__':
l1 = ListNode(2, ListNode(4, ListNode(3)))
print(l1)
l2 = ListNode(5, ListNode(6, ListNode(4)))
print(l2)
s = Solution()
rv = s.addTwoNumbers(l1, l2)
print(rv)
assert repr(rv) == '7 -> 0 -> 8', 'Wrong answer!'
|
[
"codeartisan@outlook.com"
] |
codeartisan@outlook.com
|
1d7d09411e8e1745eeb4db330972104eba8afa79
|
0fccee4c738449f5e0a8f52ea5acabf51db0e910
|
/genfragments/ThirteenTeV/WR5000/WR5000_Zp1000_HN100_mumu_cfg.py
|
59e93042f1fff94bb505ee06a633c5fd7a303381
|
[] |
no_license
|
cms-sw/genproductions
|
f308ffaf3586c19b29853db40e6d662e937940ff
|
dd3d3a3826343d4f75ec36b4662b6e9ff1f270f4
|
refs/heads/master
| 2023-08-30T17:26:02.581596
| 2023-08-29T14:53:43
| 2023-08-29T14:53:43
| 11,424,867
| 69
| 987
| null | 2023-09-14T12:41:28
| 2013-07-15T14:18:33
|
Python
|
UTF-8
|
Python
| false
| false
| 6,270
|
py
|
# Auto generated configuration file
# using:
# Revision: 1.19
# Source: /local/reps/CMSSW/CMSSW/Configuration/Applications/python/ConfigBuilder.py,v
# with command line options: Configuration/GenProduction/python/ThirteenTeV/WRToNuMuToMuMuJJ_MW-3400_MNu-1700_TuneCUETP8M1_13TeV-pythia8_cfg.py --fileout step1.root --mc --pileup_input pileup.root --eventcontent RAWSIM --pileup 2016_25ns_SpringMC_PUScenarioV1_PoissonOOTPU --era Run2_25ns --datatier GEN-SIM-RAW --conditions 80X_mcRun2_asymptotic_2016_v3 --step GEN,SIM,DIGI,L1,DIGI2RAW,HLT:@frozen25ns --no_exec -n 1000 --python_filename test_GEN_to_HLT_cfg.py --fileout file:step1.root --python_filename step1.py
import FWCore.ParameterSet.Config as cms
from Configuration.StandardSequences.Eras import eras
process = cms.Process('HLT',eras.Run2_25ns)
# import of standard configurations
process.load('Configuration.StandardSequences.Services_cff')
process.load('SimGeneral.HepPDTESSource.pythiapdt_cfi')
process.load('FWCore.MessageService.MessageLogger_cfi')
process.load('Configuration.EventContent.EventContent_cff')
process.load('SimGeneral.MixingModule.mix_2016_25ns_SpringMC_PUScenarioV1_PoissonOOTPU_cfi')
process.load('Configuration.StandardSequences.GeometryRecoDB_cff')
process.load('Configuration.Geometry.GeometrySimDB_cff')
process.load('Configuration.StandardSequences.MagneticField_cff')
process.load('Configuration.StandardSequences.Generator_cff')
process.load('IOMC.EventVertexGenerators.VtxSmearedRealistic50ns13TeVCollision_cfi')
process.load('GeneratorInterface.Core.genFilterSummary_cff')
process.load('Configuration.StandardSequences.SimIdeal_cff')
process.load('Configuration.StandardSequences.Digi_cff')
process.load('Configuration.StandardSequences.SimL1Emulator_cff')
process.load('Configuration.StandardSequences.DigiToRaw_cff')
process.load('HLTrigger.Configuration.HLT_GRun_cff')
process.load('Configuration.StandardSequences.EndOfProcess_cff')
process.load('Configuration.StandardSequences.FrontierConditions_GlobalTag_cff')
process.maxEvents = cms.untracked.PSet(
input = cms.untracked.int32(1000)
)
# Input source
process.source = cms.Source("EmptySource")
process.options = cms.untracked.PSet(
)
# Production Info
process.configurationMetadata = cms.untracked.PSet(
annotation = cms.untracked.string('Configuration/GenProduction/python/ThirteenTeV/WRToNuMuToMuMuJJ_MW-3400_MNu-1700_TuneCUETP8M1_13TeV-pythia8_cfg.py nevts:1000'),
name = cms.untracked.string('Applications'),
version = cms.untracked.string('$Revision: 1.19 $')
)
# Output definition
process.RAWSIMoutput = cms.OutputModule("PoolOutputModule",
SelectEvents = cms.untracked.PSet(
SelectEvents = cms.vstring('generation_step')
),
dataset = cms.untracked.PSet(
dataTier = cms.untracked.string('GEN-SIM-RAW'),
filterName = cms.untracked.string('')
),
eventAutoFlushCompressedSize = cms.untracked.int32(5242880),
fileName = cms.untracked.string('step1.root'),
outputCommands = process.RAWSIMEventContent.outputCommands,
splitLevel = cms.untracked.int32(0)
)
# Additional output definition
# Other statements
process.mix.input.fileNames = cms.untracked.vstring(['pileup.root'])
process.genstepfilter.triggerConditions=cms.vstring("generation_step")
from Configuration.AlCa.GlobalTag import GlobalTag
process.GlobalTag = GlobalTag(process.GlobalTag, '80X_mcRun2_asymptotic_2016_v3', '')
process.generator = cms.EDFilter("Pythia8GeneratorFilter",
PythiaParameters = cms.PSet(
parameterSets = cms.vstring('pythia8CommonSettings',
'pythia8CUEP8M1Settings',
'processParameters'),
processParameters = cms.vstring('LeftRightSymmmetry:ffbar2ZR = on',
'9900024:m0 = 5000',
'9900023:m0 = 1000',
'9900014:m0 = 100',
'9900024:onMode = off',
'9900023:onMode = off',
'9900023:onIfAny = 9900014, 9900014',
'9900024:onIfAny = 13,9900014'),
pythia8CUEP8M1Settings = cms.vstring('Tune:pp 14',
'Tune:ee 7',
'MultipartonInteractions:pT0Ref=2.4024',
'MultipartonInteractions:ecmPow=0.25208',
'MultipartonInteractions:expPow=1.6'),
pythia8CommonSettings = cms.vstring('Tune:preferLHAPDF = 2',
'Main:timesAllowErrors = 10000',
'Check:epTolErr = 0.01',
'Beams:setProductionScalesFromLHEF = off',
'SLHA:keepSM = on',
'SLHA:minMassSM = 1000.',
'ParticleDecays:limitTau0 = on',
'ParticleDecays:tau0Max = 10',
'ParticleDecays:allowPhotonRadiation = on')
),
comEnergy = cms.double(13000.0),
filterEfficiency = cms.untracked.double(1.0),
maxEventsToPrint = cms.untracked.int32(1),
pythiaHepMCVerbosity = cms.untracked.bool(False),
pythiaPylistVerbosity = cms.untracked.int32(1)
)
# Path and EndPath definitions
process.generation_step = cms.Path(process.pgen)
process.simulation_step = cms.Path(process.psim)
process.digitisation_step = cms.Path(process.pdigi)
process.L1simulation_step = cms.Path(process.SimL1Emulator)
process.digi2raw_step = cms.Path(process.DigiToRaw)
process.genfiltersummary_step = cms.EndPath(process.genFilterSummary)
process.endjob_step = cms.EndPath(process.endOfProcess)
process.RAWSIMoutput_step = cms.EndPath(process.RAWSIMoutput)
# Schedule definition
process.schedule = cms.Schedule(process.generation_step,process.genfiltersummary_step,process.simulation_step,process.digitisation_step,process.L1simulation_step,process.digi2raw_step)
process.schedule.extend(process.HLTSchedule)
process.schedule.extend([process.endjob_step,process.RAWSIMoutput_step])
# filter all path with the production filter sequence
for path in process.paths:
getattr(process,path)._seq = process.generator * getattr(process,path)._seq
# customisation of the process.
# Automatic addition of the customisation function from HLTrigger.Configuration.customizeHLTforMC
from HLTrigger.Configuration.customizeHLTforMC import customizeHLTforFullSim
#call to customisation function customizeHLTforFullSim imported from HLTrigger.Configuration.customizeHLTforMC
process = customizeHLTforFullSim(process)
# End of customisation functions
|
[
"saptaparna.bhattacharya@cern.ch"
] |
saptaparna.bhattacharya@cern.ch
|
55b7536a054de0a55848639515962924284c30e3
|
4dec0f934760ca69e40b62fa56b37a1aa3918b24
|
/test/test_web_deface_utils.py
|
418c9c67aa4f0448c4eb77b11bd8eeb6df758bdc
|
[
"MIT"
] |
permissive
|
rejahrehim/SecureTea-Project
|
28ebc89f27ed59e3845b8c82f9316108cda40a24
|
43dec187e5848b9ced8a6b4957b6e9028d4d43cd
|
refs/heads/master
| 2020-03-27T12:36:21.779426
| 2019-09-02T16:01:55
| 2019-09-02T16:01:55
| 146,556,097
| 1
| 0
|
MIT
| 2018-08-29T06:35:54
| 2018-08-29T06:35:53
| null |
UTF-8
|
Python
| false
| false
| 1,228
|
py
|
# -*- coding: utf-8 -*-
import unittest
from securetea.lib.web_deface import utils
try:
# if python 3.x.x
from unittest.mock import patch
except ImportError: # python 2.x.x
from mock import patch
class TestUtils(unittest.TestCase):
"""
Test class for SecureTea Web Deface Utils.
"""
@patch("securetea.lib.web_deface.utils.get_system_name")
def test_categorize_os(self, mock_system):
"""
Test categorize_os.
"""
mock_system.return_value = "debian"
self.assertEqual(utils.categorize_os(), "debian")
@patch("securetea.lib.web_deface.utils.platform")
def test_get_system_name(self, mock_platform):
"""
Test get_system_name.
"""
mock_platform.dist.return_value = ["debian"]
res = utils.get_system_name()
self.assertEqual(res, "debian")
@patch("securetea.lib.web_deface.utils.os")
def test_check_root(self, mock_os):
"""
Test check_root.
"""
# Running as root
mock_os.getuid.return_value = 0
self.assertTrue(utils.check_root())
# Not running as root
mock_os.getuid.return_value = 1
self.assertFalse(utils.check_root())
|
[
"abhishek_official@hotmail.com"
] |
abhishek_official@hotmail.com
|
3170717deab4907adb3968420a93b4be31911af6
|
303a4d41da8f2cd2000630ff30424d2875490e67
|
/hotglycol/genangletarg.py
|
e3e3568faeabd27bb319e698874ecd591da9fe3c
|
[] |
no_license
|
noobermin/sharks
|
beb1d3d6a593e8d62f3d7416697d4de1fe9558b1
|
af87113781eb67af45a9c2f79b73b1512ae0a1fa
|
refs/heads/master
| 2022-05-10T11:55:17.200591
| 2021-09-30T14:27:22
| 2021-09-30T14:27:22
| 19,997,024
| 0
| 2
| null | 2016-05-20T19:27:49
| 2014-05-20T20:49:16
|
Common Lisp
|
UTF-8
|
Python
| false
| false
| 1,559
|
py
|
#!/usr/bin/env python2
import numpy as np;
def mk45(dim=[-5e-4,5e-4,-5e-4,5e-4,],
N0=1.08e22,
width=0.5e-4,
dropcorners=False,):
xlim = dim[:2];
ylim = dim[2:];
def _corner(x,y,good):
ret = y < width/np.sqrt(2) + ylim[0] -(x-xlim[0])
ret|= y > -width/np.sqrt(2) + ylim[1] -(x-xlim[1])
ret = np.logical_not(ret);
return np.logical_and(ret,good);
def roundcorner(x,y,good):
ret = y < ylim[0] + width - (x-xlim[0])
ret|= y > ylim[1] - width - (x-xlim[1])
ret = good and not ret;
ret|= (x-xlim[0]-width/2)**2 + (y-ylim[0]-width/2)**2 <= width**2/4.0;
ret|= (x-xlim[1]+width/2)**2 + (y-ylim[1]+width/2)**2 <= width**2/4.0;
return ret;
if dropcorners == True:
corner = _corner;
elif dropcorners == 'round':
corner = roundcorner;
else:
corner = lambda x,y,g: g;
@np.vectorize
def f(x,y):
good = xlim[0] <= x <= xlim[1];
good&= ylim[0] <= y <= ylim[1];
good&=np.abs(y - x)*np.sqrt(2) < width;
good =corner(x,y,good);
if good:
return N0;
else:
return 0.0;
return f;
if __name__ == "__main__":
dx = 0.01
mn,mx = -10.5,10.5,
lmn, lmx = -11.5,11.5
width = 2;
F=mk45(dim=[mn,mx,mn,mx],width=width,dropcorners='round');
X,Y=np.mgrid[
lmn:lmx + dx:dx,
lmn:lmx + dx:dx];
import matplotlib.pyplot as plt;
plt.pcolormesh(X,Y,F(X,Y));
plt.axis('equal');
plt.show();
|
[
"ngirmang.1@osu.edu"
] |
ngirmang.1@osu.edu
|
3bd3248d2c6bde2df7607b256dd029658457051f
|
aef69557d8960205a780e61b7c2dfbb1d7733449
|
/Code/SarahBeth/tests.py
|
7bbcce152ea94f86ce923360be692fa4a260987c
|
[] |
no_license
|
sbtries/class_pandaaaa
|
579d6be89a511bdc36b0ce8c95545b9b704a734a
|
bbf9c419a00879118a55c2c19e5b46b08af806bc
|
refs/heads/master
| 2023-07-18T14:18:25.881333
| 2021-09-02T22:48:29
| 2021-09-02T22:48:29
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 154
|
py
|
# card1 = input("what's your first card?")
# card2 = input("what's your second card?")
# card3 = input("what's your third card?")
cards2 = ['q', '1', '2']
|
[
"sarahbtracy17@gmail.com"
] |
sarahbtracy17@gmail.com
|
35e94a777c8f8bc8a326cf67aab84070f19105e0
|
e4dcd63ed6b66b2cc164e4a9667e163e9c40601c
|
/virtual/bin/confusable_homoglyphs
|
8f58c52f317751308d6728b3aa7fd3e3d40eeb4d
|
[] |
no_license
|
kepha-okari/tabler
|
8e95803843f5fd9a8726e4ee85a57b48a77d2a2e
|
f682a77d581834151f723cdd2de2a37353369047
|
refs/heads/master
| 2022-12-22T21:52:45.879263
| 2018-07-18T15:45:52
| 2018-07-18T15:45:52
| 136,938,470
| 0
| 0
| null | 2022-12-08T00:59:12
| 2018-06-11T14:32:53
|
Python
|
UTF-8
|
Python
| false
| false
| 269
|
#!/home/rkepha/Documents/hir/timetabler/virtual/bin/python3.6
# -*- coding: utf-8 -*-
import re
import sys
from confusable_homoglyphs.cli import cli
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
sys.exit(cli())
|
[
"kephaokari@gmail.com"
] |
kephaokari@gmail.com
|
|
15e3e7fd280ed96ea086aab5efc0b2fc21e4b360
|
a904c2fd006d6652d28af8eb8634d29d66d0024f
|
/net/PRESUBMIT.py
|
e82166c7b55e59fde047d70a83ff671f5f61f6a2
|
[
"BSD-3-Clause"
] |
permissive
|
esprehn/mojo
|
1cba014abe08168509ebb202dd4b032f61f06713
|
e50a99d5c5b046aa24a5415744f6661cb12a66c3
|
refs/heads/master
| 2020-12-01T13:05:42.886923
| 2014-10-30T22:23:04
| 2014-10-30T22:23:04
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,007
|
py
|
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Chromium presubmit script for src/net.
See http://dev.chromium.org/developers/how-tos/depottools/presubmit-scripts
for more details on the presubmit API built into gcl.
"""
def GetPreferredTryMasters(project, change):
masters = {
'tryserver.chromium.linux': {
'linux_chromium_rel': set(['defaulttests']),
},
'tryserver.chromium.mac': {
'mac_chromium_rel': set(['defaulttests']),
},
'tryserver.chromium.win': {
'win_chromium_rel': set(['defaulttests']),
}
}
# Changes that touch NSS files will likely need a corresponding OpenSSL edit.
# Conveniently, this one glob also matches _openssl.* changes too.
if any('nss' in f.LocalPath() for f in change.AffectedFiles()):
masters['tryserver.chromium.linux'].setdefault(
'linux_redux', set()).add('defaulttests')
return masters
|
[
"jamesr@chromium.org"
] |
jamesr@chromium.org
|
0872b8ae6fd54a0177c8e8bc0a1dcb97b506dd72
|
946a9dcf4e644f0d3f806f016a23ae8d96095082
|
/LeetCode/DP/375_GuessNumberHigherOrLower2.py
|
2ac71345a5c550d6ff8bd1d9657c2b24b4745021
|
[] |
no_license
|
HzCeee/Algorithms
|
3dea898f071f4103ca3eb038f63b01ba4ba95383
|
e05f29071d0badd081535e773f43ebc303aa12c4
|
refs/heads/master
| 2018-10-20T21:54:37.154631
| 2018-10-11T20:46:34
| 2018-10-11T20:46:34
| 116,142,856
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 667
|
py
|
class Solution(object):
def getMoneyAmount(self, n):
"""
:type n: int
:rtype: int
"""
# dp[low][high] refers to the minimum money need to guarantee a win within range [low, high]
# dp[low][high] = min([guess + max(dp[low][guess], dp[guess][high]) for guess in range(low, high)])
dp = [[0] * n for _ in range(n)]
for low in range(n)[::-1]:
for high in range(low + 1, n):
tmp = [guess + 1 + max(dp[low][guess - 1], dp[guess + 1][high]) for guess in range(low, high)]
dp[low][high] = min(tmp) if tmp else 0
return dp[0][-1]
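# Hypothetical usage sketch (not part of the original submission), exercising the
# DP above; the widely quoted reference answer for n = 10 is 16.
if __name__ == '__main__':
    print(Solution().getMoneyAmount(10))  # expected: 16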
|
[
"huangzixihzx@gmail.com"
] |
huangzixihzx@gmail.com
|
e18292f3203af3b07da2eabe2cbae4e3147fd60b
|
b6be7bef4c8ffd48c3a1c89fa2ad84bc8d042eb7
|
/Inception.py
|
57388fe850812ec59514d31ce4604505005ffc93
|
[] |
no_license
|
Wushaoyong/tensflowtest
|
48e747c1a6cdd7374313013d43cb095a97f3bc34
|
f393a945e733fcfe0c63f5dcfffc44c60d2a5862
|
refs/heads/master
| 2023-01-10T01:47:38.764265
| 2020-11-10T02:26:16
| 2020-11-10T02:26:16
| 305,975,864
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,758
|
py
|
import tensorflow as tf
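# Note: this file is a fragment of a larger training script. The names used below
# (pool6, batch_norm, batch_norm2, is_training_mmodel) are assumed to be defined
# elsewhere in the original project; they are not created in this file.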
with tf.variable_scope('conv7') as scope:
input_image = pool6
input_1 = 110
input_2 = 120
input_3 = input_2 * 3
weights = tf.get_variable('weights',shape=[3, 3, input_1, input_2],
dtype=tf.float32,
initializer=tf.truncated_normal_initializer(stddev=0.05, dtype=tf.float32))
biases = tf.get_variable('biases',shape=[input_2],dtype=tf.float32,
initializer=tf.constant_initializer(0.1))
weights_deduce = tf.get_variable('weights_deduce',shape=[3, 3, input_3, input_2],dtype=tf.float32, initializer=tf.truncated_normal_initializer
(stddev=0.05, dtype=tf.float32))
biases_deduce = tf.get_variable('biases_deduce',shape=[input_2],
dtype=tf.float32,initializer=tf.constant_initializer(0.1))
conv = tf.nn.conv2d(input_image, weights, strides=[1, 1, 1, 1], padding='SAME')
pre_activation = tf.nn.bias_add(conv, biases)
pre_activation = batch_norm(pre_activation, is_training_mmodel)
conv_relu = tf.nn.relu(pre_activation, name=scope.name)
pool_2_2 = tf.nn.max_pool(conv_relu, ksize=[1, 2, 2, 1], strides=[1, 1, 1, 1], padding='SAME', name='pooling2_2')
pool_3_3 = tf.nn.max_pool(conv_relu, ksize=[1, 3, 3, 1], strides=[1, 1, 1, 1], padding='SAME', name='pooling3_3')
pool_4_4 = tf.nn.max_pool(conv_relu, ksize=[1, 4, 4, 1], strides=[1, 1, 1, 1], padding='SAME', name='pooling4_4')
pool_pre = tf.concat([pool_2_2, pool_3_3, pool_4_4], 3)
with tf.variable_scope('reduce_layer') as deduce0:
conv = tf.nn.conv2d(pool_pre, weights_deduce, strides=[1, 1, 1, 1], padding='SAME')
pre_activation = tf.nn.bias_add(conv, biases_deduce)
pre_activation = batch_norm2(pre_activation, is_training_mmodel)
pool7 = tf.nn.relu(pre_activation, name=deduce0.name)
|
[
"1123893617@qq.com"
] |
1123893617@qq.com
|
b0a7dbf1e9f1a38e8c3c7c3f7498e5b970e34a7f
|
d92fc7ba34412e8a1633b87bea5761c0bdbf196e
|
/utils/OfflineDataLoader.py
|
4a6d8b8f8004d9e9b3de5a094691bbe7da61e89e
|
[
"MIT"
] |
permissive
|
yigitozgumus/PolimiRecSys2018
|
d37876c8d678aecc52d8c4558f2b6d40d1a03d83
|
1e63f46b83bd52399dc600b1abbf254bf47db094
|
refs/heads/master
| 2023-02-17T16:24:39.853937
| 2022-08-13T08:24:13
| 2022-08-13T08:24:13
| 152,449,643
| 0
| 0
|
MIT
| 2023-02-10T22:42:06
| 2018-10-10T15:49:36
|
Python
|
UTF-8
|
Python
| false
| false
| 2,954
|
py
|
from utils.util import working_directory
import os
import re
import models as m
class OfflineDataLoader(object):
def __init__(self,model_folder="saved_models",parameter_folder="saved_parameters"):
super(OfflineDataLoader, self).__init__()
self.repository = "tuned_parameters"
self.model_folder = model_folder
self.parameter_folder = parameter_folder
self.training = self.model_folder + "/" + "training"
self.submission = self.model_folder + "/" + "submission"
self.training_models = self.get_models(self.training)
self.submission_models = self.get_models(self.submission)
self.parameter_files = self.get_models(self.parameter_folder)
self.repository_files = self.build_repository(self.repository)
def get_model(self,model_name,training=True):
if training:
result = [i for i in self.training_models if re.compile(model_name).search(i)]
folder_path = str("/".join(result[0].split("/")[:-1])+"/")
file_name = result[0].split("/")[-1]
return folder_path,file_name
else:
result = [i for i in self.submission_models if re.compile(model_name).search(i)]
folder_path = str("/".join(result[0].split("/")[:-1])+"/")
file_name = result[0].split("/")[-1]
return folder_path,file_name
def get_parameter(self,model_name):
result = [i for i in self.parameter_files if re.compile(model_name).search(i)]
folder_path = str("/".join(result[0].split("/")[:-1])+"/")
file_name = result[0].split("/")[-1]
return folder_path,file_name
def get_models(self,folder_name):
fileList = os.listdir(folder_name)
filter = re.compile(r'\..+|.+\.txt$')
filtered_files = [folder_name + "/" + i for i in fileList if not filter.search(i)]
return filtered_files
def build_repository(self,repo_folder):
filter = re.compile(r'\..+|.+\.txt$')
listOfFolders = os.listdir(repo_folder)
filteredDirPaths = [repo_folder+"/"+i for i in listOfFolders if not filter.search(i)]
files = []
for folder in filteredDirPaths:
with working_directory(folder):
filePaths = [folder +"/"+ i for i in os.listdir(".")]
files.extend(filePaths)
# Categorize
# Define error filter
errorFilter = re.compile(r'Error.+')
# Make it error free
errorFilteredFiles = [i for i in files if not errorFilter.search(i)]
bestModelFilter = re.compile(r'best_model$')
self.best_models = [i for i in files if bestModelFilter.search(i)]
parameterFilter = re.compile(r'best_parameters$')
self.best_parameters = [i for i in files if parameterFilter.search(i)]
resultFilter = re.compile(r'best_result_test$')
self.best_results = [i for i in files if resultFilter.search(i)]
|
[
"yigitozgumus1@gmail.com"
] |
yigitozgumus1@gmail.com
|
dde8ce33f41abb767cc6c00643052aeb98027f76
|
2c74bb301f1ed83b79254944183ac5a18a639fdf
|
/tests/components/modern_forms/test_init.py
|
fd6ff49547078e61218c25b6e6b05643814c2186
|
[
"Apache-2.0"
] |
permissive
|
Adminiuga/home-assistant
|
5bec93007ddac1a268cc359bf7e48530c5f73b38
|
dcf68d768e4f628d038f1fdd6e40bad713fbc222
|
refs/heads/dev
| 2023-02-22T22:03:31.013931
| 2022-11-09T00:27:20
| 2022-11-09T00:27:20
| 123,929,062
| 5
| 4
|
Apache-2.0
| 2023-02-22T06:14:31
| 2018-03-05T14:11:09
|
Python
|
UTF-8
|
Python
| false
| false
| 1,766
|
py
|
"""Tests for the Modern Forms integration."""
from unittest.mock import MagicMock, patch
from aiomodernforms import ModernFormsConnectionError
from homeassistant.components.modern_forms.const import DOMAIN
from homeassistant.config_entries import ConfigEntryState
from homeassistant.core import HomeAssistant
from homeassistant.helpers import entity_registry as er
from . import init_integration, modern_forms_no_light_call_mock
from tests.test_util.aiohttp import AiohttpClientMocker
@patch(
"homeassistant.components.modern_forms.ModernFormsDevice.update",
side_effect=ModernFormsConnectionError,
)
async def test_config_entry_not_ready(
mock_update: MagicMock, hass: HomeAssistant, aioclient_mock: AiohttpClientMocker
) -> None:
"""Test the Modern Forms configuration entry not ready."""
entry = await init_integration(hass, aioclient_mock)
assert entry.state is ConfigEntryState.SETUP_RETRY
async def test_unload_config_entry(
hass: HomeAssistant, aioclient_mock: AiohttpClientMocker
) -> None:
"""Test the Modern Forms configuration entry unloading."""
entry = await init_integration(hass, aioclient_mock)
assert hass.data[DOMAIN]
await hass.config_entries.async_unload(entry.entry_id)
await hass.async_block_till_done()
assert not hass.data.get(DOMAIN)
async def test_fan_only_device(hass, aioclient_mock):
"""Test we set unique ID if not set yet."""
await init_integration(
hass, aioclient_mock, mock_type=modern_forms_no_light_call_mock
)
entity_registry = er.async_get(hass)
fan_entry = entity_registry.async_get("fan.modernformsfan_fan")
assert fan_entry
light_entry = entity_registry.async_get("light.modernformsfan_light")
assert light_entry is None
|
[
"noreply@github.com"
] |
Adminiuga.noreply@github.com
|
1608fa8e1c84cd41b61e54ffed640e62909cec44
|
c1f15f5834062b0d5a6d6857a89124f3f114b2bd
|
/quick/features.py
|
fea39d60247ca6a9a0113700e9d9fa359c349cb5
|
[] |
no_license
|
msoedov/quick.py
|
4bd3294daf8136989bbb0d2316224360657330c3
|
5c89c11c7f6bc1fabbee757a8bb217dea08e359c
|
refs/heads/master
| 2023-01-09T20:20:49.811969
| 2020-04-13T12:46:52
| 2020-04-13T12:46:52
| 44,098,220
| 15
| 4
| null | 2022-12-26T20:21:03
| 2015-10-12T09:41:22
|
Python
|
UTF-8
|
Python
| false
| false
| 4,243
|
py
|
import functools
import sys
import unittest
from collections import namedtuple
from copy import deepcopy
from typing import Any, Callable, List
from .common import *
from .core import Schema, flatten, generate
from .shrink import shrink
config = {"max_count": 100, "max_scale": sys.maxsize}
experiment = namedtuple("experiment", "name fn config")
default = object()
debug = print
def verify(prop: experiment, simplification: bool = False) -> Any:
test_case, schema = generate(prop.fn)
kwargs = flatten(schema)
ok = test_case(**kwargs)
if ok:
return True, kwargs, None, None
if simplification:
shrunked, simplified_to = shrink(test_case, schema)
else:
shrunked = False
simplified_to = kwargs
return False, kwargs, shrunked, simplified_to
def code_gen(
experiment: experiment, x: int, skip_group: Callable, simplification: bool = False
) -> Callable:
@skip_group
def test_experiment(t):
ok, kwargs, shrunked, simplified_to = verify(experiment, simplification)
if not ok:
description = "`{}` Input: #{}".format(experiment.name, kwargs)
if shrunked:
description = "{}\nSimplified to: {}".format(description, simplified_to)
else:
description = "{}\n Failed to simplify".format(description)
t.assertTrue(ok, description)
test_experiment.__doc__ = experiment.name
return test_experiment
class QuickCheck(object):
def __init__(self, **settings) -> None:
super(QuickCheck, self).__init__()
self.settings = settings or config
self.experiments = {}
def __call__(self, experiment_name: str, **defaults) -> Callable:
def decorator(fn):
config = default
if defaults:
config = deepcopy(self.settings)
config.update(defaults)
debug("Register {} to {}".format(experiment_name, fn))
self.experiments[experiment_name] = experiment(experiment_name, fn, config)
return fn
return decorator
forall = __call__
def as_testcase(
self, prototype=unittest.TestCase, skip_on_failure=True, simplification=True
):
"""
:param prototype: class of test case
:param skip_on_failure: boolean flag to skip all test group on first failure
:return: test case class
"""
debug("_" * 50)
class TestProperties(prototype):
"""
Automatically generated tests case based on quick check properties
"""
@classmethod
def should_fail(cls):
cls.__unittest_expecting_failure__ = True
return cls
def skip_if():
skip = False
def wrap(fn):
@functools.wraps(fn)
def inner(*args, **kwargs):
nonlocal skip
if skip and skip_on_failure:
raise unittest.SkipTest("Failed experiment")
try:
return fn(*args, **kwargs)
except Exception as e:
skip = True
raise e
return inner
return wrap
settings = self.settings
properties = []
for experiment in self.experiments.values():
if experiment.config is not default:
settings = experiment.config
max_count = settings["max_count"]
skip_group = skip_if()
debug("Generating {} tests for [{}]".format(max_count, experiment.name))
for x in range(max_count):
test_experiment = code_gen(experiment, x, skip_group, simplification)
setattr(
TestProperties, "{}#{}".format(experiment.name, x), test_experiment
)
properties.append(test_experiment)
TestProperties.properties = properties
return TestProperties
def verify(self) -> List[NoneType]:
test_cls = self.as_testcase()
test = test_cls()
return [prop(test) for prop in test.properties]
forall = QuickCheck()
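# Hypothetical usage sketch (assumption: the decorated property's argument handling
# is whatever quick.core.generate() expects; the property body is illustrative only):
#
#   @forall('addition is commutative', max_count=50)
#   def prop_commutative(x, y):
#       return x + y == y + x
#
#   TestProps = forall.as_testcase()  # a unittest.TestCase subclass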
|
[
"msoedov@gmail.com"
] |
msoedov@gmail.com
|
2ac05a8ce20667dc6ca62cd5a23ac3d6825a9e76
|
c1fd96e92379b7a5eda183a0e9522d3bc8a1cd3d
|
/kaa/filetype/python/pythonmode.py
|
cc86751f85387bc8ba2e2653a1a6c95f79cbeac4
|
[
"MIT"
] |
permissive
|
okazu-dm/kaa
|
3b2c59652f2468ed4a78d764667f7f896e759a33
|
3326100c64ca6f638db6361ca72bc56a4aef7b8f
|
refs/heads/master
| 2020-05-23T10:09:05.153521
| 2013-10-22T13:05:45
| 2013-10-22T13:05:45
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,653
|
py
|
import keyword, copy
from kaa.filetype.default import defaultmode, theme
from kaa.highlight import Tokenizer, Keywords, Span
from kaa.theme import Theme, Style
from gappedbuf import re as gre
from kaa.command import Commands, command, norec, norerun
from kaa.keyboard import *
PythonThemes = {
'default':
Theme([
Style('python-bytes', 'blue', 'default'),
])
}
PYTHONMENU = [
['&Comment', None, 'code.region.linecomment'],
['&Uncomment', None, 'code.region.unlinecomment'],
]
python_code_keys = {
((alt, 'm'), ('c')): 'menu.python.code',
}
class PythonMode(defaultmode.DefaultMode):
MODENAME = 'Python'
re_begin_block = gre.compile(r"[^#]*:\s*(#.*)?$")
LINE_COMMENT = '#'
def init_keybind(self):
super().init_keybind()
self.register_keys(self.keybind, [python_code_keys])
def init_menu(self):
super().init_menu()
self.menu['CODE'] = copy.deepcopy(PYTHONMENU)
def init_themes(self):
super().init_themes()
self.themes.append(PythonThemes)
def init_tokenizers(self):
self.tokenizers = [Tokenizer([
Keywords('python-statement', 'keyword', keyword.kwlist),
Span('python-comment', 'comment', r'\#', '$', escape='\\'),
Span('python-string31', 'string', 'r?"""', '"""', escape='\\'),
Span('python-string32', 'string', "r?'''", "'''", escape='\\'),
Span('python-string11', 'string', 'r?"', '"', escape='\\'),
Span('python-string12', 'string', "r?'", "'", escape='\\'),
            Span('python-bytes31', 'python-bytes', '(br?|r?b)"""', '"""', escape='\\'),
Span('python-bytes32', 'python-bytes', "(br?|r?b)'''", "'''", escape='\\'),
Span('python-bytes11', 'python-bytes', '(br?|r?b)"', '"', escape='\\'),
Span('python-bytes12', 'python-bytes', "(br?|r?b)'", "'", escape='\\'),
])]
RE_BEGIN_NEWBLOCK = gre.compile(r"[^#]*\:\s*(#.*)?$", gre.M)
def on_auto_indent(self, wnd):
pos = wnd.cursor.pos
tol = self.document.gettol(pos)
m = self.RE_BEGIN_NEWBLOCK.match(self.document.buf, tol, pos)
if not m:
super().on_auto_indent(wnd)
else:
f, t = self.get_indent_range(pos)
t = min(t, pos)
cols = self.calc_cols(f, t)
indent = self.build_indent_str(cols+self.indent_width)
indent = '\n'+indent
self.edit_commands.insert_string(wnd, pos, indent,
update_cursor=False)
wnd.cursor.setpos(pos+len(indent))
wnd.cursor.savecol()
|
[
"ishimoto@gembook.org"
] |
ishimoto@gembook.org
|
8a9f2f260ce87cc91474a5b8be78abd0ed4c103b
|
ab50920ebb8d9679230c13b8f91998e47e9f4f82
|
/samples/server/petstore/python-blueplanet/app/openapi_server/models/order.py
|
e222f5ee0069dbb383352d37536a24db5e77ec3c
|
[
"Apache-2.0"
] |
permissive
|
oxidecomputer/openapi-generator
|
f50ee17579b02a35d30894f16a4d98dc81f8b06b
|
f8770d7c3388d9f1a5069a7f37378aeadcb81e16
|
refs/heads/master
| 2023-08-25T09:24:27.666296
| 2021-02-25T15:36:35
| 2021-02-25T15:36:35
| 334,329,847
| 6
| 0
|
Apache-2.0
| 2022-10-14T05:05:39
| 2021-01-30T04:46:13
|
Java
|
UTF-8
|
Python
| false
| false
| 4,935
|
py
|
# coding: utf-8
from __future__ import absolute_import
from datetime import date, datetime # noqa: F401
from typing import List, Dict # noqa: F401
from app.openapi_server.models.base_model_ import Model
from openapi_server import util
class Order(Model):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
def __init__(self, id: int=None, pet_id: int=None, quantity: int=None, ship_date: datetime=None, status: str=None, complete: bool=False): # noqa: E501
"""Order - a model defined in Swagger
:param id: The id of this Order. # noqa: E501
:type id: int
:param pet_id: The pet_id of this Order. # noqa: E501
:type pet_id: int
:param quantity: The quantity of this Order. # noqa: E501
:type quantity: int
:param ship_date: The ship_date of this Order. # noqa: E501
:type ship_date: datetime
:param status: The status of this Order. # noqa: E501
:type status: str
:param complete: The complete of this Order. # noqa: E501
:type complete: bool
"""
self.swagger_types = {
'id': int,
'pet_id': int,
'quantity': int,
'ship_date': datetime,
'status': str,
'complete': bool
}
self.attribute_map = {
'id': 'id',
'pet_id': 'petId',
'quantity': 'quantity',
'ship_date': 'shipDate',
'status': 'status',
'complete': 'complete'
}
self._id = id
self._pet_id = pet_id
self._quantity = quantity
self._ship_date = ship_date
self._status = status
self._complete = complete
@classmethod
def from_dict(cls, dikt) -> 'Order':
"""Returns the dict as a model
:param dikt: A dict.
:type: dict
:return: The Order of this Order. # noqa: E501
:rtype: Order
"""
return util.deserialize_model(dikt, cls)
@property
def id(self) -> int:
"""Gets the id of this Order.
:return: The id of this Order.
:rtype: int
"""
return self._id
@id.setter
def id(self, id: int):
"""Sets the id of this Order.
:param id: The id of this Order.
:type id: int
"""
self._id = id
@property
def pet_id(self) -> int:
"""Gets the pet_id of this Order.
:return: The pet_id of this Order.
:rtype: int
"""
return self._pet_id
@pet_id.setter
def pet_id(self, pet_id: int):
"""Sets the pet_id of this Order.
:param pet_id: The pet_id of this Order.
:type pet_id: int
"""
self._pet_id = pet_id
@property
def quantity(self) -> int:
"""Gets the quantity of this Order.
:return: The quantity of this Order.
:rtype: int
"""
return self._quantity
@quantity.setter
def quantity(self, quantity: int):
"""Sets the quantity of this Order.
:param quantity: The quantity of this Order.
:type quantity: int
"""
self._quantity = quantity
@property
def ship_date(self) -> datetime:
"""Gets the ship_date of this Order.
:return: The ship_date of this Order.
:rtype: datetime
"""
return self._ship_date
@ship_date.setter
def ship_date(self, ship_date: datetime):
"""Sets the ship_date of this Order.
:param ship_date: The ship_date of this Order.
:type ship_date: datetime
"""
self._ship_date = ship_date
@property
def status(self) -> str:
"""Gets the status of this Order.
Order Status # noqa: E501
:return: The status of this Order.
:rtype: str
"""
return self._status
@status.setter
def status(self, status: str):
"""Sets the status of this Order.
Order Status # noqa: E501
:param status: The status of this Order.
:type status: str
"""
allowed_values = ["placed", "approved", "delivered"] # noqa: E501
if status not in allowed_values:
raise ValueError(
"Invalid value for `status` ({0}), must be one of {1}"
.format(status, allowed_values)
)
self._status = status
@property
def complete(self) -> bool:
"""Gets the complete of this Order.
:return: The complete of this Order.
:rtype: bool
"""
return self._complete
@complete.setter
def complete(self, complete: bool):
"""Sets the complete of this Order.
:param complete: The complete of this Order.
:type complete: bool
"""
self._complete = complete
|
[
"wing328hk@gmail.com"
] |
wing328hk@gmail.com
|
db53a329ef4705bdbd512dcc044c8a8a69cba074
|
f6003f9f25dcc182e9fbce7a96d0dabb9341744c
|
/Exercícios/Lista 6 - Seção 13 - Leitura e escrita em arquivo/Questão 20 - Enunciado no código .py
|
6605ae8200a5865776d234e52d03e8f1a7195514
|
[] |
no_license
|
henriquecl/Aprendendo_Python
|
60a87959714f82894e996c06b0a1b767838c38fc
|
672029855431795defafd7e20e8da319bf34e502
|
refs/heads/master
| 2023-06-08T10:48:13.667893
| 2021-06-22T00:55:14
| 2021-06-22T00:55:14
| 261,029,613
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 886
|
py
|
"""
Questão 20 - Crie um programa que receba como entrada o número de alunos. Receba em uma lista o nome dos alunos e o seg
undo contém suas notas finais. Crie um arquivo que armazene a cada linha o nome do aluno e sua nota final. O nome tem
que ter no máximo 40 caracteres. Se não tiver 40, preencher com espaço em branco
"""
alunos = []
notas = []
texto = ''
while True:
nome = input('Digite o nome do aluno, para parar digite "sair"\n')
if nome == 'sair':
break
nota = input(f'Digite a nota da(o) aluno: ')
alunos.append(nome)
notas.append(nota)
for i2 in range(len(alunos)):
if len(alunos[i2]) < 40:
alunos[i2] = alunos[i2] + ' '*(40 - len(alunos[i2]))
for i3 in range(len(alunos)):
texto = texto + alunos[i3] + 'Nota:' + notas[i3] + '\n'
with open('questao20.txt', 'w', encoding='UTF-8') as arquivo:
arquivo.write(texto)
|
[
"64755074+henriquecl@users.noreply.github.com"
] |
64755074+henriquecl@users.noreply.github.com
|
b40616b31df98d43d1b6a084948c93946dd914ac
|
c2dd6b06b56a4db596e196b77a072caeb53042ce
|
/python_modules/dagster/dagster_tests/cli_tests/test_workspace_config_schema.py
|
82bc9e9410785d896141188fad8721e511b626fe
|
[
"Apache-2.0"
] |
permissive
|
shasha79/dagster
|
ed8beac9a134dc22e3c23b0db3bb60884fe2e1de
|
b138d2454eb4a7f7e19a9d1763fa6c06bbb239e6
|
refs/heads/master
| 2022-11-11T16:59:11.144130
| 2020-07-03T16:44:19
| 2020-07-03T16:44:19
| 276,888,663
| 0
| 0
|
Apache-2.0
| 2020-07-03T11:55:59
| 2020-07-03T11:55:58
| null |
UTF-8
|
Python
| false
| false
| 3,371
|
py
|
import yaml
from dagster.cli.workspace.config_schema import validate_workspace_config
def _validate_yaml_contents(yaml_contents):
return validate_workspace_config(yaml.safe_load(yaml_contents))
def test_repository_yaml_parsing():
valid_yaml_contents = '''
repository:
module: some_module
fn: a_repo
'''
assert _validate_yaml_contents(valid_yaml_contents).success
invalid_yaml_contents = '''
repository:
module: some_module
wrong: a_repo
'''
assert not _validate_yaml_contents(invalid_yaml_contents).success
def test_python_file():
terse_workspace_yaml = '''
load_from:
- python_file: a_file.py
'''
assert _validate_yaml_contents(terse_workspace_yaml).success
nested_workspace_yaml = '''
load_from:
- python_file:
relative_path: a_file.py
'''
assert _validate_yaml_contents(nested_workspace_yaml).success
nested_workspace_yaml_with_def_name = '''
load_from:
- python_file:
relative_path: a_file.py
attribute: repo_symbol
'''
assert _validate_yaml_contents(nested_workspace_yaml_with_def_name).success
nested_workspace_yaml_with_def_name_and_location = '''
load_from:
- python_file:
relative_path: a_file.py
attribute: repo_symbol
location_name: some_location
'''
assert _validate_yaml_contents(nested_workspace_yaml_with_def_name_and_location).success
def test_python_module():
terse_workspace_yaml = '''
load_from:
- python_module: a_module
'''
assert _validate_yaml_contents(terse_workspace_yaml).success
nested_workspace_yaml = '''
load_from:
- python_module:
module_name: a_module
'''
assert _validate_yaml_contents(nested_workspace_yaml).success
nested_workspace_yaml_with_def_name = '''
load_from:
- python_module:
module_name: a_module
attribute: repo_symbol
'''
assert _validate_yaml_contents(nested_workspace_yaml_with_def_name).success
nested_workspace_yaml_with_def_name_and_location = '''
load_from:
- python_module:
module_name: a_module
attribute: repo_symbol
location_name: some_location
'''
assert _validate_yaml_contents(nested_workspace_yaml_with_def_name_and_location).success
def test_cannot_do_both():
both_yaml = '''
load_from:
- python_module: a_module
python_file: a_file.py
'''
assert not _validate_yaml_contents(both_yaml).success
def test_load_both():
both_yaml = '''
load_from:
- python_module: a_module
- python_file: a_file.py
'''
assert _validate_yaml_contents(both_yaml).success
def test_load_python_environment_with_file():
python_environment_yaml_with_file = '''
load_from:
- python_environment:
executable_path: /path/to/venv/bin/python
target:
python_file: file_valid_in_that_env.py
'''
validation_result = _validate_yaml_contents(python_environment_yaml_with_file)
assert validation_result.success
def test_load_python_environment_with_module():
python_environment_yaml_with_module = '''
load_from:
- python_environment:
executable_path: /path/to/venv/bin/python
target:
python_module: module_valid_in_that_env.py
'''
validation_result = _validate_yaml_contents(python_environment_yaml_with_module)
assert validation_result.success
|
[
"schrockn@elementl.com"
] |
schrockn@elementl.com
|
689df46f7b895f0325dd2aa9407ac9eaec2f709b
|
a140fe192fd643ce556fa34bf2f84ddbdb97f091
|
/.history/다양한 출력포맷_20200705141045.py
|
b9bbcf137e8d3e41db97353267569be16ddbb422
|
[] |
no_license
|
sangha0719/py-practice
|
826f13cb422ef43992a69f822b9f04c2cb6d4815
|
6d71ce64bf91cc3bccee81378577d84ba9d9c121
|
refs/heads/master
| 2023-03-13T04:40:55.883279
| 2021-02-25T12:02:04
| 2021-02-25T12:02:04
| 342,230,484
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 720
|
py
|
# Leave unused positions blank, right-align, and reserve a total width of 10 characters
print("{0: >10}".format(500))
# Show a + sign for positive numbers and a - sign for negative numbers
print("{0: >+10}".format(500))
print("{0: >+10}".format(-500))
# Left-align and fill the blanks with _
print("{0:_<+10}".format(500))
# Put a comma every 3 digits
print("{0:,}".format(100000000000))
# Put a comma every 3 digits and include the +/- sign
print("{0:+,}".format(100000000000))
print("{0:+,}".format(-100000000000))
# Put a comma every 3 digits, include the sign, and reserve a fixed width
# Since more money means more happiness, fill the unused positions with ^
print("{0:+,}".format(100000000000))
|
[
"sangha0719@gmail.com"
] |
sangha0719@gmail.com
|
0bed65bcd8fe1c0b2dba68f4c6e191e1c0dd7d79
|
01d38140093f41e6afbd67445dbca78d9220d7fd
|
/linear_regression_learner.py
|
05b5f705f8a1be190a199b6dcdb5c38254cfec16
|
[] |
no_license
|
boyko11/LinRegTorch
|
e174eb1452646b52f9c7c9161ee30f45630ef8b6
|
a8f05fe67b4be3ee9e885194b5b750371b1cb5e1
|
refs/heads/master
| 2022-07-04T04:36:04.136287
| 2020-05-10T21:15:14
| 2020-05-10T21:15:14
| 262,623,970
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,021
|
py
|
import numpy as np
import torch
class LinearRegressionLearner():
def __init__(self, theta_size, learning_rate=0.00001):
self.theta_tensor = torch.randn(theta_size, 1, requires_grad=True)
self.loss_function = torch.nn.L1Loss()
self.learning_rate = learning_rate
self.loss_history = []
def predict(self, features_tensor):
return torch.mm(features_tensor, self.theta_tensor)
def calculate_loss(self, predictions_tensor, labels_tensor):
return self.loss_function(predictions_tensor, labels_tensor)
def train(self, features_tensor, labels_tensor, epochs=1000):
for i in range(epochs):
predictions = self.predict(features_tensor)
loss = self.calculate_loss(predictions, labels_tensor)
self.loss_history.append(loss)
loss.backward()
with torch.no_grad():
self.theta_tensor -= self.theta_tensor.grad * self.learning_rate
self.theta_tensor.grad.zero_()
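# Hypothetical usage sketch (assumption: tensor shapes follow predict(), which
# computes torch.mm(features, theta) with theta of shape (theta_size, 1)).
if __name__ == '__main__':
    torch.manual_seed(0)
    features = torch.randn(100, 3)
    true_theta = torch.tensor([[2.0], [-1.0], [0.5]])
    labels = features @ true_theta
    learner = LinearRegressionLearner(theta_size=3, learning_rate=1e-3)
    learner.train(features, labels, epochs=500)
    print(learner.theta_tensor)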
|
[
"boyko11@gmail.com"
] |
boyko11@gmail.com
|
f0db017322cd95c267fcbaddaf60360bec86248e
|
de24f83a5e3768a2638ebcf13cbe717e75740168
|
/moodledata/vpl_data/405/usersdata/284/75780/submittedfiles/exe11.py
|
487a2312007a81920fb10875f75153e3da8fe793
|
[] |
no_license
|
rafaelperazzo/programacao-web
|
95643423a35c44613b0f64bed05bd34780fe2436
|
170dd5440afb9ee68a973f3de13a99aa4c735d79
|
refs/heads/master
| 2021-01-12T14:06:25.773146
| 2017-12-22T16:05:45
| 2017-12-22T16:05:45
| 69,566,344
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 748
|
py
|
# -*- coding: utf-8 -*-
a=int(input('digite o numero: '))
if a>99999999 or a<10000000:
print('nao sei')
else:
d1=(a//(10**7))
d2=(a-(d1*(10**7)))//(10**6)
d3=(a-(d1*(10**7))-(d2*(10**6)))//(10**5)
d4=(a-(d1*(10**7))-(d2*(10**6))-(d3*(10**5)))//(10**4)
d5=(a-(d1*(10**7))-(d2*(10**6))-(d3*(10**5))-(d4*(10**4)))//(10**3)
    d6=(a-(d1*(10**7))-(d2*(10**6))-(d3*(10**5))-(d4*(10**4))-(d5*(10**3)))//(10**2)
    d7=(a-(d1*(10**7))-(d2*(10**6))-(d3*(10**5))-(d4*(10**4))-(d5*(10**3))-(d6*(10**2)))//(10**1)
    d8=(a-(d1*(10**7))-(d2*(10**6))-(d3*(10**5))-(d4*(10**4))-(d5*(10**3))-(d6*(10**2))-(d7*(10**1)))//1
print(d1)
print(d2)
print(d3)
print(d4)
print(d5)
print(d6)
print(d7)
print(d8)
|
[
"rafael.mota@ufca.edu.br"
] |
rafael.mota@ufca.edu.br
|
16a211c171b197fac8281ce6fa826d216b8534b8
|
3af6960c805e9903eb27c09d8bc7ebc77f5928fe
|
/problems/0173_Binary_Search_Tree_Iterator/solution.py
|
7f652418a02ce3af736afa21f1de7714c75c320d
|
[] |
no_license
|
romain-li/leetcode
|
b3c8d9d4473eebd039af16ad2d4d99abc2768bdd
|
5e82b69bd041c2c168d75cb9179a8cbd7bf0173e
|
refs/heads/master
| 2020-06-04T20:05:03.592558
| 2015-06-08T18:05:03
| 2015-06-08T18:05:03
| 27,431,664
| 2
| 1
| null | 2015-06-08T18:05:04
| 2014-12-02T12:31:58
|
Python
|
UTF-8
|
Python
| false
| false
| 567
|
py
|
# Definition for a binary tree node
# class TreeNode:
# def __init__(self, x):
# self.val = x
# self.left = None
# self.right = None
class BSTIterator:
    # @param root, a binary search tree's root node
    def __init__(self, root):
        # Keep the path to the next smallest node on a stack (O(h) memory).
        self.stack = []
        while root:
            self.stack.append(root)
            root = root.left

    # @return a boolean, whether we have a next smallest number
    def hasNext(self):
        return len(self.stack) > 0

    # @return an integer, the next smallest number
    def next(self):
        node = self.stack.pop()
        # Descend the left spine of the right subtree to find the successor.
        curr = node.right
        while curr:
            self.stack.append(curr)
            curr = curr.left
        return node.val
# Your BSTIterator will be called like this:
# i, v = BSTIterator(root), []
# while i.hasNext(): v.append(i.next())
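# --- Smoke test (added for illustration; not part of the original solution). ---
# Builds a tiny BST by hand, since TreeNode above is only shown in a comment.
if __name__ == '__main__':
    class TreeNode:
        def __init__(self, x):
            self.val = x
            self.left = None
            self.right = None
    root = TreeNode(3)
    root.left = TreeNode(2)
    root.right = TreeNode(4)
    i, v = BSTIterator(root), []
    while i.hasNext():
        v.append(i.next())
    print(v)  # expected: [2, 3, 4]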
|
[
"romain_li@163.com"
] |
romain_li@163.com
|
0a58cf2911a659203311090431b380b9748431c3
|
90574ae9759f98f8687cd3e842a6b4301578baea
|
/batbelt/utils.py
|
fddd41a996904a6a1cf6a42f8e2d5dc3ff6eefd7
|
[
"Zlib"
] |
permissive
|
sametmax/Bat-belt
|
88a52a1d89e5dac8bfd69e26a0f106bc0520fdea
|
372117e3876328f84804a296ee9636dee1e82206
|
refs/heads/master
| 2021-05-16T02:29:13.264688
| 2015-09-18T16:59:26
| 2015-09-18T16:59:26
| 6,219,068
| 19
| 3
| null | 2017-05-18T06:03:15
| 2012-10-14T22:55:42
|
Python
|
UTF-8
|
Python
| false
| false
| 2,792
|
py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# vim: ai ts=4 sts=4 et sw=4 nu
"""
The infamous utils.py module, filled with functions you don't know where else
to put.
"""
import sys
import os
from datetime import datetime
CLASSIC_DATETIME_FORMAT = '%Y-%m-%d %H:%M:%S.%f'
CLASSIC_DATETIME_PATTERN = r'\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2}\.\d{6}'
def to_timestamp(dt):
"""
Return a timestamp for the given datetime object.
Example:
>>> import datetime
>>> to_timestamp(datetime.datetime(2000, 1, 1, 1, 1, 1, 1))
946688461
"""
return (dt - datetime(1970, 1, 1)).total_seconds()
class ImportableItems(list):
def __init__(self, *args, **kwargs):
super(ImportableItems, self).__init__(*args, **kwargs)
self.non_importable_items = {}
def append(self, item_name):
self.non_importable_items.pop(item_name, None)
super(ImportableItems, self).append(item_name)
def import_list(*args):
"""
    Allows you to easily create an __all__ listing for a module.
Returns a value for __all__ and a decorator to add anything
to it easily.
"""
importable_items = ImportableItems()
importable_items.non_importable_items.update(sys._getframe(1).f_globals)
for item in args:
importable_items.append(item)
def importable(func, name=None):
if name is None:
try:
name = func.__name__
except AttributeError:
raise ValueError('You must provide a name for '
'this item: %s' % repr(func))
importable_items.append(name)
return func
return importable_items, importable
def add_to_pythonpath(path, starting_point='.', insertion_index=None):
"""
Add the directory to the sys.path.
    You can pass an absolute or a relative path to it.
If you choose to use a relative path, it will be relative to
`starting_point` by default, which is set to '.'.
You may want to set it to something like __file__ (the basename will
be stripped, and the current file's parent directory will be used
as a starting point, which is probably what you expect in the
first place).
:example:
>>> add_to_pythonpath('../..', __file__)
"""
if not os.path.isabs(path):
if os.path.isfile(starting_point):
starting_point = os.path.dirname(starting_point)
path = os.path.join(starting_point, path)
path = os.path.realpath(os.path.expandvars(os.path.expanduser(path)))
if path not in sys.path:
if insertion_index is None:
sys.path.append(path)
else:
sys.path.insert(insertion_index, path)
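# --- Usage sketch (added for illustration; not part of the original module). ---
if __name__ == '__main__':
    __all__, importable = import_list('CLASSIC_DATETIME_FORMAT')

    @importable
    def example():
        """A function exported explicitly through the importable decorator."""
        return to_timestamp(datetime(2000, 1, 1, 1, 1, 1, 1))

    print(__all__)    # ['CLASSIC_DATETIME_FORMAT', 'example']
    print(example())  # seconds elapsed since the Unix epoch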
|
[
"lesametlemax@gmail.com"
] |
lesametlemax@gmail.com
|
df404f3b8ffb691f89f96c0f76927a6c6e7be13b
|
c85238daac6141efea2895485b9b18fc08cf4cf2
|
/PathTracking/lqr/unicycle_model.py
|
6e55e37b422e9be8ec8346a25d0b4cf7a5720654
|
[
"MIT"
] |
permissive
|
RyoheiTakahashi/PythonRobotics
|
6543529ff677a68792cfc0a336aa9992d163ec96
|
bee232e84dedcfe0b1dc494ce86c67130b42a8c8
|
refs/heads/master
| 2021-01-02T09:00:24.091192
| 2017-07-31T06:50:07
| 2017-07-31T06:50:07
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,167
|
py
|
#! /usr/bin/python
# -*- coding: utf-8 -*-
"""
author Atsushi Sakai
"""
import math
dt = 0.1 # [s]
L = 2.9 # [m]
class State:
def __init__(self, x=0.0, y=0.0, yaw=0.0, v=0.0):
self.x = x
self.y = y
self.yaw = yaw
self.v = v
def update(state, a, delta):
state.x = state.x + state.v * math.cos(state.yaw) * dt
state.y = state.y + state.v * math.sin(state.yaw) * dt
state.yaw = state.yaw + state.v / L * math.tan(delta) * dt
state.v = state.v + a * dt
return state
if __name__ == '__main__':
print("start unicycle simulation")
import matplotlib.pyplot as plt
T = 100
a = [1.0] * T
delta = [math.radians(1.0)] * T
# print(delta)
# print(a, delta)
state = State()
x = []
y = []
yaw = []
v = []
for (ai, di) in zip(a, delta):
state = update(state, ai, di)
x.append(state.x)
y.append(state.y)
yaw.append(state.yaw)
v.append(state.v)
flg, ax = plt.subplots(1)
plt.plot(x, y)
plt.axis("equal")
plt.grid(True)
flg, ax = plt.subplots(1)
plt.plot(v)
plt.grid(True)
plt.show()
|
[
"asakai.amsl+github@gmail.com"
] |
asakai.amsl+github@gmail.com
|
75f0cc9ced2b0b3b7f516afcd8b9a89bfbef64ea
|
d4a8bedc9c1c9897b3e07a3c9067f7c89c63be5f
|
/python_builtins/sort_by_size.py
|
cf8b0e967cb9cab9f931467b0f72881a53f9bdbd
|
[] |
no_license
|
udoyen/python_refresher
|
816fe6264dbc21ce7a3697eb0f96aa8f9f402032
|
8e542d8e6221b041fc945f5770f25d5da03d8f6a
|
refs/heads/master
| 2023-05-25T19:51:55.938397
| 2019-11-19T17:22:30
| 2019-11-19T17:22:30
| 222,283,118
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 791
|
py
|
from .sort_on_length import lensort
n = [1, 2, 3, 5, 6, 4, 10, 11, 12, 13, 14]
s = ['python', 'perl', 'java', 'c', 'haskell', 'ruby']
def sort_by_size(n):
state = False
for i in n:
if isinstance(i, str):
state = True
if state:
lensort(n)
else:
        # Plain bubble sort: swap adjacent items that are out of order.
        for i in range(len(n)):
            for j in range(len(n) - 1):
                if n[j] > n[j + 1]:
                    n[j], n[j + 1] = n[j + 1], n[j]
    print(n)
if __name__ == '__main__' and __package__ is None:
from os import sys, path
sys.path.append(path.dirname(path.dirname(path.abspath(__file__))))
sort_by_size(n)
|
[
"datameshprojects@gmail.com"
] |
datameshprojects@gmail.com
|
05297da59c8dfa9824b8284de359353b5ce477a1
|
d926c87644a2861d3c4edf36c1e1692acec38fb5
|
/src/api/forms.py
|
fca77e5fde7cb8423ea759b5d0b4c6fcd89f62d6
|
[] |
no_license
|
mainhith97/AG-BE
|
1058aee4bce0888da07a30fa44802115851b5e6e
|
908b6e542a3219a233417f8f061241985265c777
|
refs/heads/master
| 2023-01-09T02:54:12.888094
| 2019-12-12T04:09:03
| 2019-12-12T04:09:03
| 213,387,121
| 0
| 0
| null | 2022-12-27T15:36:51
| 2019-10-07T13:13:13
|
Python
|
UTF-8
|
Python
| false
| false
| 1,265
|
py
|
#!/usr/bin/env python
from django import forms
from django.contrib.auth import get_user_model
from django.contrib.auth.forms import ReadOnlyPasswordHashField
User = get_user_model()
class UserAdminChangeForm(forms.ModelForm):
password = ReadOnlyPasswordHashField()
class Meta:
model = User
fields = ('username', 'password', 'active', 'admin')
def clean_password(self):
return self.initial["password"]
class UserAdminCreationForm(forms.ModelForm):
password1 = forms.CharField(label='Password', widget=forms.PasswordInput)
password2 = forms.CharField(label='Password confirmation', widget=forms.PasswordInput)
class Meta:
model = User
fields = ('username', 'admin', 'staff')
def clean_password2(self):
password1 = self.cleaned_data.get("password1")
password2 = self.cleaned_data.get("password2")
if password1 and password2 and password1 != password2:
raise forms.ValidationError("Passwords don't match")
return password2
def save(self, commit=True):
user = super(UserAdminCreationForm, self).save(commit=False)
user.set_password(self.cleaned_data["password1"])
if commit:
user.save()
return user
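# --- Usage sketch (added for illustration; not part of the original file). ---
# These forms are typically wired into the Django admin, for example:
# from django.contrib import admin
# from django.contrib.auth.admin import UserAdmin as BaseUserAdmin
#
# class UserAdmin(BaseUserAdmin):
#     form = UserAdminChangeForm
#     add_form = UserAdminCreationForm
#
# admin.site.register(User, UserAdmin)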
|
[
"you@example.com"
] |
you@example.com
|
3549f90d7b101b2118ff612c4fe503a21f99d31b
|
238702bd275d9a8171c6e479c6ad1bbaa1ee3158
|
/app/blog/models.py
|
4dec50a6791965d9f31409b8992f01e302a8a1d8
|
[] |
no_license
|
maro99/djangogirls_pt_for_test
|
aebf03672741786ea74686efac21303961bf3987
|
4400aed75640273d8adbe7b1592607ccb23b42c2
|
refs/heads/master
| 2020-03-19T17:46:27.767327
| 2018-06-10T15:02:17
| 2018-06-10T15:02:17
| 136,775,121
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 566
|
py
|
from django.conf import settings
from django.db import models
from django.utils import timezone
class Post(models.Model):
author = models.ForeignKey(settings.AUTH_USER_MODEL,on_delete=models.CASCADE)
title = models.CharField(max_length=200)
text = models.TextField(blank=True)
created_date = models.DateTimeField(default=timezone.now)
published_date = models.DateTimeField(blank=True, null=True)
def publish(self):
self.published_date=timezone.now()
self.save()
def __str__(self):
return self.title
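# --- Usage sketch (added for illustration; not part of the original file). ---
# post = Post.objects.create(author=some_user, title='Hello', text='...')
# post.publish()  # stamps published_date with the current time and saves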
|
[
"nadcdc4@gmail.com"
] |
nadcdc4@gmail.com
|
a466328f675d05531ab0713a6dafb6c78b0f3ff7
|
c6ae1b415c5de367677dceaecd5cd983365609e0
|
/Atividades/Roteiro 5 - While/Programas/Roteiro 5 Questão 7.py
|
0e3f5b5b3eb25ddead15c1c27823d585c58d4985
|
[] |
no_license
|
JardelBrandon/Algoritmos_e_Programacao
|
8f00548ed9706cfeb3ad2b2ac6db0b9d2eb0f13c
|
66784a567d85cf340d50400a14ea6915779a1304
|
refs/heads/master
| 2021-07-11T14:36:09.260443
| 2017-10-12T20:55:41
| 2017-10-12T20:55:41
| 106,738,616
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 831
|
py
|
# 7. Write a program that reads integers from standard input until a negative
# or even number is entered.
while True :
num = int (input ("Digite um número inteiro aleatório :"))
print (num)
if num < 0 :
print(num, "Digito negativo, fim !")
break
if num % 2 == 0 :
print(num, "Digito par, fim !")
break
# The program's algorithm performs the following steps:
# It enters a repetition loop by stating that the while condition is True
# It defines the stopping condition through a mathematical comparison
# It prints the numbers on the screen according to the imposed condition
# When one of the conditions is no longer met,
# the program prints the entered number followed by the quoted message and ends,
# fulfilling what the exercise asks for
|
[
"jardelbrandon@hotmail.com"
] |
jardelbrandon@hotmail.com
|