blob_id stringlengths 40 40 | directory_id stringlengths 40 40 | path stringlengths 3 288 | content_id stringlengths 40 40 | detected_licenses listlengths 0 112 | license_type stringclasses 2 values | repo_name stringlengths 5 115 | snapshot_id stringlengths 40 40 | revision_id stringlengths 40 40 | branch_name stringclasses 684 values | visit_date timestamp[us]date 2015-08-06 10:31:46 2023-09-06 10:44:38 | revision_date timestamp[us]date 1970-01-01 02:38:32 2037-05-03 13:00:00 | committer_date timestamp[us]date 1970-01-01 02:38:32 2023-09-06 01:08:06 | github_id int64 4.92k 681M ⌀ | star_events_count int64 0 209k | fork_events_count int64 0 110k | gha_license_id stringclasses 22 values | gha_event_created_at timestamp[us]date 2012-06-04 01:52:49 2023-09-14 21:59:50 ⌀ | gha_created_at timestamp[us]date 2008-05-22 07:58:19 2023-08-21 12:35:19 ⌀ | gha_language stringclasses 147 values | src_encoding stringclasses 25 values | language stringclasses 1 value | is_vendor bool 2 classes | is_generated bool 2 classes | length_bytes int64 128 12.7k | extension stringclasses 142 values | content stringlengths 128 8.19k | authors listlengths 1 1 | author_id stringlengths 1 132 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
ef2d3f3f7e8a2e96fbc502eab722471aa49295ce | e0dc9c1562df240bb14a0b70c92dfff0d06f898f | /pyatv/mrp/pairing.py | b816735ce73dad35129497e775b15b6da5f9b147 | [
"MIT"
] | permissive | sdrmm/pyatv | 1c5e7f9623fee9e91bc1de1d76cee57bde20569e | 04c86fe94a97868b33625852894fa67f13961752 | refs/heads/master | 2021-04-09T03:37:57.360823 | 2020-03-20T12:16:35 | 2020-03-20T12:48:17 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,117 | py | """Device pairing and derivation of encryption keys."""
import logging
from pyatv import exceptions
from pyatv.const import Protocol
from pyatv.interface import PairingHandler
from pyatv.mrp.auth import MrpPairingProcedure
from pyatv.mrp.srp import SRPAuthHandler
from pyatv.mrp.protocol import MrpProtocol
from pyatv.mrp.connection import MrpConnection
from pyatv.support import error_handler
_LOGGER = logging.getLogger(__name__)
class MrpPairingHandler(PairingHandler):
    """Base class for API used to pair with an Apple TV."""

    def __init__(self, config, session, loop):
        """Initialize a new MrpPairingHandler.

        config: device configuration holding the MRP service details.
        session: HTTP session forwarded to the PairingHandler base class.
        loop: asyncio event loop used for the MRP connection.
        """
        super().__init__(session, config.get_service(Protocol.MRP))
        # Raw connection to the Apple TV's MRP service port.
        self.connection = MrpConnection(config.address, self.service.port, loop)
        # SRP handler performs the cryptographic part of pairing.
        self.srp = SRPAuthHandler()
        self.protocol = MrpProtocol(loop, self.connection, self.srp, self.service)
        self.pairing_procedure = MrpPairingProcedure(self.protocol, self.srp)
        # PIN is supplied later via pin(); must be set before finish().
        self.pin_code = None

    async def close(self):
        """Call to free allocated resources after pairing."""
        self.connection.close()
        await super().close()

    @property
    def has_paired(self):
        """If a successful pairing has been performed."""
        # Credentials are only stored after finish() succeeds.
        return self.service.credentials is not None

    def begin(self):
        """Start pairing process."""
        # Wrap protocol-level failures in a PairingError for callers.
        return error_handler(
            self.pairing_procedure.start_pairing, exceptions.PairingError
        )

    async def finish(self):
        """Stop pairing process."""
        if not self.pin_code:
            raise exceptions.PairingError("no pin given")

        # Persist the derived credentials on the service record.
        self.service.credentials = str(
            await error_handler(
                self.pairing_procedure.finish_pairing,
                exceptions.PairingError,
                self.pin_code,
            )
        )

    @property
    def device_provides_pin(self):
        """Return True if remote device presents PIN code, else False."""
        return True

    def pin(self, pin):
        """Pin code used for pairing."""
        # MRP PINs are four digits; zero-pad shorter input (e.g. 42 -> "0042").
        self.pin_code = str(pin).zfill(4)
| [
"pierre.staahl@gmail.com"
] | pierre.staahl@gmail.com |
5f88a526d7a2c14bbe7c638a4f511eb9d0b52a79 | 3ddc54ded27dbb4d40b31958f3f2d2e14c26d8f2 | /setup.py | 42b9793074b19a6aadb25e5eda042d7c397b2ec3 | [] | no_license | chrisspen/webtimer | 10f813040982232d49c96f616fe11155f020c0a1 | 0641f8510d32d5d47bcdb4a2ab79fadad69dd521 | refs/heads/master | 2021-01-10T20:46:16.804849 | 2014-03-31T05:03:56 | 2014-03-31T05:03:56 | 18,275,355 | 1 | 0 | null | 2014-04-19T15:06:30 | 2014-03-30T22:29:10 | Python | UTF-8 | Python | false | false | 838 | py | from distutils.core import setup
import webtimer
# Package metadata; the version is sourced from the module itself so it
# is defined in exactly one place.
setup(name='webtimer',
      version=webtimer.__version__,
      description='Measures download times of web page resources.',
      author='Chris Spencer',
      author_email='chrisspen@gmail.com',
      url='https://github.com/chrisspen/webtimer',
      license='LGPL License',
      # Installed both as an importable module and as a command-line script.
      py_modules=['webtimer'],
      scripts=['webtimer.py'],
      install_requires=['fake-useragent>=0.0.5'],
      classifiers = [
          "Programming Language :: Python",
          #"Development Status :: 5 - Production/Stable",
          "License :: OSI Approved :: GNU Library or Lesser General Public License (LGPL)",
          "Operating System :: OS Independent",
          "Topic :: Software Development :: Libraries :: Python Modules",
          "Topic :: Text Processing :: General",
      ],
      platforms=['OS Independent'],
)
"chris@coronis"
] | chris@coronis |
a7e153f6d5c6acf1a84f1d1d2bc571239672827c | e1857e582609640f60923ea461da3e84c498095a | /block5-datastructures/lists/test_list_group_items.py | 418fb8d8eea3d35b4c9df8f5a256d7e6b1969623 | [] | no_license | mbaeumer/python-challenge | 178f188004e66c5c4092af51ae5d496679d39dec | 4cff4a4939268a496117158b0be4e20f4d934213 | refs/heads/master | 2023-08-07T22:43:35.490777 | 2023-07-21T21:26:46 | 2023-07-21T21:26:46 | 75,015,661 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 443 | py | import unittest
from list_group_items import group_items
class GroupItemTestCase(unittest.TestCase):
    """Unit tests for group_items, which counts occurrences per item."""

    def test_group_items(self):
        # Renamed from ``list`` to avoid shadowing the builtin.
        items = ['A', 'B', 'C', 'B', 'B', 'C', 'B', 'A']
        result = group_items(items)
        # Every distinct item must be present with its occurrence count.
        self.assertIn('A', result)
        self.assertEqual(result['A'], 2)
        self.assertEqual(result['B'], 4)
        # Previously untested: 'C' appears twice as well.
        self.assertEqual(result['C'], 2)
        # No spurious keys beyond the distinct input items.
        self.assertEqual(set(result), {'A', 'B', 'C'})


if __name__ == '__main__':
    unittest.main()
"martin.baeumer@gmail.com"
] | martin.baeumer@gmail.com |
6062ba986cf5fb20d971cc5e35f2973d8e23fda4 | 56bbd0ed01d3284e0c884e6e339f88d836354079 | /slurm/run_zsl_hpo.py | 766b97d676057fbc04468f075663913e6dcbffb2 | [] | no_license | yuanmengzhixing/class-norm | 93f93e6ab0037e4f76c6cb71d9e72392cb85bc0d | 0a4cdd651b30942e98191594b4782e4d0e546c73 | refs/heads/master | 2023-02-26T07:14:03.126491 | 2021-02-05T11:35:28 | 2021-02-05T11:37:29 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,219 | py | #!/usr/bin/env python
import sys; sys.path.append('.')
import os
import numpy as np
import argparse
from typing import Dict, List, Any, Callable
import numpy as np
from firelab.config import Config
from src.trainers.zsl_trainer import ZSLTrainer
from utils import generate_experiments_from_hpo_grid
def _str_to_bool(value):
    """Parse a command-line boolean.

    argparse's ``type=bool`` converts any non-empty string — including
    the literal "False" — to True, so an explicit parser is required.
    """
    return str(value).lower() in ('1', 'true', 'yes', 'y')


def read_args() -> argparse.Namespace:
    """Parse command-line arguments for the ZSL HPO runner."""
    parser = argparse.ArgumentParser('Running LLL trainer')
    parser.add_argument('-n', '--num_runs', type=int, help='Number of runs for each experimental setup')
    parser.add_argument('-d', '--dataset', type=str, help='Which dataset to run on?')
    parser.add_argument('-e', '--experiment', type=str, help='Which HPO experiment to run.')
    parser.add_argument('--random_search_seed', type=int, default=42, help='Random seed for Random Grid Search')
    # BUG FIX: was ``type=bool`` — "--silent False" would still yield True.
    parser.add_argument('--silent', type=_str_to_bool, default=True, help='Should we run the trainer in a silent mode?')
    parser.add_argument('--metric', default='mean')
    parser.add_argument('--count', action='store_true', help='Should we just count and exit?')

    return parser.parse_args()
def main():
    """Entry point: build the HPO grid and either count or run experiments."""
    args = read_args()
    # Grid definition for the requested experiment, loaded from YAML.
    hpos = Config.load('configs/experiments.yml')[args.experiment]
    experiments_vals = generate_experiments_from_hpo_grid(hpos.grid)
    if hpos.get('search_type') == 'random':
        # Random search: sub-sample the full grid with a fixed seed so
        # the selection is reproducible across invocations.
        experiments_vals = np.random.RandomState(args.random_search_seed).choice(
            experiments_vals, size=(min(len(experiments_vals), hpos.num_experiments),), replace=False)
    # Grid keys use '|' as a path separator; Config expects '.'.
    experiments_vals = [{p.replace('|', '.'): v for p, v in exp.items()} for exp in experiments_vals]
    hps = [Config(e) for e in experiments_vals]

    if args.count:
        # Dry run: report the total workload without training anything.
        print(f'Total number of experiments: {len(hps)} x [{args.num_runs} seed] = {len(hps) * args.num_runs}')
    else:
        run_hpo(args, hps)
def run_hpo(args, hps):
    """Run each hyperparameter configuration for ``args.num_runs`` seeds.

    Tracks two winners across configurations — the best *final*-epoch
    validation score and the best *peak* validation score — and appends
    progress plus the final result to a per-dataset log file.
    """
    experiments_dir = f'/ibex/scratch/skoroki/zsl-experiments/{args.experiment}-{args.random_search_seed}'
    os.makedirs(experiments_dir, exist_ok=True)
    log_file = f'hpo_logs/{args.experiment}-{args.random_search_seed}/{args.dataset}.log'
    os.makedirs(os.path.dirname(log_file), exist_ok=True)
    default_config = Config.load('configs/zsl.yml', frozen=False)
    default_config.experiments_dir = experiments_dir

    best_last_score_hp = None
    best_best_score_hp = None
    best_last_score_val = 0
    best_last_score_std = 0
    best_best_score_val = 0
    best_best_score_std = 0

    for i, hp in enumerate(hps):
        print(f'<======= Running hp #{i+1}/{len(hps)} =======>')
        last_scores = []
        best_scores = []

        for random_seed in range(1, args.num_runs + 1):
            print(f'=> Seed #{random_seed}/{args.num_runs}')
            config = default_config.clone(frozen=False)
            # Overlay this trial's hyperparameters onto the dataset defaults.
            config[args.dataset].set('hp', config[args.dataset].hp.overwrite(hp))
            config.set('random_seed', random_seed)
            config.set('dataset', args.dataset)
            config.set('silent', args.silent)
            config.set('no_saving', True)

            trainer = ZSLTrainer(config)
            trainer.start()

            # Index 2 of the score tuple is used for model selection —
            # presumably the harmonic-mean/combined metric; TODO confirm
            # against ZSLTrainer's score layout.
            last_scores.append(trainer.curr_val_scores[2])
            best_scores.append(trainer.best_val_scores[2])

        # Aggregate across seeds with the requested statistic.
        if args.metric == 'mean':
            mean_last_score = np.mean(last_scores)
            mean_best_score = np.mean(best_scores)
        elif args.metric == 'median':
            mean_last_score = np.median(last_scores)
            mean_best_score = np.median(best_scores)
        else:
            raise ValueError(f'Unknown metric: {args.metric}')

        std_last_score = np.std(last_scores)
        std_best_score = np.std(best_scores)

        if mean_last_score > best_last_score_val:
            best_last_score_val = mean_last_score
            best_last_score_std = std_last_score
            best_last_score_hp = hp

            log_str = f'Found new best_last_score_val: {best_last_score_val} (std: {best_last_score_std})\n'
            log_str += str(best_last_score_hp)

            # Append immediately so partial progress survives a crash.
            with open(log_file, 'a') as f:
                f.write('\n======================================\n')
                f.write(log_str)

        if mean_best_score > best_best_score_val:
            best_best_score_val = mean_best_score
            best_best_score_std = std_best_score
            best_best_score_hp = hp

            log_str = f'Found new best_best_score_val: {best_best_score_val} (std: {best_best_score_std})\n'
            log_str += str(best_best_score_hp)

            with open(log_file, 'a') as f:
                f.write('\n======================================\n')
                f.write(log_str)

    # Final summary of both winners.
    log_str = f'Best last score hp (value: {best_last_score_val}, std: {best_last_score_std})\n'
    log_str += str(best_last_score_hp)
    log_str += f'Best best score hp (value: {best_best_score_val}, std: {best_best_score_std})\n'
    log_str += str(best_best_score_hp)

    with open(log_file, 'a') as f:
        f.write('======================================\n')
        f.write('========== HPO FINAL RESULT ==========\n')
        f.write('======================================\n')
        f.write(log_str)

    print(log_str)


if __name__ == "__main__":
    main()
| [
"iskorokhodov@gmail.com"
] | iskorokhodov@gmail.com |
9c508e3c4dccb29eb08f16600aac8b7eada40121 | f50f1aa1f8f139d546db3230a1cb1f53043fd9e6 | /hardware/firewire/libiec61883/actions.py | 007551952361e195493a251e2a1d2850a7c0fcfc | [] | no_license | pars-linux/corporate2 | 7887961d1552d39bc3b0bef4a60fd3413d9b82bb | 14d1eacfc824fb8d0bff8173e7ac06b36b88d10d | refs/heads/master | 2020-05-26T15:02:12.005654 | 2017-02-27T03:07:14 | 2017-02-27T03:07:14 | 82,476,084 | 4 | 0 | null | null | null | null | UTF-8 | Python | false | false | 547 | py | #!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright 2006-2009 TUBITAK/UEKAE
# Licensed under the GNU General Public License, version 2.
# See the file http://www.gnu.org/licenses/old-licenses/gpl-2.0.txt
from pisi.actionsapi import autotools
from pisi.actionsapi import pisitools
from pisi.actionsapi import get
def setup():
    # Configure the autotools build; shared libraries only.
    autotools.configure("--disable-static")

def build():
    autotools.make()

def install():
    # Install into the package staging directory, not the live system.
    autotools.rawInstall("DESTDIR=%s" % get.installDIR())
    pisitools.dodoc("AUTHORS", "ChangeLog", "NEWS", "README")
| [
"eki@420bcd57-4a62-4fd6-832e-5ede16c90cc9"
] | eki@420bcd57-4a62-4fd6-832e-5ede16c90cc9 |
2436f1583a5c32bfe94f6c6df69b6d3614093754 | 2af94f8a7609d47fdcea28a2132c4f8bacb103e3 | /src/devices/system_device.py | b8077e5ce134a2fe3c10ade8d8d40eb6713b3033 | [] | no_license | bernhara/DigiGateway4Raph | 685527723f0b306f387233c78d27fe9d78717c38 | f36ba29ef883d70f94b8609ff734b5dcde786c66 | refs/heads/master | 2020-07-05T19:56:27.027547 | 2019-08-19T06:10:46 | 2019-08-19T06:10:46 | 202,756,662 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,334 | py | ############################################################################
# #
# Copyright (c)2008, 2009, Digi International (Digi). All Rights Reserved. #
# #
# Permission to use, copy, modify, and distribute this software and its #
# documentation, without fee and without a signed licensing agreement, is #
# hereby granted, provided that the software is used on Digi products only #
# and that the software contain this copyright notice, and the following #
# two paragraphs appear in all copies, modifications, and distributions as #
# well. Contact Product Management, Digi International, Inc., 11001 Bren #
# Road East, Minnetonka, MN, +1 952-912-3444, for commercial licensing #
# opportunities for non-Digi products. #
# #
# DIGI SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING, BUT NOT LIMITED #
# TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A #
# PARTICULAR PURPOSE. THE SOFTWARE AND ACCOMPANYING DOCUMENTATION, IF ANY, #
# PROVIDED HEREUNDER IS PROVIDED "AS IS" AND WITHOUT WARRANTY OF ANY KIND. #
# DIGI HAS NO OBLIGATION TO PROVIDE MAINTENANCE, SUPPORT, UPDATES, #
# ENHANCEMENTS, OR MODIFICATIONS. #
# #
# IN NO EVENT SHALL DIGI BE LIABLE TO ANY PARTY FOR DIRECT, INDIRECT, #
# SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES, INCLUDING LOST PROFITS, #
# ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN IF #
# DIGI HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES. #
# #
############################################################################
# imports
from devices.device_base import DeviceBase
from settings.settings_base import SettingsBase, Setting
from channels.channel_source_device_property import *
from common.digi_device_info import query_state
from samples.sample import Sample
from common.shutdown import SHUTDOWN_WAIT
import threading
import digitime
# NOTE: this module is Python 2 (``except Exception, e`` syntax below).
class SystemDevice(DeviceBase, threading.Thread):
    """
    This class extends one of our base classes and is intended as an
    example of a concrete, example implementation, but it is not itself
    meant to be included as part of our developer API. Please consult the
    base class documentation for the API and the source code for this file
    for an example implementation.
    """

    def __init__(self, name, core_services):
        self.__name = name
        self.__core = core_services

        from core.tracing import get_tracer
        self.__tracer = get_tracer(name)

        ## Settings Table Definition:
        # update_rate: seconds between samples of the device statistics.
        settings_list = [
            Setting(
                name='update_rate', type=float, required=False, default_value=1.0,
                verify_function=lambda x: x > 0.0)
        ]

        ## Channel Properties Definition:
        # All channels are read-only samples refreshed by the run() loop;
        # -1 marks "not yet sampled".
        property_list = [
            ChannelSourceDeviceProperty(name="uptime", type=int,
                initial=Sample(timestamp=0, value=-1, unit="sec"),
                perms_mask=DPROP_PERM_GET, options=DPROP_OPT_AUTOTIMESTAMP),
            ChannelSourceDeviceProperty(name="cpu_utilization", type=int,
                initial=Sample(timestamp=0, value=0, unit="%"),
                perms_mask=DPROP_PERM_GET, options=DPROP_OPT_AUTOTIMESTAMP),
            ChannelSourceDeviceProperty(name="free_memory", type=int,
                initial=Sample(timestamp=0, value=-1, unit="bytes"),
                perms_mask=DPROP_PERM_GET, options=DPROP_OPT_AUTOTIMESTAMP),
            ChannelSourceDeviceProperty(name="used_memory", type=int,
                initial=Sample(timestamp=0, value=-1, unit="bytes"),
                perms_mask=DPROP_PERM_GET, options=DPROP_OPT_AUTOTIMESTAMP),
            ChannelSourceDeviceProperty(name="total_memory", type=int,
                initial=Sample(timestamp=0, value=-1, unit="bytes"),
                perms_mask=DPROP_PERM_GET, options=DPROP_OPT_AUTOTIMESTAMP),
        ]

        ## Initialize the DeviceBase interface:
        DeviceBase.__init__(self, self.__name, self.__core,
                            settings_list, property_list)

        ## Thread initialization:
        self.__stopevent = threading.Event()
        threading.Thread.__init__(self, name=name)
        # Daemon thread: does not block interpreter shutdown.
        threading.Thread.setDaemon(self, True)

    ## Functions which must be implemented to conform to the DeviceBase
    ## interface:
    def apply_settings(self):
        """Validate and commit the settings; returns (accepted, rejected, not_found)."""
        SettingsBase.merge_settings(self)
        accepted, rejected, not_found = SettingsBase.verify_settings(self)
        if len(rejected) or len(not_found):
            self.__tracer.error("Settings rejected/not found: %s %s",
                                rejected, not_found)
        # A long sleep in run() delays reaction to the stop event, so warn
        # when update_rate exceeds the framework's shutdown grace period.
        if (('update_rate' in accepted) and
            (accepted['update_rate'] > SHUTDOWN_WAIT)):
            self.__tracer.warning("Long update_rate setting may " +
                                  "interfere with shutdown of DIA")

        SettingsBase.commit_settings(self, accepted)

        return (accepted, rejected, not_found)

    def start(self):
        """Start the sampling thread."""
        threading.Thread.start(self)
        return True

    def stop(self):
        """Signal the sampling loop to exit on its next iteration."""
        self.__stopevent.set()
        return True

    # Threading related functions:
    def run(self):
        """Poll device statistics and publish them as channel samples."""
        while 1:
            if self.__stopevent.isSet():
                self.__stopevent.clear()
                break

            try:
                # query_state returns XML-like elements; find() locates the
                # child tag for each statistic.
                device_stats = query_state("device_stats")
                for stat in ['uptime', 'cpu', 'freemem', 'usedmem', 'totalmem']:
                    for item in device_stats:
                        data = item.find(stat)
                        if data != None:
                            data = data.text
                            break
                    else:
                        # Statistic not present in this report; skip it.
                        continue
                    if stat == 'uptime':
                        # Uptime may be reported with a fractional part.
                        self.property_set("uptime",
                            Sample(0, int(float(data)), unit="sec"))
                    elif stat == 'cpu':
                        self.property_set("cpu_utilization",
                            Sample(0, int(data), unit="%"))
                    elif stat == 'freemem':
                        self.property_set("free_memory",
                            Sample(0, int(data), unit="bytes"))
                    elif stat == 'usedmem':
                        self.property_set("used_memory",
                            Sample(0, int(data), unit="bytes"))
                    elif stat == 'totalmem':
                        self.property_set("total_memory",
                            Sample(0, int(data), unit="bytes"))
            except Exception, e:
                # Best-effort sampling: log and retry on the next cycle.
                self.__tracer.error("Unable to update stat: %s", str(e))

            digitime.sleep(SettingsBase.get_setting(self,"update_rate"))
# internal functions & classes
| [
"ORBA6563@S-ORBA65630.rd.francetelecom.fr"
] | ORBA6563@S-ORBA65630.rd.francetelecom.fr |
d5f6cdc071d241ecd80c2cc76b3a6299f99d09ee | 91acf428043e39323e6879861676d6528ef39419 | /agence/migrations/0013_agence_mdp.py | 9666fa42d96cd8150d18a122e4279ff80fefa0ee | [] | no_license | Zaenma/paiement-django | dec9fca85b2cad0c62b7ec3fa416b34420dea31f | e361873d4a1771403f008e768c76e374bbc8da2c | refs/heads/main | 2023-03-19T07:31:53.409214 | 2021-03-15T23:47:03 | 2021-03-15T23:47:03 | 311,372,216 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 407 | py | # Generated by Django 3.1.7 on 2021-03-02 07:27
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('agence', '0012_remove_agence_password'),
]
operations = [
migrations.AddField(
model_name='agence',
name='mdp',
field=models.CharField(blank=True, max_length=20, null=True),
),
]
| [
"zaenma.halidisalim@gmail.com"
] | zaenma.halidisalim@gmail.com |
9beb60e6112c738434f2380b7f1712a919561405 | 575d197af5bbc31b89df37f8733e81707294948c | /Python2/examples/tkinter/13_sticky_buttons_west_east.py | 6cd880575f0336e0fbe9b4164c61aa419b236df5 | [] | no_license | tisnik/python-programming-courses | 5c7f1ca9cae07a5f99dd8ade2311edb30dc3e088 | 4e61221b2a33c19fccb500eb5c8cdb49f5b603c6 | refs/heads/master | 2022-05-13T07:51:41.138030 | 2022-05-05T15:37:39 | 2022-05-05T15:37:39 | 135,132,128 | 3 | 2 | null | 2021-04-06T12:19:16 | 2018-05-28T08:27:19 | Python | UTF-8 | Python | false | false | 649 | py | #!/usr/bin/env python3
# vim: set fileencoding=utf-8
from tkinter import *
from tkinter import ttk
import sys
root = Tk()
button1 = ttk.Button(root, text="1st btn", command=lambda: sys.exit(0))
button2 = ttk.Button(root, text="Second button", command=lambda: sys.exit(0))
button3 = ttk.Button(root, text="Third button", command=lambda: sys.exit(0))
button4 = ttk.Button(
root, text="This is fourth button, the last one", command=lambda: sys.exit(0)
)
button1.grid(column=1, row=1, sticky="we")
button2.grid(column=2, row=1, sticky="we")
button3.grid(column=1, row=2, sticky="we")
button4.grid(column=2, row=2, sticky="we")
root.mainloop()
| [
"ptisnovs@redhat.com"
] | ptisnovs@redhat.com |
2a00518531523ffa160b24e45e13d6d1735004ec | f82107f045898f0300aa414780f1d406a6c3398a | /mlmo/eval/metrics/accuracy.py | db0d5709eed4ec74df788ee73eaf759bbde88bc7 | [
"MIT"
] | permissive | DanielGutmann/mltoolkit | 9b609bb2976e4492473d0692d1a2b26fa8b448d4 | acc192bafc66b7661d541ef4f604b5e5ab7df5ca | refs/heads/master | 2022-03-09T05:31:46.317003 | 2019-11-07T12:03:31 | 2019-11-07T12:03:31 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,663 | py | from mlmo.eval.metrics import BaseMetric
from mlutils.helpers.general import sort_hash
from collections import OrderedDict
class Accuracy(BaseMetric):
    """Classification accuracy accumulated over chunks of labels.

    True labels listed in ``excluded_labels`` are ignored both for the
    accuracy computation and for the label-frequency statistics.
    """

    def __init__(self, excluded_labels=None):
        super(Accuracy, self).__init__()
        self.excluded_labels = excluded_labels if excluded_labels else []
        self.nr_correct = 0
        self._true_label_counts = {}
        self._pred_label_counts = {}
        self.nr_total = 0

    def accum(self, predicted_labels, true_labels):
        """
        Evaluates a chunk of predicted and true labels, and updates the
        necessary statistics.

        :param predicted_labels: list or array of predictions.
        :param true_labels: list or array of true labels.
        :return boolean list of matching labels. If a true label was excluded,
            it will be marked as None.
        """
        assert len(predicted_labels) == len(true_labels)
        matches = []
        for pr_l, tr_l in zip(predicted_labels, true_labels):
            if tr_l in self.excluded_labels:
                matches.append(None)
                continue
            match = pr_l == tr_l
            self.nr_correct += match
            self.nr_total += 1
            matches.append(match)

            # store counts (dict.get avoids the two-step init-then-increment)
            self._true_label_counts[tr_l] = self._true_label_counts.get(tr_l, 0) + 1
            self._pred_label_counts[pr_l] = self._pred_label_counts.get(pr_l, 0) + 1
        return matches

    def aggr(self):
        """Return an ordered dict with the aggregated accuracy."""
        res = OrderedDict()
        # Guard against ZeroDivisionError when nothing (non-excluded) has
        # been accumulated yet; report 0.0 in that case.
        if self.nr_total:
            accuracy = float(self.nr_correct) / float(self.nr_total)
        else:
            accuracy = 0.0
        res["accuracy"] = accuracy
        return res

    def calculate_label_freq(self, print_friendly=False):
        """Computes frequency for true and predicted labels."""
        res = {}
        for freq_name, counts_hash in zip(["true label freq.",
                                           "pred. label freq."],
                                          [self._true_label_counts,
                                           self._pred_label_counts]):
            if counts_hash:
                counts_hash = sort_hash(counts_hash, by_key=False)
                # Normalize counts to frequencies over the accumulated total.
                norm_counts_hash = {k: float(v) / self.nr_total for k, v
                                    in counts_hash.items()}
                if print_friendly:
                    form = ["%s: %f" % (k, v) for k, v in norm_counts_hash.items()]
                    res[freq_name] = " ".join(form)
                else:
                    res[freq_name] = norm_counts_hash
        return res
| [
"bulletdll@gmail.com"
] | bulletdll@gmail.com |
b5522106325b0abf5f2db1cd14c624586d8a20b2 | 7e01c039f2427d434a4ef44a1b9dc0ea21db65ba | /venv/lib/python3.8/site-packages/django/contrib/auth/management/commands/changepassword.py | d5e11559b4262cc1ea25a62c8f7403daa5137cec | [] | no_license | dmfranz/Spike-exercise | 09f8051163d2a63dfbc3f75da2de0a1bbbbb122d | 83971e95a72d504f629778fece2cdfb953e5d08b | refs/heads/main | 2023-08-23T04:18:43.934471 | 2021-10-11T04:54:28 | 2021-10-11T04:54:28 | 413,568,735 | 0 | 1 | null | 2021-10-11T04:36:22 | 2021-10-04T20:10:01 | Python | UTF-8 | Python | false | false | 2,616 | py | import getpass
from django.contrib.auth import get_user_model
from django.contrib.auth.password_validation import validate_password
from django.core.exceptions import ValidationError
from django.core.management.base import BaseCommand, CommandError
from django.db import DEFAULT_DB_ALIAS
UserModel = get_user_model()
class Command(BaseCommand):
    """Management command: interactively change a user's password."""

    help = "Change a user's password for django.contrib.auth."
    requires_migrations_checks = True
    # No system checks: password changes should work even if unrelated
    # parts of the project fail checks.
    requires_system_checks = []

    def _get_pass(self, prompt="Password: "):
        # Prompt without echoing; empty input aborts the command.
        p = getpass.getpass(prompt=prompt)
        if not p:
            raise CommandError("aborted")
        return p

    def add_arguments(self, parser):
        parser.add_argument(
            'username', nargs='?',
            help='Username to change password for; by default, it\'s the current username.',
        )
        parser.add_argument(
            '--database',
            default=DEFAULT_DB_ALIAS,
            help='Specifies the database to use. Default is "default".',
        )

    def handle(self, *args, **options):
        """Prompt (up to three times) for a valid new password and save it."""
        if options['username']:
            username = options['username']
        else:
            # Fall back to the OS-level user name of the caller.
            username = getpass.getuser()

        try:
            # Look up by the model's configured USERNAME_FIELD on the
            # requested database alias.
            u = UserModel._default_manager.using(options['database']).get(**{
                UserModel.USERNAME_FIELD: username
            })
        except UserModel.DoesNotExist:
            raise CommandError("user '%s' does not exist" % username)

        self.stdout.write("Changing password for user '%s'" % u)

        MAX_TRIES = 3
        count = 0
        p1, p2 = 1, 2  # To make them initially mismatch.
        password_validated = False
        while (p1 != p2 or not password_validated) and count < MAX_TRIES:
            p1 = self._get_pass()
            p2 = self._get_pass("Password (again): ")
            if p1 != p2:
                self.stdout.write('Passwords do not match. Please try again.')
                count += 1
                # Don't validate passwords that don't match.
                continue
            try:
                # Run the project's configured password validators.
                validate_password(p2, u)
            except ValidationError as err:
                self.stderr.write('\n'.join(err.messages))
                count += 1
            else:
                password_validated = True

        if count == MAX_TRIES:
            raise CommandError("Aborting password change for user '%s' after %s attempts" % (u, count))

        u.set_password(p1)
        u.save()
        # Returned strings are written to stdout by the command framework.
        return "Password changed successfully for user '%s'" % u
| [
"marmara@wisc.edu"
] | marmara@wisc.edu |
5bb5e540e102b326c5b289a69f535dced5058f30 | 51427589a09ebf1148e0d2dff01716269696db69 | /tuning_suite.py | ffd3bd627dbf9fef033ed4a7ee791ac241c48953 | [] | no_license | Thapz123/cs231n-project | 41c55761d0c31840d4648ff7abbad01234da3ea0 | d5e77db4d49038114b9a5a7eb209bb0620e8889e | refs/heads/master | 2020-05-22T18:37:27.472888 | 2019-06-05T17:02:54 | 2019-06-05T17:02:54 | 186,476,642 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,664 | py | import os
import numpy as np
import torch
import torch.nn as nn
import torch.optim as optim
from torch.utils.data import random_split, Subset
from torch.optim import lr_scheduler
from torch.utils.data import DataLoader
from torch.utils.data import sampler
import torchvision.datasets as dset
import torchvision.transforms as T
import matplotlib.pyplot as plt
import time
import copy
import shutil
TEST_SUITE_PATH = './tests/'
from model.data_loader import *
def test_suite(model_init, params, dataloaders):
step_sizes, gammas, lrs = params
best_model_wts = None
best_acc = 0.0
best_hist = []
best_params = None
for ss in step_sizes:
print("On step size:{}".format(ss))
for g in gammas:
for lr in lrs:
model, hist = model_init(dataloaders, num_epochs = 10, lr=lr, step_size = ss,gamma=g)
acc = np.amax(hist)
if acc>best_acc:
best_model_wts = copy.deepcopy(model.state_dict())
best_acc = acc
best_hist = hist
# best_params = [ss,g,lr]
best_params = {'step_size':ss, 'gamma': g, 'lr':lr}
best_info = (best_model_wts, best_acc, best_hist, best_params)
return best_info
def get_data():
    """Build dev-sized DataLoaders (10% subsets) for train/val/test.

    Splits the master dataset 70/15/15, applies the train/val transforms,
    and returns loaders over the first 10% of each split for quick
    hyperparameter-tuning runs.
    """
    PHOTOSHOPS_FULL = 'data/photoshops_resized'
    ORIGINALS_FULL = 'data/originals_resized'
    master_dataset = PhotoshopDataset(ORIGINALS_FULL, PHOTOSHOPS_FULL)
    print("Size of master dataset: {}".format(len(master_dataset)))
    device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
    # Seeds Python's RNG; note torch.random_split draws from torch's own
    # RNG, so this alone does not fix the split — TODO confirm intent.
    random.seed(42)
    n = len(master_dataset)
    n_test = int(n * .15)  # number of test/val elements
    n_val = n_test
    n_train = n - 2 * n_test
    train_set, val_set, test_set = random_split(master_dataset, (n_train, n_val, n_test))
    train_set.transform = data_transforms['train']
    # Validation and test share the non-augmenting 'val' transform.
    val_set.transform = data_transforms['val']
    test_set.transform = data_transforms['val']

    # Dev subsets: first 10% of each split, for fast sweeps.
    n_train_dev = int(n_train * .1)
    n_test_dev = int(n_test * .1)
    n_val_dev = int(n_val * .1)
    train_set_dev = Subset(train_set, range(n_train_dev))
    val_set_dev = Subset(val_set, range(n_val_dev))
    test_set_dev = Subset(test_set, range(n_test_dev))
    print("Size of subsets:\nTrain Dev:{}\tVal Dev:{}\tTest Dev:{}".format(len(train_set_dev), len(val_set_dev), len(test_set_dev)))

    full_dataloaders = {
        'train' : DataLoader(train_set, batch_size=128, shuffle=True, num_workers=2, drop_last = True),
        'val' : DataLoader(val_set, batch_size=128, shuffle=True, num_workers=2, drop_last = True),
        'test' : DataLoader(test_set, batch_size=128, shuffle=True, num_workers=2, drop_last = True),
    }

    dev_dataloaders = {
        'train' : DataLoader(train_set_dev, batch_size=8, shuffle=True, num_workers=2, drop_last = True),
        'val' : DataLoader(val_set_dev, batch_size=8, shuffle=True, num_workers=2, drop_last = True),
        'test' : DataLoader(test_set_dev, batch_size=8, shuffle=True, num_workers=2, drop_last = True)
    }
    # Only the dev loaders are returned; full_dataloaders is built but
    # unused here — presumably kept for full-data runs. TODO confirm.
    return dev_dataloaders
#Importing Inception Classifier
from model.inception_net import inception_classifier
from model.cnn import cnn_classifier
from model.nn import nn_classifier
import json
from pathlib import Path
# classifiers = [inception_classifier, cnn_classifier, nn_classifier]
# classifier_names = ['inception_classifier', 'cnn_classifier', 'nn_classifier']
classifiers = [cnn_classifier, nn_classifier]
classifier_names = ['cnn_classifier', 'nn_classifier']
params = ([2, 5],[0.05,0.1,0.2], [0.001, 0.005, 0.01])
#params = ([2],[0.1], [0.001])
dataloaders = get_data()
for i, name in enumerate(classifier_names):
# if i == 2:
print("Started performing hyperparameter Tuning on {}\n".format(name))
best_model_wts, best_acc, best_hist, best_params =test_suite(classifiers[i], params, dataloaders)
# if(os.path.exists(TEST_SUITE_PATH)):
# shutil.rmtree(TEST_SUITE_PATH)
# os.mkdir(TEST_SUITE_PATH)
print("dir is {}".format(TEST_SUITE_PATH))
torch.save(best_model_wts, "{}{}.pt".format(TEST_SUITE_PATH, name))
best_hist = [acc.data.cpu().numpy().tolist() for acc in best_hist]
# best_params = [param.data.cpu().numpy().tolist() for param in best_params]
dic = {'best_acc':best_acc.data.cpu().numpy().tolist(), 'best_hist':best_hist, 'best_params': best_params}
# print("")
with open("{}{}.json".format(TEST_SUITE_PATH, name), 'w') as json_file:
json.dump(dic, json_file)
print("Completed performing hyperparameter Tuning on {}".format(name))
| [
"google-dl-platform@googlegroups.com"
] | google-dl-platform@googlegroups.com |
55df3ae7ca328429a7daac84128c8406f8666fce | 236402efa32923fefc9f3924ba4155142e8052fe | /2016/_22_grid_computing_test.py | 9d9c4c1eb11bddd8f5fc95efca73a9e4c82310e4 | [
"MIT"
] | permissive | pchudzik/adventofcode | 7c32126948ea57cdef3858ae3eb63cafdd67abb0 | 72304880c6b080d6c177d11fc9b9eb7b58e876b7 | refs/heads/master | 2022-05-08T00:20:58.586672 | 2022-04-29T19:30:34 | 2022-04-29T19:30:34 | 164,089,632 | 0 | 0 | MIT | 2022-04-22T14:29:37 | 2019-01-04T09:51:33 | Python | UTF-8 | Python | false | false | 2,041 | py | import pytest
from _22_grid_computing import Node, find_pairs_viable_to_transfer
def test_parse_node():
    # "df -h"-style line: path, size, used, avail, use%.
    node = Node.parse("/dev/grid/node-x4-y24 90T 70T 20T 77%")
    assert node.x == 4
    assert node.y == 24
    assert node.size == 90
    assert node.used == 70
    assert node.available == 20
    # Usage appears to be recomputed from used/size (70/90 ~= 0.7778)
    # rather than read from the 77% column — TODO confirm in Node.parse.
    assert node.usage == 0.7778


@pytest.mark.parametrize(
    "first, second, result",
    [
        # (size, used) pairs: transfer is viable only when the source's
        # used amount fits in the destination's free space.
        ((90, 70), (120, 10), True),
        ((90, 70), (125, 20), True),
        ((90, 70), (130, 90), False)])
def test_will_fit(first, second, result):
    node1 = Node(0, 0, *first)
    node2 = Node(1, 2, *second)
    assert node1.can_transfer_to(node2) == result


def test_transfer_data_to_the_same_node():
    # Nodes at identical coordinates are expected to count as "the same
    # node", so no transfer is allowed between them.
    node1 = Node(0, 0, 100, 1)
    node2 = Node(0, 0, 100, 1)

    assert not node1.can_transfer_to(node2)
    assert not node1.can_transfer_to(node1)
    assert not node2.can_transfer_to(node1)


def test_transfer_from_empty_node():
    # Nothing to move from a node with zero used space.
    empty_node = Node(0, 0, 100, 0)
    other_node = Node(1, 1, 100, 1)

    assert not empty_node.can_transfer_to(other_node)


def test_find_pairs_viable_to_transfer():
    node1 = Node(0, 0, 100, 1)
    node2 = Node(1, 0, 100, 1)
    node3 = Node(4, 0, 100, 1)
    node4 = Node(5, 0, 100, 1)
    # Full node: no free space to receive and too large to move.
    node5 = Node(6, 0, 100, 100)

    viable_to_transfer = find_pairs_viable_to_transfer([
        node1,
        node2,
        node3,
        node4,
        node5])

    # All ordered pairs among the four movable nodes: 4 * 3 = 12.
    assert len(viable_to_transfer) == 12
    assert (node1, node2) in viable_to_transfer
    assert (node1, node3) in viable_to_transfer
    assert (node1, node4) in viable_to_transfer
    assert (node2, node1) in viable_to_transfer
    assert (node2, node3) in viable_to_transfer
    assert (node2, node4) in viable_to_transfer
    assert (node3, node1) in viable_to_transfer
    assert (node3, node2) in viable_to_transfer
    assert (node3, node4) in viable_to_transfer
    assert (node4, node1) in viable_to_transfer
    assert (node4, node2) in viable_to_transfer
    assert (node4, node3) in viable_to_transfer
| [
"pawel.chudzik@gmail.com"
] | pawel.chudzik@gmail.com |
20e75a94c1d3e0370135db0caf3b810a8f11b8b3 | dfbd3e12a7a7ed28c13715b2fa0c964d0745c8cb | /python/day16/solve.py | 59109de2f2ba7cc2040004f56e4122f939dad14f | [
"MIT",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | ijanos/advent2017 | 3a90c479bf4f1689264576fb2c4468883458b911 | db7ba6c3f2abbe206e47f25480c24d2bade709bb | refs/heads/master | 2021-08-31T23:20:35.637440 | 2017-12-23T12:09:55 | 2017-12-23T12:09:55 | 112,766,905 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,257 | py | #!/usr/bin/env python3
import fileinput
from collections import deque
from string import ascii_lowercase
def dance(moves, times=1):
    """Run the dance `moves` over the 16 programs a-p for `times` rounds.

    A memo maps a round's starting order to its resulting order, so once a
    cycle is detected repeated rounds are resolved by lookup instead of
    replaying every move.
    """
    lineup = deque(ascii_lowercase[:16])
    seen = {}
    for _ in range(times):
        key = ''.join(lineup)
        if key in seen:
            lineup = deque(seen[key])
            continue
        for kind, arg in moves:
            if kind == 's':
                # spin: the last `arg` programs move to the front
                lineup.rotate(arg)
            elif kind == 'x':
                # exchange: swap by position
                a, b = arg
                lineup[a], lineup[b] = lineup[b], lineup[a]
            else:
                # partner: swap by program name
                p, q = arg
                a, b = lineup.index(p), lineup.index(q)
                lineup[a], lineup[b] = q, p
        seen[key] = ''.join(lineup)
    return ''.join(lineup)
# Parse the comma-separated dance moves from the puzzle input into
# (command, argument) tuples: s<N> = spin, x<i>/<j> = exchange by position,
# p<a>/<b> = partner swap by name.
moves = []
for line in fileinput.input():
    for move in line.strip().split(','):
        cmd = move[0]
        if cmd == 's':
            moves.append(('s', int(move[1:])))
        elif cmd == "x":
            i, j = (int(n) for n in move[1:].split('/'))
            moves.append(('x', (i, j)))
        else:
            # partner move: keep the program names as strings
            i, j = move[1:].split('/')
            moves.append(('p', (i, j)))

print("Part 1:", dance(moves))
# Part 2 repeats the dance a billion times; the memo in dance() makes this fast.
print("Part 2:", dance(moves, 1_000_000))
| [
"ijanos@gmail.com"
] | ijanos@gmail.com |
109038fe12c447a203ae901a7a6fad0dc0771689 | a08225934c425be313a12975c9563a72ded58be6 | /round704/rearrange.py | e02a36d8961d65e47c3a0a59e7e8b287c9cd5dfc | [] | no_license | marcus-aurelianus/codeforce | 27c966554dee9986f23fb2925bd53e6cceb8b9e9 | 4764df151ade7806e32b6c88283a2de946f99e16 | refs/heads/master | 2023-03-18T09:30:55.042594 | 2021-03-12T18:14:08 | 2021-03-12T18:14:08 | 231,387,022 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,710 | py | class Node:
def __init__(self, x):
self.data = x
self.next = None
# Function for rearranging a linked
# list with high and low value
def rearrange(head):
    """Reorder the list L0..Ln in place into L0, Ln, L1, Ln-1, ... and
    return the (unchanged) head node."""
    if not head:
        # Quick response for empty linked list
        return None

    oriHead = head
    # Locate the middle node with the classic fast/slow pointer walk.
    fast, slow = head, head
    while fast and fast.next:
        slow, fast = slow.next, fast.next.next
    mid = slow

    # ------------------------------------------
    # Reverse second half
    prev, cur = None, mid
    while cur:
        cur.next, prev, cur = prev, cur, cur.next
    head_of_second_rev = prev

    # ------------------------------------------
    # Update link between first half and reversed second half:
    # weave the two halves together, alternating one node from each.
    first, second = head, head_of_second_rev
    while second.next:
        next_hop = first.next
        first.next = second
        first = next_hop

        next_hop = second.next
        second.next = first
        second = next_hop

    return oriHead
# Prepend a value to the linked list and return the new head node.
def push(head, k):
    node = Node(k)
    node.next = head
    return node
# Print the payloads of the list from head to tail on one line.
def display(head):
    node = head
    while node is not None:
        print(node.data, end=" ")
        node = node.next
# Driver code
if __name__ == '__main__':
    head = None

    # Build the list 9 -> 6 -> 8 -> 3 -> 7 by pushing in reverse order.
    head = push(head, 7)
    head = push(head, 3)
    head = push(head, 8)
    head = push(head, 6)
    head = push(head, 9)

    # Interleave first and reversed second half, then print the result.
    head = rearrange(head)

    display(head)
| [
"37787424+marcus-aurelianus@users.noreply.github.com"
] | 37787424+marcus-aurelianus@users.noreply.github.com |
032f7db82d21d523b5513ec0dbd7bd4f40056c5b | 8aa0102f8a89bb132b2564d56383ffa1c831a2d8 | /runway/core/system/actions/email.py | 8bffd7f5d20e6dd20438f521a73fa28590ff176e | [] | no_license | Teifion/runway | 97b7e579160b18b36eeef96ceb8b094868a5aba0 | 3e24a4feec4a8eefa7db0ad44912da50c98b3a12 | refs/heads/master | 2021-01-17T15:19:57.185004 | 2016-06-05T23:56:45 | 2016-06-05T23:56:45 | 44,634,061 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,207 | py | from ....core.triggers import Action
from ....core.system.lib import email_f
class Email(Action):
    """Trigger action that sends a plain-text email to up to three
    semicolon-separated recipient fields via the system email helper."""

    name = "system_email"
    group = "System"
    label = "Email"
    # NOTE(review): this description appears copied from the Formatter
    # action -- it does not describe an email sender; confirm and fix.
    description = """Takes multiple inputs and formats them according to a scheme. Ultimately just a wrapper for the string.format function in Python."""
    documentation = """HTML DOC"""
    location = __file__

    # The data the trigger is expected to produce
    # Each entry: (field name, expected type, default value).
    inputs = (
        ("recipient1", str, ""),
        ("recipient2", str, ""),
        ("recipient3", str, ""),

        ("subject", str, ""),
        ("content", str, ""),
    )

    # This action produces no outputs.
    outputs = (
        # ("formatted_string", str, "The result of applying format to the unformatted string."),
    )

    permissions = ["developer"]

    # Example (input, expected output) pairs used by the framework's tests.
    examples = [
        (
            {"recipient1":"user@email.com", "recipient2":"user@email.com", "recipient3":"user@email.com", "subject":"Subject", "content":"Email content", "args":["1", "2", "3"]},
            {}
        ),
    ]

    def __call__(self, recipient1, recipient2, recipient3, subject, content, test_mode=False):
        # Each recipient field may itself hold several addresses joined by ';'.
        recipients = recipient1.split(";") + recipient2.split(";") + recipient3.split(";")
        email_f.send_email(recipients, subject, text_message=content, test_mode=test_mode)
# {"actions": [{"input_map": {"user": "trigger.user"}, "name": "system_get_user_1", "action": "system_get_user", "label": "The user involved"}, {"input_map": {"kwargs": {"": "\"\"", "description": "trigger.description", "data": "trigger.data", "timestamp": "trigger.timestamp", "log_id": "trigger.log_id", "traceback": "trigger.traceback", "user": "system_get_user_1.username", "path": "trigger.path"}, "unformatted_string": "\"<strong>{user}</strong>: {path}\r\n{timestamp}\r\n{data}\r\n\r\n{description}\r\n\r\n{traceback}\r\n\""}, "name": "system_formatter_1", "action": "system_formatter", "label": "Formatter for email"}, {"input_map": {"recipient2": "\"\"", "content": "system_formatter_1.formatted_string", "subject": "\"Runway error: EUI\"", "recipient1": "\"sarkalian@gmail.com\"", "recipient3": "\"\""}, "name": "system_email_1", "action": "system_email", "label": "Emailer"}], "conditions": []} | [
"sarkalian@gmail.com"
] | sarkalian@gmail.com |
3abb49d20403eeba223e44436b81ae3dcfe947fd | ec8c050f398e53260a66c8bb8c438fd4c0015021 | /software/edison/tests/motion_example.py | 5f894c875b7204cb71be15878a01315ec0e98049 | [] | no_license | myronww/sk8flair | d7e977d69fc1229aa18c27a5d30dec84da41a1e5 | 75c0342eda4e70de8cadd521b3ee3ff50d099c2b | refs/heads/main | 2021-06-22T05:05:06.647757 | 2021-01-16T18:20:09 | 2021-01-16T18:20:09 | 179,767,207 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,155 | py | from SF_9DOF import IMU
import time
# Create IMU object
imu = IMU() # To select a specific I2C port, use IMU(n). Default is 1.
# Initialize IMU
imu.initialize()
# Enable accel, mag, gyro, and temperature
imu.enable_accel()
imu.enable_mag()
imu.enable_gyro()
imu.enable_temp()
# Set range on accel, mag, and gyro
# Specify Options: "2G", "4G", "6G", "8G", "16G"
imu.accel_range("2G") # leave blank for default of "2G"
# Specify Options: "2GAUSS", "4GAUSS", "8GAUSS", "12GAUSS"
imu.mag_range("2GAUSS") # leave blank for default of "2GAUSS"
# Specify Options: "245DPS", "500DPS", "2000DPS"
imu.gyro_range("245DPS") # leave blank for default of "245DPS"
# Loop and read accel, mag, and gyro
while(1):
imu.read_accel()
imu.read_mag()
imu.read_gyro()
imu.readTemp()
# Print the results
# print "Accel: " + str(imu.ax) + ", " + str(imu.ay) + ", " + str(imu.az)
print "Mag: " + str(imu.mx) + ", " + str(imu.my) + ", " + str(imu.mz)
# print "Gyro: " + str(imu.gx) + ", " + str(imu.gy) + ", " + str(imu.gz)
# print "Temperature: " + str(imu.temp)
# Sleep for 1/10th of a second
time.sleep(0.1)
| [
"myron.walker@gmail.com"
] | myron.walker@gmail.com |
6a6c3621936d761145fe604012ef88b5610b750d | d587b52ed6c8a2b57977ead0ec0444222c35fbc8 | /dictionary_json_example.py | e178498ab4ca626fdf336dcdb771d788d1c0f677 | [] | no_license | BAFurtado/Python4ABMIpea2019 | c7209fd11617f954630a5e43e1981985e720989d | c5b5c3e01b01703454fbe57e3f3e0068da8f4316 | refs/heads/master | 2022-01-05T19:59:44.823307 | 2019-05-30T00:09:25 | 2019-05-30T00:09:25 | 166,847,263 | 0 | 2 | null | null | null | null | UTF-8 | Python | false | false | 1,280 | py | """ Class example to create and maintain a persistent database using JSON and dictionaries
Para usar é necessário sempre entrar com o valor em formato DICIONÁRIO
"""
import json
import os
from collections import defaultdict
def my_database(k, v):
    """Merge entry ``v`` (a dict) under key ``k`` into ``my_database.json``.

    The JSON file acts as a tiny persistent key -> dict store: previous
    contents are loaded if the file exists, the new value is merged in
    (update semantics when the key already exists), and the result is
    written back, printed, and returned.
    """
    # Load the previous contents, or start a fresh store.
    if os.path.exists('my_database.json'):
        with open('my_database.json', 'r') as fh:
            store = json.load(fh)
    else:
        # defaultdict(dict) so nested dict values can be assembled freely
        store = defaultdict(dict)

    if k in store:
        # Existing key: merge the new fields into the stored dict.
        store[k].update(v)
    else:
        # First time this key is seen.
        store[k] = v

    # Persist the updated store.
    with open('my_database.json', 'w') as fh:
        json.dump(store, fh)

    print('Current dictionary is: \n{}'.format(store))
    return store
if __name__ == '__main__':
    # First example (commented out): store an admission date under key '054'.
    # key = '054'
    # value = {'admission': '04/01/2019'}
    # d = my_database(key, value)

    # Store a person record under key '021'.
    key = '021'
    value = {'nome': 'Paul', 'órgão': 'ABIN'}
    d = my_database(key, value)
| [
"furtadobb@gmail.com"
] | furtadobb@gmail.com |
35263ed70bc33fbc7526a7a7bc3936d9f556fa25 | da199a7ff8bcc7a37efe2ac9036b785bf45c71c0 | /service_mds/node_drop.py | 3b1702490ed05f8b256a1c16adac55d3cfb1da5d | [] | no_license | saxisuer/smartmgr-v2 | f8ed495ce7ce940477f27c12980bfd159bc159c3 | 6e3895062d37b6815a0d6de031652048b8f22ad3 | refs/heads/master | 2021-01-15T21:24:56.622142 | 2017-07-24T14:35:17 | 2017-07-24T14:35:17 | 99,865,861 | 0 | 2 | null | 2017-08-10T01:03:19 | 2017-08-10T01:03:19 | null | UTF-8 | Python | false | false | 4,408 | py | # -*- coding: utf-8 -*-
# vim: tabstop=4 shiftwidth=4 softtabstop=4
from pdsframe import *
from pdsframe.common import dbclient
from service_mds import g
from service_mds import common
from service_mds import pb2dict_proxy
import message.pds_pb2 as msg_pds
import message.mds_pb2 as msg_mds
import message.ios_pb2 as msg_ios
class NodeDelMachine(BaseMachine):
    """State machine for NODE_DROP requests.

    Validates the request, asks the IOS service to drop the node's LUN
    group, then removes the node from the database and from the in-memory
    node/group configuration lists.
    """
    __metaclass__ = MataMachine

    MID = msg_mds.NODE_DROP_REQUEST

    def INIT(self, request):
        # Build the response skeleton and unpack the request extension.
        self.response = MakeResponse(msg_mds.NODE_DROP_RESPONSE, request)
        self.request = request
        self.request_body = request.body.Extensions[msg_mds.node_drop_request]

        if g.is_ready == False:
            self.response.rc.retcode = msg_mds.RC_MDS_SERVICE_IS_NOT_READY
            self.response.rc.message = "MDS service is not ready"
            self.SendResponse(self.response)
            return MS_FINISH

        # Handle the request parameters: node_index is mandatory.
        if self.request_body.HasField('node_index'):
            node_index = self.request_body.node_index
        else:
            self.response.rc.retcode = msg_mds.RC_MDS_ERROR_PARAMS
            self.response.rc.message = "Not specify params node index"
            self.SendResponse(self.response)
            return MS_FINISH

        # Resolve the local node index to the node's UUID.
        error,self.node_uuid = common.GetUUIDFromIndex(node_index)
        if error:
            self.response.rc.retcode = msg_mds.RC_MDS_NODE_FIND_FAIL
            self.response.rc.message = "Not find %s" %node_index
            self.SendResponse(self.response)
            return MS_FINISH

        # Refuse to drop a node that still has an active LUN mapping.
        for _lun_info in g.lun_list.lun_infos:
            if _lun_info.group_info:
                for info in _lun_info.group_info:
                    if info.group_state and info.group_uuid == self.node_uuid:
                        self.response.rc.retcode = msg_mds.RC_MDS_NODE_HAV_LUN
                        self.response.rc.message = "The node for lun used drop first"
                        self.SendResponse(self.response)
                        return MS_FINISH

        # Ask the IOS service to drop this node's LUN group; processing
        # continues in Entry_DropLunGroup when its response arrives.
        self.ios_request = MakeRequest(msg_ios.LUN_GROUP_DROP_REQUEST, self.request)
        self.ios_request_body = self.ios_request.body.Extensions[msg_ios.lun_group_drop_request]
        self.ios_request_body.node_uuid = self.node_uuid
        self.SendRequest(g.ios_service.listen_ip, g.ios_service.listen_port, self.ios_request, self.Entry_DropLunGroup)
        return MS_CONTINUE

    def Entry_DropLunGroup(self,response):
        # Propagate IOS-side failures to the caller unchanged.
        if response.rc.retcode != msg_pds.RC_SUCCESS:
            self.response.rc.CopyFrom(response.rc)
            self.SendResponse(self.response)
            return MS_FINISH

        # NOTE(review): `dbservice` is not imported in this module's visible
        # header -- presumably provided globally by pdsframe; confirm.
        e, node_list = dbservice.srv.delete("/node_list/%s"%self.node_uuid)
        if e:
            logger.run.error("delete list info faild %s:%s" % (e, node_list))
            self.response.rc.retcode = msg_mds.RC_MDS_DELETE_DB_DATA_FAILED
            self.response.rc.message = "Keep data failed"
            self.SendResponse(self.response)
            return MS_FINISH

        # Rebuild the in-memory global node configuration list without the
        # dropped node (keeps it in sync with the configuration file).
        node_list = msg_mds.G_NSNodeConfList()
        for node_info in filter(lambda node_info:node_info.node_uuid!=self.node_uuid,g.nsnode_conf_list.nsnode_infos):
            node_list.nsnode_infos.add().CopyFrom(node_info)
        g.nsnode_conf_list = node_list

        # The same node may belong to several groups.
        group_info = msg_pds.GroupInfoConf()
        for group in g.group_list.groups:
            if self.node_uuid in group.node_uuids:
                group_info.CopyFrom(group)
                # NOTE(review): calling remove() directly on the protobuf
                # message looks suspicious -- likely intended
                # group_info.node_uuids.remove(self.node_uuid); confirm.
                group_info.remove(self.node_uuid)
                data = pb2dict_proxy.pb2dict("group_info", group_info)
                e, _ = dbservice.srv.update("/group_list/%s"%group.group_name, data)
                if e:
                    logger.run.error("Update node list faild %s:%s" % (e, _))
                    self.response.rc.retcode = msg_mds.RC_MDS_UPDATE_DB_DATA_FAILED
                    self.response.rc.message = "Keep data failed"
                    self.SendResponse(self.response)
                    return MS_FINISH
                group.remove(self.node_uuid)

        self.response.rc.retcode = msg_pds.RC_SUCCESS
        self.SendResponse(self.response)
        return MS_FINISH
| [
"wuweisunshine@163.com"
] | wuweisunshine@163.com |
31bddbf6ea0f1f17c7e068a64155f4fed377a7d1 | ebd9c249d446d809abc9a0f3e4593f34922a1b93 | /lintcode/242_convert_binary_tree_to_linked_lists_by_depth.py | 7b7587f14b1a53a385204554a293bbe0c64a4e8c | [] | no_license | jaychsu/algorithm | ac7a9dc7366f58c635a68bc46bf1640d2f5ff16d | 91892fd64281d96b8a9d5c0d57b938c314ae71be | refs/heads/master | 2023-05-11T00:40:39.237813 | 2022-09-14T07:43:12 | 2022-09-14T07:43:12 | 106,277,156 | 143 | 39 | null | 2022-09-14T07:43:13 | 2017-10-09T11:51:48 | Python | UTF-8 | Python | false | false | 946 | py | """
Definition of TreeNode:
class TreeNode:
def __init__(self, val):
this.val = val
this.left, this.right = None, None
Definition for singly-linked list.
class ListNode:
def __init__(self, x):
self.val = x
self.next = None
"""
class Solution:
    # @param {TreeNode} root the root of binary tree
    # @return {ListNode[]} a lists of linked list
    def binaryTreeToLists(self, root):
        """Breadth-first walk: collect each depth level of the tree as its
        own singly linked list and return the lists top-down."""
        result = []
        if not root:
            return result

        level = [root]
        while level:
            # Build the linked list for this level behind a dummy head.
            dummy = ListNode(-1)
            tail = dummy
            next_level = []
            for node in level:
                tail.next = ListNode(node.val)
                tail = tail.next
                if node.left:
                    next_level.append(node.left)
                if node.right:
                    next_level.append(node.right)
            result.append(dummy.next)
            level = next_level

        return result
| [
"hi@jaych.su"
] | hi@jaych.su |
564ec90736d9724e65864392e0f6e67427c10a4a | d9aa4291a4978b932bef84b8d26aa4b911ca2add | /day11网络编程/05udp_s.py | 170be99144cce0d5ca933a869218816fc299ee18 | [] | no_license | SelfShadows/my_git | 9a32d3713efb1b055d04c813b319eb2196fdcf53 | b10a4c838e1146b3f6ce297480840de9a8e89206 | refs/heads/master | 2020-12-15T22:33:49.273814 | 2020-02-14T16:33:46 | 2020-02-14T16:33:46 | 235,274,933 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 337 | py | import socket
sk=socket.socket(type=socket.SOCK_DGRAM) #DGRAM datagram
sk.bind(('127.0.0.1',8080))
while True:
msg,addr=sk.recvfrom(1024)
print(msg.decode('utf-8'))
info = input('服务器:')
info = ('\033[34m来自服务器发送的消息:\033[0m%s'%info).encode('utf-8')
sk.sendto(bytes(info),addr)
sk.close() | [
"870670791@qq.com"
] | 870670791@qq.com |
f050c906e25fa540fa7598c82a1fb69731198236 | 729be94363b5ed676d9e83aa9f50a2ee95bd0a92 | /isso/core.py | e0306854d3f2182ddaadc5f66a9d287f11cdeefe | [
"MIT"
] | permissive | kod3r/isso | 2b603de0b7956332fdcb02543255b1d9acba3bee | 6e31111554727e0563472ec11479760d6d7f9c0f | refs/heads/master | 2020-12-03T09:32:05.433362 | 2013-11-29T13:22:21 | 2013-11-29T13:22:21 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,415 | py | # -*- encoding: utf-8 -*-
from __future__ import print_function
import io
import os
import time
import logging
import binascii
import threading
import multiprocessing
from configparser import ConfigParser
try:
import uwsgi
except ImportError:
uwsgi = None
from isso.compat import PY2K
if PY2K:
import thread
else:
import _thread as thread
from isso.utils import parse
from isso.compat import text_type as str
from werkzeug.contrib.cache import NullCache, SimpleCache
logger = logging.getLogger("isso")
class Section:
    """Read-only view of a single section of a parsed configuration,
    delegating every lookup to the underlying parser."""

    def __init__(self, conf, section):
        self.conf = conf
        self.section = section

    def get(self, key):
        # Raw string value for *key* in this section.
        return self.conf.get(self.section, key)

    def getint(self, key):
        return self.conf.getint(self.section, key)

    def getiter(self, key):
        # Relies on the parser being an IssoParser (plain ConfigParser
        # has no getiter).
        return self.conf.getiter(self.section, key)

    def getboolean(self, key):
        return self.conf.getboolean(self.section, key)
class IssoParser(ConfigParser):
    """ConfigParser that understands human-readable timedeltas and
    multi-valued keys.

    ``getint`` accepts values such as ``1h`` or ``30d`` and converts them
    to whole seconds, falling back to plain integer parsing for ordinary
    numbers.  ``getiter`` yields the non-empty lines of a multi-line value.
    """

    @classmethod
    def _total_seconds(cls, td):
        # timedelta.total_seconds() fallback for very old Python versions.
        return (td.microseconds + (td.seconds + td.days * 24 * 3600) * 10**6) / 10**6

    def getint(self, section, key):
        try:
            delta = parse.timedelta(self.get(section, key))
        except ValueError:
            # Not a timedelta expression -- treat the value as a plain int.
            return super(IssoParser, self).getint(section, key)
        try:
            return int(delta.total_seconds())
        except AttributeError:
            return int(IssoParser._total_seconds(delta))

    def getiter(self, section, key):
        # Yield each stripped, non-empty line of the (possibly multi-line)
        # value.
        for item in (part.strip() for part in self.get(section, key).split('\n')):
            if item:
                yield item

    def section(self, section):
        # Convenience wrapper bundling this parser with a section name.
        return Section(self, section)
class Config:
    """Built-in default configuration plus a loader that overlays the
    user's INI file and warns about unknown options."""

    default = [
        "[general]",
        "dbpath = /tmp/isso.db", "session-key = %r" % binascii.b2a_hex(os.urandom(24)),
        "host = http://localhost:8080/", "max-age = 15m",
        "notify = ",
        "[moderation]",
        "enabled = false",
        "purge-after = 30d",
        "[server]",
        "listen = http://localhost:8080/",
        "reload = off", "profile = off",
        "[smtp]",
        "username = ", "password = ",
        "host = localhost", "port = 465", "ssl = on",
        "to = ", "from = ",
        "[guard]",
        "enabled = true",
        "ratelimit = 2",
        "direct-reply = 3",
        "reply-to-self = false"
    ]

    @classmethod
    def load(cls, configfile):
        """Return an IssoParser with the defaults applied and *configfile*
        (if given) layered on top."""

        # return set of (section, option)
        setify = lambda cp: set((section, option) for section in cp.sections()
                                for option in cp.options(section))

        rv = IssoParser(allow_no_value=True)
        rv.read_file(io.StringIO(u'\n'.join(Config.default)))

        a = setify(rv)

        if configfile:
            rv.read(configfile)

        # Anything present after reading the user file but absent from the
        # defaults is an unknown option: warn, but keep going.
        diff = setify(rv).difference(a)

        if diff:
            for item in diff:
                logger.warn("no such option: [%s] %s", *item)
                if item in (("server", "host"), ("server", "port")):
                    logger.warn("use `listen = http://$host:$port` instead")

        # SMTP credentials without `notify = smtp` would silently do nothing.
        if rv.get("smtp", "username") and not rv.get("general", "notify"):
            logger.warn(("SMTP is no longer enabled by default, add "
                         "`notify = smtp` to the general section to "
                         "enable SMTP nofications."))

        return rv
class Cache:
    """Wrapper around werkzeug's cache class, to make it compatible to
    uWSGI's cache framework.
    """

    def __init__(self, cache):
        self.cache = cache

    def get(self, cache, key):
        # The `cache` (namespace) argument exists only for uWSGI API
        # compatibility; werkzeug caches are flat, so it is ignored.
        return self.cache.get(key)

    def set(self, cache, key, value):
        return self.cache.set(key, value)

    def delete(self, cache, key):
        return self.cache.delete(key)
class Mixin(object):
    """Base concurrency mixin: a plain thread lock, a no-op cache, and a
    no-op notification hook."""

    def __init__(self, conf):
        self.lock = threading.Lock()
        self.cache = Cache(NullCache())

    def notify(self, subject, body, retries=5):
        # Overridden by notification-capable subclasses; the base does nothing.
        pass
def threaded(func):
    """
    Decorator that runs every call of *func* in a freshly spawned thread.

    The decorated call returns immediately; any return value of *func*
    is discarded.
    """

    def run_async(self, *args, **kwargs):
        thread.start_new_thread(func, (self, ) + args, kwargs)

    return run_async
class ThreadedMixin(Mixin):
    """Mixin for the threaded server: background comment purging plus an
    in-process SimpleCache."""

    def __init__(self, conf):
        super(ThreadedMixin, self).__init__(conf)

        if conf.getboolean("moderation", "enabled"):
            # @threaded makes this return immediately and loop in its own
            # thread.
            self.purge(conf.getint("moderation", "purge-after"))

        self.cache = Cache(SimpleCache(threshold=1024, default_timeout=3600))

    @threaded
    def purge(self, delta):
        # Purge unactivated comments every `delta` seconds, forever.
        # NOTE(review): relies on `self.db` being provided by the
        # application class that mixes this in.
        while True:
            with self.lock:
                self.db.comments.purge(delta)
            time.sleep(delta)
class ProcessMixin(ThreadedMixin):
    """Variant for multi-process servers: replaces the thread lock with a
    multiprocessing lock (cache and purge behavior are inherited)."""

    def __init__(self, conf):
        super(ProcessMixin, self).__init__(conf)

        self.lock = multiprocessing.Lock()
class uWSGICache(object):
    """Uses uWSGI Caching Framework. INI configuration:

    .. code-block:: ini

        cache2 = name=hash,items=1024,blocksize=32
    """

    @classmethod
    def get(self, cache, key):
        return uwsgi.cache_get(key, cache)

    @classmethod
    def set(self, cache, key, value):
        # Entries expire after one hour (3600 s).
        uwsgi.cache_set(key, value, 3600, cache)

    @classmethod
    def delete(self, cache, key):
        uwsgi.cache_del(key, cache)
class uWSGIMixin(Mixin):
    """Mixin for running under uWSGI: multiprocessing lock, uWSGI-backed
    cache, and comment purging driven by a uWSGI signal timer."""

    def __init__(self, conf):
        super(uWSGIMixin, self).__init__(conf)

        self.lock = multiprocessing.Lock()
        self.cache = uWSGICache

        # Fire uWSGI signal 1 every `timedelta` seconds to purge comments.
        timedelta = conf.getint("moderation", "purge-after")
        purge = lambda signum: self.db.comments.purge(timedelta)
        uwsgi.register_signal(1, "", purge)
        uwsgi.add_timer(1, timedelta)

        # run purge once
        purge(1)
| [
"info@posativ.org"
] | info@posativ.org |
a7d07e5cc100048cca63f9c127c70cd6887ce01c | 513050db98102f0331b978716553894bd55b2205 | /leetcode_problems/maximum_depth_binarytree_104.py | 79b2d071365ae1ebe337ddb0950aefb209092513 | [] | no_license | INNOMIGHT/problem_solving | afef164968a98ed1c1d69301de0288c74cc199c8 | 3a7c9dc7c3747a65fff6947f03a68bc5652d360c | refs/heads/main | 2023-08-18T17:42:03.138195 | 2021-10-03T14:49:38 | 2021-10-03T14:49:38 | 328,564,937 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 950 | py | """
Given the root of a binary tree, return its maximum depth.
A binary tree's maximum depth is the number of nodes along
the longest path from the root node down to the farthest leaf node.
"""
# Definition for a binary tree node.
class TreeNode:
    def __init__(self, val=0, left=None, right=None):
        self.val = val
        self.left = left
        self.right = right


class Solution:
    def maxDepth(self, root: TreeNode) -> int:
        """Number of nodes on the longest root-to-leaf path (0 for an
        empty tree)."""
        if not root:
            return 0
        return self.height(root)

    def height(self, root):
        # Entry point for the recursive depth walk.
        if root is not None:
            return self._height(root, 0)

    def _height(self, node, depth):
        # `depth` counts the nodes already seen on this path.
        if node is None:
            return depth
        return max(self._height(node.left, depth + 1),
                   self._height(node.right, depth + 1))
| [
"iammagnificient@gmail.com"
] | iammagnificient@gmail.com |
21c65e354d2155fa80a0cb52d986b1ef1cf634f5 | 1d60c5a7b8ce6277bff514e376f79848f706344c | /Machine Learning Scientist with Python/07. Dimensionality Reduction in Python/03. Feature selection II, selecting for model accuracy/07. Creating a LASSO regressor.py | 246eb7c94ca5322569853647142c30c2bc498b59 | [] | no_license | DidiMilikina/DataCamp | 338c6e6d3b4f5b6c541c1aba155a36e9ee24949d | 3bf2cf3c1430190a7f8e54efda7d50a5fd66f244 | refs/heads/master | 2020-12-15T13:16:54.178967 | 2020-05-06T17:30:54 | 2020-05-06T17:30:54 | 235,113,616 | 4 | 3 | null | null | null | null | UTF-8 | Python | false | false | 1,224 | py | '''
Creating a LASSO regressor
You'll be working on the numeric ANSUR body measurements dataset to predict a person's Body Mass Index (BMI) using the pre-imported Lasso() regressor. BMI is a metric derived from body height and weight, but those two features have been removed from the dataset to give the model a challenge.
You'll standardize the data first using the StandardScaler() that has been instantiated for you as scaler to make sure all coefficients face a comparable regularizing force trying to bring them down.
All necessary functions and classes plus the input datasets X and y have been pre-loaded.
Instructions
100 XP
Set the test size to 30% to get a 70-30% train test split.
Fit the scaler on the training features and transform these in one go.
Create the Lasso model.
Fit it to the scaled training data.
'''
SOLUTION
# Set the test size to 30% to get a 70-30% train test split
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.3, random_state=0)
# Fit the scaler on the training features and transform these in one go
X_train_std = scaler.fit_transform(X_train, y_train)
# Create the Lasso model
la = Lasso()
# Fit it to the standardized training data
la.fit(X_train_std, y_train) | [
"didimilikina8@gmail.com"
] | didimilikina8@gmail.com |
41ae8e762d040b3142be0ebb2fae018e18e70af4 | 244ecfc2017a48c70b74556be8c188e7a4815848 | /res/scripts/client/messenger/gui/scaleform/meta/channelsmanagementwindowmeta.py | 2f80335767e193d22b7c49ec549d00c91900eee6 | [] | no_license | webiumsk/WOT-0.9.12 | c1e1259411ba1e6c7b02cd6408b731419d3174e5 | 5be5fd9186f335e7bae88c9761c378ff5fbf5351 | refs/heads/master | 2021-01-10T01:38:36.523788 | 2015-11-18T11:33:37 | 2015-11-18T11:33:37 | 46,414,438 | 1 | 0 | null | null | null | null | WINDOWS-1250 | Python | false | false | 1,208 | py | # 2015.11.18 11:57:40 Střední Evropa (běžný čas)
# Embedded file name: scripts/client/messenger/gui/Scaleform/meta/ChannelsManagementWindowMeta.py
from gui.Scaleform.framework.entities.abstract.AbstractWindowView import AbstractWindowView
class ChannelsManagementWindowMeta(AbstractWindowView):
    """Generated DAAPI meta class for the channels management window.

    Methods without the ``as_`` prefix are Flash->Python callbacks meant to
    be overridden by the concrete view (the base implementation only logs an
    override error); ``as_*S`` methods forward calls into the Flash object.
    """

    def getSearchLimitLabel(self):
        self._printOverrideError('getSearchLimitLabel')

    def searchToken(self, token):
        self._printOverrideError('searchToken')

    def joinToChannel(self, index):
        self._printOverrideError('joinToChannel')

    def createChannel(self, name, usePassword, password, retype):
        self._printOverrideError('createChannel')

    def as_freezSearchButtonS(self, isEnable):
        # Only forward to Flash once the DAAPI bridge is initialised.
        if self._isDAAPIInited():
            return self.flashObject.as_freezSearchButton(isEnable)

    def as_getDataProviderS(self):
        if self._isDAAPIInited():
            return self.flashObject.as_getDataProvider()
# okay decompyling c:\Users\PC\wotsources\files\originals\res\scripts\client\messenger\gui\scaleform\meta\channelsmanagementwindowmeta.pyc
# decompiled 1 files: 1 okay, 0 failed, 0 verify failed
# 2015.11.18 11:57:40 Střední Evropa (běžný čas)
| [
"info@webium.sk"
] | info@webium.sk |
8026719322ae9bd486c45e78bac9b67bd5a21c4a | c6f811b0df5706c3dc9175e8fc4b18e2058f9460 | /en/2019/05/blm.py | e273de3534b04767dfac921d607168eeaf8255c4 | [] | no_license | muratk3n/thirdwave | c108ec85bcd6e59282352d924640d58e99f4d2a4 | 01af4fd74fab8e950d773989124a239c0f6c5ac6 | refs/heads/master | 2021-08-22T01:33:54.185423 | 2021-07-19T06:54:02 | 2021-07-19T06:54:02 | 161,887,880 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,989 | py | from scipy import sin, cos, tan, arctan, arctan2, arccos, pi
import pandas as pd, datetime, numpy as np
from zipfile import ZipFile
from io import BytesIO
import urllib.request as urllib2
from bs4 import BeautifulSoup
from bs4.element import Comment
import urllib.request, folium, re, requests
headers = { 'User-Agent': 'UCWEB/2.0 (compatible; Googlebot/2.1; +google.com/bot.html)'}
def tag_visible(element):
    """Return False for text nodes inside non-rendered tags or HTML comments."""
    hidden_parents = ['style', 'script', 'head', 'title', 'meta', '[document]']
    if element.parent.name in hidden_parents:
        return False
    if isinstance(element, Comment):
        return False
    return True
def text_from_html(body):
    """Extract the visible text of an HTML document as one space-joined string."""
    soup = BeautifulSoup(body, 'html.parser')
    visible = filter(tag_visible, soup.findAll(text=True))
    return u" ".join(chunk.strip() for chunk in visible)
# GDELT 1.0 daily event export files live under this URL.
base_conflict_url = "http://data.gdeltproject.org/events"

# Names for the leading columns of a GDELT event export row (tab-separated).
conf_cols = ['GlobalEventID', 'Day', 'MonthYear', 'Year', 'FractionDate',\
             'Actor1Code', 'Actor1Name', 'Actor1CountryCode', 'Actor1KnownGroupCode',\
             'Actor1EthnicCode', 'Actor1Religion1Code', 'Actor1Religion2Code',\
             'Actor1Type1Code', 'Actor1Type2Code', 'Actor1Type3Code', \
             'Actor2Code', 'Actor2Name', 'Actor2CountryCode', 'Actor2KnownGroupCode',
             'Actor2EthnicCode', 'Actor2Religion1Code', 'Actor2Religion2Code',
             'Actor2Type1Code', 'Actor2Type2Code', 'Actor2Type3Code', \
             'IsRootEvent','EventCode', 'EventBaseCode','EventRootCode',\
             'QuadClass', 'GoldsteinScale','NumMentions','NumSources', \
             'NumArticles', 'AvgTone','Actor1Geo_Type', 'Actor1Geo_FullName',\
             'Actor1Geo_CountryCode', 'Actor1Geo_ADM1Code','Actor1Geo_Lat', \
             'Actor1Geo_Long', 'Actor1Geo_FeatureID','Actor2Geo_Type', \
             'Actor2Geo_FullName','Actor2Geo_CountryCode', 'Actor2Geo_ADM1Code',\
             'Actor2Geo_Lat', 'Actor2Geo_Long']

now = datetime.datetime.now()
# Alternative fixed start date for reproducing an earlier run.
#now = datetime.date(2021,6,18)

dfs = []
# Map centre (roughly the centre of the continental US) and the search
# radius in kilometres used to filter events.
clat,clon=40.74832401970278, -98.51347361480249
how_far = 3200
m = folium.Map(location=[clat, clon], zoom_start=3, tiles="Stamen Terrain")
def spherical_distance(lat1, long1, lat2, long2):
    """Great-circle distance in kilometres between two points given in radians.

    Uses the spherical law of cosines on a sphere whose radius is the mean
    of Earth's equatorial and polar radii.
    """
    phi1 = 0.5*pi - lat1
    phi2 = 0.5*pi - lat2
    r = 0.5*(6378137 + 6356752) # mean radius in meters
    t = sin(phi1)*sin(phi2)*cos(long1-long2) + cos(phi1)*cos(phi2)
    # Floating-point noise can push t just outside [-1, 1], which would make
    # arccos return NaN for coincident or antipodal points -- clamp it.
    t = max(-1.0, min(1.0, t))
    return r * arccos(t) / 1000.
def dist(x):
    # Distance (km) from the map centre (clat, clon) to the row's Actor2
    # location; module-level globals, with degrees converted to radians here.
    return spherical_distance(np.deg2rad(clat),np.deg2rad(clon),np.deg2rad(x['Actor2Geo_Lat']),np.deg2rad(x['Actor2Geo_Long']))
# Scan the last 30 days of GDELT daily exports, keep EventCode 193 rows
# within `how_far` km of the map centre, fetch each article, and drop a
# map marker for pages matching the police-shooting keyword filter.
for i in range(30):
    d = now - datetime.timedelta(days=i+1)
    print (d)
    sd = "%d%02d%02d" % (d.year, d.month, d.day)
    url = base_conflict_url + "/%s.export.CSV.zip" % sd
    print (url)
    # Download and open the day's zipped CSV export in memory.
    r = urllib2.urlopen(url).read()
    file = ZipFile(BytesIO(r))
    csv = file.open("%s.export.CSV" % sd)
    df = pd.read_csv(csv,sep='\t',header=None)
    # Column 57 holds the source article URL; keep it alongside the
    # named leading columns.
    urls = df[57]
    df2 = df[range(len(conf_cols))]
    df2 = pd.concat((df2,urls),axis=1)
    df2.columns = conf_cols + ['url']
    # Filter to CAMEO event code 193 rows near the centre point.
    df3 = df2[(df2.EventCode==193)]
    df3 = df3.reset_index()
    df3.drop_duplicates('url',inplace=True)
    df3.loc[:,'dist'] = df3.apply(dist, axis=1)
    df3 = df3[df3.dist < how_far]
    for idx, row in df3.iterrows():
        url = row['url']
        if 'black' not in url: continue
        print (url)
        try:
            resp = requests.get(url, headers=headers, timeout=2)
            s = text_from_html(resp.text)
            # Keyword heuristic: article must mention a black man, police,
            # and a shooting/killing term.
            filt = 'black man' in s.lower() and \
                   ('police' in s.lower() or 'deput' in s.lower()) and \
                   ('shoot' in s.lower() or 'fatally' in s.lower() or 'killed' in s.lower() or 'shot' in s.lower())
            if filt:
                #print (s)
                folium.Marker(
                    [row['Actor2Geo_Lat'], row['Actor2Geo_Long']], popup="<a href='%s' target='_blank' rel='noopener noreferrer'>Link</a>" % url
                ).add_to(m)
        # NOTE(review): bare except silently skips any fetch/parse failure.
        except:
            continue

m.save('blm-out.html')
"me@yomama.com"
] | me@yomama.com |
4f5c0732681ba62b262c1a41089c3077107cd99d | 9743d5fd24822f79c156ad112229e25adb9ed6f6 | /xai/brain/wordbase/adverbs/_eastward.py | d655412c68ba184673b91c3776a03aaf30f305f3 | [
"MIT"
] | permissive | cash2one/xai | de7adad1758f50dd6786bf0111e71a903f039b64 | e76f12c9f4dcf3ac1c7c08b0cc8844c0b0a104b6 | refs/heads/master | 2021-01-19T12:33:54.964379 | 2017-01-28T02:00:50 | 2017-01-28T02:00:50 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 377 | py |
#calss header
class _EASTWARD():
def __init__(self,):
self.name = "EASTWARD"
self.definitions = [u'towards the east: ']
self.parents = []
self.childen = []
self.properties = []
self.jsondata = {}
self.specie = 'adverbs'
def run(self, obj1, obj2):
self.jsondata[obj2] = {}
self.jsondata[obj2]['properties'] = self.name.lower()
return self.jsondata
| [
"xingwang1991@gmail.com"
] | xingwang1991@gmail.com |
109ce855ae435d697085e42c2216825ca6bcaa69 | f6d7c30a7ed343e5fe4859ceaae1cc1965d904b7 | /htdocs/submissions/109ce855ae435d697085e42c2216825ca6bcaa69.py | 5948bdbfe0060551e78191943d956f036399af62 | [] | no_license | pycontest/pycontest.github.io | ed365ebafc5be5d610ff9d97001240289de697ad | 606015cad16170014c41e335b1f69dc86250fb24 | refs/heads/master | 2021-01-10T04:47:46.713713 | 2016-02-01T11:03:46 | 2016-02-01T11:03:46 | 50,828,627 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 323 | py | segs = [
(" _ ","| |","|_|"),
(" "," |"," |"),
(" _ "," _|","|_ "),
(" _ "," _|"," _|"),
(" ","|_|"," |"),
(" _ ","|_ "," _|"),
(" _ ","|_ ","|_|"),
(" _ "," |"," |"),
(" _ ","|_|","|_|"),
(" _ ","|_|"," _|"),
]
def seven_seg(x):
return ''.join([''.join([segs[int(c)][i] for c in x])+'\n' for i in range(3)])
| [
"info@pycontest.net"
] | info@pycontest.net |
c2f0c1ac424c67998653efad41fd2ce956a271fd | 600df3590cce1fe49b9a96e9ca5b5242884a2a70 | /extensions/renderer/api/display_source/wifi_display/DEPS | 6dd95bfe71fe27d844f22dfc02b8217b15317b34 | [
"BSD-3-Clause"
] | permissive | metux/chromium-suckless | efd087ba4f4070a6caac5bfbfb0f7a4e2f3c438a | 72a05af97787001756bae2511b7985e61498c965 | refs/heads/orig | 2022-12-04T23:53:58.681218 | 2017-04-30T10:59:06 | 2017-04-30T23:35:58 | 89,884,931 | 5 | 3 | BSD-3-Clause | 2022-11-23T20:52:53 | 2017-05-01T00:09:08 | null | UTF-8 | Python | false | false | 245 | include_rules = [
# TODO(Mikhail): Consider removing when https://crbug.com/432381 is fixed.
"+media/base",
"+media/video",
"+services/shell/public/cpp",
"+third_party/openh264/src/codec/api",
"+third_party/wds/src/libwds/public",
]
| [
"enrico.weigelt@gr13.net"
] | enrico.weigelt@gr13.net | |
7e9de9bbe9854a34a8389b3762a967c5bde6d776 | f7e8786b1e62222bd1cedcb58383a0576c36a2a2 | /src/mojo/python/tests/bindings_constants_unittest.py | 4db08c1ad06a4f3c0a888874af940f73222f14eb | [
"BSD-3-Clause"
] | permissive | amplab/ray-core | 656915553742302915a363e42b7497037985a91e | 89a278ec589d98bcbc7e57e0b80d055667cca62f | refs/heads/master | 2023-07-07T20:45:40.883095 | 2016-08-06T23:52:23 | 2016-08-06T23:52:23 | 61,343,320 | 4 | 5 | null | 2016-08-06T23:52:24 | 2016-06-17T03:35:34 | C++ | UTF-8 | Python | false | false | 1,458 | py | # Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import math
import unittest
# Generated files
# pylint: disable=F0401
import sample_service_mojom
import test_constants_mojom
class ConstantBindingsTest(unittest.TestCase):
    """Checks that mojom-declared constants are generated correctly.

    Uses ``assertEqual`` instead of the deprecated ``assertEquals`` alias,
    which was removed in Python 3.12.
    """

    def testConstantGeneration(self):
        """Top-level constants keep their declared values, including IEEE-754 specials."""
        self.assertEqual(test_constants_mojom.INT8_VALUE, -2)
        self.assertEqual(test_constants_mojom.UINT64_VALUE, 9999999999999999999)
        self.assertEqual(test_constants_mojom.DOUBLE_INFINITY, float('inf'))
        self.assertEqual(test_constants_mojom.DOUBLE_NEGATIVE_INFINITY,
                         float('-inf'))
        # NaN never compares equal to itself, so isnan() is used instead of
        # an equality assertion.
        self.assertTrue(math.isnan(test_constants_mojom.DOUBLE_NA_N))
        self.assertEqual(test_constants_mojom.FLOAT_INFINITY, float('inf'))
        self.assertEqual(test_constants_mojom.FLOAT_NEGATIVE_INFINITY,
                         float('-inf'))
        self.assertTrue(math.isnan(test_constants_mojom.FLOAT_NA_N))

    def testConstantOnStructGeneration(self):
        """Constants declared inside a struct are exposed on the struct class."""
        self.assertEqual(test_constants_mojom.StructWithConstants.INT8_VALUE, 5)

    def testStructImmutability(self):
        """Generated struct classes reject attribute mutation and deletion."""
        with self.assertRaises(AttributeError):
            sample_service_mojom.Foo.FOOBY = 0
        with self.assertRaises(AttributeError):
            del sample_service_mojom.Foo.FOOBY
        with self.assertRaises(AttributeError):
            sample_service_mojom.Foo.BAR = 1
| [
"pcmoritz@gmail.com"
] | pcmoritz@gmail.com |
6f77aaf06090d8ac168a1a62697e869b4d35e1fa | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p03549/s476969666.py | cad978b40b2c50d40542266d7b31670693fb5e21 | [] | no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 733 | py | def getN():
return int(input())
def getNM():
    """Read one line and return an iterator of its whitespace-separated ints."""
    tokens = input().split()
    return map(int, tokens)
def getList():
    """Read one line and return its whitespace-separated ints as a list."""
    return [int(tok) for tok in input().split()]
def getArray(intn):
    """Read intn lines, one int per line, and return them as a list."""
    values = []
    for _ in range(intn):
        values.append(int(input()))
    return values
def input():
    """Faster replacement for builtin input(): read a raw stdin line, strip trailing whitespace."""
    line = sys.stdin.readline()
    return line.rstrip()
from collections import defaultdict, deque, Counter
from sys import exit
import heapq
import math
import fractions
import copy
from itertools import permutations
from operator import mul
from functools import reduce
from bisect import bisect_left, bisect_right
import sys
sys.setrecursionlimit(1000000000)
mod = 10 ** 9 + 7
from itertools import permutations
from math import factorial, hypot
N, M = getNM()
total = 1900 * M + 100 * (N - M)
print(total * (2 ** M)) | [
"66529651+Aastha2104@users.noreply.github.com"
] | 66529651+Aastha2104@users.noreply.github.com |
e93be5a973f7764581b48774320b849312066e4b | dd3b8bd6c9f6f1d9f207678b101eff93b032b0f0 | /basis/AbletonLive10.1_MIDIRemoteScripts/ableton/v2/control_surface/components/device_parameters.py | f0dbb3048e20c013431dfc8a6a3d098a5ea0bd5b | [] | no_license | jhlax/les | 62955f57c33299ebfc4fca8d0482b30ee97adfe7 | d865478bf02778e509e61370174a450104d20a28 | refs/heads/master | 2023-08-17T17:24:44.297302 | 2019-12-15T08:13:29 | 2019-12-15T08:13:29 | 228,120,861 | 3 | 0 | null | 2023-08-03T16:40:44 | 2019-12-15T03:02:27 | Python | UTF-8 | Python | false | false | 5,816 | py | # uncompyle6 version 3.4.1
# Python bytecode 2.7 (62211)
# Decompiled from: Python 2.7.16 (v2.7.16:413a49145e, Mar 2 2019, 14:32:10)
# [GCC 4.2.1 Compatible Apple LLVM 6.0 (clang-600.0.57)]
# Embedded file name: /Users/versonator/Jenkins/live/output/mac_64_static/Release/python-bundle/MIDI Remote Scripts/ableton/v2/control_surface/components/device_parameters.py
# Compiled at: 2019-05-15 02:17:17
from __future__ import absolute_import, print_function, unicode_literals
from itertools import chain, izip_longest, repeat
from ableton.v2.base import listens, listens_group
from ableton.v2.control_surface import Component, ParameterProvider
from ableton.v2.control_surface.control import ControlList, MappedSensitivitySettingControl
from ableton.v2.control_surface.elements import DisplayDataSource
class DeviceParameterComponent(Component):
    """Maps up to eight encoder controls onto the parameters supplied by a
    parameter provider (decompiled Python 2 source -- see file header)."""
    # Bank of 8 mapped-encoder controls exposed by this component.
    controls = ControlList(MappedSensitivitySettingControl, 8)
    def __init__(self, parameter_provider=None, *a, **k):
        super(DeviceParameterComponent, self).__init__(*a, **k)
        self.parameter_provider = parameter_provider
    def _get_parameter_provider(self):
        return self._parameter_provider
    def _set_parameter_provider(self, provider):
        # Fall back to an empty ParameterProvider so the rest of the class
        # can always assume a valid provider object.
        self._parameter_provider = provider or ParameterProvider()
        self._on_parameters_changed.subject = self._parameter_provider
        self._update_parameters()
        self._on_parameter_provider_changed(provider)
    parameter_provider = property(_get_parameter_provider, _set_parameter_provider)
    def set_parameter_controls(self, encoders):
        self.controls.set_control_element(encoders)
        self._connect_parameters()
    def _connect_parameters(self):
        # Only as many parameters as there are controls can be mapped.
        parameters = self._parameter_provider.parameters[:self.controls.control_count]
        # Python 2 idiom: map(None, a, b) zips with None padding (like
        # zip_longest), so controls beyond the parameter list get their
        # mapping cleared below.
        for control, parameter_info in map(None, self.controls, parameters):
            parameter = parameter_info.parameter if parameter_info else None
            control.mapped_parameter = parameter
            if parameter:
                control.update_sensitivities(parameter_info.default_encoder_sensitivity, parameter_info.fine_grain_encoder_sensitivity)
        return
    def _update_parameters(self):
        if self.is_enabled():
            self._connect_parameters()
    @listens('parameters')
    def _on_parameters_changed(self):
        self._update_parameters()
    def _on_parameter_provider_changed(self, provider):
        # Hook for subclasses; intentionally a no-op here.
        pass
    def update(self):
        super(DeviceParameterComponent, self).update()
        self._update_parameters()
class DisplayingDeviceParameterComponent(DeviceParameterComponent):
    def __init__(self, *a, **k):
        # One DisplayDataSource per encoder slot (8 names + 8 values).
        self._parameter_name_data_sources = map(DisplayDataSource, (u'', u'', u'',
                                                                    u'', u'', u'',
                                                                    u'', u''))
        self._parameter_value_data_sources = map(DisplayDataSource, (u'', u'', u'',
                                                                     u'', u'', u'',
                                                                     u'', u''))
        super(DisplayingDeviceParameterComponent, self).__init__(*a, **k)
    @property
    def parameters(self):
        # Unwrap the provider's parameter-info objects to raw parameters
        # (None where a slot has no parameter).
        return map(lambda p: p and p.parameter, self._parameter_provider.parameters)
    @property
    def parameter_names(self):
        # Parameter display names; '' where a slot has no parameter.
        return map(lambda p: p and p.name or '', self.parameters)
    def set_name_display_line(self, line):
        self._set_display_line(line, self._parameter_name_data_sources)
    def set_value_display_line(self, line):
        self._set_display_line(line, self._parameter_value_data_sources)
    def _set_display_line(self, line, sources):
        # Attach one data source per display segment on the given line.
        if line:
            line.set_num_segments(len(sources))
            for segment in xrange(len(sources)):
                line.segment(segment).set_data_source(sources[segment])
    def clear_display(self):
        # Blank every name and value segment.
        for source in chain(self._parameter_name_data_sources, self._parameter_value_data_sources):
            source.set_display_string('')
    def _update_parameters(self):
        super(DisplayingDeviceParameterComponent, self)._update_parameters()
        if self.is_enabled():
            parameters = self.parameters
            # Re-point the name/value change listeners at the current
            # parameter set, then refresh both display rows.
            self._on_parameter_name_changed.replace_subjects(parameters)
            self._on_parameter_value_changed.replace_subjects(parameters)
            self._update_parameter_names()
            self._update_parameter_values()
    @listens_group('name')
    def _on_parameter_name_changed(self, parameter):
        self._update_parameter_names()
    @listens_group('value')
    def _on_parameter_value_changed(self, parameter):
        self._update_parameter_values()
    def _update_parameter_names(self):
        if self.is_enabled():
            # Pad with None so every data source is written even when there
            # are fewer parameters than display segments.
            params = zip(chain(self.parameter_provider.parameters, repeat(None)), self._parameter_name_data_sources)
            for info, name_data_source in params:
                name = self.info_to_name(info)
                name_data_source.set_display_string(name or '')
        return
    def _update_parameter_values(self):
        if self.is_enabled():
            for parameter, data_source in izip_longest(self.parameters, self._parameter_value_data_sources):
                value_string = self.parameter_to_string(parameter)
                if data_source:
                    data_source.set_display_string(value_string)
    def info_to_name(self, info):
        # NOTE: `parameter` is computed but unused (decompiler artifact).
        parameter = info and info.parameter
        return info and info.name or ''
def parameter_to_string(self, parameter):
if parameter == None:
return ''
else:
return unicode(parameter)
def parameter_to_value(self, parameter):
return parameter.value | [
"jharrington@transcendbg.com"
] | jharrington@transcendbg.com |
b340af1ea2d988eda7458621a92f3e662b66ecfa | dc60fbae177523b1c1d6c6317388f18a96aa9c6e | /code/proj/__init__.py | 88d5179dd5c5c087702ed04f6f02aa290c501fe7 | [] | no_license | lucheol/prjwn_example | b611298995d13adc42ee6c7185eb47ba9fdad468 | 5154e8f12df7a2c4dd0f1f552829c8df67e6ef3d | refs/heads/main | 2023-07-10T02:46:53.646317 | 2021-08-20T23:48:56 | 2021-08-20T23:48:56 | 398,425,150 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 186 | py | import os
from .celery import app as celery_app
__all__ = ["celery_app"]
__version__ = os.getenv("APP_VERSION", "latest")
VERSION = __version__
__staging_code_version__ = __version__
| [
"lucheol@gmail.com"
] | lucheol@gmail.com |
327964de13a1cb62e967f1b59a1327f7f4617ff7 | f124a2bc35fa348d5f5b637eae2a736d67470c76 | /work4/scrapy2/dongqiudi/dongqiudi/items.py | 41b0aaa06f81a6fc2479b65a24574ac3948b5e07 | [
"Apache-2.0"
] | permissive | arfu2016/DuReader | fd173c0eb90abedad0ca65bd9b847ccd58bf567a | 66934852c508bff5540596aa71d5ce40c828b37d | refs/heads/master | 2021-04-06T05:45:13.002887 | 2018-09-06T03:58:26 | 2018-09-06T03:58:26 | 124,838,393 | 0 | 0 | Apache-2.0 | 2018-03-12T05:35:13 | 2018-03-12T05:35:13 | null | UTF-8 | Python | false | false | 541 | py | # -*- coding: utf-8 -*-
# Define here the models for your scraped items
#
# See documentation in:
# https://doc.scrapy.org/en/latest/topics/items.html
import scrapy
class DongqiudiItem(scrapy.Item):
    """Scrapy item holding one scraped Dongqiudi article plus crawl metadata."""
    # Article fields populated by the spider.
    title = scrapy.Field()
    description = scrapy.Field()
    url = scrapy.Field()
    display_time = scrapy.Field()
    page_url = scrapy.Field()
    # Crawl bookkeeping fields -- presumably filled in by a pipeline or
    # spider middleware; TODO confirm against the project's pipelines.
    project = scrapy.Field()
    spider = scrapy.Field()
    server = scrapy.Field()
    date = scrapy.Field()
| [
"deco@cubee.com"
] | deco@cubee.com |
f622ebd46ffe3f3629eb46422ae252e63d161091 | 8adc9d6bb8d9ce60ca970340154fcfb5c9fcde05 | /rowpack/gzipfile.py | 14f9bcb7f8ada42867bc024c5286a8bebc986170 | [
"MIT"
] | permissive | CivicSpleen/rowpack | c7bceab330750a84a4ed55a51b40566550c8c1f5 | 7e16fbeaafa92919a2759d2e8e9565f86209bf76 | refs/heads/master | 2021-06-08T11:30:52.675170 | 2016-12-09T23:05:41 | 2016-12-09T23:05:41 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,062 | py | # -*- coding: utf-8 -*-
# Copyright (c) 2016 Civic Knowledge. This file is licensed under the terms of the
# MIT License, included in this distribution as LICENSE.txt
"""
A Hacked GZIP file implementation
"""
import gzip
import six
class GzipFile(gzip.GzipFile):
    """A Hacked GzipFile that will read only one gzip member and properly handle extra data afterward,
    by ignoring it"""
    def __init__(self, filename=None, mode=None, compresslevel=9, fileobj=None, mtime=None, end_of_data=None):
        # end_of_data: absolute file offset at/after which no new gzip member
        # may be started; None disables the limit.
        super(GzipFile, self).__init__(filename, mode, compresslevel, fileobj, mtime)
        self._end_of_data = end_of_data
    def _read(self, size=1024):
        """Alters the _read method to stop reading new gzip members when we've reached the end of the row data. """
        # NOTE(review): this relies on private gzip internals (_new_member,
        # fileobj, _read) whose behavior changed across Python versions --
        # verify against the targeted interpreter.
        if self._new_member and self._end_of_data and self.fileobj.tell() >= self._end_of_data:
            if six.PY3:
                # Presumably the Py3 end-of-stream convention for _read.
                return None
            else:
                # Presumably the Py2 end-of-stream convention for _read.
                raise EOFError('Reached EOF')
        else:
            return super(GzipFile, self)._read(size)
| [
"eric@clarinova.com"
] | eric@clarinova.com |
f12857443bc525c92d3d33f85cb368d0a1a36569 | a86293a2033c06410aa8ed19bcbce8ca55ea3c55 | /src/client_libraries/python/microsoft/dynamics/customerinsights/api/models/no_content_result_py3.py | bdd02617bd95b71dc4ec6249eb80fa620fef0358 | [
"LicenseRef-scancode-generic-cla",
"MIT"
] | permissive | ramotheonly/Dynamics365-CustomerInsights-Client-Libraries | a3ca28aa78d2b5509e65d9895ff4a0d42d05f611 | e00632f7972717b03e0fb1a9e2667e8f9444a0fe | refs/heads/main | 2023-08-02T08:09:04.063030 | 2021-09-28T22:42:15 | 2021-09-28T22:42:15 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 725 | py | # coding=utf-8
# --------------------------------------------------------------------------
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class NoContentResult(Model):
    """NoContentResult.

    Autogenerated msrest model wrapping a bare HTTP status code.

    :param status_code:
    :type status_code: int
    """
    # Maps the Python attribute to its wire (JSON) key and type for
    # msrest (de)serialization.
    _attribute_map = {
        'status_code': {'key': 'statusCode', 'type': 'int'},
    }
    def __init__(self, *, status_code: int=None, **kwargs) -> None:
        super(NoContentResult, self).__init__(**kwargs)
        self.status_code = status_code
| [
"michaelajohnston@mac.com"
] | michaelajohnston@mac.com |
0d6a9fd471e5509e89387a6ff4179b972a41469e | 3ba218a6bd3ba88e8f223d1a501a94cc33916ab4 | /pylib/flake8/flake8/_pyflakes.py | 86d28ed50e5be81ea96f7b0b53f8f2402e5f2ef1 | [
"MIT"
] | permissive | armenzg/version-control-tools | a8ba9f26f76d208f0dc514791da76de4a792bb8b | 9f045b19a4284683f97296f51ca0d2a2d5b75a76 | refs/heads/master | 2020-12-30T20:05:25.673475 | 2015-12-16T20:33:05 | 2015-12-16T20:33:05 | 46,126,008 | 1 | 1 | null | 2015-11-26T15:09:37 | 2015-11-13T14:19:30 | Python | UTF-8 | Python | false | false | 1,490 | py | # -*- coding: utf-8 -*-
import pyflakes
import pyflakes.checker
def patch_pyflakes():
    """Attach flake8-style error codes (Fxxx) to Pyflakes message classes."""
    code_table = (
        'F401 UnusedImport',
        'F402 ImportShadowedByLoopVar',
        'F403 ImportStarUsed',
        'F404 LateFutureImport',
        'F810 Redefined',  # XXX Obsolete?
        'F811 RedefinedWhileUnused',
        'F812 RedefinedInListComp',
        'F821 UndefinedName',
        'F822 UndefinedExport',
        'F823 UndefinedLocal',
        'F831 DuplicateArgument',
        'F841 UnusedVariable',
    )
    # Build message-name -> code mapping ("F401 UnusedImport" => {"UnusedImport": "F401"}).
    codes = {}
    for entry in code_table:
        code, message_name = entry.split()
        codes[message_name] = code
    # Messages without a known code fall back to the catch-all F999.
    for name, obj in vars(pyflakes.messages).items():
        if name[0].isupper() and obj.message:
            obj.flake8_msg = '%s %s' % (codes.get(name, 'F999'), obj.message)
patch_pyflakes()
class FlakesChecker(pyflakes.checker.Checker):
    """Subclass the Pyflakes checker to conform with the flake8 API."""
    name = 'pyflakes'
    version = pyflakes.__version__
    @classmethod
    def add_options(cls, parser):
        # Register the --builtins flag and allow it in config files.
        parser.add_option('--builtins',
                          help="define more built-ins, comma separated")
        parser.config_options.append('builtins')
    @classmethod
    def parse_options(cls, options):
        # Extend the class-level set of recognized built-in names.
        if options.builtins:
            cls.builtIns = cls.builtIns.union(options.builtins.split(','))
    def run(self):
        # Yield (line, col, text, type) tuples, the shape flake8 consumes.
        for m in self.messages:
            yield m.lineno, 0, (m.flake8_msg % m.message_args), m.__class__
| [
"gps@mozilla.com"
] | gps@mozilla.com |
efeab7c2358cdbfd0cbe7ffc8f6a4ff7afed9181 | 2ec3305bd43171f435092a1070b4e579ff5fc64e | /tests/test_settings.py | 609dcb4f0cdd93dd393d55cf0d160414b43a0183 | [
"MIT"
] | permissive | altheasmith/django-location-field | c5c6bf6663c8c0ed18a5d479d48349f3ffc2815d | c0dfc81b952f1fd4f6a6326be8409744132f2815 | refs/heads/master | 2021-01-11T20:17:06.913492 | 2017-01-02T21:00:42 | 2017-01-02T21:00:42 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 749 | py | import sys
import os
# True when the environment opts in to the GeoDjango/SpatiaLite test run.
TEST_SPATIAL = 'TEST_SPATIAL' in os.environ
# Throwaway key -- these settings are only used by the test runner.
SECRET_KEY = 'fake-key'
INSTALLED_APPS = [
    "location_field.apps.DefaultConfig",
    "tests",
]
STATIC_URL = '/static/'
TEMPLATES = [
    {
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'APP_DIRS': True,
    },
]
if TEST_SPATIAL:
    # Spatial test run: SpatiaLite GIS database backend.
    DATABASES = {
        'default': {
            'ENGINE': 'django.contrib.gis.db.backends.spatialite',
            'NAME': 'db.sqlite3',
        }
    }
    if sys.platform == 'darwin':
        # Typical Homebrew install location of mod_spatialite on macOS.
        SPATIALITE_LIBRARY_PATH = '/usr/local/lib/mod_spatialite.dylib'
else:
    # Non-spatial run: plain sqlite3 backend.
    DATABASES = {
        'default': {
            'ENGINE': 'django.db.backends.sqlite3',
            'NAME': 'db.sqlite3',
        }
    }
| [
"caio.ariede@gmail.com"
] | caio.ariede@gmail.com |
0b1288620a148bf78c995cb26e6b5baad32bad0c | dffcf449a5fc73c469c3db05f31e3f604cb73e20 | /mi/name.py | 36aeeee2714c751276da5832ff36b17319981625 | [
"MIT"
] | permissive | mountain/mu | 60ca71109b984189abdf0f8254d487eb9479f8d9 | 9834a5aea2ade8ad4462fa959d2d00c129335b7c | refs/heads/master | 2021-01-19T21:27:50.658259 | 2014-04-13T23:26:28 | 2014-04-13T23:26:28 | 14,478,703 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,284 | py | from os.path import join
from interface import Eval, Egal
from env import Environment
# We try to introduce a clojure-like namespace in mu
#
class NamedEntity(Eval):
    """An entity bound to a name inside a scope.

    NOTE(review): the instance attributes assigned in __init__ (scope, name,
    entity) shadow the same-named accessor methods below, so those methods are
    unreachable on instances -- attribute access returns the stored values
    directly.
    """
    def __init__(self, scope, name, entity):
        self.scope = scope
        self.name = name
        self.entity = entity
    def __repr__(self):
        return self.qname()
    def scope(self):
        return self.scope
    def name(self):
        return self.name
    def entity(self):
        return self.entity
    def qname(self):
        # Slash-qualified name: "<scope qualified name>/<entity name>".
        return "%s/%s" % (self.scope.qname(), self.name)
class NamedSpace():
    """A dot-qualified namespace node chained to a parent space."""
    def __init__(self, par, name):
        self.parent = par
        self.name = name
    def __repr__(self):
        return self.qname()
    def qname(self):
        # Dotted qualified name: "<parent qname>.<name>".
        return "{}.{}".format(self.parent.qname(), self.name)
    def path(self):
        # Filesystem path mirroring the namespace chain.
        return join(self.parent.path(), self.name)
    def enviroment(self):
        # Fresh child environment layered on the parent's environment.
        return Environment(self.parent.enviroment(), {})
class NamedScope(NamedSpace):
    """A namespace whose environment is populated by processing its source path."""
    def __init__(self, par, name):
        NamedSpace.__init__(self, par, name)
    def enviroment(self):
        # (sic: "enviroment" spelling matches the parent class's method name)
        par_env = self.parent.enviroment()
        mu = par_env.get("__mu__")
        mu.process(self.path())
        # NOTE(review): `bnds` is never defined in this module, so this line
        # raises NameError when executed; presumably
        # `bnds = mu.process(self.path())` was intended -- confirm before fixing.
        return Environment(par_env, bnds)
| [
"mingli.yuan@gmail.com"
] | mingli.yuan@gmail.com |
f81d6af36c25b986d36d08cf74c3282390e69beb | f14a00cd0e23533283020b07c2c3fc3ad69f3145 | /configs/b_to_a_only.py | a6aca42700f1ebcb4c533d43cbca35d003a5795a | [
"MIT"
] | permissive | diogo149/CauseEffectPairsPaper | cfb7b1be6f5c40f2e108220fc5d88e10110deace | 7d91db6a6567182eb028cfa6fcfaaff7137b6491 | refs/heads/master | 2020-05-18T15:12:08.924175 | 2014-01-22T19:31:59 | 2014-01-22T19:31:59 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,291 | py | import numpy as np
from scipy.stats import skew, kurtosis, shapiro, pearsonr, ansari, mood, levene, fligner, bartlett, mannwhitneyu
from scipy.spatial.distance import braycurtis, canberra, chebyshev, cityblock, correlation, cosine, euclidean, hamming, jaccard, kulsinski, matching, russellrao, sqeuclidean
from sklearn.preprocessing import LabelBinarizer
from sklearn.linear_model import Ridge, LinearRegression, LogisticRegression
from sklearn.tree import DecisionTreeRegressor, DecisionTreeClassifier
from sklearn.ensemble import RandomForestRegressor, GradientBoostingRegressor, RandomForestClassifier, GradientBoostingClassifier
from sklearn.neighbors import KNeighborsRegressor, KNeighborsClassifier
from sklearn.naive_bayes import GaussianNB
from sklearn.metrics import explained_variance_score, mean_absolute_error, mean_squared_error, r2_score, accuracy_score, roc_auc_score, average_precision_score, f1_score, hinge_loss, matthews_corrcoef, precision_score, recall_score, zero_one_loss
from sklearn.metrics.cluster import adjusted_mutual_info_score, adjusted_rand_score, completeness_score, homogeneity_completeness_v_measure, homogeneity_score, mutual_info_score, normalized_mutual_info_score, v_measure_score
from boomlet.utils.aggregators import to_aggregator
from boomlet.metrics import max_error, error_variance, relative_error_variance, gini_loss, categorical_gini_loss
from boomlet.transform.type_conversion import Discretizer
from autocause.feature_functions import *
"""
Functions used to combine a list of features into one coherent one.
Sample use:
1. to convert categorical to numerical, we perform a one hot encoding
2. treat each binary column as a separate numerical feature
3. compute numerical features as usual
4. use each of the following functions to create a new feature
(with the input as the nth feature for each of the columns)
WARNING: these will be used in various locations throughout the code base
and will result in feature size growing at faster than a linear rate
"""
AGGREGATORS = [
to_aggregator("max"),
to_aggregator("min"),
to_aggregator("median"),
to_aggregator("mode"),
to_aggregator("mean"),
# to_aggregator("sum"),
]
"""
Boolean flags specifying whether or not to perform conversions
"""
CONVERT_TO_NUMERICAL = True
CONVERT_TO_CATEGORICAL = True
"""
Functions that compute a metric on a single 1-D array
"""
UNARY_NUMERICAL_FEATURES = [
normalized_entropy,
skew,
kurtosis,
np.std,
shapiro,
]
UNARY_CATEGORICAL_FEATURES = [
lambda x: len(set(x)), # number of unique
]
"""
Functions that compute a metric on two 1-D arrays
"""
BINARY_NN_FEATURES = [
independent_component,
chi_square,
pearsonr,
correlation_magnitude,
braycurtis,
canberra,
chebyshev,
cityblock,
correlation,
cosine,
euclidean,
hamming,
sqeuclidean,
ansari,
mood,
levene,
fligner,
bartlett,
mannwhitneyu,
]
BINARY_NC_FEATURES = [
]
BINARY_CN_FEATURES = [
categorical_numerical_homogeneity,
bucket_variance,
anova,
]
BINARY_CC_FEATURES = [
categorical_categorical_homogeneity,
anova,
dice_,
jaccard,
kulsinski,
matching,
rogerstanimoto_,
russellrao,
sokalmichener_,
sokalsneath_,
yule_,
adjusted_mutual_info_score,
adjusted_rand_score,
completeness_score,
homogeneity_completeness_v_measure,
homogeneity_score,
mutual_info_score,
normalized_mutual_info_score,
v_measure_score,
]
"""
Dictionaries of input type (e.g. B corresponds to pairs where binary
data is the input) to pairs of converter functions and a boolean flag
of whether or not to aggregate over the output of the converter function
converter functions should have the type signature:
converter(X_raw, X_current_type, Y_raw, Y_type)
where X_raw is the data to convert
"""
NUMERICAL_CONVERTERS = dict(
N=lambda x, *args: x, # identity function
B=lambda x, *args: x, # identity function
C=lambda x, *args: LabelBinarizer().fit_transform(x),
)
CATEGORICAL_CONVERTERS = dict(
N=lambda x, *args: Discretizer().fit_transform(x).flatten(),
B=lambda x, *args: x, # identity function
C=lambda x, *args: x, # identity function
)
"""
Whether or not the converters can result in a 2D output. This must be set to True
if any of the respective converts can return a 2D output.
"""
NUMERICAL_CAN_BE_2D = True
CATEGORICAL_CAN_BE_2D = False
"""
Estimators used to provide a fit for a variable
"""
REGRESSION_ESTIMATORS = [
Ridge(),
LinearRegression(),
DecisionTreeRegressor(random_state=0),
RandomForestRegressor(random_state=0),
GradientBoostingRegressor(subsample=0.5, n_estimators=10, random_state=0),
KNeighborsRegressor(),
]
CLASSIFICATION_ESTIMATORS = [
LogisticRegression(random_state=0),
DecisionTreeClassifier(random_state=0),
RandomForestClassifier(random_state=0),
GradientBoostingClassifier(subsample=0.5, n_estimators=10, random_state=0),
KNeighborsClassifier(),
GaussianNB(),
]
"""
Functions to provide a value of how good a fit on a variable is
"""
REGRESSION_METRICS = [
explained_variance_score,
mean_absolute_error,
mean_squared_error,
r2_score,
max_error,
error_variance,
relative_error_variance,
gini_loss,
] + BINARY_NN_FEATURES
REGRESSION_RESIDUAL_METRICS = [
] + UNARY_NUMERICAL_FEATURES
BINARY_PROBABILITY_CLASSIFICATION_METRICS = [
roc_auc_score,
hinge_loss,
] + REGRESSION_METRICS
RESIDUAL_PROBABILITY_CLASSIFICATION_METRICS = [
] + REGRESSION_RESIDUAL_METRICS
BINARY_CLASSIFICATION_METRICS = [
accuracy_score,
average_precision_score,
f1_score,
matthews_corrcoef,
precision_score,
recall_score,
zero_one_loss,
categorical_gini_loss,
]
ND_CLASSIFICATION_METRICS = [ # metrics for N-dimensional classification
] + BINARY_CC_FEATURES
"""
Functions to assess the model (e.g. complexity) of the fit on a numerical variable
of type signature:
metric(clf, X, y)
"""
REGRESSION_MODEL_METRICS = [
# TODO model complexity metrics
]
CLASSIFICATION_MODEL_METRICS = [
# TODO use regression model metrics on predict_proba
]
"""
The operations to perform on the A->B features and B->A features.
"""
RELATIVE_FEATURES = [
# Identity functions, comment out the next 2 lines for only relative features
# lambda x, y: x,
lambda x, y: y,
# lambda x, y: x - y,
]
"""
Whether or not to treat each observation (A,B) as two observations: (A,B) and (B,A)
If this is done and training labels are given, those labels will have to be
reflected as well. The reflection is performed through appending at the end.
(e.g. if we have N training examples, observation N+1 in the output will be
the first example reflected)
"""
REFLECT_DATA = False
"""
Whether or not metafeatures based on the types of A and B are generated.
e.g. 1/0 feature on whether or not A is Numerical, etc.
"""
ADD_METAFEATURES = False
"""
Whether or not to generate combination features between the computed
features and metafeatures.
e.g. for each feature and metafeature, generate a new feature which is the
product of the two
WARNING: will generate a LOT of features (approximately 21 times as many)
"""
COMPUTE_METAFEATURE_COMBINATIONS = False
| [
"diogo149@gmail.com"
] | diogo149@gmail.com |
96104e8d128851cf827d8d96c841b1f74949663f | 57b831d647686e4a86c856753d85b5b2516cfc55 | /121 - Best Time to Buy and Sell Stock.py | 7a71f8dc53b8a2c0dce7a4be63e212136413f07f | [] | no_license | PraneshASP/LeetCode-Solutions-2 | a7a079a9e7e2b53509bb49cab7832b24599d61cb | b3dd0c0b3e5be016d35ebf3d5a8bcf38cb944d3a | refs/heads/master | 2022-12-17T00:05:06.070208 | 2020-09-25T11:41:31 | 2020-09-25T11:41:31 | 298,555,918 | 0 | 0 | null | 2020-09-25T11:40:09 | 2020-09-25T11:40:08 | null | UTF-8 | Python | false | false | 1,250 | py | # Solution 1: Using dynamic programming. This solution takes O(n) time and O(n) space.
class Solution(object):
    def maxProfit(self, prices):
        """Maximum profit from one buy followed by one sell.

        Dynamic-programming variant: precompute, for each day, the highest
        price on or after that day, then take the best (future max - price)
        over all days. O(n) time, O(n) extra space.

        :type prices: List[int]
        :rtype: int
        """
        n = len(prices)
        if n <= 1:
            return 0  # need at least two days to trade
        # suffix_max[i] = max price observable from day i onward.
        suffix_max = [0] * n
        suffix_max[n - 1] = prices[n - 1]
        for day in range(n - 2, -1, -1):
            suffix_max[day] = max(prices[day], suffix_max[day + 1])
        best = 0
        for day in range(n):
            best = max(best, suffix_max[day] - prices[day])
        return best
# Solution 2: We can do better. Here's a solution in O(n) time and O(1) space.
class Solution(object):
def maxProfit(self, prices):
"""
:type prices: List[int]
:rtype: int
"""
if len(prices) <= 1:
return 0 # no transaction can be made
minPrice = prices[0]
ans = 0
for i in range(1, len(prices)):
minPrice = min(minPrice, prices[i])
if prices[i] - minPrice > ans:
ans = prices[i] - minPrice
return ans | [
"blakebrown129@gmail.com"
] | blakebrown129@gmail.com |
98dc26a59a1e2f6aa848bc260ed093e8eac30517 | f1364645e43b60c05b746083981e5c7f169702ba | /strategy/phase_out/s20_1.py | 3c5df97bb668af54aa0366afd65827dfd60cb945 | [] | no_license | geniustom/AI_Trading | bf41b946abe1f431a98078878d4b5f28a9a0d5c3 | 8c21d966053d64a224dc6b277ac9a33c576134ea | refs/heads/master | 2020-05-29T18:35:43.400029 | 2020-03-28T21:11:09 | 2020-03-28T21:11:09 | 189,305,491 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,358 | py | # coding=UTF-8
###############################################################################
def filter1(self,I):
    """Decide (once per day) whether the strategy may trade today.

    Returns self.RunToday: 1 = run, 0 = skip. A value of -1 marks "not yet
    decided"; the decision is derived from 15-bar opening statistics.
    """
    import lib.filter as fl
    if self.RunToday==-1:
        self.RunToday=1
        # R: opening volume of the small ("mini") futures contract; M: the
        # big-contract "un-purified major-player action" metric -- names are
        # translations of the Chinese data keys; TODO confirm exact meaning.
        R=fl.getOpenVolABS(I.get("小台成交量"),15,start=30)
        M=fl.getOpenVolABS(I.get("大台未純化主力作為"),15,start=30)
        # Original spreadsheet rule: IF(OR(AND((R2>220000),(R2<280000)),(M2<18)),0,AV2)
        if (R>220000 and R<280000) or (M<18):
            self.RunToday=0
    return self.RunToday
###############################################################################
def s1(self,PRICE,i,I): # 2330 1482->1888 -- original author's note (presumably a backtest result; unconfirmed)
    """Entry logic driven by the big/small-contract "hidden hand" indicators.

    Compares the latest indicator values against their baseline at bar 30:
    enters short when both deltas are negative, long when both are positive,
    and always runs the daily forced-exit check. Skips the day entirely when
    filter1 disallows trading.
    """
    if filter1(self,I)==0:
        return
    baseT= 30
    # Require at least 15 bars beyond the baseline bar before trading.
    if i< (baseT+15) : return
    # Baseline values of the big/small-contract indicators at bar baseT
    # ("中市大台黑手"/"中市小台黑手" = big/small-contract "hidden hand" series).
    base1= I.get("中市大台黑手")[baseT]
    base2= I.get("中市小台黑手")[baseT]
    # Deltas of the latest completed bar (i-1) versus the baseline.
    bd=I.get("中市大台黑手")[i-1]-base1
    md=I.get("中市小台黑手")[i-1]-base2
    if bd<0 and md<0 : self.EnterShort(PRICE)
    if bd>0 and md>0 : self.EnterLong(PRICE)
    # Daily end-of-session exit handling.
    self.CheckDailyExitAll(I.get("TIME")[i],PRICE)
###############################################################################
import os
STittle=u"[s20_1]中市大小台黑手策略"
FName=os.path.split(__file__)[1].split('.')[0]
if __name__ == '__main__':
exec(open(os.path.split(os.path.realpath(__file__))[0]+'\\init.py').read()) | [
"Geniustom@gmail.com"
] | Geniustom@gmail.com |
d61187dfe5efd828f42d0012d9c9be4768e8d7ab | 3cdb4faf34d8375d6aee08bcc523adadcb0c46e2 | /web/env/lib/python3.6/site-packages/django/contrib/postgres/operations.py | 95e7edcdea173c3c8aa70bba095df089b6b782b7 | [
"MIT",
"GPL-3.0-only"
] | permissive | rizwansoaib/face-attendence | bc185d4de627ce5adab1cda7da466cb7a5fddcbe | 59300441b52d32f3ecb5095085ef9d448aef63af | refs/heads/master | 2020-04-25T23:47:47.303642 | 2019-09-12T14:26:17 | 2019-09-12T14:26:17 | 173,157,284 | 45 | 12 | MIT | 2020-02-11T23:47:55 | 2019-02-28T17:33:14 | Python | UTF-8 | Python | false | false | 2,010 | py | from django.contrib.postgres.signals import (
get_citext_oids, get_hstore_oids, register_type_handlers,
)
from django.db.migrations.operations.base import Operation
class CreateExtension(Operation):
    """Migration operation that installs a named PostgreSQL extension."""
    # Reversible because database_backwards drops the extension again.
    reversible = True
    def __init__(self, name):
        self.name = name
    def state_forwards(self, app_label, state):
        # Extensions live outside Django's model state; nothing to record.
        pass
    def database_forwards(self, app_label, schema_editor, from_state, to_state):
        # Only PostgreSQL understands CREATE EXTENSION; silently skip elsewhere.
        if schema_editor.connection.vendor != 'postgresql':
            return
        schema_editor.execute("CREATE EXTENSION IF NOT EXISTS %s" % schema_editor.quote_name(self.name))
        # Clear cached, stale oids.
        get_hstore_oids.cache_clear()
        get_citext_oids.cache_clear()
        # Registering new type handlers cannot be done before the extension is
        # installed, otherwise a subsequent data migration would use the same
        # connection.
        register_type_handlers(schema_editor.connection)
    def database_backwards(self, app_label, schema_editor, from_state, to_state):
        schema_editor.execute("DROP EXTENSION %s" % schema_editor.quote_name(self.name))
        # Clear cached, stale oids.
        get_hstore_oids.cache_clear()
        get_citext_oids.cache_clear()
    def describe(self):
        return "Creates extension %s" % self.name
# Convenience operations, one per commonly used PostgreSQL extension. Each
# fixes `name` directly instead of calling CreateExtension.__init__ with an
# argument (equivalent, since the base __init__ only sets self.name).
class BtreeGinExtension(CreateExtension):
    def __init__(self):
        self.name = 'btree_gin'
class BtreeGistExtension(CreateExtension):
    def __init__(self):
        self.name = 'btree_gist'
class CITextExtension(CreateExtension):
    def __init__(self):
        self.name = 'citext'
class CryptoExtension(CreateExtension):
    def __init__(self):
        self.name = 'pgcrypto'
class HStoreExtension(CreateExtension):
    def __init__(self):
        self.name = 'hstore'
class TrigramExtension(CreateExtension):
    def __init__(self):
        self.name = 'pg_trgm'
class UnaccentExtension(CreateExtension):
    def __init__(self):
        self.name = 'unaccent'
| [
"rizwansoaib@gmail.com"
] | rizwansoaib@gmail.com |
992a4c4ab94c6659ebe32bc2d02e62ca5ac23696 | fe93774e5cb4e24d7a82518bbf702b19cf6acc15 | /meiduo_mall/celery_tasks/send_email/tasks.py | 373a4acc470c107e5cab13dee2fd489db62e51bd | [
"MIT"
] | permissive | lhz0707/meiduo | 8f37bdea14923cf444a5ed4e0ac801b2b93324e2 | 5ba6bbb82a28f5c93e7c8d40cdab7ee41b6593d0 | refs/heads/master | 2020-07-21T19:22:53.938394 | 2019-09-23T13:36:59 | 2019-09-23T13:36:59 | 206,953,905 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 615 | py | # 实现验证邮箱异步任务
from django.core.mail import send_mail
from celery_tasks.main import celery_app
@celery_app.task(name='send_emails')
def send_emails(to_email, verify_url, email_from):
    """Celery task: send the account-activation email for Meiduo mall.

    :param to_email: recipient address (also shown in the message body)
    :param verify_url: signed activation link embedded in the email
    :param email_from: sender address
    """
    subject = "美多商城邮箱验证"
    # Fixed: the anchor was closed with a second "<a>" instead of "</a>",
    # producing malformed HTML in mail clients.
    html_message = '<p>尊敬的用户您好!</p>' \
                   '<p>感谢您使用美多商城。</p>' \
                   '<p>您的邮箱为:%s 。请点击此链接激活您的邮箱:</p>' \
                   '<p><a href="%s">%s</a></p>' % (to_email, verify_url, verify_url)
    # Plain-text body left empty; the HTML alternative carries the content.
    send_mail(subject, '', email_from, [to_email], html_message=html_message)
| [
"xwp_fullstack@163.com"
] | xwp_fullstack@163.com |
d37e661dbf53751fa7dbf08a7ab19b956784c05c | ac8ffabf4d7339c5466e53dafc3f7e87697f08eb | /python_solutions/328.odd-even-linked-list.py | 5c25db3da9de9a7ee32d904c17c6d68fc73209f4 | [] | no_license | h4hany/leetcode | 4cbf23ea7c5b5ecfd26aef61bfc109741f881591 | 9e4f6f1a2830bd9aab1bba374c98f0464825d435 | refs/heads/master | 2023-01-09T17:39:06.212421 | 2020-11-12T07:26:39 | 2020-11-12T07:26:39 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,946 | py | #
# @lc app=leetcode id=328 lang=python3
#
# [328] Odd Even Linked List
#
# https://leetcode.com/problems/odd-even-linked-list/description/
#
# algorithms
# Medium (49.53%)
# Total Accepted: 153K
# Total Submissions: 308.6K
# Testcase Example: '[1,2,3,4,5]'
#
# Given a singly linked list, group all odd nodes together followed by the even
# nodes. Please note here we are talking about the node number and not the
# value in the nodes.
#
# You should try to do it in place. The program should run in O(1) space
# complexity and O(nodes) time complexity.
#
# Example 1:
#
#
# Input: 1->2->3->4->5->NULL
# Output: 1->3->5->2->4->NULL
#
#
# Example 2:
#
#
# Input: 2->1->3->5->6->4->7->NULL
# Output: 2->3->6->7->1->5->4->NULL
#
#
# Note:
#
#
# The relative order inside both the even and odd groups should remain as it
# was in the input.
# The first node is considered odd, the second node even and so on ...
#
#
#
# Definition for singly-linked list.
class ListNode:
def __init__(self, x):
self.val = x
self.next = None
def arr2linkedlist(arr):
if len(arr) == 0:
return
head = ListNode(arr[0])
tail = head
for i in arr[1:]:
tail.next = ListNode(i)
tail = tail.next
return head
def linkedlist2arr(head):
ans = []
cnt = 0
while head:
ans.append(head.val)
# print(head.val)
head = head.next
# cnt += 1
# if cnt>10: break
return ans
class Solution:
def oddEvenList(self, head):
if not head: return head
odd = head
even = head.next
evenhead = even
while even and even.next:
odd.next = even.next
odd = even.next
even.next = odd.next
even = odd.next
odd.next = evenhead
return head
# s = Solution()
# arr = [2,1,3,5,6,4,7]
# head = arr2linkedlist(arr)
# newhead = s.oddEvenList(head)
# newhead = head
# print(linkedlist2arr(newhead))
| [
"ssrzz@pm.me"
] | ssrzz@pm.me |
fff97f21d10337de5d9025efabc43cbd7706e300 | 63c7060562ec5d1a9153f0454ea6886b0a62a28e | /tb/axil_adapter/test_axil_adapter.py | f60a8d5948ad9f713ac6065916149fe8a74130c0 | [
"MIT"
] | permissive | alexforencich/verilog-axi | 666e6dfbd14fd124bdcbc2798b4f557347fb8261 | 38915fb5330cb8270b454afc0140a94489dc56db | refs/heads/master | 2023-03-30T07:34:17.721579 | 2023-03-30T07:12:13 | 2023-03-30T07:12:13 | 142,810,315 | 1,042 | 342 | MIT | 2023-03-05T19:52:57 | 2018-07-30T01:36:26 | Verilog | UTF-8 | Python | false | false | 7,622 | py | """
Copyright (c) 2020 Alex Forencich
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""
import itertools
import logging
import os
import random
import cocotb_test.simulator
import pytest
import cocotb
from cocotb.clock import Clock
from cocotb.triggers import RisingEdge, Timer
from cocotb.regression import TestFactory
from cocotbext.axi import AxiLiteBus, AxiLiteMaster, AxiLiteRam
class TB(object):
def __init__(self, dut):
self.dut = dut
self.log = logging.getLogger("cocotb.tb")
self.log.setLevel(logging.DEBUG)
cocotb.start_soon(Clock(dut.clk, 10, units="ns").start())
self.axil_master = AxiLiteMaster(AxiLiteBus.from_prefix(dut, "s_axil"), dut.clk, dut.rst)
self.axil_ram = AxiLiteRam(AxiLiteBus.from_prefix(dut, "m_axil"), dut.clk, dut.rst, size=2**16)
def set_idle_generator(self, generator=None):
if generator:
self.axil_master.write_if.aw_channel.set_pause_generator(generator())
self.axil_master.write_if.w_channel.set_pause_generator(generator())
self.axil_master.read_if.ar_channel.set_pause_generator(generator())
self.axil_ram.write_if.b_channel.set_pause_generator(generator())
self.axil_ram.read_if.r_channel.set_pause_generator(generator())
def set_backpressure_generator(self, generator=None):
if generator:
self.axil_master.write_if.b_channel.set_pause_generator(generator())
self.axil_master.read_if.r_channel.set_pause_generator(generator())
self.axil_ram.write_if.aw_channel.set_pause_generator(generator())
self.axil_ram.write_if.w_channel.set_pause_generator(generator())
self.axil_ram.read_if.ar_channel.set_pause_generator(generator())
async def cycle_reset(self):
self.dut.rst.setimmediatevalue(0)
await RisingEdge(self.dut.clk)
await RisingEdge(self.dut.clk)
self.dut.rst.value = 1
await RisingEdge(self.dut.clk)
await RisingEdge(self.dut.clk)
self.dut.rst.value = 0
await RisingEdge(self.dut.clk)
await RisingEdge(self.dut.clk)
async def run_test_write(dut, data_in=None, idle_inserter=None, backpressure_inserter=None):
tb = TB(dut)
byte_lanes = tb.axil_master.write_if.byte_lanes
await tb.cycle_reset()
tb.set_idle_generator(idle_inserter)
tb.set_backpressure_generator(backpressure_inserter)
for length in range(1, byte_lanes*2):
for offset in range(byte_lanes):
tb.log.info("length %d, offset %d", length, offset)
addr = offset+0x1000
test_data = bytearray([x % 256 for x in range(length)])
tb.axil_ram.write(addr-128, b'\xaa'*(length+256))
await tb.axil_master.write(addr, test_data)
tb.log.debug("%s", tb.axil_ram.hexdump_str((addr & ~0xf)-16, (((addr & 0xf)+length-1) & ~0xf)+48))
assert tb.axil_ram.read(addr, length) == test_data
assert tb.axil_ram.read(addr-1, 1) == b'\xaa'
assert tb.axil_ram.read(addr+length, 1) == b'\xaa'
await RisingEdge(dut.clk)
await RisingEdge(dut.clk)
async def run_test_read(dut, data_in=None, idle_inserter=None, backpressure_inserter=None):
tb = TB(dut)
byte_lanes = tb.axil_master.write_if.byte_lanes
await tb.cycle_reset()
tb.set_idle_generator(idle_inserter)
tb.set_backpressure_generator(backpressure_inserter)
for length in range(1, byte_lanes*2):
for offset in range(byte_lanes):
tb.log.info("length %d, offset %d", length, offset)
addr = offset+0x1000
test_data = bytearray([x % 256 for x in range(length)])
tb.axil_ram.write(addr, test_data)
data = await tb.axil_master.read(addr, length)
assert data.data == test_data
await RisingEdge(dut.clk)
await RisingEdge(dut.clk)
async def run_stress_test(dut, idle_inserter=None, backpressure_inserter=None):
tb = TB(dut)
await tb.cycle_reset()
tb.set_idle_generator(idle_inserter)
tb.set_backpressure_generator(backpressure_inserter)
async def worker(master, offset, aperture, count=16):
for k in range(count):
length = random.randint(1, min(32, aperture))
addr = offset+random.randint(0, aperture-length)
test_data = bytearray([x % 256 for x in range(length)])
await Timer(random.randint(1, 100), 'ns')
await master.write(addr, test_data)
await Timer(random.randint(1, 100), 'ns')
data = await master.read(addr, length)
assert data.data == test_data
workers = []
for k in range(16):
workers.append(cocotb.start_soon(worker(tb.axil_master, k*0x1000, 0x1000, count=16)))
while workers:
await workers.pop(0).join()
await RisingEdge(dut.clk)
await RisingEdge(dut.clk)
def cycle_pause():
return itertools.cycle([1, 1, 1, 0])
if cocotb.SIM_NAME:
for test in [run_test_write, run_test_read]:
factory = TestFactory(test)
factory.add_option("idle_inserter", [None, cycle_pause])
factory.add_option("backpressure_inserter", [None, cycle_pause])
factory.generate_tests()
factory = TestFactory(run_stress_test)
factory.generate_tests()
# cocotb-test
tests_dir = os.path.abspath(os.path.dirname(__file__))
rtl_dir = os.path.abspath(os.path.join(tests_dir, '..', '..', 'rtl'))
@pytest.mark.parametrize("m_data_width", [8, 16, 32])
@pytest.mark.parametrize("s_data_width", [8, 16, 32])
def test_axil_adapter(request, s_data_width, m_data_width):
dut = "axil_adapter"
module = os.path.splitext(os.path.basename(__file__))[0]
toplevel = dut
verilog_sources = [
os.path.join(rtl_dir, f"{dut}.v"),
os.path.join(rtl_dir, f"{dut}_rd.v"),
os.path.join(rtl_dir, f"{dut}_wr.v"),
]
parameters = {}
parameters['ADDR_WIDTH'] = 32
parameters['S_DATA_WIDTH'] = s_data_width
parameters['S_STRB_WIDTH'] = parameters['S_DATA_WIDTH'] // 8
parameters['M_DATA_WIDTH'] = m_data_width
parameters['M_STRB_WIDTH'] = parameters['M_DATA_WIDTH'] // 8
extra_env = {f'PARAM_{k}': str(v) for k, v in parameters.items()}
sim_build = os.path.join(tests_dir, "sim_build",
request.node.name.replace('[', '-').replace(']', ''))
cocotb_test.simulator.run(
python_search=[tests_dir],
verilog_sources=verilog_sources,
toplevel=toplevel,
module=module,
parameters=parameters,
sim_build=sim_build,
extra_env=extra_env,
)
| [
"alex@alexforencich.com"
] | alex@alexforencich.com |
4a99b17c6827beae800f252f9d87fbf526613a4f | 747febe786dd6b7fd6c63cfe73dbe3023354daa8 | /src/the_tale/the_tale/game/bills/bills/road_create.py | 0e9a08f645d14d0441f1d5ba0847b0ad04284bb8 | [
"BSD-3-Clause"
] | permissive | the-tale/the-tale | 4e4b8d91dc873a5fb935fe58e9721a877baa6d3f | e8450bd2332344da805b1851e728da5a3e5bf0ef | refs/heads/develop | 2023-08-01T13:53:46.835667 | 2022-12-25T18:04:56 | 2022-12-25T18:04:56 | 1,949,167 | 98 | 52 | BSD-3-Clause | 2023-02-15T18:57:33 | 2011-06-24T18:49:48 | Python | UTF-8 | Python | false | false | 5,079 | py |
import smart_imports
smart_imports.all()
class BaseForm(forms.BaseUserForm):
place_1 = utils_fields.ChoiceField(label='Первый город')
place_2 = utils_fields.ChoiceField(label='Второй город')
path = utils_fields.CharField(label='Путь')
def __init__(self, *args, **kwargs):
super(BaseForm, self).__init__(*args, **kwargs)
self.fields['place_1'].choices = places_storage.places.get_choices()
self.fields['place_2'].choices = places_storage.places.get_choices()
def clean(self):
cleaned_data = super().clean()
place_1 = places_storage.places.get(int(cleaned_data['place_1']))
place_2 = places_storage.places.get(int(cleaned_data['place_2']))
if roads_logic.road_between_places(place_1, place_2) is not None:
raise django_forms.ValidationError('Дорога между городами уже проложена')
if place_1.id == place_2.id:
raise django_forms.ValidationError('Дорогу можно проложить только между разными городами')
base_road_bill.check_road_correctness(place_1, place_2, cleaned_data['path'])
class UserForm(BaseForm):
pass
class ModeratorForm(BaseForm, forms.ModeratorFormMixin):
pass
class RoadCreate(base_bill.BaseBill):
type = relations.BILL_TYPE.ROAD_CREATE
UserForm = UserForm
ModeratorForm = ModeratorForm
CAPTION = 'Строительство дороги'
DESCRIPTION = 'Создаёт дорогу между указанными городами. Подробнее про дороги можно узнать в Путеводителе (раздел о городах).'
def __init__(self, place_1_id=None, place_2_id=None, path=None, old_place_1_name_forms=None, old_place_2_name_forms=None):
super().__init__()
self.place_1_id = place_1_id
self.place_2_id = place_2_id
self.path = path
self.old_place_1_name_forms = old_place_1_name_forms
self.old_place_2_name_forms = old_place_2_name_forms
if self.old_place_1_name_forms is None and self.place_1_id is not None:
self.old_place_1_name_forms = self.place_1.utg_name
if self.old_place_2_name_forms is None and self.place_2_id is not None:
self.old_place_2_name_forms = self.place_2.utg_name
@property
def place_1(self):
return places_storage.places[self.place_1_id]
@property
def place_2(self):
return places_storage.places[self.place_2_id]
@property
def actors(self):
return [self.place_1, self.place_2]
def user_form_initials(self):
return {'place_1': self.place_1_id,
'place_2': self.place_2_id,
'path': self.path}
@property
def place_1_name_changed(self):
return self.old_place_1_name != self.place_1.name
@property
def place_2_name_changed(self):
return self.old_place_2_name != self.place_2.name
@property
def old_place_1_name(self):
return self.old_place_1_name_forms.normal_form()
@property
def old_place_2_name(self):
return self.old_place_2_name_forms.normal_form()
def initialize_with_form(self, user_form):
self.place_1_id = int(user_form.c.place_1)
self.place_2_id = int(user_form.c.place_2)
self.path = user_form.c.path
self.old_place_1_name_forms = self.place_1.utg_name
self.old_place_2_name_forms = self.place_2.utg_name
def has_meaning(self):
if roads_logic.road_between_places(self.place_1, self.place_2) is not None:
return False
if self.place_1_id == self.place_2_id:
return False
path_suitables = roads_logic.is_path_suitable_for_road(start_x=self.place_1.x,
start_y=self.place_1.y,
path=self.path)
return path_suitables.is_NO_ERRORS
def apply(self, bill=None):
if not self.has_meaning():
return
roads_logic.create_road(place_1=self.place_1,
place_2=self.place_2,
path=self.path)
def serialize(self):
return {'type': self.type.name.lower(),
'place_1_id': self.place_1_id,
'place_2_id': self.place_2_id,
'old_place_1_name_forms': self.old_place_1_name_forms.serialize(),
'old_place_2_name_forms': self.old_place_2_name_forms.serialize(),
'path': self.path}
@classmethod
def deserialize(cls, data):
obj = cls()
obj.place_1_id = data['place_1_id']
obj.place_2_id = data['place_2_id']
obj.old_place_1_name_forms = utg_words.Word.deserialize(data['old_place_1_name_forms'])
obj.old_place_2_name_forms = utg_words.Word.deserialize(data['old_place_2_name_forms'])
obj.path = data['path']
return obj
| [
"a.eletsky@gmail.com"
] | a.eletsky@gmail.com |
59b493691d58195276d7e7cd9f9a4409e12075e2 | 1aae6e3f765fa6e640c8672c81f3432799e785b4 | /py_workspace/sample42.py | af0f30160da91508853a016e494cf928d7527a32 | [] | no_license | Taeseong-429/All_workspace | 1e66759a3e36a1ac5fd1681b9ba1a8797fa9fce1 | 8db563f4d4915a4c7566b735aa228f3172222c7b | refs/heads/master | 2021-04-07T21:16:34.314519 | 2020-03-20T09:00:36 | 2020-03-20T09:00:36 | 248,709,081 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 378 | py | # sample42.py
def add_and_mul(a, b):
return (a + b, a * b)
return 1
pass
result = add_and_mul(1, 2)
( addResult, mulResult ) = add_and_mul(1, 2)
print(addResult)
print(mulResult)
print('-'*30)
def say_myself(name, man=True, old=10):
print('- say_myself({}, {}, {})'.format(name, man, old))
pass
say_myself('Yoseph', 24)
say_myself('Yoseph', 24, False) | [
"ghkdxotjd3@gmail.com"
] | ghkdxotjd3@gmail.com |
2e69f527333e0c8ce675f14dc43273f05472cfca | ea9e9756f6b5b7022015049d92c399ee03cfde67 | /电压力锅/接口代码/DYLG_Interface/cj_interface/test_procedure2.py | a52d17f4ce07f528cd466241ac171c549d1670ad | [] | no_license | Godlovesli/jiaojiewendang | 724e0f01028cc5cbcc9ce5807bd057e15172eb91 | 579551eabfc83226804ccfbf8b868192614d7166 | refs/heads/master | 2020-04-05T03:38:41.212627 | 2018-11-07T09:31:51 | 2018-11-07T09:31:51 | 156,521,937 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,372 | py | #coding:utf-8
# __author__ = 'feng'
from base.base import MyTest
import json
import sys
reload(sys)
sys.setdefaultencoding('utf-8')
class procedure2Test(MyTest):
'''流程2'''
# 取消收藏-获取我的模式最近使用的集合 不存在
@classmethod
def setUpClass(cls):
pass
def test_procedure2_success(self):
'''所有参数都传'''
recipeid=1297
url_path1 = '/v1/recipe/manager/op'
payload1 = {'deviceid': '57357285', 'recipeid':recipeid, 'flag': 0}
r1 = self.myhttp('POST',
url_path1,
payload1,
)
print r1
js1 = json.loads(r1)
self.assertEqual(js1['code'], 1)
url_path = '/v1/recipe/manager/mylist'
payload = {'deviceid': '57357285', 'pageno': 1, 'perpage': 10}
r = self.myhttp('GET',
url_path,
payload,
)
print r
js = json.loads(r)
self.assertEqual(js['code'], 1)
ID = []
for i in range(len(js['result'])):
print js['result'][i]['recipe']['name']
print js['result'][i]['recipe']['id']
ID.append(js['result'][i]['recipe']['id'])
print ID
print recipeid in ID
self.assertIs(recipeid in ID,False)
| [
"1107095622@qq.com"
] | 1107095622@qq.com |
d419dd05a06211fd85f9848f1d8ed171e0be2c99 | e70e2f24eb39601e91a903ebb52885b35620cf48 | /setup.py | 275c0d2305f1dd0e7c09d50335af169c52571b6d | [
"MIT"
] | permissive | gaybro8777/python-benedict | b4eac1e138d6ff7dd7c7b2ae4685aea8b6400762 | 3208efb9937f025becf5797f6282a81e5bc2ec42 | refs/heads/master | 2022-03-24T20:24:44.747486 | 2019-12-13T16:55:53 | 2019-12-13T16:55:53 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,519 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from setuptools import find_packages, setup
import os
exec(open('benedict/metadata.py').read())
github_url = 'https://github.com/fabiocaccamo'
package_name = 'python-benedict'
package_path = os.path.abspath(os.path.dirname(__file__))
long_description_file_path = os.path.join(package_path, 'README.md')
long_description_content_type = 'text/markdown'
long_description = ''
try:
with open(long_description_file_path) as f:
long_description = f.read()
except IOError:
pass
setup(
name=package_name,
packages=find_packages(exclude=['contrib', 'docs', 'tests*']),
include_package_data=True,
version=__version__,
description=__description__,
long_description=long_description,
long_description_content_type=long_description_content_type,
author=__author__,
author_email=__email__,
url='{}/{}'.format(github_url, package_name),
download_url='{}/{}/archive/{}.tar.gz'.format(
github_url, package_name, __version__),
keywords=[
'python', 'dictionary', 'dict', 'subclass', 'extended',
'benedict', 'io', 'read', 'write', 'parse', 'keypath',
'utility', 'data', 'base64', 'json', 'query-string',
'toml', 'xml', 'yaml', 'clean', 'clone', 'deepclone',
'deepupdate', 'dump', 'filter', 'flatten', 'invert',
'merge', 'move', 'remove', 'subset', 'swap', 'unique',
],
install_requires=[
'ftfy==4.4.3;python_version<"3.4"',
'ftfy;python_version>"2.7"',
'mailchecker',
'phonenumbers',
'python-dateutil',
'python-slugify',
'pyyaml',
'requests',
'six',
'toml',
'xmltodict',
],
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
'Topic :: Software Development :: Build Tools',
],
license=__license__,
test_suite='tests'
)
| [
"fabio.caccamo@gmail.com"
] | fabio.caccamo@gmail.com |
cbb09a89cbd15f2f1b8bb4ba4431b3cb86426946 | f1f11eb0366392014d0d54c6f7b8e340801192cd | /conf.py | b3d61f6e24aee137edda9397845bcac73aaa3d70 | [] | no_license | aclark4life/blog | f1c72375ef53081e4fc8b395bdd7402118a16181 | f1b76f43184a4176614b8f1cb213e2b988d579b2 | refs/heads/main | 2023-01-07T14:27:28.191440 | 2022-12-31T18:50:22 | 2022-12-31T18:50:22 | 44,667,790 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,473 | py | # Configuration file for the Sphinx documentation builder.
#
# This file only contains a selection of the most common options. For a full
# list see the documentation:
# https://www.sphinx-doc.org/en/master/usage/configuration.html
# -- Path setup --------------------------------------------------------------
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
# import os
# import sys
# sys.path.insert(0, os.path.abspath('.'))
# -- Project information -----------------------------------------------------
project = "Blog"
copyright = " 2007-2023 Jeffrey A Clark (Alex)"
author = " Jeffrey A Clark (Alex)"
# The short X.Y version
version = ""
# The full version, including alpha/beta/rc tags
release = ""
# -- General configuration ---------------------------------------------------
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = []
# Add any paths that contain templates here, relative to this directory.
templates_path = ["_templates"]
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This pattern also affects html_static_path and html_extra_path.
exclude_patterns = ["_build", "Thumbs.db", ".DS_Store", "lib"]
# -- Options for HTML output -------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = "alabaster"
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ["_static"]
# https://alabaster.readthedocs.io/en/latest/customization.html
html_theme_options = {
"show_powered_by": False,
"show_relbars": True,
"logo_name": True,
"font_family": "Helvetica",
"analytics_id": "UA-175892000-1",
}
html_sidebars = {
"**": ["about.html", "searchbox.html", "donate.html", "navigation.html",]
}
html_show_sourcelink = False
html_permalinks = True
html_permalinks_icon = "📌"
html_favicon = "images/alexjaxx-crop.png"
html_title = "Blog"
| [
"aclark@aclark.net"
] | aclark@aclark.net |
fe6004c5ded1a6057b21487b8110916fd2ccaf4e | 531c47c15b97cbcb263ec86821d7f258c81c0aaf | /sdk/identity/azure-identity/tests/test_live.py | 20521b54a2c097d73754424b713ab72dfd2ba2db | [
"LicenseRef-scancode-generic-cla",
"LGPL-2.1-or-later",
"MIT"
] | permissive | YijunXieMS/azure-sdk-for-python | be364d3b88204fd3c7d223df23756386ff7a3361 | f779de8e53dbec033f98f976284e6d9491fd60b3 | refs/heads/master | 2021-07-15T18:06:28.748507 | 2020-09-04T15:48:52 | 2020-09-04T15:48:52 | 205,457,088 | 1 | 2 | MIT | 2020-06-16T16:38:15 | 2019-08-30T21:08:55 | Python | UTF-8 | Python | false | false | 2,485 | py | # ------------------------------------
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT License.
# ------------------------------------
import pytest
from azure.identity import (
DefaultAzureCredential,
CertificateCredential,
ClientSecretCredential,
DeviceCodeCredential,
InteractiveBrowserCredential,
UsernamePasswordCredential,
)
from azure.identity._constants import AZURE_CLI_CLIENT_ID
ARM_SCOPE = "https://management.azure.com/.default"
def get_token(credential):
token = credential.get_token(ARM_SCOPE)
assert token
assert token.token
assert token.expires_on
def test_certificate_credential(live_certificate):
credential = CertificateCredential(
live_certificate["tenant_id"], live_certificate["client_id"], live_certificate["cert_path"]
)
get_token(credential)
def test_certificate_credential_with_password(live_certificate_with_password):
credential = CertificateCredential(
live_certificate_with_password["tenant_id"],
live_certificate_with_password["client_id"],
live_certificate_with_password["cert_path"],
password=live_certificate_with_password["password"],
)
get_token(credential)
def test_client_secret_credential(live_service_principal):
credential = ClientSecretCredential(
live_service_principal["tenant_id"],
live_service_principal["client_id"],
live_service_principal["client_secret"],
)
get_token(credential)
def test_default_credential(live_service_principal):
credential = DefaultAzureCredential()
get_token(credential)
def test_username_password_auth(live_user_details):
credential = UsernamePasswordCredential(
client_id=live_user_details["client_id"],
username=live_user_details["username"],
password=live_user_details["password"],
tenant_id=live_user_details["tenant"],
)
get_token(credential)
@pytest.mark.manual
@pytest.mark.prints
def test_device_code():
import webbrowser
def prompt(url, user_code, _):
print("opening a browser to '{}', enter device code {}".format(url, user_code))
webbrowser.open_new_tab(url)
credential = DeviceCodeCredential(client_id=AZURE_CLI_CLIENT_ID, prompt_callback=prompt, timeout=40)
get_token(credential)
@pytest.mark.manual
def test_browser_auth():
credential = InteractiveBrowserCredential(client_id=AZURE_CLI_CLIENT_ID, timeout=40)
get_token(credential)
| [
"noreply@github.com"
] | YijunXieMS.noreply@github.com |
3c788bf61cc0c4271136cc41f6187de99ba514ca | 8cae40932bd9f928744f94a0f5575e4ef43b398b | /photos/tests.py | d6d22f5d0141a5e89eab593c138f7f585a37767e | [
"MIT"
] | permissive | wilbrone/gallery | c3a2bb63cdbba9d951f47261ab225ce74441b16b | 709c189394f705e4834eeb3e66e66a82d2b072bb | refs/heads/master | 2022-12-14T19:09:04.781823 | 2020-02-03T13:26:19 | 2020-02-03T13:26:19 | 237,160,725 | 0 | 0 | MIT | 2022-12-08T01:07:17 | 2020-01-30T07:25:54 | Python | UTF-8 | Python | false | false | 1,783 | py | from django.test import TestCase
from .models import Image,Category,Location
# Create your tests here.
# Create your tests here.
class LocationTestClass(TestCase):
# Set up method
def setUp(self):
self.new_location = Location(loct = 'Nairobi')
# Testing instance
def test_instance(self):
self.assertTrue(isinstance(self.new_location,Location))
# Testing Save Method
def test_save_method(self):
self.new_location.save()
location = Location.objects.all()
self.assertTrue(len(location) > 0)
def test_del_location(self):
locations = Location.del_location(self.new_location.id)
self.assertTrue is None
class CategoryTestClass(TestCase):
def setUp(self):
# Creating a new editor and saving it
self.new_category = Category(cat = 'Food')
def test_save(self):
self.new_category.save()
category = Category.objects.all()
self.assertTrue(len(category) > 0)
def test_del_category(self):
categories = Category.del_category(self.new_category.id)
self.assertTrue is None
class ImageTestClass(TestCase):
def SetUp(self):
self.image = Image(title = 'BB', description ='Test Description', location = self.new_location, category = self.new_caategory, image ='/media/images/ilnur-kalimullin-kP1AxmCyEXM-unsplash2.jpg')
self.image.save_image()
images = Image.objects.all()
self.assertTrue(len(images) > 0)
Image.del_photo(self.image.id)
images = Image.objects.all()
self.assertTrue(len(images) == 0)
def tearDown(self):
Location.objects.all().delete()
Category.objects.all().delete()
Image.objects.all().delete() | [
"wilbroneokoth@gmail.com"
] | wilbroneokoth@gmail.com |
00f7b93bef70d9ac27338730382bb0e56e75876a | 33c26ef945d580fde717f55970fe472b6c3b26ae | /virtual/bin/wheel | 85a86eb2874a495a52bc10cd72b2b6ebbb46bc91 | [] | no_license | dicksonkariuki/Tribune | ba4bddc01840f8f03d7275fd491c0a6a012dc5d5 | 9abcd52dcf59306abb66c4675a65d5f3da0a70b2 | refs/heads/master | 2023-05-01T09:05:45.138398 | 2019-11-06T09:54:36 | 2019-11-06T09:54:36 | 219,964,145 | 0 | 0 | null | 2023-04-21T20:41:05 | 2019-11-06T09:51:10 | Python | UTF-8 | Python | false | false | 278 | #!/home/moringa/Documents/moringa-school-projects/The-Moringa-Tribune/virtual/bin/python
# -*- coding: utf-8 -*-
import re
import sys
from wheel.cli import main
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
sys.exit(main())
| [
"dicksonkariuki4@gmail.com"
] | dicksonkariuki4@gmail.com | |
ac4987895cc9644b284cb80356cf569279b7bdff | 17311c448e5974e16d3efadd991e41ecb24fafd2 | /Code/Lane/django/lab02-todo/todoapp/urls.py | bb82b3e90580b8e890cee6b07399a948e6e22685 | [] | no_license | drewherron/class_sheep | 77a7f73147bc36e65fc50d38525032cb78659d34 | 51b3ee13d2a7df5d000fa5e087b8bd4fe50669fd | refs/heads/master | 2020-05-31T14:30:51.719008 | 2019-05-31T17:25:44 | 2019-05-31T17:25:44 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 278 | py | from django.urls import path
from . import views
app_name = 'todoapp'
urlpatterns = [
path('index/', views.index, name='index'),
path('save_todo/', views.save_todo, name='save_todo'),
path('completed_todo/<int:id_num>/', views.completed_todo, name='completed')
]
| [
"you@example.com"
] | you@example.com |
1f2ab55a14ac801c4f5a822b1094026386d2685a | f30b91db647dca1f77fffa4b7e26b6c6a68abbc6 | /6_kyu/Weird IPv6 hex string parsing/solution.py | 9242f4e57b9622101bef277e051521b18de3b38f | [] | no_license | estraviz/codewars | 73caf95519eaac6f34962b8ade543bf4417df5b7 | 5f8685e883cb78381c528a0988f2b5cad6c129c2 | refs/heads/master | 2023-05-13T07:57:43.165290 | 2023-05-08T21:50:39 | 2023-05-08T21:50:39 | 159,744,593 | 10 | 55 | null | null | null | null | UTF-8 | Python | false | false | 209 | py | """Weird IPv6 hex string parsing
"""
def parse_IPv6(iPv6):
output = ""
i = 0
while i < len(iPv6):
output += str(sum(int(n, 16) for n in iPv6[i : i + 4]))
i += 5
return output
| [
"javier.estraviz@gmail.com"
] | javier.estraviz@gmail.com |
bfa9cc17f57de63846a9468124c53bbbc4d009df | d2ea8079864f6f0b99937645c2d4cab8817e75f8 | /01_lekcja2/HOME4FormatowanieZmiennych.py | c9ebc4ddae9a14a1d9ea7bbeb81ef3c5fb191e78 | [] | no_license | FlyingMedusa/PythonCourse | ea960ab20415180d6e164abf4af5619ad9d256fb | 4be6ffd1054b25285205d19987fb0fade0f72c14 | refs/heads/master | 2022-04-09T15:59:38.742786 | 2020-02-12T17:49:56 | 2020-02-12T17:49:56 | 213,706,789 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 217 | py | liczby = input("Podaj dowolną wartość liczbową:")
tekst = input("Podaj dowolną wartość tekstową: ")
zdanie = "Twoja wartość liczbowa to: {0}, a wartość tekstowa to: {1}"
print(zdanie.format(liczby, tekst)) | [
"sleboda.m98@gmail.com"
] | sleboda.m98@gmail.com |
b0d565ec05650adf07259593055ae6940a4593c5 | 72bfe8c799f55f42bde0659f1ee2d36d2adc8265 | /setup.py | ce325459d66ebb51bf2c75c18751aa517c71c455 | [
"LicenseRef-scancode-unknown-license-reference",
"BSD-3-Clause"
] | permissive | amjames/QCEngine | b8e82c7fd805ae7adb5f4f8f1729cf4a9e5cc7b9 | 78cf86199dea6b2a201bec193bda9e0f1f3e943e | refs/heads/master | 2020-04-19T05:49:56.907384 | 2019-01-28T14:46:16 | 2019-01-28T14:46:16 | 167,999,832 | 1 | 0 | null | 2019-01-28T16:59:23 | 2019-01-28T16:59:22 | null | UTF-8 | Python | false | false | 1,494 | py | import setuptools
import versioneer
if __name__ == "__main__":
setuptools.setup(
name='qcengine',
description='Compute wrapper for Quantum Chemistry Schema input/output for a variety of programs.',
author='Daniel G. A. Smith',
author_email='dgasmith@vt.edu',
url="https://github.com/MolSSI/QCEngine",
license='BSD-3C',
version=versioneer.get_version(),
cmdclass=versioneer.get_cmdclass(),
packages=setuptools.find_packages(),
install_requires=[
'pyyaml',
'py-cpuinfo',
'psutil',
'qcelemental>=0.2.1',
'pydantic>=0.18.0'
],
entry_points={"console_scripts": [
"qcengine=qcengine.cli:main",
]},
extras_require={
'docs': [
'sphinx==1.2.3', # autodoc was broken in 1.3.1
'sphinxcontrib-napoleon',
'sphinx_rtd_theme',
'numpydoc',
],
'tests': [
'pytest',
'pytest-cov',
],
},
tests_require=[
'pytest',
'pytest-cov',
],
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Science/Research',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
],
zip_safe=False,
long_description="""
"""
)
| [
"malorian@me.com"
] | malorian@me.com |
8c5488f5d7727838d866da293879064bdc8dd41e | 938a496fe78d5538af94017c78a11615a8498682 | /algorithms/801-900/883.projection-area-of-3d-shapes.py | 98d1d6790c062047a66e1a69bb9974492b032e81 | [] | no_license | huilizhou/Leetcode-pyhton | 261280044d15d0baeb227248ade675177efdb297 | 6ae85bf79c5a21735e3c245c0c256f29c1c60926 | refs/heads/master | 2020-03-28T15:57:52.762162 | 2019-11-26T06:14:13 | 2019-11-26T06:14:13 | 148,644,059 | 8 | 1 | null | null | null | null | UTF-8 | Python | false | false | 766 | py | class Solution(object):
def projectionArea(self, grid):
"""
:type grid: List[List[int]]
:rtype: int
"""
# 俯视图所有非零值的和
# 主视图,行的最大值之和
# 左视图,列的最大值之和
N = len(grid)
ans = 0
for i in range(N):
best_row = 0 # max of grid[i][j]
best_col = 0 # max of grid[j][i]
for j in range(N):
if grid[i][j]:
ans += 1 # top shadow
best_row = max(best_row, grid[i][j])
best_col = max(best_col, grid[j][i])
ans += best_row + best_col
return ans
print(Solution().projectionArea([[1, 1, 1], [1, 0, 1], [1, 1, 1]]))
| [
"2540278344@qq.com"
] | 2540278344@qq.com |
c901eedb233c4c727ab58cad6745cb7d4840df98 | 8b5d68c9398186cae64dbcc5b293d62d69e1921d | /src/python/knowledge_base/resolvers/attach_geoname_resolver.py | 42fdadeed8e74f4f802b85ddae0c9b6b8b0c5637 | [
"Apache-2.0"
] | permissive | reynoldsm88/Hume | ec99df21e9b9651ec3cacfb8655a510ba567abc9 | 79a4ae3b116fbf7c9428e75a651753833e5bc137 | refs/heads/master | 2020-07-24T21:28:39.709145 | 2019-07-10T15:43:24 | 2019-07-10T15:43:24 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,419 | py | from knowledge_base import KnowledgeBase
from kb_resolver import KBResolver
import io
class AttachGeoNameResolver(KBResolver):
def __init__(self):
super(AttachGeoNameResolver,self).__init__()
def resolve(self, kb, ua_south_sudan_geoid_txt_path):
print "AttachGeoNameResolver RESOLVE"
resolved_kb = KnowledgeBase()
super(AttachGeoNameResolver, self).copy_all(resolved_kb, kb)
geo_mapping_db = dict()
with io.open(ua_south_sudan_geoid_txt_path, 'r', encoding="utf-8") as fp:
for i in fp:
i = i.strip()
unpacked = i.split(" ")
geoname = unpacked[0]
geoid = unpacked[1]
geo_mapping_db[geoname] = geoid
for entid,kb_entity in resolved_kb.entid_to_kb_entity.items():
for entity_type in kb_entity.entity_type_to_confidence.keys():
entity_type = str(entity_type.split(".")[0])
if (any({'GPE','LOC'}) in {entity_type}) is True:
canonical_name = kb_entity.canonical_name
if canonical_name is not None:
geonameid_from_ua = geo_mapping_db.get(canonical_name.strip().lower().replace(" ","_"),None)
if geonameid_from_ua is not None:
kb_entity.properties["geonameid"] = geonameid_from_ua
return resolved_kb
| [
"hqiu@bbn.com"
] | hqiu@bbn.com |
828f1c0ee71cb6e8f4b7934360e5902871c5bfdd | de24f83a5e3768a2638ebcf13cbe717e75740168 | /moodledata/vpl_data/86/usersdata/231/57366/submittedfiles/pico.py | dd0a8e31adb78e17a0e1f528dda122492f2b67fa | [] | no_license | rafaelperazzo/programacao-web | 95643423a35c44613b0f64bed05bd34780fe2436 | 170dd5440afb9ee68a973f3de13a99aa4c735d79 | refs/heads/master | 2021-01-12T14:06:25.773146 | 2017-12-22T16:05:45 | 2017-12-22T16:05:45 | 69,566,344 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 442 | py | # -*- coding: utf-8 -*-
def pico(lista):
    # Count interior elements that are strict local maxima ("picos"):
    # an element is a peak when it is greater than both neighbours.
    #
    # Fixes in this version:
    # * the original read lista[i+1] at the last index -> IndexError;
    # * both branches tested the same condition (lista[i] <= lista[i+1]),
    #   so the count was meaningless.
    cont = 0
    for i in range(1, len(lista) - 1):
        if lista[i] > lista[i - 1] and lista[i] > lista[i + 1]:
            cont = cont + 1
    return (cont)
# Read n values from the user and report whether the list has a peak.
n = int(input('Digite a quantidade de elementos da lista: '))
a = []
for i in range(0, n, 1):
    b = int(input('b:'))
    a.append(b)
# Bug fix: the original tested `pico >= 1`, comparing the function object
# itself (always truthy) instead of calling it with the list.
if pico(a) >= 1:
    print('S')
else:
    print('N')
| [
"rafael.mota@ufca.edu.br"
] | rafael.mota@ufca.edu.br |
6c6c0d7ddc8cd4f0ebb5e79be2a06dda1db8751d | 25ebf226893b44dd8a6b1b85cf80864579372892 | /divide-two-integers/Accepted/2-28-2021, 1:15:43 AM/Solution.py | 792498a0dd6085946505aa991d76b6d56acd656c | [] | no_license | TianrunCheng/LeetcodeSubmissions | db15f5a1a8e1bbecefc45cb0b2b5fbaa036aa6f5 | 00a5403f1950e039ccc370cb266b752faebb8e79 | refs/heads/main | 2023-06-29T21:51:43.029300 | 2021-07-22T03:12:15 | 2021-07-22T03:12:15 | 388,305,775 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,059 | py | // https://leetcode.com/problems/divide-two-integers
class Solution:
def divide(self, dividend: int, divisor: int) -> int:
neg = False
if dividend < 0:
dividend = 0 - dividend
neg = not neg
if divisor < 0:
divisor = 0 - divisor
neg = not neg
powers = [divisor] # record the 2^n * divisor values at index n
while powers[-1] < dividend:
temp = powers[-1] + powers[-1]
powers.append(temp)
bi_quotient = []
for i in range(len(powers)-1, -1, -1):
if dividend >= powers[i]:
if i >= 31 and not neg:
return (2**31 - 1)
bi_quotient.append(1)
dividend = dividend - powers[i]
else:
bi_quotient.append(0)
n = ''.join([str(elem) for elem in bi_quotient])
n = int(n,2)
if neg:
return (-n)
return n
| [
"tc905@georgetown.edu"
] | tc905@georgetown.edu |
7873c4c1781e24fe09d5e3aa34d406221e1d16bc | 53fab060fa262e5d5026e0807d93c75fb81e67b9 | /backup/user_216/ch33_2020_09_15_23_51_28_335676.py | fb6393ecc0b8053e931b77b208f20c2b3642bac9 | [] | no_license | gabriellaec/desoft-analise-exercicios | b77c6999424c5ce7e44086a12589a0ad43d6adca | 01940ab0897aa6005764fc220b900e4d6161d36b | refs/heads/main | 2023-01-31T17:19:42.050628 | 2020-12-16T05:21:31 | 2020-12-16T05:21:31 | 306,735,108 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 507 | py | def primos_entre(a,b):
p = a
contador = 0
while p <= b:
if p == 0 or p == 1:
p += 1
elif p == 2 or p == 3:
p += 1
contador += 1
elif p % 2 == 0:
p += 1
else:
divisor = 3
while divisor > p:
if p % divisor == 0:
p += 1
else:
d += 2
contador += 1
divisor = 3
p += 1
return contador
| [
"you@example.com"
] | you@example.com |
6a0e6a2e3e4af848e7d9a6e89b9404ed3c3a714e | 7c0ad12b6a27ce243beb36e2eea557875adeecbc | /setup.py | f597eb2d08b52b89147c2f2c50dc1b14ce74f647 | [
"MIT"
] | permissive | ahawker/tmnt | 83faa4fedcbe1a512bac52fe2ecce50c53a04288 | 5734b9f24eeebf5b8a350f7e3d89daa4666d595f | refs/heads/master | 2021-01-01T05:35:56.273207 | 2013-04-12T03:32:30 | 2013-04-12T03:32:30 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 907 | py | __author__ = 'Andrew Hawker <andrew.r.hawker@gmail.com>'
import tmnt

# Prefer setuptools; fall back to plain distutils when it is unavailable.
try:
    from setuptools import setup
except ImportError:
    from distutils.core import setup

# Package metadata is taken from the tmnt package itself plus the README
# and LICENSE files shipped next to this script.
setup(
    name=tmnt.__name__,
    version=tmnt.__version__,
    description='Python module for mutation testing.',
    long_description=open('README.md').read(),
    author='Andrew Hawker',
    author_email='andrew.r.hawker@gmail.com',
    url='https://github.com/ahawker/tmnt',
    # NOTE(review): 'license' is conventionally a short identifier such as
    # 'MIT', not the full license text -- confirm before publishing.
    license=open('LICENSE.md').read(),
    package_dir={'tmnt': 'tmnt'},
    packages=['tmnt'],
    test_suite='tests',
    classifiers=(
        'Development Status :: 3 - Alpha',
        'Intended Audience :: Developers',
        'Natural Language :: English',
        'License :: OSI Approved :: MIT License',
        'Programming Language :: Python',
        'Programming Language :: Python :: 2.6',
        'Programming Language :: Python :: 2.7'
    )
)
| [
"andrew.r.hawker@gmail.com"
] | andrew.r.hawker@gmail.com |
0e4c3872aa175f3c3420ce282edce26e2c1f5457 | 47a0ea8da3ef955315974a34b1edf0b5992156ea | /reuserat/users/migrations/0008_auto_20170719_0143.py | e4345a2cffc23b5d17d08a97dd39c5112c39411b | [
"MIT"
] | permissive | jamesaud/ReuseRat | 21b2a0b03ef609c3ed43ab46c0bba7251f6a4934 | 2810edd4f0f4caddc32be7a7e1a6cb45ac2d973f | refs/heads/master | 2021-01-11T15:13:23.874468 | 2018-01-30T11:58:19 | 2018-01-30T11:58:19 | 80,316,934 | 3 | 0 | null | 2017-02-25T21:19:50 | 2017-01-28T23:08:13 | CSS | UTF-8 | Python | false | false | 821 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.10 on 2017-07-19 01:43
from __future__ import unicode_literals
import django.core.validators
from django.db import migrations, models
class Migration(migrations.Migration):
    """Add birth_date and the last four SSN digits to the custom User model."""

    dependencies = [
        ('users', '0007_auto_20170719_0056'),
    ]

    operations = [
        migrations.AddField(
            model_name='user',
            name='birth_date',
            # Nullable so existing user rows migrate without a default.
            field=models.DateField(null=True, verbose_name='Birth Date'),
        ),
        migrations.AddField(
            model_name='user',
            name='ssn_last_four',
            # Regex validator enforces exactly four digits; nullable for
            # the same backfill reason as above.
            field=models.CharField(max_length=4, null=True, validators=[django.core.validators.RegexValidator(code='nomatch', message='Length has to be 4', regex='^[\\d]{4}$')], verbose_name='Last 4 Digits of SSN'),
        ),
    ]
| [
"jamaudre@indiana.edu"
] | jamaudre@indiana.edu |
5cbc4fa756b6f0208fc011cc00f1d9a7230d41aa | c733ba74f431852562be02f24acbc984a39bdf37 | /src/rayoptics/qtgui/ipyconsole.py | a5dfa9a584b6169b21d4f3b130ec96e62c16cecd | [
"BSD-3-Clause"
] | permissive | mniehus/ray-optics | 94f1e724e76777ea27405908b4d8f889ff459890 | 056a961a37ca41bc77c42a79296b2e7c5ced42a7 | refs/heads/master | 2022-06-28T13:26:42.824501 | 2020-05-07T19:23:18 | 2020-05-07T19:23:18 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,846 | py | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
# Copyright © 2018 Michael J. Hayford
""" Support creation of an iPython console, with rayoptics environment
.. Created on Wed Nov 21 21:48:02 2018
.. codeauthor: Michael J. Hayford
"""
from qtconsole.rich_jupyter_widget import RichJupyterWidget
from qtconsole.inprocess import QtInProcessKernelManager
from IPython.lib import guisupport
from rayoptics.gui.appmanager import ModelInfo
def create_ipython_console(app, title, view_width, view_ht):
    """ create a iPython console with a rayoptics environment

    Args:
        app: main application object; must provide ``app_manager``,
            ``add_subwindow`` and ``initial_window_offset``
        title: window title for the new console sub-window
        view_width: initial sub-window width, in pixels
        view_ht: initial sub-window height, in pixels
    """
    opt_model = app.app_manager.model
    # Expose the current models to the console under short aliases; fall
    # back to a minimal namespace when no optical model is loaded yet.
    if opt_model:
        ro_env = {
            'app': app,
            'opm': opt_model,
            'sm': opt_model.seq_model,
            'osp': opt_model.optical_spec,
            'pm': opt_model.parax_model
        }
    else:
        ro_env = {
            'app': app,
            'opm': opt_model
        }
    ro_setup = 'from rayoptics.environment import *'

    # construct the top level widget
    ipy_console = ConsoleWidget()

    # load the environment: run the import line inside the kernel, then
    # push the model aliases into its namespace
    ipy_console.execute_command(ro_setup)
    ipy_console.push_vars(ro_env)

    # wrap the console in a managed sub-window and position/size it
    mi = ModelInfo(opt_model)
    sub_window = app.add_subwindow(ipy_console, mi)
    sub_window.setWindowTitle(title)
    orig_x, orig_y = app.initial_window_offset()
    sub_window.setGeometry(orig_x, orig_y, view_width, view_ht)
    sub_window.show()
class ConsoleWidget(RichJupyterWidget):
    """A Jupyter rich-text console widget backed by an in-process kernel."""

    def __init__(self, customBanner=None, *args, **kwargs):
        """Create the widget; ``customBanner`` replaces the default banner."""
        super().__init__(*args, **kwargs)

        if customBanner is not None:
            self.banner = customBanner

        self.font_size = 6
        # Run the kernel in-process so it shares this application's
        # interpreter (required for push_vars to work on live objects).
        self.kernel_manager = kernel_manager = QtInProcessKernelManager()
        kernel_manager.start_kernel(show_banner=False)
        kernel_manager.kernel.gui = 'qt'
        self.kernel_client = kernel_client = self._kernel_manager.client()
        kernel_client.start_channels()

        def stop():
            # Tear down the kernel and exit the Qt event loop when the
            # user quits the console.
            kernel_client.stop_channels()
            kernel_manager.shutdown_kernel()
            guisupport.get_app_qt().exit()

        self.exit_requested.connect(stop)

    def push_vars(self, variableDict):
        """
        Given a dictionary containing name / value pairs, push those variables
        to the Jupyter console widget
        """
        self.kernel_manager.kernel.shell.push(variableDict)

    def clear(self):
        """
        Clears the terminal
        """
        self._control.clear()

        # self.kernel_manager

    def print_text(self, text):
        """
        Prints some plain text to the console
        """
        self._append_plain_text(text)

    def execute_command(self, command):
        """
        Execute a command in the frame of the console widget
        """
        self._execute(command, False)
| [
"mjhoptics@gmail.com"
] | mjhoptics@gmail.com |
d022f830ef09f9469c8bd22d699aa8babd65d610 | f1ad4b488ce0719365b36de5df062cfb1c80fde1 | /import_export_vote_smart/views.py | 1ead490a180b4a6d69f64580624abcafa2c67bda | [
"MIT",
"LicenseRef-scancode-generic-cla"
] | permissive | DaleMcGrew/WeVoteServer | 59c5a1301e29f93cf1940c7f8f7b3a42a4756df5 | e80ad10d6d20c4057872f545ecd55eaf11eca263 | refs/heads/develop | 2023-08-21T22:16:55.720943 | 2023-08-09T21:07:12 | 2023-08-09T21:07:12 | 134,788,301 | 0 | 0 | MIT | 2023-08-28T01:51:39 | 2018-05-25T01:41:28 | Python | UTF-8 | Python | false | false | 185 | py | # import_export_vote_smart/views.py
# Brought to you by We Vote. Be good.
# -*- coding: UTF-8 -*-
# See also import_export_vote_smart/views_admin.py for views used in the admin area
| [
"dale.mcgrew@wevoteusa.org"
] | dale.mcgrew@wevoteusa.org |
e2333ef903ac607fd66e399fdabffc622802e7d7 | b87f66b13293782321e20c39aebc05defd8d4b48 | /maps/build/Traits/integrationtests/ui/test_ui4.py | 97256d64e81b0243054309491f4b4120ec78294b | [] | no_license | m-elhussieny/code | 5eae020932d935e4d724c2f3d16126a0d42ebf04 | 5466f5858dbd2f1f082fa0d7417b57c8fb068fad | refs/heads/master | 2021-06-13T18:47:08.700053 | 2016-11-01T05:51:06 | 2016-11-01T05:51:06 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,544 | py | #------------------------------------------------------------------------------
# Copyright (c) 2005, Enthought, Inc.
# All rights reserved.
#
# This software is provided without warranty under the terms of the BSD
# license included in /LICENSE.txt and may be redistributed only
# under the conditions described in the aforementioned license. The license
# is also available online at http://www.enthought.com/licenses/BSD.txt
# Thanks for using Enthought open source!
#
# Author: David C. Morrill
# Date: 11/02/2004
# Description: Test case for Traits User Interface
#------------------------------------------------------------------------------
#-------------------------------------------------------------------------------
# Imports:
#-------------------------------------------------------------------------------
import wx
from enthought.traits.api import Trait, HasTraits, Str, Int
from enthought.traits.ui.api import View, Group
from enthought.traits.api import Color
#-------------------------------------------------------------------------------
# Model classes:
#-------------------------------------------------------------------------------
class Employer ( HasTraits ):
company = Str( 'Enthought, Inc.' )
boss = Str( 'eric' )
view = View( 'company', 'boss' )
class Person ( HasTraits ):
name = Str( 'David Morrill' )
age = Int( 39 )
class ExtraPerson ( Person ):
sex = Trait( 'Male', 'Female' )
eye_color = Color
class LocatedPerson ( Person ):
street = Str
city = Str
state = Str
zip = Int( 78663 )
class EmployedPerson ( LocatedPerson ):
employer = Trait( Employer() )
#-------------------------------------------------------------------------------
# View classes:
#-------------------------------------------------------------------------------
class PersonView ( HasTraits ):
view = View( 'name', '<extra>', 'age', kind = 'modal' )
class ExtraPersonView ( PersonView ):
extra = Group( 'sex', 'eye_color' )
class LocatedPersonView ( PersonView ):
extra = Group( 'street', 'city', 'state', 'zip' )
class EmployedPersonView ( LocatedPersonView ):
extra = Group( 'employer', '<extra>' )
#-------------------------------------------------------------------------------
# 'TraitSheetApp' class:
#-------------------------------------------------------------------------------
class TraitSheetApp ( wx.App ):

    #---------------------------------------------------------------------------
    #  Initialize the object:
    #---------------------------------------------------------------------------

    def __init__ ( self ):
        """ Create the wx application (stdout/stderr redirected to
            'debug.log') and immediately enter the event loop.
        """
        wx.InitAllImageHandlers()
        wx.App.__init__( self, 1, 'debug.log' )
        self.MainLoop()

    #---------------------------------------------------------------------------
    #  Handle application initialization:
    #---------------------------------------------------------------------------

    def OnInit ( self ):
        """ Open one modal trait editor per model class, exercising each
            view subclass in turn.
        """
        PersonView().edit_traits( context = Person() )
        ExtraPersonView().edit_traits( context = ExtraPerson() )
        LocatedPersonView().edit_traits( context = LocatedPerson() )
        EmployedPersonView().edit_traits( context = EmployedPerson() )
        return True
#-------------------------------------------------------------------------------
# Main program:
#-------------------------------------------------------------------------------
TraitSheetApp()
| [
"fspaolo@gmail.com"
] | fspaolo@gmail.com |
33dd200031e989d65567292d90d42cc97788dd75 | 48981ef531729e6c133f9e01252a7dc6f4251629 | /tkinter/scripts/setup_buttons_tkinter.py | 7f472269c84dd7dbcb5432b400d39c04dd80df1e | [] | no_license | sharland/python_scripts | f778f3c2dfc2f19592e94472ec9afd5c77ed4052 | 702b543a77ee5361aa73d5068510e1df937210c4 | refs/heads/master | 2021-09-08T13:26:54.123689 | 2021-08-30T21:24:58 | 2021-08-30T21:24:58 | 17,588,312 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 590 | py | from tkinter import *
#instantiates an object of class Tk
root = Tk()
#sets up window title and size
root.title("Buttons everywhere")
root.geometry("500x300")
#create a frame to hold widgets
#this says that app is a frame within root
#in other words you have passed the master ('root') to the constructor of the new object
app = Frame(root)
#invoke the grid manager
app.grid()
button1 = Button(app,text="button 1")
button2 = Button(app,text="button 2")
#and invoke them
button1.grid()
button2.grid()
#sets up window to start listening for events
mainloop()
| [
"sharland@gmail.com"
] | sharland@gmail.com |
deccdc957ae2d6e77f5830a9b672647a360c7ea5 | 15f321878face2af9317363c5f6de1e5ddd9b749 | /solutions_python/Problem_157/158.py | f1ed0d5ad09d3bef466af9435f66b557df6a9d71 | [] | no_license | dr-dos-ok/Code_Jam_Webscraper | c06fd59870842664cd79c41eb460a09553e1c80a | 26a35bf114a3aa30fc4c677ef069d95f41665cc0 | refs/heads/master | 2020-04-06T08:17:40.938460 | 2018-10-14T10:12:47 | 2018-10-14T10:12:47 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,794 | py | #!/usr/bin/env python3
# encoding: utf-8
import sys
import math
from pprint import pprint
def qm(a, b):
    """Multiply two quaternion units.

    Each operand is one of '1', 'i', 'j', 'k', optionally prefixed with
    '-'; the product is returned in the same string form.
    """
    sign = 1
    if a.startswith('-'):
        sign, a = -sign, a[1:]
    if b.startswith('-'):
        sign, b = -sign, b[1:]
    # Cayley table for the quaternion group (row * column).
    table = {
        '1': {'1': '1', 'i': 'i', 'j': 'j', 'k': 'k'},
        'i': {'1': 'i', 'i': '-1', 'j': 'k', 'k': '-j'},
        'j': {'1': 'j', 'i': '-k', 'j': '-1', 'k': 'i'},
        'k': {'1': 'k', 'i': 'j', 'j': '-i', 'k': '-1'},
    }
    product = table[a][b]
    if product.startswith('-'):
        sign, product = -sign, product[1:]
    return product if sign > 0 else '-' + product
def find(s, a, i, p):
    """Scan s from index i, multiplying unit factors into a running product,
    until the product equals target a.

    A '*' character in s stands in for a collapsed block whose precomputed
    product is p.  Returns (True, index-after-match) on success, else
    (False, 0).
    """
    #print(s,a,i)
    r = '1'
    for j in range(i, len(s)):
        if s[j] == '*':
            r = qm(r, p)
        else:
            r = qm(r, s[j])
        if r == a:
            return True, j + 1
    return False, 0
def pow(s, p):
    """Return (product of the units in s) raised to the power p.

    NOTE: shadows the builtin pow() within this module.  Every quaternion
    unit satisfies r**4 == '1', so only p % 4 multiplications are needed.
    """
    r = '1'
    for c in s:
        r = qm(r, c)
    i = r
    p = p % 4
    if p == 0:
        return '1'
    for j in range(1, p):
        i = qm(i, r)
    return i
def solveCase(s, x):
    """Return 'YES' if s repeated x times can be split into three parts
    whose quaternion products are i, j and k (and any remainder multiplies
    to 1), else 'NO'.

    For x >= 17 the middle repetitions are collapsed into one '*' marker
    whose product is pow(s, x - 16), keeping 8 literal copies on each
    side.  NOTE(review): 8 copies per side is presumably enough because
    partial products of repeated blocks cycle with period dividing 4 --
    confirm against the Code Jam 2015 'Dijkstra' analysis.
    """
    if x < 17:
        s = s * x
        p = '1'
    else:
        p = pow(s, x - 16)
        s = s * 8 + '*' + s * 8
    # Greedily find prefixes multiplying to i, then j, then k ...
    f, i = find(s, 'i', 0, p)
    if not f:
        return 'NO'
    f, i = find(s, 'j', i, p)
    if not f:
        return 'NO'
    f, i = find(s, 'k', i, p)
    if not f:
        return 'NO'
    # ... and require the remaining suffix to reduce to 1.
    while i < len(s):
        f, i = find(s, '1', i, p)
        if not f:
            return 'NO'
    return 'YES'
def solve(s):
    """Yield the answer for each test case read from file object s.

    Each case is two lines: 'L X' (L, the string length, is read but not
    needed) followed by the unit string itself.
    """
    t = int(s.readline())
    for i in range(t):
        l, x = [int(k) for k in s.readline().split()]
        yield solveCase(s.readline().strip(), x)
def main(argv=None):
    """Read the input file named in argv[1] and write the answers to
    '<input>.result.txt' in Code Jam 'Case #n: ...' format.
    """
    fileName = argv[1]
    s = open(fileName)
    r = open(fileName + '.result.txt' , 'w')
    result = solve(s)
    for i, case in enumerate(result, 1):
        r.write('Case #' + str(i) + ': ' + case + '\n')
    return 0
if __name__ == '__main__':
status = main(sys.argv)
sys.exit(status) | [
"miliar1732@gmail.com"
] | miliar1732@gmail.com |
a62d683d1c7e3f60e12ba00905dac32a6eda6a5c | 08b531d69db0800f48c55da216d641fd586879dd | /src/BEE2_config.py | c7f161570b7c254d32a220a96df6552914873a12 | [] | no_license | Stendec-UA/BEE2.4 | 93e1c94f4dd59f5fa4b2f8c21a8bb2f95b94ea9d | 4edf923bac4f282a43fdc8f12300162f97dcb73b | refs/heads/master | 2021-01-17T20:10:58.769585 | 2015-09-30T01:03:03 | 2015-09-30T01:03:03 | 45,045,361 | 1 | 0 | null | 2015-10-27T14:10:05 | 2015-10-27T14:10:04 | null | UTF-8 | Python | false | false | 4,253 | py | import os.path
from configparser import ConfigParser
class ConfigFile(ConfigParser):
    """A ConfigParser bound to a file on disk that tracks unsaved changes.

    ``has_changed`` is set whenever a value is added or modified and
    cleared by :meth:`load`/:meth:`save`, so :meth:`save_check` only
    touches disk when something actually changed.
    """

    def __init__(self, filename, root='../config', auto_load=True):
        """Initialise the config file.

        filename is the name of the config file, in the 'root' directory.
        If auto_load is true, this file will immediately be read and parsed.
        """
        super().__init__()
        self.filename = os.path.join(root, filename)
        self.has_changed = False

        if auto_load:
            self.load()

    def load(self):
        """Read and parse the file; missing files fall back to defaults."""
        if self.filename is None:
            return
        try:
            with open(self.filename, 'r') as conf:
                self.read_file(conf)
        except (FileNotFoundError, IOError):
            print('Config "' + self.filename + '" not found! Using defaults...')

        # If we fail, just continue - we just use the default values
        self.has_changed = False

    def save(self):
        """Write our values out to disk."""
        if self.filename is None:
            return
        self.has_changed = False

        # Make sure the directory exists
        folder = os.path.dirname(self.filename)
        if folder:
            os.makedirs(folder, exist_ok=True)

        with open(self.filename, 'w') as conf:
            self.write(conf)

    def save_check(self):
        """Check to see if we have different values, and save if needed."""
        if self.has_changed:
            print('Saving changes in config "' + self.filename + '"!')
            self.save()

    def set_defaults(self, def_settings):
        """Set the default values if the settings file has no values defined."""
        for sect, values in def_settings.items():
            if sect not in self:
                self[sect] = {}
            for key, default in values.items():
                if key not in self[sect]:
                    self[sect][key] = str(default)
        self.save_check()

    def get_val(self, section, value, default):
        """Get the value in the specifed section.

        If either does not exist, set to the default and return it.
        """
        if section not in self:
            self[section] = {}
        if value in self[section]:
            return self[section][value]
        else:
            self.has_changed = True
            self[section][value] = default
            return default

    def getboolean(self, section, value, default=False) -> bool:
        """Get the value in the specified section, coercing to a Boolean.

        If either does not exist, set to the default and return it.
        """
        if section not in self:
            self[section] = {}
        if value in self[section]:
            return super().getboolean(section, value)
        else:
            self.has_changed = True
            self[section][value] = str(int(default))
            return default

    get_bool = getboolean

    def getint(self, section, value, default=0) -> int:
        """Get the value in the specified section, coercing to a Integer.

        If either does not exist, set to the default and return it.
        """
        if section not in self:
            self[section] = {}
        if value in self[section]:
            return super().getint(section, value)
        else:
            self.has_changed = True
            self[section][value] = str(int(default))
            return default

    get_int = getint

    def add_section(self, section):
        """Add a section, marking the file as modified."""
        self.has_changed = True
        super().add_section(section)

    def remove_section(self, section):
        """Remove a section, marking the file as modified."""
        self.has_changed = True
        super().remove_section(section)

    def set(self, section, option, value=None):
        """Set an option, recording whether the stored value changed.

        Bug fix: the original compared old and new values with ``is not``,
        an identity test that is unreliable for strings and always true
        when ``value`` is not a str, so ``has_changed`` was set even when
        the stored value was identical.  Compare the stringified values
        instead, since the parser stores everything as str.
        """
        value = str(value)
        orig_val = self.get(section, option, fallback=None)
        if orig_val != value:
            self.has_changed = True
        super().set(section, option, value)
# Define this here so app modules can easily acess the config
# Don't load it though, since this is imported by VBSP too.
GEN_OPTS = ConfigFile('config.cfg', auto_load=False)
| [
"spencerb21@live.com"
] | spencerb21@live.com |
98d925bda05e947e2d22e13f7d5ce4758ab21c43 | d154fbda4a0d4921a8984468b99618d6072fff67 | /testing/scripts/run_gtest_perf_test.py | e1dd0d5788f096c86821caa4ddc9b244e588407b | [
"BSD-3-Clause"
] | permissive | nano325/chromium | 9bf596f048d9a702451fc6fcb3be6d2b885045a2 | 67c4590289f178cb4c77f61194afff8bf0cb67a5 | refs/heads/master | 2023-01-16T07:43:25.802720 | 2018-07-18T01:14:13 | 2018-07-18T01:14:13 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,739 | py | #!/usr/bin/env python
# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Runs an isolated non-Telemetry perf test .
The main contract is that the caller passes the arguments:
--isolated-script-test-output=[FILENAME]
json is written to that file in the format produced by
common.parse_common_test_results.
--isolated-script-test-chartjson-output=[FILE]
stdout is written to this file containing chart results for the perf dashboard
Optional argument:
--isolated-script-test-filter=[TEST_NAMES]
is a double-colon-separated ("::") list of test names, to run just that subset
of tests. This list is parsed by this harness and sent down via the
--gtest_filter argument.
This script is intended to be the base command invoked by the isolate,
followed by a subsequent non-python executable. It is modeled after
run_gpu_integration_test_as_gtest.py
"""
import argparse
import json
import os
import shutil
import sys
import time
import tempfile
import traceback
import common
def GetChromiumSrcDir():
  """Return the absolute path of the chromium src/ checkout root
  (three levels above this script)."""
  return os.path.abspath(
      os.path.join(os.path.abspath(__file__), '..', '..', '..'))
def GetPerfDir():
  """Return the absolute path of src/tools/perf."""
  return os.path.join(GetChromiumSrcDir(), 'tools', 'perf')
# Add src/tools/perf where generate_legacy_perf_dashboard_json.py lives
sys.path.append(GetPerfDir())
import generate_legacy_perf_dashboard_json
# Add src/testing/ into sys.path for importing xvfb and test_env.
sys.path.append(os.path.join(os.path.dirname(__file__), '..'))
import xvfb
import test_env
# Unfortunately we need to copy these variables from ../test_env.py.
# Importing it and using its get_sandbox_env breaks test runs on Linux
# (it seems to unset DISPLAY).
CHROME_SANDBOX_ENV = 'CHROME_DEVEL_SANDBOX'
CHROME_SANDBOX_PATH = '/opt/chromium/chrome_sandbox'
def IsWindows():
  """Return True when running on a Windows (or cygwin) platform."""
  plat = sys.platform
  return plat.startswith('win') or plat == 'cygwin'
def main():
  """Parse the isolated-script flags, run the perf executable, then write
  the chart json (perf dashboard) and the json test-results files."""
  parser = argparse.ArgumentParser()
  parser.add_argument(
      '--isolated-script-test-output', type=str,
      required=True)
  parser.add_argument(
      '--isolated-script-test-chartjson-output', type=str,
      required=False)
  parser.add_argument(
      '--isolated-script-test-perf-output', type=str,
      required=False)
  parser.add_argument(
      '--isolated-script-test-filter', type=str, required=False)
  parser.add_argument('--xvfb', help='Start xvfb.', action='store_true')
  args, rest_args = parser.parse_known_args()

  rc, charts, output_json = execute_perf_test(args, rest_args, True)

  # TODO(eakuefner): Make isolated_script_test_perf_output mandatory after
  # flipping flag in swarming.
  if args.isolated_script_test_perf_output:
    filename = args.isolated_script_test_perf_output
  else:
    filename = args.isolated_script_test_chartjson_output
  # Write the returned encoded json to a the charts output file
  with open(filename, 'w') as f:
    f.write(charts)

  with open(args.isolated_script_test_output, 'w') as fp:
    json.dump(output_json, fp)

  return rc
def execute_perf_test(args, rest_args, chromium_gtest):
  """Run the gtest-based perf executable named in rest_args[0].

  chromium_gtest selects the chromium test-launcher summary flag instead
  of stock gtest's --gtest_output, and the matching results parser.
  Returns (exit code, chart json string, json test-results dict).
  """
  env = os.environ.copy()
  # Assume we want to set up the sandbox environment variables all the
  # time; doing so is harmless on non-Linux platforms and is needed
  # all the time on Linux.
  env[CHROME_SANDBOX_ENV] = CHROME_SANDBOX_PATH

  rc = 0
  start_time = time.time()
  test_results = {}
  with common.temporary_file() as results_path:
    try:
      executable = rest_args[0]
      extra_flags = []
      if len(rest_args) > 1:
        extra_flags = rest_args[1:]

      # Route the machine-readable test summary to a temp file we parse
      # after the run.
      if chromium_gtest:
        output_flag = '--test-launcher-summary-output'
        output_file = results_path
      else:
        output_flag = '--gtest_output'
        output_file = 'json:%s' % results_path
      assert not any(output_flag in flag for flag in extra_flags), (
          'Duplicate %s flag detected.' % output_flag)
      extra_flags.append('%s=%s' % (output_flag, output_file))

      # These flags are to make sure that test output perf metrics in the log.
      if not '--verbose' in extra_flags:
        extra_flags.append('--verbose')
      if not '--test-launcher-print-test-stdio=always' in extra_flags:
        extra_flags.append('--test-launcher-print-test-stdio=always')
      if args.isolated_script_test_filter:
        filter_list = common.extract_filter_list(
            args.isolated_script_test_filter)
        extra_flags.append('--gtest_filter=' + ':'.join(filter_list))

      if IsWindows():
        executable = '.\%s.exe' % executable
      else:
        executable = './%s' % executable
      with common.temporary_file() as tempfile_path:
        env['CHROME_HEADLESS'] = '1'
        cmd = [executable] + extra_flags
        print ' '.join(cmd)

        if args.xvfb:
          rc = xvfb.run_executable(cmd, env, stdoutfile=tempfile_path)
        else:
          rc = test_env.run_command_with_output(cmd, env=env,
                                                stdoutfile=tempfile_path)

        # Now get the correct json format from the stdout to write to the perf
        # results file
        results_processor = (
            generate_legacy_perf_dashboard_json.LegacyResultsProcessor())
        charts = results_processor.GenerateJsonResults(tempfile_path)
    except Exception:
      traceback.print_exc()
      rc = 1

    # Parse per-test pass/fail out of the summary file, if one was written.
    if os.path.exists(results_path):
      with open(results_path) as f:
        if chromium_gtest:
          func = common.get_chromium_gtest_summary_passes
        else:
          func = common.get_gtest_summary_passes
        test_results = func(json.load(f))

    # Assemble the version-3 json test-results format expected by the
    # isolated-script harness.
    output_json = {
        'version': 3,
        'interrupted': False,
        'path_delimiter': '/',
        'seconds_since_epoch': start_time,
        'num_failures_by_type': {
            'PASS': sum(1 for success in test_results.values() if success),
            'FAIL': sum(1 for success in test_results.values() if not success),
        },
        'tests': {
            test: test_result_entry(success) for (
                test, success) in test_results.items()
        }
    }

  return rc, charts, output_json
def test_result_entry(success):
test = {
'expected': 'PASS',
'actual': 'PASS' if success else 'FAIL',
}
if not success:
test['unexpected'] = True
return test
# This is not really a "script test" so does not need to manually add
# any additional compile targets.
def main_compile_targets(args):
  """Write the (empty) list of extra compile targets as json to args.output."""
  json.dump([], args.output)
if __name__ == '__main__':
# Conform minimally to the protocol defined by ScriptTest.
if 'compile_targets' in sys.argv:
funcs = {
'run': None,
'compile_targets': main_compile_targets,
}
sys.exit(common.run_script(sys.argv[1:], funcs))
sys.exit(main())
| [
"commit-bot@chromium.org"
] | commit-bot@chromium.org |
bd18eb85cdaa3d321cec83e3d2ae3dfc24dd1fcf | 0755a5f8d821b2b02b7f087d26e1433156075f04 | /programmers/level2/소수찾기.py | e213390d3c6631d3550734ae9d9d0a30d9bb394a | [] | no_license | kimevanjunseok/Algorithm | 35fdfdd5a0c5c8ad1029429aba8d6c2320088993 | ea0883964e43779e569a1a2c8a879a246083ef0e | refs/heads/master | 2020-08-17T22:50:13.096789 | 2020-01-07T14:28:56 | 2020-01-07T14:28:56 | 215,720,238 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 663 | py | def copy(L):
T = ["0"] * len(L)
for i in range(len(L)):
T[i] = L[i]
return T
def solution(numbers):
x = int("".join(sorted(list(numbers), reverse=True)))
T = [True] * (x + 1)
save = 0
for i in range(2, int((x) ** 0.5) + 1):
if T[i]:
for j in range(i + i, x + 1, i):
T[j] = False
for i in range(2, x + 1):
if T[i]:
L = copy(numbers)
C = list(str(i))
cnt = 0
for i in C:
if i in L:
L.remove(i)
cnt += 1
if cnt == len(C):
save += 1
return save | [
"jnunseok@gmail.com"
] | jnunseok@gmail.com |
22024d93f7cb7652da595cfe06eff579858c9004 | cc31777830ccbc17347305c40db91afc012977ee | /concepts/strings/words_containing_all_consonants.py | 132ab9a06718f37c257e4d2f2427e954e98d66d9 | [] | no_license | sourcery-ai-bot/library-python | e147b9e5c6baba502de9f7605c5fa1937dbd13f4 | 61472955f4b011caa989b8805be3ed7df19c7aa8 | refs/heads/master | 2022-11-06T20:19:59.056197 | 2020-06-30T20:56:45 | 2020-06-30T20:56:45 | 276,206,925 | 0 | 0 | null | 2020-06-30T20:56:31 | 2020-06-30T20:56:30 | null | UTF-8 | Python | false | false | 330 | py | """ Retrieve all words which only contain consonants. """
words_with_all_consonants = []
with open('concepts/strings/words.txt', 'r') as file:
for line in file:
if not set('aeiou').intersection(line.lower()):
words_with_all_consonants.append(line[0:len(line)-2:])
print(words_with_all_consonants)
| [
"wayne.a.lambert@gmail.com"
] | wayne.a.lambert@gmail.com |
f45e58c501de0b61cb59acaeb188bf8ff7d9c324 | 950d30c1b3adbdcaca6062e7d6b07f23e5136f25 | /backend/zambiadecides21/migrations/0004_auto_20210809_0320.py | 27e94b9ac542e766718117296e82c4cc35307370 | [] | no_license | ThetaleofAj/zambia-decides-2021 | 2b7b6b3ded13c9b7a06ccae46077844aed97a780 | 16ab2dae77ef64e7e38ace52ff3546a217cfc814 | refs/heads/master | 2023-08-10T22:40:00.539507 | 2021-10-05T16:41:19 | 2021-10-05T16:41:19 | 394,209,549 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,373 | py | # Generated by Django 3.1.7 on 2021-08-09 01:20
from django.db import migrations, models
class Migration(migrations.Migration):
    """Create the Chart model and restrict provincewinner on the Eleven,
    Sixteen and TwentyOne models to a fixed choice set."""

    dependencies = [
        ('zambiadecides21', '0003_auto_20210808_2153'),
    ]

    operations = [
        migrations.CreateModel(
            name='Chart',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('year', models.CharField(max_length=5)),
                ('figure', models.CharField(max_length=10)),
            ],
        ),
        # The same choice list is applied to all three election-year models.
        migrations.AlterField(
            model_name='eleven',
            name='provincewinner',
            field=models.CharField(blank=True, choices=[('1', '1'), ('2', '2'), ('1.5', '1.5'), ('2.5', '2.5'), ('4', '4'), ('0', '0')], max_length=20),
        ),
        migrations.AlterField(
            model_name='sixteen',
            name='provincewinner',
            field=models.CharField(blank=True, choices=[('1', '1'), ('2', '2'), ('1.5', '1.5'), ('2.5', '2.5'), ('4', '4'), ('0', '0')], max_length=20),
        ),
        migrations.AlterField(
            model_name='twentyone',
            name='provincewinner',
            field=models.CharField(blank=True, choices=[('1', '1'), ('2', '2'), ('1.5', '1.5'), ('2.5', '2.5'), ('4', '4'), ('0', '0')], max_length=20),
        ),
    ]
| [
"you@example.com"
] | you@example.com |
7b5f264f2133e3e738e409cec6f6e0ae594c2f90 | e0ea1eddc8e58fe59a8e902e3b7c8843f2f148b0 | /Amonia_P1/Plots.py | 85d84db4d5cda3d05ea9c178045b90fa6158fba5 | [] | no_license | MendezV/ASE_test | 4e4b04e9888dc974b0747acb09d5728ae5c7fa51 | 5a36c00b17bd0c79558bf35d81027ccfcd8de239 | refs/heads/master | 2020-05-21T05:21:01.794467 | 2019-05-10T21:53:05 | 2019-05-10T21:53:05 | 185,910,957 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 753 | py | import matplotlib.pyplot as plt
from ase.neb import NEBTools
from ase.io import read
# Load the last seven images of the NEB trajectory (one full band).
images = read('neb4.traj@-7:')
nebtools = NEBTools(images)

# Get the calculated barrier and the energy change of the reaction.
Ef, dE = nebtools.get_barrier()

# Get the barrier without any interpolation between highest images
# (overwrites the fitted values above).
Ef, dE = nebtools.get_barrier(fit=False)

# Get the actual maximum force at this point in the simulation.
max_force = nebtools.get_fmax()

# Create a figure like that coming from ASE-GUI.
fig = nebtools.plot_band()
fig.savefig('diffusion-barrier4.png')

# Create a figure with custom parameters.
# NOTE(review): this second savefig reuses the same filename and therefore
# overwrites the figure saved just above — confirm whether a different
# output name was intended.
fig = plt.figure(figsize=(5.5, 4.0))
ax = fig.add_axes((0.15, 0.15, 0.8, 0.75))
nebtools.plot_band(ax)
fig.savefig('diffusion-barrier4.png')
| [
"som.felipe66@gmail.com"
] | som.felipe66@gmail.com |
48220d9c7faa9d40e95549c0eed4860967102e1f | 3c7e0da4d146f45af1202c6d9f4a14f9874bd5d0 | /src/wai/spectralio/util/_non_default_kwargs.py | 346b473655ab79a7e159e7dc1a944af40ee8a55b | [
"MIT"
] | permissive | waikato-datamining/wai-spectralio | c0d4e85a57f07dfb7daaa8e65f6756687cb4fa81 | a0edba2208b0b646ed54782cb0832ce10eed0d5e | refs/heads/master | 2020-12-07T07:15:56.731643 | 2020-02-16T22:22:36 | 2020-02-16T22:22:36 | 232,667,947 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 977 | py | from inspect import Signature, signature, Parameter
from typing import Any, Dict
def non_default_kwargs(function, locals_) -> Dict[str, Any]:
    """
    Gets a dictionary of all keyword-only arguments which
    have non-default values.

    E.g.
    def my_func(*, kw1=None, kw2=None, kw3=None):
        return non_default_kwargs(my_func, locals())

    nd_kwargs = my_func(kw1=1, kw3=3)

    assert nd_kwargs == {"kw1": 1, "kw3": 3}

    :param function:    The function being called.
    :param locals_:     The locals in the function.
    :return:            The mapping from keyword to value for
                        all non-default keyword arguments.
    """
    result: Dict[str, Any] = {}
    for name, parameter in signature(function).parameters.items():
        if parameter.kind != Parameter.KEYWORD_ONLY:
            continue
        value = locals_[name]
        # Identity (not equality) comparison against the declared default:
        # a value that merely *equals* the default still counts as supplied.
        if value is not parameter.default:
            result[name] = value
    return result
| [
"coreytsterling@gmail.com"
] | coreytsterling@gmail.com |
fad0767571a561bc9ad609275298385db1d12d3d | cce55f618628a8c3734ead3de7422040585dd495 | /python_tank/bots/____bots/LOSER/7 Nataphol/bot_sample_a.py | 096a972985eeeb258339b9c72ae4406706f0e585 | [] | no_license | maxoja/python-presession-se9 | eba35f361cae5b7f011d1d1231d948560fae4e48 | 3036d59f55bed82fac48e60c6f1005e00de3f6ec | refs/heads/master | 2021-01-01T04:59:21.396064 | 2017-09-09T18:04:47 | 2017-09-09T18:04:47 | 97,286,520 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,450 | py | import pygame
import random
from core.tank import TankPrototype
class BotSampleA(TankPrototype):
    """Sample bot: takes one random step per tick, then fires at any enemy
    that shares its row or column.

    The tank API (getPosition, getEnemyList, move, shoot, ...) is inherited
    from TankPrototype; see that class for the full list of helpers.
    """

    # Candidate headings for the random walk.
    DIRECTIONS = ('left', 'right', 'up', 'down')

    def start(self):
        # Called once from the superclass __init__; initialise bot state here.
        # Kept for backward compatibility even though the random-walk logic
        # below no longer reads it.
        self.current_direction = 'left'

    def update(self):
        # Called every millisecond by the game loop.
        # (Original had a dead getPosition() call and a large commented-out
        # edge-bouncing block here; both removed.)
        self.move(random.choice(self.DIRECTIONS))

        # Position is loop-invariant during a single update, so fetch it once.
        self_x, self_y = self.getPosition()
        for enemy in self.getEnemyList():
            enemy_x, enemy_y = enemy.getPosition()
            if self_x == enemy_x:
                # Same column: fire vertically toward the enemy.
                self.shoot('up' if enemy_y < self_y else 'down')
            if self_y == enemy_y:
                # Same row: fire horizontally toward the enemy.
                self.shoot('left' if enemy_x < self_x else 'right')
| [
"-"
] | - |
4e9c5e928bdb46354ec16ff4c25cef74b765dc15 | 82762d776e2400948af54ca2e1bdf282885d922c | /648. 单词替换.py | 5b70932bd91b5ec8a5129c3133829ea9b788fb7c | [] | no_license | dx19910707/LeetCode | f77bab78bcba2d4002c9662c122b82fc3c9caa46 | 624975f767f6efa1d7361cc077eaebc344d57210 | refs/heads/master | 2020-03-17T02:50:46.546878 | 2019-06-25T09:22:13 | 2019-06-25T09:22:13 | 133,208,490 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 461 | py | class Solution(object):
def replaceWords(self, dict, sentence):
"""
:type dict: List[str]
:type sentence: str
:rtype: str
"""
words = sentence.split()
dict = set(dict)
def repalce(word,dict):
for j in range(1,len(word)):
if word[:j] in dict:
return word[:j]
return word
return ' '.join([repalce(word,dict) for word in words]) | [
"dx19910707@qq.com"
] | dx19910707@qq.com |
40d2f91aa1b79fdab73649db6b033d0fbbb9157f | 670f4ba8ded99b420c3454c6ae35789667880cc8 | /tobiko/tests/functional/shell/sh/test_process.py | 20eafd99b645ff926413ecabaf9fc2584a95591d | [
"Apache-2.0"
] | permissive | FedericoRessi/tobiko | 892db522198ab48380892138459d801c4bd00efa | ce2a8734f8b4203ec38078207297062263c49f6f | refs/heads/master | 2022-07-26T22:52:10.273883 | 2022-07-20T20:04:43 | 2022-07-20T20:04:43 | 145,856,925 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,690 | py | # Copyright (c) 2021 Red Hat, Inc.
#
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from __future__ import absolute_import
import testtools
import tobiko
from tobiko.openstack import keystone
from tobiko.openstack import stacks
from tobiko.shell import sh
from tobiko.shell import ssh
class ProcessFixture(tobiko.SharedFixture):
    """Shared fixture that tails a scratch file with a long-running
    ``tail -f`` process, either locally or over SSH depending on
    ``ssh_client``."""

    # Path of the temporary file created on the target host.
    temp_filename: str
    # None -> default behaviour; False (subclass) -> force local execution.
    ssh_client: ssh.SSHClientType = None
    # The running ``tail -f`` process handle.
    process: sh.ShellProcessFixture

    def setup_fixture(self):
        # Create the scratch file on the target host; remove it on cleanup.
        self.temp_filename = sh.execute(
            'mktemp', ssh_client=self.ssh_client).stdout.strip()
        self.addCleanup(sh.execute,
                        f"rm -f '{self.temp_filename}'",
                        ssh_client=self.ssh_client)
        # Start tailing the file; the process is killed on cleanup.
        self.process = sh.process(f"tail -f '{self.temp_filename}'",
                                  ssh_client=self.ssh_client)
        self.process.execute()
        self.addCleanup(self.process.kill)
class ProcessTest(testtools.TestCase):
    """Checks that text appended to the tailed file appears on the
    ``tail -f`` process stdout."""

    fixture = tobiko.required_fixture(ProcessFixture)

    def test_stdout(self):
        fixture = self.fixture
        # Append a line on the same host the tail process runs on.
        sh.execute(f"echo some text > '{fixture.temp_filename}'",
                   ssh_client=fixture.ssh_client)
        line = self.fixture.process.stdout.readline()
        # stdout is a byte stream, hence the bytes literal.
        self.assertEqual(b'some text\n', line)
class LocalProcessFixture(ProcessFixture):
    # ssh_client=False forces local execution instead of SSH.
    ssh_client: ssh.SSHClientType = False
class LocalProcessTest(ProcessTest):
    # Same stdout test, run against the local (non-SSH) process fixture.
    fixture = tobiko.required_fixture(LocalProcessFixture)
class SSHProcessFixture(ProcessFixture):
    """Runs the tail process over SSH on an Ubuntu server stack."""

    stack = tobiko.required_fixture(
        stacks.UbuntuMinimalServerStackFixture)

    def setup_fixture(self):
        # Resolve the SSH client from the server stack before base setup.
        self.ssh_client = self.stack.ssh_client
        super().setup_fixture()
# Skipped when no keystone credentials are configured.
@keystone.skip_unless_has_keystone_credentials()
class SSHProcessTest(ProcessTest):
    fixture = tobiko.required_fixture(SSHProcessFixture)
class CirrosProcessFixture(SSHProcessFixture):
    # Same SSH scenario, but against a CirrOS guest instead of Ubuntu.
    stack = tobiko.required_fixture(
        stacks.CirrosServerStackFixture)
# Skipped when no keystone credentials are configured.
@keystone.skip_unless_has_keystone_credentials()
class CirrosProcessTest(ProcessTest):
    fixture = tobiko.required_fixture(CirrosProcessFixture)
| [
"fressi@redhat.com"
] | fressi@redhat.com |
93946091d21adcf7a625c9851780fbfde574c3de | f41309da5e0d26b24d974a009fa309a02fcaa20c | /aws_cloudtrail_rules/aws_console_root_login_failed.py | cd4235bf1ebad8d0f6131287c69df17e9056000c | [
"Apache-2.0"
] | permissive | georgeSkoumas/panther-analysis | 2e1e87f83c6533cb6d62ecb62e3f61b2ff4b5ed4 | 30b21c270504bf7c84f99207c9c6c2f6110843ae | refs/heads/master | 2022-09-14T13:22:31.786275 | 2020-05-26T16:18:58 | 2020-05-26T16:18:58 | 267,569,230 | 1 | 0 | Apache-2.0 | 2020-05-28T11:15:05 | 2020-05-28T11:15:04 | null | UTF-8 | Python | false | false | 270 | py | def rule(event):
return (event['eventName'] == 'ConsoleLogin' and
event['userIdentity'].get('type') == 'Root' and
event.get('responseElements', {}).get('ConsoleLogin') == 'Failure')
def dedup(event):
    # Deduplicate alerts per source IP; None when the field is absent.
    return event.get('sourceIPAddress')
| [
"noreply@github.com"
] | georgeSkoumas.noreply@github.com |
99aa757bc2ede5b024d4fad04d932cf44fc1b792 | 57f1bd5aa524278cbf6b7fe3bfca04d34e1d6ad6 | /src/tests/test_sum_terms.py | 4709e01319ba8620de1ff15450593421bb5e6734 | [
"MIT"
] | permissive | serashioda/code-katas | 348f87e2932fa5f50fe7b7300b6a18b778a15cca | 8822b315f4136c74514348441bd8dd14651a71b9 | refs/heads/master | 2021-01-22T09:42:22.859203 | 2017-05-16T07:01:07 | 2017-05-16T07:01:07 | 76,081,162 | 3 | 0 | null | 2017-05-16T06:59:20 | 2016-12-10T00:44:04 | Python | UTF-8 | Python | false | false | 313 | py | """Tests for sum_terms module."""
import pytest
# (n, expected) pairs: the expected value is the series partial sum as a
# string rounded to two decimals, the format series_sum produces.
TERMS_TABLE = [
    [1, '1.00'],
    [2, '1.25'],
    [3, '1.39']
]


@pytest.mark.parametrize("n, result", TERMS_TABLE)
def test_sum_terms(n, result):
    """Test the series_sum function."""
    # Local import of the module under test.
    from sum_terms import series_sum
    assert series_sum(n) == result
| [
"you@example.com"
] | you@example.com |
d0ca2dc2bb4fcb487b13dce959aa415c1147eda9 | b4afd14e3b4e9cff0a99906a69587e348b243aeb | /website/菜鸟教程/《基础》/20模块/using_sys.py | f49203d502250b0222a18a271e1610bd011e3072 | [] | no_license | zhankq/pythonlearn | d694df23826cda6ba662e852e531e96a10ab2092 | cb714fbb8257193029f958e73e0f9bd6a68d77f1 | refs/heads/master | 2021-12-16T13:51:23.381206 | 2021-12-03T01:13:36 | 2021-12-03T01:13:36 | 205,632,135 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 166 | py | #!/usr/bin/python3
# file: using_sys.py
import sys
# Echo every command-line argument on its own line, then the module
# search path.
print('命令行参数如下:')
for argument in sys.argv:
    print(argument)

print('\n\nPython 路径为: ', sys.path, '\n')
| [
"zhankq@163.com"
] | zhankq@163.com |
018dc1fe3a3e8a6cef38f69c06991ce2baf6bd48 | fd704067def8f2b5005e45a5c017641fb08ccdf6 | /bl2_slots/wsgi.py | cb189939ed9c0d00c85b02fe9fe10efe66cbbb94 | [
"BSD-3-Clause"
] | permissive | jbittel/bl2-slots | 97d709316fa7606597b037a4b0179cca6fc6e741 | 465d0e2d191e1091a3768a4f144e4361aa730b57 | refs/heads/master | 2016-09-05T22:27:57.050472 | 2012-12-17T06:05:16 | 2012-12-17T06:05:16 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,428 | py | """
WSGI config for bl2_slots project.
This module contains the WSGI application used by Django's development server
and any production WSGI deployments. It should expose a module-level variable
named ``application``. Django's ``runserver`` and ``runfcgi`` commands discover
this application via the ``WSGI_APPLICATION`` setting.
Usually you will have the standard Django WSGI application here, but it also
might make sense to replace the whole Django WSGI application with a custom one
that later delegates to the Django one. For example, you could introduce WSGI
middleware here, or combine a Django application with an application of another
framework.
"""
import os

# We defer to a DJANGO_SETTINGS_MODULE already in the environment. This breaks
# if running multiple sites in the same mod_wsgi process. To fix this, use
# mod_wsgi daemon mode with each site in its own daemon process, or set
# os.environ["DJANGO_SETTINGS_MODULE"] = "bl2_slots.settings" unconditionally.
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "bl2_slots.settings")

# This ``application`` object is the module-level entry point any WSGI
# server (including Django's development server) looks up when the
# WSGI_APPLICATION setting points here.
from django.core.wsgi import get_wsgi_application
application = get_wsgi_application()

# Apply WSGI middleware here, e.g.:
# from helloworld.wsgi import HelloWorldApplication
# application = HelloWorldApplication(application)
| [
"jason.bittel@gmail.com"
] | jason.bittel@gmail.com |
f520d223c1254d588e9eaca23392831fb0a26338 | 2aba62d66c2c622bdc148cef451da76cae5fd76c | /exercise/learn_python_dm2039/ch29/ch29_14.py | 6d1157ddc1e8725993369b82a188457a16ec0773 | [] | no_license | NTUT-109AB8011/crawler | 6a76de2ab1848ebc8365e071e76c08ca7348be62 | a703ec741b48d3af615a757fed7607b1f8eb66a6 | refs/heads/master | 2023-03-26T22:39:59.527175 | 2021-03-30T03:29:22 | 2021-03-30T03:29:22 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 473 | py | # ch29_14.py
import pymysql
conn = pymysql.connect(host = 'localhost',
port = 3306,
user = 'root',
charset = 'utf8',
password = 'hung',
database = 'mydb1')
mycursor = conn.cursor()
sql = """
CREATE TABLE IF NOT EXISTS Customers (
ID int NOT NULL AUTO_INCREMENT PRIMARY KEY,
Name varchar(20),
City varchar(20)
)"""
mycursor.execute(sql)
| [
"terranandes@gmail.com"
] | terranandes@gmail.com |
f679d40e63ef7891c56eb6af3fe928f525add9e1 | 39a5bf0a968e30e78f33c479d952d604f248ca97 | /analytics/frequent_calls.py | b5d59f2f345a9da5278a9d9a8dfe1fa2b9a1c94a | [] | no_license | bhargavraju/log_analytics | 1a13aa71d14edd6e7dfeab5713310c0d230ad686 | ac6319518063f883096f6818cd5c3338d38d1511 | refs/heads/master | 2023-02-02T05:31:36.545351 | 2020-12-22T06:26:21 | 2020-12-22T06:26:21 | 323,539,911 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 315 | py | from heapq import nlargest
def get_most_frequent_calls(url_dict, k):
    """Return the *k* most frequent (http_method, url, count) triples.

    ``url_dict`` maps url -> {http_method: list_of_calls}; the count is the
    length of each call list. Results are ordered by descending count.
    """
    calls = [
        (http_method, url, len(entries))
        for url, methods in url_dict.items()
        for http_method, entries in methods.items()
    ]
    return nlargest(k, calls, key=lambda call: call[2])
| [
"jampanibhargavraju@gmail.com"
] | jampanibhargavraju@gmail.com |
d3bc1e07ff0ac6708ca0cb01e5ac36345964cc5b | 8255dcf7689c20283b5e75a452139e553b34ddf3 | /app/views/dashboard/items/category_is_popular.py | 10b4b1e99560f5b2de07a3814f674bd842d6354a | [
"MIT"
] | permissive | Wern-rm/raton.by | 09871eb4da628ff7b0d0b4415a150cf6c12c3e5a | 68f862f2bc0551bf2327e9d6352c0cde93f45301 | refs/heads/main | 2023-05-06T02:26:58.980779 | 2021-05-25T14:09:47 | 2021-05-25T14:09:47 | 317,119,285 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 823 | py | from flask import redirect, url_for
from flask_login import login_required
from app import db, logger
from app.models.items_category import ItemsCategory
from app.views.dashboard import bp
from app.controllers.dashboard_controller import dashboard_controller
@bp.route('/item/category/popular/<int:category_id>/<int:status>', methods=['GET'])
@login_required
@dashboard_controller
def item_category_popular(category_id: int, status: int):
    """Set the ``is_popular`` flag on an item category, then redirect
    back to the dashboard items page.

    :param category_id: primary key of the ItemsCategory row to update.
    :param status: stored as-is into ``is_popular`` (presumably 1 = mark
        popular, 0 = unmark — confirm against the callers/templates).
    """
    try:
        db.session.query(ItemsCategory).filter(ItemsCategory.id == category_id).update({'is_popular': status})
        db.session.commit()
        # id=12 presumably selects a success flash message on the items
        # page — TODO confirm against the template.
        return redirect(url_for('dashboard.items', action='success', id=12))
    except Exception as e:
        # Roll back the failed transaction and surface a generic warning.
        db.session.rollback()
        logger.error(e)
        return redirect(url_for('dashboard.items', action='warning', id=1))
"devwern@gmail.com"
] | devwern@gmail.com |
d7d01fd4d444af4f105a94a7be9b1d11fe356c01 | 6d39bbe870917a9f759cc1342c51cc67bb3c3001 | /pyobjc-framework-LocalAuthentication/PyObjCTest/test_laerror.py | ca39217075417a5dbcc4a37491767185d95d771a | [
"MIT"
] | permissive | beOn/pyobjc | 6cb1887d0a050d63c9885a9c5ecb8fbe1f402b43 | acf71801c5915a5a632afe7a265faa7c9a7209b1 | refs/heads/master | 2023-06-04T13:46:39.855437 | 2021-06-25T19:44:40 | 2021-06-25T19:44:40 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,847 | py | from PyObjCTools.TestSupport import TestCase, min_os_level
import LocalAuthentication
class TestLAContext(TestCase):
    """Checks each ``LAError*`` constant equals its ``kLAError*`` alias."""

    # Suffixes shared by both constant families, in the original
    # assertion order.
    _ERROR_NAMES = (
        "AuthenticationFailed",
        "UserCancel",
        "UserFallback",
        "SystemCancel",
        "PasscodeNotSet",
        "TouchIDNotAvailable",
        "TouchIDNotEnrolled",
        "TouchIDLockout",
        "AppCancel",
        "InvalidContext",
        "NotInteractive",
        "BiometryNotAvailable",
        "BiometryNotEnrolled",
        "BiometryLockout",
        "WatchNotAvailable",
        "BiometryNotPaired",
        "BiometryDisconnected",
    )

    @min_os_level("10.10")
    def testConstants(self):
        # Table-driven form of the original one-assert-per-constant list.
        for name in self._ERROR_NAMES:
            self.assertEqual(
                getattr(LocalAuthentication, "LAError" + name),
                getattr(LocalAuthentication, "kLAError" + name),
            )

    @min_os_level("10.11")
    def testConstants10_11(self):
        self.assertIsInstance(LocalAuthentication.LAErrorDomain, str)
| [
"ronaldoussoren@mac.com"
] | ronaldoussoren@mac.com |
ca9abafd76c63bff6a24df56960ef75ce10ab43a | b4c740714b29ea325562377e117c9e0a181c6d5e | /env/bin/pyrsa-priv2pub | f1e2e671eb2b970a991f64bc9077dbe85455b418 | [] | no_license | mazharoddin/keyword-pos-SEO-updated | b232d304e22dfa0a9076d7892a035d0407bba4b3 | e322e31b0db0d9bd9d79574fcfdaf4946b203ec4 | refs/heads/master | 2023-03-02T10:26:27.299213 | 2021-02-11T10:55:55 | 2021-02-11T10:55:55 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 260 | #!/home/anand/Desktop/seotool/env/bin/python3
# -*- coding: utf-8 -*-
import re
import sys
from rsa.util import private_to_public
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
sys.exit(private_to_public())
| [
"anand98.ar@gmail.com"
] | anand98.ar@gmail.com | |
f0fe1c6e5370c491fa9b9eed13880a3010b59311 | 3b61a91499e048062dfb805456f8fff46e53f0aa | /spectralDNS.py | 7d614eff18d1866ebaf814efaa5fa78bd0f02359 | [] | no_license | Nasrollah/spectralDNS | 4df534e0367c329738dfcda4a501dd5be09ab696 | df1ffaa42207dd0ce3a6583f7c4c712e3b76ffa4 | refs/heads/master | 2021-01-15T10:13:44.550406 | 2015-01-27T09:24:46 | 2015-01-27T09:24:46 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,185 | py | __author__ = "Mikael Mortensen <mikaem@math.uio.no>"
__date__ = "2014-11-07"
__copyright__ = "Copyright (C) 2014 " + __author__
__license__ = "GNU Lesser GPL version 3 or any later version"
from MPI_knee import mpi_import, MPI
with mpi_import():
import time
t0 = time.time()
import sys, cProfile
from h5io import *
from numpy import *
from utilities import *
#import time
#t0 = time.time()
#import sys, cProfile
#from mpi4py import MPI
#from numpy import *
#from h5io import *
#from utilities import *
comm = MPI.COMM_WORLD
comm.barrier()
if comm.Get_rank()==0:
    print "Import time ", time.time()-t0

# Default solver parameters; any entry can be overridden from the
# command line (parsed below).
params = {
    'decomposition': 'slab',    # 'slab' or 'pencil'
    'communication': 'alltoall',# 'alltoall' or 'sendrecv_replace' (only for slab)
    'convection': 'Vortex',     # 'Standard', 'Divergence', 'Skewed', 'Vortex'
    'make_profile': 0,          # Enable cProfile profiler
    'mem_profile': False,       # Check memory use
    'M': 5,                     # Mesh size (mesh is 2**M in each direction)
    'P1': 1,                    # Mesh decomposition in first direction (pencil P1*P2=num_processes)
    'temporal': 'RK4',          # Integrator ('RK4', 'ForwardEuler', 'AB2')
    'write_result': 1e8,        # Write to HDF5 every..
    'write_yz_slice': [0, 1e8], # Write slice 0 (or higher) in y-z plane every..
    'compute_energy': 2,        # Compute solution energy every..
    'nu': 0.000625,             # Viscosity
    'dt': 0.01,                 # Time step
    'T': 0.1,                   # End time
    'precision': "double"       # single or double precision
    }

# Parse parameters from the command line and inject everything into the
# module namespace (so e.g. `M`, `nu`, `dt` become globals).
commandline_kwargs = parse_command_line(sys.argv[1:])
params.update(commandline_kwargs)
assert params['convection'] in ['Standard', 'Divergence', 'Skewed', 'Vortex']
assert params['temporal'] in ['RK4', 'ForwardEuler', 'AB2']
vars().update(params)

if mem_profile: mem = MemoryUsage("Start (numpy/mpi4py++)", comm)

# NOTE: deliberately shadows the builtin float/complex with numpy dtypes
# (plus matching MPI datatype) for the chosen precision.
float, complex, mpitype = {"single": (float32, complex64, MPI.F_FLOAT_COMPLEX),
                           "double": (float64, complex128, MPI.F_DOUBLE_COMPLEX)}[precision]

# Set mesh size. Uniform size in all three directions (for now)
N = 2**M
L = 2 * pi
dx = L / N

num_processes = comm.Get_size()
rank = comm.Get_rank()

hdf5file = HDF5Writer(comm, dt, N, params, float)

if make_profile: profiler = cProfile.Profile()

# Import decomposed mesh, wavenumber mesh and FFT routines with either
# slab or pencil decomposition; setup() populates the module namespace
# with the work arrays (U, U_hat, KX, dealias, ...).
with mpi_import():
    exec("from mpi.{} import *".format(decomposition))

vars().update(setup(**vars()))

if mem_profile: mem("Arrays")

# RK4 parameters
a = array([1./6., 1./3., 1./3., 1./6.], dtype=float)
b = array([0.5, 0.5, 1.], dtype=float)
def project(u):
    """Project u onto divergence free space (in-place).

    Subtracts the gradient part: u <- u - K (K.u)/|K|^2, using the
    module-global wavenumber arrays KX and KX_over_Ksq (= K/|K|^2).
    """
    u[:] -= sum(KX_over_Ksq*u, 0)*KX
def standardConvection(c):
    """c_i = u_j du_i/dx_j (convection in standard form, spectral output).

    Reads the module-global velocity fields U / U_hat; U_tmp and the
    fftn_mpi/ifftn_mpi helpers come from the decomposition module.
    """
    for i in range(3):
        for j in range(3):
            # du_i/dx_j back-transformed to physical space
            ifftn_mpi(1j*KX[j]*U_hat[i], U_tmp[j])
        fftn_mpi(sum(U*U_tmp, 0), c[i])
def divergenceConvection(c, add=False):
    """c_i = div(u_i u_j) (convection in divergence form, spectral output).

    With add=True the result is accumulated into c instead of overwriting
    it (used by the 'Skewed' form in ComputeRHS).
    """
    if not add: c.fill(0)
    # Products involving u_0
    for i in range(3):
        fftn_mpi(U[0]*U[i], F_tmp[i])
    c[0] += 1j*sum(KX*F_tmp, 0)
    c[1] += 1j*KX[0]*F_tmp[1]
    c[2] += 1j*KX[0]*F_tmp[2]
    # Remaining symmetric products u1*u1, u1*u2, u2*u2
    fftn_mpi(U[1]*U[1], F_tmp[0])
    fftn_mpi(U[1]*U[2], F_tmp[1])
    fftn_mpi(U[2]*U[2], F_tmp[2])
    c[1] += (1j*KX[1]*F_tmp[0] + 1j*KX[2]*F_tmp[1])
    c[2] += (1j*KX[1]*F_tmp[1] + 1j*KX[2]*F_tmp[2])
def Cross(a, b, c):
    """c_k = F_k(a x b): FFT of the cross product of physical fields a, b."""
    fftn_mpi(a[1]*b[2]-a[2]*b[1], c[0])
    fftn_mpi(a[2]*b[0]-a[0]*b[2], c[1])
    fftn_mpi(a[0]*b[1]-a[1]*b[0], c[2])
def Curl(a, c):
    """c = F_inv(curl(a)): physical-space curl of spectral field a."""
    ifftn_mpi(1j*(KX[0]*a[1]-KX[1]*a[0]), c[2])
    ifftn_mpi(1j*(KX[2]*a[0]-KX[0]*a[2]), c[1])
    ifftn_mpi(1j*(KX[1]*a[2]-KX[2]*a[1]), c[0])
def Div(a, c):
    """c = F_inv(div(a)): physical-space divergence of spectral field a.

    Bug fix: the original had a misplaced parenthesis,
    ``ifftn_mpi(1j*(sum(KX*a, 0), c))``, which multiplied 1j by a *tuple*
    and never passed the output array — a TypeError at the first call.
    The call now matches the Curl/Cross convention:
    ifftn_mpi(spectral_input, physical_output).
    """
    ifftn_mpi(1j*sum(KX*a, 0), c)
def ComputeRHS(dU, rk):
    """Fill dU with dt * RHS of the spectral Navier-Stokes equations.

    rk is the Runge-Kutta stage index: for rk > 0 the physical velocity U
    must first be refreshed from the current U_hat.
    """
    if rk > 0: # For rk=0 the correct values are already in U
        for i in range(3):
            ifftn_mpi(U_hat[i], U[i])

    # Compute convective term and place in dU
    if convection == "Standard":
        standardConvection(dU)

    elif convection == "Divergence":
        divergenceConvection(dU)

    elif convection == "Skewed":
        # Skew-symmetric form: average of standard and divergence forms.
        standardConvection(dU)
        divergenceConvection(dU, add=True)
        dU[:] = dU/2

    elif convection == "Vortex":
        # Rotational form: u x omega
        Curl(U_hat, curl)
        Cross(U, curl, dU)

    # Dealias the nonlinear convection
    dU[:] *= dealias*dt

    # Compute pressure (To get actual pressure multiply by 1j/dt)
    P_hat[:] = sum(dU*KX_over_Ksq, 0)

    # Add pressure gradient
    dU[:] -= P_hat*KX

    # Add contribution from diffusion
    dU[:] -= nu*dt*KK*U_hat
# Taylor-Green initialization
U[0] = sin(X[0])*cos(X[1])*cos(X[2])
U[1] =-cos(X[0])*sin(X[1])*cos(X[2])
U[2] = 0

# Transform initial data
for i in range(3):
    fftn_mpi(U[i], U_hat[i])

if mem_profile: mem("After first FFT")

# Set some timers
t = 0.0
tstep = 0
fastest_time = 1e8
slowest_time = 0.0

# initialize k for storing energy (rank 0 only)
if rank == 0: k = []

# Forward equations in time
tic = t0 = time.time()
while t < T-1e-8:
    t += dt; tstep += 1

    if temporal == "RK4":
        # Classic RK4: U_hat0 holds the stage base, U_hat1 accumulates
        # the weighted sum of the four stages.
        U_hat1[:] = U_hat0[:] = U_hat
        for rk in range(4):
            ComputeRHS(dU, rk)
            if rk < 3:
                U_hat[:] = U_hat0 + b[rk]*dU
            U_hat1[:] += a[rk]*dU
        U_hat[:] = U_hat1[:]

    elif temporal == "ForwardEuler" or tstep == 1:
        # The very first AB2 step falls back to Forward Euler.
        ComputeRHS(dU, 0)
        U_hat[:] += dU
        if temporal == "AB2":
            U_hat0[:] = dU

    else:
        # AB2: 1.5*f(n) - 0.5*f(n-1)
        ComputeRHS(dU, 0)
        U_hat[:] += 1.5*dU - 0.5*U_hat0
        U_hat0[:] = dU

    # Refresh physical velocity for output/diagnostics.
    for i in range(3):
        ifftn_mpi(U_hat[i], U[i])

    if tstep % params['write_result'] == 0 or tstep % params['write_yz_slice'][1] == 0:
        ifftn_mpi(P_hat*1j/dt, P)
        hdf5file.write(U, P, tstep)

    if tstep % compute_energy == 0:
        kk = comm.reduce(sum(U.astype(float64)*U.astype(float64))*dx*dx*dx/L**3/2) # Compute energy with double precision
        if rank == 0:
            k.append(kk)
            print t, float(kk)

    # Track per-step wall time (first step excluded: startup costs).
    tt = time.time()-t0
    t0 = time.time()
    if tstep > 1:
        fastest_time = min(tt, fastest_time)
        slowest_time = max(tt, slowest_time)

    if tstep == 1 and make_profile:
        #Enable profiling after first step is finished
        profiler.enable()

toc = time.time()-tic

# Get min/max of fastest and slowest process
fast = (comm.reduce(fastest_time, op=MPI.MIN, root=0),
        comm.reduce(slowest_time, op=MPI.MIN, root=0))
slow = (comm.reduce(fastest_time, op=MPI.MAX, root=0),
        comm.reduce(slowest_time, op=MPI.MAX, root=0))

if rank == 0:
    print "Time = ", toc
    print "Fastest = ", fast
    print "Slowest = ", slow

    #figure()
    #k = array(k)
    #dkdt = (k[1:]-k[:-1])/dt
    #plot(-dkdt)
    #show()

if make_profile:
    results = create_profile(**vars())

if mem_profile: mem("End")

hdf5file.generate_xdmf()
hdf5file.close()
"mikaem@math.uio.no"
] | mikaem@math.uio.no |
03605ae5bfd171adf54bd4d6848c989903251394 | bede0bbf055a7cffc62808cd6ee4654c02e2a2c4 | /PlotConfiguration/ISR/2018/unfolding/unfolding_input/configuration_muon.py | b102ddc4a96dfe4f163c50842dc452c2db201f6c | [] | no_license | bhoh/MultiUniv | ec4d94180971542d8c4d69726c4e26a3f90596ef | 1105e8203ce650457bb9cbfb985a90323845c8b0 | refs/heads/master | 2020-04-24T07:33:41.915446 | 2020-02-13T10:13:40 | 2020-02-13T10:13:40 | 171,802,629 | 0 | 0 | null | 2019-06-03T06:49:59 | 2019-02-21T04:54:20 | C | UTF-8 | Python | false | false | 391 | py |
# mkShape configuration for the 2018 ISR muon-channel unfolding inputs.
Analyzer = 'mkShape'
Outputdir = 'Output_Muon'
Category = 'SMP'
Year = '2018'
InSkim = 'MetFt_L_v2_LL_v1_ISR_detector_only_v1_Eff_SF_v1'
Userflags = 'unfold_input_muon'
# Input TTree path inside the SKFlat files.
treeName = 'recoTree/SKFlat'
# Companion configuration modules (samples/plots/variables/cuts/nuisances).
samplesFile = 'samples_muon.py'
plotFile = 'plot_muon.py'
variablesFile = 'variables_muon.py'
cutsFile = 'cuts_muon.py'
nuisancesFile= 'nuisances_muon.py'
"jhkim@cern.ch"
] | jhkim@cern.ch |
920eec89ac0e91eeda23235253b8e6c8d35c4042 | 4738be4be8cda375e33ef606dbe82998d6e60bef | /common_nlp/parser_json_bec.py | be57c0e306259c07aece730900b9e8e8c61119c4 | [
"MIT"
] | permissive | Arieugon/Pesquisas | 39723d6ee642d50708f4a883b8d13faf5d018c3c | 87e3923c571d44774c36d4bc54e444cb1003b43b | refs/heads/master | 2023-01-21T10:23:53.645736 | 2020-12-02T19:23:48 | 2020-12-02T19:23:48 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,963 | py | import json, os
class parser_json_bec():
    """Parsers for BEC (Bolsa Eletronica de Compras) auction JSON dumps."""

    def __init__(self):
        pass

    @staticmethod
    def _clean(value):
        """Stringify *value* and strip quote/backslash characters."""
        return str(value).replace('"', '').replace('\\', '').replace('\'', '')

    def parse_bec_basico(self, file_path, multiple=False):
        """Extract basic auction data from one JSON file or a directory.

        :param file_path: path to a JSON file, or to a directory of JSON
            files when ``multiple`` is True.
        :param multiple: treat ``file_path`` as a directory and parse
            every file inside it.
        :return: list of 8-tuples (numero_oc, uf, modalidade,
            ente_federativo, responsaveis, equipe_apoio, data_ini,
            data_fim), one per file carrying a non-empty
            ``DESC_ATA_GERADACV_ENCERRAMENTO`` section.
        """
        if multiple:
            arquivos = [os.path.join(file_path, f) for f in os.listdir(file_path)]
        else:
            arquivos = [file_path]

        resultados = []
        for arquivo in arquivos:
            # Context manager closes the handle promptly (the original
            # leaked file descriptors via json.load(open(...))).
            with open(arquivo, 'r') as handle:
                json_dct = json.load(handle)[0]

            encerramento = json_dct.get('DESC_ATA_GERADACV_ENCERRAMENTO')
            if not encerramento:
                continue

            # Some dumps use the singular key, others the plural; the
            # original handled this with a bare try/except.
            if 'RESPONSAVEL' in encerramento:
                responsaveis = self._clean(encerramento['RESPONSAVEL'])
            else:
                responsaveis = self._clean(encerramento['RESPONSAVEIS'])

            resultados.append((
                json_dct['OC'],
                json_dct['UF'],
                json_dct['MODALIDADE'],
                self._clean(json_dct['UNIDADE_COMPRADORA']),
                responsaveis,
                '',  # equipe_apoio: not present in encerramento dumps
                json_dct['DT_INICIO'],
                json_dct['DT_FIM'],
            ))
        return resultados
def main():
    """Ad-hoc smoke run against a local dump directory."""
    # Bug fix: the original instantiated ``parser_json()``, a name that
    # does not exist in this module (NameError at runtime); the class is
    # ``parser_json_bec``.
    p = parser_json_bec()
    # NOTE(review): the path looks like a directory, which suggests
    # multiple=True was intended — confirm before relying on this.
    p.parse_bec_basico('/home/danilo/Downloads/BEC_json')
if __name__ == '__main__':
main() | [
"danilopcarlotti@gmail.com"
] | danilopcarlotti@gmail.com |
021528f86c0ac900db42ee7d779aac327dd93731 | c8d666e8d8892629510fe034968aca06c712a18b | /infrastructure/src/main/python/pbase/papp/__init__.py | 4abaa950c8c327a1937e80fe5202f7c880be8481 | [
"MIT"
] | permissive | Impavidity/pbase | 6802cddf78886a5f7a9004826954c7105c91e487 | b5577fe1e140e0b360aec2834349ba87647eb000 | refs/heads/master | 2021-07-21T14:35:30.372273 | 2019-01-10T08:57:45 | 2019-01-10T08:57:45 | 109,772,868 | 10 | 1 | MIT | 2018-12-03T19:00:59 | 2017-11-07T01:52:01 | Python | UTF-8 | Python | false | false | 370 | py | PYTORCH = "pytorch"
"""
Constant for `pytorch` framework.
"""
TENSORFLOW = "tensorflow"
"""
Constant for `tensorflow` framework.
"""
TRAIN_TAG = "TRAIN"
VALID_TAG = "VALID"
TEST_TAG = "TEST"
from .argument import Argument
from .trainer import Trainer
from .config import Config
from .logger import Logger
from .model_pytorch import BaseModel
from .tester import Tester
| [
"spone.2014@gmail.com"
] | spone.2014@gmail.com |
77bec83e145833b45a79450556a8931503ba2557 | 865bd5e42a4299f78c5e23b5db2bdba2d848ab1d | /Python/529.minesweeper.137375432.ac.python3.py | 733f1dd913e188a1b5264bc5b6311c8666c9353b | [] | no_license | zhiymatt/Leetcode | 53f02834fc636bfe559393e9d98c2202b52528e1 | 3a965faee2c9b0ae507991b4d9b81ed0e4912f05 | refs/heads/master | 2020-03-09T08:57:01.796799 | 2018-05-08T22:01:38 | 2018-05-08T22:01:38 | 128,700,683 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 3,779 | py | #
# [529] Minesweeper
#
# https://leetcode.com/problems/minesweeper/description/
#
# algorithms
# Medium (49.42%)
# Total Accepted: 16.6K
# Total Submissions: 33.6K
# Testcase Example: '[["E","E","E","E","E"],["E","E","M","E","E"],["E","E","E","E","E"],["E","E","E","E","E"]]\n[3,0]'
#
# Let's play the minesweeper game (Wikipedia, online game)!
#
# You are given a 2D char matrix representing the game board. 'M' represents an
# unrevealed mine, 'E' represents an unrevealed empty square, 'B' represents a
# revealed blank square that has no adjacent (above, below, left, right, and
# all 4 diagonals) mines, digit ('1' to '8') represents how many mines are
# adjacent to this revealed square, and finally 'X' represents a revealed
# mine.
#
# Now given the next click position (row and column indices) among all the
# unrevealed squares ('M' or 'E'), return the board after revealing this
# position according to the following rules:
#
#
#
# If a mine ('M') is revealed, then the game is over - change it to 'X'.
# If an empty square ('E') with no adjacent mines is revealed, then change it
# to revealed blank ('B') and all of its adjacent unrevealed squares should be
# revealed recursively.
# If an empty square ('E') with at least one adjacent mine is revealed, then
# change it to a digit ('1' to '8') representing the number of adjacent mines.
# Return the board when no more squares will be revealed.
#
#
#
# Example 1:
#
# Input:
#
# [['E', 'E', 'E', 'E', 'E'],
# ['E', 'E', 'M', 'E', 'E'],
# ['E', 'E', 'E', 'E', 'E'],
# ['E', 'E', 'E', 'E', 'E']]
#
# Click : [3,0]
#
# Output:
#
# [['B', '1', 'E', '1', 'B'],
# ['B', '1', 'M', '1', 'B'],
# ['B', '1', '1', '1', 'B'],
# ['B', 'B', 'B', 'B', 'B']]
#
# Explanation:
#
#
#
#
# Example 2:
#
# Input:
#
# [['B', '1', 'E', '1', 'B'],
# ['B', '1', 'M', '1', 'B'],
# ['B', '1', '1', '1', 'B'],
# ['B', 'B', 'B', 'B', 'B']]
#
# Click : [1,2]
#
# Output:
#
# [['B', '1', 'E', '1', 'B'],
# ['B', '1', 'X', '1', 'B'],
# ['B', '1', '1', '1', 'B'],
# ['B', 'B', 'B', 'B', 'B']]
#
# Explanation:
#
#
#
#
#
#
# Note:
#
# The range of the input matrix's height and width is [1,50].
# The click position will only be an unrevealed square ('M' or 'E'), which also
# means the input board contains at least one clickable square.
# The input board won't be a stage when game is over (some mines have been
# revealed).
# For simplicity, not mentioned rules should be ignored in this problem. For
# example, you don't need to reveal all the unrevealed mines when the game is
# over, consider any cases that you will win the game or flag any squares.
#
#
#
class Solution:
def updateBoard(self, board, click):
"""
:type board: List[List[str]]
:type click: List[int]
:rtype: List[List[str]]
"""
A = board
click = tuple(click)
R, C = len(A), len(A[0])
def neighbors(r, c):
for dr in range(-1, 2):
for dc in range(-1, 2):
if (dr or dc) and 0 <= r + dr < R and 0 <= c + dc < C:
yield r + dr, c + dc
stack = [click]
seen = {click}
while stack:
r, c = stack.pop()
if A[r][c] == 'M':
A[r][c] = 'X'
else:
mines_adj = sum( A[nr][nc] in 'MX' for nr, nc in neighbors(r, c) )
if mines_adj:
A[r][c] = str(mines_adj)
else:
A[r][c] = 'B'
for nei in neighbors(r, c):
if A[nei[0]][nei[1]] == 'E' and nei not in seen:
stack.append(nei)
seen.add(nei)
return A
| [
"miylolmiy@gmail.com"
] | miylolmiy@gmail.com |
9cb53aa651363a986810ad06ebbe01e842b8aa0c | 0bf10171cf52b225131562f39b90c412d3d23953 | /charm/customer/migrations/0001_initial.py | abe5ffccd6b5440c0bbe868583146b4b866602b2 | [] | no_license | pharmaziegasse/charm-backend | 937e601e6f6f06720ecb7e637437bf08f237603a | db94823697541e12e7b8eed975de3419e920a625 | refs/heads/master | 2021-06-23T05:38:04.466227 | 2020-11-25T21:05:09 | 2020-11-25T21:05:09 | 197,613,405 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 564 | py | # Generated by Django 2.2.3 on 2019-08-28 13:08
from django.db import migrations
class Migration(migrations.Migration):
initial = True
dependencies = [
('user', '0004_auto_20190828_1415'),
]
operations = [
migrations.CreateModel(
name='Customer',
fields=[
],
options={
'ordering': ('date_joined',),
'proxy': True,
'indexes': [],
'constraints': [],
},
bases=('user.user',),
),
]
| [
"simon.prast.sp@gmail.com"
] | simon.prast.sp@gmail.com |
189fef8c3cab4d5bdcdd1af4e1b33688a846b299 | bf05f5efc341bb4c5128e131e57dae5680605a7b | /forum/admin.py | 78b12008677d8453759905ae48a858d5d0ec2f31 | [] | no_license | import-keshav/Tripoto | 0c7c3f06022e4c32e59042f3e510dbcd58d46108 | d1649dea9c93d95feb1442f4b98156434544d7cb | refs/heads/master | 2020-06-27T00:30:05.777602 | 2019-07-31T14:38:41 | 2019-07-31T14:38:41 | 199,799,343 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 237 | py | from django.contrib import admin
from .models import Question
from .models import Answer
from .models import Comments
admin.site.register(Question)
admin.site.register(Answer)
admin.site.register(Comments)
# Register your models here.
| [
"keshavbathla2017@gmail.com"
] | keshavbathla2017@gmail.com |
9aee44adf4cd7745f52d34caa4005bfab51007e1 | f6db58013843424e737efa2e40d28dbf6ec121d5 | /HLA-pipeline/OptiType/scripts/statistic.multi.samples.py | 7eedd467b23016f60396d906803eca4e3dfa0e3b | [] | no_license | levinyi/GZ_Tcell_work | ef3d7ab2fa1eda7bfd09b09a83349536d6834c81 | 275198da3759e3bee6adde09ea68a598a6ca89a8 | refs/heads/master | 2023-04-06T07:29:59.201311 | 2023-03-24T07:14:27 | 2023-03-24T07:14:27 | 202,257,755 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 245 | py | import sys
import os
import pandas as pd
files = sys.argv[1:]
for each_file in files:
filename = os.path.basename(each_file).split(".")[0]
df = pd.read_table(each_file, header=T)
print(df)
pd.melt(df, id_vars="",value_vars=[])
| [
"dushiyi319@163.com"
] | dushiyi319@163.com |
423fed9c53cec1e51570c919153df39d4fc0e9f0 | 196fda5a027b96de06c1149ccdfe51f39a633fe6 | /gen/sam/sam_sa13.py | 8fc069adefd911093e8b12e6707dee4ce8e99a68 | [] | no_license | VEAF/dcs_liberation | 884995b0174009bc518cd8b7a4f067d4356d8465 | 7dd3367203b042fdaef639016dc249ac38c42123 | refs/heads/master | 2022-12-31T18:21:28.015682 | 2020-10-03T14:50:56 | 2020-10-03T14:50:56 | 298,514,261 | 1 | 1 | null | 2020-09-29T13:14:56 | 2020-09-25T08:29:59 | null | UTF-8 | Python | false | false | 804 | py | import random
from dcs.vehicles import AirDefence, Unarmed
from gen.sam.group_generator import GroupGenerator
class SA13Generator(GroupGenerator):
"""
This generate a SA-13 group
"""
name = "SA-13 Strela Group"
price = 50
def generate(self):
self.add_unit(Unarmed.Transport_UAZ_469, "UAZ", self.position.x, self.position.y, self.heading)
self.add_unit(Unarmed.Transport_KAMAZ_43101, "TRUCK", self.position.x+40, self.position.y, self.heading)
num_launchers = random.randint(2, 3)
positions = self.get_circular_position(num_launchers, launcher_distance=120, coverage=360)
for i, position in enumerate(positions):
self.add_unit(AirDefence.SAM_SA_13_Strela_10M3_9A35M3, "LN#" + str(i), position[0], position[1], position[2]) | [
"clemguip@gmail.com"
] | clemguip@gmail.com |
9d4999492d44a3ed2e8a4eabce9ca9681a528ac9 | dee468400b97faa9926a8f80be9d400fab2c6d85 | /tests/admin_default_site/apps.py | 92743c18d4fe75dd94f411c30eb131d263f03cb3 | [
"BSD-3-Clause",
"LicenseRef-scancode-free-unknown",
"LicenseRef-scancode-unknown-license-reference",
"LicenseRef-scancode-other-permissive",
"Python-2.0"
] | permissive | claudep/django | 0a8eec4039ddb57fc3c31ae03b313fccdeb6a063 | f1a808a5025b63715d1034af2b96a6a5241d29e9 | refs/heads/master | 2023-09-01T05:08:41.544950 | 2020-04-15T11:11:13 | 2020-04-15T16:31:30 | 4,217,165 | 3 | 2 | BSD-3-Clause | 2023-08-27T16:40:58 | 2012-05-03T18:20:44 | Python | UTF-8 | Python | false | false | 217 | py | from django.contrib.admin.apps import SimpleAdminConfig
class MyCustomAdminConfig(SimpleAdminConfig):
verbose_name = 'My custom default admin site.'
default_site = 'admin_default_site.sites.CustomAdminSite'
| [
"timograham@gmail.com"
] | timograham@gmail.com |
1d1ad37da47cac0c827581e2adde3e48c18547ca | 30227ff573bcec32644fca1cca42ef4cdd612c3e | /leetcode/recursion_1/tests/test_unique_bsts.py | 917451476f84e257e0f02c115b47451e23caccae | [] | no_license | saurabh-pandey/AlgoAndDS | bc55864422c93e6c93b8432e483394f286ce8ef2 | dad11dedea9ceb4904d6c2dea801ce0172abfc81 | refs/heads/master | 2023-07-01T09:12:57.951949 | 2023-06-15T12:16:36 | 2023-06-15T12:16:36 | 88,239,921 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,443 | py | import pytest
import recursion_1.unique_bsts as prob
def toList(root):
if root is None:
return []
treeList = [root.val]
nodes_queue = [root]
while len(nodes_queue) > 0:
node = nodes_queue.pop(0)
lChild = node.left
rChild = node.right
if lChild is None and rChild is None:
continue
if lChild is not None:
treeList.append(lChild.val)
nodes_queue.append(lChild)
else:
treeList.append(None)
if rChild is not None:
treeList.append(rChild.val)
nodes_queue.append(rChild)
else:
treeList.append(None)
# Remove trailing None
while treeList[-1] == None:
treeList.pop()
return treeList
class TestUniqueBsts:
def test_example1(self):
n = 3
res = [[1,None,2,None,3],[1,None,3,2],[2,1,3],[3,1,None,None,2],[3,2,None,1]]
bsts = prob.generateTrees(n)
resBsts = []
for bst in bsts:
resBsts.append(toList(bst))
res.sort()
resBsts.sort()
assert res == resBsts
def test_example2(self):
n = 1
res = [[1]]
bsts = prob.generateTrees(n)
resBsts = []
for bst in bsts:
resBsts.append(toList(bst))
res.sort()
resBsts.sort()
assert res == resBsts
def test_my_example1(self):
n = 2
res = [[2,1], [1,None,2]]
bsts = prob.generateTrees(n)
resBsts = []
for bst in bsts:
resBsts.append(toList(bst))
res.sort()
resBsts.sort()
assert res == resBsts | [
"saurabhpandey85@gmail.com"
] | saurabhpandey85@gmail.com |
d0bc6c90deac48da27cbd82e84a00d51dd07dcb1 | 52b5773617a1b972a905de4d692540d26ff74926 | /.history/majority_20200527095149.py | 1a3b67c259e04c5f8a483733d6bd3ded0743c0d8 | [] | no_license | MaryanneNjeri/pythonModules | 56f54bf098ae58ea069bf33f11ae94fa8eedcabc | f4e56b1e4dda2349267af634a46f6b9df6686020 | refs/heads/master | 2022-12-16T02:59:19.896129 | 2020-09-11T12:05:22 | 2020-09-11T12:05:22 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 205 | py | def majority (nums):
length = len(nums)/2
numbers ={}
count = 0
for n in nums:
if n in numbers:
numbers
print(length)
majority([3, 3, 4, 2, 4, 4, 2, 4, 4]) | [
"mary.jereh@gmail.com"
] | mary.jereh@gmail.com |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.