Dataset columns (⌀ marks a nullable column):
hexsha: string (length 40)
size: int64 (4 to 1.02M)
ext: string (8 classes)
lang: string (1 class)
max_stars_repo_path: string (length 4 to 209)
max_stars_repo_name: string (length 5 to 121)
max_stars_repo_head_hexsha: string (length 40)
max_stars_repo_licenses: list (length 1 to 10)
max_stars_count: int64 (1 to 191k) ⌀
max_stars_repo_stars_event_min_datetime: string (length 24) ⌀
max_stars_repo_stars_event_max_datetime: string (length 24) ⌀
max_issues_repo_path: string (length 4 to 209)
max_issues_repo_name: string (length 5 to 121)
max_issues_repo_head_hexsha: string (length 40)
max_issues_repo_licenses: list (length 1 to 10)
max_issues_count: int64 (1 to 67k) ⌀
max_issues_repo_issues_event_min_datetime: string (length 24) ⌀
max_issues_repo_issues_event_max_datetime: string (length 24) ⌀
max_forks_repo_path: string (length 4 to 209)
max_forks_repo_name: string (length 5 to 121)
max_forks_repo_head_hexsha: string (length 40)
max_forks_repo_licenses: list (length 1 to 10)
max_forks_count: int64 (1 to 105k) ⌀
max_forks_repo_forks_event_min_datetime: string (length 24) ⌀
max_forks_repo_forks_event_max_datetime: string (length 24) ⌀
content: string (length 4 to 1.02M)
avg_line_length: float64 (1.07 to 66.1k)
max_line_length: int64 (4 to 266k)
alphanum_fraction: float64 (0.01 to 1)

hexsha: 346b106a0fa0d57f553ecf7c90cd73039ab05ddf | size: 20,184 | ext: py | lang: Python
max_stars: pajbot/models/action.py | tolekk/pajbot | c239f0c966141dfd39db46e081fff4f3a3a82ef0 | ["MIT"] | count: null | events: null / null
max_issues: pajbot/models/action.py | tolekk/pajbot | c239f0c966141dfd39db46e081fff4f3a3a82ef0 | ["MIT"] | count: null | events: null / null
max_forks: pajbot/models/action.py | tolekk/pajbot | c239f0c966141dfd39db46e081fff4f3a3a82ef0 | ["MIT"] | count: null | events: null / null
import collections
import json
import logging
import sys
import irc
import regex as re
import requests
from pajbot.constants import VERSION
from pajbot.managers.schedule import ScheduleManager
log = logging.getLogger(__name__)
class ActionParser:
bot = None
@staticmethod
def parse(raw_data=None, data=None, command=""):
try:
from pajbot.userdispatch import UserDispatch
Dispatch = UserDispatch
except ImportError:
from pajbot.dispatch import Dispatch
except:
log.exception("Something went wrong while attemting to import Dispatch, this should never happen")
sys.exit(1)
if not data:
data = json.loads(raw_data)
if data["type"] == "say":
action = SayAction(data["message"], ActionParser.bot)
elif data["type"] == "me":
action = MeAction(data["message"], ActionParser.bot)
elif data["type"] == "whisper":
action = WhisperAction(data["message"], ActionParser.bot)
elif data["type"] == "reply":
action = ReplyAction(data["message"], ActionParser.bot)
elif data["type"] == "func":
try:
action = FuncAction(getattr(Dispatch, data["cb"]))
except AttributeError as e:
log.error(f'AttributeError caught when parsing action for action "{command}": {e}')
return None
elif data["type"] == "multi":
action = MultiAction(data["args"], data["default"])
else:
raise Exception(f"Unknown action type: {data['type']}")
return action
def apply_substitutions(text, substitutions, bot, extra):
for needle, sub in substitutions.items():
if sub.key and sub.argument:
param = sub.key
extra["argument"] = MessageAction.get_argument_value(extra["message"], sub.argument - 1)
elif sub.key:
param = sub.key
elif sub.argument:
param = MessageAction.get_argument_value(extra["message"], sub.argument - 1)
else:
log.error("Unknown param for response.")
continue
value = sub.cb(param, extra)
if value is None:
return None
try:
for f in sub.filters:
value = bot.apply_filter(value, f)
except:
log.exception("Exception caught in filter application")
if value is None:
return None
text = text.replace(needle, str(value))
return text
class IfSubstitution:
def __call__(self, key, extra={}):
if self.sub.key is None:
msg = MessageAction.get_argument_value(extra.get("message", ""), self.sub.argument - 1)
if msg:
return self.get_true_response(extra)
return self.get_false_response(extra)
res = self.sub.cb(self.sub.key, extra)
if res:
return self.get_true_response(extra)
return self.get_false_response(extra)
def get_true_response(self, extra):
return apply_substitutions(self.true_response, self.true_subs, self.bot, extra)
def get_false_response(self, extra):
return apply_substitutions(self.false_response, self.false_subs, self.bot, extra)
def __init__(self, key, arguments, bot):
self.bot = bot
subs = get_substitutions(key, bot)
if len(subs) == 1:
self.sub = list(subs.values())[0]
else:
subs = get_argument_substitutions(key)
if len(subs) == 1:
self.sub = subs[0]
else:
self.sub = None
self.true_response = arguments[0][2:-1] if arguments else "Yes"
self.false_response = arguments[1][2:-1] if len(arguments) > 1 else "No"
self.true_subs = get_substitutions(self.true_response, bot)
self.false_subs = get_substitutions(self.false_response, bot)
class Substitution:
argument_substitution_regex = re.compile(r"\$\((\d+)\)")
substitution_regex = re.compile(
r'\$\(([a-z_]+)(\;[0-9]+)?(\:[\w\.\/ -]+|\:\$\([\w_:;\._\/ -]+\))?(\|[\w]+(\([\w%:/ +-]+\))?)*(\,[\'"]{1}[\w \|$;_\-:()\.]+[\'"]{1}){0,2}\)'
)
# https://stackoverflow.com/a/7109208
urlfetch_substitution_regex = re.compile(r"\$\(urlfetch ([A-Za-z0-9\-._~:/?#\[\]@!$%&\'()*+,;=]+)\)")
urlfetch_substitution_regex_all = re.compile(r"\$\(urlfetch (.+?)\)")
def __init__(self, cb, needle, key=None, argument=None, filters=[]):
self.cb = cb
self.key = key
self.argument = argument
self.filters = filters
self.needle = needle
class SubstitutionFilter:
def __init__(self, name, arguments):
self.name = name
self.arguments = arguments
class BaseAction:
type = None
subtype = None
def reset(self):
pass
class MultiAction(BaseAction):
type = "multi"
def __init__(self, args, default=None, fallback=None):
from pajbot.models.command import Command
self.commands = {}
self.default = default
self.fallback = fallback
for command in args:
cmd = Command.from_json(command)
for alias in command["command"].split("|"):
if alias not in self.commands:
self.commands[alias] = cmd
else:
log.error(f"Alias {alias} for this multiaction is already in use.")
import copy
self.original_commands = copy.copy(self.commands)
def reset(self):
import copy
self.commands = copy.copy(self.original_commands)
def __iadd__(self, other):
if other is not None and other.type == "multi":
self.commands.update(other.commands)
return self
@classmethod
def ready_built(cls, commands, default=None, fallback=None):
""" Useful if you already have a dictionary
with commands pre-built.
"""
multiaction = cls(args=[], default=default, fallback=fallback)
multiaction.commands = commands
import copy
multiaction.original_commands = copy.copy(commands)
return multiaction
def run(self, bot, source, message, event={}, args={}):
""" If there is more text sent to the multicommand after the
initial alias, we _ALWAYS_ assume it's trying the subaction command.
If the extra text was not a valid command, we try to run the fallback command.
In case there's no extra text sent, we will try to run the default command.
"""
cmd = None
if message:
msg_lower_parts = message.lower().split(" ")
command = msg_lower_parts[0]
cmd = self.commands.get(command, None)
extra_msg = " ".join(message.split(" ")[1:])
if cmd is None and self.fallback:
cmd = self.commands.get(self.fallback, None)
extra_msg = message
elif self.default:
command = self.default
cmd = self.commands.get(command, None)
extra_msg = None
if cmd:
if source.level >= cmd.level:
return cmd.run(bot, source, extra_msg, event, args)
log.info(f"User {source} tried running a sub-command he had no access to ({command}).")
return None
class FuncAction(BaseAction):
type = "func"
def __init__(self, cb):
self.cb = cb
def run(self, bot, source, message, event={}, args={}):
try:
return self.cb(bot, source, message, event, args)
except:
log.exception("Uncaught exception in FuncAction")
class RawFuncAction(BaseAction):
type = "rawfunc"
def __init__(self, cb):
self.cb = cb
def run(self, bot, source, message, event={}, args={}):
return self.cb(bot=bot, source=source, message=message, event=event, args=args)
def get_argument_substitutions(string):
"""
Returns a list of `Substitution` objects that are found in the passed `string`.
Will not return multiple `Substitution` objects for the same number.
This means string "$(1) $(1) $(2)" will only return two Substitutions.
"""
argument_substitutions = []
for sub_key in Substitution.argument_substitution_regex.finditer(string):
needle = sub_key.group(0)
argument_num = int(sub_key.group(1))
found = False
for sub in argument_substitutions:
if sub.argument == argument_num:
# We already matched this argument variable
found = True
break
if found:
continue
argument_substitutions.append(Substitution(None, needle=needle, argument=argument_num))
return argument_substitutions
def get_substitution_arguments(sub_key):
sub_string = sub_key.group(0)
path = sub_key.group(1)
argument = sub_key.group(2)
if argument is not None:
argument = int(argument[1:])
key = sub_key.group(3)
if key is not None:
key = key[1:]
matched_filters = sub_key.captures(4)
matched_filter_arguments = sub_key.captures(5)
filters = []
filter_argument_index = 0
for f in matched_filters:
f = f[1:]
filter_arguments = []
if "(" in f:
f = f[: -len(matched_filter_arguments[filter_argument_index])]
filter_arguments = [matched_filter_arguments[filter_argument_index][1:-1]]
filter_argument_index += 1
f = SubstitutionFilter(f, filter_arguments)
filters.append(f)
if_arguments = sub_key.captures(6)
return sub_string, path, argument, key, filters, if_arguments
def get_substitutions(string, bot):
"""
Returns a dictionary of `Substitution` objects that are found in the passed `string`.
Will not return multiple `Substitution` objects for the same string.
This means "You have $(source:points) points xD $(source:points)" only returns one Substitution.
"""
substitutions = collections.OrderedDict()
for sub_key in Substitution.substitution_regex.finditer(string):
sub_string, path, argument, key, filters, if_arguments = get_substitution_arguments(sub_key)
if sub_string in substitutions:
# We already matched this variable
continue
try:
if path == "if":
if if_arguments:
if_substitution = IfSubstitution(key, if_arguments, bot)
if if_substitution.sub is None:
continue
sub = Substitution(if_substitution, needle=sub_string, key=key, argument=argument, filters=filters)
substitutions[sub_string] = sub
except:
log.exception("BabyRage")
method_mapping = {}
try:
method_mapping["kvi"] = bot.get_kvi_value
method_mapping["tb"] = bot.get_value
method_mapping["lasttweet"] = bot.get_last_tweet
# "etm" is legacy
method_mapping["etm"] = bot.get_emote_epm
method_mapping["epm"] = bot.get_emote_epm
method_mapping["etmrecord"] = bot.get_emote_epm_record
method_mapping["epmrecord"] = bot.get_emote_epm_record
method_mapping["ecount"] = bot.get_emote_count
method_mapping["source"] = bot.get_source_value
method_mapping["user"] = bot.get_user_value
method_mapping["usersource"] = bot.get_usersource_value
method_mapping["time"] = bot.get_time_value
method_mapping["curdeck"] = bot.decks.action_get_curdeck
method_mapping["stream"] = bot.stream_manager.get_stream_value
method_mapping["current_stream"] = bot.stream_manager.get_current_stream_value
method_mapping["last_stream"] = bot.stream_manager.get_last_stream_value
method_mapping["current_song"] = bot.get_current_song_value
method_mapping["args"] = bot.get_args_value
method_mapping["strictargs"] = bot.get_strictargs_value
method_mapping["command"] = bot.get_command_value
except AttributeError:
pass
for sub_key in Substitution.substitution_regex.finditer(string):
sub_string, path, argument, key, filters, if_arguments = get_substitution_arguments(sub_key)
if sub_string in substitutions:
# We already matched this variable
continue
if path in method_mapping:
sub = Substitution(method_mapping[path], needle=sub_string, key=key, argument=argument, filters=filters)
substitutions[sub_string] = sub
return substitutions
def get_urlfetch_substitutions(string, all=False):
substitutions = {}
if all:
r = Substitution.urlfetch_substitution_regex_all
else:
r = Substitution.urlfetch_substitution_regex
for sub_key in r.finditer(string):
substitutions[sub_key.group(0)] = sub_key.group(1)
return substitutions
def is_message_good(bot, message, extra):
# this is imported here to avoid circular imports
# (Circular import was command.py importing this file)
from pajbot.modules.ascii import AsciiProtectionModule
checks = {
"banphrase": lambda: bot.banphrase_manager.check_message(message, extra["source"]),
"ascii": lambda: AsciiProtectionModule.check_message(message),
"massping": lambda: bot.module_manager.get_module("massping").check_message(message, extra["source"]),
}
for check_name, check_fn in checks.items():
# Make sure the module is enabled
if check_name not in bot.module_manager:
continue
# apply the check fn
# only a return value of exactly False means the check passed; anything else fails the message
if check_fn() is not False:
log.info(f'Not sending message "{message}" because check "{check_name}" failed.')
return False
return True
class MessageAction(BaseAction):
type = "message"
def __init__(self, response, bot):
self.response = response
if bot:
self.argument_subs = get_argument_substitutions(self.response)
self.subs = get_substitutions(self.response, bot)
self.num_urlfetch_subs = len(get_urlfetch_substitutions(self.response, all=True))
else:
self.argument_subs = []
self.subs = {}
self.num_urlfetch_subs = 0
@staticmethod
def get_argument_value(message, index):
if not message:
return ""
msg_parts = message.split(" ")
try:
return msg_parts[index]
except:
pass
return ""
def get_response(self, bot, extra):
resp = self.response
resp = apply_substitutions(resp, self.subs, bot, extra)
if resp is None:
return None
for sub in self.argument_subs:
needle = sub.needle
value = str(MessageAction.get_argument_value(extra["message"], sub.argument - 1))
resp = resp.replace(needle, value)
log.debug(f"Replacing {needle} with {value}")
if "command" in extra and extra["command"].run_through_banphrases is True and "source" in extra:
if not is_message_good(bot, resp, extra):
return None
return resp
@staticmethod
def get_extra_data(source, message, args):
return {"source": source, "message": message, **args}
def run(self, bot, source, message, event={}, args={}):
raise NotImplementedError("Please implement the run method.")
def urlfetch_msg(method, message, num_urlfetch_subs, bot, extra={}, args=[], kwargs={}):
urlfetch_subs = get_urlfetch_substitutions(message)
if len(urlfetch_subs) > num_urlfetch_subs:
log.error(f"HIJACK ATTEMPT {message}")
return False
for needle, url in urlfetch_subs.items():
try:
headers = {
"Accept": "text/plain",
"Accept-Language": "en-US, en;q=0.9, *;q=0.5",
"User-Agent": f"pajbot1/{VERSION} ({bot.nickname})",
}
r = requests.get(url, allow_redirects=True, headers=headers)
r.raise_for_status()
value = r.text.strip().replace("\n", "").replace("\r", "")[:400]
except:
return False
message = message.replace(needle, value)
if "command" in extra and extra["command"].run_through_banphrases is True and "source" in extra:
if not is_message_good(bot, message, extra):
return None
args.append(message)
method(*args, **kwargs)
class SayAction(MessageAction):
subtype = "say"
def run(self, bot, source, message, event={}, args={}):
extra = self.get_extra_data(source, message, args)
resp = self.get_response(bot, extra)
if not resp:
return False
if self.num_urlfetch_subs == 0:
return bot.say(resp)
return ScheduleManager.execute_now(
urlfetch_msg,
args=[],
kwargs={
"args": [],
"kwargs": {},
"method": bot.say,
"bot": bot,
"extra": extra,
"message": resp,
"num_urlfetch_subs": self.num_urlfetch_subs,
},
)
class MeAction(MessageAction):
subtype = "me"
def run(self, bot, source, message, event={}, args={}):
extra = self.get_extra_data(source, message, args)
resp = self.get_response(bot, extra)
if not resp:
return False
if self.num_urlfetch_subs == 0:
return bot.me(resp)
return ScheduleManager.execute_now(
urlfetch_msg,
args=[],
kwargs={
"args": [],
"kwargs": {},
"method": bot.me,
"bot": bot,
"extra": extra,
"message": resp,
"num_urlfetch_subs": self.num_urlfetch_subs,
},
)
class WhisperAction(MessageAction):
subtype = "whisper"
def run(self, bot, source, message, event={}, args={}):
extra = self.get_extra_data(source, message, args)
resp = self.get_response(bot, extra)
if not resp:
return False
if self.num_urlfetch_subs == 0:
return bot.whisper(source, resp)
return ScheduleManager.execute_now(
urlfetch_msg,
args=[],
kwargs={
"args": [source],
"kwargs": {},
"method": bot.whisper,
"bot": bot,
"extra": extra,
"message": resp,
"num_urlfetch_subs": self.num_urlfetch_subs,
},
)
class ReplyAction(MessageAction):
subtype = "reply"
def run(self, bot, source, message, event={}, args={}):
extra = self.get_extra_data(source, message, args)
resp = self.get_response(bot, extra)
if not resp:
return False
if irc.client.is_channel(event.target):
if self.num_urlfetch_subs == 0:
return bot.say(resp, channel=event.target)
return ScheduleManager.execute_now(
urlfetch_msg,
args=[],
kwargs={
"args": [],
"kwargs": {"channel": event.target},
"method": bot.say,
"bot": bot,
"extra": extra,
"message": resp,
"num_urlfetch_subs": self.num_urlfetch_subs,
},
)
if self.num_urlfetch_subs == 0:
return bot.whisper(source, resp)
return ScheduleManager.execute_now(
urlfetch_msg,
args=[],
kwargs={
"args": [source],
"kwargs": {},
"method": bot.whisper,
"bot": bot,
"extra": extra,
"message": resp,
"num_urlfetch_subs": self.num_urlfetch_subs,
},
)
avg_line_length: 32.2944 | max_line_length: 148 | alphanum_fraction: 0.589625

hexsha: a480f8fb01cb7a38a16da71055fcfb6db019f94f | size: 865 | ext: py | lang: Python
max_stars: tests/test_windows_warning.py | RenskeW/cwltool | 8ef515037de411abd2f84b569ad4d4a4f7a2c7a0 | ["Apache-2.0"] | count: 289 | events: 2015-10-07T16:27:32.000Z / 2022-03-25T23:32:36.000Z
max_issues: tests/test_windows_warning.py | RenskeW/cwltool | 8ef515037de411abd2f84b569ad4d4a4f7a2c7a0 | ["Apache-2.0"] | count: 1,436 | events: 2015-10-09T13:31:46.000Z / 2022-03-31T13:36:45.000Z
max_forks: tests/test_windows_warning.py | RenskeW/cwltool | 8ef515037de411abd2f84b569ad4d4a4f7a2c7a0 | ["Apache-2.0"] | count: 230 | events: 2015-10-11T17:33:32.000Z / 2022-03-25T22:55:43.000Z
"""Test user experience running on MS Windows."""
import os
import pytest
from cwltool import main
# Can't be just "import cwltool ; … cwltool.main.windows_check()"
# needs a direct import to avoid path traversal after os.name is set to "nt"
def test_windows_warning(monkeypatch: pytest.MonkeyPatch) -> None:
"""Confirm that the windows warning is given."""
with pytest.warns(UserWarning, match=r"Windows Subsystem for Linux 2"):
# would normally just use the MonkeyPatch object directly
# but if we don't use a context then os.name being "nt" causes problems
# for pytest on non-Windows systems. So the context unravels the change
# to os.name quickly, and then pytest will check for the desired warning
with monkeypatch.context() as m:
m.setattr(os, "name", "nt")
main.windows_check()
avg_line_length: 37.608696 | max_line_length: 80 | alphanum_fraction: 0.695954

hexsha: 35676994df332ae98b4ecb08fc76231b614f1610 | size: 9,592 | ext: py | lang: Python
max_stars: train/optimization_adafactor.py | ksjae/gpt2-ml | 240bfc8e4f113425dc454f2ff47d963e0558f876 | ["Apache-2.0"] | count: null | events: null / null
max_issues: train/optimization_adafactor.py | ksjae/gpt2-ml | 240bfc8e4f113425dc454f2ff47d963e0558f876 | ["Apache-2.0"] | count: null | events: null / null
max_forks: train/optimization_adafactor.py | ksjae/gpt2-ml | 240bfc8e4f113425dc454f2ff47d963e0558f876 | ["Apache-2.0"] | count: null | events: null / null
# Original work Copyright 2018 The Google AI Language Team Authors.
# Modified work Copyright 2019 Rowan Zellers
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import re
import tensorflow as tf
from train.utils import get_shape_list
def create_optimizer(loss, init_lr, num_train_steps, num_warmup_steps, use_tpu):
"""Creates an optimizer training op."""
global_step = tf.compat.v1.train.get_or_create_global_step()
learning_rate = tf.constant(value=init_lr, shape=[], dtype=tf.float32)
# Implements linear decay of the learning rate.
learning_rate = tf.compat.v1.train.polynomial_decay(
learning_rate,
global_step,
num_train_steps,
end_learning_rate=0.0,
power=1.0,
cycle=False)
# Implements linear warmup. I.e., if global_step < num_warmup_steps, the
# learning rate will be `global_step/num_warmup_steps * init_lr`.
if num_warmup_steps:
global_steps_int = tf.cast(global_step, tf.int32)
warmup_steps_int = tf.constant(num_warmup_steps, dtype=tf.int32)
global_steps_float = tf.cast(global_steps_int, tf.float32)
warmup_steps_float = tf.cast(warmup_steps_int, tf.float32)
warmup_percent_done = global_steps_float / warmup_steps_float
warmup_learning_rate = init_lr * warmup_percent_done
is_warmup = tf.cast(global_steps_int < warmup_steps_int, tf.float32)
learning_rate = (
(1.0 - is_warmup) * learning_rate + is_warmup * warmup_learning_rate)
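# Worked example of the warmup formula above (assumed values, not from this config):
# with init_lr = 1e-4 and num_warmup_steps = 1000, at global_step = 100 the warmup LR is
# 100 / 1000 * 1e-4 = 1e-5; once global_step >= 1000, is_warmup becomes 0 and the
# polynomial-decay schedule defined above takes over.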
# It is recommended that you use this optimizer for fine tuning, since this
# is how the model was trained (note that the Adam m/v variables are NOT
# loaded from init_checkpoint.)
optimizer = AdaFactorOptimizer(
learning_rate=learning_rate,
weight_decay_rate=0.01,
beta_1=0.9,
beta_2=0.999,
epsilon=1e-6,
exclude_from_weight_decay=["LayerNorm", "layer_norm", "bias"])
if use_tpu:
optimizer = tf.compat.v1.tpu.CrossShardOptimizer(optimizer)
tvars = tf.compat.v1.trainable_variables()
grads = tf.gradients(loss, tvars)
# You could do this, but instead we don't because a) it's slow and b) we already did the 'update clipping'
# (grads, _) = tf.clip_by_global_norm(grads, clip_norm=1.0)
train_op = optimizer.apply_gradients(
zip(grads, tvars), global_step=global_step)
# Normally the global step update is done inside of `apply_gradients`.
# However, `AdaFactorOptimizer` doesn't do this. But if you use
# a different optimizer, you should probably take this line out.
new_global_step = global_step + 1
train_op = tf.group(train_op, [global_step.assign(new_global_step)])
train_metrics = {
'learning_rate': learning_rate,
'minibatch_loss': loss,
# 'minibatch_ppl': tf.math.exp(loss),
}
return train_op, train_metrics
class AdaFactorOptimizer(tf.compat.v1.train.Optimizer):
"""here's the optimizer we'll use"""
def __init__(self,
learning_rate,
weight_decay_rate=0.0,
beta_1=0.9,
beta_2=0.999,
epsilon=1e-6,
exclude_from_weight_decay=None,
clipping_rate=1.0,
name="AdaFactorOptimizer"):
"""Constructs a AdaFactorOptimizer."""
super(AdaFactorOptimizer, self).__init__(False, name)
self.learning_rate = learning_rate
self.weight_decay_rate = weight_decay_rate
self.beta_1 = beta_1
self.beta_2 = beta_2
self.epsilon = epsilon
self.epsilon1 = 1e-30
self.epsilon2 = 0.001
self.clipping_rate = clipping_rate
self.exclude_from_weight_decay = exclude_from_weight_decay
self.use_locking = False
def _use_factored(self, shape):
return len(shape) >= 2
def _parameter_scale(self, var):
"""Estimate the scale of the parameters from the current values.
We include a minimum value of 0.001 to give it a chance to escape 0
if it was zero-initialized.
Instead of using the value, we could impute the scale from the shape,
as initializers do.
Args:
var: a variable or Tensor.
Returns:
a Scalar
"""
return tf.maximum(reduce_rms(var), self.epsilon2)
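# Illustrative values for the docstring above (assumed, not taken from a real run):
# a zero-initialized variable has reduce_rms(var) == 0.0, so the scale is floored at
# epsilon2 = 0.001; a variable whose entries are all 0.05 returns a scale of 0.05.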
def apply_gradients(self, grads_and_vars, global_step=None, name=None):
"""See base class."""
assignments = []
for (grad, param) in grads_and_vars:
if grad is None or param is None:
continue
param_name = self._get_variable_name(param.name)
shape_list = get_shape_list(param, expected_rank=[1, 2])
# decay_rate = 1 - tf.pow(tf.cast(tf.train.get_or_create_global_step(), tf.float32) + 1.0, -0.8)
decay_rate = self.beta_2
grad_squared = tf.square(grad) + self.epsilon1
update_scale = self.learning_rate
# update_scale = self.learning_rate * tf.cast(self._parameter_scale(param), dtype=tf.float32)
# HACK: Make things dependent on grad.
# This confounds the XLA rewriter and keeps it from fusing computations
# across different variables. This fusion is a bad for HBM usage, since
# it causes the gradients to persist in memory.
grad_squared_mean = tf.reduce_mean(grad_squared)
decay_rate += grad_squared_mean * 1e-30
update_scale += grad_squared_mean * 1e-30
# END HACK
if self._use_factored(shape_list):
num_rows, num_columns = shape_list
vr = tf.compat.v1.get_variable(
name=param_name + "/adafactor_vr",
shape=[num_rows],
dtype=tf.float32,
trainable=False,
initializer=tf.zeros_initializer())
vc = tf.compat.v1.get_variable(
name=param_name + "/adafactor_vc",
shape=[num_columns],
dtype=tf.float32,
trainable=False,
initializer=tf.zeros_initializer())
next_vr = decay_rate * vr + (1 - decay_rate) * tf.reduce_mean(grad_squared, 1)
next_vc = decay_rate * vc + (1 - decay_rate) * tf.reduce_mean(grad_squared, 0)
long_term_mean = tf.reduce_mean(next_vr, -1, keepdims=True)
r_factor = tf.rsqrt(next_vr / long_term_mean + self.epsilon1)
c_factor = tf.rsqrt(next_vc + self.epsilon1)
update = grad * tf.expand_dims(r_factor, -1) * tf.expand_dims(c_factor, -2)
assignments.append(vr.assign(next_vr, use_locking=self.use_locking))
assignments.append(vc.assign(next_vc, use_locking=self.use_locking))
else:
v = tf.compat.v1.get_variable(
name=param_name + "/adafactor_v",
shape=shape_list,
dtype=tf.float32,
trainable=False,
initializer=tf.zeros_initializer())
next_v = decay_rate * v + (1 - decay_rate) * grad_squared
assignments.append(v.assign(next_v, use_locking=self.use_locking))
update = grad * tf.rsqrt(next_v + self.epsilon1)
clipping_denom = tf.maximum(1.0, reduce_rms(update) / self.clipping_rate)
update /= clipping_denom
# Do weight decay
# Just adding the square of the weights to the loss function is *not*
# the correct way of using L2 regularization/weight decay with Adam,
# since that will interact with the m and v parameters in strange ways.
#
# Instead we want to decay the weights in a manner that doesn't interact
# with the m/v parameters. This is equivalent to adding the square
# of the weights to the loss with plain (non-momentum) SGD.
if self._do_use_weight_decay(param_name):
update += self.weight_decay_rate * param
update_with_lr = update_scale * update
next_param = param - update_with_lr
assignments.append(param.assign(next_param, use_locking=self.use_locking))
return tf.group(*assignments, name=name)
def _do_use_weight_decay(self, param_name):
"""Whether to use L2 weight decay for `param_name`."""
if not self.weight_decay_rate:
return False
if self.exclude_from_weight_decay:
for r in self.exclude_from_weight_decay:
if re.search(r, param_name) is not None:
return False
return True
def _get_variable_name(self, param_name):
"""Get the variable name from the tensor name."""
m = re.match("^(.*):\\d+$", param_name)
if m is not None:
param_name = m.group(1)
return param_name
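# Example of the regex above (hypothetical tensor name): "layer_0/attention/kernel:0" -> "layer_0/attention/kernel".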
def reduce_rms(x):
return tf.sqrt(tf.reduce_mean(tf.square(x)))
avg_line_length: 40.817021 | max_line_length: 110 | alphanum_fraction: 0.630942

hexsha: b3e3f75b60ed84d314078b594ae6d9379f5018a1 | size: 3,088 | ext: py | lang: Python
max_stars: src/MainHandler.py | trapwired/EmailYoutubeDownload | 1eddea127877df708bdee52ccda3c00fef1eec58 | ["MIT"] | count: 2 | events: 2021-07-04T22:12:06.000Z / 2021-08-10T14:12:29.000Z
max_issues: src/MainHandler.py | trapwired/EmailYoutubeDownload | 1eddea127877df708bdee52ccda3c00fef1eec58 | ["MIT"] | count: null | events: null / null
max_forks: src/MainHandler.py | trapwired/EmailYoutubeDownload | 1eddea127877df708bdee52ccda3c00fef1eec58 | ["MIT"] | count: null | events: null / null
import json
import os
import shutil
import sys
import time
sys.path.insert(0, '../src')
from EmailHandler import EmailHandler
from DownloadHandler import DownloadHandler
EMAIL_MAX_SIZE = 25 # maximum send size in MegaBytes
MAX_VIDEO_LENGTH = 10 # maximum length of videos to download in minutes
MAX_RETRY = 10 # maximum number of times to retry on error
class MainHandler(object):
def __init__(self, secrets_: dict, email_handler_: EmailHandler, download_handler_: DownloadHandler):
self.secrets = secrets_
self.email_handler = email_handler_
self.download_handler = download_handler_
def start(self, retry):
while True:
try:
emails = self.email_handler.get_all_emails()
except Exception as e:
self.email_handler.send_error('Error while getting emails.', e)
# for HTTPError 403: try |youtube-dl --rm-cache-dir|
else:
if len(emails) > 0:
print('you\'ve got mail')
# loop over all Emails, there may be plenty
for email in emails:
try:
folder_name = self.download_handler.download_videos(email.youtube_links)
except Exception as e:
self.email_handler.send_error('Error while downloading youtube video, Restarting.', e)
if retry < MAX_RETRY:
self.start(retry + 1)
else:
try:
# send answer, attach all mails in folder folder_name
self.email_handler.send_response(email, folder_name)
except Exception as e:
self.email_handler.send_error('Error while sending response emails.', e, email, folder_name)
if retry < MAX_RETRY:
self.start(retry + 1)
break
else:
# delete folder
shutil.rmtree(folder_name)
# delete email from Inbox / move to folder
self.email_handler.delete_successful(email)
self.email_handler.imap_connection.expunge()
time.sleep(10)
def get_secrets(path):
with open(path) as f:
return json.load(f)
def main():
# root path
path = '/'.join((os.path.abspath(__file__).replace('\\', '/')).split('/')[:-2])
secrets = get_secrets(os.path.join(path, "secrets.json"))
# initialize email_handler
email_handler = EmailHandler(secrets, EMAIL_MAX_SIZE)
# initialize DownloadHandler
download_handler = DownloadHandler(path, MAX_VIDEO_LENGTH)
# start
main_handler = MainHandler(secrets, email_handler, download_handler)
main_handler.start(0)
if __name__ == "__main__":
main()
avg_line_length: 37.204819 | max_line_length: 124 | alphanum_fraction: 0.551166

hexsha: 8432ceae5440aeca80c25fc3db4955fda8f6156b | size: 934 | ext: py | lang: Python
max_stars: src/03_Code_Structure/05_Your_Challenge_-_Problem/challenge-problem.py | MilovanTomasevic/Clean-Code | f1ac9280026e2c21dd8b68e746675ed2c92b52e3 | ["MIT"] | count: null | events: null / null
max_issues: src/03_Code_Structure/05_Your_Challenge_-_Problem/challenge-problem.py | MilovanTomasevic/Clean-Code | f1ac9280026e2c21dd8b68e746675ed2c92b52e3 | ["MIT"] | count: null | events: null / null
max_forks: src/03_Code_Structure/05_Your_Challenge_-_Problem/challenge-problem.py | MilovanTomasevic/Clean-Code | f1ac9280026e2c21dd8b68e746675ed2c92b52e3 | ["MIT"] | count: null | events: null / null
# (c) Maximilian Schwarzmüller / Academind GmbH
# *********
# Imports
# *********
from os import path, makedirs
from pathlib import Path
# *********
# Main
# *********
# A class which allows us to create DiskStorage instances
class DiskStorage:
def __init__(self, directory_name):
self.storage_directory = directory_name
def get_directory_path(self):
return Path(self.storage_directory)
# This must be called before a file is inserted
def create_directory(self):
if (not path.exists(self.get_directory_path())):
makedirs(self.storage_directory)
# Warning: Directory must exist in advance
def insert_file(self, file_name, content):
file = open(self.get_directory_path() / file_name, 'w')
file.write(content)
file.close()
# Todo: Add proper error handling
log_storage = DiskStorage('logs')
log_storage.insert_file('test.txt', 'Test')
avg_line_length: 24.578947 | max_line_length: 63 | alphanum_fraction: 0.663812

hexsha: 224d5bf7b7cc5c11987aad2bd89f2ecbcaa8a98e | size: 2,926 | ext: py | lang: Python
max_stars: homeassistant/components/hunterdouglas_powerview/scene.py | andersop91/core | 0e0ef0aa17073609eae7c974cf4c73306b7c414b | ["Apache-2.0"] | count: 4 | events: 2021-07-11T09:11:00.000Z / 2022-02-27T14:43:50.000Z
max_issues: homeassistant/components/hunterdouglas_powerview/scene.py | andersop91/core | 0e0ef0aa17073609eae7c974cf4c73306b7c414b | ["Apache-2.0"] | count: 277 | events: 2021-10-04T06:39:33.000Z / 2021-12-28T22:04:17.000Z
max_forks: homeassistant/components/hunterdouglas_powerview/scene.py | andersop91/core | 0e0ef0aa17073609eae7c974cf4c73306b7c414b | ["Apache-2.0"] | count: 3 | events: 2021-11-14T13:29:33.000Z / 2021-12-27T17:05:22.000Z
"""Support for Powerview scenes from a Powerview hub."""
from __future__ import annotations
from typing import Any
from aiopvapi.resources.scene import Scene as PvScene
import voluptuous as vol
from homeassistant.components.scene import Scene
from homeassistant.config_entries import SOURCE_IMPORT, ConfigEntry
from homeassistant.const import CONF_HOST, CONF_PLATFORM
from homeassistant.core import HomeAssistant
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
from .const import (
COORDINATOR,
DEVICE_INFO,
DOMAIN,
HUB_ADDRESS,
PV_API,
PV_ROOM_DATA,
PV_SCENE_DATA,
ROOM_NAME_UNICODE,
STATE_ATTRIBUTE_ROOM_NAME,
)
from .entity import HDEntity
PLATFORM_SCHEMA = vol.Schema(
{vol.Required(CONF_PLATFORM): DOMAIN, vol.Required(HUB_ADDRESS): cv.string}
)
async def async_setup_platform(
hass: HomeAssistant,
config: ConfigType,
async_add_entities: AddEntitiesCallback,
discovery_info: DiscoveryInfoType | None = None,
) -> None:
"""Import platform from yaml."""
hass.async_create_task(
hass.config_entries.flow.async_init(
DOMAIN,
context={"source": SOURCE_IMPORT},
data={CONF_HOST: config[HUB_ADDRESS]},
)
)
async def async_setup_entry(
hass: HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback
) -> None:
"""Set up powerview scene entries."""
pv_data = hass.data[DOMAIN][entry.entry_id]
room_data = pv_data[PV_ROOM_DATA]
scene_data = pv_data[PV_SCENE_DATA]
pv_request = pv_data[PV_API]
coordinator = pv_data[COORDINATOR]
device_info = pv_data[DEVICE_INFO]
pvscenes = []
for raw_scene in scene_data.values():
scene = PvScene(raw_scene, pv_request)
room_name = room_data.get(scene.room_id, {}).get(ROOM_NAME_UNICODE, "")
pvscenes.append(PowerViewScene(coordinator, device_info, room_name, scene))
async_add_entities(pvscenes)
class PowerViewScene(HDEntity, Scene):
"""Representation of a Powerview scene."""
def __init__(self, coordinator, device_info, room_name, scene):
"""Initialize the scene."""
super().__init__(coordinator, device_info, room_name, scene.id)
self._scene = scene
@property
def name(self):
"""Return the name of the scene."""
return self._scene.name
@property
def extra_state_attributes(self):
"""Return the state attributes."""
return {STATE_ATTRIBUTE_ROOM_NAME: self._room_name}
@property
def icon(self):
"""Icon to use in the frontend."""
return "mdi:blinds"
async def async_activate(self, **kwargs: Any) -> None:
"""Activate scene. Try to get entities into requested state."""
await self._scene.activate()
avg_line_length: 29.857143 | max_line_length: 84 | alphanum_fraction: 0.71121

hexsha: fe81d61064af77258b00088ce0a26965d4ec2aa8 | size: 1,665 | ext: py | lang: Python
max_stars: e3nn/non_linearities/gated_block_parity.py | zizai/e3nn | 3efa8d7e110d23410d0e8c5975eaa552da1c2e0b | ["MIT"] | count: null | events: null / null
max_issues: e3nn/non_linearities/gated_block_parity.py | zizai/e3nn | 3efa8d7e110d23410d0e8c5975eaa552da1c2e0b | ["MIT"] | count: null | events: null / null
max_forks: e3nn/non_linearities/gated_block_parity.py | zizai/e3nn | 3efa8d7e110d23410d0e8c5975eaa552da1c2e0b | ["MIT"] | count: null | events: null / null
# pylint: disable=invalid-name, arguments-differ, missing-docstring, line-too-long, no-member, unbalanced-tuple-unpacking
import torch
from e3nn import rs
from e3nn.non_linearities.activation import Activation
from e3nn.tensor_product import ElementwiseTensorProduct
def split_features(features, *Rss, dim=-1):
index = 0
outputs = []
for Rs in Rss:
n = rs.dim(Rs)
outputs.append(features.narrow(dim, index, n))
index += n
assert index == features.size(dim)
return outputs
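# A minimal sketch of split_features (hypothetical Rs lists; assumes rs.dim sums mul * (2 * L + 1)):
# with Rs_scalars = [(4, 0)] and Rs_gates = [(2, 0)], a features tensor of width 6 along `dim`
# is split into consecutive chunks of widths 4 and 2.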
class GatedBlockParity(torch.nn.Module):
def __init__(self, Rs_scalars, act_scalars, Rs_gates, act_gates, Rs_nonscalars):
super().__init__()
self.Rs_in = Rs_scalars + Rs_gates + Rs_nonscalars
self.Rs_scalars, self.Rs_gates, self.Rs_nonscalars = Rs_scalars, Rs_gates, Rs_nonscalars
self.act_scalars = Activation(Rs_scalars, act_scalars)
Rs_scalars = self.act_scalars.Rs_out
self.act_gates = Activation(Rs_gates, act_gates)
Rs_gates = self.act_gates.Rs_out
self.mul = ElementwiseTensorProduct(Rs_nonscalars, Rs_gates)
Rs_nonscalars = self.mul.Rs_out
self.Rs_out = Rs_scalars + Rs_nonscalars
def forward(self, features, dim=-1):
scalars, gates, nonscalars = split_features(features, self.Rs_scalars, self.Rs_gates, self.Rs_nonscalars, dim=dim)
scalars = self.act_scalars(scalars)
if gates.size(dim):
gates = self.act_gates(gates)
nonscalars = self.mul(nonscalars, gates)
features = torch.cat([scalars, nonscalars], dim=dim)
else:
features = scalars
return features
avg_line_length: 34.6875 | max_line_length: 122 | alphanum_fraction: 0.685285

hexsha: 3b7ca1ef217f3baac6d6789c7b8620d2042e9192 | size: 474 | ext: py | lang: Python
max_stars: guppe/atividades/secao_8/ex047.py | WesleyLucas97/cursos_python | b2cbd393e8fed6e36b84253f7934766f2b4f1de8 | ["MIT"] | count: null | events: null / null
max_issues: guppe/atividades/secao_8/ex047.py | WesleyLucas97/cursos_python | b2cbd393e8fed6e36b84253f7934766f2b4f1de8 | ["MIT"] | count: null | events: null / null
max_forks: guppe/atividades/secao_8/ex047.py | WesleyLucas97/cursos_python | b2cbd393e8fed6e36b84253f7934766f2b4f1de8 | ["MIT"] | count: null | events: null / null
"""
Write a function that receives a 4x4 matrix and returns how many values greater than 10 it contains.
"""
from random import randint
def conta_(x: list):
cont = 0
for a in range(4):
for b in range(4):
if x[a][b] >= 10:
cont += 1
return cont
matriz = [[0, 0, 0, 0], [0, 0, 0, 0], [0, 0, 0, 0], [0, 0, 0, 0]]
for i in range(4):
for j in range(4):
matriz[i][j] = randint(0, 20)
print(matriz)
print(conta_(matriz))
avg_line_length: 19.75 | max_line_length: 94 | alphanum_fraction: 0.542194

hexsha: f9761aa3c099a7e07485f1fc3ab4743e86ca3105 | size: 12,990 | ext: py | lang: Python
max_stars: openslides/motions/config_variables.py | flowluap/OpenSlides | e0069f734adacd5a42183915230f17fc52336f22 | ["MIT"] | count: null | events: null / null
max_issues: openslides/motions/config_variables.py | flowluap/OpenSlides | e0069f734adacd5a42183915230f17fc52336f22 | ["MIT"] | count: null | events: null / null
max_forks: openslides/motions/config_variables.py | flowluap/OpenSlides | e0069f734adacd5a42183915230f17fc52336f22 | ["MIT"] | count: null | events: null / null
from django.core.validators import MinValueValidator
from openslides.core.config import ConfigVariable
from openslides.motions.models import MotionPoll
from .models import Workflow
def get_workflow_choices():
"""
Returns a list of all workflows to be used as choices for the config variable
'motions_workflow'. Each list item contains the pk and the display name.
"""
return [
{"value": str(workflow.pk), "display_name": workflow.name}
for workflow in Workflow.objects.all()
]
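# A sketch of the returned structure (hypothetical workflow named "Simple Workflow" with pk 1):
# [{"value": "1", "display_name": "Simple Workflow"}]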
def get_config_variables():
"""
Generator which yields all config variables of this app.
They are grouped in 'General', 'Amendments', 'Supporters', 'Voting and ballot
papers' and 'PDF'. The generator has to be evaluated during app loading
(see apps.py).
"""
# General
yield ConfigVariable(
name="motions_workflow",
default_value="1",
input_type="choice",
label="Workflow of new motions",
choices=get_workflow_choices,
weight=310,
group="Motions",
)
yield ConfigVariable(
name="motions_statute_amendments_workflow",
default_value="1",
input_type="choice",
label="Workflow of new statute amendments",
choices=get_workflow_choices,
weight=312,
group="Motions",
)
yield ConfigVariable(
name="motions_preamble",
default_value="The assembly may decide:",
label="Motion preamble",
weight=320,
group="Motions",
)
yield ConfigVariable(
name="motions_default_line_numbering",
default_value="outside",
input_type="choice",
label="Default line numbering",
choices=(
{"value": "outside", "display_name": "outside"},
{"value": "inline", "display_name": "inline"},
{"value": "none", "display_name": "Disabled"},
),
weight=322,
group="Motions",
)
yield ConfigVariable(
name="motions_line_length",
default_value=85,
input_type="integer",
label="Line length",
help_text="The maximum number of characters per line. Relevant when line numbering is enabled. Min: 40",
weight=323,
group="Motions",
validators=(MinValueValidator(40),),
)
yield ConfigVariable(
name="motions_reason_required",
default_value=False,
input_type="boolean",
label="Reason required for creating new motion",
weight=324,
group="Motions",
)
yield ConfigVariable(
name="motions_disable_text_on_projector",
default_value=False,
input_type="boolean",
label="Hide motion text on projector",
weight=325,
group="Motions",
)
yield ConfigVariable(
name="motions_disable_reason_on_projector",
default_value=False,
input_type="boolean",
label="Hide reason on projector",
weight=326,
group="Motions",
)
yield ConfigVariable(
name="motions_disable_recommendation_on_projector",
default_value=False,
input_type="boolean",
label="Hide recommendation on projector",
weight=327,
group="Motions",
)
yield ConfigVariable(
name="motions_hide_referring_motions",
default_value=False,
input_type="boolean",
label="Hide referring motions",
weight=328,
group="Motions",
)
yield ConfigVariable(
name="motions_disable_sidebox_on_projector",
default_value=True,
input_type="boolean",
label="Show meta information box below the title on projector",
weight=329,
group="Motions",
)
yield ConfigVariable(
name="motions_show_sequential_numbers",
default_value=True,
input_type="boolean",
label="Show the sequential number for a motion",
help_text="In motion list, motion detail and PDF.",
weight=330,
group="Motions",
)
yield ConfigVariable(
name="motions_recommendations_by",
default_value="",
label="Name of recommender",
help_text="Will be displayed as label before selected recommendation. Use an empty value to disable the recommendation system.",
weight=332,
group="Motions",
)
yield ConfigVariable(
name="motions_statute_recommendations_by",
default_value="",
label="Name of recommender for statute amendments",
help_text="Will be displayed as label before selected recommendation in statute amendments.",
weight=333,
group="Motions",
)
yield ConfigVariable(
name="motions_recommendation_text_mode",
default_value="diff",
input_type="choice",
label="Default text version for change recommendations",
choices=(
{"value": "original", "display_name": "Original version"},
{"value": "changed", "display_name": "Changed version"},
{"value": "diff", "display_name": "Diff version"},
{"value": "agreed", "display_name": "Final version"},
),
weight=334,
group="Motions",
)
yield ConfigVariable(
name="motions_motions_sorting",
default_value="identifier",
input_type="choice",
label="Sort motions by",
choices=(
{"value": "weight", "display_name": "Call list"},
{"value": "identifier", "display_name": "Identifier"},
),
weight=335,
group="Motions",
)
# Numbering
yield ConfigVariable(
name="motions_identifier",
default_value="per_category",
input_type="choice",
label="Identifier",
choices=(
{"value": "per_category", "display_name": "Numbered per category"},
{"value": "serially_numbered", "display_name": "Serially numbered"},
{"value": "manually", "display_name": "Set it manually"},
),
weight=340,
group="Motions",
subgroup="Numbering",
)
yield ConfigVariable(
name="motions_identifier_min_digits",
default_value=1,
input_type="integer",
label="Number of minimal digits for identifier",
help_text="Uses leading zeros to sort motions correctly by identifier.",
weight=342,
group="Motions",
subgroup="Numbering",
validators=(MinValueValidator(1),),
)
yield ConfigVariable(
name="motions_identifier_with_blank",
default_value=False,
input_type="boolean",
label="Allow blank in identifier",
help_text="Blank between prefix and number, e.g. 'A 001'.",
weight=344,
group="Motions",
subgroup="Numbering",
)
# Amendments
yield ConfigVariable(
name="motions_statutes_enabled",
default_value=False,
input_type="boolean",
label="Activate statute amendments",
weight=350,
group="Motions",
subgroup="Amendments",
)
yield ConfigVariable(
name="motions_amendments_enabled",
default_value=False,
input_type="boolean",
label="Activate amendments",
weight=351,
group="Motions",
subgroup="Amendments",
)
yield ConfigVariable(
name="motions_amendments_main_table",
default_value=True,
input_type="boolean",
label="Show amendments together with motions",
weight=352,
group="Motions",
subgroup="Amendments",
)
yield ConfigVariable(
name="motions_amendments_prefix",
default_value="-",
label="Prefix for the identifier for amendments",
weight=353,
group="Motions",
subgroup="Amendments",
)
yield ConfigVariable(
name="motions_amendments_text_mode",
default_value="paragraph",
input_type="choice",
label="How to create new amendments",
choices=(
{"value": "freestyle", "display_name": "Empty text field"},
{"value": "fulltext", "display_name": "Edit the whole motion text"},
{"value": "paragraph", "display_name": "Paragraph-based, Diff-enabled"},
),
weight=354,
group="Motions",
subgroup="Amendments",
)
yield ConfigVariable(
name="motions_amendments_multiple_paragraphs",
default_value=True,
input_type="boolean",
label="Amendments can change multiple paragraphs",
weight=355,
group="Motions",
subgroup="Amendments",
)
yield ConfigVariable(
name="motions_amendments_of_amendments",
default_value=False,
input_type="boolean",
label="Allow amendments of amendments",
weight=356,
group="Motions",
subgroup="Amendments",
)
# Supporters
yield ConfigVariable(
name="motions_min_supporters",
default_value=0,
input_type="integer",
label="Number of (minimum) required supporters for a motion",
help_text="Choose 0 to disable the supporting system.",
weight=360,
group="Motions",
subgroup="Supporters",
validators=(MinValueValidator(0),),
)
yield ConfigVariable(
name="motions_remove_supporters",
default_value=False,
input_type="boolean",
label="Remove all supporters of a motion if a submitter edits his motion in early state",
weight=361,
group="Motions",
subgroup="Supporters",
)
# Voting and ballot papers
yield ConfigVariable(
name="motion_poll_default_100_percent_base",
default_value="YNA",
input_type="choice",
label="Default 100 % base of a voting result",
choices=tuple(
{"value": base[0], "display_name": base[1]}
for base in MotionPoll.PERCENT_BASES
),
weight=370,
group="Motions",
subgroup="Voting and ballot papers",
)
yield ConfigVariable(
name="motion_poll_default_majority_method",
default_value="simple",
input_type="choice",
choices=tuple(
{"value": method[0], "display_name": method[1]}
for method in MotionPoll.MAJORITY_METHODS
),
label="Required majority",
help_text="Default method to check whether a motion has reached the required majority.",
weight=371,
hidden=True,
group="Motions",
subgroup="Voting and ballot papers",
)
yield ConfigVariable(
name="motion_poll_default_groups",
default_value=[],
input_type="groups",
label="Default groups with voting rights",
weight=372,
group="Motions",
subgroup="Voting and ballot papers",
)
yield ConfigVariable(
name="motions_pdf_ballot_papers_selection",
default_value="CUSTOM_NUMBER",
input_type="choice",
label="Number of ballot papers",
choices=(
{"value": "NUMBER_OF_DELEGATES", "display_name": "Number of all delegates"},
{
"value": "NUMBER_OF_ALL_PARTICIPANTS",
"display_name": "Number of all participants",
},
{
"value": "CUSTOM_NUMBER",
"display_name": "Use the following custom number",
},
),
weight=373,
group="Motions",
subgroup="Voting and ballot papers",
)
yield ConfigVariable(
name="motions_pdf_ballot_papers_number",
default_value=8,
input_type="integer",
label="Custom number of ballot papers",
weight=374,
group="Motions",
subgroup="Voting and ballot papers",
validators=(MinValueValidator(1),),
)
# PDF export
yield ConfigVariable(
name="motions_export_title",
default_value="Motions",
label="Title for PDF documents of motions",
weight=380,
group="Motions",
subgroup="PDF export",
)
yield ConfigVariable(
name="motions_export_preamble",
default_value="",
label="Preamble text for PDF documents of motions",
weight=382,
group="Motions",
subgroup="PDF export",
)
yield ConfigVariable(
name="motions_export_submitter_recommendation",
default_value=False,
label="Show submitters and recommendation/state in table of contents",
input_type="boolean",
weight=384,
group="Motions",
subgroup="PDF export",
)
yield ConfigVariable(
name="motions_export_follow_recommendation",
default_value=False,
label="Show checkbox to record decision",
input_type="boolean",
weight=386,
group="Motions",
subgroup="PDF export",
)
avg_line_length: 29.191011 | max_line_length: 136 | alphanum_fraction: 0.601848

hexsha: afa760d06cb2bb5571d314fc466ffe4cdf32d4b7 | size: 383 | ext: py | lang: Python
max_stars: simuvex/procedures/libc___so___6/getchar.py | praetorian-inc/simuvex | 7984bc4432a1c2126e6f2eb963c935e9f6a98da5 | ["BSD-2-Clause"] | count: 8 | events: 2016-01-19T03:13:32.000Z / 2020-11-03T09:30:05.000Z
max_issues: simuvex/procedures/libc___so___6/getchar.py | praetorian-inc/simuvex | 7984bc4432a1c2126e6f2eb963c935e9f6a98da5 | ["BSD-2-Clause"] | count: null | events: null / null
max_forks: simuvex/procedures/libc___so___6/getchar.py | praetorian-inc/simuvex | 7984bc4432a1c2126e6f2eb963c935e9f6a98da5 | ["BSD-2-Clause"] | count: 3 | events: 2017-04-24T00:22:30.000Z / 2020-11-03T09:30:06.000Z
import simuvex
from simuvex.s_type import SimTypeInt
######################################
# getchar
######################################
class getchar(simuvex.SimProcedure):
def run(self):
self.return_type = SimTypeInt(32, True)
data = self.inline_call(
simuvex.SimProcedures['libc.so.6']['_IO_getc'], 0).ret_expr # stdin
return data
avg_line_length: 23.9375 | max_line_length: 80 | alphanum_fraction: 0.530026

hexsha: 27e00df54fa1c05af576386431d1a8903b6e4014 | size: 7,189 | ext: py | lang: Python
max_stars: models/vgg.py | MarioMZhang/HAP-tryout | 9a423f35b50766533a0d2cab8069316ccb21954b | ["MIT"] | count: 24 | events: 2021-02-04T09:51:46.000Z / 2022-03-04T13:01:24.000Z
max_issues: models/vgg.py | ICML2021Submission1958/Hessian-Aware-Pruning-and-Optimal-Neural-Implant | 34310e93b314f38a8d262f0b1fdb033d16d7087f | ["MIT"] | count: 3 | events: 2021-07-26T07:18:00.000Z / 2022-01-07T12:17:11.000Z
max_forks: models/vgg.py | ICML2021Submission1958/Hessian-Aware-Pruning-and-Optimal-Neural-Implant | 34310e93b314f38a8d262f0b1fdb033d16d7087f | ["MIT"] | count: 2 | events: 2021-10-10T20:51:10.000Z / 2021-11-16T03:40:32.000Z
import math
import torch
import torch.nn as nn
from utils.common_utils import try_contiguous
from utils.prune_utils import register_bottleneck_layer, update_QQ_dict
from utils.prune_utils import LinearLayerRotation, ConvLayerRotation
# from layers.bottleneck_layers import LinearBottleneck, Conv2dBottleneck
from models.resnet import _weights_init
_AFFINE = True
# _AFFINE = False
defaultcfg = {
11: [64, 'M', 128, 'M', 256, 256, 'M', 512, 512, 'M', 512, 512],
13: [64, 64, 'M', 128, 128, 'M', 256, 256, 'M', 512, 512, 'M', 512, 512],
16: [64, 64, 'M', 128, 128, 'M', 256, 256, 256, 'M', 512, 512, 512, 'M', 512, 512, 512],
19: [64, 64, 'M', 128, 128, 'M', 256, 256, 256, 256, 'M', 512, 512, 512, 512, 'M', 512, 512, 512, 512],
}
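# In the configs above, integers are conv output-channel counts and 'M' marks a 2x2 max-pooling
# layer; make_layers below turns each entry into the corresponding nn.Conv2d or nn.MaxPool2d.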
class VGG(nn.Module):
def __init__(self, dataset='cifar10', depth=19, init_weights=True, cfg=None):
super(VGG, self).__init__()
if cfg is None:
cfg = defaultcfg[depth]
self.feature = self.make_layers(cfg, False)
# self.feature = self.make_layers(cfg, True)
self.dataset = dataset
if dataset == 'cifar10' or dataset == 'cinic-10':
num_classes = 10
elif dataset == 'cifar100':
num_classes = 100
elif dataset == 'tiny_imagenet':
num_classes = 200
elif dataset == 'imagenet':
num_classes = 1000
if dataset != 'imagenet':
self.classifier = nn.Linear(cfg[-1], num_classes)
else:
self.classifier = VGGOutputBlock(in_channels=7*7*cfg[-1], classes=1000)
# self.classifier = VGGOutputBlock(in_channels=41472, classes=1000)
if init_weights:
self.apply(_weights_init)
# self._initialize_weights()
def make_layers(self, cfg, batch_norm=False):
layers = []
in_channels = 3
for v in cfg:
if v == 'M':
layers += [nn.MaxPool2d(kernel_size=2, stride=2)]
else:
conv2d = nn.Conv2d(in_channels, v, kernel_size=3, padding=1, bias=True)
# conv2d = nn.Conv2d(in_channels, v, kernel_size=3, padding=1, bias=False)
if batch_norm:
layers += [conv2d, nn.BatchNorm2d(v, affine=_AFFINE), nn.ReLU(inplace=True)]
else:
layers += [conv2d, nn.ReLU(inplace=True)]
in_channels = v
return nn.Sequential(*layers)
def forward(self, x):
x = self.feature(x)
if self.dataset == 'tiny_imagenet':
x = nn.AvgPool2d(4)(x)
else:
x = nn.AvgPool2d(2)(x)
# print(x.shape)
x = x.view(x.size(0), -1)
# print(x.shape)
y = self.classifier(x)
return y
def _initialize_weights(self):
for m in self.modules():
if isinstance(m, nn.Conv2d):
n = m.kernel_size[0] * m.kernel_size[1] * m.in_channels
m.weight.data.normal_(0, math.sqrt(2. / n))
if m.bias is not None:
m.bias.data.zero_()
elif isinstance(m, nn.BatchNorm2d):
if m.weight is not None:
m.weight.data.fill_(1.0)
m.bias.data.zero_()
elif isinstance(m, nn.Linear):
m.weight.data.normal_(0, 0.01)
m.bias.data.zero_()
class VGGDense(nn.Module):
def __init__(self,
in_channels,
out_channels):
super(VGGDense, self).__init__()
self.fc = nn.Linear(
in_features=in_channels,
out_features=out_channels)
self.activ = nn.ReLU(inplace=True)
self.dropout = nn.Dropout(p=0.5)
def forward(self, x):
x = self.fc(x)
x = self.activ(x)
x = self.dropout(x)
return x
class VGGOutputBlock(nn.Module):
def __init__(self,
in_channels,
classes):
super(VGGOutputBlock, self).__init__()
mid_channels = 4096
self.fc1 = VGGDense(
in_channels=in_channels,
out_channels=mid_channels)
self.fc2 = VGGDense(
in_channels=mid_channels,
out_channels=mid_channels)
self.fc3 = nn.Linear(
in_features=mid_channels,
out_features=classes)
def forward(self, x):
x = self.fc1(x)
x = self.fc2(x)
x = self.fc3(x)
return x
class BottleneckVGG(nn.Module):
def __init__(self, vgg_prev, fix_rotation=True):
super(BottleneckVGG, self).__init__()
self.dataset = vgg_prev.dataset
self.feature = vgg_prev.feature
self.classifier = vgg_prev.classifier
self.fix_rotation = fix_rotation
self._is_registered = False
def register(self, modules, Q_g, Q_a, W_star, use_patch, fix_rotation, re_init):
n_seqs = len(self.feature)
for idx in range(n_seqs):
m = self.feature[idx]
if isinstance(m, nn.Sequential):
m = m[1]
if m in modules:
self.feature[idx] = register_bottleneck_layer(m, Q_g[m], Q_a[m], W_star[m], use_patch, fix_rotation)
update_QQ_dict(Q_g, Q_a, m, self.feature[idx][1])
m = self.classifier
if isinstance(m, nn.Sequential):
m = m[1]
if m in modules:
self.classifier = register_bottleneck_layer(m, Q_g[m], Q_a[m], W_star[m], use_patch, fix_rotation)
update_QQ_dict(Q_g, Q_a, m, self.classifier)
self._is_registered = True
if re_init:
self.apply(_weights_init)
def _initialize_weights(self):
for m in self.modules():
if isinstance(m, nn.Conv2d):
n = m.kernel_size[0] * m.kernel_size[1] * m.in_channels
m.weight.data.normal_(0, math.sqrt(2. / n))
if m.bias is not None:
m.bias.data.zero_()
elif isinstance(m, nn.BatchNorm2d):
if m.weight is not None:
m.weight.data.fill_(1.0)
m.bias.data.zero_()
elif isinstance(m, nn.Linear):
m.weight.data.normal_(0, 0.01)
m.bias.data.zero_()
elif isinstance(m, LinearLayerRotation):
if m.trainable:
print('* init Linear rotation')
m.rotation_matrix.data.normal_(0, 0.01)
elif isinstance(m, ConvLayerRotation):
if m.trainable:
print('* init Conv rotation')
n = 1 * m.rotation_matrix.size(1)
m.rotation_matrix.data.normal_(0, math.sqrt(2. / n))
def forward(self, x):
assert self._is_registered
nseq = len(self.feature)
for idx in range(nseq):
x = self.feature[idx](x)
if self.dataset == 'tiny_imagenet':
x = nn.AvgPool2d(4)(x)
elif self.dataset == 'imagenet':
return self.classifier(x)
else:
x = nn.AvgPool2d(2)(x)
out = x.view(x.size(0), -1)
out = self.classifier(out)
return out
avg_line_length: 35.413793 | max_line_length: 116 | alphanum_fraction: 0.548199

hexsha: f74586a9a2e957cdc6a027edd7f741dc21f08c31 | size: 95 | ext: py | lang: Python
max_stars: helli5/contextprocessor.py | TheMn/internet-engineering-project | e41536552feff6f806ba099922df95e89da5bd31 | ["Apache-2.0"] | count: 7 | events: 2019-10-19T12:58:11.000Z / 2020-11-05T07:02:17.000Z
max_issues: helli5/contextprocessor.py | TheMn/internet-engineering-project | e41536552feff6f806ba099922df95e89da5bd31 | ["Apache-2.0"] | count: 35 | events: 2019-12-06T16:31:07.000Z / 2022-03-12T00:56:35.000Z
max_forks: helli5/contextprocessor.py | TheMn/internet-engineering-project | e41536552feff6f806ba099922df95e89da5bd31 | ["Apache-2.0"] | count: 1 | events: 2019-10-18T19:07:04.000Z / 2019-10-18T19:07:04.000Z
from datetime import datetime
def time_now(request):
return {'time_now': datetime.now()}
avg_line_length: 15.833333 | max_line_length: 39 | alphanum_fraction: 0.726316

hexsha: e396d79e336bf66fbab6a5177060929d522bb474 | size: 2,005 | ext: py | lang: Python
max_stars: src/greyd/config.py | canberk/greyd | b3ce055ba856277d5efd5e2bbb9b59ff8d3b3c58 | ["MIT"] | count: 2 | events: 2019-02-18T16:31:14.000Z / 2019-03-05T18:24:39.000Z
max_issues: src/greyd/config.py | canberk/greyd | b3ce055ba856277d5efd5e2bbb9b59ff8d3b3c58 | ["MIT"] | count: null | events: null / null
max_forks: src/greyd/config.py | canberk/greyd | b3ce055ba856277d5efd5e2bbb9b59ff8d3b3c58 | ["MIT"] | count: 1 | events: 2019-02-20T21:42:20.000Z / 2019-02-20T21:42:20.000Z
# -*- coding: utf-8 -*-
"""Set up environment configure."""
import os
import rsa
# pylint: disable=invalid-name
def use_rsa_key(keys_path):
"""Check if already have keypair do nothing else create new one."""
if not check_pem_files(keys_path):
generate_rsa_keypair(keys_path)
def check_pem_files(keys_path):
"""Check *_public.pem and *_private.pem is exist."""
from pathlib import Path
pub_file = Path(keys_path + "_public.pem")
pri_file = Path(keys_path + "_private.pem")
return bool(pub_file.is_file() and pri_file.is_file())
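# Illustrative call using the defaults defined below (assumed deployment paths):
# check_pem_files("/usr/src/app/greyd/rsa_keys/server") returns True only if both
# /usr/src/app/greyd/rsa_keys/server_public.pem and /usr/src/app/greyd/rsa_keys/server_private.pem exist.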
def generate_rsa_keypair(keys_path):
"""Generate rsa keypair."""
(public_key, private_key) = rsa.newkeys(512)
if not os.path.exists(keys_path):
os.makedirs(keys_path)
with open(keys_path + "_public.pem", "wb+") as file: # noqa pylint: disable=redefined-outer-name
key_data = rsa.PublicKey.save_pkcs1(public_key)
file.write(key_data)
with open(keys_path + "_private.pem", "wb+") as file:
key_data = rsa.PrivateKey.save_pkcs1(private_key)
file.write(key_data)
HOST = os.environ.get("HOST", "0.0.0.0")
PORT = int(os.environ.get("PORT", "8001"))
GEONAMES_USERNAME = os.environ.get("GEONAMES_USERNAME")
SERVER_NAME = os.environ.get("SERVER_NAME", "server")
CLIENT_NAME = os.environ.get("CLIENT_NAME", "client")
DB_NAME = os.environ.get("DB_NAME", "greyd.db")
DB_PATH = os.environ.get("DB_PATH", "/usr/src/app/greyd/db")
KEYS_PATH = os.environ.get("KEYS_PATH", "/usr/src/app/greyd/rsa_keys/")
use_rsa_key(KEYS_PATH + SERVER_NAME)
use_rsa_key(KEYS_PATH + CLIENT_NAME)
private_file = KEYS_PATH + SERVER_NAME + "_private.pem"
with open(private_file, mode="rb") as file:
private_key_data = file.read()
SERVER_PRIVATE_RSA_KEY = rsa.PrivateKey.load_pkcs1(private_key_data)
public_file = KEYS_PATH + CLIENT_NAME + "_public.pem"
with open(public_file, mode="rb") as file:
public_key_data = file.read()
CLIENT_PUBLIC_RSA_KEY = rsa.PublicKey.load_pkcs1(public_key_data)
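# A minimal sketch (illustration only, not part of greyd's runtime configuration)
# of how the loaded keys could be used with the `rsa` package. With 512-bit keys,
# rsa.encrypt() accepts at most 64 - 11 = 53 bytes of plaintext per call.
if __name__ == "__main__":
    demo_payload = b"hello client"
    # Only the holder of the client's private key can decrypt this.
    demo_ciphertext = rsa.encrypt(demo_payload, CLIENT_PUBLIC_RSA_KEY)
    # The client can verify this signature with the server's public key.
    demo_signature = rsa.sign(demo_payload, SERVER_PRIVATE_RSA_KEY, "SHA-256")
    print(len(demo_ciphertext), len(demo_signature))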
| 31.328125
| 101
| 0.711721
|
d5e3a84b8990ddb290b84c9757a702050b3e2f1a
| 24,673
|
py
|
Python
|
Packs/FeedFireEye/Integrations/FeedFireEye/FeedFireEye.py
|
Gil-nuriel/content
|
b5237605d24ad915566f96c2cac392b1a93be80a
|
[
"MIT"
] | 1
|
2021-04-20T07:10:06.000Z
|
2021-04-20T07:10:06.000Z
|
Packs/FeedFireEye/Integrations/FeedFireEye/FeedFireEye.py
|
Gil-nuriel/content
|
b5237605d24ad915566f96c2cac392b1a93be80a
|
[
"MIT"
] | null | null | null |
Packs/FeedFireEye/Integrations/FeedFireEye/FeedFireEye.py
|
Gil-nuriel/content
|
b5237605d24ad915566f96c2cac392b1a93be80a
|
[
"MIT"
] | null | null | null |
from typing import Tuple, List, Dict, Any, Optional
import urllib3
from requests.auth import HTTPBasicAuth
from CommonServerPython import *
# disable insecure warnings
urllib3.disable_warnings()
INTEGRATION_NAME = 'FireEye Feed'
API_URL = 'https://api.intelligence.fireeye.com'
FE_CONFIDENCE_TO_REPUTATION = {
Common.DBotScore.BAD: 70,
Common.DBotScore.SUSPICIOUS: 30,
Common.DBotScore.NONE: 0
}
class STIX21Processor:
"""Processing class for STIX 2.1 objects.
Args:
raw_indicators (List): List of STIX 2.1 indicators objects.
relationships (Dict): Dict of `id: STIX 2.1 relationship object`.
entities (Dict): Dict of `id: STIX 2.1 entity object`.
reports (List): List of STIX 2.1 reports objects.
"""
def __init__(self, raw_indicators: List, relationships: Dict, entities: Dict, reports: List,
malicious_threshold: int, reputation_interval: int):
self.raw_indicators = raw_indicators
self.relationships = relationships
self.entities = entities
self.reports = reports
self.reputation_interval = reputation_interval
self.type_to_processor = {
'report': self.process_report,
'malware': self.process_malware,
'threat-actor': self.process_threat_actor,
}
FE_CONFIDENCE_TO_REPUTATION[Common.DBotScore.BAD] = malicious_threshold
def process_indicators(self) -> List:
processed_indicators = list() # type: List
for raw_data in self.raw_indicators:
processed_indicator = self.process_indicator(raw_data)
if processed_indicator:
processed_indicators += processed_indicator
return processed_indicators
@staticmethod
def process_indicator_value(indicator_pattern_value: str) -> Tuple[List, List, Dict]:
"""Processes the `pattern` value from the feed response into indicator types according to FireEye, their values,
        and, in case of a file type, its hash values.
Args:
indicator_pattern_value (str): The raw value of the `pattern` value from the feed response.
Returns:
Tuple[List, List, Dict]:
indicator_types - List of indicator types according to FireEye classification.
values - List of indicator values.
hashes - Dict of `hash_type: hash_value`, in case of `file` indicator type.
"""
indicator_pattern_value = indicator_pattern_value[1:-1]
if indicator_pattern_value.startswith('file'):
hash_values = indicator_pattern_value.split('OR')
hashes_dict = dict() # type: Dict
for h in hash_values:
key, value = h.split('=')
hashes_dict[key.strip().split('file:hashes.')[1].replace("'", '')] = value.strip().replace("'", '')
return ['file'], [hashes_dict['MD5']], hashes_dict
try:
indicator_types = list() # type: List
values = list() # type: List
for indicator_value in indicator_pattern_value.split('AND'):
if indicator_value.startswith('email-message'):
key, value = indicator_value.split(':from_ref.value=')
else:
try:
key, value = indicator_value.split(':value=')
except Exception:
continue
indicator_types.append(key.strip().replace("'", '').replace('[', '').replace(']', ''))
values.append(value.strip().replace("'", '').replace('[', '').replace(']', ''))
return indicator_types, values, {}
except Exception:
return [], [], {}
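    # Illustration with hypothetical pattern values (not taken from the feed):
    #   process_indicator_value("[file:hashes.MD5='d41d8cd9' OR file:hashes.SHA-256='e3b0c442']")
    #       -> (['file'], ['d41d8cd9'], {'MD5': 'd41d8cd9', 'SHA-256': 'e3b0c442'})
    #   process_indicator_value("[ipv4-addr:value='1.2.3.4']")
    #       -> (['ipv4-addr'], ['1.2.3.4'], {})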
@staticmethod
def calculate_indicator_reputation(confidence: int, date: str, reputation_interval: int):
"""Calculates indicator reputation according to the threshold levels and dates.
Args:
confidence (int): FireEye feed confidence.
date (str): Date in which the indicator was published.
            reputation_interval (int): If at least this many days have passed since the indicator was created,
                then its reputation can be at most "Suspicious".
Returns:
int. DBot Score value
Notes:
            In case (current_date - published date of the indicator) >= reputation_interval days, the highest score the
            indicator can get is SUSPICIOUS.
"""
current_date = datetime.now()
published_date = datetime.strptime(date, '%Y-%m-%dT%H:%M:%S.%fZ')
sorted_reputation_map = sorted(FE_CONFIDENCE_TO_REPUTATION.items(), reverse=True)
if current_date - published_date < timedelta(days=reputation_interval):
for score, threshold in sorted_reputation_map:
if confidence > threshold:
return score
else:
for score, threshold in sorted_reputation_map:
if confidence > threshold:
return min(score, Common.DBotScore.SUSPICIOUS)
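    # Worked example with the default mapping (confidence > 70 -> BAD, > 30 -> SUSPICIOUS,
    # > 0 -> NONE) and reputation_interval=30 (values here are illustrative only):
    #   confidence=90, published 5 days ago   -> BAD
    #   confidence=90, published 60 days ago  -> capped at SUSPICIOUS
    #   confidence=50, any age                -> SUSPICIOUS
    #   confidence=10, any age                -> NONE
    # Note that a confidence of 0 matches no threshold, so the method returns None.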
def process_indicator(self, raw_data):
indicators = list()
_, values, hashes = self.process_indicator_value(raw_data.get('pattern'))
for value in values:
indicator = dict()
indicator['type'] = auto_detect_indicator_type(value)
if indicator['type']:
indicator['value'] = value
indicator['score'] = self.calculate_indicator_reputation(
raw_data.get('confidence'),
raw_data.get('created'),
self.reputation_interval
)
indicator['rawJSON'] = {
'fireeye_id': raw_data.get('id'),
'fireeye_labels': raw_data.get('labels'),
'fireeye_revoked': raw_data.get('revoked'),
'fireeye_created_date': raw_data.get('created'),
'fireeye_confidence': raw_data.get('confidence'),
'fireeye_valid_from': raw_data.get('valid_from'),
'fireeye_modified_date': raw_data.get('modified'),
'indicator_types': raw_data.get('indicator_types'),
'fireeye_valid_until': raw_data.get('valid_until'),
'fireeye_description': raw_data.get('description')
}
if 'MD5' in hashes:
indicator['rawJSON']['MD5'] = hashes['MD5']
if 'SHA-1' in hashes:
indicator['rawJSON']['SHA-1'] = hashes['SHA-1']
if 'SHA-256' in hashes:
indicator['rawJSON']['SHA-256'] = hashes['SHA-256']
indicators.append(indicator)
return indicators
def process_stix_entities(self):
processed_entities = list() # type: List
for entity_type, value in self.entities.items():
if value.get('type') in self.type_to_processor:
processed_entity = self.type_to_processor[value.get('type')](value)
if processed_entity:
processed_entities.append(processed_entity)
return processed_entities
def process_reports(self) -> List:
processed_reports = list() # type: List
for raw_data in self.reports:
processed_report = self.process_report(raw_data)
if processed_report:
processed_reports.append(processed_report)
return processed_reports
@staticmethod
def process_malware(raw_data) -> Dict:
entity = dict() # type: Dict[str, Any]
entity['type'] = 'STIX Malware'
entity['value'] = raw_data.get('name')
entity['fields'] = {
'stixid': raw_data.get('id'),
'stixdescription': raw_data.get('description', ''),
'stixismalwarefamily': raw_data.get('is_family'),
'stixmalwaretypes': raw_data.get('malware_types')
}
entity['rawJSON'] = {
'fireeye_id': raw_data.get('id'),
'fireeye_labels': raw_data.get('labels'),
'fireeye_aliases': raw_data.get('aliases'),
'fireeye_revoked': raw_data.get('revoked'),
'fireeye_is_family': raw_data.get('is_family'),
'fireeye_created_date': raw_data.get('created'),
'fireeye_modified_date': raw_data.get('modified'),
'fireeye_description': raw_data.get('description'),
'fireeye_malware_types': raw_data.get('malware_types'),
'fireeye_os_execution_envs': raw_data.get('os_execution_envs'),
'fireeye_external_references': raw_data.get('external_references'),
}
return entity
@staticmethod
def process_threat_actor(raw_data) -> Dict:
entity = dict() # type: Dict[str, Any]
entity['type'] = 'STIX Threat Actor'
entity['value'] = raw_data.get('name')
entity['fields'] = {
'stixid': raw_data.get('id'),
'stixaliases': raw_data.get('aliases'),
'stixdescription': raw_data.get('description', ''),
'stixsophistication': raw_data.get('sophistication'),
'stixprimarymotivation': raw_data.get('primary_motivation'),
'stixsecondarymotivations': raw_data.get('secondary_motivations'),
}
entity['rawJSON'] = {
'fireeye_id': raw_data.get('id'),
'fireeye_labels': raw_data.get('labels'),
'fireeye_aliases': raw_data.get('aliases'),
'fireeye_revoked': raw_data.get('revoked'),
'fireeye_created_date': raw_data.get('created'),
'fireeye_modified_date': raw_data.get('modified'),
'fireeye_description': raw_data.get('description'),
'fireeye_sophistication': raw_data.get('sophistication'),
'fireeye_primary_motivation': raw_data.get('primary_motivation'),
'fireeye_threat_actor_types': raw_data.get('threat_actor_types'),
'fireeye_object_marking_refs': raw_data.get('object_marking_refs'),
'fireeye_secondary_motivations': raw_data.get('secondary_motivations'),
'fireeye_intended_effect': raw_data.get('x_fireeye_com_intended_effect'),
'fireeye_planning_and_operational_support': raw_data.get('x_fireeye_com_planning_and_operational_support'),
}
return entity
@staticmethod
def process_report(raw_data) -> Dict:
report = dict() # type: Dict[str, Any]
report['type'] = 'STIX Report'
report['value'] = raw_data.get('name')
report['fields'] = {
'stixid': raw_data.get('id'),
'published': raw_data.get('published'),
'stixdescription': raw_data.get('description', ''),
}
report['rawJSON'] = {
'fireeye_id': raw_data.get('id'),
'fireeye_labels': raw_data.get('labels'),
'fireeye_threats': raw_data.get('threats'),
'fireeye_revoked': raw_data.get('revoked'),
'fireeye_published': raw_data.get('published'),
'fireeye_created_date': raw_data.get('created'),
'fireeye_modified_date': raw_data.get('modified'),
'fireeye_description': raw_data.get('description'),
'fireeye_report_types': raw_data.get('report_types'),
'fireeye_metadata': raw_data.get('x_fireeye_com_metadata'),
'fireeye_external_references': raw_data.get('external_references'),
'fireeye_tracking_info': raw_data.get('x_fireeye_com_tracking_info'),
'fireeye_exploitation_rating': raw_data.get('x_fireeye_com_exploitation_rating'),
'fireeye_risk_rating_justification': raw_data.get('x_fireeye_com_risk_rating_justification'),
'fireeye_additional_description_sections': raw_data.get('x_fireeye_com_additional_description_sections'),
}
return report
class Client(BaseClient):
"""Client to use in the FireEye Feed integration. Overrides BaseClient.
Args:
insecure (bool): False if feed HTTPS server certificate should be verified, True otherwise.
        proxy (bool): True if requests to the feed should go through the system proxy settings, False otherwise.
tags (list): The indicator tags.
tlp_color (str): Traffic Light Protocol color.
"""
def __init__(self, public_key: str, private_key: str, malicious_threshold: int, reputation_interval: int,
polling_timeout: int = 20, insecure: bool = False, proxy: bool = False,
tags: list = [], tlp_color: Optional[str] = None):
super().__init__(base_url=API_URL, verify=not insecure, proxy=proxy)
self.public_key = public_key
self.private_key = private_key
self.reputation_interval = reputation_interval
self.malicious_threshold = malicious_threshold
self._polling_timeout = polling_timeout
self.tags = tags
self.tlp_color = tlp_color
@staticmethod
def parse_access_token_expiration_time(expires_in: str) -> int:
"""Computes the expiration time of the new fetched authentication time.
Args:
expires_in (str): Amount of time the authentication token will be valid according to the API.
Returns:
            int. Epoch time that represents the expiration time of the token.
"""
try:
current_time = datetime.now()
expiration_time = current_time + timedelta(seconds=int(expires_in))
epoch_expiration_time = int(expiration_time.strftime('%s'))
except ValueError:
demisto.info('INFO - could not parse expiration time for access token.')
epoch_expiration_time = 0
return epoch_expiration_time
def fetch_new_access_token(self) -> str:
"""Fetches new authentication token from the API.
Returns:
str. Authentication token.
"""
response = self._http_request(
method='POST',
url_suffix='token',
data={'grant_type': 'client_credentials'},
auth=HTTPBasicAuth(self.public_key, self.private_key),
timeout=self._polling_timeout
)
auth_token = response.get('access_token')
expires_in = response.get('expires_in')
epoch_expiration_time = self.parse_access_token_expiration_time(expires_in)
demisto.setIntegrationContext(
{
'auth_token': auth_token,
'expiration_time': epoch_expiration_time
}
)
return auth_token
def get_access_token(self) -> str:
"""Returns the current valid authentication token for the feed.
Returns:
str. Authentication token.
"""
last_token_fetched_expiration_time = demisto.getIntegrationContext().get('expiration_time')
current_time = int(datetime.now().timestamp())
if last_token_fetched_expiration_time and last_token_fetched_expiration_time > current_time:
auth_token = demisto.getIntegrationContext().get('auth_token')
else:
auth_token = self.fetch_new_access_token()
return auth_token
def fetch_all_indicators_from_api(self, limit: int) -> Tuple[List, Dict, Dict]:
"""Collects raw data of indicators and their relationships from the feed.
Args:
limit (int): Amount of indicators to fetch. -1 means no limit.
Returns:
Tuple[List, Dict, Dict].
raw_indicators - List of STIX 2.1 indicators objects.
relationships - Dict of `id: STIX 2.1 relationship object`.
stix_entities - Dict of `id: STIX 2.1 entity object`.
"""
raw_indicators = list() # type: List
relationships = dict() # type: Dict
stix_entities = dict() # type: Dict
headers = {
'Accept': 'application/vnd.oasis.stix+json; version=2.1',
'X-App-Name': 'content.xsoar.cortex.paloaltonetworks.v1.0',
}
if limit == -1:
query_url = '/collections/indicators/objects?length=1000'
else:
query_url = f'/collections/indicators/objects?length={min(limit, 1000)}'
while True:
headers['Authorization'] = f'Bearer {self.get_access_token()}'
response = self._http_request(
method='GET',
url_suffix=query_url,
headers=headers,
timeout=self._polling_timeout,
resp_type='response'
)
if response.status_code == 204:
demisto.info(f'{INTEGRATION_NAME} info - '
f'API Status Code: {response.status_code} No Content Available for this timeframe.')
return [], {}, {}
if response.status_code != 200:
return_error(f'{INTEGRATION_NAME} indicators fetching - '
f'API Status Code: {response.status_code} Error Reason: {response.text}')
objects_fetched = response.json().get('objects')
for obj in objects_fetched:
if obj.get('type') == 'indicator':
raw_indicators.append(obj)
elif obj.get('type') == 'relationship':
relationships[obj.get('id')] = obj
else:
stix_entities[obj.get('id')] = obj
if limit != -1:
break
try:
query_url = response.links['next']['url']
query_url = query_url.split('https://api.intelligence.fireeye.com')[1]
except KeyError:
break
return raw_indicators, relationships, stix_entities
def fetch_all_reports_from_api(self, limit: int) -> List:
"""Collects reports raw data from the feed.
Args:
limit (int): Amount of reports to fetch. -1 means no limit.
Returns:
List. List of STIX 2.1 reports objects.
"""
raw_reports = list() # type: List
headers = {
'Accept': 'application/vnd.oasis.stix+json; version=2.1',
'X-App-Name': 'content.xsoar.cortex.paloaltonetworks.v1.0',
}
if limit == -1:
query_url = '/collections/reports/objects?length=100'
else:
query_url = f'/collections/reports/objects?length={limit}'
while True:
headers['Authorization'] = f'Bearer {self.get_access_token()}'
response = self._http_request(
method='GET',
url_suffix=query_url,
headers=headers,
timeout=self._polling_timeout,
resp_type='response'
)
if response.status_code != 200:
return_error(f'{INTEGRATION_NAME} reports fetching - '
f'API Status Code: {response.status_code} Error Reason: {response.text}')
raw_reports += [report for report in response.json().get('objects')
if report.get('type') == 'report']
if limit != -1:
break
try:
query_url = response.links['next']['url']
query_url = query_url.split('https://api.intelligence.fireeye.com')[1]
except KeyError:
break
return raw_reports
def build_iterator(self, limit: int) -> List:
self.get_access_token()
raw_indicators, relationships, stix_entities = self.fetch_all_indicators_from_api(limit)
raw_reports = self.fetch_all_reports_from_api(limit)
stix_processor = STIX21Processor(raw_indicators, relationships, stix_entities, raw_reports,
self.malicious_threshold, self.reputation_interval)
indicators = stix_processor.process_indicators()
stix_indicators = stix_processor.process_stix_entities()
reports = stix_processor.process_reports()
return indicators + stix_indicators + reports
def test_module(client: Client):
client.build_iterator(limit=10)
return 'ok', {}, {}
def get_indicators_command(client: Client):
"""Retrieves indicators from the feed to the war-room.
Args:
client (Client): Client object configured according to instance arguments.
Returns:
Tuple of:
            str. Information to be printed to the war room.
            Dict. Data to be entered to context (empty here).
            List. The processed indicators.
"""
limit = int(demisto.args().get('limit')) if 'limit' in demisto.args() else 10
indicators, raw_response = fetch_indicators_command(client, limit)
human_readable = tableToMarkdown('Indicators from FireEye Feed:', indicators,
headers=['value', 'type', 'rawJSON'], removeNull=True)
return human_readable, {}, indicators
def add_fields_if_exists(client: Client, fields_dict: Dict):
"""Adds field mapping if they hold actual values
Args:
fields_dict: The fields entry of the indicator
client (Client): Client object configured according to instance arguments.
Returns:
Dict. Updated field mapping
"""
if client.tags:
fields_dict.update({
'tags': client.tags
})
if client.tlp_color:
fields_dict.update({
'trafficlightprotocol': client.tlp_color
})
return fields_dict
def fetch_indicators_command(client: Client, limit: int = -1):
"""Fetches indicators from the feed to the indicators tab.
Args:
client (Client): Client object configured according to instance arguments.
limit (int): Maximum number of indicators to return.
Returns:
Tuple of:
            List. The processed indicators, ready to be created in the indicators tab.
            List. The raw data of the indicators.
"""
iterator = client.build_iterator(limit)
indicators = []
raw_response = []
for indicator in iterator:
fields = add_fields_if_exists(client, indicator.get('fields', {}))
indicators.append({
'value': indicator['value'],
'type': indicator['type'],
'fields': fields,
'rawJSON': indicator
})
raw_response.append(indicator)
return indicators, raw_response
def verify_threshold_reputation_interval_types(threshold: str, reputation_interval: str):
if not str.isdigit(threshold):
return_error(f'{INTEGRATION_NAME} wrong parameter value - '
f'Parameter "Malicious Threshold" has to be a number')
if not str.isdigit(reputation_interval):
return_error(f'{INTEGRATION_NAME} wrong parameter value - '
f'Parameter "Reputation Interval" has to be a number')
def main():
"""
PARSE AND VALIDATE INTEGRATION PARAMS
"""
public_key = demisto.params().get('credentials').get('identifier')
private_key = demisto.params().get('credentials').get('password')
threshold = demisto.params().get('threshold', '70')
reputation_interval = demisto.params().get('reputation_interval', '30')
verify_threshold_reputation_interval_types(threshold, reputation_interval)
feedTags = argToList(demisto.params().get('feedTags'))
tlp_color = demisto.params().get('tlp_color')
polling_arg = demisto.params().get('polling_timeout', '')
polling_timeout = int(polling_arg) if polling_arg.isdigit() else 20
insecure = demisto.params().get('insecure', False)
proxy = demisto.params().get('proxy', False)
command = demisto.command()
demisto.info(f'Command being called is {command}')
try:
client = Client(public_key, private_key, int(threshold), int(reputation_interval),
polling_timeout, insecure, proxy, feedTags, tlp_color)
if command == 'test-module':
return_outputs(*test_module(client))
elif command == 'fireeye-get-indicators':
return_outputs(*get_indicators_command(client))
elif command == 'fetch-indicators':
indicators, _ = fetch_indicators_command(client)
for single_batch in batch(indicators, batch_size=2000):
demisto.createIndicators(single_batch)
else:
raise NotImplementedError(f'Command {command} is not implemented.')
except Exception:
raise
if __name__ in ['__main__', 'builtin', 'builtins']:
main()
| 38.672414
| 120
| 0.608357
|
c14d36ad2a0b4bd957ec8322e890bc060698bb26
| 1,504
|
py
|
Python
|
webinar_registration/models.py
|
ChameleonCloud/portal
|
92a06fb926dc36e997b94fb8dcd22b7e0d24d3ee
|
[
"Apache-2.0"
] | 3
|
2015-08-04T20:53:41.000Z
|
2020-02-14T22:58:20.000Z
|
webinar_registration/models.py
|
ChameleonCloud/portal
|
92a06fb926dc36e997b94fb8dcd22b7e0d24d3ee
|
[
"Apache-2.0"
] | 103
|
2015-01-15T14:21:00.000Z
|
2022-03-31T19:14:20.000Z
|
webinar_registration/models.py
|
ChameleonCloud/portal
|
92a06fb926dc36e997b94fb8dcd22b7e0d24d3ee
|
[
"Apache-2.0"
] | 4
|
2016-02-22T16:48:20.000Z
|
2021-01-08T17:13:21.000Z
|
from django.conf import settings
from django.db import models
#from datetime import datetime
from django.utils import timezone
import pytz
class Webinar(models.Model):
name = models.CharField(max_length=255)
description = models.TextField()
registration_open = models.DateTimeField()
registration_closed = models.DateTimeField()
start_date = models.DateTimeField()
end_date = models.DateTimeField()
registration_limit = models.IntegerField(default=0)
def is_registration_open(self):
return self.registration_open <= timezone.now() and self.registration_closed >= timezone.now()
def is_registration_closed(self):
return self.registration_closed <= timezone.now()
def is_registration_future(self):
return self.registration_open > timezone.now()
def __str__(self):
return self.name
def is_registered(self, is_registered):
self.is_registered = is_registered
class Meta:
verbose_name = 'Webinar'
verbose_name_plural = 'Webinars'
class WebinarRegistrant(models.Model):
user = models.ForeignKey(settings.AUTH_USER_MODEL, on_delete=models.CASCADE)
webinar = models.ForeignKey(Webinar, on_delete=models.CASCADE)
created = models.DateTimeField(auto_now_add=True)
updated = models.DateTimeField(auto_now=True)
def __str__(self):
return self.user.username
class Meta:
verbose_name = 'Webinar Registrant'
verbose_name_plural = 'Webinar Registrants'
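# A usage sketch (illustration only, not part of the app): the registration-window
# helpers above correspond to a queryset filter for currently open webinars, e.g.
#
#   Webinar.objects.filter(registration_open__lte=timezone.now(),
#                          registration_closed__gte=timezone.now())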
| 30.693878
| 102
| 0.729388
|
d0b79a7f70b273f801bc047591b90f1d74944f88
| 4,175
|
py
|
Python
|
applications/trilinos_application/test_examples/cantilever3d.gid/cantilever_aztec.py
|
jiaqiwang969/Kratos-test
|
ed082abc163e7b627f110a1ae1da465f52f48348
|
[
"BSD-4-Clause"
] | null | null | null |
applications/trilinos_application/test_examples/cantilever3d.gid/cantilever_aztec.py
|
jiaqiwang969/Kratos-test
|
ed082abc163e7b627f110a1ae1da465f52f48348
|
[
"BSD-4-Clause"
] | null | null | null |
applications/trilinos_application/test_examples/cantilever3d.gid/cantilever_aztec.py
|
jiaqiwang969/Kratos-test
|
ed082abc163e7b627f110a1ae1da465f52f48348
|
[
"BSD-4-Clause"
] | null | null | null |
#importing MPI ... for this boost 1.35 or superior is needed
import mpi
print "i am ",mpi.rank , " of ",mpi.size
##################################################################
##################################################################
#setting the domain size for the problem to be solved
domain_size = 3
##################################################################
##################################################################
## ATTENTION: here the order is important
#including kratos path
kratos_libs_path = '../../../../libs/' ##kratos_root/libs
kratos_applications_path = '../../../../applications/' ##kratos_root/applications
import sys
sys.path.append(kratos_libs_path)
sys.path.append(kratos_applications_path)
#importing Kratos main library
from Kratos import *
kernel = Kernel() #defining kernel
#importing applications
import applications_interface
applications_interface.Import_StructuralApplication = True
applications_interface.Import_KratosTrilinosApplication = True
applications_interface.Import_KratosMetisApplication = True
applications_interface.ImportApplications(kernel, kratos_applications_path)
from KratosStructuralApplication import *
from KratosTrilinosApplication import *
from KratosMetisApplication import *
## from now on the order is not anymore crucial
##################################################################
##################################################################
#defining a model part
model_part = ModelPart("FluidPart");
#adding of Variables to Model Part should be here when the "very fix container will be ready"
import trilinos_structural_solver_static
trilinos_structural_solver_static.AddVariables(model_part)
model_part.AddNodalSolutionStepVariable(PARTITION_INDEX)
#reading a model
gid_mode = GiDPostMode.GiD_PostBinary
multifile = MultiFileFlag.MultipleFiles
deformed_mesh_flag = WriteDeformedMeshFlag.WriteUndeformed
write_conditions = WriteConditionsFlag.WriteElementsOnly
gid_io = GidIO("cantilever3d",gid_mode,multifile,deformed_mesh_flag, write_conditions)
##gid_io.ReadModelPart(model_part)
number_of_partitions = mpi.size #we set it equal to the number of processors
print "number_of_partitions", number_of_partitions
partitioner = MetisPartitioningProcess(model_part, gid_io, number_of_partitions, domain_size);
partitioner.Execute()
print "GetRank()",GetRank()
mesh_name = mpi.rank
gid_io.InitializeMesh( mesh_name );
gid_io.WriteMesh((model_part).GetMesh());
gid_io.FinalizeMesh()
print "mesh_name =", mesh_name
print model_part
print model_part.Properties
#writing the mesh
#gid_io.WriteUndeformedMesh(model_part.GetMesh(),domain_size,GiDPostMode.GiD_PostBinary);
#the buffer size should be set up here after the mesh is read for the first time
model_part.SetBufferSize(2)
#importing the solver files
trilinos_structural_solver_static.AddDofs(model_part)
#creating a fluid solver object
solver = trilinos_structural_solver_static.StaticStructuralSolver(model_part,domain_size)
#defining the linear solver
solver_parameters = ParameterList()
solver_parameters.set("AZ_precond", "AZ_dom_decomp");
solver_parameters.set("AZ_subdomain_solve", "AZ_ilut");
solver_parameters.set("AZ_overlap", 3);
solver_parameters.set("AZ_solver", "AZ_gmres");
solver_parameters.set("AZ_kspace", 200);
solver.structure_linear_solver = AztecSolver(solver_parameters,1e-9,1000);
model_part.Properties[1].SetValue(CONSTITUTIVE_LAW, Isotropic3D() )
print "Linear elastic model selected"
solver.Initialize()
(solver.solver).SetEchoLevel(2);
Dt = 0.001
nsteps = 10
gid_io.InitializeResults(mesh_name,(model_part).GetMesh())
for step in range(0,nsteps):
print "line49"
time = Dt*step
model_part.CloneTimeStep(time)
print time
#print model_part.ProcessInfo()[TIME]
#solving the fluid problem
if(step > 3):
solver.Solve()
# if(step > 4):
#print the results
gid_io.WriteNodalResults(DISPLACEMENT,model_part.Nodes,time,0)
gid_io.WriteNodalResults(REACTION,model_part.Nodes,time,0)
# gid_io.PrintOnGaussPoints(PK2_STRESS_TENSOR,model_part,time,domain_size)
gid_io.FinalizeResults()
| 32.617188
| 94
| 0.72024
|
22de59363933d4e479f7f14dc5971255a24a2142
| 856
|
py
|
Python
|
scripts/dags/oci_simple_example.py
|
oracle-quickstart/oci-airflow
|
ca244f05a6abdd33f98041b4f5b20098faf5f215
|
[
"UPL-1.0"
] | 14
|
2020-06-09T13:15:01.000Z
|
2021-12-03T03:13:08.000Z
|
oci-provider/dags/oci_simple_example.py
|
RahulMR42/oke-airflow
|
5796ca95d9cfca6a8072107bf33de4df8fb35f77
|
[
"UPL-1.0"
] | 2
|
2021-03-31T19:13:03.000Z
|
2022-03-15T20:33:33.000Z
|
oci-provider/dags/oci_simple_example.py
|
RahulMR42/oke-airflow
|
5796ca95d9cfca6a8072107bf33de4df8fb35f77
|
[
"UPL-1.0"
] | 5
|
2021-08-29T20:36:26.000Z
|
2022-03-17T08:27:46.000Z
|
from datetime import datetime
from airflow import DAG
from hooks.oci_base import OCIBaseHook
from hooks.oci_object_storage import OCIObjectStorageHook
from operators.oci_object_storage import MakeBucket
default_args = {'owner': 'airflow',
'start_date': datetime(2020, 5, 26),
'email': ['your_email@somecompany.com'],
'email_on_failure': False,
'email_on_retry': False
}
dag = DAG('oci_simple_example',
default_args=default_args,
schedule_interval='@hourly',
catchup=False
)
oci_conn_id = "oci_default"
bucketname = "SomeBucketName"
compartment_ocid = "COMPARTMENT_OCID"
with dag:
make_bucket = MakeBucket(task_id='Make_Bucket', bucket_name=bucketname,oci_conn_id=oci_conn_id, compartment_ocid=compartment_ocid)
make_bucket
| 30.571429
| 134
| 0.690421
|
28ae38af8465c735b4221138b931dc4a859ecacb
| 402
|
py
|
Python
|
main/migrations/0006_endofday_currdate.py
|
Harsh-Sanklecha/stockanalysis
|
f5c6a454863ff72e0bac5e29035033cbf1bb3a06
|
[
"MIT"
] | null | null | null |
main/migrations/0006_endofday_currdate.py
|
Harsh-Sanklecha/stockanalysis
|
f5c6a454863ff72e0bac5e29035033cbf1bb3a06
|
[
"MIT"
] | null | null | null |
main/migrations/0006_endofday_currdate.py
|
Harsh-Sanklecha/stockanalysis
|
f5c6a454863ff72e0bac5e29035033cbf1bb3a06
|
[
"MIT"
] | 2
|
2021-01-08T12:32:59.000Z
|
2021-02-14T11:41:51.000Z
|
# Generated by Django 3.1.2 on 2020-12-12 14:39
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('main', '0005_remove_endofday_currdate'),
]
operations = [
migrations.AddField(
model_name='endofday',
name='currDate',
field=models.CharField(max_length=30, null=True),
),
]
| 21.157895
| 61
| 0.606965
|
a7642afd45ee21340c7d0ec7a330f2e6441b01a0
| 4,773
|
py
|
Python
|
envs/__init__.py
|
zhihanyang2022/hiro_pytorch
|
74761e81f257207c2aa86312e8806eaef45658c4
|
[
"Linux-OpenIB"
] | 33
|
2020-01-15T02:50:02.000Z
|
2021-12-16T02:40:49.000Z
|
envs/__init__.py
|
zhihanyang2022/hiro_pytorch
|
74761e81f257207c2aa86312e8806eaef45658c4
|
[
"Linux-OpenIB"
] | 6
|
2020-07-07T13:15:04.000Z
|
2022-01-03T13:22:02.000Z
|
envs/__init__.py
|
zhihanyang2022/hiro_pytorch
|
74761e81f257207c2aa86312e8806eaef45658c4
|
[
"Linux-OpenIB"
] | 11
|
2020-08-17T10:19:15.000Z
|
2022-02-13T10:10:37.000Z
|
"""Random policy on an environment."""
import numpy as np
import argparse
import envs.create_maze_env
def get_goal_sample_fn(env_name, evaluate):
if env_name == 'AntMaze':
        # NOTE: when evaluating (i.e. for the metrics shown in the paper)
        # a fixed goal is used; the uniform random sampling below is only
        # used for training.
if evaluate:
return lambda: np.array([0., 16.])
else:
return lambda: np.random.uniform((-4, -4), (20, 20))
elif env_name == 'AntPush':
return lambda: np.array([0., 19.])
elif env_name == 'AntFall':
return lambda: np.array([0., 27., 4.5])
else:
assert False, 'Unknown env'
def get_reward_fn(env_name):
if env_name == 'AntMaze':
return lambda obs, goal: -np.sum(np.square(obs[:2] - goal)) ** 0.5
elif env_name == 'AntPush':
return lambda obs, goal: -np.sum(np.square(obs[:2] - goal)) ** 0.5
elif env_name == 'AntFall':
return lambda obs, goal: -np.sum(np.square(obs[:3] - goal)) ** 0.5
else:
assert False, 'Unknown env'
def success_fn(last_reward):
return last_reward > -5.0
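# Note: get_reward_fn() above returns negative Euclidean distances to the goal,
# so a final-step reward above -5.0 means the agent ended within 5 units of it.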
class EnvWithGoal(object):
def __init__(self, base_env, env_name):
self.base_env = base_env
self.env_name = env_name
self.evaluate = False
self.reward_fn = get_reward_fn(env_name)
self.goal = None
self.distance_threshold = 5
self.count = 0
self.state_dim = self.base_env.observation_space.shape[0] + 1
self.action_dim = self.base_env.action_space.shape[0]
def seed(self, seed):
self.base_env.seed(seed)
def reset(self):
# self.viewer_setup()
self.goal_sample_fn = get_goal_sample_fn(self.env_name, self.evaluate)
obs = self.base_env.reset()
self.count = 0
self.goal = self.goal_sample_fn()
return {
# add timestep
'observation': np.r_[obs.copy(), self.count],
'achieved_goal': obs[:2],
'desired_goal': self.goal,
}
def step(self, a):
obs, _, done, info = self.base_env.step(a)
reward = self.reward_fn(obs, self.goal)
self.count += 1
next_obs = {
# add timestep
'observation': np.r_[obs.copy(), self.count],
'achieved_goal': obs[:2],
'desired_goal': self.goal,
}
return next_obs, reward, done or self.count >= 500, info
def render(self):
self.base_env.render()
def get_image(self):
self.render()
data = self.base_env.viewer.get_image()
img_data = data[0]
width = data[1]
height = data[2]
tmp = np.fromstring(img_data, dtype=np.uint8)
image_obs = np.reshape(tmp, [height, width, 3])
image_obs = np.flipud(image_obs)
return image_obs
@property
def action_space(self):
return self.base_env.action_space
@property
def observation_space(self):
return self.base_env.observation_space
def run_environment(env_name, episode_length, num_episodes):
env = EnvWithGoal(
        envs.create_maze_env.create_maze_env(env_name),
env_name)
def action_fn(obs):
action_space = env.action_space
action_space_mean = (action_space.low + action_space.high) / 2.0
action_space_magn = (action_space.high - action_space.low) / 2.0
random_action = (action_space_mean +
action_space_magn *
np.random.uniform(low=-1.0, high=1.0,
size=action_space.shape))
return random_action
rewards = []
successes = []
for ep in range(num_episodes):
rewards.append(0.0)
successes.append(False)
obs = env.reset()
for _ in range(episode_length):
env.render()
print(env.get_image().shape)
obs, reward, done, _ = env.step(action_fn(obs))
rewards[-1] += reward
successes[-1] = success_fn(reward)
if done:
break
print('Episode {} reward: {}, Success: {}'.format(ep + 1, rewards[-1], successes[-1]))
print('Average Reward over {} episodes: {}'.format(num_episodes, np.mean(rewards)))
print('Average Success over {} episodes: {}'.format(num_episodes, np.mean(successes)))
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument("--env_name", default="AntEnv", type=str)
parser.add_argument("--episode_length", default=500, type=int)
parser.add_argument("--num_episodes", default=100, type=int)
args = parser.parse_args()
run_environment(args.env_name, args.episode_length, args.num_episodes)
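# A usage sketch (assumes the maze assets in envs.create_maze_env are available):
#   env = EnvWithGoal(envs.create_maze_env.create_maze_env('AntMaze'), 'AntMaze')
#   obs = env.reset()  # dict with 'observation', 'achieved_goal', 'desired_goal'
#   obs, reward, done, info = env.step(env.action_space.sample())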
| 31.82
| 94
| 0.598994
|
eb195f20b6165daae7aaf8df04a2898da55779f4
| 1,714
|
py
|
Python
|
metacells/check_avx2.py
|
tanaylab/metacells
|
ecd957b306bd6af2fcfd56efb246ce15b0d8238a
|
[
"MIT"
] | 16
|
2021-06-19T03:03:06.000Z
|
2022-03-21T20:47:15.000Z
|
metacells/check_avx2.py
|
tanaylab/metacells
|
ecd957b306bd6af2fcfd56efb246ce15b0d8238a
|
[
"MIT"
] | 23
|
2021-03-17T09:38:04.000Z
|
2022-03-02T11:04:56.000Z
|
metacells/check_avx2.py
|
tanaylab/metacells
|
ecd957b306bd6af2fcfd56efb246ce15b0d8238a
|
[
"MIT"
] | 1
|
2021-12-02T21:28:11.000Z
|
2021-12-02T21:28:11.000Z
|
'''
Check for AVX2
--------------
This is only imported on X86_64 machines if we compiled the C++ extension to use AVX2 (when creating
the pre-compiled wheels). If this is run on a non-AVX2 machine, it generates a human-readable error
instead of generating an opaque segmentation fault.
'''
import os
from warnings import warn
HAS_AVX2 = None
try:
with (open('/proc/cpuinfo') if os.path.exists('/proc/cpuinfo')
else os.popen('sysctl -a')) as file:
for line in file.readlines():
if line.startswith('flags'):
features = line.split(' ')
HAS_AVX2 = 'avx2' in features and 'fma' in features
break
except BaseException: # pylint: disable=broad-except
pass
if HAS_AVX2 is None:
AVX2_MAYBE_NOT_SUPPORTED = \
'The metacells precompiled wheel is using AVX2 FMA instructions.\n' \
"However, AVX2 FMA might not be available on this computer's processors.\n" \
'Therefore, using the metacells package might cause a segmentation violation.\n' \
'You can avoid the wheel using: pip install metacells --install-option=--native\n' \
"This will compile the metacells package on and for this computer's processors."
warn(AVX2_MAYBE_NOT_SUPPORTED)
HAS_AVX2 = True
if not HAS_AVX2:
AVX2_NOT_SUPPORTED = \
'The metacells precompiled wheel is using AVX2 FMA instructions.\n' \
"However, AVX2 FMA is not available on this computer's processors.\n" \
'You can avoid the wheel using: pip install metacells --install-option=--native\n' \
"This will compile the metacells package on and for this computer's processors."
raise ImportError(AVX2_NOT_SUPPORTED)
| 40.809524
| 100
| 0.680863
|
1937e3e26348ee1ef8bd110fed853491e907a729
| 1,297
|
py
|
Python
|
scripts/naiveBayesBowDemo.py
|
always-newbie161/pyprobml
|
eb70c84f9618d68235ef9ba7da147c009b2e4a80
|
[
"MIT"
] | 2
|
2021-04-10T18:12:19.000Z
|
2021-05-11T12:07:40.000Z
|
scripts/naiveBayesBowDemo.py
|
always-newbie161/pyprobml
|
eb70c84f9618d68235ef9ba7da147c009b2e4a80
|
[
"MIT"
] | 1
|
2021-04-19T12:25:26.000Z
|
2021-04-19T12:25:26.000Z
|
scripts/naiveBayesBowDemo.py
|
always-newbie161/pyprobml
|
eb70c84f9618d68235ef9ba7da147c009b2e4a80
|
[
"MIT"
] | 1
|
2021-06-21T01:18:07.000Z
|
2021-06-21T01:18:07.000Z
|
# authors: ashishpapanai, animesh-007
import scipy
import scipy.io as sio
import numpy as np
from sklearn.metrics import zero_one_loss
from sklearn.naive_bayes import MultinomialNB,ComplementNB,CategoricalNB,BernoulliNB,GaussianNB
import matplotlib.pyplot as plt
import os
if os.path.isdir('scripts'):
os.chdir('scripts')
data = None
Xtrain = None
Xtest = None
data = sio.loadmat('../data/XwindowsDocData.mat')
Xtrain = data['xtrain']
Xtrain = scipy.sparse.csc_matrix.toarray(Xtrain)
Xtest = data['xtest']
Xtest = scipy.sparse.csc_matrix.toarray(Xtest)
ytrain = data['ytrain']
ytest = data['ytest']
model = BernoulliNB()
model.fit(Xtrain, ytrain)
ypred_train = model.predict(Xtrain)
err_train = np.mean(zero_one_loss(ytrain, ypred_train))
ypred_test = model.predict(Xtest)
err_test = np.mean(zero_one_loss(ytest, ypred_test))
print('misclassification rates on train = '+str(err_train*100) +
' pc, on test = '+str(err_test*100)+' pc\n')
C = np.unique(data['ytrain']).size
for i in range(0, C):
plt.figure(figsize=(10,10)) # make sure thin spike is visible
plt.bar(np.arange(0, 600, 1), np.exp(model.feature_log_prob_)[i, :])
plt.title('p(xj=1|y='+str(i)+')')
fileName = 'naiveBayesBow'+str(i+1)+'ClassCond'
plt.savefig(r'../figures/'+fileName)
plt.show()
| 29.477273
| 95
| 0.720894
|
2e9b9390d68497aa634cfaa60edf04843c05711b
| 288
|
py
|
Python
|
fplib/functor.py
|
ppedemon/fplib
|
61c221e967e924b3fd0a3014e80b331574d45f0c
|
[
"MIT"
] | null | null | null |
fplib/functor.py
|
ppedemon/fplib
|
61c221e967e924b3fd0a3014e80b331574d45f0c
|
[
"MIT"
] | null | null | null |
fplib/functor.py
|
ppedemon/fplib
|
61c221e967e924b3fd0a3014e80b331574d45f0c
|
[
"MIT"
] | null | null | null |
class Functor:
def __rrshift__(self, f):
return self.fmap(f)
@classmethod
def unit(cls, x):
raise NotImplementedError('unit not defined')
def fmap(self, f):
raise NotImplementedError('fmap not defined')
def unit(cls, x):
return cls.unit(x)
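# A minimal sketch (not part of fplib): a trivial Box functor showing how
# __rrshift__ lets a plain function be mapped over a wrapped value via `f >> box`.
class _Box(Functor):
    def __init__(self, value):
        self.value = value
    @classmethod
    def unit(cls, x):
        return cls(x)
    def fmap(self, f):
        return _Box(f(self.value))
# (lambda x: x + 1) >> _Box.unit(41) evaluates to a _Box holding 42.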
| 19.2
| 53
| 0.628472
|
021c6f372132ca94f8a21a2357df1e5dda4de6ba
| 9,931
|
py
|
Python
|
spiral/core/foundation.py
|
acdaniells/spiral
|
d78344007969d7c991216901b4a9d3ad7d768587
|
[
"BSD-3-Clause"
] | null | null | null |
spiral/core/foundation.py
|
acdaniells/spiral
|
d78344007969d7c991216901b4a9d3ad7d768587
|
[
"BSD-3-Clause"
] | 1
|
2020-04-01T18:39:48.000Z
|
2020-04-01T18:39:48.000Z
|
spiral/core/foundation.py
|
acdaniells/spiral
|
d78344007969d7c991216901b4a9d3ad7d768587
|
[
"BSD-3-Clause"
] | 1
|
2020-04-01T18:36:44.000Z
|
2020-04-01T18:36:44.000Z
|
"""
Spiral core foundation module.
"""
import logging
import os
import sys
from importlib import reload as reload_module
from spiral.core import extension, ml, plot
from spiral.ext.ext_argparse import ArgparseController as Controller
from cement.core import (
arg,
cache,
config,
controller,
log,
mail,
output,
plugin,
template,
)
from cement.core.foundation import (
App as CementApp,
add_handler_override_options,
handler_override,
)
from cement.core.handler import HandlerManager
from cement.core.hook import HookManager
from cement.core.interface import InterfaceManager
from cement.utils import misc
from cement.utils.misc import minimal_logger
join = os.path.join
LOG = minimal_logger(__name__)
class App(CementApp):
"""
Primary application object class.
"""
class Meta:
"""
Application meta-data.
"""
extension_handler = "spiral"
"""
Handler class that implements the Extension interface.
"""
plot_handler = "plotly"
"""
Handler class that implements the Plot interface.
"""
core_extensions = [
"cement.ext.ext_dummy",
"cement.ext.ext_plugin",
"cement.ext.ext_configparser",
"spiral.ext.ext_argparse",
"spiral.ext.ext_logging",
"spiral.ext.ext_plotly",
]
"""
List of Spiral core extensions. These are generally required by
Spiral and should only be modified if you know what you're
doing. Use ``App.Meta.extensions`` to add to this list, rather
than overriding core extensions. That said if you want to prune
down your application, you can remove core extensions if they
are not necessary (for example if using your own log handler
extension you might not need/want ``LoggingLogHandler`` to be
registered).
"""
core_meta_override = [
"debug",
"plugin_dir",
"ignore_deprecation_warnings",
"template_dir",
"mail_handler",
"cache_handler",
"log_handler",
"output_handler",
"template_handler",
"plot_handler",
]
"""
List of meta options that can/will be overridden by config
options of the ``base`` config section (where ``base`` is the
base configuration section of the application which is
determined by ``App.Meta.config_section`` but defaults to
``App.Meta.label``). These overrides are required by the
framework to function properly and should not be used by
end-user (developers) unless you really know what you're doing.
To add your own extended meta overrides you should use
``App.Meta.meta_override``.
"""
core_interfaces = [
extension.ExtensionInterface,
log.LogInterface,
config.ConfigInterface,
mail.MailInterface,
plugin.PluginInterface,
output.OutputInterface,
template.TemplateInterface,
arg.ArgumentInterface,
controller.ControllerInterface,
cache.CacheInterface,
plot.PlotInterface,
ml.MLInterface,
]
"""
List of core interfaces to be defined (by the framework). You
should not modify this unless you really know what you're
doing... instead, you probably want to add your own interfaces
to ``App.Meta.interfaces``.
"""
def __init__(self, label=None, **kw):
self._loaded_bootstrap = None
self.interface = None
self.handler = None
self.hook = None
self.controller = None
self.plot = None
self._suppress_loggers()
super().__init__(label, **kw)
@staticmethod
def _suppress_loggers():
"""
Set logging level of non-application loggers to ERROR.
"""
for name in logging.root.manager.loggerDict:
logger = logging.getLogger(name)
if "cement" not in name and "spiral" not in name:
LOG.debug(f"Setting log level for '{name}' to ERROR")
logger.setLevel(logging.ERROR)
def _lay_cement(self):
"""
Initialize the framework.
"""
LOG.debug(f"laying cement for the '{self._meta.label}' application")
self.interface = InterfaceManager(self)
self.handler = HandlerManager(self)
self.hook = HookManager(self)
# define framework hooks
self.hook.define("pre_setup")
self.hook.define("post_setup")
self.hook.define("pre_run")
self.hook.define("post_run")
self.hook.define("pre_argument_parsing")
self.hook.define("post_argument_parsing")
self.hook.define("pre_close")
self.hook.define("post_close")
self.hook.define("signal")
self.hook.define("pre_render")
self.hook.define("post_render")
# define application hooks from meta
for label in self._meta.define_hooks:
self.hook.define(label)
# register some built-in framework hooks
self.hook.register("post_setup", add_handler_override_options, weight=-99)
self.hook.register("post_argument_parsing", handler_override, weight=-99)
# register application hooks from meta. the hooks listed in
# App.Meta.hooks are registered here, so obviously can not be
# for any hooks other than the builtin framework hooks that we just
# defined here (above). Anything that we couldn't register here
# will be retried after setup
self.__retry_hooks__ = []
for hook_spec in self._meta.hooks:
if not self.hook.defined(hook_spec[0]):
LOG.debug(f"hook {hook_spec[0]} not defined, will retry after setup")
self.__retry_hooks__.append(hook_spec)
else:
self.hook.register(*hook_spec)
# define interfaces
for i in self._meta.core_interfaces:
self.interface.define(i)
for i in self._meta.interfaces:
self.interface.define(i)
# extension handler is the only thing that can't be loaded... as,
# well, an extension. ;)
self.handler.register(extension.ExtensionHandler)
# register application handlers
for handler_class in self._meta.handlers:
self.handler.register(handler_class)
def setup(self):
"""
Application setup method.
        This method wraps all ``_setup`` actions in one call. It is called
before ``self.run()``, allowing the application to be setup but not
executed (possibly letting the developer perform other actions before
full execution).
All handlers should be instantiated and callable after setup is
complete.
"""
LOG.debug(f"now setting up the '{self._meta.label}' application")
if self._meta.bootstrap is not None:
LOG.debug(f"importing bootstrap code from {self._meta.bootstrap}")
if (
self._meta.bootstrap not in sys.modules
or self._loaded_bootstrap is None
):
__import__(self._meta.bootstrap, globals(), locals(), [], 0)
if hasattr(sys.modules[self._meta.bootstrap], "load"):
sys.modules[self._meta.bootstrap].load(self)
self._loaded_bootstrap = sys.modules[self._meta.bootstrap]
else:
reload_module(self._loaded_bootstrap)
for _result in self.hook.run("pre_setup", self):
pass # pragma: nocover
self._setup_extension_handler()
self._setup_signals()
self._setup_config_handler()
self._setup_mail_handler()
self._setup_cache_handler()
self._setup_log_handler()
self._setup_plugin_handler()
self._setup_arg_handler()
self._setup_output_handler()
self._setup_template_handler()
self._setup_controllers()
self._setup_plot_handler()
for hook_spec in self.__retry_hooks__:
self.hook.register(*hook_spec)
for _result in self.hook.run("post_setup", self):
pass
def _setup_controllers(self):
LOG.debug("setting up application controllers")
if self.handler.registered("controller", "base"):
self.controller = self._resolve_handler("controller", "base")
else:
class DefaultBaseController(Controller):
class Meta:
label = "base"
def _default(self):
# don't enforce anything cause developer might not be
# using controllers... if they are, they should define
# a base controller.
pass
self.handler.register(DefaultBaseController)
self.controller = self._resolve_handler("controller", "base")
def _setup_plot_handler(self):
self.plot = self._resolve_handler("plot", self._meta.plot_handler)
class TestApp(App):
"""
Testing application.
"""
# tells pytest to not consider this a class for testing
__test__ = False
class Meta:
"""
Test application meta-data.
"""
label = f"app-{misc.rando()[:12]}"
argv = []
core_system_config_files = []
core_user_config_files = []
config_files = []
core_system_config_dirs = []
core_user_config_dirs = []
config_dirs = []
core_system_template_dirs = []
core_user_template_dirs = []
core_system_plugin_dirs = []
core_user_plugin_dirs = []
plugin_dirs = []
exit_on_close = False
| 31.526984
| 85
| 0.609204
|
22e76c125b742bb80a73897a373372f4c1ef8d04
| 655
|
py
|
Python
|
torch/optimization/combine_optimization.py
|
jihuacao/Putil
|
b753fc94bea4cbda00f483681c55f0e9f54adef2
|
[
"Apache-2.0"
] | 1
|
2018-12-09T06:09:29.000Z
|
2018-12-09T06:09:29.000Z
|
torch/optimization/combine_optimization.py
|
jihuacao/Putil
|
b753fc94bea4cbda00f483681c55f0e9f54adef2
|
[
"Apache-2.0"
] | null | null | null |
torch/optimization/combine_optimization.py
|
jihuacao/Putil
|
b753fc94bea4cbda00f483681c55f0e9f54adef2
|
[
"Apache-2.0"
] | null | null | null |
# coding=utf-8
#from torch.optim import Optimizer
import torch
class CombineOptimization:
def __init__(self, **optimizations):
self._optimizations = optimizations
pass
def step(self, closure=None):
for index, (k, v) in enumerate(self._optimizations.items()):
v.step()
pass
pass
def load_state_dict(self, state_dict, unexisted_strategy):
for index, (k, v) in enumerate(self._optimizations.items()):
if k in state_dict.dict():
v.load_state_dict(state_dict[k])
pass
else:
pass
pass
pass
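# A usage sketch (illustration only; `backbone` and `head` modules are assumed to
# exist): wrapping two torch optimizers so a single step() call advances both.
#
#   opt = CombineOptimization(
#       backbone=torch.optim.SGD(backbone.parameters(), lr=0.1),
#       head=torch.optim.Adam(head.parameters(), lr=1e-3),
#   )
#   loss.backward()
#   opt.step()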
| 26.2
| 68
| 0.574046
|
4a23f60ae4d4981c154f86980c84b697fda1cf57
| 9,515
|
py
|
Python
|
infdist/simulator/experiment/legacy/trial.py
|
zeroos/infdist
|
5fca2c42bbe5ea650866a26568d1eaf240b2b47e
|
[
"MIT"
] | null | null | null |
infdist/simulator/experiment/legacy/trial.py
|
zeroos/infdist
|
5fca2c42bbe5ea650866a26568d1eaf240b2b47e
|
[
"MIT"
] | null | null | null |
infdist/simulator/experiment/legacy/trial.py
|
zeroos/infdist
|
5fca2c42bbe5ea650866a26568d1eaf240b2b47e
|
[
"MIT"
] | null | null | null |
from copy import deepcopy
from optimization.agent import ( # NOQA
EstimatingAgent,
FixedRatioAgent,
FullCommAgent,
FullKnowledgeAgent,
GreedyConstrainedAgent,
)
from simulator.network import NS3Network
from simulator import simulator
from optimization import missions, simplesim
from optimization.models import MessageSet
class Trial:
def __init__(self, nodes_num, t_end, msgset):
self.nodes_num = nodes_num
self.t_end = t_end
self.msgset = msgset
self.agent_cls = FullCommAgent
self.agent_kwargs = {}
self.messages = None
self.ctx = None
self.net = None
self.agents = None
self.constraints = {}
self.now_func = simulator.now_float
self.network_data_rate = 5.5
def create_agent(self, i):
return self.agent_cls(i, self.net, self.ctx, self.now_func,
**self.agent_kwargs)
def agent_stats(self):
return {
agent: (
len(agent.received_messages),
self.ctx.utility(agent.received_messages).value(),
)
for agent in self.agents
}
def stats(self):
total_utility = self.ctx.utility(self.all_received_messages()).value()
no_duplicates = MessageSet(
self.all_received_messages().t_end,
list(set(self.all_received_messages().all())),
)
latencies = [
m.t_rcv - m.t_gen
for m in self.all_received_messages().all()
]
avg_latency = sum(latencies)/(len(latencies) or 1)
constraints = {}
for name, constraint in self.constraints.items():
constraints[name] = constraint(no_duplicates)
all_messages = deepcopy(self.messages)
simplesim.apply_latency(all_messages, 0)
return {
'all_messages': all_messages,
't_end': self.t_end,
'no_duplicates': no_duplicates,
'all_received_messages': self.all_received_messages(),
'received_num': sum(
[len(agent.received_messages) for agent in self.agents]
),
'sent_num': sum(
[len(agent.sent_messages) for agent in self.agents]
),
'sent_received_num': len(no_duplicates),
'total_utility': total_utility,
'normalized_utility': total_utility/len(self.agents)/self.t_end,
'total_messages': len(self.messages),
'constraints': constraints,
'max_utility': self.ctx.utility(all_messages).value(),
'avg_latency': avg_latency,
'agents_num': len(self.agents),
}
def all_generated_messages(self):
result = MessageSet(0, [])
for agent in self.agents:
result += agent.generated_messages
return result
def all_received_messages(self):
result = MessageSet(0, [])
for agent in self.agents:
result += agent.received_messages
return result
@staticmethod
def print_stats(stats):
print(
(
"Received # {}, sent: {}, "
"total utility: {}, "
"normalized utility: {}"
).format(
stats['received_num'],
stats['sent_num'],
stats['total_utility'],
stats['normalized_utility'],
)
)
print((
"Received {:.0f}% of all messages, "
"{:.0f}% of sent messages.").format(
stats['sent_received_num']/stats['total_messages']*100,
stats['sent_received_num']/(stats['sent_num'] or 1)*100,
))
print("AVG data rate: {:.3f} Mbps with avg latency of {}".format(
sum([m.size for m in stats['no_duplicates'].all()]) * 8 / 10**6
/ stats['t_end'],
stats['avg_latency'],
))
print("Max utility: {}".format(
stats['max_utility']
))
for name, constraint_violations in stats['constraints'].items():
if constraint_violations > 0:
print("!!! {} constraint NOT met ({} times)".format(
name, constraint_violations
))
def finish_mission(self):
real_t_end = simulator.now_float()
for a in self.agents:
a.finish_mission(real_t_end)
@staticmethod
def generate_messages_from_msgset(msgset, t_end, nodes_num):
msgset_type = msgset.get('type', '3D_reconstruction')
seed = msgset.get('seed', 0)
if msgset_type == '3D_reconstruction':
messages, ctx = \
missions.generate_simple_3D_reconstruction(
t_end,
msgset=msgset,
senders=set(range(nodes_num)),
seed=seed,
)
elif msgset_type == 'serialized':
messages = msgset['messages']
ctx = msgset['ctx']
return messages, ctx
def prepare_messages(self):
if self.messages is not None:
assert self.ctx is not None
return # already prepared
self.messages, self.ctx = self.generate_messages_from_msgset(
self.msgset, self.t_end, self.nodes_num,
)
def prepare_agents(self):
assert self.net is not None, "Network has to be prepared before agents"
if self.agents is not None:
return # already prepared
self.agents = [
self.create_agent(i)
for i in range(self.nodes_num)
]
def prepare_network(self):
if self.net is not None:
return # already prepared
self.net = NS3Network(self.nodes_num, self.network_data_rate)
def print_progress(self):
print(
f" {self.now_func():.02f}s "
f"({self.now_func()/self.t_end*100:.02f}%)",
end="\r"
)
def run(self):
self.prepare_messages()
self.prepare_network()
self.prepare_agents() # this has to be done after network
for i in range(self.nodes_num):
self.net.add_message_received_callback(
self.agents[i].gen_message_received_callback(),
i
)
for m in self.messages.all():
# print("Scheduling sending at {} by {}".format(m.t_gen, m.sender))
native_message = self.net.serialize(m)
agent = self.agents[m.sender]
simulator.schedule(m.t_gen, agent.gen_generate_message_callback(
native_message
))
simulator.schedule(
m.t_gen,
self.print_progress
)
simulator.schedule(self.t_end, self.finish_mission)
simulator.stop(self.t_end+1)
simulator.run()
class FixedRatioTrial(Trial):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.agent_cls = FixedRatioAgent
def set_drop_rate(self, drop_rate):
self.agent_kwargs = {'drop_ratio': drop_rate}
class GreedyTrial(Trial):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.agent_cls = GreedyConstrainedAgent
self.agent_kwargs = {
'constraints': {},
}
@property
def constraints(self):
return self.agent_kwargs['constraints']
@constraints.setter
def constraints(self, value):
self.agent_kwargs['constraints'] = value
class TreeTrial(Trial):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.prepare_messages()
self.agent_cls = EstimatingAgent
self.agent_kwargs = {
'agents': {
ident: lambda t: set()
for ident in range(self.nodes_num)
},
'constraints': {},
'window_size': 1000,
}
self.drop_rate_set = False
self.throughput_set = False
@property
def constraints(self):
return self.agent_kwargs['constraints']
@constraints.setter
def constraints(self, value):
self.agent_kwargs['constraints'] = value
def add_msgnum_constraint(self, messages_num, timeslot_length):
# self.agent_kwargs['window_size'] = timeslot_length
self.constraints = {
'MSGNUM': simplesim.create_msgnum_constraint_violations(
messages_num, timeslot_length
),
}
def add_throughput_constraint(self, throughput, timeslot_length):
# self.agent_kwargs['window_size'] = timeslot_length
self.constraints = {
'TPUT': simplesim.create_throughput_constraint_violations(
throughput, timeslot_length,
),
}
return self.constraints
def set_throughput(self, throughput):
return self.add_throughput_constraint(throughput, 2.5)
def set_drop_rate(self, drop_rate):
assert not self.drop_rate_set
timeslot_length = 2.5
avg_msgs_per_second = 1.5*len(self.messages)/self.t_end
self.add_msgnum_constraint(
(1-drop_rate)*(timeslot_length)*avg_msgs_per_second,
timeslot_length
)
def set_simulations_num(self, value):
self.agent_kwargs['simulations_num'] = value
def set_suppress_warnings(self, value):
self.agent_kwargs['suppress_warnings'] = value
| 31.611296
| 79
| 0.575617
|
6b2705d22a8cfd39b0e10b8d480a7025bf549140
| 16,237
|
py
|
Python
|
MCCGPIndividual.py
|
TM6501/cgp
|
2ddec84b13c53228602a5bfa5ae4f9a0943609bc
|
[
"MIT"
] | 1
|
2020-05-23T04:41:31.000Z
|
2020-05-23T04:41:31.000Z
|
MCCGPIndividual.py
|
TM6501/cgp
|
2ddec84b13c53228602a5bfa5ae4f9a0943609bc
|
[
"MIT"
] | null | null | null |
MCCGPIndividual.py
|
TM6501/cgp
|
2ddec84b13c53228602a5bfa5ae4f9a0943609bc
|
[
"MIT"
] | null | null | null |
import random
import copy
import inspect
import itertools
import math
import AbstractCGPIndividual
class MCCGPIndividual(AbstractCGPIndividual.AbstractCGPIndividual):
"""This class represents a Multi-Chromosomal Individual. MCCGP individuals
will maintain a separate genotype for every output and can mutate using
crossover.
Traditionally, they have a fitness function for every output,
but that will be optional with this class. If multiple fitness functions
are provided, crossover will choose the best of each genotype. If not, then
genotypes will be chosen randomly from the available population (assumed to
be the best of the most recent epoch's generation)."""
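    # Illustrative example (not part of the original source): with outputSize == 3,
    # this individual keeps three independent genotypes, one per output. With
    # crossoverStrategy == 'singlebest' (one fitness value per output), crossover
    # copies the best-scoring genotype for each output into a single individual;
    # otherwise child genotypes are drawn randomly from the parent population.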
def __init__(
self,
type=None,
inputSize=1,
outputSize=1,
shape=None,
pRange=None,
constraintRange=None,
functionList=None,
MC_CGPSpecificParameters=None):
args, _, _, values = inspect.getargvalues(inspect.currentframe())
values.pop("self")
for arg, val in values.items():
setattr(self, arg, val)
self.integerConversion()
self.__genotype = None
self.crossoverStrategy = MC_CGPSpecificParameters['crossoverStrategy']
if not self.checkAndSetParameters():
raise ValueError("Found error in parameters.")
def integerConversion(self):
"""Convert any values that are needed as integers, but may have been
passed in as floating point values."""
integerList = ['rows', 'cols', 'maxColForward', 'maxColBack',
'inputSize']
for name in integerList:
setattr(self, name, int(getattr(self, name)))
def checkAndSetParameters(self):
"""Check all variables passed into this class and modify those that
need to be changed.
Arguments: None
Returns:
True if the class variables were able to be modified enough so
that the class is ready."""
retValue = True
# First check all of the variables that must exist:
if self.inputSize is None or self.outputSize is None \
or self.shape is None or self.pRange is None \
or self.functionList is None \
or self.MC_CGPSpecificParameters is None:
print("At least one required parameter was not provided.")
retValue = False
if self.inputSize is not None:
self.totalInputCount = self.inputSize
        # For later simplicity, there will be separate values for every output:
        # shape, pRange, constraintRange, and functionList. For any of these where
        # the user provided only a single value, we'll create the lists here:
# Shape:
if isinstance(self.shape, list):
            if len(self.shape) != self.outputSize:
print("If a list of shapes is provided, its length must be the\
number of outputs.")
retValue = False
else: # Make a list from the single provided shape:
self.shape = [self.shape] * self.outputSize
# To make accessing the values easier later:
self.rows = []
self.cols = []
self.maxColForward = []
self.maxColBack = []
for shape in self.shape:
self.rows.append(shape['rowCount'])
self.cols.append(shape['colCount'])
self.maxColForward.append(shape['maxColForward'])
self.maxColBack.append(shape['maxColBack'])
# pRanges:
if isinstance(self.pRange[0], list):
            if len(self.pRange) != self.outputSize:
print("If a list of pRanges is provided, its length must be \
number of outputs.")
retValue = False
else: # Make a list from the provided pRanges:
self.pRange = [self.pRange] * self.outputSize
# constraintRange:
if self.constraintRange is not None:
if isinstance(self.constraintRange[0], list):
                if len(self.constraintRange) != self.outputSize:
print("If a list of constraints is provided, its length \
must be number of outputs.")
retValue = False
else: # Make a list from the provided constraint ranges:
self.constraintRange = [self.constraintRange] * self.outputSize
else:
self.constraintRange = [None] * self.outputSize
# Function lists:
if isinstance(self.functionList[0], list):
            if len(self.functionList) != self.outputSize:
print("If the functionlist is provided as a list of lists, the\
top-level list's length must be the outputsize.")
retValue = False
else: # Just a list of functions, create our list of lists:
self.functionList = [self.functionList] * self.outputSize
return retValue
def getPercentageNodesUsed(self):
# Return a list of percentages:
percentages = []
for i in self.__genotype:
activeGenes = self.getActiveGenes_generic(i, 1)
percentages.append((len(activeGenes) / len(i)) * 100.0)
return percentages
def randomize(self):
"""Randomize this individual by creating all of our genotypes."""
self.__genotype = []
for i in range(self.outputSize):
self.__genotype.append(self.getRandomizedGenotype(
self.functionList[i], self.rows[i], self.cols[i],
self.maxColForward[i], self.maxColBack[i], self.pRange[i],
self.totalInputCount, 1))
def calculateOutputs(self, inputs):
outputs = []
for i in range(self.outputSize):
outputs.append(self.calculateSingleGenotypeOutput(
self.__genotype[i], inputs, self.functionList[i]))
return outputs
def calculateSingleGenotypeOutput(self, genotype, inputs, functionList):
"""Calculate the output from a single genotype.
Arguments:
genotype - The genotype to use.
inputs - A list of inputs, expected to be of the appropriate
length.
functionList - The functionlist associated with this genotype.
Returns: A single output value for this genotype"""
# Start out with none of the values calculated:
geneOutputs = [None] * len(genotype)
# Fill in that the inputs have values available:
inputNumber = 0
for input in inputs:
geneOutputs[inputNumber] = inputs[inputNumber]
inputNumber += 1
# Get all of the active genes:
temp = self.getActiveGenes_generic(genotype, 1)
activeGenes = copy.deepcopy(temp)
# Remove the input and output genes from the active gene list:
activeGenes = [x for x in activeGenes if x not in
range(self.totalInputCount)]
activeGenes = [x for x in activeGenes if x not in
range(len(genotype) - 1,
len(genotype))]
# Make sure they are in order:
activeGenes = sorted(activeGenes)
# To deal with the possibility of recurrent connections, we will move
# forward in the active genes list, calculating every output we can
# and repeat that process for as long as we are still making progress
# (at least 1 gene output value is calculated). If we can't make any
# more progress (circular connection), we'll set uncalculatable inputs
# to zero and finish producing outputs.
progressMade = True
while progressMade:
progressMade = False
genesToRemove = []
for geneNum in activeGenes:
# Get the gene's inputs:
X = geneOutputs[genotype[geneNum][1]]
Y = geneOutputs[genotype[geneNum][2]]
# Check if we can calculate our output:
if X is not None and Y is not None:
# Calculate the value and set it into our outputs:
geneOutputs[geneNum] = self.constrain(
genotype[geneNum][3] * \
(functionList[genotype[geneNum][0]](
X, Y, genotype[geneNum][3])))
# Mark progress made:
progressMade = True
# Remove from future calculations:
genesToRemove.append(geneNum)
activeGenes = [x for x in activeGenes if x not in genesToRemove]
# No more progress being made, calculate the rest with 0 used for
# uncalculatable inputs. Moving from left to right, some values may
# cascade as one gene's output provides another's input:
for geneNum in activeGenes:
X = geneOutputs[genotype[geneNum][1]]
Y = geneOutputs[genotype[geneNum][2]]
if X is None:
X = 0
if Y is None:
Y = 0
geneOutputs[geneNum] = self.constrain(
genotype[geneNum][3] * (functionList[genotype[geneNum][0]](
X, Y, genotype[geneNum][3])))
# Now, all gene outputs should be set, we can collect our output, which
# should be the last value:
output = None
geneNum = len(genotype) - 1
output = geneOutputs[genotype[geneNum][0]]
if output is None:
self.printNumberedGenotype()
raise ValueError("Output for gene %d not available." %
(genotype[geneNum][0]))
return output
def __getProbabilisticMutatedChild(self, genMutationRate=0.1,
outMutationRate=0.1,
application='pergene'):
child = copy.deepcopy(self)
for i in range(len(child.__genotype)):
child.probabilisticMutate(child.__genotype[i],
child.functionList[i],
child.pRange[i],
child.maxColForward[i],
child.maxColBack[i],
totalInputCount=child.totalInputCount,
outputSize=1,
rows=child.rows[i],
cols=child.cols[i],
genMutationRate=genMutationRate,
outMutationRate=outMutationRate,
application=application)
return child
def __getActiveGeneMutatedChild(self, numGenesToMutate=1):
child = copy.deepcopy(self)
for i in range(len(child.__genotype)):
activeGenes = child.getActiveGenes_generic(child.__genotype[i], 1)
child.activeGeneMutate(child.__genotype[i],
child.functionList[i],
child.pRange[i],
activeGenes,
child.maxColForward[i],
child.maxColBack[i],
numGenesToMutate=numGenesToMutate,
totalInputCount=child.totalInputCount,
outputSize=1,
rows=child.rows[i],
cols=child.cols[i])
return child
def getOneMutatedChild(self, mutationStrategy):
"""This function will return a mutated child based upon this
individual.
Arguments:
mutationStrategy - A dictionary with the name of the mutation
strategy as well as any parameters necessary
for that strategy.
Returns:
The new child."""
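        # Example strategy dictionaries (hypothetical values; the keys match the
        # branches below):
        #   {'name': 'probability', 'genRate': [0.05, 0.1], 'outRate': [0.05, 0.1],
        #    'application': 'pergene'}
        #   {'name': 'activegene', 'numGenes': [1, 2]}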
# Mutation rate and number of genes to mutate are given as ranges.
# We need to select a value from within the available range.
# Apply a certain chance of mutation to all genes:
if mutationStrategy['name'].lower() == 'probability':
return self.__getProbabilisticMutatedChild(
genMutationRate=random.uniform(mutationStrategy['genRate'][0],
mutationStrategy['genRate'][1]),
outMutationRate=random.uniform(mutationStrategy['outRate'][0],
mutationStrategy['outRate'][1]),
application=mutationStrategy['application'])
# Mutate genes until at least X active genes are mutated. X is
# normally 1.
elif mutationStrategy['name'].lower() == 'activegene':
return self.__getActiveGeneMutatedChild(
numGenesToMutate=random.randint(
mutationStrategy['numGenes'][0],
mutationStrategy['numGenes'][1]))
else:
            raise ValueError("Unknown mutation strategy.")
def performOncePerEpochUpdates(self, listAllIndividuals, epochFitnesses):
"""Multi-chromosomal individuals do crossover mutations once every
epoch."""
return self.__produceCrossoverIndividuals(listAllIndividuals, epochFitnesses)
def __becomeRandomCrossover(self, listOfParents):
"""Given a list of parents to pull from, turn our genome into a random
crossover of all parents."""
maxRand = len(listOfParents) - 1
for i in range(len(self.__genotype)):
parentNum = random.randint(0, maxRand)
self.__genotype[i] = copy.deepcopy(listOfParents[parentNum].__genotype[i])
def __produceNewPopulationFromParents_randomCrossover(self, listAllIndividuals):
"""Assume that all given individuals are the parents of the new
generation and produce some crossover individuals from them."""
childrenToProduce = self.MC_CGPSpecificParameters['numberCrossoverChildren']
maxParent = len(listAllIndividuals)
for i in range(childrenToProduce):
newInd = copy.deepcopy(self)
newInd.__becomeRandomCrossover(listAllIndividuals[:maxParent])
listAllIndividuals.append(newInd)
return listAllIndividuals
def __produceCrossoverIndividuals(self, listAllIndividuals, epochFitnesses):
"""Add to the list of individuals as many crossover individuals as we
want to create."""
        # There are 2 possibilities with crossover individuals:
# 1. We have fitness functions for each output and we can pick the
# best of each to produce a single best individual.
# 2. We have a single fitness function and we should crossover from
# random combinations of our best individuals.
if self.crossoverStrategy.lower() == "singlebest":
return self.__produceNewPopulationFromParents_singleBest(listAllIndividuals, epochFitnesses)
else:
return self.__produceNewPopulationFromParents_randomCrossover(listAllIndividuals)
def __produceNewPopulationFromParents_singleBest(self, listAllIndividuals,
epochFitnesses):
"""Change this individual's genotypes into the best of all available
individuals."""
        # When creating only a single individual, it is assumed that we are that
# individual. We need to modify ourselves, then set ourselves as the
# only item in the all-individuals list that was passed in.
bestFitnesses = copy.deepcopy(epochFitnesses[0])
bestFitnessIndices = [0] * len(epochFitnesses[0])
# Determine the best individual genotypes:
for indNum in range(len(epochFitnesses)):
for valNum in range(len(epochFitnesses[0])):
if epochFitnesses[indNum][valNum] >= bestFitnesses[valNum]:
bestFitnesses[valNum] = epochFitnesses[indNum][valNum]
bestFitnessIndices[valNum] = indNum
for valNum in range(len(epochFitnesses[0])):
self.__genotype[valNum] = copy.deepcopy(
listAllIndividuals[bestFitnessIndices[valNum]].__genotype[valNum])
return [self]
| 40.899244
| 104
| 0.594506
|
721765d9ced30e0992833eeda36fd246236ebc40
| 6,031
|
py
|
Python
|
lib/saltlint/__init__.py
|
myii/salt-lint
|
efeaaff8343e26376dc06b6090b14808a87f95f2
|
[
"MIT"
] | null | null | null |
lib/saltlint/__init__.py
|
myii/salt-lint
|
efeaaff8343e26376dc06b6090b14808a87f95f2
|
[
"MIT"
] | null | null | null |
lib/saltlint/__init__.py
|
myii/salt-lint
|
efeaaff8343e26376dc06b6090b14808a87f95f2
|
[
"MIT"
] | null | null | null |
# Copyright (c) 2013-2014 Will Thames <will@thames.id.au>
# Modified work Copyright (c) 2019 Roald Nefs
from __future__ import print_function
from collections import defaultdict
import os
import re
import sys
import six
import saltlint.utils
import codecs
default_rulesdir = os.path.join(os.path.dirname(saltlint.utils.__file__), 'rules')
class SaltLintRule(object):
def __repr__(self):
return self.id + ": " + self.shortdesc
def verbose(self):
return self.id + ": " + self.shortdesc + "\n " + self.description
match = None
@staticmethod
def unjinja(text):
return re.sub(r"{{[^}]*}}", "JINJA_VAR", text)
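    # Example: unjinja("ensure {{ pillar['pkg'] }} is installed")
    #          -> "ensure JINJA_VAR is installed"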
def matchlines(self, file, text):
matches = []
if not self.match:
return matches
# arrays are 0-based, line numbers are 1-based
# so use prev_line_no as the counter
for (prev_line_no, line) in enumerate(text.split("\n")):
if line.lstrip().startswith('#'):
continue
rule_id_list = saltlint.utils.get_rule_skips_from_line(line)
if self.id in rule_id_list:
continue
result = self.match(file, line)
if not result:
continue
message = None
if isinstance(result, six.string_types):
message = result
matches.append(Match(prev_line_no+1, line,
file['path'], self, message))
return matches
class RulesCollection(object):
def __init__(self):
self.rules = []
def register(self, obj):
self.rules.append(obj)
def __iter__(self):
return iter(self.rules)
def __len__(self):
return len(self.rules)
def extend(self, more):
self.rules.extend(more)
def run(self, statefile, tags=set(), skip_list=frozenset()):
text = ""
matches = list()
try:
with codecs.open(statefile['path'], mode='rb', encoding='utf-8') as f:
text = f.read()
except IOError as e:
print("WARNING: Coudn't open %s - %s" %
(statefile['path'], e.strerror),
file=sys.stderr)
return matches
for rule in self.rules:
if not tags or not set(rule.tags).union([rule.id]).isdisjoint(tags):
rule_definition = set(rule.tags)
rule_definition.add(rule.id)
if set(rule_definition).isdisjoint(skip_list):
matches.extend(rule.matchlines(statefile, text))
return matches
def __repr__(self):
return "\n".join([rule.verbose()
for rule in sorted(self.rules, key=lambda x: x.id)])
def listtags(self):
tags = defaultdict(list)
for rule in self.rules:
for tag in rule.tags:
tags[tag].append("[{0}]".format(rule.id))
results = []
for tag in sorted(tags):
results.append("{0} {1}".format(tag, tags[tag]))
return "\n".join(results)
@classmethod
def create_from_directory(cls, rulesdir):
result = cls()
result.rules = saltlint.utils.load_plugins(os.path.expanduser(rulesdir))
return result
class Match(object):
def __init__(self, linenumber, line, filename, rule, message=None):
self.linenumber = linenumber
self.line = line
self.filename = filename
self.rule = rule
self.message = message or rule.shortdesc
def __repr__(self):
formatstr = u"[{0}] ({1}) matched {2}:{3} {4}"
return formatstr.format(self.rule.id, self.message,
self.filename, self.linenumber, self.line)
class Runner(object):
def __init__(self, rules, state, tags, skip_list, exclude_paths,
verbosity=0, checked_files=None):
self.rules = rules
self.states = set()
# assume state is directory
if os.path.isdir(state):
self.states.add((os.path.join(state, 'init.sls'), 'state'))
else:
self.states.add((state, 'state'))
self.tags = tags
self.skip_list = skip_list
self._update_exclude_paths(exclude_paths)
self.verbosity = verbosity
if checked_files is None:
checked_files = set()
self.checked_files = checked_files
def _update_exclude_paths(self, exclude_paths):
if exclude_paths:
# These will be (potentially) relative paths
paths = [s.strip() for s in exclude_paths]
self.exclude_paths = paths + [os.path.abspath(p) for p in paths]
else:
self.exclude_paths = []
def is_excluded(self, file_path):
# Any will short-circuit as soon as something returns True, but will
# be poor performance for the case where the path under question is
# not excluded.
return any(file_path.startswith(path) for path in self.exclude_paths)
def run(self):
files = list()
for state in self.states:
if self.is_excluded(state[0]):
continue
files.append({'path': state[0], 'type': state[1]})
matches = list()
# remove duplicates from files list
files = [value for n, value in enumerate(files) if value not in files[:n]]
# remove files that have already been checked
files = [x for x in files if x['path'] not in self.checked_files]
for file in files:
if self.verbosity > 0:
print("Examining %s of type %s" % (file['path'], file['type']))
matches.extend(self.rules.run(file, tags=set(self.tags),
skip_list=self.skip_list))
# update list of checked files
self.checked_files.update([x['path'] for x in files])
return matches
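# Illustrative end-to-end flow (not part of the original module): load the bundled
# rules and lint a single state file.
#
#   rules = RulesCollection.create_from_directory(default_rulesdir)
#   matches = Runner(rules, 'init.sls', tags=[], skip_list=[], exclude_paths=[]).run()
#   for match in matches:
#       print(match)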
| 31.087629
| 82
| 0.580501
|
5d00e9e1c199a09476261517643b88289892a39a
| 2,630
|
py
|
Python
|
14-semparsing/ucca/ucca/visualization.py
|
ariasjose/nn4nlp-code
|
7327ea3e93161afbc8c008e287b646daa802be4d
|
[
"Apache-2.0"
] | null | null | null |
14-semparsing/ucca/ucca/visualization.py
|
ariasjose/nn4nlp-code
|
7327ea3e93161afbc8c008e287b646daa802be4d
|
[
"Apache-2.0"
] | null | null | null |
14-semparsing/ucca/ucca/visualization.py
|
ariasjose/nn4nlp-code
|
7327ea3e93161afbc8c008e287b646daa802be4d
|
[
"Apache-2.0"
] | null | null | null |
import operator
import warnings
from collections import defaultdict
import matplotlib.cbook
import networkx as nx
from ucca import layer0, layer1
from ucca.layer1 import Linkage
warnings.filterwarnings("ignore", category=matplotlib.cbook.mplDeprecation)
warnings.filterwarnings("ignore", category=UserWarning)
def draw(passage):
G = nx.DiGraph()
terminals = sorted(passage.layer(layer0.LAYER_ID).all, key=operator.attrgetter("position"))
G.add_nodes_from([(n.ID, {"label": n.text, "node_color": "white"}) for n in terminals])
G.add_nodes_from([(n.ID, {"label": "IMPLICIT" if n.attrib.get("implicit") else "",
"node_color": "gray" if isinstance(n, Linkage) else (
"white" if n.attrib.get("implicit") else "black")})
for n in passage.layer(layer1.LAYER_ID).all])
G.add_edges_from([(n.ID, e.child.ID, {"label": e.tag, "style": "dashed" if e.attrib.get("remote") else "solid"})
for layer in passage.layers for n in layer.all for e in n])
pos = topological_layout(passage)
nx.draw(G, pos, arrows=False, font_size=10,
node_color=[d["node_color"] for _, d in G.nodes(data=True)],
labels={n: d["label"] for n, d in G.nodes(data=True) if d["label"]},
style=[d["style"] for _, _, d in G.edges(data=True)])
nx.draw_networkx_edge_labels(G, pos, font_size=8,
edge_labels={(u, v): d["label"] for u, v, d in G.edges(data=True)})
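# Typical use (illustrative): given a ucca Passage object, call draw(passage) and then
# render the figure with matplotlib, e.g. plt.show() or plt.savefig(...).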
def topological_layout(passage):
visited = defaultdict(set)
pos = {}
implicit_offset = 1 + max((n.position for n in passage.layer(layer0.LAYER_ID).all), default=-1)
remaining = [n for layer in passage.layers for n in layer.all if not n.parents]
while remaining:
node = remaining.pop()
if node.ID in pos: # done already
continue
if node.children:
children = [c for c in node.children if c.ID not in pos and c not in visited[node.ID]]
if children:
visited[node.ID].update(children) # to avoid cycles
remaining += [node] + children
continue
xs, ys = zip(*(pos[c.ID] for c in node.children))
pos[node.ID] = (sum(xs) / len(xs), 1 + max(ys)) # done with children
elif node.layer.ID == layer0.LAYER_ID: # terminal
pos[node.ID] = (int(node.position), 0)
else: # implicit
pos[node.ID] = (implicit_offset, 0)
implicit_offset += 1
return pos
| 46.140351
| 117
| 0.590494
|
fd68467355379e1343b1759f080e43910d7e7172
| 11,181
|
py
|
Python
|
m2cgen/interpreters/interpreter.py
|
jasonkena/m2cgen
|
f831d4248fd9e3f47d1fc41fecc9fa94e056406d
|
[
"MIT"
] | 1
|
2021-01-25T09:55:29.000Z
|
2021-01-25T09:55:29.000Z
|
m2cgen/interpreters/interpreter.py
|
jasonkena/m2cgen
|
f831d4248fd9e3f47d1fc41fecc9fa94e056406d
|
[
"MIT"
] | null | null | null |
m2cgen/interpreters/interpreter.py
|
jasonkena/m2cgen
|
f831d4248fd9e3f47d1fc41fecc9fa94e056406d
|
[
"MIT"
] | null | null | null |
from m2cgen import ast
from m2cgen.assemblers import fallback_expressions
from m2cgen.interpreters.utils import CachedResult, _get_handler_name
class BaseInterpreter:
"""
    Base class of AST interpreters. Provides a single public method, .interpret(),
    which takes an instance of an AST expression and recursively applies the
    method _do_interpret() to it.
"""
def __init__(self):
self._cached_expr_results = {}
def interpret(self, expr):
self._reset_reused_expr_cache()
return self._do_interpret(expr)
# Private methods implementing Visitor pattern
def _pre_interpret_hook(self, expr, **kwargs):
return None, kwargs
def _do_interpret(self, expr, to_reuse=None, **kwargs):
# Hook which allows to override kwargs and to return custom result.
result, kwargs = self._pre_interpret_hook(expr, **kwargs)
# If result is empty, it means that we still need to process expr.
if result is not None:
return result
if expr in self._cached_expr_results:
return self._cached_expr_results[expr].var_name
handler = self._select_handler(expr)
# Note that the reuse flag passed in the arguments has a higher
# precedence than one specified in the expression. One use case for
# this behavior is to override the original to_reuse flag for
# expressions that are wrapped by subroutine expression in case when
# subroutines are not supported by specific interpreter implementation.
expr_to_reuse = to_reuse if to_reuse is not None else expr.to_reuse
if not expr_to_reuse:
return handler(expr, **kwargs)
result = handler(expr, **kwargs)
return self._cache_reused_expr(expr, result)
def _cache_reused_expr(self, expr, expr_result):
# No caching by default.
return expr_result
def _reset_reused_expr_cache(self):
self._cached_expr_results = {}
def _select_handler(self, expr):
handler_name = _get_handler_name(type(expr))
if hasattr(self, handler_name):
return getattr(self, handler_name)
raise NotImplementedError(
f"No handler found for '{type(expr).__name__}'")
class BaseToCodeInterpreter(BaseInterpreter):
def __init__(self, cg, feature_array_name="input"):
super().__init__()
self._cg = cg
self._feature_array_name = feature_array_name
class ToCodeInterpreter(BaseToCodeInterpreter):
"""
This interpreter provides default implementation for the methods
interpreting AST expression into code.
    It can be used for most programming languages and requires only a
    language-specific instance of the CodeGenerator.
!!IMPORTANT!!: Code generators used by this interpreter must know nothing
about AST.
"""
abs_function_name = NotImplemented
atan_function_name = NotImplemented
exponent_function_name = NotImplemented
logarithm_function_name = NotImplemented
log1p_function_name = NotImplemented
power_function_name = NotImplemented
sqrt_function_name = NotImplemented
tanh_function_name = NotImplemented
def __init__(self, cg, feature_array_name="input"):
super().__init__(cg, feature_array_name=feature_array_name)
self.with_vectors = False
self.with_math_module = False
def interpret_id_expr(self, expr, **kwargs):
return self._do_interpret(expr.expr, **kwargs)
def interpret_comp_expr(self, expr, **kwargs):
op = self._cg._comp_op_overwrite(expr.op)
return self._cg.infix_expression(
left=self._do_interpret(expr.left, **kwargs),
op=op,
right=self._do_interpret(expr.right, **kwargs))
def interpret_bin_num_expr(self, expr, **kwargs):
return self._cg.infix_expression(
left=self._do_interpret(expr.left, **kwargs),
op=expr.op.value,
right=self._do_interpret(expr.right, **kwargs))
def interpret_num_val(self, expr, **kwargs):
return self._cg.num_value(value=expr.value)
def interpret_feature_ref(self, expr, **kwargs):
return self._cg.array_index_access(
array_name=self._feature_array_name,
index=expr.index)
def interpret_vector_val(self, expr, **kwargs):
self.with_vectors = True
nested = [self._do_interpret(expr, **kwargs) for expr in expr.exprs]
return self._cg.vector_init(nested)
def interpret_abs_expr(self, expr, **kwargs):
if self.abs_function_name is NotImplemented:
return self._do_interpret(
fallback_expressions.abs(expr.expr), **kwargs)
self.with_math_module = True
nested_result = self._do_interpret(expr.expr, **kwargs)
return self._cg.function_invocation(
self.abs_function_name, nested_result)
def interpret_atan_expr(self, expr, **kwargs):
if self.atan_function_name is NotImplemented:
return self._do_interpret(
fallback_expressions.atan(expr.expr), **kwargs)
self.with_math_module = True
nested_result = self._do_interpret(expr.expr, **kwargs)
return self._cg.function_invocation(
self.atan_function_name, nested_result)
def interpret_exp_expr(self, expr, **kwargs):
if self.exponent_function_name is NotImplemented:
return self._do_interpret(
fallback_expressions.exp(expr.expr),
**kwargs)
self.with_math_module = True
nested_result = self._do_interpret(expr.expr, **kwargs)
return self._cg.function_invocation(
self.exponent_function_name, nested_result)
def interpret_log_expr(self, expr, **kwargs):
if self.logarithm_function_name is NotImplemented:
raise NotImplementedError("Logarithm function is not provided")
self.with_math_module = True
nested_result = self._do_interpret(expr.expr, **kwargs)
return self._cg.function_invocation(
self.logarithm_function_name, nested_result)
def interpret_log1p_expr(self, expr, **kwargs):
if self.log1p_function_name is NotImplemented:
return self._do_interpret(
fallback_expressions.log1p(expr.expr), **kwargs)
self.with_math_module = True
nested_result = self._do_interpret(expr.expr, **kwargs)
return self._cg.function_invocation(
self.log1p_function_name, nested_result)
def interpret_sqrt_expr(self, expr, **kwargs):
if self.sqrt_function_name is NotImplemented:
return self._do_interpret(
fallback_expressions.sqrt(expr.expr),
**kwargs)
self.with_math_module = True
nested_result = self._do_interpret(expr.expr, **kwargs)
return self._cg.function_invocation(
self.sqrt_function_name, nested_result)
def interpret_tanh_expr(self, expr, **kwargs):
if self.tanh_function_name is NotImplemented:
return self._do_interpret(
fallback_expressions.tanh(expr.expr), **kwargs)
self.with_math_module = True
nested_result = self._do_interpret(expr.expr, **kwargs)
return self._cg.function_invocation(
self.tanh_function_name, nested_result)
def interpret_pow_expr(self, expr, **kwargs):
if self.power_function_name is NotImplemented:
raise NotImplementedError("Power function is not provided")
self.with_math_module = True
base_result = self._do_interpret(expr.base_expr, **kwargs)
exp_result = self._do_interpret(expr.exp_expr, **kwargs)
return self._cg.function_invocation(
self.power_function_name, base_result, exp_result)
class ImperativeToCodeInterpreter(ToCodeInterpreter):
"""
This interpreter provides default implementation for the methods
interpreting AST expression into code.
    It can be used for most programming languages and requires only a
    language-specific instance of the CodeGenerator.
!!IMPORTANT!!: Code generators used by this interpreter must know nothing
about AST.
"""
def interpret_if_expr(self, expr, if_var_name=None, **kwargs):
if if_var_name is not None:
var_name = if_var_name
else:
var_name = self._cg.add_var_declaration(expr.output_size)
def handle_nested_expr(nested):
if isinstance(nested, ast.IfExpr):
self._do_interpret(nested, if_var_name=var_name, **kwargs)
else:
nested_result = self._do_interpret(nested, **kwargs)
self._cg.add_var_assignment(var_name, nested_result,
nested.output_size)
self._cg.add_if_statement(self._do_interpret(expr.test, **kwargs))
handle_nested_expr(expr.body)
self._cg.add_else_statement()
handle_nested_expr(expr.orelse)
self._cg.add_block_termination()
return var_name
def _cache_reused_expr(self, expr, expr_result):
var_name = self._cg.add_var_declaration(expr.output_size)
self._cg.add_var_assignment(var_name, expr_result, expr.output_size)
self._cached_expr_results[expr] = CachedResult(
var_name=var_name, expr_result=None)
return var_name
class FunctionalToCodeInterpreter(ToCodeInterpreter):
"""
This interpreter provides default implementation for the methods
interpreting AST expression into code.
    It can be used for most functional programming languages and requires
    only a language-specific instance of the CodeGenerator.
!!IMPORTANT!!: Code generators used by this interpreter must know nothing
about AST.
"""
def interpret_if_expr(self, expr, if_code_gen=None, **kwargs):
if if_code_gen is None:
code_gen = self.create_code_generator()
nested = False
else:
code_gen = if_code_gen
nested = True
code_gen.add_if_statement(self._do_interpret(
expr.test, **kwargs))
code_gen.add_code_line(self._do_interpret(
expr.body, if_code_gen=code_gen, **kwargs))
code_gen.add_else_statement()
code_gen.add_code_line(self._do_interpret(
expr.orelse, if_code_gen=code_gen, **kwargs))
code_gen.add_if_termination()
if not nested:
return self._cache_reused_expr(
expr, code_gen.finalize_and_get_generated_code())
# Cached expressions become functions with no arguments, i.e. values
# which are CAFs. Therefore, they are computed only once.
def _cache_reused_expr(self, expr, expr_result):
if expr in self._cached_expr_results:
return self._cached_expr_results[expr].var_name
else:
func_name = self._cg.get_func_name()
self._cached_expr_results[expr] = CachedResult(
var_name=func_name, expr_result=expr_result)
return func_name
def create_code_generator(self):
raise NotImplementedError
| 38.688581
| 79
| 0.679367
|
f4c7e9642949261642955dcb0eacbe80614fb44a
| 12,219
|
py
|
Python
|
eval_sample_generation.py
|
NingMiao/mfcvae
|
6c6b83bd05fdc618db3976da5faf54838af7e334
|
[
"MIT"
] | 20
|
2021-06-10T00:26:26.000Z
|
2022-03-30T01:28:01.000Z
|
eval_sample_generation.py
|
NingMiao/mfcvae
|
6c6b83bd05fdc618db3976da5faf54838af7e334
|
[
"MIT"
] | 1
|
2022-03-03T16:40:30.000Z
|
2022-03-04T14:49:46.000Z
|
eval_sample_generation.py
|
NingMiao/mfcvae
|
6c6b83bd05fdc618db3976da5faf54838af7e334
|
[
"MIT"
] | 8
|
2021-06-13T08:53:33.000Z
|
2022-03-14T06:22:00.000Z
|
import numpy as np
import torch
import torch.distributions as D
import matplotlib.pyplot as plt
from torchvision.utils import make_grid
from torch.autograd import Variable
import os
import argparse
from datasets import Fast_MNIST, Fast_SVHN, Fast_3DShapes
from load_model import load_model_from_save_dict
from plotting import plot_sample_generations_from_each_cluster_torch_grid
def eval_sample_generation():
"""
Run this function to perform post-training sample generation plot of a model.
For more information on the plot, see Section 4.4 and Appendix E.6 of the paper.
"""
parser = argparse.ArgumentParser(description='Evaluation parsing.')
parser.add_argument('--model_path', type=str, default="pretrained_models/mnist.pt", metavar='N', help="Path to a model file of type .pt .")
parser.add_argument('--results_dir', type=str, default="results/mnist", metavar='N', help="Path to a directory where results will be stored.")
parser.add_argument('--device', type=str, default='cpu', metavar='N', help="device to use for all heavy tensor operations, e.g. 'cuda:0', 'cpu', ...")
parser.add_argument('--temperature', type=float, default=0.3, metavar='N', help='temperature factor for scaling covariance matrix of sampling distributions.')
eval_args, unknown = parser.parse_known_args()
# configs
model_path = eval_args.model_path
results_dir = eval_args.results_dir
device_string = eval_args.device
temperature = eval_args.temperature
# define device and load model
mfcvae, args = load_model_from_save_dict(model_path, map_location=device_string)
# changes model to evaluation mode (e.g. dropout, batch norm affected)
mfcvae.eval()
# transfer model to device
args.device = device_string
device = torch.device(device_string)
mfcvae.device = device
mfcvae = mfcvae.to(device)
if args.dataset == 'fast_mnist':
print("Initialize MNIST data and data loaders...")
# initialize dataset
train_data = Fast_MNIST('./data', train=True, download=True,
device=args.device) # before: torchvision.datasets.MNIST
test_data = Fast_MNIST("./data", train=False, device=args.device) # before: torchvision.datasets.MNIST
elif args.dataset == 'fast_svhn':
print("Initialize SVHN data and data loaders...")
# initialize dataset
test_data = Fast_SVHN("./data", split='test', download=True, device=args.device)
elif args.dataset == 'fast_3dshapes':
print("Initialize 3DShapes data and data loaders...")
test_data = Fast_3DShapes(train=False, device=args.device, train_frac=args.threedshapes_train_frac,
factors_variation_dict=args.factors_variation_dict,
factors_label_list=args.factors_label_list,
seed=args.seed)
# initialize data loaders
test_loader = torch.utils.data.DataLoader(test_data, batch_size=args.eval_batch_size, shuffle=False,
num_workers=0) # must be 0 with GPU, good article: https://discuss.pytorch.org/t/cuda-initialization-error-when-dataloader-with-cuda-tensor/43390
mfcvae.eval() # changes model to evaluation mode (e.g. dropout, batch norm affected)
if args.do_progressive_training:
epoch = int(sum(args.n_epochs_per_progressive_step)) - 1
else:
epoch = args.n_epochs - 1
vis_examples_per_cluster_logged = [{} for j in range(args.J_n_mixtures)]
vis_count_examples_per_cluster = [{} for j in range(args.J_n_mixtures)]
vis_z_j_per_cluster = [{} for j in range(args.J_n_mixtures)]
index_to_y_j_cluster = {}
index_to_prob_p_c_j_z_j = {}
for j in range(args.J_n_mixtures):
for k in range(mfcvae.n_clusters_j_list[j]):
vis_examples_per_cluster_logged[j][k] = []
vis_z_j_per_cluster[j][k] = []
vis_count_examples_per_cluster[j][k] = 0
for n in range(len(test_data)):
index_to_y_j_cluster[n] = []
row_indices = []
num_nonempty_clusters = []
for batch_idx, (x, y_true) in enumerate(test_loader):
x, y_true = x.to(device), y_true.to(device)
if args.dataset == 'fast_mnist':
x = x.view(x.size(0), -1).float()
global_indices = list(range(batch_idx*args.eval_batch_size, (batch_idx+1)*args.eval_batch_size))
x_hat, q_z_j_x_list, z_sample_q_z_j_x_list = mfcvae.forward(x, epoch, 0)
prob_p_c_j_z_j_list = mfcvae.compute_q_c_j_x(z_sample_q_z_j_x_list)
for h in range(z_sample_q_z_j_x_list[0].shape[0]): # is probably == batch size
g = global_indices[h]
index_to_prob_p_c_j_z_j[g] = [prob_p_c_j_z_j_list[j][h].detach().cpu() for j in range(args.J_n_mixtures)]
y_pred_j_list = []
for j in range(mfcvae.J_n_mixtures):
prob_p_c_j_z_j_list[j] = prob_p_c_j_z_j_list[j].data.cpu().numpy()
y_pred_j = np.argmax(prob_p_c_j_z_j_list[j], axis=1)
y_pred_j_list.append(y_pred_j)
for j in range(mfcvae.J_n_mixtures):
for k in range(mfcvae.n_clusters_j_list[j]):
y_pred = y_pred_j_list[j]
indices = (np.where(y_pred == k)[0])
count_indices = indices.shape[0]
indices = indices.tolist()
for h in indices:
index_to_y_j_cluster[global_indices[h]].append(k)
vis_count_examples_per_cluster[j][k] += count_indices
# print("looped through test data.")
# build a useful data structure to handle the clustering probabilities
j_to_cluster_to_index_prob = {}
# create empty things
for j in range(args.J_n_mixtures):
j_to_cluster_to_index_prob[j] = {}
for c in range(args.n_clusters_j_list[j]):
j_to_cluster_to_index_prob[j][c] = []
for (index, prob_list) in index_to_prob_p_c_j_z_j.items():
for j in range(args.J_n_mixtures):
cluster_j = torch.argmax(prob_list[j])
cluster_j = cluster_j.item()
j_to_cluster_to_index_prob[j][cluster_j].append((index, prob_list[j][cluster_j]))
# Sort clusters s.t. cluster with the largest "average confidence" is 0, second largest 1 etc.
cluster_average_confidence = {}
for j in range(args.J_n_mixtures):
cluster_average_confidence[j] = {}
for c in range(args.n_clusters_j_list[j]):
cluster_average_confidence[j][c] = np.nan_to_num(np.mean([j_to_cluster_to_index_prob[j][c][k][1] for k in range(len(j_to_cluster_to_index_prob[j][c]))]))
# sort
cluster_index_average_confidence_list = [(cluster_j, score) for (cluster_j, score) in cluster_average_confidence[j].items()]
cluster_index_average_confidence_list = sorted(cluster_index_average_confidence_list, key=lambda tuple: tuple[1], reverse=True)
# print(cluster_index_average_confidence_list)
cluster_j_sorted = [cluster_j for (cluster_j, score) in cluster_index_average_confidence_list]
row_indices.append(cluster_j_sorted)
# compute the number of clusters with non-empty assignment from the test set
num_nonempty_clusters.append(len(np.argwhere(np.array([cluster_index_average_confidence_list[i][1] for i in range(args.n_clusters_j_list[j])]))))
fromto_mapping = {cluster_j: i for i, cluster_j in enumerate(cluster_j_sorted)}
# remap the dictionary - https://gist.github.com/pszaflarski/b139736415abbf8d344d77524baaece8
j_to_cluster_to_index_prob[j] = {fromto_mapping.get(k, k): v for k, v in j_to_cluster_to_index_prob[j].items() if k in fromto_mapping}
# log sample generations per facet and cluster, in the order of y_pred_j_count_list
args.n_sample_generations_per_cluster = 10
# print('Checkpoint 1.')
fig_list = plot_sample_generation(row_indices, num_nonempty_clusters, mfcvae, args, temperature, results_dir, show_plot=True)
def plot_sample_generation(row_indices, num_clusters, mfcvae, args, temperature, results_dir=None, show_plot=False):
"""
Args:
row_indices: The indices which decide the row sorting.
num_clusters: The number of clusters to be visualised.
mfcvae: The trained MFCVAE model.
args: The arguments associated with the training procedure.
temperature: The multiplier for the variance of p(z|c) during sampling.
results_dir: Path to save the output plots.
show_plot: Whether to show the plots by plt.show().
"""
if args.dataset == 'fast_mnist':
in_channels = 1
width, height = 28, 28
elif args.dataset in ['fast_svhn', 'fast_3dshapes']:
in_channels = 3
width, height = 32, 32
vis_sample_generations_logged = {}
for i in range(mfcvae.J_n_mixtures): # J clusterings
vis_sample_generations_logged[i] = {}
for j in range(mfcvae.n_clusters_j_list[i]):
vis_sample_generations_logged[i][j] = []
fig_list = []
for i in range(mfcvae.J_n_mixtures):
# sort rows by "average confidence":
y_pred_j_confidence_descend_index = row_indices[i]
for j0 in range(num_clusters[i]):
# sort rows by "average confidence":
j = y_pred_j_confidence_descend_index[j0]
for K in range(args.n_sample_generations_per_cluster):
z_sample_list = []
for k in range(args.J_n_mixtures):
c_k = int(D.Categorical(probs=mfcvae.pi_p_c_j_list[k]).sample())
if args.cov_type_p_z_c == 'diag':
z_sample_list.append(
torch.unsqueeze(D.Normal(loc=mfcvae.mu_p_z_j_c_j_list[k][:, c_k],
scale=temperature * mfcvae.sigma_square_p_z_j_c_j_list[
k][:, c_k]).sample(), 0))
elif args.cov_type_p_z_c == 'full':
z_sample_list.append(torch.unsqueeze(
D.MultivariateNormal(loc=mfcvae.mu_p_z_j_c_j_list[k][:, c_k],
scale_tril=temperature * mfcvae.l_mat_p_z_j_c_j_list[k][:,
:, c_k]).sample(), 0))
cluster_mu = mfcvae.mu_p_z_j_c_j_list[i][:, j]
if args.cov_type_p_z_c == 'diag':
cluster_sigma_square = mfcvae.sigma_square_p_z_j_c_j_list[i][:, j]
p_z_i_c_i = D.Normal(loc=cluster_mu, scale=temperature*cluster_sigma_square)
elif args.cov_type_p_z_c == 'full':
cluster_l_mat = mfcvae.l_mat_p_z_j_c_j_list[i][:, :, j]
p_z_i_c_i = D.MultivariateNormal(loc=cluster_mu, scale_tril=temperature*cluster_l_mat)
z_sample_list[i] = torch.unsqueeze(p_z_i_c_i.sample(), 0)
x_generated_samples = mfcvae.decode(
                    z_sample_q_z_j_x_list=z_sample_list)  # slightly inconsistent naming
x_generated_samples = torch.squeeze(x_generated_samples, dim=0)
x_generated_samples = torch.clamp(x_generated_samples, min=1e-10,
max=1 - (1e-10))
vis_sample_generations_logged[i][j0] = vis_sample_generations_logged[i][j0] + [
(x_generated_samples.view(in_channels, width, height).cpu().detach(),
'input_facet_' + str(i) + '_pred_' + str(j))]
# do plotting
fig = plot_sample_generations_from_each_cluster_torch_grid(
sample_dict=vis_sample_generations_logged[i],
n_clusters=num_clusters[i],
n_examples_per_cluster=args.n_sample_generations_per_cluster)
fig_list.append(fig)
if results_dir is not None:
plt.savefig(os.path.join(results_dir, 'generations_facet_%d.pdf'%(i)), format='pdf') # , dpi=3000
if show_plot:
plt.show()
plt.close(fig)
print("sample generation done.")
return fig_list
if __name__ == '__main__':
eval_sample_generation()
| 51.556962
| 192
| 0.651854
|
1fe528402bf459a1d01aec196625e7915bc4c1df
| 641
|
py
|
Python
|
Day7.py
|
BWeesy/advent-of-code-2021
|
7e46de303ac9bcdaafcbc7de30d3bd4e059450c0
|
[
"Unlicense"
] | null | null | null |
Day7.py
|
BWeesy/advent-of-code-2021
|
7e46de303ac9bcdaafcbc7de30d3bd4e059450c0
|
[
"Unlicense"
] | null | null | null |
Day7.py
|
BWeesy/advent-of-code-2021
|
7e46de303ac9bcdaafcbc7de30d3bd4e059450c0
|
[
"Unlicense"
] | null | null | null |
import fileHandler as fh
def getFuelCost(location, rendevous):
numberOfSteps = abs(location - rendevous)
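    # Each additional step costs one more unit of fuel, so moving n steps costs
    # 1 + 2 + ... + n = n * (n + 1) / 2 (the n-th triangular number).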
return numberOfSteps * (numberOfSteps + 1) / 2
def main():
subLocations = fh.getMappedCommaSeparatedFirstLine('input/day7', int)
minTotalCost = sum(subLocations) * sum(subLocations)
for rendevous in range(min(subLocations), (max(subLocations))):
totalFuelCost = sum([getFuelCost(location, rendevous) for location in subLocations])
if minTotalCost < totalFuelCost:
print(f"Rendevous @ {rendevous-1} for totalCost {minTotalCost}")
break
minTotalCost = totalFuelCost
if __name__ == "__main__":
main()
| 35.611111
| 88
| 0.736349
|
e751fddc37c6375e74fb8d1fdfb8298dd956e487
| 1,427
|
py
|
Python
|
config.py
|
DM2-ND/CoEvoGNN
|
748d3a54f6e625890c9f9cb65db62f1dc3e20952
|
[
"MIT"
] | 1
|
2021-04-22T13:43:32.000Z
|
2021-04-22T13:43:32.000Z
|
config.py
|
DM2-ND/CoEvoGNN
|
748d3a54f6e625890c9f9cb65db62f1dc3e20952
|
[
"MIT"
] | null | null | null |
config.py
|
DM2-ND/CoEvoGNN
|
748d3a54f6e625890c9f9cb65db62f1dc3e20952
|
[
"MIT"
] | null | null | null |
"""
Global configurations
"""
import os
emb_dir = os.path.join('.', 'emb')
def con_dataset(d_name):
if d_name == '2k':
""" Co-author temporal networks 2k """
nodes_f = os.path.join('data', 'node2author.n2k.csv')
temporalgraphs_f = os.path.join('data', 'temporalgraph.n2k.csv')
venues_f = os.path.join('data', 'venue2name.n2k.csv')
temporalfeatures_venue_f = os.path.join('data', 'temporalfeature-venues.n2k.csv')
words_f = os.path.join('data', 'word2name.n2k.csv')
temporalfeatures_word_f = os.path.join('data', 'temporalfeature-words.n2k.csv')
else: # d_name == '10k'
""" Co-author temporal networks 10k """
nodes_f = os.path.join('data', 'node2author.n10k.csv')
temporalgraphs_f = os.path.join('data', 'temporalgraph.n10k.csv')
venues_f = os.path.join('data', 'venue2name.n10k.csv')
temporalfeatures_venue_f = os.path.join('data', 'temporalfeature-venues.n10k.csv')
words_f = os.path.join('data', 'word2name.n10k.csv')
temporalfeatures_word_f = os.path.join('data', 'temporalfeature-words.n10k.csv')
_fs = [nodes_f, temporalgraphs_f, venues_f, temporalfeatures_venue_f, words_f, temporalfeatures_word_f]
assert all([os.path.exists(_f) for _f in _fs])
return _fs
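# Illustrative unpacking of the returned file list:
#   nodes_f, graphs_f, venues_f, venue_feat_f, words_f, word_feat_f = con_dataset('2k')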
""" Cache files """
H_0_2k_npf = os.path.join(emb_dir, 'H_0.2k.npy')
H_0_10k_npf = os.path.join(emb_dir, 'H_0.10k.npy')
| 36.589744
| 107
| 0.658725
|
df92bd141852c4c4aab4c0040fe13350a11a5561
| 49,011
|
py
|
Python
|
sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2019_04_01/aio/operations/_blob_containers_operations.py
|
vincenttran-msft/azure-sdk-for-python
|
348b56f9f03eeb3f7b502eed51daf494ffff874d
|
[
"MIT"
] | 1
|
2022-03-09T08:59:13.000Z
|
2022-03-09T08:59:13.000Z
|
sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2019_04_01/aio/operations/_blob_containers_operations.py
|
vincenttran-msft/azure-sdk-for-python
|
348b56f9f03eeb3f7b502eed51daf494ffff874d
|
[
"MIT"
] | null | null | null |
sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2019_04_01/aio/operations/_blob_containers_operations.py
|
vincenttran-msft/azure-sdk-for-python
|
348b56f9f03eeb3f7b502eed51daf494ffff874d
|
[
"MIT"
] | 1
|
2022-03-04T06:21:56.000Z
|
2022-03-04T06:21:56.000Z
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
import functools
from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar, Union
import warnings
from azure.core.async_paging import AsyncItemPaged, AsyncList
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse
from azure.core.rest import HttpRequest
from azure.core.tracing.decorator import distributed_trace
from azure.core.tracing.decorator_async import distributed_trace_async
from azure.mgmt.core.exceptions import ARMErrorFormat
from ... import models as _models
from ..._vendor import _convert_request
from ...operations._blob_containers_operations import build_clear_legal_hold_request, build_create_or_update_immutability_policy_request, build_create_request, build_delete_immutability_policy_request, build_delete_request, build_extend_immutability_policy_request, build_get_immutability_policy_request, build_get_request, build_lease_request, build_list_request, build_lock_immutability_policy_request, build_set_legal_hold_request, build_update_request
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class BlobContainersOperations:
"""BlobContainersOperations async operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.storage.v2019_04_01.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer) -> None:
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
@distributed_trace
def list(
self,
resource_group_name: str,
account_name: str,
skip_token: Optional[str] = None,
maxpagesize: Optional[str] = None,
filter: Optional[str] = None,
**kwargs: Any
) -> AsyncIterable["_models.ListContainerItems"]:
"""Lists all containers and does not support a prefix like data plane. Also SRP today does not
return continuation token.
:param resource_group_name: The name of the resource group within the user's subscription. The
name is case insensitive.
:type resource_group_name: str
:param account_name: The name of the storage account within the specified resource group.
Storage account names must be between 3 and 24 characters in length and use numbers and
lower-case letters only.
:type account_name: str
:param skip_token: Optional. Continuation token for the list operation.
:type skip_token: str
:param maxpagesize: Optional. Specified maximum number of containers that can be included in
the list.
:type maxpagesize: str
:param filter: Optional. When specified, only container names starting with the filter will be
listed.
:type filter: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either ListContainerItems or the result of cls(response)
:rtype:
~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.storage.v2019_04_01.models.ListContainerItems]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.ListContainerItems"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
def prepare_request(next_link=None):
if not next_link:
request = build_list_request(
resource_group_name=resource_group_name,
account_name=account_name,
subscription_id=self._config.subscription_id,
skip_token=skip_token,
maxpagesize=maxpagesize,
filter=filter,
template_url=self.list.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
else:
request = build_list_request(
resource_group_name=resource_group_name,
account_name=account_name,
subscription_id=self._config.subscription_id,
skip_token=skip_token,
maxpagesize=maxpagesize,
filter=filter,
template_url=next_link,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
request.method = "GET"
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize("ListContainerItems", pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/blobServices/default/containers'} # type: ignore
@distributed_trace_async
async def create(
self,
resource_group_name: str,
account_name: str,
container_name: str,
blob_container: "_models.BlobContainer",
**kwargs: Any
) -> "_models.BlobContainer":
"""Creates a new container under the specified account as described by request body. The container
resource includes metadata and properties for that container. It does not include a list of the
blobs contained by the container.
:param resource_group_name: The name of the resource group within the user's subscription. The
name is case insensitive.
:type resource_group_name: str
:param account_name: The name of the storage account within the specified resource group.
Storage account names must be between 3 and 24 characters in length and use numbers and
lower-case letters only.
:type account_name: str
:param container_name: The name of the blob container within the specified storage account.
Blob container names must be between 3 and 63 characters in length and use numbers, lower-case
letters and dash (-) only. Every dash (-) character must be immediately preceded and followed
by a letter or number.
:type container_name: str
:param blob_container: Properties of the blob container to create.
:type blob_container: ~azure.mgmt.storage.v2019_04_01.models.BlobContainer
:keyword callable cls: A custom type or function that will be passed the direct response
:return: BlobContainer, or the result of cls(response)
:rtype: ~azure.mgmt.storage.v2019_04_01.models.BlobContainer
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.BlobContainer"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
content_type = kwargs.pop('content_type', "application/json") # type: Optional[str]
_json = self._serialize.body(blob_container, 'BlobContainer')
request = build_create_request(
resource_group_name=resource_group_name,
account_name=account_name,
container_name=container_name,
subscription_id=self._config.subscription_id,
content_type=content_type,
json=_json,
template_url=self.create.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 201]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if response.status_code == 200:
deserialized = self._deserialize('BlobContainer', pipeline_response)
if response.status_code == 201:
deserialized = self._deserialize('BlobContainer', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
create.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/blobServices/default/containers/{containerName}'} # type: ignore
@distributed_trace_async
async def update(
self,
resource_group_name: str,
account_name: str,
container_name: str,
blob_container: "_models.BlobContainer",
**kwargs: Any
) -> "_models.BlobContainer":
"""Updates container properties as specified in request body. Properties not mentioned in the
request will be unchanged. Update fails if the specified container doesn't already exist.
:param resource_group_name: The name of the resource group within the user's subscription. The
name is case insensitive.
:type resource_group_name: str
:param account_name: The name of the storage account within the specified resource group.
Storage account names must be between 3 and 24 characters in length and use numbers and
lower-case letters only.
:type account_name: str
:param container_name: The name of the blob container within the specified storage account.
Blob container names must be between 3 and 63 characters in length and use numbers, lower-case
letters and dash (-) only. Every dash (-) character must be immediately preceded and followed
by a letter or number.
:type container_name: str
:param blob_container: Properties to update for the blob container.
:type blob_container: ~azure.mgmt.storage.v2019_04_01.models.BlobContainer
:keyword callable cls: A custom type or function that will be passed the direct response
:return: BlobContainer, or the result of cls(response)
:rtype: ~azure.mgmt.storage.v2019_04_01.models.BlobContainer
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.BlobContainer"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
content_type = kwargs.pop('content_type', "application/json") # type: Optional[str]
_json = self._serialize.body(blob_container, 'BlobContainer')
request = build_update_request(
resource_group_name=resource_group_name,
account_name=account_name,
container_name=container_name,
subscription_id=self._config.subscription_id,
content_type=content_type,
json=_json,
template_url=self.update.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('BlobContainer', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/blobServices/default/containers/{containerName}'} # type: ignore
@distributed_trace_async
async def get(
self,
resource_group_name: str,
account_name: str,
container_name: str,
**kwargs: Any
) -> "_models.BlobContainer":
"""Gets properties of a specified container.
:param resource_group_name: The name of the resource group within the user's subscription. The
name is case insensitive.
:type resource_group_name: str
:param account_name: The name of the storage account within the specified resource group.
Storage account names must be between 3 and 24 characters in length and use numbers and
lower-case letters only.
:type account_name: str
:param container_name: The name of the blob container within the specified storage account.
Blob container names must be between 3 and 63 characters in length and use numbers, lower-case
letters and dash (-) only. Every dash (-) character must be immediately preceded and followed
by a letter or number.
:type container_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: BlobContainer, or the result of cls(response)
:rtype: ~azure.mgmt.storage.v2019_04_01.models.BlobContainer
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.BlobContainer"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
request = build_get_request(
resource_group_name=resource_group_name,
account_name=account_name,
container_name=container_name,
subscription_id=self._config.subscription_id,
template_url=self.get.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('BlobContainer', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/blobServices/default/containers/{containerName}'} # type: ignore
@distributed_trace_async
async def delete(
self,
resource_group_name: str,
account_name: str,
container_name: str,
**kwargs: Any
) -> None:
"""Deletes specified container under its account.
:param resource_group_name: The name of the resource group within the user's subscription. The
name is case insensitive.
:type resource_group_name: str
:param account_name: The name of the storage account within the specified resource group.
Storage account names must be between 3 and 24 characters in length and use numbers and
lower-case letters only.
:type account_name: str
:param container_name: The name of the blob container within the specified storage account.
Blob container names must be between 3 and 63 characters in length and use numbers, lower-case
letters and dash (-) only. Every dash (-) character must be immediately preceded and followed
by a letter or number.
:type container_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: None, or the result of cls(response)
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
request = build_delete_request(
resource_group_name=resource_group_name,
account_name=account_name,
container_name=container_name,
subscription_id=self._config.subscription_id,
template_url=self.delete.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/blobServices/default/containers/{containerName}'} # type: ignore
@distributed_trace_async
async def set_legal_hold(
self,
resource_group_name: str,
account_name: str,
container_name: str,
legal_hold: "_models.LegalHold",
**kwargs: Any
) -> "_models.LegalHold":
"""Sets legal hold tags. Setting the same tag results in an idempotent operation. SetLegalHold
follows an append pattern and does not clear out the existing tags that are not specified in
the request.
:param resource_group_name: The name of the resource group within the user's subscription. The
name is case insensitive.
:type resource_group_name: str
:param account_name: The name of the storage account within the specified resource group.
Storage account names must be between 3 and 24 characters in length and use numbers and
lower-case letters only.
:type account_name: str
:param container_name: The name of the blob container within the specified storage account.
Blob container names must be between 3 and 63 characters in length and use numbers, lower-case
letters and dash (-) only. Every dash (-) character must be immediately preceded and followed
by a letter or number.
:type container_name: str
:param legal_hold: The LegalHold property that will be set to a blob container.
:type legal_hold: ~azure.mgmt.storage.v2019_04_01.models.LegalHold
:keyword callable cls: A custom type or function that will be passed the direct response
:return: LegalHold, or the result of cls(response)
:rtype: ~azure.mgmt.storage.v2019_04_01.models.LegalHold
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.LegalHold"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
content_type = kwargs.pop('content_type', "application/json") # type: Optional[str]
_json = self._serialize.body(legal_hold, 'LegalHold')
request = build_set_legal_hold_request(
resource_group_name=resource_group_name,
account_name=account_name,
container_name=container_name,
subscription_id=self._config.subscription_id,
content_type=content_type,
json=_json,
template_url=self.set_legal_hold.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('LegalHold', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
set_legal_hold.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/blobServices/default/containers/{containerName}/setLegalHold'} # type: ignore
@distributed_trace_async
async def clear_legal_hold(
self,
resource_group_name: str,
account_name: str,
container_name: str,
legal_hold: "_models.LegalHold",
**kwargs: Any
) -> "_models.LegalHold":
"""Clears legal hold tags. Clearing the same or non-existent tag results in an idempotent
operation. ClearLegalHold clears out only the specified tags in the request.
:param resource_group_name: The name of the resource group within the user's subscription. The
name is case insensitive.
:type resource_group_name: str
:param account_name: The name of the storage account within the specified resource group.
Storage account names must be between 3 and 24 characters in length and use numbers and
lower-case letters only.
:type account_name: str
:param container_name: The name of the blob container within the specified storage account.
Blob container names must be between 3 and 63 characters in length and use numbers, lower-case
letters and dash (-) only. Every dash (-) character must be immediately preceded and followed
by a letter or number.
:type container_name: str
:param legal_hold: The LegalHold property that will be clear from a blob container.
:type legal_hold: ~azure.mgmt.storage.v2019_04_01.models.LegalHold
:keyword callable cls: A custom type or function that will be passed the direct response
:return: LegalHold, or the result of cls(response)
:rtype: ~azure.mgmt.storage.v2019_04_01.models.LegalHold
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.LegalHold"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
content_type = kwargs.pop('content_type', "application/json") # type: Optional[str]
_json = self._serialize.body(legal_hold, 'LegalHold')
request = build_clear_legal_hold_request(
resource_group_name=resource_group_name,
account_name=account_name,
container_name=container_name,
subscription_id=self._config.subscription_id,
content_type=content_type,
json=_json,
template_url=self.clear_legal_hold.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('LegalHold', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
clear_legal_hold.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/blobServices/default/containers/{containerName}/clearLegalHold'} # type: ignore
@distributed_trace_async
async def create_or_update_immutability_policy(
self,
resource_group_name: str,
account_name: str,
container_name: str,
if_match: Optional[str] = None,
parameters: Optional["_models.ImmutabilityPolicy"] = None,
**kwargs: Any
) -> "_models.ImmutabilityPolicy":
"""Creates or updates an unlocked immutability policy. ETag in If-Match is honored if given but
not required for this operation.
:param resource_group_name: The name of the resource group within the user's subscription. The
name is case insensitive.
:type resource_group_name: str
:param account_name: The name of the storage account within the specified resource group.
Storage account names must be between 3 and 24 characters in length and use numbers and
lower-case letters only.
:type account_name: str
:param container_name: The name of the blob container within the specified storage account.
Blob container names must be between 3 and 63 characters in length and use numbers, lower-case
letters and dash (-) only. Every dash (-) character must be immediately preceded and followed
by a letter or number.
:type container_name: str
:param if_match: The entity state (ETag) version of the immutability policy to update. A value
of "*" can be used to apply the operation only if the immutability policy already exists. If
omitted, this operation will always be applied.
:type if_match: str
:param parameters: The ImmutabilityPolicy Properties that will be created or updated to a blob
container.
:type parameters: ~azure.mgmt.storage.v2019_04_01.models.ImmutabilityPolicy
:keyword callable cls: A custom type or function that will be passed the direct response
:return: ImmutabilityPolicy, or the result of cls(response)
:rtype: ~azure.mgmt.storage.v2019_04_01.models.ImmutabilityPolicy
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.ImmutabilityPolicy"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
content_type = kwargs.pop('content_type', "application/json") # type: Optional[str]
if parameters is not None:
_json = self._serialize.body(parameters, 'ImmutabilityPolicy')
else:
_json = None
request = build_create_or_update_immutability_policy_request(
resource_group_name=resource_group_name,
account_name=account_name,
container_name=container_name,
subscription_id=self._config.subscription_id,
content_type=content_type,
json=_json,
if_match=if_match,
template_url=self.create_or_update_immutability_policy.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
response_headers = {}
        response_headers['ETag'] = self._deserialize('str', response.headers.get('ETag'))
deserialized = self._deserialize('ImmutabilityPolicy', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, response_headers)
return deserialized
create_or_update_immutability_policy.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/blobServices/default/containers/{containerName}/immutabilityPolicies/{immutabilityPolicyName}'} # type: ignore
@distributed_trace_async
async def get_immutability_policy(
self,
resource_group_name: str,
account_name: str,
container_name: str,
if_match: Optional[str] = None,
**kwargs: Any
) -> "_models.ImmutabilityPolicy":
"""Gets the existing immutability policy along with the corresponding ETag in response headers and
body.
:param resource_group_name: The name of the resource group within the user's subscription. The
name is case insensitive.
:type resource_group_name: str
:param account_name: The name of the storage account within the specified resource group.
Storage account names must be between 3 and 24 characters in length and use numbers and
lower-case letters only.
:type account_name: str
:param container_name: The name of the blob container within the specified storage account.
Blob container names must be between 3 and 63 characters in length and use numbers, lower-case
letters and dash (-) only. Every dash (-) character must be immediately preceded and followed
by a letter or number.
:type container_name: str
:param if_match: The entity state (ETag) version of the immutability policy to update. A value
of "*" can be used to apply the operation only if the immutability policy already exists. If
omitted, this operation will always be applied.
:type if_match: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: ImmutabilityPolicy, or the result of cls(response)
:rtype: ~azure.mgmt.storage.v2019_04_01.models.ImmutabilityPolicy
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.ImmutabilityPolicy"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
request = build_get_immutability_policy_request(
resource_group_name=resource_group_name,
account_name=account_name,
container_name=container_name,
subscription_id=self._config.subscription_id,
if_match=if_match,
template_url=self.get_immutability_policy.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
response_headers = {}
        response_headers['ETag'] = self._deserialize('str', response.headers.get('ETag'))
deserialized = self._deserialize('ImmutabilityPolicy', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, response_headers)
return deserialized
get_immutability_policy.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/blobServices/default/containers/{containerName}/immutabilityPolicies/{immutabilityPolicyName}'} # type: ignore
@distributed_trace_async
async def delete_immutability_policy(
self,
resource_group_name: str,
account_name: str,
container_name: str,
if_match: str,
**kwargs: Any
) -> "_models.ImmutabilityPolicy":
"""Aborts an unlocked immutability policy. The response of delete has
immutabilityPeriodSinceCreationInDays set to 0. ETag in If-Match is required for this
operation. Deleting a locked immutability policy is not allowed, only way is to delete the
container after deleting all blobs inside the container.
:param resource_group_name: The name of the resource group within the user's subscription. The
name is case insensitive.
:type resource_group_name: str
:param account_name: The name of the storage account within the specified resource group.
Storage account names must be between 3 and 24 characters in length and use numbers and
lower-case letters only.
:type account_name: str
:param container_name: The name of the blob container within the specified storage account.
Blob container names must be between 3 and 63 characters in length and use numbers, lower-case
letters and dash (-) only. Every dash (-) character must be immediately preceded and followed
by a letter or number.
:type container_name: str
:param if_match: The entity state (ETag) version of the immutability policy to update. A value
of "*" can be used to apply the operation only if the immutability policy already exists. If
omitted, this operation will always be applied.
:type if_match: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: ImmutabilityPolicy, or the result of cls(response)
:rtype: ~azure.mgmt.storage.v2019_04_01.models.ImmutabilityPolicy
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.ImmutabilityPolicy"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
request = build_delete_immutability_policy_request(
resource_group_name=resource_group_name,
account_name=account_name,
container_name=container_name,
subscription_id=self._config.subscription_id,
if_match=if_match,
template_url=self.delete_immutability_policy.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
response_headers = {}
        response_headers['ETag'] = self._deserialize('str', response.headers.get('ETag'))
deserialized = self._deserialize('ImmutabilityPolicy', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, response_headers)
return deserialized
delete_immutability_policy.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/blobServices/default/containers/{containerName}/immutabilityPolicies/{immutabilityPolicyName}'} # type: ignore
@distributed_trace_async
async def lock_immutability_policy(
self,
resource_group_name: str,
account_name: str,
container_name: str,
if_match: str,
**kwargs: Any
) -> "_models.ImmutabilityPolicy":
"""Sets the ImmutabilityPolicy to Locked state. The only action allowed on a Locked policy is
ExtendImmutabilityPolicy action. ETag in If-Match is required for this operation.
:param resource_group_name: The name of the resource group within the user's subscription. The
name is case insensitive.
:type resource_group_name: str
:param account_name: The name of the storage account within the specified resource group.
Storage account names must be between 3 and 24 characters in length and use numbers and
lower-case letters only.
:type account_name: str
:param container_name: The name of the blob container within the specified storage account.
Blob container names must be between 3 and 63 characters in length and use numbers, lower-case
letters and dash (-) only. Every dash (-) character must be immediately preceded and followed
by a letter or number.
:type container_name: str
:param if_match: The entity state (ETag) version of the immutability policy to update. A value
of "*" can be used to apply the operation only if the immutability policy already exists. If
omitted, this operation will always be applied.
:type if_match: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: ImmutabilityPolicy, or the result of cls(response)
:rtype: ~azure.mgmt.storage.v2019_04_01.models.ImmutabilityPolicy
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.ImmutabilityPolicy"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
request = build_lock_immutability_policy_request(
resource_group_name=resource_group_name,
account_name=account_name,
container_name=container_name,
subscription_id=self._config.subscription_id,
if_match=if_match,
template_url=self.lock_immutability_policy.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
response_headers = {}
        response_headers['ETag'] = self._deserialize('str', response.headers.get('ETag'))
deserialized = self._deserialize('ImmutabilityPolicy', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, response_headers)
return deserialized
lock_immutability_policy.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/blobServices/default/containers/{containerName}/immutabilityPolicies/default/lock'} # type: ignore
@distributed_trace_async
async def extend_immutability_policy(
self,
resource_group_name: str,
account_name: str,
container_name: str,
if_match: str,
parameters: Optional["_models.ImmutabilityPolicy"] = None,
**kwargs: Any
) -> "_models.ImmutabilityPolicy":
"""Extends the immutabilityPeriodSinceCreationInDays of a locked immutabilityPolicy. The only
action allowed on a Locked policy will be this action. ETag in If-Match is required for this
operation.
:param resource_group_name: The name of the resource group within the user's subscription. The
name is case insensitive.
:type resource_group_name: str
:param account_name: The name of the storage account within the specified resource group.
Storage account names must be between 3 and 24 characters in length and use numbers and
lower-case letters only.
:type account_name: str
:param container_name: The name of the blob container within the specified storage account.
Blob container names must be between 3 and 63 characters in length and use numbers, lower-case
letters and dash (-) only. Every dash (-) character must be immediately preceded and followed
by a letter or number.
:type container_name: str
:param if_match: The entity state (ETag) version of the immutability policy to update. A value
of "*" can be used to apply the operation only if the immutability policy already exists. If
omitted, this operation will always be applied.
:type if_match: str
:param parameters: The ImmutabilityPolicy Properties that will be extended for a blob
container.
:type parameters: ~azure.mgmt.storage.v2019_04_01.models.ImmutabilityPolicy
:keyword callable cls: A custom type or function that will be passed the direct response
:return: ImmutabilityPolicy, or the result of cls(response)
:rtype: ~azure.mgmt.storage.v2019_04_01.models.ImmutabilityPolicy
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.ImmutabilityPolicy"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
content_type = kwargs.pop('content_type', "application/json") # type: Optional[str]
if parameters is not None:
_json = self._serialize.body(parameters, 'ImmutabilityPolicy')
else:
_json = None
request = build_extend_immutability_policy_request(
resource_group_name=resource_group_name,
account_name=account_name,
container_name=container_name,
subscription_id=self._config.subscription_id,
content_type=content_type,
if_match=if_match,
json=_json,
template_url=self.extend_immutability_policy.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
response_headers = {}
        response_headers['ETag'] = self._deserialize('str', response.headers.get('ETag'))
deserialized = self._deserialize('ImmutabilityPolicy', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, response_headers)
return deserialized
extend_immutability_policy.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/blobServices/default/containers/{containerName}/immutabilityPolicies/default/extend'} # type: ignore
@distributed_trace_async
async def lease(
self,
resource_group_name: str,
account_name: str,
container_name: str,
parameters: Optional["_models.LeaseContainerRequest"] = None,
**kwargs: Any
) -> "_models.LeaseContainerResponse":
"""The Lease Container operation establishes and manages a lock on a container for delete
operations. The lock duration can be 15 to 60 seconds, or can be infinite.
:param resource_group_name: The name of the resource group within the user's subscription. The
name is case insensitive.
:type resource_group_name: str
:param account_name: The name of the storage account within the specified resource group.
Storage account names must be between 3 and 24 characters in length and use numbers and
lower-case letters only.
:type account_name: str
:param container_name: The name of the blob container within the specified storage account.
Blob container names must be between 3 and 63 characters in length and use numbers, lower-case
letters and dash (-) only. Every dash (-) character must be immediately preceded and followed
by a letter or number.
:type container_name: str
:param parameters: Lease Container request body.
:type parameters: ~azure.mgmt.storage.v2019_04_01.models.LeaseContainerRequest
:keyword callable cls: A custom type or function that will be passed the direct response
:return: LeaseContainerResponse, or the result of cls(response)
:rtype: ~azure.mgmt.storage.v2019_04_01.models.LeaseContainerResponse
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.LeaseContainerResponse"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
content_type = kwargs.pop('content_type', "application/json") # type: Optional[str]
if parameters is not None:
_json = self._serialize.body(parameters, 'LeaseContainerRequest')
else:
_json = None
request = build_lease_request(
resource_group_name=resource_group_name,
account_name=account_name,
container_name=container_name,
subscription_id=self._config.subscription_id,
content_type=content_type,
json=_json,
template_url=self.lease.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('LeaseContainerResponse', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
lease.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/blobServices/default/containers/{containerName}/lease'} # type: ignore
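# --------------------------------------------------------------------------
# Illustrative usage (not part of the generated operations class): a minimal
# sketch of driving these async operations end to end. The resource group,
# account and container names below are placeholders, and the exact aio
# client import path can vary between azure-mgmt-storage releases.
# --------------------------------------------------------------------------
async def _example_blob_container_workflow() -> None:
    from azure.identity.aio import DefaultAzureCredential
    from azure.mgmt.storage.v2019_04_01.aio import StorageManagementClient
    from azure.mgmt.storage.v2019_04_01.models import ImmutabilityPolicy, LegalHold
    credential = DefaultAzureCredential()
    async with StorageManagementClient(credential, "<subscription-id>") as client:
        # Read back container properties.
        container = await client.blob_containers.get("my-rg", "mystorageacct", "my-container")
        print(container.name, container.public_access)
        # Legal holds follow an append/clear pattern (see set_legal_hold/clear_legal_hold above).
        hold = LegalHold(tags=["audit-2021"])
        await client.blob_containers.set_legal_hold("my-rg", "mystorageacct", "my-container", hold)
        await client.blob_containers.clear_legal_hold("my-rg", "mystorageacct", "my-container", hold)
        # Immutability policies are ETag-guarded: locking requires the current ETag in If-Match.
        policy = await client.blob_containers.create_or_update_immutability_policy(
            "my-rg", "mystorageacct", "my-container",
            parameters=ImmutabilityPolicy(immutability_period_since_creation_in_days=30))
        await client.blob_containers.lock_immutability_policy(
            "my-rg", "mystorageacct", "my-container", if_match=policy.etag)
    await credential.close()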
| 48.913174
| 455
| 0.689743
|
25778f38b960b1f4970cb926d28b1b9c1ffb02dd
| 5,306
|
py
|
Python
|
predict.py
|
wengmingao/yolox-facemask
|
bad9d1151fd801cc939a49832a30bc07142d45e1
|
[
"Apache-2.0"
] | null | null | null |
predict.py
|
wengmingao/yolox-facemask
|
bad9d1151fd801cc939a49832a30bc07142d45e1
|
[
"Apache-2.0"
] | null | null | null |
predict.py
|
wengmingao/yolox-facemask
|
bad9d1151fd801cc939a49832a30bc07142d45e1
|
[
"Apache-2.0"
] | null | null | null |
#-----------------------------------------------------------------------#
#   predict.py integrates single-image prediction, webcam/video detection, FPS testing
#   and folder-wide detection into one script; switch between them by setting `mode`.
#-----------------------------------------------------------------------#
import time
import cv2
import numpy as np
from PIL import Image
from yolo import YOLO
if __name__ == "__main__":
yolo = YOLO()
#----------------------------------------------------------------------------------------------------------#
    #   mode selects what the script does:
    #   'predict'      single-image prediction; to change the prediction step (save the image, crop objects, ...) read the detailed comments below first
    #   'video'        video detection using a webcam or a video file, see the comments below
    #   'fps'          FPS benchmark; the test image is img/street.jpg, see the comments below
    #   'dir_predict'  detect every image in a folder and save the results; defaults to reading img/ and saving to img_out/, see the comments below
#----------------------------------------------------------------------------------------------------------#
mode = "predict"
#----------------------------------------------------------------------------------------------------------#
    #   video_path        path of the video; video_path=0 means the webcam is used
    #                     to detect a video file, set e.g. video_path = "xxx.mp4" to read xxx.mp4 from the root directory
    #   video_save_path   path the detected video is saved to; video_save_path="" means it is not saved
    #                     to save the video, set e.g. video_save_path = "yyy.mp4" to write yyy.mp4 to the root directory
    #   video_fps         fps of the saved video
    #   video_path, video_save_path and video_fps only take effect when mode='video'
    #   when saving a video, exit with Ctrl+C or let it run to the last frame, otherwise the file is not finalized properly
#----------------------------------------------------------------------------------------------------------#
video_path = 0
video_save_path = ""
video_fps = 25.0
#-------------------------------------------------------------------------#
    #   test_interval is how many times detection is run when measuring FPS;
    #   in theory, the larger test_interval is, the more accurate the measured FPS.
#-------------------------------------------------------------------------#
test_interval = 100
#-------------------------------------------------------------------------#
    #   dir_origin_path   folder containing the images to detect
    #   dir_save_path     folder where the detected images are saved
    #   dir_origin_path and dir_save_path only take effect when mode='dir_predict'
#-------------------------------------------------------------------------#
dir_origin_path = "img/"
dir_save_path = "img_out/"
if mode == "predict":
'''
        1. To save the detected image, call r_image.save("img.jpg"); just edit it directly here in predict.py.
        2. To get the coordinates of the predicted boxes, go into yolo.detect_image and read top, left, bottom, right in the drawing section.
        3. To crop the detected objects, go into yolo.detect_image and use the top, left, bottom, right values
        obtained in the drawing section to slice the original image as an array.
        4. To write extra text on the predicted image, e.g. the number of detections of a certain class, go into yolo.detect_image and check predicted_class in the drawing section,
        e.g. if predicted_class == 'car': tells you whether the current object is a car; keep a count and write it with draw.text.
'''
while True:
img = input('Input image filename:')
try:
image = Image.open(img)
except:
print('Open Error! Try again!')
continue
else:
r_image = yolo.detect_image(image)
r_image.show()
elif mode == "video":
capture = cv2.VideoCapture(video_path)
if video_save_path!="":
fourcc = cv2.VideoWriter_fourcc(*'XVID')
size = (int(capture.get(cv2.CAP_PROP_FRAME_WIDTH)), int(capture.get(cv2.CAP_PROP_FRAME_HEIGHT)))
out = cv2.VideoWriter(video_save_path, fourcc, video_fps, size)
fps = 0.0
while(True):
t1 = time.time()
            # read one frame; stop when the stream ends or the camera fails
            ref, frame = capture.read()
            if not ref:
                break
            # convert BGR (OpenCV order) to RGB
            frame = cv2.cvtColor(frame, cv2.COLOR_BGR2RGB)
            # convert the array into a PIL Image
            frame = Image.fromarray(np.uint8(frame))
            # run detection
            frame = np.array(yolo.detect_image(frame))
            # convert RGB back to BGR so OpenCV can display it
            frame = cv2.cvtColor(frame, cv2.COLOR_RGB2BGR)
            # smooth the instantaneous frame rate with a simple running average
            fps = ( fps + (1./(time.time()-t1)) ) / 2
            print("fps= %.2f"%(fps))
frame = cv2.putText(frame, "fps= %.2f"%(fps), (0, 40), cv2.FONT_HERSHEY_SIMPLEX, 1, (0, 255, 0), 2)
cv2.imshow("video",frame)
c= cv2.waitKey(1) & 0xff
if video_save_path!="":
out.write(frame)
if c==27:
capture.release()
break
        capture.release()
        if video_save_path!="":
            out.release()
        cv2.destroyAllWindows()
elif mode == "fps":
img = Image.open('img/street.jpg')
tact_time = yolo.get_FPS(img, test_interval)
print(str(tact_time) + ' seconds, ' + str(1/tact_time) + 'FPS, @batch_size 1')
elif mode == "dir_predict":
import os
from tqdm import tqdm
img_names = os.listdir(dir_origin_path)
for img_name in tqdm(img_names):
if img_name.lower().endswith(('.bmp', '.dib', '.png', '.jpg', '.jpeg', '.pbm', '.pgm', '.ppm', '.tif', '.tiff')):
image_path = os.path.join(dir_origin_path, img_name)
image = Image.open(image_path)
r_image = yolo.detect_image(image)
if not os.path.exists(dir_save_path):
os.makedirs(dir_save_path)
r_image.save(os.path.join(dir_save_path, img_name))
else:
raise AssertionError("Please specify the correct mode: 'predict', 'video', 'fps' or 'dir_predict'.")
| 42.111111
| 125
| 0.491519
|
cb6ee87804801e74d602c76140b5efb5e52ae54e
| 11,689
|
py
|
Python
|
neutron/plugins/ryu/ryu_neutron_plugin.py
|
ksshanam/neutron-vrrp
|
f9fb7f9b41adc0de401cc118a4d97026d3abb6e0
|
[
"Apache-2.0"
] | null | null | null |
neutron/plugins/ryu/ryu_neutron_plugin.py
|
ksshanam/neutron-vrrp
|
f9fb7f9b41adc0de401cc118a4d97026d3abb6e0
|
[
"Apache-2.0"
] | null | null | null |
neutron/plugins/ryu/ryu_neutron_plugin.py
|
ksshanam/neutron-vrrp
|
f9fb7f9b41adc0de401cc118a4d97026d3abb6e0
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2012 Isaku Yamahata <yamahata at private email ne jp>
# <yamahata at valinux co jp>
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
# @author: Isaku Yamahata
from oslo.config import cfg
from ryu.app import client
from ryu.app import rest_nw_id
from neutron.agent import securitygroups_rpc as sg_rpc
from neutron.api.rpc.handlers import dhcp_rpc
from neutron.api.rpc.handlers import l3_rpc
from neutron.common import constants as q_const
from neutron.common import exceptions as n_exc
from neutron.common import rpc as n_rpc
from neutron.common import topics
from neutron.db import api as db
from neutron.db import db_base_plugin_v2
from neutron.db import external_net_db
from neutron.db import extraroute_db
from neutron.db import l3_gwmode_db
from neutron.db import models_v2
from neutron.db import portbindings_base
from neutron.db import securitygroups_rpc_base as sg_db_rpc
from neutron.extensions import portbindings
from neutron.openstack.common import excutils
from neutron.openstack.common import log as logging
from neutron.plugins.common import constants as svc_constants
from neutron.plugins.ryu.common import config # noqa
from neutron.plugins.ryu.db import api_v2 as db_api_v2
LOG = logging.getLogger(__name__)
class RyuRpcCallbacks(n_rpc.RpcCallback,
sg_db_rpc.SecurityGroupServerRpcCallbackMixin):
RPC_API_VERSION = '1.1'
def __init__(self, ofp_rest_api_addr):
super(RyuRpcCallbacks, self).__init__()
self.ofp_rest_api_addr = ofp_rest_api_addr
def get_ofp_rest_api(self, context, **kwargs):
LOG.debug(_("get_ofp_rest_api: %s"), self.ofp_rest_api_addr)
return self.ofp_rest_api_addr
@classmethod
def get_port_from_device(cls, device):
port = db_api_v2.get_port_from_device(device)
if port:
port['device'] = device
return port
class AgentNotifierApi(n_rpc.RpcProxy,
sg_rpc.SecurityGroupAgentRpcApiMixin):
BASE_RPC_API_VERSION = '1.0'
def __init__(self, topic):
super(AgentNotifierApi, self).__init__(
topic=topic, default_version=self.BASE_RPC_API_VERSION)
self.topic_port_update = topics.get_topic_name(topic,
topics.PORT,
topics.UPDATE)
def port_update(self, context, port):
self.fanout_cast(context,
self.make_msg('port_update', port=port),
topic=self.topic_port_update)
class RyuNeutronPluginV2(db_base_plugin_v2.NeutronDbPluginV2,
external_net_db.External_net_db_mixin,
extraroute_db.ExtraRoute_db_mixin,
l3_gwmode_db.L3_NAT_db_mixin,
sg_db_rpc.SecurityGroupServerRpcMixin,
portbindings_base.PortBindingBaseMixin):
_supported_extension_aliases = ["external-net", "router", "ext-gw-mode",
"extraroute", "security-group",
"binding", "quotas"]
@property
def supported_extension_aliases(self):
if not hasattr(self, '_aliases'):
aliases = self._supported_extension_aliases[:]
sg_rpc.disable_security_group_extension_by_config(aliases)
self._aliases = aliases
return self._aliases
def __init__(self, configfile=None):
super(RyuNeutronPluginV2, self).__init__()
self.base_binding_dict = {
portbindings.VIF_TYPE: portbindings.VIF_TYPE_OVS,
portbindings.VIF_DETAILS: {
# TODO(rkukura): Replace with new VIF security details
portbindings.CAP_PORT_FILTER:
'security-group' in self.supported_extension_aliases,
portbindings.OVS_HYBRID_PLUG: True
}
}
portbindings_base.register_port_dict_function()
self.tunnel_key = db_api_v2.TunnelKey(
cfg.CONF.OVS.tunnel_key_min, cfg.CONF.OVS.tunnel_key_max)
self.ofp_api_host = cfg.CONF.OVS.openflow_rest_api
if not self.ofp_api_host:
raise n_exc.Invalid(_('Invalid configuration. check ryu.ini'))
self.client = client.OFPClient(self.ofp_api_host)
self.tun_client = client.TunnelClient(self.ofp_api_host)
self.iface_client = client.NeutronIfaceClient(self.ofp_api_host)
for nw_id in rest_nw_id.RESERVED_NETWORK_IDS:
if nw_id != rest_nw_id.NW_ID_UNKNOWN:
self.client.update_network(nw_id)
self._setup_rpc()
        # register all known networks on startup
self._create_all_tenant_network()
def _setup_rpc(self):
self.service_topics = {svc_constants.CORE: topics.PLUGIN,
svc_constants.L3_ROUTER_NAT: topics.L3PLUGIN}
self.conn = n_rpc.create_connection(new=True)
self.notifier = AgentNotifierApi(topics.AGENT)
self.endpoints = [RyuRpcCallbacks(self.ofp_api_host),
dhcp_rpc.DhcpRpcCallback(),
l3_rpc.L3RpcCallback()]
for svc_topic in self.service_topics.values():
self.conn.create_consumer(svc_topic, self.endpoints, fanout=False)
self.conn.consume_in_threads()
def _create_all_tenant_network(self):
for net in db_api_v2.network_all_tenant_list():
self.client.update_network(net.id)
for tun in self.tunnel_key.all_list():
self.tun_client.update_tunnel_key(tun.network_id, tun.tunnel_key)
session = db.get_session()
for port in session.query(models_v2.Port):
self.iface_client.update_network_id(port.id, port.network_id)
def _client_create_network(self, net_id, tunnel_key):
self.client.create_network(net_id)
self.tun_client.create_tunnel_key(net_id, tunnel_key)
def _client_delete_network(self, net_id):
RyuNeutronPluginV2._safe_client_delete_network(self.safe_reference,
net_id)
@staticmethod
def _safe_client_delete_network(safe_reference, net_id):
# Avoid handing naked plugin references to the client. When
# the client is mocked for testing, such references can
# prevent the plugin from being deallocated.
client.ignore_http_not_found(
lambda: safe_reference.client.delete_network(net_id))
client.ignore_http_not_found(
lambda: safe_reference.tun_client.delete_tunnel_key(net_id))
def create_network(self, context, network):
session = context.session
with session.begin(subtransactions=True):
            # set up default security groups
tenant_id = self._get_tenant_id_for_create(
context, network['network'])
self._ensure_default_security_group(context, tenant_id)
net = super(RyuNeutronPluginV2, self).create_network(context,
network)
self._process_l3_create(context, net, network['network'])
tunnel_key = self.tunnel_key.allocate(session, net['id'])
try:
self._client_create_network(net['id'], tunnel_key)
except Exception:
with excutils.save_and_reraise_exception():
self._client_delete_network(net['id'])
return net
def update_network(self, context, id, network):
session = context.session
with session.begin(subtransactions=True):
net = super(RyuNeutronPluginV2, self).update_network(context, id,
network)
self._process_l3_update(context, net, network['network'])
return net
def delete_network(self, context, id):
self._client_delete_network(id)
session = context.session
with session.begin(subtransactions=True):
self.tunnel_key.delete(session, id)
self._process_l3_delete(context, id)
super(RyuNeutronPluginV2, self).delete_network(context, id)
def create_port(self, context, port):
session = context.session
port_data = port['port']
with session.begin(subtransactions=True):
self._ensure_default_security_group_on_port(context, port)
sgids = self._get_security_groups_on_port(context, port)
port = super(RyuNeutronPluginV2, self).create_port(context, port)
self._process_portbindings_create_and_update(context,
port_data,
port)
self._process_port_create_security_group(
context, port, sgids)
self.notify_security_groups_member_updated(context, port)
self.iface_client.create_network_id(port['id'], port['network_id'])
return port
def delete_port(self, context, id, l3_port_check=True):
        # if needed, check to see if this is a port owned by
        # an l3-router. If so, we should prevent deletion.
if l3_port_check:
self.prevent_l3_port_deletion(context, id)
with context.session.begin(subtransactions=True):
router_ids = self.disassociate_floatingips(
context, id, do_notify=False)
port = self.get_port(context, id)
self._delete_port_security_group_bindings(context, id)
super(RyuNeutronPluginV2, self).delete_port(context, id)
        # now that we've left the db transaction, we are safe to notify
self.notify_routers_updated(context, router_ids)
self.notify_security_groups_member_updated(context, port)
def update_port(self, context, id, port):
deleted = port['port'].get('deleted', False)
session = context.session
need_port_update_notify = False
with session.begin(subtransactions=True):
original_port = super(RyuNeutronPluginV2, self).get_port(
context, id)
updated_port = super(RyuNeutronPluginV2, self).update_port(
context, id, port)
self._process_portbindings_create_and_update(context,
port['port'],
updated_port)
need_port_update_notify = self.update_security_group_on_port(
context, id, port, original_port, updated_port)
need_port_update_notify |= self.is_security_group_member_updated(
context, original_port, updated_port)
need_port_update_notify |= (original_port['admin_state_up'] !=
updated_port['admin_state_up'])
if need_port_update_notify:
self.notifier.port_update(context, updated_port)
if deleted:
db_api_v2.set_port_status(session, id, q_const.PORT_STATUS_DOWN)
return updated_port
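# A minimal sketch (not part of the plugin) of the rollback pattern used by
# RyuNeutronPluginV2.create_network() above: if updating the Ryu controller
# fails after the network has been created, excutils.save_and_reraise_exception()
# lets the controller-side objects be cleaned up while the original exception
# is preserved and re-raised once the with-block exits.
def _example_rollback_on_failure(plugin, net_id, tunnel_key):
    try:
        plugin._client_create_network(net_id, tunnel_key)
    except Exception:
        with excutils.save_and_reraise_exception():
            plugin._client_delete_network(net_id)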
| 42.974265
| 78
| 0.646847
|
a64435095a3e11aec5d41248ddb8076e5b876baf
| 367
|
py
|
Python
|
ruleta_app/migrations/0002_alter_round_fecha.py
|
Franco1605/ruleta-app
|
93ac27039b5a502aa03683ec2991aefda65bbf41
|
[
"MIT"
] | null | null | null |
ruleta_app/migrations/0002_alter_round_fecha.py
|
Franco1605/ruleta-app
|
93ac27039b5a502aa03683ec2991aefda65bbf41
|
[
"MIT"
] | null | null | null |
ruleta_app/migrations/0002_alter_round_fecha.py
|
Franco1605/ruleta-app
|
93ac27039b5a502aa03683ec2991aefda65bbf41
|
[
"MIT"
] | null | null | null |
# Generated by Django 3.2.9 on 2021-11-21 04:25
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('ruleta_app', '0001_initial'),
]
operations = [
migrations.AlterField(
model_name='round',
name='fecha',
field=models.DateTimeField(),
),
]
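# For context, a minimal sketch (an assumption, not taken from the project) of
# the model state this migration encodes: after applying it, Round.fecha is a
# plain DateTimeField with no auto_now/default options, e.g.
#     class Round(models.Model):
#         fecha = models.DateTimeField()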
| 19.315789
| 47
| 0.580381
|
e5f7a58c76e9583e4981b9dc644beb9712b5354f
| 1,252
|
py
|
Python
|
manilaclient/v1/contrib/list_extensions.py
|
Murray-LIANG/python-manilaclient
|
e3652b9c1c36c825d07dce741802930ad0ec1a12
|
[
"CNRI-Python",
"Apache-1.1"
] | null | null | null |
manilaclient/v1/contrib/list_extensions.py
|
Murray-LIANG/python-manilaclient
|
e3652b9c1c36c825d07dce741802930ad0ec1a12
|
[
"CNRI-Python",
"Apache-1.1"
] | null | null | null |
manilaclient/v1/contrib/list_extensions.py
|
Murray-LIANG/python-manilaclient
|
e3652b9c1c36c825d07dce741802930ad0ec1a12
|
[
"CNRI-Python",
"Apache-1.1"
] | null | null | null |
# Copyright 2013 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import sys
import warnings
from manilaclient.v2.contrib import list_extensions
warnings.warn(
"Module manilaclient.v1.contrib.list_extensions is deprecated "
"(taken as a basis for manilaclient.v2.contrib.list_extensions). "
"The preferable way to get a client class or object is to use "
"the manilaclient.client module.")
class MovedModule(object):
def __init__(self, new_module):
self.new_module = new_module
def __getattr__(self, attr):
return getattr(self.new_module, attr)
sys.modules["manilaclient.v1.contrib.list_extensions"] = MovedModule(
list_extensions)
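# A small usage sketch (illustrative, not part of the module): because the
# MovedModule proxy is registered in sys.modules above, importing the old v1
# path still works and every attribute lookup is forwarded to
# manilaclient.v2.contrib.list_extensions, e.g.
#     from manilaclient.v1.contrib import list_extensions as legacy
#     # `legacy` is the MovedModule instance; legacy.<name> resolves against v2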
| 33.837838
| 78
| 0.73722
|
37b246fbbf6f2b8aa286b7c9935d8865cd4d9bcd
| 1,874
|
py
|
Python
|
pydiscordbio/models/user.py
|
awersli99/pydiscordbio
|
adbe6853594f1ee700043f9520dfd9a893fa44f0
|
[
"MIT"
] | 7
|
2020-08-29T15:56:24.000Z
|
2021-02-21T22:30:37.000Z
|
pydiscordbio/models/user.py
|
awersli99/pydiscordbio
|
adbe6853594f1ee700043f9520dfd9a893fa44f0
|
[
"MIT"
] | null | null | null |
pydiscordbio/models/user.py
|
awersli99/pydiscordbio
|
adbe6853594f1ee700043f9520dfd9a893fa44f0
|
[
"MIT"
] | null | null | null |
from datetime import datetime
import dateutil.parser
from typing import Optional
GENDER = {
0: "Male",
1: "Female"
}
def from_datetime(x: str) -> Optional[datetime]:
"""Returns a parsed datetime object."""
try:
return dateutil.parser.parse(x)
except ValueError:
return None
class User:
"""A discord.bio user object"""
slug: str
user_id: int
flags: int
verified: bool
premium_type: int
created_at: datetime
description: str
location: str
gender: Optional[str]
birthday: Optional[datetime]
email: Optional[str]
occupation: Optional[str]
banner: Optional[str]
premium: bool
staff: bool
likes: int
    def __init__(self, obj: dict) -> None:
assert isinstance(
obj, dict), 'Received malformed payload from discord.bio API'
self.slug = obj.get("slug")
self.user_id = int(obj.get("user_id"))
self.flags = int(obj.get("flags"))
self.verified = bool(obj.get('verified', 0))
self.premium_type = int(obj.get("premium_type"))
self.created_at = from_datetime(obj.get("created_at"))
self.description = obj.get("description")
self.location = obj.get("location")
self.gender = GENDER.get(obj.get("gender"), None)
self.birthday = None
if obj.get("birthday", None):
self.birthday = from_datetime(obj.get("birthday"))
if obj.get("email") == "":
self.email = None
else:
self.email = obj.get("email")
if obj.get("occupation") == "":
self.occupation = None
else:
self.occupation = obj.get("occupation")
self.banner = obj.get("banner", None)
self.premium = obj.get('premium', False)
self.staff = obj.get('staff', False)
self.likes = obj.get('likes', 0)
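# A minimal usage sketch; the payload below is invented for illustration and is
# not taken from the discord.bio API documentation.
def _example_user() -> 'User':
    payload = {
        "slug": "example",
        "user_id": "123456789012345678",
        "flags": "0",
        "verified": 1,
        "premium_type": "0",
        "created_at": "2020-01-01T00:00:00Z",
        "description": "Hello!",
        "location": "Earth",
        "gender": 0,
        "birthday": "2000-05-17",
        "email": "",
        "occupation": "",
        "likes": 3,
    }
    user = User(payload)
    # user.gender == "Male", user.email is None, user.birthday is a datetime
    return user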
| 28.830769
| 73
| 0.593917
|
58ddccc226cee1a0915ab31e3cfff1c5815c70b1
| 253
|
py
|
Python
|
Python3/0347-Top-K-Frequent-Elements/soln-1.py
|
wyaadarsh/LeetCode-Solutions
|
3719f5cb059eefd66b83eb8ae990652f4b7fd124
|
[
"MIT"
] | 5
|
2020-07-24T17:48:59.000Z
|
2020-12-21T05:56:00.000Z
|
Python3/0347-Top-K-Frequent-Elements/soln-1.py
|
zhangyaqi1989/LeetCode-Solutions
|
2655a1ffc8678ad1de6c24295071308a18c5dc6e
|
[
"MIT"
] | null | null | null |
Python3/0347-Top-K-Frequent-Elements/soln-1.py
|
zhangyaqi1989/LeetCode-Solutions
|
2655a1ffc8678ad1de6c24295071308a18c5dc6e
|
[
"MIT"
] | 2
|
2020-07-24T17:49:01.000Z
|
2020-08-31T19:57:35.000Z
|
import collections
import heapq
class Solution:
def topKFrequent(self, nums, k):
"""
:type nums: List[int]
:type k: int
:rtype: List[int]
"""
counts = collections.Counter(nums)
return heapq.nlargest(k, counts, key=counts.get)
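# Quick usage sketch (inputs are assumed, not part of the original solution):
# counts = {1: 3, 2: 2, 3: 1}, so the two most frequent elements are [1, 2].
if __name__ == "__main__":
    print(Solution().topKFrequent([1, 1, 1, 2, 2, 3], 2))  # -> [1, 2]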
| 28.111111
| 56
| 0.545455
|
8561aa27d25ae11242c3f136bb5101a785606dc8
| 54
|
py
|
Python
|
src/Stele/processing/processing_hsg/__init__.py
|
SherwinGroup/Stele
|
9bb7da0b406a801975e21c9f7ce05d369ae661e5
|
[
"MIT"
] | null | null | null |
src/Stele/processing/processing_hsg/__init__.py
|
SherwinGroup/Stele
|
9bb7da0b406a801975e21c9f7ce05d369ae661e5
|
[
"MIT"
] | null | null | null |
src/Stele/processing/processing_hsg/__init__.py
|
SherwinGroup/Stele
|
9bb7da0b406a801975e21c9f7ce05d369ae661e5
|
[
"MIT"
] | null | null | null |
__author__ = 'Sphinx'
from . import helper_functions
| 13.5
| 30
| 0.777778
|
6d24ed469f3ab5d151cc76ac7ba1ce734e7256f4
| 23,705
|
py
|
Python
|
deepspeed/runtime/pipe/module.py
|
ConnollyLeon/DeepSpeed
|
2d84d1c185ef0345eaf43a7240d61b33eda43497
|
[
"MIT"
] | 2
|
2021-07-01T15:00:50.000Z
|
2022-03-27T15:35:49.000Z
|
deepspeed/runtime/pipe/module.py
|
ConnollyLeon/DeepSpeed
|
2d84d1c185ef0345eaf43a7240d61b33eda43497
|
[
"MIT"
] | 2
|
2020-05-28T01:37:24.000Z
|
2022-02-26T06:51:48.000Z
|
deepspeed/runtime/pipe/module.py
|
ConnollyLeon/DeepSpeed
|
2d84d1c185ef0345eaf43a7240d61b33eda43497
|
[
"MIT"
] | 1
|
2021-01-23T16:07:54.000Z
|
2021-01-23T16:07:54.000Z
|
import os
import enum
import re as regex
from collections import defaultdict
from functools import partial
import torch
import torch.nn as nn
import torch.distributed as dist
from deepspeed.utils import logger
from .. import utils as ds_utils
from ..activation_checkpointing import checkpointing
from .topology import PipeDataParallelTopology, PipelineParallelGrid
class PipelineError(Exception):
"""Errors related to the use of deepspeed.PipelineModule """
class LayerSpec:
"""Building block for specifying pipeline-parallel modules.
LayerSpec stores the type information and parameters for each stage in a
PipelineModule. For example:
.. code-block:: python
        nn.Sequential(
torch.nn.Linear(self.in_dim, self.hidden_dim, bias=False),
torch.nn.Linear(self.hidden_hidden, self.out_dim)
)
becomes
.. code-block:: python
layer_specs = [
LayerSpec(torch.nn.Linear, self.in_dim, self.hidden_dim, bias=False),
            LayerSpec(torch.nn.Linear, self.hidden_hidden, self.out_dim)
]
"""
def __init__(self, typename, *module_args, **module_kwargs):
self.typename = typename
self.module_args = module_args
self.module_kwargs = module_kwargs
if not issubclass(typename, nn.Module):
raise RuntimeError('LayerSpec only supports torch.nn.Module types.')
if dist.is_initialized():
self.global_rank = dist.get_rank()
else:
self.global_rank = -1
def __repr__(self):
return ds_utils.call_to_str(self.typename.__name__,
self.module_args,
self.module_kwargs)
def build(self, log=False):
"""Build the stored specification."""
if log:
logger.info(f'RANK={self.global_rank} building {repr(self)}')
return self.typename(*self.module_args, **self.module_kwargs)
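# A small illustrative sketch (not from the original source): LayerSpec defers
# construction, so the wrapped module is only allocated on the pipeline stage
# that owns it, when build() is called during partitioning, e.g.
#     spec = LayerSpec(nn.Linear, 784, 128, bias=False)  # nothing allocated yet
#     layer = spec.build()                               # nn.Linear(784, 128) created here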
class TiedLayerSpec(LayerSpec):
def __init__(self,
key,
typename,
*module_args,
forward_fn=None,
tied_weight_attr='weight',
**module_kwargs):
super().__init__(typename, *module_args, **module_kwargs)
self.key = key
self.forward_fn = forward_fn
self.tied_weight_attr = tied_weight_attr
class PipelineModule(nn.Module):
def __init__(self,
layers,
num_stages=None,
topology=None,
loss_fn=None,
seed_layers=False,
seed_fn=None,
base_seed=1234,
partition_method='parameters',
activation_checkpoint_interval=0,
activation_checkpoint_func=checkpointing.checkpoint):
"""Modules to be parallelized with pipeline parallelism.
The key constraint that enables pipeline parallelism is the
representation of the forward pass as a sequence of layers
and the enforcement of a simple interface between them. The
forward pass is implicitly defined by the module ``layers``. The key
assumption is that the output of each layer can be directly fed as
        input to the next, like a ``torch.nn.Sequential``. The forward pass is
implicitly:
.. code-block:: python
def forward(self, inputs):
x = inputs
for layer in self.layers:
x = layer(x)
return x
Args:
layers (Iterable): A sequence of layers defining pipeline structure. Can be a ``torch.nn.Sequential`` module.
num_stages (int, optional): The degree of pipeline parallelism. If not specified, ``topology`` must be provided.
            topology (``deepspeed.pipe.ProcessTopology``, optional): Defines the axes of parallelism for training. Must be provided if ``num_stages`` is ``None``.
loss_fn (callable, optional): Loss is computed ``loss = loss_fn(outputs, label)``
            seed_layers (bool, optional): Use a different seed for each layer. Defaults to False.
            seed_fn (callable, optional): Custom function used to set the per-layer seed. Defaults to ``None``.
            base_seed (int, optional): Starting seed used for layer-wise seeding. Defaults to 1234.
            partition_method (str, optional): Method used to partition layers across pipeline stages, e.g. 'parameters' (balance trainable parameters) or 'uniform' (equal number of layers). Defaults to 'parameters'.
            activation_checkpoint_interval (int, optional): The granularity of activation checkpointing, in number of layers. 0 disables activation checkpointing.
activation_checkpoint_func (callable, optional): The function to use for activation checkpointing. Defaults to ``deepspeed.checkpointing.checkpoint``.
"""
super().__init__()
if num_stages is None and topology is None:
raise RuntimeError('must provide num_stages or topology')
self.micro_offset = 0
self.loss_fn = loss_fn
self.seed_layers = seed_layers
self.seed_fn = seed_fn
self.base_seed = base_seed
if dist.get_rank() == 0:
try:
seed_str = self.seed_fn.__name__
except AttributeError:
seed_str = None
print(
f'SEED_LAYERS={self.seed_layers} BASE_SEED={self.base_seed} SEED_FN={seed_str}'
)
# Setup world info
self.world_group = dist.new_group(ranks=range(dist.get_world_size()))
self.global_rank = dist.get_rank(group=self.world_group)
self.world_size = dist.get_world_size(group=self.world_group)
if topology:
self._topo = topology
self.num_stages = self._topo.get_dim('pipe')
else:
self.num_stages = num_stages
if topology is None:
if self.world_size % self.num_stages != 0:
raise RuntimeError(
f'num_stages ({self.num_stages}) must divide distributed world size ({self.world_size})'
)
dp = self.world_size // num_stages
topology = PipeDataParallelTopology(num_pp=num_stages, num_dp=dp)
self._topo = topology
        # Construct communicators for pipeline topology
self._grid = PipelineParallelGrid(process_group=self.world_group,
topology=self._topo)
self.stage_id = self._topo.get_coord(self.global_rank).pipe
# Initialize partition information
self._layer_specs = list(layers)
self._num_layers = len(self._layer_specs)
self._local_start = 0
self._local_stop = None
self._partition_layers(method=partition_method)
self.forward_funcs = []
self.tied_modules = nn.ModuleDict()
self.tied_weight_attrs = {}
# Offset the random seed by the stage ID.
#newseed = torch.cuda.initial_seed() + self._grid.get_stage_id()
#ds_utils.set_random_seed(newseed)
#with torch.random.fork_rng(devices=[torch.cuda.current_device()]):
self._build()
self.to('cuda')
self.tied_comms = self._index_tied_modules()
self._synchronize_tied_weights()
self.activation_checkpoint_interval = activation_checkpoint_interval
self.activation_checkpoint_func = activation_checkpoint_func
def _build(self):
specs = self._layer_specs
for local_idx, layer in enumerate(specs[self._local_start:self._local_stop]):
layer_idx = local_idx + self._local_start
if self.seed_layers:
if self.seed_fn:
self.seed_fn(self.base_seed + layer_idx)
else:
ds_utils.set_random_seed(self.base_seed + layer_idx)
# Recursively build PipelineModule objects
if isinstance(layer, PipelineModule):
raise NotImplementedError('RECURSIVE BUILD NOT YET IMPLEMENTED')
# LayerSpec objects contain an nn.Module that should be allocated now.
elif isinstance(layer, nn.Module):
name = str(layer_idx)
self.forward_funcs.append(layer)
self.add_module(name, layer)
# TiedLayerSpec objects contain an nn.Module that should be allocated now.
elif isinstance(layer, TiedLayerSpec):
# Build and register the module if we haven't seen it before.
if layer.key not in self.tied_modules:
self.tied_modules[layer.key] = layer.build()
self.tied_weight_attrs[layer.key] = layer.tied_weight_attr
if layer.forward_fn is None:
# Just use forward()
self.forward_funcs.append(self.tied_modules[layer.key])
else:
# User specified fn with args (module, input)
self.forward_funcs.append(
partial(layer.forward_fn,
self.tied_modules[layer.key]))
# LayerSpec objects contain an nn.Module that should be allocated now.
elif isinstance(layer, LayerSpec):
module = layer.build()
name = str(layer_idx)
self.forward_funcs.append(module)
self.add_module(name, module)
# Last option: layer may be a functional (e.g., lambda). We do nothing in
# that case and just use it in forward()
else:
self.forward_funcs.append(layer)
# All pipeline parameters should be considered as model parallel in the context
# of our FP16 optimizer
for p in self.parameters():
p.model_parallel = True
def _count_layer_params(self):
"""Count the trainable parameters in individual layers.
This routine will only build one layer at a time.
Returns:
A list of the number of parameters in each layer.
"""
param_counts = [0] * len(self._layer_specs)
for idx, layer in enumerate(self._layer_specs):
if isinstance(layer, LayerSpec):
l = layer.build()
params = filter(lambda p: p.requires_grad, l.parameters())
param_counts[idx] = sum(p.numel() for p in params)
elif isinstance(layer, nn.Module):
params = filter(lambda p: p.requires_grad, layer.parameters())
param_counts[idx] = sum(p.numel() for p in params)
return param_counts
def _find_layer_type(self, layername):
idxs = []
typeregex = regex.compile(layername, regex.IGNORECASE)
for idx, layer in enumerate(self._layer_specs):
name = None
if isinstance(layer, LayerSpec):
name = layer.typename.__name__
elif isinstance(layer, nn.Module):
name = layer.__class__.__name__
else:
try:
name = layer.__name__
except AttributeError:
continue
if typeregex.search(name):
idxs.append(idx)
if len(idxs) == 0:
raise RuntimeError(
f"Partitioning '{layername}' found no valid layers to partition.")
return idxs
def forward(self, forward_input):
# We need to offset the seed by the microbatch ID. Save it in a local var to
# ensure it is preserved in the closure. Otherwise checkpointed forward funcs
# will see a different offset.
self.micro_offset += 1
def exec_range_func(start, end):
''' Helper function to be used with checkpoint()
Adapted from torch.utils.checkpoint:checkpoint_sequential()
'''
local_micro_offset = self.micro_offset + 1
def exec_func(*inputs):
# Single tensor inputs need to be unwrapped
if len(inputs) == 1:
inputs = inputs[0]
for idx, layer in enumerate(self.forward_funcs[start:end]):
self.curr_layer = idx + self._local_start
if self.seed_layers:
new_seed = (self.base_seed *
local_micro_offset) + self.curr_layer
if self.seed_fn:
self.seed_fn(new_seed)
else:
ds_utils.set_random_seed(new_seed)
inputs = layer(inputs)
return inputs
return exec_func
if self.activation_checkpoint_interval == 0:
func = exec_range_func(0, len(self.forward_funcs))
x = func(forward_input)
else:
num_layers = len(self.forward_funcs)
x = forward_input
for start_idx in range(0, num_layers, self.activation_checkpoint_interval):
end_idx = min(start_idx + self.activation_checkpoint_interval,
num_layers)
funcs = self.forward_funcs[start_idx:end_idx]
# Since we either pass tensors or tuples of tensors without unpacking, we
# need to be careful not to double-wrap tensors with tuple.
if not isinstance(x, tuple):
x = (x, )
if self._is_checkpointable(funcs):
x = self.activation_checkpoint_func(
exec_range_func(start_idx,
end_idx),
*x)
else:
x = exec_range_func(start_idx, end_idx)(*x)
return x
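# Worked example (illustrative): with 10 forward funcs and
# activation_checkpoint_interval=4, the loop above checkpoints the ranges
# [0, 4), [4, 8), and [8, 10); with interval=0 the whole stage runs as a single
# non-checkpointed call.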
def _partition_layers(self, method='uniform'):
num_stages = self._topo.get_dim('pipe')
stage_id = self._topo.get_coord(self.global_rank).pipe
if self.global_rank == 0:
logger.info(f'Partitioning pipeline stages with method {method}')
method = method.lower()
# Each stage gets a simple uniform number of layers.
if method == 'uniform':
num_layers = len(self._layer_specs)
self.parts = ds_utils.partition_uniform(num_items=num_layers,
num_parts=num_stages)
elif method == 'parameters':
param_counts = self._count_layer_params()
self.parts = ds_utils.partition_balanced(weights=param_counts,
num_parts=num_stages)
elif method.startswith('type:'):
layertype = method.split(':')[1]
binary_weights = [0] * len(self._layer_specs)
for idx in self._find_layer_type(layertype):
binary_weights[idx] = 1
else:
self.parts = ds_utils.partition_balanced(weights=binary_weights,
num_parts=num_stages)
elif method == 'profile':
raise NotImplementedError(f'Partitioning method {method} not implemented.')
else:
raise NotImplementedError(f'Partitioning method {method} not implemented.')
# Print some information on the partitioning.
if self.global_rank == 0:
for stage in range(num_stages):
start = self.parts[stage]
stop = self.parts[stage + 1]
print(f'stage={stage} layers={stop - start}')
for idx, layer in enumerate(self._layer_specs[start:stop]):
name = str(layer)
if isinstance(layer, LayerSpec):
name = layer.typename.__name__
elif isinstance(layer, nn.Module):
name = layer.__class__.__name__
else:
try:
name = layer.__name__
except AttributeError:
pass
print(f' {idx+start:2d}: {name}')
if self.loss_fn:
try:
print(f' loss: {self.loss_fn.__name__}')
except AttributeError:
print(f' loss: {self.loss_fn.__class__.__name__}')
self._set_bounds(start=self.parts[stage_id], stop=self.parts[stage_id + 1])
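# Illustrative examples of the partition_method values handled above (layer
# counts are hypothetical; exact boundaries come from ds_utils):
#   'uniform'          -> roughly equal layer counts per stage, e.g. 8 layers on
#                         4 stages gives parts close to [0, 2, 4, 6, 8]
#   'parameters'       -> stages balanced by trainable parameter counts
#   'type:transformer' -> stages balanced over layers whose class name matches
#                         the regex 'transformer'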
def allreduce_tied_weight_gradients(self):
'''All reduce the gradients of the tied weights between tied stages'''
for key, comm in self.tied_comms.items():
weight = getattr(self.tied_modules[key], comm['weight_attr'])
dist.all_reduce(weight.grad, group=comm['group'])
def _synchronize_tied_weights(self):
for key, comm in self.tied_comms.items():
dist.broadcast(
getattr(comm['module'],
comm['weight_attr']),
src=min(comm['ranks']),
group=comm['group'],
)
def _index_tied_modules(self):
''' Build communication structures for tied modules. '''
tied_comms = {}
if self._topo.get_dim('pipe') == 1:
return tied_comms
specs = self._layer_specs
tie_keys = set(s.key for s in specs if isinstance(s, TiedLayerSpec))
for key in tie_keys:
# Find the layers that the tied module appears in
tied_layers = []
for idx, layer in enumerate(specs):
if isinstance(layer, TiedLayerSpec) and layer.key == key:
tied_layers.append(idx)
# Find all stages with this tied module
# TODO: Would be nice to remove the nested data/model parallelism loops and
# TODO: instead generalize in some way, since we really just care about the
# TODO: stage that owns the tied layer. Then loop over each (dp, mp, ...)
# TODO: fiber to generate process groups.
tied_stages = set(self.stage_owner(idx) for idx in tied_layers)
for dp in range(self._grid.data_parallel_size):
for mp in range(self._grid.model_parallel_size):
tied_ranks = []
for s in sorted(tied_stages):
if self._grid.model_parallel_size > 1:
tied_ranks.append(
self._grid.stage_to_global(stage_id=s,
data=dp,
model=mp))
else:
tied_ranks.append(
self._grid.stage_to_global(stage_id=s,
data=dp))
group = dist.new_group(ranks=tied_ranks)
# Record this tied module if we own a local copy of it.
if self.global_rank in tied_ranks:
assert key in self.tied_modules
if key in self.tied_modules:
tied_comms[key] = {
'ranks': tied_ranks,
'group': group,
'weight_attr': self.tied_weight_attrs[key],
'module': self.tied_modules[key],
}
# Only count the tied module once in the eyes of the FP16 optimizer
if self.global_rank != tied_ranks[0]:
for p in self.tied_modules[key].parameters():
p.model_parallel = False
'''
if len(tied_comms) > 0:
print(f'RANK={self.global_rank} tied_comms={tied_comms}')
'''
return tied_comms
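# Illustrative sketch (hypothetical names) of how weight tying is declared so the
# machinery above can sync copies across stages: the same key appears in two
# TiedLayerSpec entries, e.g. an embedding reused by the first and last layers.
#
#   specs = [TiedLayerSpec('embed', EmbeddingPipe, vocab_size, hidden,
#                          tied_weight_attr='weight'),
#            ...,
#            TiedLayerSpec('embed', EmbeddingPipe, vocab_size, hidden,
#                          forward_fn=lambda module, x: module(x),
#                          tied_weight_attr='weight')]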
def partitions(self):
return self.parts
def stage_owner(self, layer_idx):
assert 0 <= layer_idx < self._num_layers
for stage in range(self._topo.get_dim('pipe')):
if self.parts[stage] <= layer_idx < self.parts[stage + 1]:
return stage
raise RuntimeError(f'Layer {layer_idx} not owned? parts={self.parts}')
def _set_bounds(self, start=None, stop=None):
"""Manually define the range of layers that will be built on this process.
These boundaries are treated as list slices and so start is inclusive and stop is
exclusive. The default of None for both results in all layers being built
locally.
"""
self._local_start = start
self._local_stop = stop
def set_checkpoint_interval(self, interval):
assert interval >= 0
self.activation_checkpoint_interval = interval
def topology(self):
""" ProcessTopology object to query process mappings. """
return self._topo
def mpu(self):
return self._grid
def num_pipeline_stages(self):
return self._topo.get_dim('pipe')
def ckpt_prefix(self, checkpoints_path, tag):
"""Build a prefix for all checkpoint files written by this module. """
# All checkpoint files start with this
rank_name = 'module'
# Data parallelism is omitted from the naming convention because we are agnostic
# to this in the checkpoint.
omit_dims = frozenset(['data'])
axes = [a for a in self._grid._topo.get_axis_names() if a not in omit_dims]
for dim in axes:
rank = getattr(self._grid._topo.get_coord(rank=self.global_rank), dim)
rank_name += f'-{dim}_{rank:02d}'
ckpt_name = os.path.join(checkpoints_path, str(tag), rank_name)
return ckpt_name
def ckpt_layer_path(self, ckpt_dir, local_layer_idx):
"""Customize a prefix for a specific pipeline module layer. """
idx = local_layer_idx + self._local_start
layer_ckpt_path = os.path.join(ckpt_dir, f'layer_{idx:02d}')
rank_repr = self._grid._topo.get_rank_repr(rank=self.global_rank)
if rank_repr != '':
layer_ckpt_path += f'-{rank_repr}'
layer_ckpt_path += '-model_states.pt'
return layer_ckpt_path
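# Worked example (illustrative): for local layer index 2 with _local_start == 0
# and an empty rank_repr, the path above is
#   <ckpt_dir>/layer_02-model_states.pt
# and with a non-empty rank_repr such as 'model_00' it becomes
#   <ckpt_dir>/layer_02-model_00-model_states.pt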
def save_state_dict(self, save_dir):
if self._grid.data_parallel_id != 0:
return
os.makedirs(save_dir, exist_ok=True)
layer_offset = self._local_start
for idx, layer in enumerate(self.forward_funcs):
model_ckpt_path = self.ckpt_layer_path(save_dir, idx)
if not hasattr(layer, 'state_dict'):
continue
torch.save(layer.state_dict(), model_ckpt_path)
def load_state_dir(self, load_dir, strict=True):
rank = dist.get_rank()
layer_offset = self._local_start
for idx, layer in enumerate(self.forward_funcs):
# Functions, etc. will not have state_dicts
if not hasattr(layer, 'load_state_dict'):
continue
model_ckpt_path = self.ckpt_layer_path(load_dir, idx)
layer.load_state_dict(torch.load(model_ckpt_path,
map_location=lambda storage,
loc: storage),
strict=strict)
if self._grid.data_parallel_id == 0:
logger.info(
f'RANK={self.global_rank} Loaded layer={idx+layer_offset} file={model_ckpt_path}'
)
self._synchronize_tied_weights()
def _is_checkpointable(self, funcs):
if self.__class__.__name__ == 'GPT2ModelPipe':
return all('ParallelTransformerLayerPipe' in f.__class__.__name__
for f in funcs)
params = [f.parameters() for f in funcs if isinstance(f, torch.nn.Module)]
return any(len(list(p)) > 0 for p in params)
| 41.154514
| 167
| 0.571989
|
20c2930c5056f8a60868e0e954c5a69ae6bdfc80
| 3,583
|
py
|
Python
|
python/seldon/cli/zk_utils.py
|
gigliovale/seldon-server
|
64f04bc8c47bdc53d4f1e58f454b9aa82d957b9a
|
[
"Apache-2.0"
] | null | null | null |
python/seldon/cli/zk_utils.py
|
gigliovale/seldon-server
|
64f04bc8c47bdc53d4f1e58f454b9aa82d957b9a
|
[
"Apache-2.0"
] | null | null | null |
python/seldon/cli/zk_utils.py
|
gigliovale/seldon-server
|
64f04bc8c47bdc53d4f1e58f454b9aa82d957b9a
|
[
"Apache-2.0"
] | 1
|
2018-08-07T14:40:53.000Z
|
2018-08-07T14:40:53.000Z
|
import json
from os import walk
import os
import sys
import errno
import kazoo.exceptions
def mkdir_p(path):
try:
os.makedirs(path)
except OSError as exc: # Python >2.5
if exc.errno == errno.EEXIST and os.path.isdir(path):
pass
else: raise
def is_json_data(data):
if (data != None) and (len(data)>0):
return data[0] == '{' or data[0] == '['
else:
return False
def push_all_nodes(zk_client,zkroot):
for (dirpath, dirnames, filenames) in walk(zkroot):
for filename in filenames:
file_path = dirpath + "/" + filename
f = open(file_path)
data = f.read()
f.close()
node_path = file_path.replace(zkroot,"").replace("/_data_","")
node_set(zk_client,node_path,data)
def get_all_nodes_list(zk_client, start_node, all_nodes_list):
#print "processing: {}".format(start_node)
try:
children = zk_client.get_children(start_node)
for child in children:
child = str(child)
node_path = start_node+"/"+child if start_node != '/' else "/"+child
all_nodes_list.add(node_path)
get_all_nodes_list(zk_client, node_path, all_nodes_list)
except kazoo.exceptions.NoNodeError:
pass
def write_data_to_file(data_fpath, data):
json_str = dict_to_json(data, True) if isinstance(data,dict) else str(data)
mkdir_p(os.path.dirname(data_fpath))
f = open(data_fpath,'w')
f.write(json_str)
f.write('\n')
f.close()
print("Writing data to file[{data_fpath}]".format(**locals()))
def dict_to_json(d, expand=False):
return json.dumps(d, sort_keys=True, indent=4, separators=(',', ': ')) if expand else json.dumps(d, sort_keys=True, separators=(',',':'))
def json_to_dict(json_data):
return json.loads(json_data)
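# Worked example (illustrative): dict_to_json({'b': 1, 'a': 2}) returns the
# compact string '{"a":2,"b":1}', while dict_to_json(..., expand=True) returns a
# pretty-printed, 4-space-indented version of the same sorted document.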
def pull_all_nodes(zk_client,zkroot):
all_nodes_list = set()
nodes = ["/config","/all_clients"]
for node in nodes:
start_node = node
get_all_nodes_list(zk_client, start_node, all_nodes_list)
all_nodes_list = list(all_nodes_list)
for node_path in all_nodes_list:
print "trying to sync ",node_path
node_value = node_get(zk_client,node_path)
if not node_value is None:
node_value = node_value.strip()
if is_json_data(node_value):
data = json_to_dict(node_value) if node_value != None and len(node_value)>0 else ""
else:
data = str(node_value)
data_fpath = zkroot + node_path + "/_data_"
write_data_to_file(data_fpath, data)
def json_compress(json_data):
d = json.loads(json_data)
return json.dumps(d, sort_keys=True, separators=(',',':'))
def node_set(zk_client, node_path, node_value):
if is_json_data(node_value):
node_value = json_compress(node_value)
node_value = node_value.strip() if node_value != None else node_value
if zk_client.exists(node_path):
retVal = zk_client.set(node_path,node_value)
else:
retVal = zk_client.create(node_path,node_value,makepath=True)
print "updated zk node[{node_path}]".format(node_path=node_path)
def node_get(zk_client, node_path):
theValue = None
if zk_client.exists(node_path):
theValue = zk_client.get(node_path)
theValue = theValue[0]
return theValue.strip() if theValue != None else theValue
def node_delete(zk_client, node_path):
if zk_client.exists(node_path):
retVal = zk_client.delete(node_path)
print "deleted zk node[{node_path}]".format(node_path=node_path)
| 34.451923
| 141
| 0.650293
|
7e6f2612dfebd53dd0a64c00606594f22a8b14ad
| 1,900
|
py
|
Python
|
tacker/tests/unit/test_hacking.py
|
K-OpenNet/OPNFV-Alarm
|
453d5dc0e01999fd23cf6b62671468f11c0dd2e2
|
[
"Apache-2.0"
] | 3
|
2016-03-01T12:26:07.000Z
|
2016-08-03T06:24:59.000Z
|
tacker/tests/unit/test_hacking.py
|
K-OpenNet/OPNFV-Alarm
|
453d5dc0e01999fd23cf6b62671468f11c0dd2e2
|
[
"Apache-2.0"
] | 24
|
2015-10-21T19:09:02.000Z
|
2021-08-02T11:27:26.000Z
|
tacker/tests/unit/test_hacking.py
|
K-OpenNet/OPNFV-Alarm
|
453d5dc0e01999fd23cf6b62671468f11c0dd2e2
|
[
"Apache-2.0"
] | 12
|
2016-02-16T15:01:46.000Z
|
2017-03-13T10:01:16.000Z
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from tacker.hacking import checks
from tacker.tests import base
class HackingTestCase(base.BaseTestCase):
def test_log_translations(self):
logs = ['audit', 'error', 'info', 'warn', 'warning', 'critical',
'exception']
levels = ['_LI', '_LW', '_LE', '_LC']
debug = "LOG.debug('OK')"
self.assertEqual(
0, len(list(checks.validate_log_translations(debug, debug, 'f'))))
for log in logs:
bad = 'LOG.%s("Bad")' % log
self.assertEqual(
1, len(list(checks.validate_log_translations(bad, bad, 'f'))))
ok = "LOG.%s(_('OK'))" % log
self.assertEqual(
0, len(list(checks.validate_log_translations(ok, ok, 'f'))))
ok = "LOG.%s('OK') # noqa" % log
self.assertEqual(
0, len(list(checks.validate_log_translations(ok, ok, 'f'))))
ok = "LOG.%s(variable)" % log
self.assertEqual(
0, len(list(checks.validate_log_translations(ok, ok, 'f'))))
for level in levels:
ok = "LOG.%s(%s('OK'))" % (log, level)
self.assertEqual(
0, len(list(checks.validate_log_translations(ok,
ok, 'f'))))
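# Illustrative summary of what the check exercised above enforces (examples mirror
# the test cases): LOG.debug('OK') and LOG.error(_LE('msg')) pass, while an
# untranslated LOG.error("msg") is flagged unless it carries a '# noqa' marker.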
| 43.181818
| 78
| 0.566842
|
1173141c6f08f3b6eab66438331ee4f49f329d03
| 3,529
|
py
|
Python
|
lwganrt/holo/data_struct.py
|
darkAlert/impersonator-rt
|
8a2b879cf60f2094944a0104592d460fee3bda6a
|
[
"MIT"
] | 6
|
2020-04-17T08:47:58.000Z
|
2021-07-02T10:58:52.000Z
|
lwganrt/holo/data_struct.py
|
darkAlert/impersonator-rt
|
8a2b879cf60f2094944a0104592d460fee3bda6a
|
[
"MIT"
] | null | null | null |
lwganrt/holo/data_struct.py
|
darkAlert/impersonator-rt
|
8a2b879cf60f2094944a0104592d460fee3bda6a
|
[
"MIT"
] | 1
|
2020-05-24T23:46:54.000Z
|
2020-05-24T23:46:54.000Z
|
import os
class DSNode():
def __init__(self, nodes, idx_start, idx_stop, leaf=False):
# self.nodes = nodes
self._idx_start = idx_start
self._idx_stop = idx_stop
self._leaf = leaf
@property
def idx_start(self):
return self._idx_start
@property
def idx_stop(self):
return self._idx_stop
@property
def is_leaf(self):
return self._leaf
class DSLeaf():
def __init__(self, root, name, sub_dir):
self._root = root
self._name = name
self._dir = sub_dir
@property
def name(self):
return self._name
@property
def dir(self):
return self._dir
@property
def filename(self):
return self.name + self._root.ext
@property
def path(self):
return os.path.join(self.dir,self.filename)
@property
def abs_dir(self):
return os.path.join(self._root.root_dir,self.dir)
@property
def abs_path(self):
return os.path.join(self.abs_dir,self.filename)
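# Worked example (illustrative values): with root.root_dir == '/data/mp4',
# root.ext == '.mp4', sub_dir == 'subject01/light00' and name == 'clip', the
# properties above give filename 'clip.mp4', path 'subject01/light00/clip.mp4'
# and abs_path '/data/mp4/subject01/light00/clip.mp4'.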
class DataStruct():
def __init__(self):
self._root_dir = None
self._ext = None
self._level_names = None
self._levels = None
self._tree = None
self._items = None
def parse(self, root, levels, ext='.mp4'):
assert os.path.exists(root)
self._root_dir = root
self._ext = ext if ext[0] == '.' else '.' + ext
if levels[0] == '/':
levels = levels[1:]
if levels[-1] == '/':
levels = levels[:-1]
self._level_names = levels.split('/')
self._levels = {l:[] for l in self._level_names}
self._items = []
self._tree = self.__parse__()
return self
@property
def root_dir(self):
return self._root_dir
@property
def ext(self):
return self._ext
def __len__(self):
return len(self._items)
def __parse__(self, path='', level=0):
abs_path = os.path.join(self.root_dir, path)
# Create leafs:
if level == len(self._level_names):
names = []
for file in os.listdir(abs_path):
if file.endswith(self.ext):
names.append(file.split('.')[0])
names.sort()
idx = len(self._items)
for name in names:
self._items.append(DSLeaf(root=self, name=name, sub_dir=path))
return DSNode(None, idx_start=idx, idx_stop=len(self._items), leaf=True)
# Or go deeper into subdirs:
subdirs = [p.name for p in os.scandir(abs_path) if p.is_dir()]
subdirs.sort()
nodes = {}
idx = len(self._items)
for dir_name in subdirs:
sub_path = os.path.join(path,dir_name)
node = self.__parse__(sub_path, level=level+1)
nodes[dir_name] = node
self._levels[self._level_names[level]].append((nodes[dir_name],sub_path))
return DSNode(nodes, idx_start=idx, idx_stop=len(self._items))
def items(self, start=0, stop=None, step=1):
if isinstance(start, tuple):
assert len(start)==2
start, stop = start[0], start[1]
elif isinstance(start, DSNode):
start, stop = start.idx_start, start.idx_stop
if stop is None:
stop = len(self._items)
for idx in range(start, stop, step):
yield self._items[idx]
def levels(self,name=None):
if name is None:
name = self._level_names[0]
level = self._levels[name]
for node,path in level:
yield (node.idx_start, node.idx_stop),path
def nodes(self, name):
level = self._levels[name]
for node,path in level:
yield node, path
if __name__ == "__main__":
root_dir = '/home/darkalert/KazendiJob/Data/HoloVideo/Data/mp4'
data = DataStruct().parse(root_dir, levels='subject/light/garment/scene', ext='mp4')
# for item in data.items():
# print (item.abs_path)
for idx_range,path in data.levels('scene'):
print ('====================:',path)
for item in data.items(idx_range):
print (item.abs_path)
| 22.335443
| 85
| 0.677529
|
9e273fcd8092bba96961c330fb011ea4507d2212
| 23,923
|
py
|
Python
|
SuperSonic/utils/engine/config_search.py
|
HuantWang/SUPERSONIC
|
bea7090e8bc4a54ed52495dd910ef946c88bec67
|
[
"CC-BY-4.0"
] | 78
|
2022-02-02T00:23:02.000Z
|
2022-03-15T11:44:02.000Z
|
SuperSonic/utils/engine/config_search.py
|
HuantWang/SUPERSONIC
|
bea7090e8bc4a54ed52495dd910ef946c88bec67
|
[
"CC-BY-4.0"
] | null | null | null |
SuperSonic/utils/engine/config_search.py
|
HuantWang/SUPERSONIC
|
bea7090e8bc4a54ed52495dd910ef946c88bec67
|
[
"CC-BY-4.0"
] | 3
|
2022-01-30T05:10:14.000Z
|
2022-03-04T21:18:44.000Z
|
from ray.tune.schedulers import ASHAScheduler, MedianStoppingRule, PopulationBasedTraining
from ray.tune import CLIReporter
import ray
from ray.rllib.models.catalog import ModelCatalog
from ray import tune
import subprocess
import third_party.contrib.alpha_zero.models.custom_torch_models
from ray.rllib import _register_all
_register_all()
class ConfigSearch:
""":class:
SuperSonic currently supports 23 RL algorithms from RLLib, covering a wide range of established RL algorithms.
"""
# cleanpid("50055")
# if ray.is_initialized():
# ray.shutdown()
def __init__(self):
self.num_workers = 0
self.training_iteration = 1
self.ray_num_cpus = 10
self.num_samples = 1
self.sched = ASHAScheduler('time_total_s', metric="episode_reward_mean", mode="max", max_t=10)
self.reporter = CLIReporter()
import os
os.environ['http_proxy'] = ''
os.environ['https_proxy'] = ''
def Algorithms(self, policy_algorithm, task_config, environment_path, iterations):
"""
Algorithms, using to call different RL algorithms
:param policy_algorithm:
:param task_config: The task_config, parameters passed to RL agent.
:param environment_path: The environment_path, tasks' environment path that RL agent called.
"""
reward_list = []
config_list = []
best_config =''
reward = 0
for i in range(iterations):
try:
if policy_algorithm == "MCTS":
best_config, reward = ConfigSearch().MCTS(task_config, environment_path)
if policy_algorithm == "PPO":
best_config, reward = ConfigSearch().PPO(task_config, environment_path)
if policy_algorithm == "DQN":
best_config, reward = ConfigSearch().DQN(task_config, environment_path)
if policy_algorithm == "QLearning":
best_config, reward = ConfigSearch().QLearning(task_config, environment_path)
if policy_algorithm == "APPO":
best_config, reward = ConfigSearch().APPO(task_config, environment_path)
if policy_algorithm == "A2C":
best_config, reward = ConfigSearch().A2C(task_config, environment_path)
if policy_algorithm == "A3C":
best_config, reward = ConfigSearch().A3C(task_config, environment_path)
if policy_algorithm == "ARS":
best_config, reward = ConfigSearch().ARS(task_config, environment_path)
if policy_algorithm == "ES":
best_config, reward = ConfigSearch().ES(task_config, environment_path)
if policy_algorithm == "MARWIL":
best_config, reward = ConfigSearch().MARWIL(task_config, environment_path)
if policy_algorithm == "PG":
best_config, reward = ConfigSearch().PG(task_config, environment_path)
if policy_algorithm == "SimpleQ":
best_config, reward = ConfigSearch().SimpleQ(task_config, environment_path)
except:
pass
config_list.append(best_config)
reward_list.append(reward)
index = reward_list.index(max(reward_list))
best_config = config_list[index]
return best_config
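# Usage sketch (illustrative; the environment id and task_config values are
# hypothetical, only keys read elsewhere in this class are shown):
#
#   task_config = {"local_dir": "/tmp/ray_results",
#                  "experiment": "demo",
#                  "stop": {"training_iteration": 1}}
#   best = ConfigSearch().Algorithms("PPO", task_config, "CustomEnv-v0", iterations=3)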
def PPO(self, task_config, environment_path):
self.lamda = 0.95
self.kl_coeff = 0.2
self.vf_clip_param = 10.0
self.entropy_coeff = 0.01
self.model = {"fcnet_hiddens": [128, 128]}
self.local_dir = task_config.get("local_dir")
if task_config.get("experiment") == "stoke":
self.child = subprocess.Popen(
f"cd {task_config.get('stoke_path')} && python run_synch.py {task_config.get('stoke_path')} {task_config.get('obs_file')}",
shell=True,
)
# start search config
analysis = tune.run("PPO",
scheduler=self.sched,
progress_reporter=self.reporter,
num_samples=self.num_samples,
stop={"training_iteration": self.training_iteration},
reuse_actors=True,
checkpoint_at_end=True,
config={"env": environment_path,
"env_config": task_config,
"num_workers": 0,
"lr": tune.uniform(0.001, 1.0),
"kl_coeff" : tune.uniform(0.2,0.5),
}
)
ray.shutdown(exiting_interpreter=False)
# get the best config
best_trial = analysis.get_best_trial(metric="episode_reward_mean", mode="max")
best_config = best_trial.config
best_metric_score = best_trial.last_result["episode_reward_mean"]
return best_config, best_metric_score
def MCTS(self, task_config, environment_path):
"""
MCTS, An interface to start RL agent with MCTS algorithm.
MCTS is an RL agent originally designed for two-player games.
This version adapts it to handle single player games. The code can
be scaled to any number of workers. It also implements the ranked
rewards (R2) strategy to enable self-play even in the one-player setting.
The code is mainly purposed to be used for combinatorial optimization.
:param task_config: The task_config, parameters passed to RL agent.
:param environment_path: The environment_path, tasks' environment path that RL agent called.
"""
self.mcts_config = {
"puct_coefficient": 1.5,
"num_simulations": 5,
"temperature": 1.0,
"dirichlet_epsilon": 0.20,
"dirichlet_noise": 0.03,
"argmax_tree_policy": False,
"add_dirichlet_noise": True,
}
self.ranked_rewards = {
"enable": True,
}
self.model = {
"custom_model": "dense_model",
}
ModelCatalog.register_custom_model(
"dense_model",
third_party.contrib.alpha_zero.models.custom_torch_models.DenseModel,
)
print(f"init {task_config}")
self.local_dir = task_config.get("local_dir")
if task_config.get("experiment") == "stoke":
try:
self.child = subprocess.Popen(
f"cd {task_config.get('stoke_path')} && python run_synch.py {task_config.get('stoke_path')} {task_config.get('obs_file')}",
shell=True,
)
except:
print("subprocess error")
analysis=tune.run(
"contrib/MCTS",
stop=task_config.get("stop"),
max_failures=0,
reuse_actors=True,
checkpoint_at_end=True,
local_dir=self.local_dir,
scheduler=self.sched,
progress_reporter=self.reporter,
num_samples=self.num_samples,
config={
"env": environment_path,
"env_config": task_config,
"num_workers": self.num_workers,
"lr": tune.uniform(0.001, 1.0),
"mcts_config": self.mcts_config,
"ranked_rewards": self.ranked_rewards,
"model": self.model,
},
)
ray.shutdown(exiting_interpreter=False)
best_trial = analysis.get_best_trial(metric="episode_reward_mean", mode="max")
best_config = best_trial.config
best_metric_score = best_trial.last_result["episode_reward_mean"]
return best_config, best_metric_score
def APPO(self, task_config, environment_path):
"""
APPO, an interface to start an RL agent with the APPO (asynchronous PPO) algorithm.
PPO’s clipped objective supports multiple SGD passes over the same batch of experiences.
Paper (https://arxiv.org/abs/1707.06347)
:param task_config: The task_config, parameters passed to RL agent.
:param environment_path: The environment_path, tasks' environment path that RL agent called.
"""
self.local_dir = task_config.get("local_dir")
if task_config.get("experiment") == "stoke":
self.child = subprocess.Popen(
f"cd {task_config.get('stoke_path')} && python run_synch.py {task_config.get('stoke_path')} {task_config.get('obs_file')}",
shell=True,
)
analysis=tune.run(
"APPO", # 内置算法PPO
scheduler=self.sched,
progress_reporter=self.reporter,
num_samples=self.num_samples,
stop={"training_iteration": self.training_iteration},
max_failures=0,
reuse_actors=True,
checkpoint_at_end=True,
local_dir=self.local_dir,
config={
"env": environment_path,
"env_config": task_config,
"lr": tune.uniform(0.001, 1.0),
},
)
ray.shutdown(exiting_interpreter=False)
best_trial = analysis.get_best_trial(metric="episode_reward_mean", mode="max")
best_config = best_trial.config
best_metric_score = best_trial.last_result["episode_reward_mean"]
return best_config, best_metric_score
def DQN(self, task_config, environment_path):
"""
DQN, an interface to start an RL agent with the DQN algorithm.
DQN learns action values off-policy with experience replay and a target network.
:param task_config: The task_config, parameters passed to RL agent.
:param environment_path: The environment_path, tasks' environment path that RL agent called.
"""
self.local_dir = task_config.get("local_dir")
if task_config.get("experiment") == "stoke":
self.child = subprocess.Popen(
f"cd {task_config.get('stoke_path')} && python run_synch.py {task_config.get('stoke_path')} {task_config.get('obs_file')}",
shell=True,
)
analysis=tune.run(
"DQN", # 内置算法PPO
scheduler=self.sched,
progress_reporter=self.reporter,
num_samples=self.num_samples,
stop={"training_iteration": self.training_iteration},
max_failures=0,
reuse_actors=True,
checkpoint_at_end=True,
local_dir=self.local_dir,
config={
"env": environment_path,
"env_config": task_config,
"lr": tune.uniform(0.001, 1.0),
},
)
ray.shutdown(exiting_interpreter=False)
best_trial = analysis.get_best_trial(metric="episode_reward_mean", mode="max")
best_config = best_trial.config
best_metric_score = best_trial.last_result["episode_reward_mean"]
return best_config, best_metric_score
def QLearning(self, task_config, environment_path):
"""
Q-networks, An interface to start RL agent with Q-networks algorithm.
Use two Q-networks (instead of one) for action-value estimation.
Each Q-network will have its own target network.
:param task_config: The task_config, parameters passed to RL agent.
:param environment_path: The environment_path, tasks' environment path that RL agent called.
"""
self.local_dir = task_config.get("local_dir")
if task_config.get("experiment") == "stoke":
self.child = subprocess.Popen(
f"cd {task_config.get('stoke_path')} && python run_synch.py {task_config.get('stoke_path')} {task_config.get('obs_file')}",
shell=True,
)
analysis=tune.run(
"SAC",
stop={"training_iteration": self.training_iteration},
max_failures=0,
reuse_actors=True,
checkpoint_at_end=True,
scheduler=self.sched,
progress_reporter=self.reporter,
num_samples=self.num_samples,
local_dir=self.local_dir,
config={
"env": environment_path,
"env_config": task_config,
"num_workers": self.num_workers,
"timesteps_per_iteration": 1,
"learning_starts": tune.uniform(0.1, 1.0),
"normalize_actions": False,
# "model": self.model,
},
)
ray.shutdown(exiting_interpreter=False)
best_trial = analysis.get_best_trial(metric="episode_reward_mean", mode="max")
best_config = best_trial.config
best_metric_score = best_trial.last_result["episode_reward_mean"]
return best_config, best_metric_score
def A2C(self, task_config, environment_path):
"""
A2C, an interface to start an RL agent with the A2C (Advantage Actor-Critic) algorithm.
A2C is a synchronous variant of A3C that collects rollouts in parallel and batches them for each update.
:param task_config: The task_config, parameters passed to RL agent.
:param environment_path: The environment_path, tasks' environment path that RL agent called.
"""
self.local_dir = task_config.get("local_dir")
if task_config.get("experiment") == "stoke":
self.child = subprocess.Popen(
f"cd {task_config.get('stoke_path')} && python run_synch.py {task_config.get('stoke_path')} {task_config.get('obs_file')}",
shell=True,
)
analysis=tune.run(
"A2C", # 内置算法PPO
scheduler=self.sched,
progress_reporter=self.reporter,
num_samples=self.num_samples,
stop={"training_iteration": self.training_iteration},
max_failures=0,
reuse_actors=True,
checkpoint_at_end=True,
local_dir=self.local_dir,
config={
"env": environment_path,
"env_config": task_config,
"lr": tune.uniform(0.001, 1.0),
},
)
ray.shutdown(exiting_interpreter=False)
best_trial = analysis.get_best_trial(metric="episode_reward_mean", mode="max")
best_config = best_trial.config
best_metric_score = best_trial.last_result["episode_reward_mean"]
return best_config, best_metric_score
def A3C(self, task_config, environment_path):
"""
A3C, an interface to start an RL agent with the A3C (Asynchronous Advantage Actor-Critic) algorithm.
A3C runs several workers that update a shared policy asynchronously.
:param task_config: The task_config, parameters passed to RL agent.
:param environment_path: The environment_path, tasks' environment path that RL agent called.
"""
self.local_dir = task_config.get("local_dir")
if task_config.get("experiment") == "stoke":
self.child = subprocess.Popen(
f"cd {task_config.get('stoke_path')} && python run_synch.py {task_config.get('stoke_path')} {task_config.get('obs_file')}",
shell=True,
)
analysis=tune.run(
"A3C", # 内置算法PPO
scheduler=self.sched,
progress_reporter=self.reporter,
num_samples=self.num_samples,
stop={"training_iteration": self.training_iteration},
max_failures=0,
reuse_actors=True,
checkpoint_at_end=True,
local_dir=self.local_dir,
config={
"env": environment_path,
"env_config": task_config,
"lr": tune.uniform(0.001, 1.0),
},
)
ray.shutdown(exiting_interpreter=False)
best_trial = analysis.get_best_trial(metric="episode_reward_mean", mode="max")
best_config = best_trial.config
best_metric_score = best_trial.last_result["episode_reward_mean"]
return best_config, best_metric_score
def ARS(self, task_config, environment_path):
"""
ARS, an interface to start an RL agent with the ARS (Augmented Random Search) algorithm.
ARS is a derivative-free method that evaluates random perturbations of the policy parameters.
:param task_config: The task_config, parameters passed to RL agent.
:param environment_path: The environment_path, tasks' environment path that RL agent called.
"""
self.local_dir = task_config.get("local_dir")
if task_config.get("experiment") == "stoke":
self.child = subprocess.Popen(
f"cd {task_config.get('stoke_path')} && python run_synch.py {task_config.get('stoke_path')} {task_config.get('obs_file')}",
shell=True,
)
analysis=tune.run(
"ARS", # 内置算法PPO
scheduler=self.sched,
progress_reporter=self.reporter,
num_samples=self.num_samples,
stop={"training_iteration": self.training_iteration},
max_failures=0,
reuse_actors=True,
checkpoint_at_end=True,
local_dir=self.local_dir,
config={
"env": environment_path,
"env_config": task_config,
"lr": tune.uniform(0.001, 1.0),
},
)
ray.shutdown(exiting_interpreter=False)
best_trial = analysis.get_best_trial(metric="episode_reward_mean", mode="max")
best_config = best_trial.config
best_metric_score = best_trial.last_result["episode_reward_mean"]
return best_config, best_metric_score
def ES(self, task_config, environment_path):
"""
ES, an interface to start an RL agent with the ES (Evolution Strategies) algorithm.
ES optimizes the policy through population-based, gradient-free parameter perturbations.
:param task_config: The task_config, parameters passed to RL agent.
:param environment_path: The environment_path, tasks' environment path that RL agent called.
"""
self.local_dir = task_config.get("local_dir")
if task_config.get("experiment") == "stoke":
self.child = subprocess.Popen(
f"cd {task_config.get('stoke_path')} && python run_synch.py {task_config.get('stoke_path')} {task_config.get('obs_file')}",
shell=True,
)
analysis=tune.run(
"ES", # 内置算法PPO
scheduler=self.sched,
progress_reporter=self.reporter,
num_samples=self.num_samples,
stop={"training_iteration": self.training_iteration},
max_failures=0,
reuse_actors=True,
checkpoint_at_end=True,
local_dir=self.local_dir,
config={
"env": environment_path,
"env_config": task_config,
"lr": tune.uniform(0.001, 1.0),
},
)
ray.shutdown(exiting_interpreter=False)
best_trial = analysis.get_best_trial(metric="episode_reward_mean", mode="max")
best_config = best_trial.config
best_metric_score = best_trial.last_result["episode_reward_mean"]
return best_config, best_metric_score
def MARWIL(self, task_config, environment_path):
"""
MARWIL, an interface to start an RL agent with the MARWIL algorithm.
MARWIL (Monotonic Advantage Re-Weighted Imitation Learning) combines imitation learning on logged experience with advantage re-weighting.
:param task_config: The task_config, parameters passed to RL agent.
:param environment_path: The environment_path, tasks' environment path that RL agent called.
"""
self.local_dir = task_config.get("local_dir")
if task_config.get("experiment") == "stoke":
self.child = subprocess.Popen(
f"cd {task_config.get('stoke_path')} && python run_synch.py {task_config.get('stoke_path')} {task_config.get('obs_file')}",
shell=True,
)
analysis=tune.run(
"MARWIL", # 内置算法PPO
scheduler=self.sched,
progress_reporter=self.reporter,
num_samples=self.num_samples,
stop={"training_iteration": self.training_iteration},
max_failures=0,
reuse_actors=True,
checkpoint_at_end=True,
local_dir=self.local_dir,
config={
"env": environment_path,
"env_config": task_config,
"lr": tune.uniform(0.001, 1.0),
},
)
ray.shutdown(exiting_interpreter=False)
best_trial = analysis.get_best_trial(metric="episode_reward_mean", mode="max")
best_config = best_trial.config
best_metric_score = best_trial.last_result["episode_reward_mean"]
return best_config, best_metric_score
def PG(self, task_config, environment_path):
"""
PG, an interface to start an RL agent with the vanilla Policy Gradient algorithm.
:param task_config: The task_config, parameters passed to RL agent.
:param environment_path: The environment_path, tasks' environment path that RL agent called.
"""
self.local_dir = task_config.get("local_dir")
if task_config.get("experiment") == "stoke":
self.child = subprocess.Popen(
f"cd {task_config.get('stoke_path')} && python run_synch.py {task_config.get('stoke_path')} {task_config.get('obs_file')}",
shell=True,
)
analysis=tune.run(
"PG", # 内置算法PPO
scheduler=self.sched,
progress_reporter=self.reporter,
num_samples=self.num_samples,
stop={"training_iteration": self.training_iteration},
max_failures=0,
reuse_actors=True,
checkpoint_at_end=True,
local_dir=self.local_dir,
config={
"env": environment_path,
"env_config": task_config,
"lr": tune.uniform(0.001, 1.0),
},
)
ray.shutdown(exiting_interpreter=False)
best_trial = analysis.get_best_trial(metric="episode_reward_mean", mode="max")
best_config = best_trial.config
best_metric_score = best_trial.last_result["episode_reward_mean"]
return best_config, best_metric_score
def SimpleQ(self, task_config, environment_path):
"""
SimpleQ, an interface to start an RL agent with the SimpleQ algorithm,
a simplified DQN without dueling, distributional, or prioritized-replay extensions.
:param task_config: The task_config, parameters passed to RL agent.
:param environment_path: The environment_path, tasks' environment path that RL agent called.
"""
self.local_dir = task_config.get("local_dir")
if task_config.get("experiment") == "stoke":
self.child = subprocess.Popen(
f"cd {task_config.get('stoke_path')} && python run_synch.py {task_config.get('stoke_path')} {task_config.get('obs_file')}",
shell=True,
)
analysis=tune.run(
"SimpleQ", # 内置算法PPO
scheduler=self.sched,
progress_reporter=self.reporter,
num_samples=self.num_samples,
stop={"training_iteration": self.training_iteration},
max_failures=0,
reuse_actors=True,
checkpoint_at_end=True,
local_dir=self.local_dir,
config={
"env": environment_path,
"env_config": task_config,
"lr": tune.uniform(0.001, 1.0),
},
)
ray.shutdown(exiting_interpreter=False)
best_trial = analysis.get_best_trial(metric="episode_reward_mean", mode="max")
best_config = best_trial.config
best_metric_score = best_trial.last_result["episode_reward_mean"]
return best_config, best_metric_score
| 41.317789
| 143
| 0.583413
|
88c172d7a08545446f6b16cb49d29f6a5c999084
| 1,268
|
py
|
Python
|
apps/jobsub/setup.py
|
civascu/hue
|
82f2de44789ff5a981ed725175bae7944832d1e9
|
[
"Apache-2.0"
] | null | null | null |
apps/jobsub/setup.py
|
civascu/hue
|
82f2de44789ff5a981ed725175bae7944832d1e9
|
[
"Apache-2.0"
] | null | null | null |
apps/jobsub/setup.py
|
civascu/hue
|
82f2de44789ff5a981ed725175bae7944832d1e9
|
[
"Apache-2.0"
] | null | null | null |
# Licensed to Cloudera, Inc. under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. Cloudera, Inc. licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from setuptools import setup, find_packages
setup(
name = "jobsub",
version = "0.9.1",
url = 'http://github.com/cloudera/hue',
author = "Hue",
description = "Hadoop Job Submission",
packages = find_packages('src'),
package_dir = {'': 'src'},
install_requires = ['setuptools', 'desktop'],
entry_points = { 'desktop.supervisor.specs': [ 'jobsubd = jobsub:SUPERVISOR_SPEC' ],
'desktop.sdk.application': 'jobsub=jobsub' },
)
| 42.266667
| 90
| 0.700315
|
f164abe52975ad29749e06e1ebc4d49e947af51b
| 42,702
|
py
|
Python
|
homeassistant/components/mqtt/light/schema_basic.py
|
mag1024/core
|
f3a89de71f6fa693016a41397b051bd48c86f841
|
[
"Apache-2.0"
] | null | null | null |
homeassistant/components/mqtt/light/schema_basic.py
|
mag1024/core
|
f3a89de71f6fa693016a41397b051bd48c86f841
|
[
"Apache-2.0"
] | null | null | null |
homeassistant/components/mqtt/light/schema_basic.py
|
mag1024/core
|
f3a89de71f6fa693016a41397b051bd48c86f841
|
[
"Apache-2.0"
] | null | null | null |
"""Support for MQTT lights."""
import logging
import voluptuous as vol
from homeassistant.components.light import (
ATTR_BRIGHTNESS,
ATTR_COLOR_MODE,
ATTR_COLOR_TEMP,
ATTR_EFFECT,
ATTR_EFFECT_LIST,
ATTR_HS_COLOR,
ATTR_MAX_MIREDS,
ATTR_MIN_MIREDS,
ATTR_RGB_COLOR,
ATTR_RGBW_COLOR,
ATTR_RGBWW_COLOR,
ATTR_SUPPORTED_COLOR_MODES,
ATTR_WHITE,
ATTR_WHITE_VALUE,
ATTR_XY_COLOR,
COLOR_MODE_BRIGHTNESS,
COLOR_MODE_COLOR_TEMP,
COLOR_MODE_HS,
COLOR_MODE_ONOFF,
COLOR_MODE_RGB,
COLOR_MODE_RGBW,
COLOR_MODE_RGBWW,
COLOR_MODE_UNKNOWN,
COLOR_MODE_WHITE,
COLOR_MODE_XY,
ENTITY_ID_FORMAT,
SUPPORT_BRIGHTNESS,
SUPPORT_COLOR,
SUPPORT_COLOR_TEMP,
SUPPORT_EFFECT,
SUPPORT_WHITE_VALUE,
LightEntity,
valid_supported_color_modes,
)
from homeassistant.const import (
CONF_NAME,
CONF_OPTIMISTIC,
CONF_PAYLOAD_OFF,
CONF_PAYLOAD_ON,
CONF_VALUE_TEMPLATE,
STATE_ON,
)
from homeassistant.core import callback
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.restore_state import RestoreEntity
import homeassistant.util.color as color_util
from .. import MqttCommandTemplate, MqttValueTemplate, subscription
from ... import mqtt
from ..const import (
CONF_COMMAND_TOPIC,
CONF_ENCODING,
CONF_QOS,
CONF_RETAIN,
CONF_STATE_TOPIC,
)
from ..debug_info import log_messages
from ..mixins import MQTT_ENTITY_COMMON_SCHEMA, MqttEntity
from .schema import MQTT_LIGHT_SCHEMA_SCHEMA
_LOGGER = logging.getLogger(__name__)
CONF_BRIGHTNESS_COMMAND_TOPIC = "brightness_command_topic"
CONF_BRIGHTNESS_SCALE = "brightness_scale"
CONF_BRIGHTNESS_STATE_TOPIC = "brightness_state_topic"
CONF_BRIGHTNESS_VALUE_TEMPLATE = "brightness_value_template"
CONF_COLOR_MODE_STATE_TOPIC = "color_mode_state_topic"
CONF_COLOR_MODE_VALUE_TEMPLATE = "color_mode_value_template"
CONF_COLOR_TEMP_COMMAND_TEMPLATE = "color_temp_command_template"
CONF_COLOR_TEMP_COMMAND_TOPIC = "color_temp_command_topic"
CONF_COLOR_TEMP_STATE_TOPIC = "color_temp_state_topic"
CONF_COLOR_TEMP_VALUE_TEMPLATE = "color_temp_value_template"
CONF_EFFECT_COMMAND_TOPIC = "effect_command_topic"
CONF_EFFECT_LIST = "effect_list"
CONF_EFFECT_STATE_TOPIC = "effect_state_topic"
CONF_EFFECT_VALUE_TEMPLATE = "effect_value_template"
CONF_HS_COMMAND_TOPIC = "hs_command_topic"
CONF_HS_STATE_TOPIC = "hs_state_topic"
CONF_HS_VALUE_TEMPLATE = "hs_value_template"
CONF_MAX_MIREDS = "max_mireds"
CONF_MIN_MIREDS = "min_mireds"
CONF_RGB_COMMAND_TEMPLATE = "rgb_command_template"
CONF_RGB_COMMAND_TOPIC = "rgb_command_topic"
CONF_RGB_STATE_TOPIC = "rgb_state_topic"
CONF_RGB_VALUE_TEMPLATE = "rgb_value_template"
CONF_RGBW_COMMAND_TEMPLATE = "rgbw_command_template"
CONF_RGBW_COMMAND_TOPIC = "rgbw_command_topic"
CONF_RGBW_STATE_TOPIC = "rgbw_state_topic"
CONF_RGBW_VALUE_TEMPLATE = "rgbw_value_template"
CONF_RGBWW_COMMAND_TEMPLATE = "rgbww_command_template"
CONF_RGBWW_COMMAND_TOPIC = "rgbww_command_topic"
CONF_RGBWW_STATE_TOPIC = "rgbww_state_topic"
CONF_RGBWW_VALUE_TEMPLATE = "rgbww_value_template"
CONF_STATE_VALUE_TEMPLATE = "state_value_template"
CONF_XY_COMMAND_TOPIC = "xy_command_topic"
CONF_XY_STATE_TOPIC = "xy_state_topic"
CONF_XY_VALUE_TEMPLATE = "xy_value_template"
CONF_WHITE_COMMAND_TOPIC = "white_command_topic"
CONF_WHITE_SCALE = "white_scale"
CONF_WHITE_VALUE_COMMAND_TOPIC = "white_value_command_topic"
CONF_WHITE_VALUE_SCALE = "white_value_scale"
CONF_WHITE_VALUE_STATE_TOPIC = "white_value_state_topic"
CONF_WHITE_VALUE_TEMPLATE = "white_value_template"
CONF_ON_COMMAND_TYPE = "on_command_type"
MQTT_LIGHT_ATTRIBUTES_BLOCKED = frozenset(
{
ATTR_COLOR_MODE,
ATTR_BRIGHTNESS,
ATTR_COLOR_TEMP,
ATTR_EFFECT,
ATTR_EFFECT_LIST,
ATTR_HS_COLOR,
ATTR_MAX_MIREDS,
ATTR_MIN_MIREDS,
ATTR_RGB_COLOR,
ATTR_RGBW_COLOR,
ATTR_RGBWW_COLOR,
ATTR_SUPPORTED_COLOR_MODES,
ATTR_WHITE_VALUE,
ATTR_XY_COLOR,
}
)
DEFAULT_BRIGHTNESS_SCALE = 255
DEFAULT_NAME = "MQTT LightEntity"
DEFAULT_OPTIMISTIC = False
DEFAULT_PAYLOAD_OFF = "OFF"
DEFAULT_PAYLOAD_ON = "ON"
DEFAULT_WHITE_VALUE_SCALE = 255
DEFAULT_WHITE_SCALE = 255
DEFAULT_ON_COMMAND_TYPE = "last"
VALUES_ON_COMMAND_TYPE = ["first", "last", "brightness"]
COMMAND_TEMPLATE_KEYS = [
CONF_COLOR_TEMP_COMMAND_TEMPLATE,
CONF_RGB_COMMAND_TEMPLATE,
CONF_RGBW_COMMAND_TEMPLATE,
CONF_RGBWW_COMMAND_TEMPLATE,
]
VALUE_TEMPLATE_KEYS = [
CONF_BRIGHTNESS_VALUE_TEMPLATE,
CONF_COLOR_MODE_VALUE_TEMPLATE,
CONF_COLOR_TEMP_VALUE_TEMPLATE,
CONF_EFFECT_VALUE_TEMPLATE,
CONF_HS_VALUE_TEMPLATE,
CONF_RGB_VALUE_TEMPLATE,
CONF_RGBW_VALUE_TEMPLATE,
CONF_RGBWW_VALUE_TEMPLATE,
CONF_STATE_VALUE_TEMPLATE,
CONF_WHITE_VALUE_TEMPLATE,
CONF_XY_VALUE_TEMPLATE,
]
_PLATFORM_SCHEMA_BASE = (
mqtt.MQTT_RW_PLATFORM_SCHEMA.extend(
{
vol.Optional(CONF_BRIGHTNESS_COMMAND_TOPIC): mqtt.valid_publish_topic,
vol.Optional(
CONF_BRIGHTNESS_SCALE, default=DEFAULT_BRIGHTNESS_SCALE
): vol.All(vol.Coerce(int), vol.Range(min=1)),
vol.Optional(CONF_BRIGHTNESS_STATE_TOPIC): mqtt.valid_subscribe_topic,
vol.Optional(CONF_BRIGHTNESS_VALUE_TEMPLATE): cv.template,
vol.Optional(CONF_COLOR_MODE_STATE_TOPIC): mqtt.valid_subscribe_topic,
vol.Optional(CONF_COLOR_MODE_VALUE_TEMPLATE): cv.template,
vol.Optional(CONF_COLOR_TEMP_COMMAND_TEMPLATE): cv.template,
vol.Optional(CONF_COLOR_TEMP_COMMAND_TOPIC): mqtt.valid_publish_topic,
vol.Optional(CONF_COLOR_TEMP_STATE_TOPIC): mqtt.valid_subscribe_topic,
vol.Optional(CONF_COLOR_TEMP_VALUE_TEMPLATE): cv.template,
vol.Optional(CONF_EFFECT_COMMAND_TOPIC): mqtt.valid_publish_topic,
vol.Optional(CONF_EFFECT_LIST): vol.All(cv.ensure_list, [cv.string]),
vol.Optional(CONF_EFFECT_STATE_TOPIC): mqtt.valid_subscribe_topic,
vol.Optional(CONF_EFFECT_VALUE_TEMPLATE): cv.template,
vol.Optional(CONF_HS_COMMAND_TOPIC): mqtt.valid_publish_topic,
vol.Optional(CONF_HS_STATE_TOPIC): mqtt.valid_subscribe_topic,
vol.Optional(CONF_HS_VALUE_TEMPLATE): cv.template,
vol.Optional(CONF_MAX_MIREDS): cv.positive_int,
vol.Optional(CONF_MIN_MIREDS): cv.positive_int,
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(CONF_ON_COMMAND_TYPE, default=DEFAULT_ON_COMMAND_TYPE): vol.In(
VALUES_ON_COMMAND_TYPE
),
vol.Optional(CONF_OPTIMISTIC, default=DEFAULT_OPTIMISTIC): cv.boolean,
vol.Optional(CONF_PAYLOAD_OFF, default=DEFAULT_PAYLOAD_OFF): cv.string,
vol.Optional(CONF_PAYLOAD_ON, default=DEFAULT_PAYLOAD_ON): cv.string,
vol.Optional(CONF_RGB_COMMAND_TEMPLATE): cv.template,
vol.Optional(CONF_RGB_COMMAND_TOPIC): mqtt.valid_publish_topic,
vol.Optional(CONF_RGB_STATE_TOPIC): mqtt.valid_subscribe_topic,
vol.Optional(CONF_RGB_VALUE_TEMPLATE): cv.template,
vol.Optional(CONF_RGBW_COMMAND_TEMPLATE): cv.template,
vol.Optional(CONF_RGBW_COMMAND_TOPIC): mqtt.valid_publish_topic,
vol.Optional(CONF_RGBW_STATE_TOPIC): mqtt.valid_subscribe_topic,
vol.Optional(CONF_RGBW_VALUE_TEMPLATE): cv.template,
vol.Optional(CONF_RGBWW_COMMAND_TEMPLATE): cv.template,
vol.Optional(CONF_RGBWW_COMMAND_TOPIC): mqtt.valid_publish_topic,
vol.Optional(CONF_RGBWW_STATE_TOPIC): mqtt.valid_subscribe_topic,
vol.Optional(CONF_RGBWW_VALUE_TEMPLATE): cv.template,
vol.Optional(CONF_STATE_VALUE_TEMPLATE): cv.template,
vol.Optional(CONF_WHITE_COMMAND_TOPIC): mqtt.valid_publish_topic,
vol.Optional(CONF_WHITE_SCALE, default=DEFAULT_WHITE_SCALE): vol.All(
vol.Coerce(int), vol.Range(min=1)
),
vol.Optional(CONF_WHITE_VALUE_COMMAND_TOPIC): mqtt.valid_publish_topic,
vol.Optional(
CONF_WHITE_VALUE_SCALE, default=DEFAULT_WHITE_VALUE_SCALE
): vol.All(vol.Coerce(int), vol.Range(min=1)),
vol.Optional(CONF_WHITE_VALUE_STATE_TOPIC): mqtt.valid_subscribe_topic,
vol.Optional(CONF_WHITE_VALUE_TEMPLATE): cv.template,
vol.Optional(CONF_XY_COMMAND_TOPIC): mqtt.valid_publish_topic,
vol.Optional(CONF_XY_STATE_TOPIC): mqtt.valid_subscribe_topic,
vol.Optional(CONF_XY_VALUE_TEMPLATE): cv.template,
},
)
.extend(MQTT_ENTITY_COMMON_SCHEMA.schema)
.extend(MQTT_LIGHT_SCHEMA_SCHEMA.schema)
)
PLATFORM_SCHEMA_BASIC = vol.All(
_PLATFORM_SCHEMA_BASE,
)
DISCOVERY_SCHEMA_BASIC = vol.All(
# CONF_VALUE_TEMPLATE is no longer supported, support was removed in 2022.2
cv.removed(CONF_VALUE_TEMPLATE),
_PLATFORM_SCHEMA_BASE.extend({}, extra=vol.REMOVE_EXTRA),
)
async def async_setup_entity_basic(
hass, config, async_add_entities, config_entry, discovery_data=None
):
"""Set up a MQTT Light."""
async_add_entities([MqttLight(hass, config, config_entry, discovery_data)])
class MqttLight(MqttEntity, LightEntity, RestoreEntity):
"""Representation of a MQTT light."""
_entity_id_format = ENTITY_ID_FORMAT
_attributes_extra_blocked = MQTT_LIGHT_ATTRIBUTES_BLOCKED
def __init__(self, hass, config, config_entry, discovery_data):
"""Initialize MQTT light."""
self._brightness = None
self._color_mode = None
self._color_temp = None
self._effect = None
self._hs_color = None
self._legacy_mode = False
self._rgb_color = None
self._rgbw_color = None
self._rgbww_color = None
self._state = False
self._supported_color_modes = None
self._white_value = None
self._xy_color = None
self._topic = None
self._payload = None
self._command_templates = None
self._value_templates = None
self._optimistic = False
self._optimistic_brightness = False
self._optimistic_color_mode = False
self._optimistic_color_temp = False
self._optimistic_effect = False
self._optimistic_hs_color = False
self._optimistic_rgb_color = False
self._optimistic_rgbw_color = False
self._optimistic_rgbww_color = False
self._optimistic_white_value = False
self._optimistic_xy_color = False
MqttEntity.__init__(self, hass, config, config_entry, discovery_data)
@staticmethod
def config_schema():
"""Return the config schema."""
return DISCOVERY_SCHEMA_BASIC
def _setup_from_config(self, config):
"""(Re)Setup the entity."""
if CONF_STATE_VALUE_TEMPLATE not in config and CONF_VALUE_TEMPLATE in config:
config[CONF_STATE_VALUE_TEMPLATE] = config[CONF_VALUE_TEMPLATE]
topic = {
key: config.get(key)
for key in (
CONF_BRIGHTNESS_COMMAND_TOPIC,
CONF_BRIGHTNESS_STATE_TOPIC,
CONF_COLOR_MODE_STATE_TOPIC,
CONF_COLOR_TEMP_COMMAND_TOPIC,
CONF_COLOR_TEMP_STATE_TOPIC,
CONF_COMMAND_TOPIC,
CONF_EFFECT_COMMAND_TOPIC,
CONF_EFFECT_STATE_TOPIC,
CONF_HS_COMMAND_TOPIC,
CONF_HS_STATE_TOPIC,
CONF_RGB_COMMAND_TOPIC,
CONF_RGB_STATE_TOPIC,
CONF_RGBW_COMMAND_TOPIC,
CONF_RGBW_STATE_TOPIC,
CONF_RGBWW_COMMAND_TOPIC,
CONF_RGBWW_STATE_TOPIC,
CONF_STATE_TOPIC,
CONF_WHITE_COMMAND_TOPIC,
CONF_WHITE_VALUE_COMMAND_TOPIC,
CONF_WHITE_VALUE_STATE_TOPIC,
CONF_XY_COMMAND_TOPIC,
CONF_XY_STATE_TOPIC,
)
}
self._topic = topic
self._payload = {"on": config[CONF_PAYLOAD_ON], "off": config[CONF_PAYLOAD_OFF]}
value_templates = {}
for key in VALUE_TEMPLATE_KEYS:
value_templates[key] = None
if CONF_VALUE_TEMPLATE in config:
value_templates = {
key: config.get(CONF_VALUE_TEMPLATE) for key in VALUE_TEMPLATE_KEYS
}
for key in VALUE_TEMPLATE_KEYS & config.keys():
value_templates[key] = config[key]
self._value_templates = {
key: MqttValueTemplate(
template, entity=self
).async_render_with_possible_json_value
for key, template in value_templates.items()
}
command_templates = {}
for key in COMMAND_TEMPLATE_KEYS:
command_templates[key] = None
for key in COMMAND_TEMPLATE_KEYS & config.keys():
command_templates[key] = MqttCommandTemplate(
config[key], entity=self
).async_render
self._command_templates = command_templates
optimistic = config[CONF_OPTIMISTIC]
self._optimistic_color_mode = (
optimistic or topic[CONF_COLOR_MODE_STATE_TOPIC] is None
)
self._optimistic = optimistic or topic[CONF_STATE_TOPIC] is None
self._optimistic_rgb_color = optimistic or topic[CONF_RGB_STATE_TOPIC] is None
self._optimistic_rgbw_color = optimistic or topic[CONF_RGBW_STATE_TOPIC] is None
self._optimistic_rgbww_color = (
optimistic or topic[CONF_RGBWW_STATE_TOPIC] is None
)
self._optimistic_brightness = (
optimistic
or (
topic[CONF_BRIGHTNESS_COMMAND_TOPIC] is not None
and topic[CONF_BRIGHTNESS_STATE_TOPIC] is None
)
or (
topic[CONF_BRIGHTNESS_COMMAND_TOPIC] is None
and topic[CONF_RGB_STATE_TOPIC] is None
)
)
self._optimistic_color_temp = (
optimistic or topic[CONF_COLOR_TEMP_STATE_TOPIC] is None
)
self._optimistic_effect = optimistic or topic[CONF_EFFECT_STATE_TOPIC] is None
self._optimistic_hs_color = optimistic or topic[CONF_HS_STATE_TOPIC] is None
self._optimistic_white_value = (
optimistic or topic[CONF_WHITE_VALUE_STATE_TOPIC] is None
)
self._optimistic_xy_color = optimistic or topic[CONF_XY_STATE_TOPIC] is None
supported_color_modes = set()
if topic[CONF_COLOR_TEMP_COMMAND_TOPIC] is not None:
supported_color_modes.add(COLOR_MODE_COLOR_TEMP)
self._color_mode = COLOR_MODE_COLOR_TEMP
if topic[CONF_HS_COMMAND_TOPIC] is not None:
supported_color_modes.add(COLOR_MODE_HS)
self._color_mode = COLOR_MODE_HS
if topic[CONF_RGB_COMMAND_TOPIC] is not None:
supported_color_modes.add(COLOR_MODE_RGB)
self._color_mode = COLOR_MODE_RGB
if topic[CONF_RGBW_COMMAND_TOPIC] is not None:
supported_color_modes.add(COLOR_MODE_RGBW)
self._color_mode = COLOR_MODE_RGBW
if topic[CONF_RGBWW_COMMAND_TOPIC] is not None:
supported_color_modes.add(COLOR_MODE_RGBWW)
self._color_mode = COLOR_MODE_RGBWW
if topic[CONF_WHITE_COMMAND_TOPIC] is not None:
supported_color_modes.add(COLOR_MODE_WHITE)
if topic[CONF_XY_COMMAND_TOPIC] is not None:
supported_color_modes.add(COLOR_MODE_XY)
self._color_mode = COLOR_MODE_XY
if len(supported_color_modes) > 1:
self._color_mode = COLOR_MODE_UNKNOWN
if not supported_color_modes:
if topic[CONF_BRIGHTNESS_COMMAND_TOPIC] is not None:
self._color_mode = COLOR_MODE_BRIGHTNESS
supported_color_modes.add(COLOR_MODE_BRIGHTNESS)
else:
self._color_mode = COLOR_MODE_ONOFF
supported_color_modes.add(COLOR_MODE_ONOFF)
# Validate the color_modes configuration
self._supported_color_modes = valid_supported_color_modes(supported_color_modes)
if topic[CONF_WHITE_VALUE_COMMAND_TOPIC] is not None:
self._legacy_mode = True
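# Illustrative configuration sketch (hypothetical topics) that the logic above
# maps to an RGB-capable light:
#
#   {
#       "name": "Kitchen strip",
#       "command_topic": "kitchen/strip/set",
#       "state_topic": "kitchen/strip/state",
#       "brightness_command_topic": "kitchen/strip/brightness/set",
#       "rgb_command_topic": "kitchen/strip/rgb/set",
#   }
#
# With these topics, supported color modes reduce to {"rgb"}, and brightness is
# handled optimistically because no brightness_state_topic is configured.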
def _is_optimistic(self, attribute):
"""Return True if the attribute is optimistically updated."""
return getattr(self, f"_optimistic_{attribute}")
def _prepare_subscribe_topics(self): # noqa: C901
"""(Re)Subscribe to topics."""
topics = {}
def add_topic(topic, msg_callback):
"""Add a topic."""
if self._topic[topic] is not None:
topics[topic] = {
"topic": self._topic[topic],
"msg_callback": msg_callback,
"qos": self._config[CONF_QOS],
"encoding": self._config[CONF_ENCODING] or None,
}
@callback
@log_messages(self.hass, self.entity_id)
def state_received(msg):
"""Handle new MQTT messages."""
payload = self._value_templates[CONF_STATE_VALUE_TEMPLATE](
msg.payload, None
)
if not payload:
_LOGGER.debug("Ignoring empty state message from '%s'", msg.topic)
return
if payload == self._payload["on"]:
self._state = True
elif payload == self._payload["off"]:
self._state = False
self.async_write_ha_state()
if self._topic[CONF_STATE_TOPIC] is not None:
topics[CONF_STATE_TOPIC] = {
"topic": self._topic[CONF_STATE_TOPIC],
"msg_callback": state_received,
"qos": self._config[CONF_QOS],
"encoding": self._config[CONF_ENCODING] or None,
}
@callback
@log_messages(self.hass, self.entity_id)
def brightness_received(msg):
"""Handle new MQTT messages for the brightness."""
payload = self._value_templates[CONF_BRIGHTNESS_VALUE_TEMPLATE](
msg.payload, None
)
if not payload:
_LOGGER.debug("Ignoring empty brightness message from '%s'", msg.topic)
return
device_value = float(payload)
percent_bright = device_value / self._config[CONF_BRIGHTNESS_SCALE]
self._brightness = percent_bright * 255
self.async_write_ha_state()
add_topic(CONF_BRIGHTNESS_STATE_TOPIC, brightness_received)
def _rgbx_received(msg, template, color_mode, convert_color):
"""Handle new MQTT messages for RGBW and RGBWW."""
payload = self._value_templates[template](msg.payload, None)
if not payload:
_LOGGER.debug(
"Ignoring empty %s message from '%s'", color_mode, msg.topic
)
return None
color = tuple(int(val) for val in payload.split(","))
if self._optimistic_color_mode:
self._color_mode = color_mode
if self._topic[CONF_BRIGHTNESS_STATE_TOPIC] is None:
rgb = convert_color(*color)
percent_bright = float(color_util.color_RGB_to_hsv(*rgb)[2]) / 100.0
self._brightness = percent_bright * 255
return color
@callback
@log_messages(self.hass, self.entity_id)
def rgb_received(msg):
"""Handle new MQTT messages for RGB."""
rgb = _rgbx_received(
msg, CONF_RGB_VALUE_TEMPLATE, COLOR_MODE_RGB, lambda *x: x
)
if not rgb:
return
if self._legacy_mode:
self._hs_color = color_util.color_RGB_to_hs(*rgb)
else:
self._rgb_color = rgb
self.async_write_ha_state()
add_topic(CONF_RGB_STATE_TOPIC, rgb_received)
@callback
@log_messages(self.hass, self.entity_id)
def rgbw_received(msg):
"""Handle new MQTT messages for RGBW."""
rgbw = _rgbx_received(
msg,
CONF_RGBW_VALUE_TEMPLATE,
COLOR_MODE_RGBW,
color_util.color_rgbw_to_rgb,
)
if not rgbw:
return
self._rgbw_color = rgbw
self.async_write_ha_state()
add_topic(CONF_RGBW_STATE_TOPIC, rgbw_received)
@callback
@log_messages(self.hass, self.entity_id)
def rgbww_received(msg):
"""Handle new MQTT messages for RGBWW."""
rgbww = _rgbx_received(
msg,
CONF_RGBWW_VALUE_TEMPLATE,
COLOR_MODE_RGBWW,
color_util.color_rgbww_to_rgb,
)
if not rgbww:
return
self._rgbww_color = rgbww
self.async_write_ha_state()
add_topic(CONF_RGBWW_STATE_TOPIC, rgbww_received)
@callback
@log_messages(self.hass, self.entity_id)
def color_mode_received(msg):
"""Handle new MQTT messages for color mode."""
payload = self._value_templates[CONF_COLOR_MODE_VALUE_TEMPLATE](
msg.payload, None
)
if not payload:
_LOGGER.debug("Ignoring empty color mode message from '%s'", msg.topic)
return
self._color_mode = payload
self.async_write_ha_state()
add_topic(CONF_COLOR_MODE_STATE_TOPIC, color_mode_received)
@callback
@log_messages(self.hass, self.entity_id)
def color_temp_received(msg):
"""Handle new MQTT messages for color temperature."""
payload = self._value_templates[CONF_COLOR_TEMP_VALUE_TEMPLATE](
msg.payload, None
)
if not payload:
_LOGGER.debug("Ignoring empty color temp message from '%s'", msg.topic)
return
if self._optimistic_color_mode:
self._color_mode = COLOR_MODE_COLOR_TEMP
self._color_temp = int(payload)
self.async_write_ha_state()
add_topic(CONF_COLOR_TEMP_STATE_TOPIC, color_temp_received)
@callback
@log_messages(self.hass, self.entity_id)
def effect_received(msg):
"""Handle new MQTT messages for effect."""
payload = self._value_templates[CONF_EFFECT_VALUE_TEMPLATE](
msg.payload, None
)
if not payload:
_LOGGER.debug("Ignoring empty effect message from '%s'", msg.topic)
return
self._effect = payload
self.async_write_ha_state()
add_topic(CONF_EFFECT_STATE_TOPIC, effect_received)
@callback
@log_messages(self.hass, self.entity_id)
def hs_received(msg):
"""Handle new MQTT messages for hs color."""
payload = self._value_templates[CONF_HS_VALUE_TEMPLATE](msg.payload, None)
if not payload:
_LOGGER.debug("Ignoring empty hs message from '%s'", msg.topic)
return
try:
hs_color = tuple(float(val) for val in payload.split(",", 2))
if self._optimistic_color_mode:
self._color_mode = COLOR_MODE_HS
self._hs_color = hs_color
self.async_write_ha_state()
except ValueError:
_LOGGER.debug("Failed to parse hs state update: '%s'", payload)
add_topic(CONF_HS_STATE_TOPIC, hs_received)
@callback
@log_messages(self.hass, self.entity_id)
def white_value_received(msg):
"""Handle new MQTT messages for white value."""
payload = self._value_templates[CONF_WHITE_VALUE_TEMPLATE](
msg.payload, None
)
if not payload:
_LOGGER.debug("Ignoring empty white value message from '%s'", msg.topic)
return
device_value = float(payload)
percent_white = device_value / self._config[CONF_WHITE_VALUE_SCALE]
self._white_value = percent_white * 255
self.async_write_ha_state()
add_topic(CONF_WHITE_VALUE_STATE_TOPIC, white_value_received)
@callback
@log_messages(self.hass, self.entity_id)
def xy_received(msg):
"""Handle new MQTT messages for xy color."""
payload = self._value_templates[CONF_XY_VALUE_TEMPLATE](msg.payload, None)
if not payload:
_LOGGER.debug("Ignoring empty xy-color message from '%s'", msg.topic)
return
xy_color = tuple(float(val) for val in payload.split(","))
if self._optimistic_color_mode:
self._color_mode = COLOR_MODE_XY
if self._legacy_mode:
self._hs_color = color_util.color_xy_to_hs(*xy_color)
else:
self._xy_color = xy_color
self.async_write_ha_state()
add_topic(CONF_XY_STATE_TOPIC, xy_received)
self._sub_state = subscription.async_prepare_subscribe_topics(
self.hass, self._sub_state, topics
)
async def _subscribe_topics(self):
"""(Re)Subscribe to topics."""
await subscription.async_subscribe_topics(self.hass, self._sub_state)
last_state = await self.async_get_last_state()
def restore_state(attribute, condition_attribute=None):
"""Restore a state attribute."""
if condition_attribute is None:
condition_attribute = attribute
optimistic = self._is_optimistic(condition_attribute)
if optimistic and last_state and last_state.attributes.get(attribute):
setattr(self, f"_{attribute}", last_state.attributes[attribute])
if self._topic[CONF_STATE_TOPIC] is None and self._optimistic and last_state:
self._state = last_state.state == STATE_ON
restore_state(ATTR_BRIGHTNESS)
restore_state(ATTR_RGB_COLOR)
restore_state(ATTR_HS_COLOR, ATTR_RGB_COLOR)
restore_state(ATTR_RGBW_COLOR)
restore_state(ATTR_RGBWW_COLOR)
restore_state(ATTR_COLOR_MODE)
restore_state(ATTR_COLOR_TEMP)
restore_state(ATTR_EFFECT)
restore_state(ATTR_HS_COLOR)
restore_state(ATTR_WHITE_VALUE)
restore_state(ATTR_XY_COLOR)
restore_state(ATTR_HS_COLOR, ATTR_XY_COLOR)
@property
def brightness(self):
"""Return the brightness of this light between 0..255."""
if brightness := self._brightness:
brightness = min(round(brightness), 255)
return brightness
@property
def color_mode(self):
"""Return current color mode."""
if self._legacy_mode:
return None
return self._color_mode
@property
def hs_color(self):
"""Return the hs color value."""
if not self._legacy_mode:
return self._hs_color
        # Legacy mode: hs_color is only reported while white_value == 0
if self._white_value:
return None
return self._hs_color
@property
def rgb_color(self):
"""Return the rgb color value."""
return self._rgb_color
@property
def rgbw_color(self):
"""Return the rgbw color value."""
return self._rgbw_color
@property
def rgbww_color(self):
"""Return the rgbww color value."""
return self._rgbww_color
@property
def xy_color(self):
"""Return the xy color value."""
return self._xy_color
@property
def color_temp(self):
"""Return the color temperature in mired."""
if not self._legacy_mode:
return self._color_temp
# Legacy mode, gate color_temp with white_value > 0
supports_color = (
self._topic[CONF_RGB_COMMAND_TOPIC]
or self._topic[CONF_HS_COMMAND_TOPIC]
or self._topic[CONF_XY_COMMAND_TOPIC]
)
if self._white_value or not supports_color:
return self._color_temp
return None
@property
def min_mireds(self):
"""Return the coldest color_temp that this light supports."""
return self._config.get(CONF_MIN_MIREDS, super().min_mireds)
@property
def max_mireds(self):
"""Return the warmest color_temp that this light supports."""
return self._config.get(CONF_MAX_MIREDS, super().max_mireds)
@property
def white_value(self):
"""Return the white property."""
if white_value := self._white_value:
return min(round(white_value), 255)
return None
@property
def is_on(self):
"""Return true if device is on."""
return self._state
@property
def assumed_state(self):
"""Return true if we do optimistic updates."""
return self._optimistic
@property
def effect_list(self):
"""Return the list of supported effects."""
return self._config.get(CONF_EFFECT_LIST)
@property
def effect(self):
"""Return the current effect."""
return self._effect
@property
def supported_color_modes(self):
"""Flag supported color modes."""
if self._legacy_mode:
return None
return self._supported_color_modes
@property
def supported_features(self):
"""Flag supported features."""
supported_features = 0
supported_features |= (
self._topic[CONF_EFFECT_COMMAND_TOPIC] is not None and SUPPORT_EFFECT
)
if not self._legacy_mode:
return supported_features
# Legacy mode
supported_features |= self._topic[CONF_RGB_COMMAND_TOPIC] is not None and (
SUPPORT_COLOR | SUPPORT_BRIGHTNESS
)
supported_features |= (
self._topic[CONF_BRIGHTNESS_COMMAND_TOPIC] is not None
and SUPPORT_BRIGHTNESS
)
supported_features |= (
self._topic[CONF_COLOR_TEMP_COMMAND_TOPIC] is not None
and SUPPORT_COLOR_TEMP
)
supported_features |= (
self._topic[CONF_HS_COMMAND_TOPIC] is not None and SUPPORT_COLOR
)
supported_features |= (
self._topic[CONF_WHITE_VALUE_COMMAND_TOPIC] is not None
and SUPPORT_WHITE_VALUE
)
supported_features |= (
self._topic[CONF_XY_COMMAND_TOPIC] is not None and SUPPORT_COLOR
)
return supported_features
async def async_turn_on(self, **kwargs): # noqa: C901
"""Turn the device on.
This method is a coroutine.
"""
should_update = False
on_command_type = self._config[CONF_ON_COMMAND_TYPE]
async def publish(topic, payload):
"""Publish an MQTT message."""
await mqtt.async_publish(
self.hass,
self._topic[topic],
payload,
self._config[CONF_QOS],
self._config[CONF_RETAIN],
self._config[CONF_ENCODING],
)
def scale_rgbx(color, brightness=None):
"""Scale RGBx for brightness."""
if brightness is None:
# If there's a brightness topic set, we don't want to scale the RGBx
# values given using the brightness.
if self._topic[CONF_BRIGHTNESS_COMMAND_TOPIC] is not None:
brightness = 255
else:
brightness = kwargs.get(
ATTR_BRIGHTNESS, self._brightness if self._brightness else 255
)
return tuple(int(channel * brightness / 255) for channel in color)
def render_rgbx(color, template, color_mode):
"""Render RGBx payload."""
if tpl := self._command_templates[template]:
keys = ["red", "green", "blue"]
if color_mode == COLOR_MODE_RGBW:
keys.append("white")
elif color_mode == COLOR_MODE_RGBWW:
keys.extend(["cold_white", "warm_white"])
rgb_color_str = tpl(variables=zip(keys, color))
else:
rgb_color_str = ",".join(str(channel) for channel in color)
return rgb_color_str
def set_optimistic(attribute, value, color_mode=None, condition_attribute=None):
"""Optimistically update a state attribute."""
if condition_attribute is None:
condition_attribute = attribute
if not self._is_optimistic(condition_attribute):
return False
if color_mode and self._optimistic_color_mode:
self._color_mode = color_mode
setattr(self, f"_{attribute}", value)
return True
if on_command_type == "first":
await publish(CONF_COMMAND_TOPIC, self._payload["on"])
should_update = True
# If brightness is being used instead of an on command, make sure
# there is a brightness input. Either set the brightness to our
# saved value or the maximum value if this is the first call
elif (
on_command_type == "brightness"
and ATTR_BRIGHTNESS not in kwargs
and ATTR_WHITE not in kwargs
):
kwargs[ATTR_BRIGHTNESS] = self._brightness if self._brightness else 255
hs_color = kwargs.get(ATTR_HS_COLOR)
if (
hs_color
and self._topic[CONF_RGB_COMMAND_TOPIC] is not None
and self._legacy_mode
):
# Legacy mode: Convert HS to RGB
rgb = scale_rgbx(color_util.color_hsv_to_RGB(*hs_color, 100))
rgb_s = render_rgbx(rgb, CONF_RGB_COMMAND_TEMPLATE, COLOR_MODE_RGB)
await publish(CONF_RGB_COMMAND_TOPIC, rgb_s)
should_update |= set_optimistic(
ATTR_HS_COLOR, hs_color, condition_attribute=ATTR_RGB_COLOR
)
if hs_color and self._topic[CONF_HS_COMMAND_TOPIC] is not None:
await publish(CONF_HS_COMMAND_TOPIC, f"{hs_color[0]},{hs_color[1]}")
should_update |= set_optimistic(ATTR_HS_COLOR, hs_color, COLOR_MODE_HS)
if (
hs_color
and self._topic[CONF_XY_COMMAND_TOPIC] is not None
and self._legacy_mode
):
# Legacy mode: Convert HS to XY
xy_color = color_util.color_hs_to_xy(*hs_color)
await publish(CONF_XY_COMMAND_TOPIC, f"{xy_color[0]},{xy_color[1]}")
should_update |= set_optimistic(
ATTR_HS_COLOR, hs_color, condition_attribute=ATTR_XY_COLOR
)
if (
(rgb := kwargs.get(ATTR_RGB_COLOR))
and self._topic[CONF_RGB_COMMAND_TOPIC] is not None
and not self._legacy_mode
):
scaled = scale_rgbx(rgb)
rgb_s = render_rgbx(scaled, CONF_RGB_COMMAND_TEMPLATE, COLOR_MODE_RGB)
await publish(CONF_RGB_COMMAND_TOPIC, rgb_s)
should_update |= set_optimistic(ATTR_RGB_COLOR, rgb, COLOR_MODE_RGB)
if (
(rgbw := kwargs.get(ATTR_RGBW_COLOR))
and self._topic[CONF_RGBW_COMMAND_TOPIC] is not None
and not self._legacy_mode
):
scaled = scale_rgbx(rgbw)
rgbw_s = render_rgbx(scaled, CONF_RGBW_COMMAND_TEMPLATE, COLOR_MODE_RGBW)
await publish(CONF_RGBW_COMMAND_TOPIC, rgbw_s)
should_update |= set_optimistic(ATTR_RGBW_COLOR, rgbw, COLOR_MODE_RGBW)
if (
(rgbww := kwargs.get(ATTR_RGBWW_COLOR))
and self._topic[CONF_RGBWW_COMMAND_TOPIC] is not None
and not self._legacy_mode
):
scaled = scale_rgbx(rgbww)
rgbww_s = render_rgbx(scaled, CONF_RGBWW_COMMAND_TEMPLATE, COLOR_MODE_RGBWW)
await publish(CONF_RGBWW_COMMAND_TOPIC, rgbww_s)
should_update |= set_optimistic(ATTR_RGBWW_COLOR, rgbww, COLOR_MODE_RGBWW)
if (
(xy_color := kwargs.get(ATTR_XY_COLOR))
and self._topic[CONF_XY_COMMAND_TOPIC] is not None
and not self._legacy_mode
):
await publish(CONF_XY_COMMAND_TOPIC, f"{xy_color[0]},{xy_color[1]}")
should_update |= set_optimistic(ATTR_XY_COLOR, xy_color, COLOR_MODE_XY)
if (
ATTR_BRIGHTNESS in kwargs
and self._topic[CONF_BRIGHTNESS_COMMAND_TOPIC] is not None
):
brightness_normalized = kwargs[ATTR_BRIGHTNESS] / 255
brightness_scale = self._config[CONF_BRIGHTNESS_SCALE]
device_brightness = min(
round(brightness_normalized * brightness_scale), brightness_scale
)
# Make sure the brightness is not rounded down to 0
device_brightness = max(device_brightness, 1)
await publish(CONF_BRIGHTNESS_COMMAND_TOPIC, device_brightness)
should_update |= set_optimistic(ATTR_BRIGHTNESS, kwargs[ATTR_BRIGHTNESS])
elif (
ATTR_BRIGHTNESS in kwargs
and ATTR_HS_COLOR not in kwargs
and self._topic[CONF_RGB_COMMAND_TOPIC] is not None
and self._legacy_mode
):
# Legacy mode
hs_color = self._hs_color if self._hs_color is not None else (0, 0)
brightness = kwargs[ATTR_BRIGHTNESS]
rgb = scale_rgbx(color_util.color_hsv_to_RGB(*hs_color, 100), brightness)
rgb_s = render_rgbx(rgb, CONF_RGB_COMMAND_TEMPLATE, COLOR_MODE_RGB)
await publish(CONF_RGB_COMMAND_TOPIC, rgb_s)
should_update |= set_optimistic(ATTR_BRIGHTNESS, kwargs[ATTR_BRIGHTNESS])
elif (
ATTR_BRIGHTNESS in kwargs
and ATTR_RGB_COLOR not in kwargs
and self._topic[CONF_RGB_COMMAND_TOPIC] is not None
and not self._legacy_mode
):
rgb_color = self._rgb_color if self._rgb_color is not None else (255,) * 3
rgb = scale_rgbx(rgb_color, kwargs[ATTR_BRIGHTNESS])
rgb_s = render_rgbx(rgb, CONF_RGB_COMMAND_TEMPLATE, COLOR_MODE_RGB)
await publish(CONF_RGB_COMMAND_TOPIC, rgb_s)
should_update |= set_optimistic(ATTR_BRIGHTNESS, kwargs[ATTR_BRIGHTNESS])
elif (
ATTR_BRIGHTNESS in kwargs
and ATTR_RGBW_COLOR not in kwargs
and self._topic[CONF_RGBW_COMMAND_TOPIC] is not None
and not self._legacy_mode
):
rgbw_color = (
self._rgbw_color if self._rgbw_color is not None else (255,) * 4
)
rgbw = scale_rgbx(rgbw_color, kwargs[ATTR_BRIGHTNESS])
rgbw_s = render_rgbx(rgbw, CONF_RGBW_COMMAND_TEMPLATE, COLOR_MODE_RGBW)
await publish(CONF_RGBW_COMMAND_TOPIC, rgbw_s)
should_update |= set_optimistic(ATTR_BRIGHTNESS, kwargs[ATTR_BRIGHTNESS])
elif (
ATTR_BRIGHTNESS in kwargs
and ATTR_RGBWW_COLOR not in kwargs
and self._topic[CONF_RGBWW_COMMAND_TOPIC] is not None
and not self._legacy_mode
):
rgbww_color = (
self._rgbww_color if self._rgbww_color is not None else (255,) * 5
)
rgbww = scale_rgbx(rgbww_color, kwargs[ATTR_BRIGHTNESS])
rgbww_s = render_rgbx(rgbww, CONF_RGBWW_COMMAND_TEMPLATE, COLOR_MODE_RGBWW)
await publish(CONF_RGBWW_COMMAND_TOPIC, rgbww_s)
should_update |= set_optimistic(ATTR_BRIGHTNESS, kwargs[ATTR_BRIGHTNESS])
if (
ATTR_COLOR_TEMP in kwargs
and self._topic[CONF_COLOR_TEMP_COMMAND_TOPIC] is not None
):
color_temp = int(kwargs[ATTR_COLOR_TEMP])
if tpl := self._command_templates[CONF_COLOR_TEMP_COMMAND_TEMPLATE]:
color_temp = tpl(variables={"value": color_temp})
await publish(CONF_COLOR_TEMP_COMMAND_TOPIC, color_temp)
should_update |= set_optimistic(
ATTR_COLOR_TEMP, kwargs[ATTR_COLOR_TEMP], COLOR_MODE_COLOR_TEMP
)
if ATTR_EFFECT in kwargs and self._topic[CONF_EFFECT_COMMAND_TOPIC] is not None:
effect = kwargs[ATTR_EFFECT]
if effect in self._config.get(CONF_EFFECT_LIST):
await publish(CONF_EFFECT_COMMAND_TOPIC, effect)
should_update |= set_optimistic(ATTR_EFFECT, effect)
if ATTR_WHITE in kwargs and self._topic[CONF_WHITE_COMMAND_TOPIC] is not None:
percent_white = float(kwargs[ATTR_WHITE]) / 255
white_scale = self._config[CONF_WHITE_SCALE]
device_white_value = min(round(percent_white * white_scale), white_scale)
await publish(CONF_WHITE_COMMAND_TOPIC, device_white_value)
should_update |= set_optimistic(
ATTR_BRIGHTNESS,
kwargs[ATTR_WHITE],
COLOR_MODE_WHITE,
)
if (
ATTR_WHITE_VALUE in kwargs
and self._topic[CONF_WHITE_VALUE_COMMAND_TOPIC] is not None
):
percent_white = float(kwargs[ATTR_WHITE_VALUE]) / 255
white_scale = self._config[CONF_WHITE_VALUE_SCALE]
device_white_value = min(round(percent_white * white_scale), white_scale)
await publish(CONF_WHITE_VALUE_COMMAND_TOPIC, device_white_value)
should_update |= set_optimistic(ATTR_WHITE_VALUE, kwargs[ATTR_WHITE_VALUE])
if on_command_type == "last":
await publish(CONF_COMMAND_TOPIC, self._payload["on"])
should_update = True
if self._optimistic:
# Optimistically assume that the light has changed state.
self._state = True
should_update = True
if should_update:
self.async_write_ha_state()
async def async_turn_off(self, **kwargs):
"""Turn the device off.
This method is a coroutine.
"""
await mqtt.async_publish(
self.hass,
self._topic[CONF_COMMAND_TOPIC],
self._payload["off"],
self._config[CONF_QOS],
self._config[CONF_RETAIN],
self._config[CONF_ENCODING],
)
if self._optimistic:
# Optimistically assume that the light has changed state.
self._state = False
self.async_write_ha_state()
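# A minimal, self-contained sketch of the brightness scaling used above: Home
# Assistant's 0..255 brightness is mapped onto the device's configured scale on
# the way out and back to 0..255 on the way in. The helper names below are made
# up for illustration and are not part of this module.
def _ha_brightness_to_device(brightness: int, scale: int) -> int:
    """Scale a 0..255 brightness to the device scale, never rounding down to 0."""
    return max(min(round(brightness / 255 * scale), scale), 1)

def _device_brightness_to_ha(device_value: float, scale: int) -> int:
    """Scale a device brightness value back to Home Assistant's 0..255 range."""
    return round(device_value / scale * 255)

if __name__ == "__main__":
    # e.g. with brightness_scale: 100, HA brightness 128 maps to device value 50
    assert _ha_brightness_to_device(128, 100) == 50
    assert _device_brightness_to_ha(50, 100) == 128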
| 38.99726
| 88
| 0.642171
|
82b65ecfcea1ac640d719ab52e4b930e160afadd
| 855
|
py
|
Python
|
interfaces/python/tests/conftest.py
|
iotile/baBLE-linux
|
faedca2c70b7fe91ea8ae0c3d8aff6bf843bd9db
|
[
"MIT"
] | 13
|
2018-07-04T16:35:37.000Z
|
2021-03-03T10:41:07.000Z
|
interfaces/python/tests/conftest.py
|
iotile/baBLE
|
faedca2c70b7fe91ea8ae0c3d8aff6bf843bd9db
|
[
"MIT"
] | 11
|
2018-06-01T20:32:32.000Z
|
2019-01-21T17:03:47.000Z
|
interfaces/python/tests/conftest.py
|
iotile/baBLE-linux
|
faedca2c70b7fe91ea8ae0c3d8aff6bf843bd9db
|
[
"MIT"
] | null | null | null |
# Pytest configuration file: will be run before tests to define fixtures
import pytest
import subprocess
import uuid
from tests.mocks.mock_subprocess import MockSubprocess
@pytest.fixture(scope='function')
def bridge_subprocess(monkeypatch):
""" Mock the subprocess.Popen() function to run a class representing the bable bridge instead. """
mocked_subprocess = MockSubprocess()
def on_popen(cmd, *args, **kwargs):
mocked_subprocess.start()
return mocked_subprocess
monkeypatch.setattr(subprocess, 'Popen', on_popen)
return mocked_subprocess
@pytest.fixture(scope='function')
def mock_uuid(monkeypatch, request):
""" Mock the uuid.uuid4() function to return a known and constant value (given as parameter) """
value = request.param
monkeypatch.setattr(uuid, 'uuid4', lambda: value)
return value
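# A hedged sketch of how a test could consume the mock_uuid fixture above: the
# fixture reads request.param, so the test has to be parametrized with
# indirect=True. The test name and the fixed UUID string are made up for
# illustration (pytest and uuid are already imported at the top of this file).
@pytest.mark.parametrize('mock_uuid', ['00000000-0000-0000-0000-000000000000'], indirect=True)
def test_uuid4_returns_parametrized_value(mock_uuid):
    assert uuid.uuid4() == mock_uuid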
| 28.5
| 102
| 0.740351
|
ca9bfab914fde2d498e2ebb051ad7c1b281e433f
| 3,409
|
py
|
Python
|
fizzsite/fizzsite/settings.py
|
hurhurhurt/Fizzle
|
53d780d501966f4a4010b0e395bd9f87bf67a489
|
[
"MIT"
] | null | null | null |
fizzsite/fizzsite/settings.py
|
hurhurhurt/Fizzle
|
53d780d501966f4a4010b0e395bd9f87bf67a489
|
[
"MIT"
] | null | null | null |
fizzsite/fizzsite/settings.py
|
hurhurhurt/Fizzle
|
53d780d501966f4a4010b0e395bd9f87bf67a489
|
[
"MIT"
] | null | null | null |
"""
Django settings for fizzsite project.
Generated by 'django-admin startproject' using Django 3.0.3.
For more information on this file, see
https://docs.djangoproject.com/en/3.0/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/3.0/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/3.0/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '^2hypu+6col-9ck4if37g2ynafa_gpy^y=dyrl5^z3k_cj1kgi'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'crispy_forms',
'users',
'blog.apps.BlogConfig',
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'accounts.apps.AccountsConfig'
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'fizzsite.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'DIRS': [os.path.join(BASE_DIR, 'templates')],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'fizzsite.wsgi.application'
# Database
# https://docs.djangoproject.com/en/3.0/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/3.0/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/3.0/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/3.0/howto/static-files/
STATIC_URL = '/static/'
CRISPY_TEMPLATE_PACK = 'bootstrap4'
MEDIA_ROOT = os.path.join(BASE_DIR, 'media')
MEDIA_URL = '/media/'
LOGIN_REDIRECT_URL = 'Fizzle-Home'
LOGOUT_REDIRECT_URL = 'Fizzle-Home'
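# With MEDIA_URL / MEDIA_ROOT defined above, uploaded files are usually served in
# development by extending the project urls.py with Django's static() helper. A
# hedged sketch (the existing routes are elided; this is not the project's actual
# fizzsite/urls.py):
#
#     from django.conf import settings
#     from django.conf.urls.static import static
#
#     urlpatterns = [
#         # ... existing routes ...
#     ] + static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)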
| 25.825758
| 91
| 0.699032
|
73a8097729953b5856d2317bc0969261ecc7c3c4
| 488
|
py
|
Python
|
backend/projectx/users/migrations/0006_user_anonymous.py
|
mmcardle/projectx
|
058935273834c683de8db8bb2d720b1ddcd433e8
|
[
"MIT"
] | 4
|
2021-04-22T08:55:13.000Z
|
2022-03-23T12:58:43.000Z
|
backend/projectx/users/migrations/0006_user_anonymous.py
|
mmcardle/projectx
|
058935273834c683de8db8bb2d720b1ddcd433e8
|
[
"MIT"
] | 3
|
2021-05-12T11:05:58.000Z
|
2021-09-12T16:40:31.000Z
|
backend/projectx/users/migrations/0006_user_anonymous.py
|
mmcardle/projectx
|
058935273834c683de8db8bb2d720b1ddcd433e8
|
[
"MIT"
] | 1
|
2021-04-18T08:33:02.000Z
|
2021-04-18T08:33:02.000Z
|
# Generated by Django 3.2.7 on 2021-11-09 22:46
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("users", "0005_alter_user_id"),
]
operations = [
migrations.AddField(
model_name="user",
name="anonymous",
field=models.BooleanField(
default=False, help_text="Whether the user is anonymous.", verbose_name="anonymous status"
),
),
]
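# This AddField operation corresponds to a model field roughly like the sketch
# below; the surrounding User model is inferred from the migration, not copied
# from the project's users/models.py. Applying it is the usual
# `python manage.py migrate users` step.
#
#     anonymous = models.BooleanField(
#         "anonymous status",
#         default=False,
#         help_text="Whether the user is anonymous.",
#     )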
| 23.238095
| 106
| 0.588115
|
378b5cbe013b7e8d7287b94974b44c782b853d22
| 7,975
|
py
|
Python
|
detect/sort/sort.py
|
bdkiran/openObserver
|
2c9b71f2e7fa021887d7fa760920dea06e67ba72
|
[
"MIT"
] | null | null | null |
detect/sort/sort.py
|
bdkiran/openObserver
|
2c9b71f2e7fa021887d7fa760920dea06e67ba72
|
[
"MIT"
] | null | null | null |
detect/sort/sort.py
|
bdkiran/openObserver
|
2c9b71f2e7fa021887d7fa760920dea06e67ba72
|
[
"MIT"
] | null | null | null |
"""
SORT: A Simple, Online and Realtime Tracker
Copyright (C) 2016-2020 Alex Bewley alex@bewley.ai
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
from __future__ import print_function
import numpy as np
from .kalman_filter import KalmanFilter
from scipy.optimize import linear_sum_assignment
np.random.seed(0)
def linear_assignment(cost_matrix):
x, y = linear_sum_assignment(cost_matrix)
return np.array(list(zip(x, y)))
def iou_batch(bb_test, bb_gt):
"""
From SORT: Computes IOU between two bboxes in the form [x1,y1,x2,y2]
"""
bb_gt = np.expand_dims(bb_gt, 0)
bb_test = np.expand_dims(bb_test, 1)
xx1 = np.maximum(bb_test[..., 0], bb_gt[..., 0])
yy1 = np.maximum(bb_test[..., 1], bb_gt[..., 1])
xx2 = np.minimum(bb_test[..., 2], bb_gt[..., 2])
yy2 = np.minimum(bb_test[..., 3], bb_gt[..., 3])
w = np.maximum(0., xx2 - xx1)
h = np.maximum(0., yy2 - yy1)
wh = w * h
o = wh / ((bb_test[..., 2] - bb_test[..., 0]) * (bb_test[..., 3] - bb_test[..., 1])
+ (bb_gt[..., 2] - bb_gt[..., 0]) * (bb_gt[..., 3] - bb_gt[..., 1]) - wh)
return(o)
def convert_bbox_to_z(bbox):
"""
Takes a bounding box in the form [x1,y1,x2,y2] and returns z in the form
[x,y,s,r] where x,y is the centre of the box and s is the scale/area and r is
the aspect ratio
"""
w = bbox[2] - bbox[0]
h = bbox[3] - bbox[1]
x = bbox[0] + w/2.
y = bbox[1] + h/2.
s = w * h #scale is just area
r = w / float(h)
return np.array([x, y, s, r]).reshape((4, 1))
def convert_x_to_bbox(x,score=None):
"""
Takes a bounding box in the centre form [x,y,s,r] and returns it in the form
[x1,y1,x2,y2] where x1,y1 is the top left and x2,y2 is the bottom right
"""
w = np.sqrt(x[2] * x[3])
h = x[2] / w
  if score is None:
return np.array([x[0]-w/2.,x[1]-h/2.,x[0]+w/2.,x[1]+h/2.]).reshape((1,4))
else:
return np.array([x[0]-w/2.,x[1]-h/2.,x[0]+w/2.,x[1]+h/2.,score]).reshape((1,5))
class KalmanBoxTracker(object):
"""
This class represents the internal state of individual tracked objects observed as bbox.
"""
count = 0
def __init__(self,bbox):
"""
Initialises a tracker using initial bounding box.
"""
#define constant velocity model
self.kf = KalmanFilter(dim_x=7, dim_z=4)
self.kf.F = np.array([[1,0,0,0,1,0,0],[0,1,0,0,0,1,0],[0,0,1,0,0,0,1],[0,0,0,1,0,0,0], [0,0,0,0,1,0,0],[0,0,0,0,0,1,0],[0,0,0,0,0,0,1]])
self.kf.H = np.array([[1,0,0,0,0,0,0],[0,1,0,0,0,0,0],[0,0,1,0,0,0,0],[0,0,0,1,0,0,0]])
self.kf.R[2:,2:] *= 10.
self.kf.P[4:,4:] *= 1000. #give high uncertainty to the unobservable initial velocities
self.kf.P *= 10.
self.kf.Q[-1,-1] *= 0.01
self.kf.Q[4:,4:] *= 0.01
self.kf.x[:4] = convert_bbox_to_z(bbox)
self.time_since_update = 0
self.id = KalmanBoxTracker.count
KalmanBoxTracker.count += 1
self.history = []
self.hits = 0
self.hit_streak = 0
self.age = 0
def update(self,bbox):
"""
Updates the state vector with observed bbox.
"""
self.time_since_update = 0
self.history = []
self.hits += 1
self.hit_streak += 1
self.kf.update(convert_bbox_to_z(bbox))
def predict(self):
"""
Advances the state vector and returns the predicted bounding box estimate.
"""
if((self.kf.x[6]+self.kf.x[2])<=0):
self.kf.x[6] *= 0.0
self.kf.predict()
self.age += 1
if(self.time_since_update>0):
self.hit_streak = 0
self.time_since_update += 1
self.history.append(convert_x_to_bbox(self.kf.x))
return self.history[-1]
def get_state(self):
"""
Returns the current bounding box estimate.
"""
return convert_x_to_bbox(self.kf.x)
def associate_detections_to_trackers(detections,trackers,iou_threshold = 0.3):
"""
Assigns detections to tracked object (both represented as bounding boxes)
Returns 3 lists of matches, unmatched_detections and unmatched_trackers
"""
if(len(trackers)==0):
return np.empty((0,2),dtype=int), np.arange(len(detections)), np.empty((0,5),dtype=int)
iou_matrix = iou_batch(detections, trackers)
if min(iou_matrix.shape) > 0:
a = (iou_matrix > iou_threshold).astype(np.int32)
if a.sum(1).max() == 1 and a.sum(0).max() == 1:
matched_indices = np.stack(np.where(a), axis=1)
else:
matched_indices = linear_assignment(-iou_matrix)
else:
matched_indices = np.empty(shape=(0,2))
unmatched_detections = []
for d, det in enumerate(detections):
if(d not in matched_indices[:,0]):
unmatched_detections.append(d)
unmatched_trackers = []
for t, trk in enumerate(trackers):
if(t not in matched_indices[:,1]):
unmatched_trackers.append(t)
#filter out matched with low IOU
matches = []
for m in matched_indices:
if(iou_matrix[m[0], m[1]]<iou_threshold):
unmatched_detections.append(m[0])
unmatched_trackers.append(m[1])
else:
matches.append(m.reshape(1,2))
if(len(matches)==0):
matches = np.empty((0,2),dtype=int)
else:
matches = np.concatenate(matches,axis=0)
return matches, np.array(unmatched_detections), np.array(unmatched_trackers)
class Sort(object):
def __init__(self, max_age=1, min_hits=3, iou_threshold=0.3):
"""
Sets key parameters for SORT
"""
self.max_age = max_age
self.min_hits = min_hits
self.iou_threshold = iou_threshold
self.trackers = []
self.frame_count = 0
def update(self, dets=np.empty((0, 5))):
"""
Params:
dets - a numpy array of detections in the format [[x1,y1,x2,y2,score],[x1,y1,x2,y2,score],...]
Requires: this method must be called once for each frame even with empty detections (use np.empty((0, 5)) for frames without detections).
    Returns a similar array, where the last column is the object ID.
NOTE: The number of objects returned may differ from the number of detections provided.
"""
self.frame_count += 1
# get predicted locations from existing trackers.
trks = np.zeros((len(self.trackers), 5))
to_del = []
ret = []
for t, trk in enumerate(trks):
pos = self.trackers[t].predict()[0]
trk[:] = [pos[0], pos[1], pos[2], pos[3], 0]
if np.any(np.isnan(pos)):
to_del.append(t)
trks = np.ma.compress_rows(np.ma.masked_invalid(trks))
for t in reversed(to_del):
self.trackers.pop(t)
matched, unmatched_dets, unmatched_trks = associate_detections_to_trackers(dets,trks, self.iou_threshold)
# update matched trackers with assigned detections
for m in matched:
self.trackers[m[1]].update(dets[m[0], :])
# create and initialise new trackers for unmatched detections
for i in unmatched_dets:
trk = KalmanBoxTracker(dets[i,:])
self.trackers.append(trk)
i = len(self.trackers)
for trk in reversed(self.trackers):
d = trk.get_state()[0]
if (trk.time_since_update < 1) and (trk.hit_streak >= self.min_hits or self.frame_count <= self.min_hits):
ret.append(np.concatenate((d,[trk.id+1])).reshape(1,-1)) # +1 as MOT benchmark requires positive
i -= 1
# remove dead tracklet
if(trk.time_since_update > self.max_age):
self.trackers.pop(i)
if(len(ret)>0):
return np.concatenate(ret)
return np.empty((0,5))
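# A minimal usage sketch of the tracker: detections are rows of
# [x1, y1, x2, y2, score], update() is called once per frame (with an empty
# array on frames without detections), and each returned row is
# [x1, y1, x2, y2, track_id]. The coordinates below are made up.
if __name__ == "__main__":
    tracker = Sort(max_age=1, min_hits=1, iou_threshold=0.3)
    frames = [
        np.array([[10., 10., 50., 50., 0.9]]),  # frame 1: one detection
        np.array([[12., 11., 52., 51., 0.8]]),  # frame 2: same object, shifted
        np.empty((0, 5)),                       # frame 3: no detections
    ]
    for dets in frames:
        print(tracker.update(dets))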
| 33.792373
| 141
| 0.633981
|
3a421696c12248fe7fb454763c8cbf1bb29e2b65
| 15,814
|
py
|
Python
|
avalanche/benchmarks/utils/data_loader.py
|
TomVeniat/avalanche
|
6e89f9945cf40c14471406a4cf4830a8d95c5705
|
[
"MIT"
] | 810
|
2018-10-08T15:49:05.000Z
|
2022-03-31T15:28:09.000Z
|
avalanche/benchmarks/utils/data_loader.py
|
TomVeniat/avalanche
|
6e89f9945cf40c14471406a4cf4830a8d95c5705
|
[
"MIT"
] | 477
|
2021-03-01T17:50:51.000Z
|
2022-03-31T14:51:23.000Z
|
avalanche/benchmarks/utils/data_loader.py
|
TomVeniat/avalanche
|
6e89f9945cf40c14471406a4cf4830a8d95c5705
|
[
"MIT"
] | 147
|
2018-10-08T15:49:18.000Z
|
2022-03-31T04:08:45.000Z
|
################################################################################
# Copyright (c) 2021 ContinualAI. #
# Copyrights licensed under the MIT License. #
# See the accompanying LICENSE file for terms. #
# #
# Date: 01-12-2020 #
# Author(s): Antonio Carta #
# E-mail: contact@continualai.org #
# Website: avalanche.continualai.org #
################################################################################
"""
Avalanche supports data loading using pytorch's dataloaders.
This module provides custom dataloaders for continual learning such as
support for balanced dataloading between different tasks or balancing
between the current data and the replay memory.
"""
from itertools import chain
from typing import Dict, Sequence
import torch
from torch.utils.data import RandomSampler
from torch.utils.data.dataloader import DataLoader
from avalanche.benchmarks.utils import AvalancheDataset
def _default_collate_mbatches_fn(mbatches):
""" Combines multiple mini-batches together.
Concatenates each tensor in the mini-batches along dimension 0 (usually this
is the batch size).
:param mbatches: sequence of mini-batches.
:return: a single mini-batch
"""
batch = []
for i in range(len(mbatches[0])):
t = torch.cat([el[i] for el in mbatches], dim=0)
batch.append(t)
return batch
class TaskBalancedDataLoader:
""" Task-balanced data loader for Avalanche's datasets."""
def __init__(self, data: AvalancheDataset,
oversample_small_tasks: bool = False,
collate_mbatches=_default_collate_mbatches_fn,
**kwargs):
""" Task-balanced data loader for Avalanche's datasets.
The iterator returns a mini-batch balanced across each task, which
makes it useful when training in multi-task scenarios whenever data is
highly unbalanced.
If `oversample_small_tasks == True` smaller tasks are
oversampled to match the largest task. Otherwise, once the data for a
specific task is terminated, that task will not be present in the
subsequent mini-batches.
:param data: an instance of `AvalancheDataset`.
:param oversample_small_tasks: whether smaller tasks should be
oversampled to match the largest one.
:param collate_mbatches: function that given a sequence of mini-batches
(one for each task) combines them into a single mini-batch. Used to
combine the mini-batches obtained separately from each task.
:param kwargs: data loader arguments used to instantiate the loader for
each task separately. See pytorch :class:`DataLoader`.
"""
self.data = data
self.dataloaders: Dict[int, DataLoader] = {}
self.oversample_small_tasks = oversample_small_tasks
self.collate_mbatches = collate_mbatches
# split data by task.
task_datasets = []
for task_label in self.data.task_set:
tdata = self.data.task_set[task_label]
task_datasets.append(tdata)
# the iteration logic is implemented by GroupBalancedDataLoader.
# we use kwargs to pass the arguments to avoid passing the same
# arguments multiple times.
if 'data' in kwargs:
del kwargs['data']
# needed if they are passed as positional arguments
kwargs['oversample_small_groups'] = oversample_small_tasks
kwargs['collate_mbatches'] = collate_mbatches
self._dl = GroupBalancedDataLoader(datasets=task_datasets, **kwargs)
def __iter__(self):
for el in self._dl.__iter__():
yield el
def __len__(self):
return self._dl.__len__()
class GroupBalancedDataLoader:
""" Data loader that balances data from multiple datasets."""
def __init__(self, datasets: Sequence[AvalancheDataset],
oversample_small_groups: bool = False,
collate_mbatches=_default_collate_mbatches_fn,
**kwargs):
""" Data loader that balances data from multiple datasets.
Mini-batches emitted by this dataloader are created by collating
together mini-batches from each group. It may be used to balance data
among classes, experiences, tasks, and so on.
If `oversample_small_groups == True` smaller groups are oversampled to
match the largest group. Otherwise, once data from a group is
completely iterated, the group will be skipped.
:param datasets: an instance of `AvalancheDataset`.
:param oversample_small_groups: whether smaller groups should be
oversampled to match the largest one.
:param collate_mbatches: function that given a sequence of mini-batches
(one for each task) combines them into a single mini-batch. Used to
combine the mini-batches obtained separately from each task.
:param kwargs: data loader arguments used to instantiate the loader for
each group separately. See pytorch :class:`DataLoader`.
"""
self.datasets = datasets
self.dataloaders = []
self.oversample_small_groups = oversample_small_groups
self.collate_mbatches = collate_mbatches
for data in self.datasets:
self.dataloaders.append(DataLoader(data, **kwargs))
self.max_len = max([len(d) for d in self.dataloaders])
def __iter__(self):
iter_dataloaders = []
for dl in self.dataloaders:
iter_dataloaders.append(iter(dl))
max_num_mbatches = max([len(d) for d in iter_dataloaders])
for it in range(max_num_mbatches):
mb_curr = []
is_removed_dataloader = False
# copy() is necessary because we may remove keys from the
# dictionary. This would break the generator.
for tid, t_loader in enumerate(iter_dataloaders):
try:
batch = next(t_loader)
except StopIteration:
# StopIteration is thrown if dataset ends.
if self.oversample_small_groups:
# reinitialize data loader
iter_dataloaders[tid] = iter(self.dataloaders[tid])
batch = next(iter_dataloaders[tid])
else:
                        # We iterated over all the data from this group
                        # and we don't need the iterator anymore.
iter_dataloaders[tid] = None
is_removed_dataloader = True
continue
mb_curr.append(batch)
yield self.collate_mbatches(mb_curr)
# clear empty data-loaders
if is_removed_dataloader:
while None in iter_dataloaders:
iter_dataloaders.remove(None)
def __len__(self):
return self.max_len
class GroupBalancedInfiniteDataLoader:
""" Data loader that balances data from multiple datasets emitting an
infinite stream."""
def __init__(self, datasets: Sequence[AvalancheDataset],
collate_mbatches=_default_collate_mbatches_fn,
**kwargs):
""" Data loader that balances data from multiple datasets emitting an
infinite stream.
Mini-batches emitted by this dataloader are created by collating
together mini-batches from each group. It may be used to balance data
among classes, experiences, tasks, and so on.
:param datasets: an instance of `AvalancheDataset`.
:param collate_mbatches: function that given a sequence of mini-batches
(one for each task) combines them into a single mini-batch. Used to
combine the mini-batches obtained separately from each task.
:param kwargs: data loader arguments used to instantiate the loader for
each group separately. See pytorch :class:`DataLoader`.
"""
self.datasets = datasets
self.dataloaders = []
self.collate_mbatches = collate_mbatches
for data in self.datasets:
infinite_sampler = RandomSampler(data, replacement=True,
num_samples=10 ** 10)
dl = DataLoader(
data,
sampler=infinite_sampler,
**kwargs)
self.dataloaders.append(dl)
self.max_len = 10 ** 10
def __iter__(self):
iter_dataloaders = []
for dl in self.dataloaders:
iter_dataloaders.append(iter(dl))
while True:
mb_curr = []
for tid, t_loader in enumerate(iter_dataloaders):
batch = next(t_loader)
mb_curr.append(batch)
yield self.collate_mbatches(mb_curr)
def __len__(self):
return self.max_len
class ReplayDataLoader:
""" Custom data loader for rehearsal/replay strategies."""
def __init__(self, data: AvalancheDataset, memory: AvalancheDataset = None,
oversample_small_tasks: bool = False,
collate_mbatches=_default_collate_mbatches_fn,
batch_size: int = 32,
force_data_batch_size: int = None,
**kwargs):
""" Custom data loader for rehearsal strategies.
        It iterates in parallel over two datasets, the current `data` and the
rehearsal `memory`, which are used to create mini-batches by
concatenating their data together. Mini-batches from both of them are
balanced using the task label (i.e. each mini-batch contains a balanced
number of examples from all the tasks in the `data` and `memory`).
If `oversample_small_tasks == True` smaller tasks are oversampled to
match the largest task.
:param data: AvalancheDataset.
:param memory: AvalancheDataset.
:param oversample_small_tasks: whether smaller tasks should be
oversampled to match the largest one.
:param collate_mbatches: function that given a sequence of mini-batches
(one for each task) combines them into a single mini-batch. Used to
combine the mini-batches obtained separately from each task.
:param batch_size: the size of the batch. It must be greater than or
equal to the number of tasks.
        :param force_data_batch_size: How many samples of each mini-batch should
            come from the current `data`; the rest comes from `memory`. If None,
            the batch size is split evenly across the tasks in `data` and
            `memory`.
:param kwargs: data loader arguments used to instantiate the loader for
each task separately. See pytorch :class:`DataLoader`.
"""
self.data = data
self.memory = memory
self.loader_data: Sequence[DataLoader] = {}
self.loader_memory: Sequence[DataLoader] = {}
self.oversample_small_tasks = oversample_small_tasks
self.collate_mbatches = collate_mbatches
if force_data_batch_size is not None:
assert force_data_batch_size <= batch_size, \
"Forced batch size of data must be <= entire batch size"
mem_batch_size = batch_size - force_data_batch_size
remaining_example = 0
mem_keys = len(self.memory.task_set)
assert mem_batch_size >= mem_keys, \
"Batch size must be greator or equal " \
"to the number of tasks in the memory."
self.loader_data, _ = self._create_dataloaders(
data, force_data_batch_size,
remaining_example, **kwargs)
self.loader_memory, _ = self._create_dataloaders(
memory, mem_batch_size,
remaining_example, **kwargs)
else:
num_keys = len(self.data.task_set) + len(self.memory.task_set)
assert batch_size >= num_keys, \
"Batch size must be greator or equal " \
"to the number of tasks in the memory " \
"and current data."
single_group_batch_size = batch_size // num_keys
remaining_example = batch_size % num_keys
self.loader_data, remaining_example = self._create_dataloaders(
data, single_group_batch_size,
remaining_example, **kwargs)
self.loader_memory, remaining_example = self._create_dataloaders(
memory, single_group_batch_size,
remaining_example, **kwargs)
self.max_len = max([len(d) for d in chain(
self.loader_data.values(), self.loader_memory.values())]
)
def __iter__(self):
iter_data_dataloaders = {}
iter_buffer_dataloaders = {}
for t in self.loader_data.keys():
iter_data_dataloaders[t] = iter(self.loader_data[t])
for t in self.loader_memory.keys():
iter_buffer_dataloaders[t] = iter(self.loader_memory[t])
max_len = max([len(d) for d in chain(iter_data_dataloaders.values(),
iter_buffer_dataloaders.values())])
try:
for it in range(max_len):
mb_curr = []
self._get_mini_batch_from_data_dict(
self.data, iter_data_dataloaders,
self.loader_data, self.oversample_small_tasks,
mb_curr)
self._get_mini_batch_from_data_dict(
self.memory, iter_buffer_dataloaders,
self.loader_memory, self.oversample_small_tasks,
mb_curr)
yield self.collate_mbatches(mb_curr)
except StopIteration:
return
def __len__(self):
return self.max_len
def _get_mini_batch_from_data_dict(self, data, iter_dataloaders,
loaders_dict, oversample_small_tasks,
mb_curr):
# list() is necessary because we may remove keys from the
# dictionary. This would break the generator.
for t in list(iter_dataloaders.keys()):
t_loader = iter_dataloaders[t]
try:
tbatch = next(t_loader)
except StopIteration:
# StopIteration is thrown if dataset ends.
# reinitialize data loader
if oversample_small_tasks:
# reinitialize data loader
iter_dataloaders[t] = iter(loaders_dict[t])
tbatch = next(iter_dataloaders[t])
else:
del iter_dataloaders[t]
continue
mb_curr.append(tbatch)
def _create_dataloaders(self, data_dict, single_exp_batch_size,
remaining_example, **kwargs):
loaders_dict: Dict[int, DataLoader] = {}
for task_id in data_dict.task_set:
data = data_dict.task_set[task_id]
current_batch_size = single_exp_batch_size
if remaining_example > 0:
current_batch_size += 1
remaining_example -= 1
loaders_dict[task_id] = DataLoader(
data, batch_size=current_batch_size, **kwargs)
return loaders_dict, remaining_example
__all__ = [
'TaskBalancedDataLoader',
'GroupBalancedDataLoader',
'ReplayDataLoader',
'GroupBalancedInfiniteDataLoader'
]
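# A minimal usage sketch of GroupBalancedDataLoader. It only needs datasets that
# a torch DataLoader can wrap, so plain TensorDatasets are used here for brevity;
# real Avalanche code would pass AvalancheDataset instances. The tensors are made
# up for illustration.
if __name__ == "__main__":
    from torch.utils.data import TensorDataset

    group_a = TensorDataset(torch.zeros(8, 3), torch.zeros(8, dtype=torch.long))
    group_b = TensorDataset(torch.ones(4, 3), torch.ones(4, dtype=torch.long))

    loader = GroupBalancedDataLoader([group_a, group_b], batch_size=2)
    for x, y in loader:
        # while both groups still have data, each mini-batch stacks 2 samples
        # from group_a with 2 samples from group_b
        print(x.shape, y.tolist())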
| 42.283422
| 80
| 0.601303
|
ecacb892c48f4112d6eabf6856474712378bf6b7
| 8,800
|
py
|
Python
|
env/lib/python3.7/site-packages/docusign_admin/models/new_user_response.py
|
davidgacc/docusign
|
e63167101656d0066d481844576ce687ea80eb91
|
[
"MIT"
] | null | null | null |
env/lib/python3.7/site-packages/docusign_admin/models/new_user_response.py
|
davidgacc/docusign
|
e63167101656d0066d481844576ce687ea80eb91
|
[
"MIT"
] | null | null | null |
env/lib/python3.7/site-packages/docusign_admin/models/new_user_response.py
|
davidgacc/docusign
|
e63167101656d0066d481844576ce687ea80eb91
|
[
"MIT"
] | null | null | null |
# coding: utf-8
"""
DocuSign Admin API
An API for an organization administrator to manage organizations, accounts and users # noqa: E501
OpenAPI spec version: v2
Contact: devcenter@docusign.com
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
import pprint
import re # noqa: F401
import six
class NewUserResponse(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'id': 'str',
'site_id': 'int',
'user_name': 'str',
'first_name': 'str',
'last_name': 'str',
'email': 'str',
'language_culture': 'str',
'federated_status': 'str',
'accounts': 'list[NewUserResponseAccountProperties]'
}
attribute_map = {
'id': 'id',
'site_id': 'site_id',
'user_name': 'user_name',
'first_name': 'first_name',
'last_name': 'last_name',
'email': 'email',
'language_culture': 'language_culture',
'federated_status': 'federated_status',
'accounts': 'accounts'
}
def __init__(self, id=None, site_id=None, user_name=None, first_name=None, last_name=None, email=None, language_culture=None, federated_status=None, accounts=None): # noqa: E501
"""NewUserResponse - a model defined in Swagger""" # noqa: E501
self._id = None
self._site_id = None
self._user_name = None
self._first_name = None
self._last_name = None
self._email = None
self._language_culture = None
self._federated_status = None
self._accounts = None
self.discriminator = None
if id is not None:
self.id = id
if site_id is not None:
self.site_id = site_id
if user_name is not None:
self.user_name = user_name
if first_name is not None:
self.first_name = first_name
if last_name is not None:
self.last_name = last_name
if email is not None:
self.email = email
if language_culture is not None:
self.language_culture = language_culture
if federated_status is not None:
self.federated_status = federated_status
if accounts is not None:
self.accounts = accounts
@property
def id(self):
"""Gets the id of this NewUserResponse. # noqa: E501
:return: The id of this NewUserResponse. # noqa: E501
:rtype: str
"""
return self._id
@id.setter
def id(self, id):
"""Sets the id of this NewUserResponse.
:param id: The id of this NewUserResponse. # noqa: E501
:type: str
"""
self._id = id
@property
def site_id(self):
"""Gets the site_id of this NewUserResponse. # noqa: E501
:return: The site_id of this NewUserResponse. # noqa: E501
:rtype: int
"""
return self._site_id
@site_id.setter
def site_id(self, site_id):
"""Sets the site_id of this NewUserResponse.
:param site_id: The site_id of this NewUserResponse. # noqa: E501
:type: int
"""
self._site_id = site_id
@property
def user_name(self):
"""Gets the user_name of this NewUserResponse. # noqa: E501
:return: The user_name of this NewUserResponse. # noqa: E501
:rtype: str
"""
return self._user_name
@user_name.setter
def user_name(self, user_name):
"""Sets the user_name of this NewUserResponse.
:param user_name: The user_name of this NewUserResponse. # noqa: E501
:type: str
"""
self._user_name = user_name
@property
def first_name(self):
"""Gets the first_name of this NewUserResponse. # noqa: E501
:return: The first_name of this NewUserResponse. # noqa: E501
:rtype: str
"""
return self._first_name
@first_name.setter
def first_name(self, first_name):
"""Sets the first_name of this NewUserResponse.
:param first_name: The first_name of this NewUserResponse. # noqa: E501
:type: str
"""
self._first_name = first_name
@property
def last_name(self):
"""Gets the last_name of this NewUserResponse. # noqa: E501
:return: The last_name of this NewUserResponse. # noqa: E501
:rtype: str
"""
return self._last_name
@last_name.setter
def last_name(self, last_name):
"""Sets the last_name of this NewUserResponse.
:param last_name: The last_name of this NewUserResponse. # noqa: E501
:type: str
"""
self._last_name = last_name
@property
def email(self):
"""Gets the email of this NewUserResponse. # noqa: E501
:return: The email of this NewUserResponse. # noqa: E501
:rtype: str
"""
return self._email
@email.setter
def email(self, email):
"""Sets the email of this NewUserResponse.
:param email: The email of this NewUserResponse. # noqa: E501
:type: str
"""
self._email = email
@property
def language_culture(self):
"""Gets the language_culture of this NewUserResponse. # noqa: E501
:return: The language_culture of this NewUserResponse. # noqa: E501
:rtype: str
"""
return self._language_culture
@language_culture.setter
def language_culture(self, language_culture):
"""Sets the language_culture of this NewUserResponse.
:param language_culture: The language_culture of this NewUserResponse. # noqa: E501
:type: str
"""
self._language_culture = language_culture
@property
def federated_status(self):
"""Gets the federated_status of this NewUserResponse. # noqa: E501
:return: The federated_status of this NewUserResponse. # noqa: E501
:rtype: str
"""
return self._federated_status
@federated_status.setter
def federated_status(self, federated_status):
"""Sets the federated_status of this NewUserResponse.
:param federated_status: The federated_status of this NewUserResponse. # noqa: E501
:type: str
"""
self._federated_status = federated_status
@property
def accounts(self):
"""Gets the accounts of this NewUserResponse. # noqa: E501
:return: The accounts of this NewUserResponse. # noqa: E501
:rtype: list[NewUserResponseAccountProperties]
"""
return self._accounts
@accounts.setter
def accounts(self, accounts):
"""Sets the accounts of this NewUserResponse.
:param accounts: The accounts of this NewUserResponse. # noqa: E501
:type: list[NewUserResponseAccountProperties]
"""
self._accounts = accounts
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
if issubclass(NewUserResponse, dict):
for key, value in self.items():
result[key] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, NewUserResponse):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
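# A short usage sketch of the generated model: attributes are set via keyword
# arguments and to_dict() serializes them back. The values below are made up.
if __name__ == "__main__":
    user = NewUserResponse(id="123", user_name="jdoe", email="jdoe@example.com")
    print(user.to_dict())
    print(user == NewUserResponse(id="123", user_name="jdoe", email="jdoe@example.com"))  # True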
| 27.160494
| 182
| 0.58875
|
8ad7f8531a41e71c4af98b93764a1ec45dfe2915
| 4,853
|
py
|
Python
|
core/general.py
|
yaakiyu/rt-bot
|
f68bca95c516e08c31ecc846524dcea4c8ba1503
|
[
"BSD-4-Clause"
] | null | null | null |
core/general.py
|
yaakiyu/rt-bot
|
f68bca95c516e08c31ecc846524dcea4c8ba1503
|
[
"BSD-4-Clause"
] | null | null | null |
core/general.py
|
yaakiyu/rt-bot
|
f68bca95c516e08c31ecc846524dcea4c8ba1503
|
[
"BSD-4-Clause"
] | null | null | null |
# RT - General
from __future__ import annotations
from typing import TYPE_CHECKING, TypeVar, Optional, Any
from discord.ext import commands
from discord.ext.fslash import is_fslash
import discord
from rtlib.common.utils import make_error_message, code_block, text_format
from .utils import get_fsparent, gettext
from .types_ import NameIdObj, MentionIdObj
from .bot import RT
from data import Colors
if TYPE_CHECKING:
from .rtevent import EventContext
from .help import Help, HelpCommand, Text
__all__ = ("RT", "Cog", "t", "cast", "Embed")
class Embed(discord.Embed):
"Botのテーマカラーをデフォルトで設定するようにした`Embed`です。"
def __init__(self, title: str, *args, **kwargs):
kwargs["title"] = title
kwargs.setdefault("color", Colors.normal)
super().__init__(*args, **kwargs)
def _get_client(obj):
return obj._state._get_client()
def t(text: Text, ctx: Any, ignore_key_error: bool = False, **kwargs) -> str:
"""Extracts strings in the correct language from a dictionary of language code keys and their corresponding strings, based on information such as the `ctx` guild passed in.
    Keyword arguments can be used to substitute values into the string, much like an f-string."""
# Extract client
client: Optional[RT] = kwargs.pop("client", None)
user, gu = False, False
if isinstance(ctx, (discord.User, discord.Member, discord.Object)):
client = _get_client(ctx) # type: ignore
user = True
elif getattr(ctx, "message", None) and not is_fslash(ctx):
client = _get_client(ctx.message)
elif getattr(ctx, "guild", None):
client = _get_client(ctx.guild)
elif getattr(ctx, "channel", None):
client = _get_client(ctx.channel)
elif getattr(ctx, "user", None):
client = _get_client(ctx.user)
elif gu := isinstance(ctx, (discord.Guild, discord.User)):
client = _get_client(ctx) # type: ignore
# Extract correct text
if client is None:
text = gettext(text, "en") # type: ignore
elif isinstance(ctx, int):
text = gettext(text, client.language.user.get(ctx) # type: ignore
or client.language.guild.get(ctx))
else:
language = None
if user:
language = client.language.user.get(ctx.id)
else:
if getattr(ctx, "user", None):
language = client.language.user.get(ctx.user.id) # type: ignore
if language is None and getattr(ctx, "author", None):
language = client.language.user.get(ctx.author.id) # type: ignore
if language is None and getattr(ctx, "guild", None):
language = client.language.guild.get(ctx.guild.id) # type: ignore
if language is None and gu:
language = client.language.guild.get(ctx.id)
if language is None: language = "en"
text = gettext(text, "en") if language is None else gettext(text, language) # type: ignore
try:
return text.format(**kwargs) # type: ignore
except KeyError:
if ignore_key_error:
return text # type: ignore
else:
raise
class BadRequest(Exception):
"400エラーを発生させます。"
UCReT = TypeVar("UCReT")
class Cog(commands.Cog):
"Extended cog"
text_format = staticmethod(text_format)
detail_or = staticmethod(lambda detail: "ERROR" if detail else "SUCCESS")
BadRequest = BadRequest
get_fsparent = staticmethod(get_fsparent)
Help: type[Help]
HelpCommand: type[HelpCommand]
Embed = Embed
ERRORS = {
"WRONG_WAY": lambda ctx: t(dict(
ja="使い方が違います。", en="This is wrong way to use this command."
), ctx)
}
t = staticmethod(t)
EventContext: type[EventContext]
bot: RT
async def group_index(self, ctx: commands.Context) -> None:
"グループコマンドが実行された際に「使用方法が違います」と返信します。"
if not ctx.invoked_subcommand:
await ctx.reply(t({
"ja": "使用方法が違います。", "en": "It is wrong way to use this command."
}, ctx))
@staticmethod
def mention_and_id(obj: MentionIdObj) -> str:
return f"{obj.mention} (`{obj.id}`)"
@staticmethod
def name_and_id(obj: NameIdObj) -> str:
return f"{obj.name} (`{obj.id}`)"
def embed(self, **kwargs) -> Embed:
"Make embed and set title to the cog name."
return Embed(self.__cog_name__, **kwargs)
CONSTANT_FOR_EXCEPTION_TO_TEXT = {
"ja": "内部エラーが発生しました。", "en": "An internal error has occurred."
}
@staticmethod
def error_to_text(error: Exception) -> Text:
error = code_block(make_error_message(error), "python") # type: ignore
return {
key: f"{Cog.CONSTANT_FOR_EXCEPTION_TO_TEXT[key]}\n{error}"
for key in ("ja", "en")
}
def cast(**kwargs: dict[str, str]) -> str:
return kwargs # type: ignore
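# A hedged sketch of how t() is typically called from a command: it picks the
# entry matching the resolved language from a {"ja": ..., "en": ...} dict and
# formats it with the keyword arguments (the command body below is made up for
# illustration; `ctx` is assumed to be a commands.Context):
#
#     await ctx.reply(t(
#         {"ja": "{name}を追加しました。", "en": "Added {name}."},
#         ctx, name="example",
#     ))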
| 33.239726
| 176
| 0.636101
|
39d25398c127a2e46f91b8f38662ad1f1c94e25f
| 3,592
|
py
|
Python
|
api/tests/integration/slack/test_slack_environment_viewset.py
|
mevinbabuc/flagsmith
|
751bd6cb4a34bd2f80af5a9c547559da9c2fa010
|
[
"BSD-3-Clause"
] | 1,259
|
2021-06-10T11:24:09.000Z
|
2022-03-31T10:30:44.000Z
|
api/tests/integration/slack/test_slack_environment_viewset.py
|
mevinbabuc/flagsmith
|
751bd6cb4a34bd2f80af5a9c547559da9c2fa010
|
[
"BSD-3-Clause"
] | 392
|
2021-06-10T11:12:29.000Z
|
2022-03-31T10:13:53.000Z
|
api/tests/integration/slack/test_slack_environment_viewset.py
|
mevinbabuc/flagsmith
|
751bd6cb4a34bd2f80af5a9c547559da9c2fa010
|
[
"BSD-3-Clause"
] | 58
|
2021-06-11T03:18:07.000Z
|
2022-03-31T14:39:10.000Z
|
import json
from django.urls import reverse
from rest_framework import status
def test_posting_env_config_return_400_when_slack_project_config_does_not_exist(
admin_client, environment, environment_api_key
):
# Given
url = reverse(
"api-v1:environments:integrations-slack-list",
args=[environment_api_key],
)
# When
response = admin_client.post(
url,
data=json.dumps({"channel_id": "test_id", "enabled": True}),
content_type="application/json",
)
# Then
assert response.status_code == status.HTTP_400_BAD_REQUEST
assert "Slack api token not found" in response.json()[0]
def test_posting_env_config_calls_join_channel(
mocker,
admin_client,
environment,
environment_api_key,
slack_project_config,
slack_bot_token,
):
# Given
url = reverse(
"api-v1:environments:integrations-slack-list",
args=[environment_api_key],
)
env_config = {"channel_id": "channel_id1", "enabled": True}
mocked_slack_wrapper = mocker.patch("integrations.slack.models.SlackWrapper")
# When
response = admin_client.post(
url,
data=json.dumps(env_config),
content_type="application/json",
)
# Then
mocked_slack_wrapper.assert_called_with(
api_token=slack_bot_token, channel_id=env_config["channel_id"]
)
mocked_slack_wrapper.return_value.join_channel.assert_called_with()
assert response.status_code == status.HTTP_201_CREATED
assert response.json()["enabled"] == env_config["enabled"]
assert response.json()["channel_id"] == env_config["channel_id"]
def test_update_environment_config_calls_join_channel(
mocker,
admin_client,
environment,
environment_api_key,
slack_environment_config,
slack_bot_token,
):
# Given
url = reverse(
"api-v1:environments:integrations-slack-detail",
args=[environment_api_key, slack_environment_config],
)
env_config = {"channel_id": "channel_id2", "enabled": True}
mocked_slack_wrapper = mocker.patch("integrations.slack.models.SlackWrapper")
# When
response = admin_client.put(
url,
data=json.dumps(env_config),
content_type="application/json",
)
# Then
mocked_slack_wrapper.assert_called_with(
api_token=slack_bot_token, channel_id=env_config["channel_id"]
)
mocked_slack_wrapper.return_value.join_channel.assert_called_with()
assert response.status_code == status.HTTP_200_OK
assert response.json()["enabled"] == env_config["enabled"]
assert response.json()["channel_id"] == env_config["channel_id"]
def test_get_environment_config_list_returns_200(
admin_client, environment, environment_api_key, slack_environment_config
):
# Given
url = reverse(
"api-v1:environments:integrations-slack-list",
args=[environment_api_key],
)
# When
response = admin_client.get(url)
# Then
assert response.status_code == status.HTTP_200_OK
assert len(response.json()) == 1
assert response.json()[0]["id"] == slack_environment_config
def test_get_environment_config_returns_200(
admin_client, environment, environment_api_key, slack_environment_config
):
# Given
url = reverse(
"api-v1:environments:integrations-slack-detail",
args=[environment_api_key, slack_environment_config],
)
# When
response = admin_client.get(url)
# Then
assert response.status_code == status.HTTP_200_OK
assert response.json()["id"] == slack_environment_config
| 28.967742 | 81 | 0.703786 |
9853480e0035f1d8213c5f3fc6625e5497ce16e6 | 180 | py | Python |
mongo_folder/1.mongo.py | relax-space/python-learning | 22987e20a4b0a741e1c5ed8603a952a0fc8dd4bd | ["Apache-2.0"] | null | null | null |
mongo_folder/1.mongo.py | relax-space/python-learning | 22987e20a4b0a741e1c5ed8603a952a0fc8dd4bd | ["Apache-2.0"] | null | null | null |
mongo_folder/1.mongo.py | relax-space/python-learning | 22987e20a4b0a741e1c5ed8603a952a0fc8dd4bd | ["Apache-2.0"] | null | null | null |
"""
python.exe .\mongo_folder\1.mongo.py
"""
import pymongo
myclient = pymongo.MongoClient('mongodb://localhost:27017/')
dblist = myclient.list_database_names()
print(dblist)
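# Illustrative sketch (names are assumptions, not part of the original script):
# with pymongo, a database and collection are created lazily on the first write.
#
#     exampledb = myclient["example_db"]
#     examplecol = exampledb["example_collection"]
#     examplecol.insert_one({"name": "alice", "age": 30})
#     print(examplecol.find_one({"name": "alice"}))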
| 16.363636 | 60 | 0.738889 |
8924e30450682cfa4df8e1c864e2c6ff41b4777c | 842 | py | Python |
hackerrank/queues-and-stacks/queue_with_two_stacks.py | peoplenarthax/code-challenges | 557a9d309d9a1602b5a0410a758d6342ec42c445 | ["MIT"] | null | null | null |
hackerrank/queues-and-stacks/queue_with_two_stacks.py | peoplenarthax/code-challenges | 557a9d309d9a1602b5a0410a758d6342ec42c445 | ["MIT"] | null | null | null |
hackerrank/queues-and-stacks/queue_with_two_stacks.py | peoplenarthax/code-challenges | 557a9d309d9a1602b5a0410a758d6342ec42c445 | ["MIT"] | null | null | null |
class MyQueue(object):
def __init__(self):
self.inbox = []
self.outbox = []
def __refill_outbox__(self):
for i in range(len(self.inbox)):
self.outbox.append(self.inbox.pop())
def peek(self):
if len(self.outbox) == 0:
self.__refill_outbox__()
return self.outbox[-1]
def pop(self):
if len(self.outbox) == 0:
self.__refill_outbox__()
return self.outbox.pop()
def put(self, value):
self.inbox.append(value)
queue = MyQueue()
t = int(input())
for line in range(t):
values = map(int, input().split())
values = list(values)
if values[0] == 1:
queue.put(values[1])
elif values[0] == 2:
queue.pop()
else:
print(queue.peek())
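# Illustrative usage added for clarity (not part of the original HackerRank
# submission, which is driven by the stdin loop above). Each element moves from
# inbox to outbox at most once, so put/pop/peek are amortized O(1).
demo = MyQueue()
demo.put(1)
demo.put(2)
assert demo.peek() == 1
assert demo.pop() == 1
assert demo.pop() == 2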
| 24.057143 | 52 | 0.510689 |
0d141e9da67f23721799a9686a643581cf38f9cd | 1,765 | py | Python |
mwparserfromhell/nodes/comment.py | hperala/kontuwikibot | f409e6fb45adf4e553dc326d9fb3c0d29eda6373 | ["MIT"] | 2 | 2019-03-14T09:30:13.000Z | 2021-01-07T16:59:27.000Z |
mwparserfromhell/nodes/comment.py | hperala/kontuwikibot | f409e6fb45adf4e553dc326d9fb3c0d29eda6373 | ["MIT"] | null | null | null |
mwparserfromhell/nodes/comment.py | hperala/kontuwikibot | f409e6fb45adf4e553dc326d9fb3c0d29eda6373 | ["MIT"] | null | null | null |
# -*- coding: utf-8 -*-
#
# Copyright (C) 2012-2016 Ben Kurtovic <ben.kurtovic@gmail.com>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
from __future__ import unicode_literals
from . import Node
from ..compat import str
__all__ = ["Comment"]
class Comment(Node):
"""Represents a hidden HTML comment, like ``<!-- foobar -->``."""
def __init__(self, contents):
super(Comment, self).__init__()
self._contents = contents
def __unicode__(self):
return "<!--" + str(self.contents) + "-->"
@property
def contents(self):
"""The hidden text contained between ``<!--`` and ``-->``."""
return self._contents
@contents.setter
def contents(self, value):
self._contents = str(value)
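# Illustrative usage (an assumption added for clarity, not part of the original module):
#
#     comment = Comment("foobar")
#     comment.__unicode__()      # -> '<!--foobar-->'
#     comment.contents = "baz"
#     comment.__unicode__()      # -> '<!--baz-->'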
| 36.770833 | 79 | 0.712181 |
c60a6503122c77858d678a23a8c8105071dc5433 | 56,404 | py | Python |
pymatgen/io/abinitio/pseudos.py | NadezhdaBzhilyanskaya/pymatgen | fae11a8142d457a649fa84ff9781eb2b39334bdc | ["MIT"] | 1 | 2022-02-28T04:24:46.000Z | 2022-02-28T04:24:46.000Z |
pymatgen/io/abinitio/pseudos.py | NadezhdaBzhilyanskaya/pymatgen | fae11a8142d457a649fa84ff9781eb2b39334bdc | ["MIT"] | null | null | null |
pymatgen/io/abinitio/pseudos.py | NadezhdaBzhilyanskaya/pymatgen | fae11a8142d457a649fa84ff9781eb2b39334bdc | ["MIT"] | null | null | null |
"""
This module provides objects describing the basic parameters of the
pseudopotentials used in Abinit, and a parser to instantiate pseudopotential objects.
"""
from __future__ import division, print_function
import sys
import os
import abc
import collections
import json
import warnings
import numpy as np
from pymatgen.core.design_patterns import FrozenDict, AttrDict
from pymatgen.core.periodic_table import PeriodicTable
from pymatgen.util.num_utils import iterator_from_slice
from pymatgen.util.string_utils import list_strings, is_string
__all__ = [
"Pseudo",
"PseudoTable",
]
__author__ = "Matteo Giantomassi"
__version__ = "0.1"
__maintainer__ = "Matteo Giantomassi"
# Tools and helper functions.
def straceback():
"""Returns a string with the traceback."""
import traceback
return traceback.format_exc()
def _read_nlines(filename, nlines):
"""
Read at most nlines lines from file filename.
If nlines is < 0, the entire file is read.
"""
if nlines < 0:
with open(filename, 'r') as fh:
return fh.readlines()
lines = []
with open(filename, 'r') as fh:
for (lineno, line) in enumerate(fh):
if lineno == nlines: break
lines.append(line)
return lines
_l2str = {
0: "s",
1: "p",
2: "d",
3: "f",
4: "g",
5: "h",
6: "i",
}
_str2l = {v: k for k, v in _l2str.items()}
def l2str(l):
"""Convert the angular momentum l (int) to string."""
try:
return _l2str[l]
except KeyError:
return "Unknown angular momentum, received l = %s" % l
def str2l(s):
"""Convert a string to the angular momentum l (int)"""
return _str2l[s]
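# Quick illustration of the two helpers above (values follow directly from the
# _l2str mapping): l2str(2) -> "d", str2l("p") -> 1.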
def read_dojo_report(filename):
"""Helper function to read the DOJO_REPORT from file."""
with open(filename, "r") as fh:
lines = fh.readlines()
try:
start = lines.index("<DOJO_REPORT>\n")
except ValueError:
return {}
stop = lines.index("</DOJO_REPORT>\n")
return json.loads("".join(lines[start+1:stop]))
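# Illustrative sketch (file name assumed): the helper extracts the JSON blob stored
# between the <DOJO_REPORT> and </DOJO_REPORT> tags of a pseudopotential file, e.g.
#
#     report = read_dojo_report("14si.pspnc")
#     report.get("hints", {}).get("normal")   # e.g. {"ecut": ..., "aug_ratio": ...}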
#class DojoReport(dict):
# _LATEST_VERSION = 1.0
# _START_LINE = "<DOJO_REPORT>\n"
# _END_LINE = "</DOJO_REPORT>\n"
#
# @classmethod
# def from_file(cls, path):
# new = read_dojo_report(path)
# new.__class__ = cls
# return new
#
# #def to_file(self, path):
_PTABLE = PeriodicTable()
class Pseudo(object):
"""
Abstract base class defining the methods that must be
implemented by the concrete pseudopotential classes.
"""
__metaclass__ = abc.ABCMeta
#def __init__(self, filepath):
# self.filepath = os.path.abspath(filepath)
# self._dojo_report = {}
@classmethod
def aspseudo(cls, obj):
"""
Convert obj into a pseudo. Accepts:
* Pseudo object.
* string defining a valid path.
"""
if isinstance(obj, cls):
return obj
else:
# Assumes path.
return cls.from_file(obj)
@staticmethod
def from_file(filename):
"""
Return a pseudopotential object from filename.
        Note: the parser knows the concrete class that should be instantiated.
"""
return PseudoParser().parse(filename)
def __repr__(self):
return "<%s at %s, name = %s>" % (
self.__class__.__name__, id(self), self.name)
def __str__(self):
"""String representation."""
lines = []
app = lines.append
app("<%s: %s>" % (self.__class__.__name__, self.name))
app(" summary: " + self.summary.strip())
app(" number of valence electrons: %s" % self.Z_val)
#FIXME: rewrite the treatment of xc, use XML specs as starting point
#app(" XC correlation (ixc): %s" % self._pspxc) #FIXME
app(" maximum angular momentum: %s" % l2str(self.l_max))
app(" angular momentum for local part: %s" % l2str(self.l_local))
if self.isnc:
app(" radius for non-linear core correction: %s" % self.nlcc_radius)
app("")
hint_normal = self.hint_for_accuracy()
if hint_normal is not None:
app(" hint for normal accuracy: %s" % str(hint_normal))
return "\n".join(lines)
@abc.abstractproperty
def summary(self):
"""String summarizing the most important properties."""
@property
def filepath(self):
return os.path.abspath(self.path)
@property
def name(self):
"""File basename."""
return os.path.basename(self.filepath)
@abc.abstractproperty
def Z(self):
"""The atomic number of the atom."""
@abc.abstractproperty
def Z_val(self):
"""Valence charge"""
@property
def element(self):
"""Pymatgen `Element`."""
try:
return _PTABLE[self.Z]
except (KeyError, IndexError):
return _PTABLE[int(self.Z)]
@property
def type(self):
return self.__class__.__name__
@property
def symbol(self):
"""Element symbol."""
return self.element.symbol
@abc.abstractproperty
def l_max(self):
"""Maximum angular momentum."""
@abc.abstractproperty
def l_local(self):
"""Angular momentum used for the local part."""
@property
def isnc(self):
"""True if norm-conserving pseudopotential."""
return isinstance(self, NcPseudo)
@property
def ispaw(self):
"""True if PAW pseudopotential."""
return isinstance(self, PawPseudo)
#@abc.abstractproperty
#def xc_type(self):
# """XC family e.g LDA, GGA, MGGA."""
#@abc.abstractproperty
#def xc_flavor(self):
# """XC flavor e.g PW, PW91, PBE."""
#@property
#def xc_functional(self):
# """XC identifier e.g LDA-PW91, GGA-PBE, GGA-revPBE."""
# return "-".join([self.xc_type, self.xc_flavor])
#@abc.abstractproperty
#def has_soc(self):
# """True if pseudo contains spin-orbit coupling."""
#@abc.abstractmethod
#def num_of_projectors(self, l='s'):
# """Number of projectors for the angular channel l"""
#@abc.abstractmethod
#def generation_mode
# """scalar scalar-relativistic, relativistic."""
@property
def has_dojo_report(self):
"""True if self contains the DOJO_REPORT section."""
return bool(self.dojo_report)
def delta_factor(self, accuracy="normal"):
"""
Returns the deltafactor [meV/natom] computed with the given accuracy.
None if self does not have info on the deltafactor.
"""
if not self.has_dojo_report:
return None
try:
return self.dojo_report["delta_factor"][accuracy]["dfact"]
except KeyError:
return None
def read_dojo_report(self):
"""Read the DOJO_REPORT section, returns {} if section is not present."""
return read_dojo_report(self.path)
def write_dojo_report(self, report):
"""Write a new DOJO_REPORT section to the pseudopotential file."""
path = self.path
# Create JSON string from report.
jstring = json.dumps(report, indent=4, sort_keys=True) + "\n"
# Read lines from file and insert jstring between the tags.
with open(path, "r") as fh:
lines = fh.readlines()
try:
start = lines.index("<DOJO_REPORT>\n")
except ValueError:
start = -1
if start == -1:
# DOJO_REPORT was not present.
lines += ["\n", "<DOJO_REPORT>\n", jstring , "</DOJO_REPORT>\n",]
else:
stop = lines.index("</DOJO_REPORT>\n")
lines.insert(stop, jstring)
del lines[start+1:stop]
# Write new file.
with open(path, "w") as fh:
fh.writelines(lines)
def remove_dojo_report(self):
"""Remove the DOJO_REPORT section from the pseudopotential file."""
# Read lines from file and insert jstring between the tags.
path = self.path
with open(path, "r") as fh:
lines = fh.readlines()
try:
start = lines.index("<DOJO_REPORT>\n")
except ValueError:
start = -1
if start == -1:
return
stop = lines.index("</DOJO_REPORT>\n")
if stop == -1:
return
del lines[start+1:stop]
# Write new file.
with open(path, "w") as fh:
fh.writelines(lines)
def hint_for_accuracy(self, accuracy="normal"):
"""
        Return a Hint object with parameters such as ecut [Ha] and
        aug_ratio for the given accuracy. Return None if no hint is available.
Args:
accuracy: ["low", "normal", "high"]
"""
if self.has_dojo_report:
return Hint.from_dict(self.dojo_report["hints"][accuracy])
else:
return None
@property
def has_hints(self):
"""True if self provides hints on the cutoff energy."""
for acc in ["low", "normal", "high"]:
if self.hint_for_accuracy(acc) is None:
return False
return True
#@property
#def md5(self):
# """
# Return the checksum of the pseudopotential file.
# """
# import hashlib
# hasher = hashlib.md5()
# with open(self.filepath, "r") as fh:
# hasher.update(fh.read())
# return hasher.hexdigest()
class NcPseudo(object):
"""
Abstract class defining the methods that must be implemented
by the concrete classes representing norm-conserving pseudopotentials.
"""
__metaclass__ = abc.ABCMeta
@abc.abstractproperty
def nlcc_radius(self):
"""
        Radius at which the core charge vanishes (i.e. cut-off in a.u.).
Returns 0.0 if nlcc is not used.
"""
@property
def has_nlcc(self):
"""True if the pseudo is generated with non-linear core correction."""
return self.nlcc_radius > 0.0
@property
def rcore(self):
"""Radius of the pseudization sphere in a.u."""
try:
return self._core
except AttributeError:
return None
class PawPseudo(object):
"""
Abstract class that defines the methods that must be implemented
by the concrete classes representing PAW pseudopotentials.
"""
__metaclass__ = abc.ABCMeta
#def nlcc_radius(self):
# """
# Radius at which the core charge vanish (i.e. cut-off in a.u.).
# Returns 0.0 if nlcc is not used.
# """
# return 0.0
#
#@property
#def has_nlcc(self):
# """True if the pseudo is generated with non-linear core correction."""
# return True
@abc.abstractproperty
def paw_radius(self):
"""Radius of the PAW sphere in a.u."""
@property
def rcore(self):
"""Alias of paw_radius."""
return self.paw_radius
class AbinitPseudo(Pseudo):
"""
An AbinitPseudo is a pseudopotential whose file contains an abinit header.
"""
def __init__(self, path, header):
"""
Args:
path:
Filename.
header:
`AbinitHeader` instance.
"""
self.path = path
self._summary = header.summary
if hasattr(self, "dojo_report"):
self.dojo_report = header.dojo_report
else:
self.dojo_report = {}
#self.pspcod = header.pspcod
for (attr_name, desc) in header.items():
value = header.get(attr_name, None)
# Hide these attributes since one should always use the public interface.
setattr(self, "_" + attr_name, value)
@property
def summary(self):
"""Summary line reported in the ABINIT header."""
return self._summary.strip()
@property
def Z(self):
return self._zatom
@property
def Z_val(self):
return self._zion
@property
def l_max(self):
return self._lmax
@property
def l_local(self):
return self._lloc
class NcAbinitPseudo(NcPseudo, AbinitPseudo):
"""
Norm-conserving pseudopotential in the Abinit format.
"""
@property
def summary(self):
return self._summary.strip()
@property
def Z(self):
return self._zatom
@property
def Z_val(self):
"""Number of valence electrons."""
return self._zion
@property
def l_max(self):
return self._lmax
@property
def l_local(self):
return self._lloc
@property
def nlcc_radius(self):
return self._rchrg
class PawAbinitPseudo(PawPseudo, AbinitPseudo):
"""Paw pseudopotential in the Abinit format."""
@property
def paw_radius(self):
return self._r_cut
#def orbitals(self):
class Hint(collections.namedtuple("Hint", "ecut aug_ratio")):
"""
Suggested value for the cutoff energy [Hartree units] and the augmentation ratio (PAW pseudo)
"""
@property
def to_dict(self):
return {f: getattr(self, f) for f in self._fields}
@classmethod
def from_dict(cls, d):
return cls(**{k: v for k,v in d.items() if not k.startswith("@")})
def _dict_from_lines(lines, key_nums, sep=None):
"""
Helper function to parse formatted text structured like:
value1 value2 ... sep key1, key2 ...
key_nums is a list giving the number of keys for each line. 0 if line should be skipped.
sep is a string denoting the character that separates the keys from the value (None if
no separator is present).
Returns:
dict{key1 : value1, key2 : value2, ...}
Raises:
ValueError if parsing fails.
"""
if is_string(lines):
lines = [lines]
if not isinstance(key_nums, collections.Iterable):
        key_nums = [key_nums]
if len(lines) != len(key_nums):
err_msg = "lines = %s\n key_num = %s" % (str(lines), str(key_nums))
raise ValueError(err_msg)
kwargs = FrozenDict()
for (i, nk) in enumerate(key_nums):
if nk == 0: continue
line = lines[i]
tokens = [t.strip() for t in line.split()]
values, keys = tokens[:nk], "".join(tokens[nk:])
        # Sanitize keys: in some cases the key string comes in the form foo[,bar].
        keys = keys.replace("[", "").replace("]", "")
keys = keys.split(",")
if sep is not None:
check = keys[0][0]
if check != sep:
raise ValueError("Expecting separator %s, got %s" % (sep, check))
keys[0] = keys[0][1:]
if len(values) != len(keys):
msg = "line: %s\n len(keys) != len(value)\nkeys: %s\n values: %s" % (line, keys, values)
warnings.warn(msg)
#raise ValueError(msg)
kwargs.update(zip(keys, values))
return kwargs
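# Illustrative sketch (input line invented in the style of an FHI header): with
# key_nums=[3], the first three tokens become values and the trailing tokens the keys.
#
#     d = _dict_from_lines(["21.00000 3.00000 940714 zatom, zion, pspdat"], [3])
#     # d -> {"zatom": "21.00000", "zion": "3.00000", "pspdat": "940714"}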
class AbinitHeader(dict):
"""Dictionary whose keys can be also accessed as attributes."""
def __getattr__(self, name):
try:
# Default behaviour
return super(AbinitHeader, self).__getattribute__(name)
except AttributeError:
try:
# Try in the dictionary.
return self[name]
except KeyError as exc:
raise AttributeError(str(exc))
def _int_from_str(string):
"""
Convert string into integer
Raise:
TypeError if string is not a valid integer
"""
float_num = float(string)
int_num = int(float_num)
if float_num == int_num:
return int_num
else:
raise TypeError("Cannot convert string %s to int" % string)
class NcAbinitHeader(AbinitHeader):
"""
The abinit header found in the NC pseudopotential files.
"""
_attr_desc = collections.namedtuple("att", "default astype")
_VARS = {
# Mandatory
"zatom" : _attr_desc(None, _int_from_str),
"zion" : _attr_desc(None, float),
"pspdat" : _attr_desc(None, float),
"pspcod" : _attr_desc(None, int),
"pspxc" : _attr_desc(None, int),
"lmax" : _attr_desc(None, int),
"lloc" : _attr_desc(None, int),
"r2well" : _attr_desc(None, float),
"mmax" : _attr_desc(None, float),
        # Optional variables for the non-linear core correction. HGH does not have it.
"rchrg" : _attr_desc(0.0, float), # radius at which the core charge vanish (i.e. cut-off in a.u.)
"fchrg" : _attr_desc(0.0, float),
"qchrg" : _attr_desc(0.0, float),
}
del _attr_desc
def __init__(self, summary, **kwargs):
super(NcAbinitHeader, self).__init__()
# APE uses llocal instead of lloc.
if "llocal" in kwargs:
kwargs["lloc"] = kwargs.pop("llocal")
self.summary = summary.strip()
for (key, desc) in NcAbinitHeader._VARS.items():
default, astype = desc.default, desc.astype
value = kwargs.pop(key, None)
if value is None:
value = default
if default is None:
raise RuntimeError("Attribute %s must be specified" % key)
else:
try:
value = astype(value)
except:
raise RuntimeError("Conversion Error for key, value %s" % (key, value))
self[key] = value
# Add dojo_report
self["dojo_report"] = kwargs.pop("dojo_report", {})
#if kwargs:
# raise RuntimeError("kwargs should be empty but got %s" % str(kwargs))
@staticmethod
def fhi_header(filename, ppdesc):
"""Parse the FHI abinit header."""
# Example:
# Troullier-Martins psp for element Sc Thu Oct 27 17:33:22 EDT 1994
# 21.00000 3.00000 940714 zatom, zion, pspdat
# 1 1 2 0 2001 .00000 pspcod,pspxc,lmax,lloc,mmax,r2well
# 1.80626423934776 .22824404341771 1.17378968127746 rchrg,fchrg,qchrg
lines = _read_nlines(filename, -1)
try:
header = _dict_from_lines(lines[:4], [0, 3, 6, 3])
except ValueError:
# The last record with rchrg ... seems to be optional.
header = _dict_from_lines(lines[:3], [0, 3, 6])
summary = lines[0]
header["dojo_report"] = read_dojo_report(filename)
#print(header)
return NcAbinitHeader(summary, **header)
@staticmethod
def hgh_header(filename, ppdesc):
"""Parse the HGH abinit header."""
# Example:
#Hartwigsen-Goedecker-Hutter psp for Ne, from PRB58, 3641 (1998)
# 10 8 010605 zatom,zion,pspdat
# 3 1 1 0 2001 0 pspcod,pspxc,lmax,lloc,mmax,r2well
lines = _read_nlines(filename, -1)
header = _dict_from_lines(lines[:3], [0, 3, 6])
summary = lines[0]
header["dojo_report"] = read_dojo_report(filename)
return NcAbinitHeader(summary, **header)
@staticmethod
def tm_header(filename, ppdesc):
"""Parse the TM abinit header."""
# Example:
#Troullier-Martins psp for element Fm Thu Oct 27 17:28:39 EDT 1994
#100.00000 14.00000 940714 zatom, zion, pspdat
# 1 1 3 0 2001 .00000 pspcod,pspxc,lmax,lloc,mmax,r2well
# 0 4.085 6.246 0 2.8786493 l,e99.0,e99.9,nproj,rcpsp
# .00000000 .0000000000 .0000000000 .00000000 rms,ekb1,ekb2,epsatm
# 1 3.116 4.632 1 3.4291849 l,e99.0,e99.9,nproj,rcpsp
# .00000000 .0000000000 .0000000000 .00000000 rms,ekb1,ekb2,epsatm
# 2 4.557 6.308 1 2.1865358 l,e99.0,e99.9,nproj,rcpsp
# .00000000 .0000000000 .0000000000 .00000000 rms,ekb1,ekb2,epsatm
# 3 23.251 29.387 1 2.4776730 l,e99.0,e99.9,nproj,rcpsp
# .00000000 .0000000000 .0000000000 .00000000 rms,ekb1,ekb2,epsatm
# 3.62474762267880 .07409391739104 3.07937699839200 rchrg,fchrg,qchrg
lines = _read_nlines(filename, -1)
header = []
for (lineno, line) in enumerate(lines):
header.append(line)
if lineno == 2:
# Read lmax.
tokens = line.split()
pspcod, pspxc, lmax, lloc = map(int, tokens[:4])
mmax, r2well = map(float, tokens[4:6])
#if tokens[-1].strip() != "pspcod,pspxc,lmax,lloc,mmax,r2well":
# raise RuntimeError("%s: Invalid line\n %s" % (filename, line))
lines = lines[3:]
break
# TODO
# Parse the section with the projectors.
#0 4.085 6.246 0 2.8786493 l,e99.0,e99.9,nproj,rcpsp
#.00000000 .0000000000 .0000000000 .00000000 rms,ekb1,ekb2,epsatm
projectors = collections.OrderedDict()
for idx in range(2*(lmax+1)):
line = lines[idx]
if idx % 2 == 0: proj_info = [line,]
if idx % 2 == 1:
proj_info.append(line)
d = _dict_from_lines(proj_info, [5,4])
projectors[int(d["l"])] = d
# Add the last line with info on nlcc.
header.append(lines[idx+1])
summary = header[0]
header = _dict_from_lines(header, [0,3,6,3])
header["dojo_report"] = read_dojo_report(filename)
return NcAbinitHeader(summary, **header)
class PawAbinitHeader(AbinitHeader):
"""
The abinit header found in the PAW pseudopotential files.
"""
_attr_desc = collections.namedtuple("att", "default astype")
_VARS = {
"zatom" : _attr_desc(None, _int_from_str),
"zion" : _attr_desc(None, float),
"pspdat" : _attr_desc(None, float),
"pspcod" : _attr_desc(None, int),
"pspxc" : _attr_desc(None, int),
"lmax" : _attr_desc(None, int),
"lloc" : _attr_desc(None, int),
"mmax" : _attr_desc(None, int),
"r2well" : _attr_desc(None, float),
"pspfmt" : _attr_desc(None, str),
"creatorID" : _attr_desc(None, int),
"basis_size" : _attr_desc(None, int),
"lmn_size" : _attr_desc(None, int),
"orbitals" : _attr_desc(None, list),
"number_of_meshes": _attr_desc(None, int),
"r_cut" : _attr_desc(None, float), # r_cut(PAW) in the header
"shape_type" : _attr_desc(None, int),
"rshape" : _attr_desc(None, float),
}
del _attr_desc
def __init__(self, summary, **kwargs):
super(PawAbinitHeader, self).__init__()
self.summary = summary.strip()
for (key, desc) in self._VARS.items():
default, astype = desc.default, desc.astype
value = kwargs.pop(key, None)
if value is None:
value = default
if default is None:
raise RuntimeError("Attribute %s must be specified" % key)
else:
try:
value = astype(value)
except:
raise RuntimeError("Conversion Error for key %s, with value %s" % (key, value))
self[key] = value
if kwargs:
raise RuntimeError("kwargs should be empty but got %s" % str(kwargs))
@staticmethod
def paw_header(filename, ppdesc):
"""Parse the PAW abinit header."""
#Paw atomic data for element Ni - Generated by AtomPAW (N. Holzwarth) + AtomPAW2Abinit v3.0.5
# 28.000 18.000 20061204 : zatom,zion,pspdat
# 7 7 2 0 350 0. : pspcod,pspxc,lmax,lloc,mmax,r2well
# paw3 1305 : pspfmt,creatorID
# 5 13 : basis_size,lmn_size
# 0 0 1 1 2 : orbitals
# 3 : number_of_meshes
# 1 3 350 1.1803778368E-05 3.5000000000E-02 : mesh 1, type,size,rad_step[,log_step]
# 2 1 921 2.500000000000E-03 : mesh 2, type,size,rad_step[,log_step]
# 3 3 391 1.1803778368E-05 3.5000000000E-02 : mesh 3, type,size,rad_step[,log_step]
# 2.3000000000 : r_cut(SPH)
# 2 0.
# Example
#C (US d-loc) - PAW data extracted from US-psp (D.Vanderbilt) - generated by USpp2Abinit v2.3.0
# 6.000 4.000 20090106 : zatom,zion,pspdat
# 7 11 1 0 560 0. : pspcod,pspxc,lmax,lloc,mmax,r2well
# paw4 2230 : pspfmt,creatorID
# 4 8 : basis_size,lmn_size
# 0 0 1 1 : orbitals
# 5 : number_of_meshes
# 1 2 560 1.5198032759E-04 1.6666666667E-02 : mesh 1, type,size,rad_step[,log_step]
# 2 2 556 1.5198032759E-04 1.6666666667E-02 : mesh 2, type,size,rad_step[,log_step]
# 3 2 576 1.5198032759E-04 1.6666666667E-02 : mesh 3, type,size,rad_step[,log_step]
# 4 2 666 1.5198032759E-04 1.6666666667E-02 : mesh 4, type,size,rad_step[,log_step]
# 5 2 673 1.5198032759E-04 1.6666666667E-02 : mesh 5, type,size,rad_step[,log_step]
# 1.5550009124 : r_cut(PAW)
# 3 0. : shape_type,rshape
#Paw atomic data for element Si - Generated by atompaw v3.0.1.3 & AtomPAW2Abinit v3.3.1
# 14.000 4.000 20120814 : zatom,zion,pspdat
# 7 11 1 0 663 0. : pspcod,pspxc,lmax,lloc,mmax,r2well
# paw5 1331 : pspfmt,creatorID
# 4 8 : basis_size,lmn_size
# 0 0 1 1 : orbitals
# 5 : number_of_meshes
# 1 2 663 8.2129718540404674E-04 1.1498160595656655E-02 : mesh 1, type,size,rad_step[,log_step]
# 2 2 658 8.2129718540404674E-04 1.1498160595656655E-02 : mesh 2, type,size,rad_step[,log_step]
# 3 2 740 8.2129718540404674E-04 1.1498160595656655E-02 : mesh 3, type,size,rad_step[,log_step]
# 4 2 819 8.2129718540404674E-04 1.1498160595656655E-02 : mesh 4, type,size,rad_step[,log_step]
# 5 2 870 8.2129718540404674E-04 1.1498160595656655E-02 : mesh 5, type,size,rad_step[,log_step]
# 1.5669671236 : r_cut(PAW)
# 2 0. : shape_type,rshape
supported_formats = ["paw3", "paw4", "paw5"]
if ppdesc.format not in supported_formats:
raise NotImplementedError("format %s not in %s" % (ppdesc.format, supported_formats))
lines = _read_nlines(filename, -1)
summary = lines[0]
header = _dict_from_lines(lines[:5], [0, 3, 6, 2, 2], sep=":")
lines = lines[5:]
# TODO
# Parse orbitals and number of meshes.
header["orbitals"] = [int(t) for t in lines[0].split(":")[0].split()]
header["number_of_meshes"] = num_meshes = int(lines[1].split(":")[0])
#print filename, header
# Skip meshes =
lines = lines[2+num_meshes:]
#for midx in range(num_meshes):
# l = midx + 1
#print lines[0]
header["r_cut"] = float(lines[0].split(":")[0])
#print lines[1]
header.update(_dict_from_lines(lines[1], [2], sep=":"))
report = read_dojo_report(filename)
if report:
header["dojo_report"] = report
#print("PAW header\n", header)
return PawAbinitHeader(summary, **header)
class PseudoParserError(Exception):
"""Base Error class for the exceptions raised by `PseudoParser`"""
class PseudoParser(object):
"""
Responsible for parsing pseudopotential files and returning pseudopotential objects.
Usage::
pseudo = PseudoParser().parse("filename")
"""
Error = PseudoParserError
# Supported values of pspcod
ppdesc = collections.namedtuple("ppdesc", "pspcod name psp_type format")
# TODO Recheck
_PSPCODES = collections.OrderedDict( {
1 : ppdesc(1, "TM", "NC", None),
3 : ppdesc(3, "HGH", "NC", None),
#4 : ppdesc(4, "NC", , None),
#5 : ppdesc(5, "NC", , None),
6 : ppdesc(6, "FHI", "NC", None),
        7 : ppdesc(7, "PAW_abinit_text", "PAW", None),
#8 : ppdesc(8, "NC", None),
10 : ppdesc(10, "HGHK", "NC", None),
})
del ppdesc
def __init__(self):
        # List of files that have been parsed successfully.
self._parsed_paths = []
        # List of files that could not be parsed.
self._wrong_paths = []
def scan_directory(self, dirname, exclude_exts=(), exclude_fnames=()):
"""
Analyze the files contained in directory dirname.
Args:
dirname:
directory path
exclude_exts:
list of file extensions that should be skipped.
exclude_fnames:
list of file names that should be skipped.
returns:
List of pseudopotential objects.
"""
for (i, ext) in enumerate(exclude_exts):
if not ext.strip().startswith("."):
exclude_exts[i] = "." + ext.strip()
# Exclude files depending on the extension.
paths = []
for fname in os.listdir(dirname):
root, ext = os.path.splitext(fname)
path = os.path.join(dirname, fname)
if (ext in exclude_exts or fname in exclude_fnames or
fname.startswith(".") or not os.path.isfile(path)): continue
paths.append(path)
pseudos = []
for path in paths:
# Parse the file and generate the pseudo.
try:
pseudo = self.parse(path)
except:
pseudo = None
if pseudo is not None:
pseudos.append(pseudo)
                self._parsed_paths.append(path)
else:
                self._wrong_paths.append(path)
return pseudos
def read_ppdesc(self, filename):
"""
Read the pseudopotential descriptor from file filename.
Returns:
Pseudopotential descriptor. None if filename is not a valid pseudopotential file.
Raises:
`PseudoParserError` if fileformat is not supported.
"""
if filename.endswith(".xml"):
raise self.Error("XML pseudo not supported yet")
else:
# Assume file with the abinit header.
lines = _read_nlines(filename, -1)
for (lineno, line) in enumerate(lines):
if lineno == 2:
try:
tokens = line.split()
pspcod, pspxc = map(int, tokens[:2])
except:
msg = "%s: Cannot parse pspcod, pspxc in line\n %s" % (filename, line)
sys.stderr.write(msg)
return None
#if tokens[-1].strip().replace(" ","") not in ["pspcod,pspxc,lmax,lloc,mmax,r2well",
# "pspcod,pspxc,lmax,llocal,mmax,r2well"]:
# raise self.Error("%s: Invalid line\n %s" % (filename, line))
# return None
if pspcod not in self._PSPCODES:
raise self.Error("%s: Don't know how to handle pspcod %s\n" % (filename, pspcod))
ppdesc = self._PSPCODES[pspcod]
if pspcod == 7:
# PAW -> need to know the format pspfmt
tokens = lines[lineno+1].split()
pspfmt, creatorID = tokens[:2]
#if tokens[-1].strip() != "pspfmt,creatorID":
# raise self.Error("%s: Invalid line\n %s" % (filename, line))
# return None
ppdesc = ppdesc._replace(format = pspfmt)
return ppdesc
return None
def parse(self, filename):
"""
Read and parse a pseudopotential file. Main entry point for client code.
Returns:
pseudopotential object or None if filename is not a valid pseudopotential file.
"""
path = os.path.abspath(filename)
# Only PAW supports XML at present.
if filename.endswith(".xml"):
return PawXmlSetup(path)
ppdesc = self.read_ppdesc(path)
if ppdesc is None:
return None
psp_type = ppdesc.psp_type
parsers = {
"FHI" : NcAbinitHeader.fhi_header,
"TM" : NcAbinitHeader.tm_header,
"HGH" : NcAbinitHeader.hgh_header,
"HGHK" : NcAbinitHeader.hgh_header,
"PAW_abinit_text": PawAbinitHeader.paw_header,
}
try:
header = parsers[ppdesc.name](path, ppdesc)
except Exception as exc:
raise self.Error(path + ":\n" + straceback())
root, ext = os.path.splitext(path)
# Add the content of input file (if present).
# The name of the input is name + ".ini"
#input = None
#input_path = root + ".ini"
#if os.path.exists(input_path):
# with open(input_path, 'r') as fh:
# input = fh.read()
if psp_type == "NC":
pseudo = NcAbinitPseudo(path, header)
elif psp_type == "PAW":
pseudo = PawAbinitPseudo(path, header)
else:
raise NotImplementedError("psp_type not in [NC, PAW]")
return pseudo
#TODO use RadialFunction from pseudo_dojo.
class RadialFunction(collections.namedtuple("RadialFunction", "mesh values")):
pass
class PawXmlSetup(Pseudo, PawPseudo):
def __init__(self, filepath):
# FIXME
self.dojo_report = {}
self.path = os.path.abspath(filepath)
        # Get the XML root (this trick is used so that the object is pickleable).
root = self.root
# Get the version of the XML format
self.paw_setup_version = root.get("version")
# Info on the atom.
atom_attrib = root.find("atom").attrib
#self._symbol = atom_attrib["symbol"]
self._zatom = int(float(atom_attrib["Z"]))
self.core, self.valence = map(float, [atom_attrib["core"], atom_attrib["valence"]])
#xc_info = root.find("atom").attrib
#self.xc_type, self.xc_name = xc_info["type"], xc_info["name"]
#self.ae_energy = {k: float(v) for k,v in root.find("ae_energy").attrib.items()}
# Old XML files do not define this field!
# In this case we set the PAW radius to None.
#self._paw_radius = float(root.find("PAW_radius").attrib["rpaw"])
pawr_element = root.find("PAW_radius")
self._paw_radius = None
if pawr_element is not None:
self._paw_radius = float(pawr_element.attrib["rpaw"])
#<valence_states>
# <state n="2" l="0" f="2" rc="1.10" e="-0.6766" id="N-2s"/>
# <state n="2" l="1" f="3" rc="1.10" e="-0.2660" id="N-2p"/>
# <state l="0" rc="1.10" e=" 0.3234" id="N-s1"/>
# <state l="1" rc="1.10" e=" 0.7340" id="N-p1"/>
# <state l="2" rc="1.10" e=" 0.0000" id="N-d1"/>
#</valence_states>
#
# The valence_states element contains several state elements.
# For this setup, the first two lines describe bound eigenstates
# with occupation numbers and principal quantum numbers.
        # Notice that the three additional unbound states should have no f and n attributes.
# In this way, we know that only the first two bound states (with f and n attributes)
# should be used for constructing an initial guess for the wave functions.
self.valence_states = {}
for node in root.find("valence_states"):
attrib = AttrDict(node.attrib)
assert attrib.id not in self.valence_states
self.valence_states[attrib.id] = attrib
#print(self.valence_states)
# Parse the radial grids
self.rad_grids = {}
for node in root.findall("radial_grid"):
grid_params = node.attrib
id = grid_params["id"]
assert id not in self.rad_grids
self.rad_grids[id] = self._eval_grid(grid_params)
def __getstate__(self):
"""
        The returned state is pickled as the contents of the instance.
        We just remove the XML root element, since Element objects cannot be pickled.
"""
return {k:v for k,v in self.__dict__.items() if k not in ["_root",]}
@property
def root(self):
try:
return self._root
except AttributeError:
from xml.etree import cElementTree as ET
tree = ET.parse(self.filepath)
self._root = tree.getroot()
return self._root
@property
def Z(self):
return self._zatom
@property
def Z_val(self):
"""Number of valence electrons."""
return self.valence
# FIXME
@property
def l_max(self):
"""Maximum angular momentum."""
return None
@property
def l_local(self):
"""Angular momentum used for the local part."""
return None
@property
def summary(self):
"""String summarizing the most important properties."""
return ""
@property
def paw_radius(self):
return self._paw_radius
@staticmethod
def _eval_grid(grid_params):
"""
This function receives a dictionary with the parameters defining the
radial mesh and returns a `ndarray` with the mesh
"""
eq = grid_params.get("eq").replace(" " ,"")
istart, iend = int(grid_params.get("istart")), int(grid_params.get("iend"))
indices = range(istart, iend+1)
if eq == 'r=a*exp(d*i)':
a, d = float(grid_params['a']), float(grid_params['d'])
mesh = [a * np.exp(d * i) for i in indices]
elif eq == 'r=a*i/(n-i)':
a, n = float(grid_params['a']), float(grid_params['n'])
mesh = [a * i / (n - i) for i in indices]
elif eq == 'r=a*(exp(d*i)-1)':
a, d = float(grid_params['a']), float(grid_params['d'])
mesh = [a * (np.exp(d * i) - 1.0) for i in indices]
elif eq == 'r=d*i':
d = float(grid_params['d'])
mesh = [d * i for i in indices]
elif eq == 'r=(i/n+a)^5/a-a^4':
a, n = float(grid_params['a']), float(grid_params['n'])
mesh = [(i / n + a)**5 / a - a**4 for i in indices]
else:
raise ValueError('Unknown grid type: %s' % eq)
return np.array(mesh)
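    # Illustrative sketch (parameters invented for demonstration): a linear grid
    # "r=d*i" with d=0.1 evaluated for i in [0, 4] yields [0.0, 0.1, 0.2, 0.3, 0.4].
    #
    #     PawXmlSetup._eval_grid({"eq": "r=d*i", "d": "0.1", "istart": "0", "iend": "4"})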
def _parse_radfunc(self, func_name):
"""Parse the first occurence of func_name in the XML file."""
node = self.root.find(func_name)
grid = node.attrib["grid"]
values = np.array([float(s) for s in node.text.split()])
return self.rad_grids[grid], values, node.attrib
def _parse_all_radfuncs(self, func_name):
"""Parse all the nodes with tag func_name in the XML file."""
for node in self.root.findall(func_name):
grid = node.attrib["grid"]
values = np.array([float(s) for s in node.text.split()])
yield self.rad_grids[grid], values, node.attrib
@property
def ae_core_density(self):
"""The all-electron radial density."""
try:
return self._ae_core_density
except AttributeError:
mesh, values, attrib = self._parse_radfunc("ae_core_density")
self._ae_core_density = RadialFunction(mesh, values)
return self._ae_core_density
@property
def pseudo_core_density(self):
"""The pseudized radial density."""
try:
return self._pseudo_core_density
except AttributeError:
mesh, values, attrib = self._parse_radfunc("pseudo_core_density")
self._pseudo_core_density = RadialFunction(mesh, values)
return self._pseudo_core_density
@property
def ae_partial_waves(self):
"""Dictionary with the AE partial waves indexed by state."""
try:
return self._ae_partial_waves
except AttributeError:
self._ae_partial_waves = {}
for (mesh, values, attrib) in self._parse_all_radfuncs("ae_partial_wave"):
state = attrib["state"]
val_state = self.valence_states[state]
self._ae_partial_waves[state] = RadialFunction(mesh, values)
#print("val_state", val_state)
return self._ae_partial_waves
@property
def pseudo_partial_waves(self):
"""Dictionary with the pseudo partial waves indexed by state."""
try:
return self._pseudo_partial_waves
except AttributeError:
self._pseudo_partial_waves = {}
for (mesh, values, attrib) in self._parse_all_radfuncs("pseudo_partial_wave"):
state = attrib["state"]
val_state = self.valence_states[state]
self._pseudo_partial_waves[state] = RadialFunction(mesh, values)
return self._pseudo_partial_waves
@property
def projector_functions(self):
"""Dictionary with the PAW projectors indexed by state."""
try:
return self._projector_functions
except AttributeError:
self._projector_functions = {}
for (mesh, values, attrib) in self._parse_all_radfuncs("projector_function"):
state = attrib["state"]
val_state = self.valence_states[state]
self._projector_functions[state] = RadialFunction(mesh, values)
return self._projector_functions
def plot_densities(self, **kwargs):
"""
Plot the PAW densities.
================ ==============================================================
kwargs Meaning
================ ==============================================================
title Title of the plot (Default: "Densities").
show True to show the figure (Default).
savefig 'abc.png' or 'abc.eps' to save the figure to a file.
================ ==============================================================
Returns:
`matplotlib` figure
"""
title = kwargs.pop("title", "Densities")
show = kwargs.pop("show", True)
savefig = kwargs.pop("savefig", None)
import matplotlib.pyplot as plt
fig = plt.figure()
ax = fig.add_subplot(1,1,1)
ax.grid(True)
ax.set_xlabel('r [Bohr]')
#ax.set_ylabel('density')
for i, den_name in enumerate(["ae_core_density", "pseudo_core_density"]):
rden = getattr(self, den_name)
label = "$n_c$" if i == 1 else "$\\tilde{n}_c$"
ax.plot(rden.mesh, rden.mesh * rden.values, label=label, lw=2)
plt.legend(loc="best")
if title is not None:
fig.suptitle(title)
if show:
plt.show()
if savefig:
fig.savefig(savefig)
return fig
def plot_waves(self, **kwargs):
"""
Plot the AE and the pseudo partial waves.
================ ==============================================================
kwargs Meaning
================ ==============================================================
title Title of the plot (Default: "Partial Waves").
show True to show the figure (Default).
savefig 'abc.png' or 'abc.eps' to save the figure to a file.
================ ==============================================================
Returns:
`matplotlib` figure
"""
title = kwargs.pop("title", "Partial Waves")
show = kwargs.pop("show", True)
savefig = kwargs.pop("savefig", None)
import matplotlib.pyplot as plt
fig = plt.figure()
ax = fig.add_subplot(1,1,1)
ax.grid(True)
ax.set_xlabel("r [Bohr]")
ax.set_ylabel("$r\phi,\\, r\\tilde\phi\, [Bohr]^{-\\frac{1}{2}}$")
ax.axvline(x=self.paw_radius, linewidth=2, color='k', linestyle="--")
#ax.annotate("$r_c$", xy=(self.paw_radius + 0.1, 0.1))
for state, rfunc in self.pseudo_partial_waves.items():
ax.plot(rfunc.mesh, rfunc.mesh * rfunc.values, lw=2, label="PS-WAVE: " + state)
for state, rfunc in self.ae_partial_waves.items():
ax.plot(rfunc.mesh, rfunc.mesh * rfunc.values, lw=2, label="AE-WAVE: " + state)
plt.legend(loc="best")
if title is not None:
fig.suptitle(title)
if show:
plt.show()
if savefig:
fig.savefig(savefig)
return fig
def plot_projectors(self, **kwargs):
"""
Plot the PAW projectors.
================ ==============================================================
kwargs Meaning
================ ==============================================================
title Title of the plot (Default: "Projectors").
show True to show the figure (Default).
savefig 'abc.png' or 'abc.eps' to save the figure to a file.
================ ==============================================================
Returns:
`matplotlib` figure
"""
title = kwargs.pop("title", "Projectors")
show = kwargs.pop("show", True)
savefig = kwargs.pop("savefig", None)
import matplotlib.pyplot as plt
fig = plt.figure()
ax = fig.add_subplot(1,1,1)
ax.grid(True)
ax.set_xlabel('r [Bohr]')
ax.set_ylabel("$r\\tilde p\, [Bohr]^{-\\frac{1}{2}}$")
ax.axvline(x=self.paw_radius, linewidth=2, color='k', linestyle="--")
#ax.annotate("$r_c$", xy=(self.paw_radius + 0.1, 0.1))
for state, rfunc in self.projector_functions.items():
ax.plot(rfunc.mesh, rfunc.mesh * rfunc.values, label="TPROJ: " + state)
plt.legend(loc="best")
if title is not None:
fig.suptitle(title)
if show:
plt.show()
if savefig:
fig.savefig(savefig)
return fig
#def plot_potentials(self, **kwargs):
# """
# ================ ==============================================================
# kwargs Meaning
# ================ ==============================================================
# title Title of the plot (Default: None).
# show True to show the figure (Default).
# savefig 'abc.png' or 'abc.eps' to save the figure to a file.
# ================ ==============================================================
# Returns:
# `matplotlib` figure
# """
# title = kwargs.pop("title", "Potentials")
# show = kwargs.pop("show", True)
# savefig = kwargs.pop("savefig", None)
# import matplotlib.pyplot as plt
# fig = plt.figure()
# ax = fig.add_subplot(1,1,1)
# ax.grid(True)
# ax.set_xlabel('r [Bohr]')
# ax.set_ylabel('density')
# ax.axvline(x=self.paw_radius, linewidth=2, color='k', linestyle="--")
# ax.annotate("$r_c$", xy=(self.paw_radius + 0.1, 0.1))
# for state, rfunc in self.potentials.items():
# ax.plot(rfunc.mesh, rfunc.values, label="TPROJ: " + state)
# plt.legend(loc="best")
# if title is not None:
# fig.suptitle(title)
# if show:
# plt.show()
# if savefig:
# fig.savefig(savefig)
# return fig
class PseudoTable(collections.Sequence):
"""
Define the pseudopotentials from the element table.
    Individual elements are accessed by name, symbol or atomic number.
For example, the following all retrieve iron:
print elements[26]
Fe
print elements.Fe
Fe
print elements.symbol('Fe')
Fe
print elements.name('iron')
Fe
print elements.isotope('Fe')
Fe
"""
@classmethod
def astable(cls, items):
"""
Return an instance of `PseudoTable` from the iterable items.
"""
if isinstance(items, cls): return items
return cls(items)
def __init__(self, pseudos):
"""
Args:
pseudos:
List of pseudopotentials or filepaths
"""
# Store pseudos in a default dictionary with z as key.
        # Note that we can have more than one pseudo for a given z,
        # hence the values are lists of pseudos.
if not isinstance(pseudos, collections.Iterable):
pseudos = [pseudos]
if is_string(pseudos[0]):
pseudos = list_strings(pseudos)
self._pseudos_with_z = collections.defaultdict(list)
for pseudo in pseudos:
p = pseudo
if not isinstance(pseudo, Pseudo):
p = Pseudo.from_file(pseudo)
self._pseudos_with_z[p.Z].append(p)
for z in self.zlist:
pseudo_list = self._pseudos_with_z[z]
symbols = [p.symbol for p in pseudo_list]
symbol = symbols[0]
if any(symb != symbol for symb in symbols):
raise ValueError("All symbols must be equal while they are: %s" % str(symbols))
setattr(self, symbol, pseudo_list)
def __getitem__(self, Z):
"""
Retrieve pseudos for the atomic number z.
Accepts both int and slice objects.
"""
if isinstance(Z, slice):
assert Z.stop is not None
pseudos = []
for znum in iterator_from_slice(Z):
pseudos.extend(self._pseudos_with_z[znum])
return pseudos
else:
return self._pseudos_with_z[Z]
def __len__(self):
return len(list(self.__iter__()))
def __iter__(self):
"""Process the elements in Z order."""
for z in self.zlist:
for pseudo in self._pseudos_with_z[z]:
yield pseudo
def __repr__(self):
return "<%s at %s>" % (self.__class__.__name__, id(self))
def __str__(self):
lines = []
app = lines.append
app("<%s, len=%d>" % (self.__class__.__name__, len(self)))
for pseudo in self:
app(str(pseudo))
return "\n".join(lines)
@property
def allnc(self):
"""True if all pseudos are norm-conserving."""
return all(p.isnc for p in self)
@property
def allpaw(self):
"""True if all pseudos are PAW."""
return all(p.ispaw for p in self)
@property
def zlist(self):
"""Ordered list with the atomic numbers available in the table."""
zlist = list(self._pseudos_with_z.keys())
zlist.sort()
return zlist
def iscomplete(self, zmax=118):
"""
        True if the table is complete, i.e. all elements with Z < zmax
        have at least one pseudopotential.
"""
for z in range(1, zmax):
if not self[z]: return False
return True
def pseudos_with_symbol(self, symbol):
"""
        Return the list of pseudopotentials in the table with the given symbol.
        Return an empty list if no pseudo is available.
"""
try:
return getattr(self, str(symbol))
except AttributeError:
#raise
return []
def pseudo_from_name(self, name):
"""Return the pseudo in the table with the given name"""
for pseudo in self:
if pseudo.name == name:
return pseudo
return None
def list_properties(self, *props, **kw):
"""
Print a list of elements with the given set of properties.
Args:
*prop1*, *prop2*, ... : string
Name of the properties to print
*format*: string
Template for displaying the element properties, with one
% for each property.
For example, print a table of mass and density.
from periodictable import elements
elements.list_properties('symbol','mass','density', format="%-2s: %6.2f u %5.2f g/cm^3")
H : 1.01 u 0.07 g/cm^3
He: 4.00 u 0.12 g/cm^3
Li: 6.94 u 0.53 g/cm^3
...
Bk: 247.00 u 14.00 g/cm^3
"""
format = kw.pop('format',None)
assert len(kw) == 0
for pseudo in self:
try:
values = tuple(getattr(pseudo, p) for p in props)
except AttributeError:
# Skip elements which don't define all the attributes
continue
# Skip elements with a value of None
if any(v is None for v in values):
continue
if format is None:
print(" ".join(str(p) for p in values))
else:
try:
print(format % values)
except:
print("format",format,"args",values)
raise
#def print_table(self, stream=sys.stdout, filter_function=None):
# """
# A pretty ASCII printer for the periodic table, based on some filter_function.
# Args:
# filter_function:
# A filtering function that take a Pseudo as input and returns a boolean.
# For example, setting filter_function = lambda el: el.Z_val > 2 will print
# a periodic table containing only pseudos with Z_val > 2.
# """
# for row in range(1, 10):
# rowstr = []
# for group in range(1, 19):
# el = Element.from_row_and_group(row, group)
# if el and ((not filter_function) or filter_function(el)):
# rowstr.append("{:3s}".format(el.symbol))
# else:
# rowstr.append(" ")
# print(" ".join(rowstr))
def sorted(self, attrname, reverse=False):
"""Sort the table according to the value of attribute attrname."""
attrs = []
        for i, pseudo in enumerate(self):
try:
a = getattr(pseudo, attrname)
except AttributeError:
a = np.inf
attrs.append((i, a))
# Sort attrs, and build new table with sorted pseudos.
attrs = sorted(attrs, key=lambda t:t[1], reverse=reverse)
return PseudoTable([self[a[0]] for a in attrs])
def select(self, condition):
"""
Select only those pseudopotentials for which condition is True.
Args:
condition:
Function that accepts a `Pseudo` object and returns True or False.
"""
return PseudoTable([p for p in self if condition(p)])
def with_dojo_report(self):
"""Select pseudos containing the DOJO_REPORT section."""
return self.select(condition=lambda p : p.has_dojo_report)
| 32.88863 | 113 | 0.541806 |
cc7322687d6018880116c480548346284f3eed55 | 279 | py | Python |
optic_store/optic_store/doctype/optical_store_hr_settings_salary_component/optical_store_hr_settings_salary_component.py | iptelephony/optic_store | 9c5e6e2f7170d18ecac8dd53133a62a3250cd834 | ["MIT"] | 14 | 2019-05-14T09:33:58.000Z | 2022-03-19T14:43:36.000Z |
optic_store/optic_store/doctype/optical_store_hr_settings_salary_component/optical_store_hr_settings_salary_component.py | iptelephony/optic_store | 9c5e6e2f7170d18ecac8dd53133a62a3250cd834 | ["MIT"] | 18 | 2019-03-22T19:51:22.000Z | 2020-08-04T13:57:27.000Z |
optic_store/optic_store/doctype/optical_store_hr_settings_salary_component/optical_store_hr_settings_salary_component.py | iptelephony/optic_store | 9c5e6e2f7170d18ecac8dd53133a62a3250cd834 | ["MIT"] | 25 | 2019-05-15T08:31:17.000Z | 2022-02-25T07:21:18.000Z |
# -*- coding: utf-8 -*-
# Copyright (c) 2020, 9T9IT and contributors
# For license information, please see license.txt
from __future__ import unicode_literals
import frappe
from frappe.model.document import Document
class OpticalStoreHRSettingsSalaryComponent(Document):
pass
| 25.363636 | 54 | 0.799283 |
9f41bdda690f5e5f880e2c54515d8fac05de1606 | 4,114 | py | Python |
example/views.py | jouve/django-oauth-toolkit-example | 44e8958a7e962ef3450a5562240c59e66db05bc4 | ["BSD-2-Clause"] | 18 | 2015-03-31T08:08:16.000Z | 2021-07-17T02:01:17.000Z |
example/views.py | jouve/django-oauth-toolkit-example | 44e8958a7e962ef3450a5562240c59e66db05bc4 | ["BSD-2-Clause"] | 1 | 2017-11-04T01:06:54.000Z | 2017-11-04T01:06:54.000Z |
example/views.py | jouve/django-oauth-toolkit-example | 44e8958a7e962ef3450a5562240c59e66db05bc4 | ["BSD-2-Clause"] | 12 | 2015-03-31T05:32:39.000Z | 2021-07-17T02:01:28.000Z |
from django.http import HttpResponse
from django.core.urlresolvers import reverse
from django.views.generic import FormView, TemplateView, View
from oauth2_provider.compat import urlencode
from oauth2_provider.views.generic import ProtectedResourceView
from .forms import ConsumerForm, ConsumerExchangeForm, AccessTokenDataForm
import json
from collections import namedtuple
ApiUrl = namedtuple('ApiUrl', 'name, url')
class ConsumerExchangeView(FormView):
"""
The exchange view shows a form to manually perform the auth token swap
"""
form_class = ConsumerExchangeForm
template_name = 'example/consumer-exchange.html'
def get(self, request, *args, **kwargs):
try:
self.initial = {
'code': request.GET['code'],
'state': request.GET['state'],
'redirect_url': request.build_absolute_uri(reverse('consumer-exchange'))
}
except KeyError:
kwargs['noparams'] = True
form_class = self.get_form_class()
form = self.get_form(form_class)
return self.render_to_response(self.get_context_data(form=form, **kwargs))
class ConsumerView(FormView):
"""
The homepage to access Consumer's functionalities in the case of Authorization Code flow.
It offers a form useful for building "authorization links"
"""
form_class = ConsumerForm
success_url = '/consumer/'
template_name = 'example/consumer.html'
def __init__(self, **kwargs):
self.authorization_link = None
super(ConsumerView, self).__init__(**kwargs)
def get_success_url(self):
url = super(ConsumerView, self).get_success_url()
return '{url}?{qs}'.format(url=url, qs=urlencode({'authorization_link': self.authorization_link}))
def get(self, request, *args, **kwargs):
kwargs['authorization_link'] = request.GET.get('authorization_link', None)
form_class = self.get_form_class()
form = self.get_form(form_class)
return self.render_to_response(self.get_context_data(form=form, **kwargs))
def post(self, request, *args, **kwargs):
self.request = request
return super(ConsumerView, self).post(request, *args, **kwargs)
def form_valid(self, form):
qs = urlencode({
'client_id': form.cleaned_data['client_id'],
'response_type': 'code',
'state': 'random_state_string',
})
self.authorization_link = "{url}?{qs}".format(url=form.cleaned_data['authorization_url'], qs=qs)
return super(ConsumerView, self).form_valid(form)
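    # For illustration (values assumed, parameter order may vary): with
    # authorization_url "http://localhost:8000/o/authorize/" and client_id "abc",
    # form_valid builds
    # "http://localhost:8000/o/authorize/?client_id=abc&response_type=code&state=random_state_string",
    # which get_success_url then hands back to the template as a query parameter.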
class ConsumerDoneView(TemplateView):
"""
If exchange succeeded, come here, show a token and let users use the refresh token
"""
template_name = 'example/consumer-done.html'
def get(self, request, *args, **kwargs):
        # do not show form when url is accessed without parameters
if 'access_token' in request.GET:
form = AccessTokenDataForm(initial={
'access_token': request.GET.get('access_token', None),
'token_type': request.GET.get('token_type', None),
'expires_in': request.GET.get('expires_in', None),
'refresh_token': request.GET.get('refresh_token', None),
})
kwargs['form'] = form
return super(ConsumerDoneView, self).get(request, *args, **kwargs)
class ApiClientView(TemplateView):
"""
TODO
"""
template_name = 'example/api-client.html'
def get(self, request, *args, **kwargs):
from .urls import urlpatterns
endpoints = []
for u in urlpatterns:
if 'api/' in u.regex.pattern:
endpoints.append(ApiUrl(name=u.name, url=reverse(u.name,
args=u.regex.groupindex.keys())))
kwargs['endpoints'] = endpoints
return super(ApiClientView, self).get(request, *args, **kwargs)
class ApiEndpoint(ProtectedResourceView):
def get(self, request, *args, **kwargs):
return HttpResponse('Hello, OAuth2!')
| 35.162393 | 106 | 0.64317 |
5735cb7582c2890d61de88a602a720535f6e7a89 | 57,914 | py | Python |
thorsky/thorskyclasses3.py | onekiloparsec/thorsky | 0c5d4440b0332b5aea75596943cfb1af30fe0956 | ["BSD-2-Clause"] | 18 | 2019-07-29T20:09:34.000Z | 2021-06-25T18:57:19.000Z |
thorsky/thorskyclasses3.py | onekiloparsec/thorsky | 0c5d4440b0332b5aea75596943cfb1af30fe0956 | ["BSD-2-Clause"] | 4 | 2019-07-30T04:05:49.000Z | 2020-07-12T11:06:31.000Z |
thorsky/thorskyclasses3.py | onekiloparsec/thorsky | 0c5d4440b0332b5aea75596943cfb1af30fe0956 | ["BSD-2-Clause"] | 1 | 2020-07-05T02:55:53.000Z | 2020-07-05T02:55:53.000Z |
#!/usr/bin/env python
"""thorskyclasses.py -- classes used for observational
circumstances.
"""
import numpy as np
import astropy.units as u
from astropy.coordinates import SkyCoord, Angle, EarthLocation, CartesianRepresentation
from astropy.coordinates import FK5, PrecessedGeocentric
from astropy.coordinates import solar_system_ephemeris, get_body_barycentric
from astropy.coordinates import get_body, get_moon, get_sun
from astropy.time import Time, TimeDelta
from datetime import datetime
from pytz import timezone
import pytz
import time as ttime
import dateutil.parser
from thorsky.thorskyutil import altazparang,lpsidereal,lpmoon,lpsun,accumoon,min_max_alt,phase_descr
from thorsky.thorskyutil import ha_alt, jd_sun_alt, jd_moon_alt, local_midnight_Time, hrs_up
from thorsky.thorskyutil import ztwilight, lunskybright, true_airmass
from thorsky.thorskyutil import currentgeocentframe, currentFK5frame, thorconsts
from thorsky.thorskyutil import precessmatrix, cel2localmatrix, cel2xyz
from thorsky.thorskyutil import getbrightest, skyproject, angle_between
from thorsky.thorskyutil import time_rounded_to_minute
# import matplotlib.pyplot as plt # for tests
import sys # for tests
import pkgutil # to get fixed data
# import warnings # to filter ERFA warnings about dubious years
# Don't have this working.
# this is slightly modified cribbed code.
#def fxn():
# warnings.warn("dubious", ErfaWarning)
#with warnings.catch_warnings():
# warnings.simplefilter("ignore")
# fxn()
class obs_site : # defaults to MDM. Arguments are all keyworded.
"""
Site from which observations are taken.
This initializes by default to Kitt Peak. In practice it is
usually specified by using an abbreviation from the list of observatories
distributed with the package, 'observatories_rev.dat'.
Other arguments are keyworded.
Notes
-----
The core of this is an EarthLocation, but it also includes time zone
information and a guess at the height of the observatory above surrounding
terrain for adjusting rise and set times for the sun and moon.
"""
def __init__(self, name = "MDM Observatory [Kitt Peak]", loc = (-1996199., -5037542., 3356753.),
tzstr = "America/Phoenix", tzstdabbr = "MST", tzdayabbr = "MDT",
westheight = 700., eastheight = 0.) :
"""Initialize an obs_site.
Parameters
----------
name : str, optional
The full name of the observatory. Defaults to "MDM Observatory [Kitt Peak]"
loc : tuple or list
Geocentric cartesian location, in meters. Default (-1996199., -5037542., 3356753.)
If you need to compute these for a new site, see the documentation for
astropy.coordinates.EarthLocation, in particular the "from_geodetic" method.
tzstr : str
time zone name in format used by unix programs; default "America/Phoenix".
tzstdabbr, tzdayabbr : str
abbreviations for standard and daylight time in this zone; defaults "MST","MDT"
westheight, eastheight :
rough height of observatory above surrounding terrain, in meters: defaults 700. and 0.
Attributes:
name : str
Name of observatory.
location : EarthLocation
EarthLocation of the observation site.
localtz : timezone
Local time zone.
localtzabbrev, localtzdayabbrev :
Abbreviations for standard and daylight time.
risealt, setalt : Angle
elevations at which the sun/moon rises or sets.
axisdist : Quantity in distance units
distance of site from earth's rotation axis
diurnalspeed : Quantity in velocity units
speed of diurnal rotation at this site, for use in velocity corrections.
"""
self.name = name
if isinstance(loc, EarthLocation) :
self.location = loc
elif isinstance(loc,tuple) or isinstance(loc, list) : # geocentric xyz
self.location = EarthLocation.from_geocentric(loc[0], loc[1], loc[2],
unit = u.m) # MDM
self.tzstr = tzstr
self.localtz = timezone(tzstr)
self.localtzabbrev = tzstdabbr # abbrevn for standard time zone
self.localtzdayabbrev = tzdayabbr # abbrevn for daylight time zone
self.height_above_west = westheight * u.m # height of obs over its west horizon
self.height_above_east = eastheight * u.m # height of obs over its east horizon
# compute the altitude at which sunrise and sunset occur, defined as the
# top limb of the sun or moon coinciding with the horizon. In the
# absence of terrain effects, this is conventionally at zenith dist
# 90 degrees 50 minutes. In real life higher precision is not useful
# because atmospheric refraction varies a great deal at such low
# altitudes. Include in the calculation the possibility that the
# observatory is above or below the terrain that forms its horizon,
# using a simple approximation for the depression of the horizon.
# These altitudes also serve for the moon. The angular diameter
# variation is insignificant for this purpose, again because of
# atmospheric refraction.
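        # Rough worked example (assuming an equatorial radius of about 6.378e6 m): for a
        # site 700 m above its western horizon the depression is
        # sqrt(2 * 700 / 6.378e6) rad ~ 0.0148 rad ~ 0.85 deg, so the effective setting
        # altitude becomes roughly -0.83 - 0.85 ~ -1.7 degrees.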
self.risealt = None
self.setalt = None
west_depression = Angle(np.sqrt(2. * ( abs(self.height_above_west)
/ (thorconsts.EQUAT_RAD))) * u.rad)
# print "west_depression",west_depression
if self.height_above_west > 0. :
self.setalt = Angle(-0.833,unit=u.deg) - west_depression
# zd = 90 deg 50 arcmin
elif self.height_above_west <= 0. :
self.setalt = Angle(-0.833,unit=u.deg) + west_depression
east_depression = Angle(np.sqrt(2. * (abs(self.height_above_east)
/ (thorconsts.EQUAT_RAD))) * u.rad)
# print "east_depression",east_depression
if self.height_above_east > 0. :
self.risealt = Angle(-0.833 * u.deg) - east_depression
# zd = 90 deg 50 arcmin
elif self.height_above_east <= 0. :
self.risealt = Angle(-0.833 * u.deg) + east_depression
# print "self.risealt = ",self.risealt
# compute and store the speed of diurnal rotation at this site
# for later use in barycentric velocity correction.
axisdist = np.sqrt(self.location.x ** 2 + self.location.y ** 2)
axisdist = axisdist.to(u.km)
# one sidereal day is about 86164.09 seconds.
self.diurnalspeed = (2. * np.pi * axisdist) / (86164.09 * u.s)
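        # Rough sanity check (illustrative, for the default MDM location): the distance from
        # the rotation axis is sqrt(x**2 + y**2) ~ 5.42e3 km, so the diurnal speed is about
        # 2 * pi * 5.42e3 km / 86164.09 s ~ 0.40 km/s.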
def get_sites() : # reads from a file of site names.
"""Creates a dictionary of obs_site instances from the file of site parameters
provided in the package. Runs at import time and will probably never need to be
called by a user
"""
# inf = open("observatories_rev.dat")
# use pkgutil since I'm packaging this ...
# https://stackoverflow.com/questions/6028000/how-to-read-a-static-file-from-inside-a-python-package
# print('package name:',__name__) # was __package__, which broke in Python 3.
bytestr = pkgutil.get_data(__name__, 'observatories_rev.dat')
    # this was just a file but is now a byte string containing the raw contents of the
    # file.  Turn it into ASCII and cut into lines, and it'll read as if it were a file.
inf = bytestr.decode('ASCII').splitlines()
sitedict = {}
for l in inf :
# print("l = ",l) # diagnostic -- this took some doing!
if l[0] != '#' : # allow commenting out of lines
try :
x = l.split('\t')
y = x[2].split(",") # xyz coordinates in meters as characters
loc = (float(y[0]),float(y[1]),float(y[2]))
sitedict[x[0]] = obs_site(name = x[1], loc = loc, tzstr = x[3], tzstdabbr = x[4],
tzdayabbr = x[5], eastheight = float(x[6]), westheight = float(x[7]))
except :
print("Bad input line in get_sites, may not be tab-separated:")
print(l)
return sitedict
sitedict = get_sites() # reads site dictionary from package data file 'observatories_rev.dat'
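# Typical lookup (illustrative; the keys come from the first column of 'observatories_rev.dat',
# and 'mdm' and 'keck' are keys used elsewhere in this module):
#   kitt = sitedict['mdm']
#   print(kitt.name, kitt.location.lat)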
class Observation :
"""Circumstances of an observation at a given SkyCoord, obs_site, and time.
This is the main class, used to compute time-and-the-sky information.
A minimal use example ::
import astropy.units as u
import thorsky.thorskyutil as tsu
import thorsky.thorskyclasses3 as tsc3
o = tsc3.Observation() # defaults to zenith at default site at present moment
o.computesky() # hour angle, airmass, and more
o.computesunmoon() # note that the order in which these are called is important
o.computequickbary()
o.setnightevents()
o.computeplanets()
o.printnow() # see what you have
o.printnight()
This class has a huge number of attributes. To avoid unneeded computation,
these are mostly left blank unless computed explicitly using the methods
provided.
The order in which you call the methods that compute results is important
since results are cumulative: In sum:
* computesky needs to be first
* computesunmoon, if needed, is second.
* computequickbary depends on computesky but nothing else
* setnightevents depends on both computesky and computesunmoon
* hours_up depends on computesky, computesunmoon, and setnightevents
    * computeplanets depends on computesky and computesunmoon.
These have been split out to save compute time. For example, if you only
need airmass, all you need to run is computesky, but if you want to know
if the moon is up you need to run computesunmoon as well.
"""
def __init__(self,celest = None, t = None, site = None, default_site = 'mdm',
use_local_time = True, ra_unit = u.hourangle, dec_unit = u.deg) :
"""
Parameters :
celest : SkyCoord, or string, or list, or tuple; optional
Celestial location of the observation. Defaults to the zenith for the site and time.
A wide variety of input formats are permissible; see 'setcelest' below.
t : astropy Time, or string, or tuple, or float; optional
Instant at which observation occurs. Default is time at which code is executed.
Also accepts a wide variety of inputs.
site : obs_site, or str; optional
Site on earth from which observations are taken. Usually passed as a key from
the builtin dictionary of sites, but can be a obs_site instance.
Default is 'mdm'.
use_local_time : boolean; optional
Report results in local time; alternative is UT. Default is True.
ra_unit : astropy.unit; optional
unit for ra input; default is u.hourangle, which I wish they'd named something
different since it's a term of art.
dec_unit : astropy.unit; optional
unit for dec input; default is u.deg
Attributes :
site : obs_site
The observer's site.
celest : astropy SkyCoord
celestial position observed.
t : astropy Time
instant in time of the observation.
julyear : float
julian year corresponding to t.
lst : astropy Angle
local sidereal time, computed to about 1 sec bypassing slow astropy routines.
nowfr : astropy coordinate frame
            FK5 frame for equinox of observation ('of date')
hanow : astropy Angle
hour angle (from meridian)
airmass : float
true airmass (not secant-z)
cel_J2000 : astropy SkyCoord
celestial location for J2000.
icrs2now : numpy array
rotation matrix to transform cartesian icrs to present equinox
current2topoxyz : numpy array
rotation matrix to transform current equinox to topocentric XYZ
icrs2topoxyz : numpy array
            matrix product of icrs2now and current2topoxyz; transforms icrs to topocentric XYZ
constel : string
IAU abbreviation of the constellation, e.g. 'UMa'
previouslstmid : astropy Angle
sidereal time of previous night's midnight
moonpos : SkyCoord
topocentric moon celestial position to about 10 arcsec precision in current equinox
moonobjang : astropy Angle
            angular separation between the Observation's celest position and the moon
moonphasedescr : string
human-readable description of the moon phase
moonillumfrac : float
fraction of moon's face that is illuminated (0. to 1.)
moonaltit : astropy Angle
elevation of moon above horizon
        lunsky : float
predicted sky brightness added by the moon in mag per square arcsec
sunpos : astropy SkyCoord
sun position to about 0.01 degrees
sunobjang : astropy Angle
            angular separation between the sun and the object
sunaltit : astropy Angle
elevation of sun center above horizon (no refraction)
sunaz : astropy Angle
azimuth of sun
twi : float
            rough zenith twilight brightness in blue mag per square arcsec
lstmid : astropy Angle
local sidereal time at midnight
tsunset, tsunrise : astropy Time
times of sunset and sunrise on present night
tevetwi, tmorntwi : astropy Time
end and start of astronomical (18-degr) twilight
teve12,tmorn12 : astropy Time
end and start of nautical (12-degr) twilight
tnightcenter : astropy Time
time of lower culmination of the sun
uptime30, uptime20, uptime15 : astropy TimeDelta
            How long the object spends at less than 3, 2, and 1.5 airmasses during the night defined by 12-degree twilight.
barytcorr : astropy TimeDelta
Amount to add to Observation.t to get arrival time at barycenter
baryvcorr : astropy Quantity
velocity to add to shift velocity to solar system barycenter frame
planetdict : Dictionary of astropy SkyCoords
sky positions of planets, keyed by lowercase name
planetmags : Dictionary of floats
apparent magnitudes of planets
"""
# set up the site.
self.setsite(site, default_site = default_site)
# set up the time.
if t == None : # set to system clock if no time specified.
self.t = Time(ttime.time(),format='unix')
else :
self.settime(t,use_local_time = use_local_time)
# set up the celestial coordinates.
if celest != None and celest != 'ZENITH' :
self.setcelest(celest) # many legal formats -- see below.
else : # default to zenith
self.lst = lpsidereal(self.t, self.site.location) # sidereal
self.nowfr = currentFK5frame(self.t)
self.celest = SkyCoord(self.lst,self.site.location.lat,frame=self.nowfr) # zenith
self.cel_J2000 = self.celest.transform_to('icrs')
# print("after xform, self.celest is",self.celest)
self.constel = self.celest.get_constellation(short_name = True)
self.previouslstmid = None
self.moonpos = None # SkyCoord of moon
self.moonobjang = None
self.moonphasedescr = None
self.moonillumfrac = None
self.moonaltit = None
self.moonaz = None
self.lunskybright = None # approximate lunar contrib to sky
# brightness at this location
self.sunpos = None # SkyCoord of sun
self.sunobjang = None
self.sunaltit = None
self.sunaz = None
self.twi = None # rough magnitude difference between dark
# night sky and present blue twilight contrib in zenith.
self.lstmid = None
self.tsunset = None
self.tevetwi = None
self.tnightcenter = None
self.tmorntwi = None
self.tsunrise = None
# self.moonrise = None
# self.sunrise = None
self.barytcorr = None
self.baryvcorr = None
self.planetdict = {} # dictionary of planet SkyCoords by name.
self.planetmags = {} # dictionary of planet visual mags by name.
def setsite(self, site, default_site = 'mdm') :
"""Set the site on earth of the observation.
Parameters
----------
site : obs_site instance or string
if string, it is the key used for the site in the site dictionary.
"""
if isinstance(site, obs_site) :
self.site = site
else :
# sitedict = get_sites()
# print "got sites; ",sitedict.keys()
if site == None :
if default_site != None :
self.site = sitedict[default_site]
# 'backup default' is mdm
else : self.site = sitedict['mdm']
else :
self.site = sitedict[site]
def settime(self, t, use_local_time = True) :
"""set Observation time t using the input 't'
        Note that the instance variable 't' is always stored in UTC.
        If use_local_time, then the input is assumed to be local
zone time (though if t is already a Time instance this is ignored).
Input t can be an ISO string, or a tuple of at least yr, month,
day, hour, and minute.
Note also that if use_local_time is True (default) and the site
switches back and forth from standard to daylight saving time, then
one hour of clock time gets repeated in the autumn, and an hour gets
skipped in the spring, which is tricky if localtime is input. For
example, specifying 2:30 AM Eastern time on the 'spring forward' night
sets to 3:30 AM Eastern Daylight time; 2:30 AM Eastern Standard does
not happen on that night. When Daylight Saving ends, a time in the
ambiguous hour reverts to standard, so there's an hour of time that
        you just can't specify using local time input.
Parameters
----------
t : a Time, or a string, or a tuple
time to which to set Observation attribute t. If not already a Time,
* if a string, can be anything the dateutil parser accepts,e.g."2023-04-17 19:08:23"
* if a tuple, must be (year, month, day, hour, minute, [sec]) as ints; seconds are optional.
use_local_time : boolean
whether input time is interpreted as local or not; default True.
"""
if isinstance(t, Time) :
self.t = t
elif isinstance(t,str) : # generally an ISO string like '2018-07-22 14:13:22'
if use_local_time :
localt = self.site.localtz.localize(dateutil.parser.parse((t)))
self.t = Time(localt)
else :
self.t = Time(dateutil.parser.parse(t))
elif isinstance(t,tuple) or isinstance(t,list) :
if len(t) == 5 :
dtin = datetime(t[0],t[1],t[2],t[3],t[4])
else :
dtin = datetime(t[0],t[1],t[2],t[3],t[4],t[5])
if use_local_time :
# print "using local"
localt = self.site.localtz.localize(dtin)
self.t = Time(localt)
else :
# print "not using local"
self.t = Time(dtin)
elif isinstance(t, float) :
self.t = Time(t,format = 'jd')
# Keep a 'pure number' version of the julian epoch.
self.julyear = 2000. + (self.t - thorconsts.J2000_Time).jd / 365.25
def advancetime(self, delta, forward = True) :
"""Advance Observation.t by the specified amount.
Parameters
----------
delta : Amount to advance the time, as an astropy TimeDelta, or a string
such as "143 s" or "5 d" (see below), or a float in sec.
forward : boolean : if True, go forward; if False, go back.
When specifying the interval with a string, the codes for units are
* s - second
* m - minute
* h - hour
* t - 1 sidereal day (transit-to-transit)
* d - 1 solar day
* w - one week of solar days
        * l - 30 days (a rough 'lunation')
        * y - 365 days (a rough year)
"""
if isinstance(delta, TimeDelta) :
if forward :
self.t = self.t + delta
else :
self.t = self.t - delta
elif isinstance(delta, str) : # e.g., "123. or 2. d"
# codes for time delta intervals, and their values in seconds.
# non-obvious: 't' for 'transit' is 1 sidereal day,
# 'w' is a week, 'l' is exactly 30 days -- a very rough 'lunation' that
# will land on the same time of night about one month later -- and
# 'y' is a 365-day 'year', again because it's more likely you want to
# land on the same time of night next year than keep track over many years.
t_unit_dict = {'s' : 1., 'm' : 60., 'h' : 3600., 't' : 86164.0905352,
'd' : 86400, 'w' : 604800., 'l' : 2592000., 'y' : 31536000.}
x = delta.split()
try :
deltafloat = float(x[0])
if len(x) > 1 :
deltafloat = deltafloat * t_unit_dict[x[1][0]]
dt = TimeDelta(deltafloat, format = 'sec')
if forward :
self.t = self.t + dt
else :
self.t = self.t - dt
except :
print("Bad time step string.")
elif isinstance(delta, float) : # float defaults to seconds.
dt = TimeDelta(delta, format = 'sec')
if forward :
self.t = self.t + dt
else :
self.t = self.t - dt
else :
print("Time step must be a float, an astropy TimeDelta or a string.")
self.julyear = 2000. + (self.t - thorconsts.J2000_Time).jd / 365.25
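        # Illustrative calls (assuming an Observation instance named o):
        #   o.advancetime("1 d")                   # forward one solar day
        #   o.advancetime("2 t", forward=False)    # back two sidereal days
        #   o.advancetime(3600.)                   # a bare float is interpreted as seconds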
def setcelest(self,celestin,ra_unit = u.hourangle, dec_unit = u.deg) :
"""Sets the celestial coords.
Input can be a SkyCoord instance, a list or tuple of (ra,dec) or
(ra,dec,equinox), or a character string.
Parameters
----------
celestin : Skycoord, or list/tuple, or string. Celestial coordinates to set. See below.
ra_unit : astropy unit; input unit for RA, defaults to u.hourangle (i.e, hrs min sec)
dec_unit : astropy unit; input unit for dec, defaults to u.deg
If celestin is a string, some permissible formats are
* '18:22:22.3 -0:18:33' (defaults to ICRS or basically J2000)
* '18:22:22.3 -0:18:33 2015'
* '18 22 22.3 -0 18 33'
* '18 22 22.3 -0 18 33 2015'
Input units only apply if celestin is not already a SkyCoord.
"""
if isinstance(celestin, SkyCoord) : # if it's a SkyCoord, just copy it.
self.celest = celestin
elif isinstance(celestin, tuple) or isinstance(celestin, list) : #
if len(celestin) == 2 :
self.celest = SkyCoord(celestin[0],celestin[1], unit=(ra_unit, dec_unit)) # parse a tuple
elif len(celestin) == 3 :
# print("celestin[2] = ",celestin[2])
eq = float(celestin[2])
if eq == 2000. :
self.celest = SkyCoord(celestin[0],celestin[1],unit = (ra_unit, dec_unit))
else :
eq = "J%7.2f" % (eq)
self.celest = SkyCoord(celestin[0],celestin[1],unit = (ra_unit, dec_unit),frame = FK5(equinox = eq))
elif isinstance(celestin, str) : # str includes unicode in python3.
pieces = celestin.split()
if len(pieces) >= 6 : # space-separated fields - glue back together.
rastr = pieces[0] + ":" + pieces[1] + ":" + pieces[2]
decstr = pieces[3] + ":" + pieces[4] + ":" + pieces[5]
if len(pieces) == 7 : # if there's an equinox ...
eqstr = pieces[6]
else : eqstr = '2000.'
else : # e.g. 21:29:36.2 so split gets ra and dec separately
rastr = pieces[0]
decstr = pieces[1]
if len(pieces) > 2 : eqstr = pieces[2] # if there's an equinox ...
else : eqstr = '2000.'
# print "rastr decstr eqstr",rastr,decstr,eqstr
if float(eqstr) == 2000. : # default to ICRS if 2000.
self.celest = SkyCoord(rastr,decstr,unit = (ra_unit, dec_unit))
else : # or set in FK5 frame of date if not 2000.
eq = "J"+eqstr
self.celest = SkyCoord(rastr,decstr,unit = (ra_unit, dec_unit),frame = FK5(equinox = eq))
# print(" ************ IN SETCELEST ************ ")
# print( "self.celest:",self.celest)
# print( "frame.name:",self.celest.frame.name)
if self.celest.frame.name == 'icrs' or self.celest.frame.name == 'gcrs' :
self.cel_J2000 = self.celest
elif self.celest.frame.name == 'precessedgeocentric' :
self.cel_J2000 = self.celest.transform_to('gcrs')
else :
self.cel_J2000 = self.celest.transform_to('icrs')
# print "self.cel_J2000:",self.cel_J2000
def computesky(self, redo_coords = True) :
"""Compute basic observational circumstances.
Quantities computed include lst, current-equinox coords,
        hour angle, alt, az, parallactic angle, midnight, and LST at midnight.  Precession to
current equinox can be suppressed for speed.
Parameters
----------
redo_coords : boolean, defaults to true. If False, turns off precession.
"""
# current equinox etc., useful for repeat calls tracing out a single
# night
# Also, if redo_coords is on, computes rotation matrices to
# take celestial cartesian coords in ICRS directly into the
# observer's topocentric frame. This speeds up such things
# as the star display by a large factor.
# print("entering computesksy: self.celest = ", self.celest)
self.lst = lpsidereal(self.t, self.site.location) # sidereal
# use these to test for 'this is the same night.'
self.midnight = local_midnight_Time(self.t,self.site.localtz)
self.lstmid = lpsidereal(self.midnight,self.site.location)
if redo_coords :
if self.celest.frame.name == 'gcrs' or self.celest.frame.name == 'precessedgeocentric' :
self.nowfr = currentgeocentframe(self.t)
else : self.nowfr = currentFK5frame(self.t)
# print("self.nowfr = ",self.nowfr)
self.celnow = self.celest.transform_to(self.nowfr)
self.hanow = (self.lst - self.celnow.ra).wrap_at(12.*u.hour) # hour angle
(self.altit, self.az, self.parang) = altazparang(self.celnow.dec, self.hanow,
self.site.location.lat)
self.airmass = true_airmass(self.altit) # polynomial expansion.
if redo_coords :
self.constel = self.celest.get_constellation(short_name = True)
# compute some convenient rotation matrices:
# compute matrix for precession from J2000 (i.e., icrs almost exactly)
# to now, for later use.
self.icrs2now = precessmatrix(thorconsts.J2000_Time,self.t)
# and matrix to rotate a current-equinox celestial xyz into topocentric
self.current2topoxyz = cel2localmatrix(self.lst, self.site.location.lat)
# and matrix product to rotate icrs into topocentric xyz.
self.icrs2topoxyz = self.current2topoxyz.dot(self.icrs2now) # matrix product
# If we're in the south, rotate by 180 degrees around vertical axis
# to invert display. Simply negate top two rows of the matrix.
if self.site.location.lat < 0. * u.deg :
self.icrs2topoxyz = np.array([[-1],[-1],[1]]) * self.icrs2topoxyz
# print("leaving computesksy: self.celest = ", self.celest)
def computebary(self) : # this is a bit expensive so split off.
"""computes and set bary corrections using astropy routines. Slow; nearly all users will want 'computequickbary'.
"""
self.baryvcorr = self.celest.radial_velocity_correction(obstime = self.t,
location = self.site.location).to(u.km / u.second)
# print "baryvcorr: ", self.baryvcorr.to(u.km/u.s)
# print type(self.baryvcorr.to(u.km/u.s))
self.barytcorr = self.t.light_travel_time(self.celnow, kind='barycentric',
location = self.site.location, ephemeris = 'builtin')
self.tbary = self.t + self.barytcorr
# print "barytcorr: ", self.barytcorr.to(u.s)
# print type(self.barytcorr.to(u.s))
def computequickbary(self) :
"""compute and set barycentric corrections to 0.1s and 10m/s. Reasonably fast.
"""
# The call to computebary is very expensive in
# order to get ultimate accuracy. When less precision is
# required it should be possible to make it much faster by
# computing the corrections directly.
# The goal is to get something like 0.1 sec in light time, and
# 10 meters per second in velocity.
earthpos = get_body_barycentric('earth',self.t)
# astropy doesn't seem to offer a velocity function.
# For moderate precision, the simplest numerical derivative works:
x1 = get_body_barycentric('earth', self.t - (500. * u.s))
x2 = get_body_barycentric('earth', self.t + (500. * u.s))
earthvel = (x2 - x1) / (1000. * u.s)
# ignore geographic correction to light time delay -- it's
# 21 msec for a source at the zenith.
# nice to have a builtin dot product function for this!
extra = earthpos.dot(self.cel_J2000.cartesian) # extra distance to barycenter
tcorr = extra / ((299792.458 * u.km) / (1.0 * u.s))
self.barytcorr = TimeDelta(tcorr) # needs to be a TimeDelta
self.tbary = self.t + self.barytcorr # this is the right sign!
# earth-rotation part of velocity correction
diurnalvel = CartesianRepresentation(
-1. * self.site.diurnalspeed * np.sin(self.lst),
self.site.diurnalspeed * np.cos(self.lst),
self.site.diurnalspeed * 0.)
# totalvel should be earth orbit + earth rotation velocity wrt barycenter
totalvel = earthvel + diurnalvel
self.baryvcorr = totalvel.dot(self.cel_J2000.cartesian).to(u.km/u.s)
# print("self.barytcorr ",self.barytcorr,"self.baryvcorr",self.baryvcorr)
def computesunmoon(self) :
"""Compute and set the many moon and sun quantities."""
self.lst = lpsidereal(self.t, self.site.location)
self.moonpos, self.moondist = accumoon(self.t,self.site.location)
self.moonha = self.lst - self.moonpos.ra
(self.moonaltit, self.moonaz, parang) = altazparang(self.moonpos.dec, self.moonha, self.site.location.lat)
self.sunpos = lpsun(self.t)
self.sunha = self.lst - self.sunpos.ra
(self.sunaltit, self.sunaz, parang) = altazparang(self.sunpos.dec, self.sunha, self.site.location.lat)
self.twi = ztwilight(self.sunaltit)
self.sunmoonang = self.sunpos.separation(self.moonpos)
self.moonillumfrac = 0.5 * (1. - np.cos(self.sunmoonang))
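        # e.g. a sun-moon separation of 90 degrees gives an illuminated fraction of 0.5
        # (quarter moon), and 180 degrees gives 1.0 (full moon).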
self.moonobjang = self.celnow.separation(self.moonpos)
(self.moonphasedescr, self.lunage, self.lunation) = phase_descr(self.t.jd)
# print "age %f lunation %d" % (self.lunage, self.lunation)
# print "moon illum frac:",self.moonillumfrac
# print "moon-obj ang:", self.moonobjang
# print "moon altit",self.moonaltit,"obj altit",self.altit
self.lunsky = lunskybright(self.sunmoonang,self.moonobjang,thorconsts.KZEN,self.moonaltit,
self.altit,self.moondist, self.sunaltit)
# print "lunsky: ",self.lunsky
# print "lst",self.lst
# print "moon",self.moonpos
# print "moon ha, alt, az", self.moonha, self.moonaltit, self.moonaz
# print "sun",self.sunpos
# print "sun ha, alt, az", self.sunha, self.sunaltit, self.sunaz
# print "twilight diff: " , self.twi
def computeplanets(self) :
"""compute and store planetary positions and magnitudes."""
# print "starting planets"
planetlist = ['mercury','venus','mars','jupiter','saturn','uranus','neptune']
# to get magnitudes need to get sun and earth positions too
sunbary = get_body_barycentric('sun',self.t)
# print 'sunbary:'
# print sunbary
earthbary = get_body_barycentric('earth',self.t)
# print 'earthbary:'
# print earthbary
for p in planetlist :
# get celestial position of planet
self.planetdict[p] = get_body(p,self.t)
# print "get_body gives",p, self.planetdict[p]
# This returns a position in "GCRS: which is earth-centered.
# now get sun-centered location (slightly different from bary) to get
# magnitude.
pbary = get_body_barycentric(p,self.t)
psun = sunbary - pbary # vector from sun to planet
psundist = np.sqrt(psun.x ** 2 + psun.y ** 2 + psun.z ** 2) # modulus
pearth = earthbary - pbary # vector from planet to earth
pearthdist = np.sqrt(pearth.x ** 2 + pearth.y ** 2 + pearth.z ** 2)
# for inner planets, use polynomials for the phase angle dependence,
# which is not at all trivial.
if p == 'mercury' or p == 'venus' or p == 'mars' :
# angle between sun and earth as viewed from planet
phaseang = angle_between(psun,pearth)
# print "phaseang:",phaseang
phasefac = np.polyval(thorconsts.PLANETPHASECOEFS[p],phaseang.value)
# print "phasefac:",phasefac
self.planetmags[p] = phasefac + 5. * np.log10(psundist.to(u.AU).value * pearthdist.to(u.AU).value)
# print "mag:",self.planetmags[p]
# outer planets are close enough to phase zero all the time to ignore the phase angle.
else :
phasefac = thorconsts.PLANETPHASECOEFS[p]
self.planetmags[p] = phasefac + 5. * np.log10(psundist.to(u.AU).value * pearthdist.to(u.AU).value)
# print "mag:",self.planetmags[p]
# saturn will not be good because there's no ring-tilt dependence factored in.
fr = currentgeocentframe(self.t)
#print "frame attributes:",fr.get_frame_attr_names()
# print """
#after transformation:
# """
# we want these in equinox of date for plotting. They'll be
# converted back to J2000 for the table when the coordinates are loaded
# into the observation instance.
for p in planetlist :
self.planetdict[p] = self.planetdict[p].transform_to(fr)
# print p, self.planetdict[p].to_string('hmsdms')
# print "ending planets"
def setnightevents(self) :
"""Compute the events (sunset etc) for a single night. """
# self.midnight also computed in computesky, but it's cheap.
self.midnight = local_midnight_Time(self.t,self.site.localtz)
self.lstmid = lpsidereal(self.midnight,self.site.location)
# if you're looking at the same night, from the same site,
# lst mid will be the same. Don't bother with the calculation
if self.previouslstmid != None :
if abs(self.previouslstmid - self.lstmid) < 0.001 * u.deg :
# print "no night event calc'n -- same."
return
sunmid = lpsun(self.midnight)
# sunrise and sunset altitudes are complicated and initialized
# with the site for efficiency's sake. Twilight altitude is
# fixed at -18 degrees, so not costly to set it here.
twialt = Angle(-18.,unit=u.deg)
twialt12 = Angle(-12.,unit=u.deg)
# Compute sunset, sunrise, and twilight times.
# Start by computing the approximate hour angles at which these
# occur, for the dec that the sun has at midnight.
# for this purpose, wrap hour angles at noon so that all for a given night
# are positive
# Find hour angle at which the dec of the sun (evaluated at
# local midnight) rises or sets
sunsetha = ha_alt(sunmid.dec,self.site.location.lat,self.site.setalt)
# If the dec of the sun never rises or sets -- possible in the arctic -- set
# flags for later use; "0" is normal, it rises and sets, "1" is it's always
# up (midnight sun) and "-1" is it never rises.
if sunsetha > (500. * u.rad) :
self.sunsetflag = 1 # sun always up
elif sunsetha < (-500. * u.rad) :
self.sunsetflag = -1 # sun never rises or sets
else : self.sunsetflag = 0
sunriseha = Angle(2. * np.pi, unit = u.rad) - ha_alt(sunmid.dec,self.site.location.lat,self.site.risealt)
twilightha = ha_alt(sunmid.dec,self.site.location.lat,twialt) # positive, correct for evening
# print "sunsetha, sunriseha, twilightha",sunsetha,sunriseha,twilightha
# Again, set flag in case twilight never ends (high-latitude summer) or the sun doesn't get
# higher than -18 degrees
if twilightha > (500. * u.rad) :
self.twilightflag = 1 # never gets dark
elif twilightha < (-500. * u.rad) :
self.twilightflag = -1 # fully dark all night (only happens near pole and solstice)
else : self.twilightflag = 0
# do the same for 12-degree twilight; with the sun between 12 and 18 degrees below the horizon the
        # sky is fairly dark and one can work on brighter objects, standard stars, and so on.
twilight12ha = ha_alt(sunmid.dec,self.site.location.lat,twialt12) # positive, correct for evening
if twilight12ha > (500. * u.rad) :
self.twilight12flag = 1 # never gets dark
elif twilight12ha < (-500. * u.rad) :
self.twilight12flag = -1 # fully dark all night (only happens near pole and solstice)
else : self.twilight12flag = 0
hasunmid = (self.lstmid - sunmid.ra).wrap_at(24. * u.hour)
#print "hasunmid:",hasunmid
#print "midnight",self.midnight
self.tnightcenter = self.midnight - TimeDelta(hasunmid.hour / 24. - 0.5, format = 'jd')
#self.lstnightcenter = lpsidereal(self.tnightcenter,self.site.location)
#print 'night center',self.nightcenter
if self.sunsetflag == 0 : # if dec of sun is such that sunset and sunrise occur
sunsetguess = hasunmid - sunsetha # hour angle difference from sun's posn at midnight
sunriseguess = sunriseha - hasunmid
if self.twilightflag == 0 :
evetwiguess = hasunmid - twilightha
morntwiguess = Angle(2.*np.pi, unit=u.rad) - twilightha - hasunmid
if self.twilight12flag == 0 :
evetwi12guess = hasunmid - twilight12ha
morntwi12guess = Angle(2.*np.pi, unit=u.rad) - twilight12ha - hasunmid
#print "sunsetguess, sunriseguess",sunsetguess,sunriseguess.hour
#print "eve, morn",evetwiguess,morntwiguess.hour
# convert to time differences
if self.sunsetflag == 0 :
TDsunset = TimeDelta(sunsetguess.hour / 24., format = 'jd')
TDsunrise = TimeDelta(sunriseguess.hour / 24., format = 'jd')
#print "tdsunset, tdsunrise",TDsunset,TDsunrise
if self.twilightflag == 0 :
TDevetwi = TimeDelta(evetwiguess.hour / 24., format = 'jd')
TDmorntwi = TimeDelta(morntwiguess.hour / 24., format = 'jd')
#print "TDeve, TDmorn",TDevetwi,TDmorntwi
if self.twilight12flag == 0 :
TDevetwi12 = TimeDelta(evetwi12guess.hour / 24., format = 'jd')
TDmorntwi12 = TimeDelta(morntwi12guess.hour / 24., format = 'jd')
# form into times and iterate to accurate answers.
if self.sunsetflag == 0 :
self.tsunset = self.midnight - TDsunset # first approx
#print "first approx",self.tsunset
self.tsunset = jd_sun_alt(self.site.setalt, self.tsunset, self.site.location)
self.tsunrise = self.midnight + TDsunrise # first approx
#print "first approx",self.tsunrise
self.tsunrise = jd_sun_alt(self.site.risealt, self.tsunrise, self.site.location)
if self.twilightflag == 0 :
self.tevetwi = self.midnight - TDevetwi
self.tevetwi = jd_sun_alt(twialt, self.tevetwi, self.site.location)
self.tmorntwi = self.midnight + TDmorntwi
self.tmorntwi = jd_sun_alt(twialt, self.tmorntwi, self.site.location)
if self.twilight12flag == 0 :
self.tevetwi12 = self.midnight - TDevetwi12
self.tevetwi12 = jd_sun_alt(twialt12, self.tevetwi12, self.site.location)
#self.lsteve12 = lpsidereal(self.tevetwi12,self.site.location)
self.tmorntwi12 = self.midnight + TDmorntwi12
self.tmorntwi12 = jd_sun_alt(twialt12, self.tmorntwi12, self.site.location)
#self.lstmorn12 = lpsidereal(self.tmorntwi12,self.site.location)
# and, moonrise and set times for that night.
moonmid = lpmoon(self.midnight, self.site.location)
hamoonmid = self.lstmid - moonmid.ra
hamoonmid.wrap_at(12. * u.hour, inplace = True)
#print "moon at midnight",moonmid
#print "hamoonmid: ",hamoonmid.hour, 'hr'
roughlunarday = TimeDelta(1.0366, format = 'jd')
moonsetha = ha_alt(moonmid.dec,self.site.location.lat,self.site.setalt)
# Using the midnight position of the moon to assess whether it actually
# rises or sets in a 12-hour window around that time is problematic,
# since the moon's dec can move pretty quickly. This is a rare 'corner
# case' that only matters at very high latitudes so I'm not going to worry
# about it too much.
if moonsetha > (500. * u.rad) :
self.moonsetflag = 1 # moon always up
elif moonsetha < (-500. * u.rad) :
self.moonsetflag = -1 # moon always below horizon
else : self.moonsetflag = 0
moonsetdiff = moonsetha - hamoonmid # how far from setting at midnight
# find nearest setting point
if moonsetdiff.hour >= 12. : moonsetdiff = moonsetdiff - Angle(24. * u.hour)
if moonsetdiff.hour < -12. : moonsetdiff = moonsetdiff + Angle(24. * u.hour)
TDmoonset = TimeDelta(moonsetdiff.hour / 24., format = 'jd')
self.tmoonset = self.midnight + TDmoonset
#print "moonset first approx:",self.tmoonset
#print "aiming for set alt = ",self.site.setalt
self.tmoonset = jd_moon_alt(self.site.setalt, self.tmoonset, self.site.location)
#print "moonset: ",self.tmoonset # .to_datetime(timezone = localtzone)
moonriseha = -1. * ha_alt(moonmid.dec,self.site.location.lat,self.site.risealt) # signed
        moonrisediff = moonriseha - hamoonmid  # how far from the rising point at midnight
        # find nearest rising point
if moonrisediff.hour >= 12. : moonrisediff = moonrisediff - Angle(24. * u.hour)
if moonrisediff.hour < -12. : moonrisediff = moonrisediff + Angle(24. * u.hour)
TDmoonrise = TimeDelta(moonrisediff.hour / 24., format = 'jd')
self.tmoonrise = self.midnight + TDmoonrise
#print "moonrise first approx:",self.tmoonrise
#print "aiming for rise alt = ",self.site.risealt
self.tmoonrise = jd_moon_alt(self.site.risealt, self.tmoonrise, self.site.location)
#print "moonrise: ",self.tmoonrise # .to_datetime(timezone = localtzone)
# Save this to avoid re-doing unnecessarily. If lstmid is exactly the same,
# then the night and location are almost certainly unchanged.
self.previouslstmid = self.lstmid
def compute_hours_up(self) : # involves night events but is specific to this object.
"""compute how long an object is up during the night, defined by
12-degree (or nautical) twilight. This requires setnightevents
        to have been run previously. Computes how long the object spends at less
        than 3.0, 2.0, and 1.5 airmasses."""
# this requires setnightevents to have been run for the same night.
minalt, maxalt = min_max_alt(self.site.location.lat, self.celnow.dec)
if self.twilight12flag == 0 :
self.ha_mid = (self.lstmid - self.celnow.ra).wrap_at(12. * u.hour)
#print("self.ha_mid.value",self.ha_mid.value)
#print("self.ha_mid.hourangle",self.ha_mid.hourangle)
deltattran = TimeDelta(self.ha_mid.hourangle / 24., format = 'jd') / 1.0027379093
self.ttransit = self.midnight - deltattran
#print("self.midnight = ",self.midnight)
#print("self.ttransit = ",self.ttransit)
if minalt < thorconsts.ALT30 and maxalt > thorconsts.ALT30 :
# if this dec passes through 3 airmasses
ha30 = ha_alt(self.celnow.dec,self.site.location.lat,thorconsts.ALT30)
dt30 = TimeDelta(ha30.hourangle / 24., format = 'jd') / 1.0027379093
jd30_1 = self.ttransit - dt30 # Time of rise through 3 airmasses
jd30_2 = self.ttransit + dt30 # Time of set past 3 airmasses
# print("jd30_1 = ",jd30_1)
# print("jd30_2 = ",jd30_2)
self.uptime30 = hrs_up(jd30_1,jd30_2,self.tevetwi12,self.tmorntwi12)
elif minalt > thorconsts.ALT30 : self.uptime30 = (self.tmorntwi12 - self.tevetwi12)
elif maxalt < thorconsts.ALT30 : self.uptime30 = thorconsts.ZERO_TIMEDELTA
#print("time above 3 airm", self.uptime30)
if minalt < thorconsts.ALT20 and maxalt > thorconsts.ALT20 :
# if it passes through 2 airmass
ha20 = ha_alt(self.celnow.dec,self.site.location.lat,thorconsts.ALT20)
dt20 = TimeDelta(ha20.hourangle / 24., format = 'jd') / 1.0027379093
jd20_1 = self.ttransit - dt20
jd20_2 = self.ttransit + dt20
self.uptime20 = hrs_up(jd20_1,jd20_2,self.tevetwi12,self.tmorntwi12)
elif minalt > thorconsts.ALT20 : self.uptime20 = (self.tmorntwi12 - self.tevetwi12)
elif maxalt < thorconsts.ALT20 : self.uptime20 = thorconsts.ZERO_TIMEDELTA
#print("time above 2 airm", self.uptime20)
if minalt < thorconsts.ALT15 and maxalt > thorconsts.ALT15 :
# if it passes through 1.5 airmasses
ha15 = ha_alt(self.celnow.dec,self.site.location.lat,thorconsts.ALT15)
dt15 = TimeDelta(ha15.hourangle / 24., format = 'jd') / 1.0027379093
jd15_1 = self.ttransit - dt15
jd15_2 = self.ttransit + dt15
self.uptime15 = hrs_up(jd15_1,jd15_2,self.tevetwi12,self.tmorntwi12)
elif minalt > thorconsts.ALT15 : self.uptime15 = (self.tmorntwi12 - self.tevetwi12)
elif maxalt < thorconsts.ALT15 : self.uptime15 = thorconsts.ZERO_TIMEDELTA
#print("time above 1.5 airm", self.uptime15)
def printnow(self, use_slow_bary = False) :
"""Compute the instantaneous circumstances to ensure they're current and
print out a nicely formatted display."""
# first ensure that they're up to date ...
self.computesky()
self.computesunmoon()
self.computeplanets()
if use_slow_bary :
self.computebary()
else :
self.computequickbary()
print(" ")
print("Site : %s; E longit = %s, lat = %s" % (self.site.name,
self.site.location.lon.to_string(unit = u.deg, sep=' '),
self.site.location.lat.to_string(unit = u.deg, sep=' ')))
print(" ")
print(" J2000: %s %s (in %s)" %
(self.cel_J2000.ra.to_string(unit = u.hourangle, sep = ' ',
precision=2, pad = True),
self.cel_J2000.dec.to_string(sep = ' ',precision = 1, pad = True,
alwayssign = True),
self.constel))
eqout = self.t
eqout.format = "jyear_str"
print("%s : %s %s" % (eqout.value,
self.celnow.ra.to_string(unit = u.hourangle, sep = ' ',
precision=2, pad = True), self.celnow.dec.to_string(sep = ' ',precision = 1,
pad = True, alwayssign = True)))
print(" ")
ut = self.t.to_datetime()
local = self.t.to_datetime(timezone = self.site.localtz)
# localdow = dows[datetime.weekday()]
print("UT date and time : %s JD %s " % (ut.strftime("%a %Y-%m-%d %H:%M:%S"), self.t.jd))
print("local date and time : %s" % (local.strftime("%a %Y-%m-%d %H:%M:%S")))
print(" ")
print("Local mean sidereal time: %s " % (self.lst.to_string(unit = u.hourangle, sep = ' ',
precision = 0)))
print(" ")
parang_opposite = self.parang + Angle(180 * u.deg)
parang_opposite.wrap_at(180. * u.deg)
print("Hour angle: %s AltAz: %5.1f, %6.1f Parallactic: %4.1f [%4.1f]" % \
(self.hanow.to_string(unit = u.hourangle, sep = ' ', precision = 0, pad = True,
alwayssign = True),
self.altit.deg, self.az.deg, self.parang.deg, parang_opposite.deg))
if self.altit < 0. : print("Below horizon.")
elif self.airmass > 10. : print("Airmass > 10.")
else : print("Airmass: %6.3f" % (self.airmass))
print(" ")
print("Moon: %s Alt,Az %4.1f, %4.1f" % (self.moonphasedescr,
self.moonaltit.deg, self.moonaz.deg))
if self.moonaltit > 0. : # give more detail on the moon if it's up.
print("Moon ra and dec: %s %s (%s)" % (
self.moonpos.ra.to_string(unit = u.hourangle, sep = ' ',
precision=0,pad=True), self.moonpos.dec.to_string(sep = ' ',fields = 2,
pad = True, alwayssign = True), eqout.value))
print("Illum. fract : %5.3f Moon-obj ang: %5.1f deg" % (self.moonillumfrac,
self.moonobjang.deg))
if self.lunsky != 99. :
print("Lunar sky brightness %4.1f mag/sq arcsec" % (self.lunsky))
else : print("The moon is down. ")
print(" ")
if self.sunaltit.deg < -18. :
print("The sun is down; there is no twilight.")
elif self.sunaltit.deg < 0. :
print("In twilight; sky %4.1f mag brighter than dark sky." % self.twi)
else :
print("THE SUN IS UP.")
print("Sun RA and dec: %s %s (%s); AltAz %4.1f, %5.1f" % (
self.sunpos.ra.to_string(unit = u.hourangle, sep = ' ',
precision=1, pad = True), self.sunpos.dec.to_string(sep = ' ',precision = 0,
pad = True, alwayssign = True),
eqout.value, self.sunaltit.deg, self.sunaz.deg))
print(" ")
print("Barycentric corrns: add %7.2f sec and %6.2f km/s to observed." % \
(self.barytcorr.to(u.s).value,self.baryvcorr.to(u.km/u.s).value))
print("Barycentric JD (UTC system): %14.5f." % (self.tbary.jd))
def printnight(self, use_local_time = True) :
"""Print a nicely formatted display of the rise/set times etc.."""
self.setnightevents()
if use_local_time :
tz = self.site.localtz
print("Night events; times listed are local.\n")
else :
tz = None
print("Night events; times listed are UT.\n")
sunset = self.tsunset.to_datetime(timezone = tz)
print(" Sunset: %s" % (time_rounded_to_minute(sunset, incl_date = True, incl_day = True)))
endtwi = self.tevetwi.to_datetime(timezone = tz)
print(" Twilight Ends: %s" % (time_rounded_to_minute(endtwi, incl_date = True, incl_day = True)))
nghtctr = self.tnightcenter.to_datetime(timezone = tz)
print("Center of Night: %s" % (time_rounded_to_minute(nghtctr, incl_date = True, incl_day = True)))
begtwi = self.tmorntwi.to_datetime(timezone = tz)
print("Twilight Begins: %s" % (time_rounded_to_minute(begtwi, incl_date = True, incl_day = True)))
sunrise = self.tsunrise.to_datetime(timezone = tz)
print(" Sunrise: %s" % (time_rounded_to_minute(sunrise, incl_date = True, incl_day = True)))
print(" ")
moonrise = self.tmoonrise.to_datetime(timezone = tz)
moonset = self.tmoonset.to_datetime(timezone = tz)
if self.tmoonrise < self.tmoonset :
print(" Moonrise: %s" % (time_rounded_to_minute(moonrise, incl_date = True, incl_day = True)))
print(" Moonset: %s" % (time_rounded_to_minute(moonset, incl_date = True, incl_day = True)))
else :
print(" Moonset: %s" % (time_rounded_to_minute(moonset, incl_date = True, incl_day = True)))
print(" Moonrise: %s" % (time_rounded_to_minute(moonrise, incl_date = True, incl_day = True)))
if __name__ == "__main__" :
# sitedict = get_sites()
# obsite = sitedict['mdm']
# obgeo = obsite.location.to_geodetic()
# print obgeo
# cel = SkyCoord("21:29:36.2 -47:04:08",unit = (u.hourangle, u.deg), frame = 'icrs')
# print "year, month, day, hr, min"
#
# x = raw_input().split()
# year = int(x[0])
# month = int(x[1])
# day = int(x[2])
# hr = int(x[3])
# minute = int(x[4])
#
# dt = obsite.localtz.localize(datetime(year,month,day,hr,minute,0))
# t = Time(dt)
# print t.jd
#
# dt2 = obsite.localtz.localize(datetime(2000,1,1,0,0,0))
# t2 = Time(dt2)
# print t2.jd
#
print("ra dec: ")
    x = input()
cel = SkyCoord(x,unit = (u.hourangle, u.deg), frame = 'icrs')
o = Observation(celest = cel, t = "2018-07-22T23:00:00", use_local_time = True,
site = None, default_site = 'keck')
obgeo = o.site.location.to_geodetic()
print(obgeo)
print( o.celest)
print( o.t)
print( "lst: ",o.lst)
print( "celnow: ",o.celnow)
print( "hanow : ",o.hanow)
print( "alt, az, parang ",o.altit, o.az, o.parang)
o.setnightevents()
print("sunset: ", o.tsunset.to_datetime(timezone = o.site.localtz))
print("eve twi: ",o.tevetwi.to_datetime(timezone = o.site.localtz))
print("night ctr:",o.tnightcenter.to_datetime(timezone = o.site.localtz))
print("morn twi:",o.tmorntwi.to_datetime(timezone = o.site.localtz))
print("sunrise: ",o.tsunrise.to_datetime(timezone = o.site.localtz))
print("moonset: ",o.tmoonset.to_datetime(timezone = o.site.localtz))
print("moonrise:",o.tmoonrise.to_datetime(timezone = o.site.localtz))
o.computesunmoon()
o.computeplanets()
#print "c21:"
#print c2l
celnowxyz = cel2xyz(o.celnow)
#print "celnowxyz:"
#print celnowxyz
topoxyz = o.current2topoxyz.dot(celnowxyz)
#print "topoxyz:"
#print topoxyz
# print "topoxyz[0]",topoxyz[0]
#az = np.arctan2(topoxyz[0],topoxyz[1]) * thorconsts.DEG_IN_RADIAN
#alt = np.arcsin(topoxyz[2]) * thorconsts.DEG_IN_RADIAN
#print "alt az",alt,az
#fullmat = c2l.dot(prec) # both!
#celxyz = cel2xyz(o.celest) # icrs!
#topo2 = fullmat.dot(celxyz)
#print "topo2:"
#print topo2
#az2 = np.arctan2(topo2[0],topo2[1]) * thorconsts.DEG_IN_RADIAN
#alt2 = np.arcsin(topo2[2]) * thorconsts.DEG_IN_RADIAN
#print "alt2 az2",alt2,az2
    import matplotlib.pyplot as plt  # needed for the plot below; the module-level import is commented out
    (bright2000, brightmags, brightcolors, brightnames) = getbrightest("cartesian_bright.dat")
(projectedx,projectedy) = skyproject(o.icrs2topoxyz,bright2000)
(objx,objy) = skyproject(o.current2topoxyz,celnowxyz)
for i in range(0,len(brightmags)) :
size = (5. - 0.9 * brightmags[i])
if size > 0.:
plt.plot(projectedx[i],projectedy[i],'bo',markersize = size)
plt.plot(objx,objy,'ro')
plt.xlim(-1.,1.)
plt.ylim(-1.,1.)
plt.show()
| 42.615158
| 122
| 0.603498
|
990984ccc9482a76ec298237810a4864e32a7a79
| 4,257
|
py
|
Python
|
src/pytorch/torchdata.py
|
jeetsagar/turbojet
|
9b17edde0a7e01d0fa320261fbc2734ce53577d2
|
[
"MIT"
] | null | null | null |
src/pytorch/torchdata.py
|
jeetsagar/turbojet
|
9b17edde0a7e01d0fa320261fbc2734ce53577d2
|
[
"MIT"
] | null | null | null |
src/pytorch/torchdata.py
|
jeetsagar/turbojet
|
9b17edde0a7e01d0fa320261fbc2734ce53577d2
|
[
"MIT"
] | 2
|
2021-05-20T05:47:59.000Z
|
2021-08-24T07:44:37.000Z
|
#!python3
"""preparing the dataset for pytorch"""
import h5py
import torch
import bisect
import numpy as np
from explore import print_keys
from torch.utils.data import Dataset, DataLoader
def normalize_data(x):
x_max = np.max(x, axis=0)
x_min = np.min(x, axis=0)
x_denom = (x_max - x_min)
x_denom[x_denom == 0] = 1
x_norm = -1 + (2 * (x - x_min) / x_denom)
return x_norm
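# Worked example (illustrative): a column [0., 5., 10.] maps to [-1., 0., 1.]; a constant
# column maps to -1. everywhere because its denominator is forced to 1.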
def load_traindata(params):
dataset = UnitDataset(params.traindata, params.units, 'dev', params.features_last)
train_loader = DataLoader(dataset, batch_size=params.batch_size, shuffle=False, pin_memory=False)
return train_loader
def load_testdata(params):
dataset = UnitDataset(params.testdata, params.units, 'test', params.features_last)
test_loader = DataLoader(dataset, batch_size=params.batch_size, shuffle=False, pin_memory=False)
return test_loader
class UnitDataset(Dataset):
def __init__(self, filepath, units, suffix='dev', features_last=False):
self.window = 50
self.features_last = features_last
with h5py.File(filepath, 'r') as hdf:
W_array = np.array(hdf.get(f'W_{suffix}'))
X_s_array = np.array(hdf.get(f'X_s_{suffix}'))
A_array = np.array(hdf.get(f'A_{suffix}'))
Y_array = np.array(hdf.get(f'Y_{suffix}'))
unit_array = np.array(A_array[:, 0], dtype=np.int32)
existing_units = list(np.unique(unit_array))
if units:
units = units[0]
self.units = list(set(units).intersection(set(existing_units)))
self.units.sort()
else:
self.units = existing_units
self.num_units = len(self.units)
dev_data = np.concatenate((W_array, X_s_array), axis=1)
dev_data = normalize_data(dev_data)
self.data_list = []
self.target_list = []
self.length_list = []
self.total_length = 0
for unit in self.units:
unit_ind = (unit_array == unit)
unit_data = dev_data[unit_ind]
unit_target = Y_array[unit_ind]
unit_target = unit_target[self.window:]
# using a subset of the data for testing
# unit_data = unit_data[:1024+self.window]
# unit_target = unit_target[:1024]
# remove the transpose() call when using tensorflow
# tensorflow uses channels last, but pytorch uses channels first
# by default features/channels occupy the last axis, the transpose call moves the features/channels first
if self.features_last:
data_tensor = torch.Tensor(unit_data)
else:
data_tensor = torch.Tensor(unit_data).transpose(0, 1)
self.data_list.append(data_tensor)
target_tensor = torch.Tensor(unit_target)
self.target_list.append(target_tensor)
target_length = target_tensor.shape[0]
self.total_length += target_length
self.length_list.append(target_length)
self.total_elem = list(np.cumsum(self.length_list))
def _get_index(self, n):
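        # Map a global sample index onto (unit index i, offset j within that unit) using the
        # cumulative lengths in self.total_elem. Illustrative values (not from the dataset):
        # with length_list = [100, 80], total_elem = [100, 180], index 150 gives
        # bisect_left(total_elem, 151) = 1 and j = 151 - 100 - 1 = 50, i.e. unit 1, sample 50.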
n = n + 1
n = max(1, min(self.total_length, n))
i = bisect.bisect_left(self.total_elem, n)
if i == 0:
j = n - 1
else:
m = self.total_elem[i-1]
j = n - m - 1
return i, j
def __len__(self):
return self.total_length
def __getitem__(self, index):
i, j = self._get_index(index)
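        # Targets were shifted by self.window in __init__, so target j is paired with the
        # window of samples immediately preceding it: data rows j .. j+window-1, giving a
        # slice of shape (window, features) if features_last else (features, window).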
if self.features_last:
data = self.data_list[i][j:j+self.window, :]
else:
data = self.data_list[i][:, j:j + self.window]
target = self.target_list[i][j]
return data, target
if __name__ == '__main__':
fpath = '../../../data_set/N-CMAPSS_DS02-006.h5'
ds = UnitDataset(fpath, [[14]], 'test', False)
# a, b = ds[0]
# print(a.shape, b.shape)
# print(ds.units)
# print(ds.num_units)
# print(ds.length_list)
# print(len(ds))
td = DataLoader(ds, batch_size=4, shuffle=False, pin_memory=False)
for i, (j, k) in enumerate(td):
if i > 1:
break
print(j.shape)
print(j.dtype)
print(k.shape)
print(k.dtype)
| 29.978873
| 117
| 0.603946
|
b97280843ac0fe835c564113a953f45d49f68877
| 628
|
py
|
Python
|
manage.py
|
AviH0/coursist
|
3db05e8a168be33d2f03b9e082ee4779d80be7c7
|
[
"MIT"
] | 6
|
2020-06-26T12:09:10.000Z
|
2021-12-18T11:44:55.000Z
|
manage.py
|
AviH0/coursist
|
3db05e8a168be33d2f03b9e082ee4779d80be7c7
|
[
"MIT"
] | 89
|
2020-06-02T11:42:57.000Z
|
2021-06-10T19:09:09.000Z
|
manage.py
|
AviH0/coursist
|
3db05e8a168be33d2f03b9e082ee4779d80be7c7
|
[
"MIT"
] | 14
|
2020-06-26T12:08:34.000Z
|
2021-04-20T10:59:45.000Z
|
#!/usr/bin/env python
"""Django's command-line utility for administrative tasks."""
import os
import sys
def main():
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "Coursist.settings")
try:
from django.core.management import execute_from_command_line
except ImportError as exc:
raise ImportError(
"Couldn't import Django. Are you sure it's installed and "
"available on your PYTHONPATH environment variable? Did you "
"forget to activate a virtual environment?"
) from exc
execute_from_command_line(sys.argv)
if __name__ == "__main__":
main()
| 28.545455
| 73
| 0.683121
|
b9cd7e2eb25c0a155806d20ff6255c2d5830bc05
| 1,915
|
py
|
Python
|
run_expts/06a2_tucodec.py
|
scheng1992/Data_Assimilation
|
b4d43895229205ee2cd16b15ee20beccb33b71d6
|
[
"MIT"
] | 1
|
2021-11-25T12:46:48.000Z
|
2021-11-25T12:46:48.000Z
|
run_expts/06a2_tucodec.py
|
bugsuse/Data_Assimilation
|
2965ccf78951df11f8686282cd6814bae18afde5
|
[
"MIT"
] | null | null | null |
run_expts/06a2_tucodec.py
|
bugsuse/Data_Assimilation
|
2965ccf78951df11f8686282cd6814bae18afde5
|
[
"MIT"
] | 2
|
2021-03-02T13:29:34.000Z
|
2022-03-12T11:01:08.000Z
|
"""
After the success of the first experiment (particularly with vanilla blocks)
I want to investigate the effect of
1) Increasing the number of channels to [96, 128]
2) Increasing the amount of augmentation (as overfitting was a problem)
i.e. I have removed the FieldJitter(0, 0) augmentation
NOTE: 128 failed with a CUDA memory error
"""
from VarDACAE.settings.models.resNeXt import ResStack3
from VarDACAE.settings.models.CLIC import CLIC
from VarDACAE import TrainAE, ML_utils, BatchDA
from run_expts.expt_config import ExptConfigTest
TEST = False
GPU_DEVICE = 0
exp_base = "experiments/train/06a2/"
#global variables for DA and training:
class ExptConfig():
EPOCHS = 300
SMALL_DEBUG_DOM = False #For training
calc_DA_MAE = True
num_epochs_cv = 0
LR = 0.0002
print_every = 10
test_every = 10
def main():
blocks = ["vanilla"]
channels = [96, 128]
if TEST:
expt = ExptConfigTest()
else:
expt = ExptConfig()
idx = 0
for block in blocks:
for Cstd in channels:
kwargs = {"model_name": "Tucodec", "block_type": block, "Cstd": Cstd}
idx += 1
for k, v in kwargs.items():
print("{}={}, ".format(k, v), end="")
print()
settings = CLIC(**kwargs)
settings.GPU_DEVICE = GPU_DEVICE
settings.export_env_vars()
expdir = exp_base + str(idx - 1) + "/"
trainer = TrainAE(settings, expdir, expt.calc_DA_MAE)
expdir = trainer.expdir #get full path
model = trainer.train(expt.EPOCHS, test_every=expt.test_every,
num_epochs_cv=expt.num_epochs_cv,
learning_rate = expt.LR, print_every=expt.print_every,
small_debug=expt.SMALL_DEBUG_DOM)
if __name__ == "__main__":
main()
| 24.87013
| 90
| 0.608877
|
4bab2938eae96ffaf14d1b9aaeeaacc3860e9f1f
| 12,779
|
py
|
Python
|
jornadas/views.py
|
marthalilianamd/SIVORE
|
7f0d6c2c79aa909e6cecbf5f562ebe64f40a560d
|
[
"Apache-2.0"
] | 1
|
2016-02-11T05:01:49.000Z
|
2016-02-11T05:01:49.000Z
|
jornadas/views.py
|
marthalilianamd/SIVORE
|
7f0d6c2c79aa909e6cecbf5f562ebe64f40a560d
|
[
"Apache-2.0"
] | 89
|
2016-01-29T00:04:48.000Z
|
2016-07-05T15:52:30.000Z
|
jornadas/views.py
|
Jorgesolis1989/SIVORE
|
7f0d6c2c79aa909e6cecbf5f562ebe64f40a560d
|
[
"Apache-2.0"
] | 2
|
2016-02-08T15:16:12.000Z
|
2016-05-14T02:33:06.000Z
|
from django.contrib.auth.decorators import permission_required
from django.shortcuts import render_to_response, render, redirect
from jornadas.forms import FormularioRegistroJornada, FormularioEditarJornada
from planchas.models import Plancha
from candidatos.models import Candidato
from corporaciones.models import Corporacion
from datetime import datetime
from django.utils import timezone
from django.utils.timezone import activate
from django.conf import settings
activate(settings.TIME_ZONE)
from jornadas.models import Jornada , Jornada_Corporacion
def ingresar_plancha_voto_blanco(jornada_corporacion):
# Creando la plancha con el candidato voto en blanco para esa corporacion especifica
plancha_voto_en_blanco = Plancha()
plancha_voto_en_blanco.candidato_principal = None
plancha_voto_en_blanco.is_active = True
plancha_voto_en_blanco.jornada_corporacion = jornada_corporacion
plancha_voto_en_blanco.numeroplancha = 0
try:
plancha_voto_en_blanco.save()
except Exception as e:
print(e)
@permission_required("usuarios.Administrador", login_url="/")
def registro_jornada(request):
    # Validation for creating an election day (jornada)
if request.method == 'POST' and "btncreate" in request.POST:
form = FormularioRegistroJornada(request.POST)
        # If the form is valid
if form.is_valid():
jornada = Jornada()
jornada.nombrejornada = form.cleaned_data["nombre_jornada"]
            # Create the election day and enable the corporations.
corporaciones = form.cleaned_data["corporaciones"]
hora_completa = form.cleaned_data["fecha_jornada"] + " "+ form.cleaned_data["hora_inicio"]
jornada.fecha_inicio_jornada = datetime.strptime(hora_completa, "%m/%d/%Y %I:%M %p")
jornada.fecha_inicio_jornada = timezone.make_aware(jornada.fecha_inicio_jornada,
timezone.get_current_timezone())
hora_completa = form.cleaned_data["fecha_jornada"] + " "+ form.cleaned_data["hora_final"]
jornada.fecha_final_jornada = datetime.strptime(hora_completa, "%m/%d/%Y %I:%M %p")
jornada.fecha_final_jornada = timezone.make_aware(jornada.fecha_final_jornada,
timezone.get_current_timezone())
try:
jornada.save()
except Exception as e:
print(e)
jornada.corporaciones = corporaciones
try:
jornada.save()
except Exception as e:
print(e)
for corporacion in corporaciones:
jornada_corporacion = Jornada_Corporacion()
                # store the jornada
jornada_corporacion.jornada = jornada
                # store the corporation
jornada_corporacion.corporacion = corporacion
try:
jornada_corporacion.save()
except Exception as e:
print(e)
                # Add the blank-vote candidate to the corporation being elected
ingresar_plancha_voto_blanco(jornada_corporacion)
mensaje = "Se creó la jornada "+jornada.nombrejornada+" exitosamente "
llamarMensaje = "exito_usuario"
request.session["mensaje"] = mensaje
request.session["llamarMensaje"] = llamarMensaje
return redirect(listar_jornadas)
        # if the form is not valid, rebuild it
else:
mensaje = "Datos incompleto para crear la jornada"
llamarMensaje = "fracaso_usuario"
form = FormularioRegistroJornada()
data = {'mensaje': mensaje, 'form': form, 'llamarMensaje':llamarMensaje}
return render(request, 'registro_jornada.html', data)
else:
form = FormularioRegistroJornada()
return render(request, 'registro_jornada.html', {'form': form})
# View to list election days (jornadas)
@permission_required("usuarios.Administrador", login_url="/")
def listar_jornadas(request):
jornadas = Jornada.objects.filter(is_active=True)
jornada_corporaciones = Jornada_Corporacion.objects.filter(jornada__is_active=True).values_list('corporacion_id', flat=True)
llamarMensaje = request.session.pop('llamarMensaje', None)
mensaje = request.session.pop('mensaje', None)
return render(request, 'listar_jornadas.html', {'jornadas': jornadas, 'llamarMensaje': llamarMensaje,'mensaje': mensaje,
'jornada_corporaciones': jornada_corporaciones})
# View that allows editing the selected jornada
@permission_required("usuarios.Administrador", login_url="/")
def editar_jornada(request, id):
try:
jornada = Jornada.objects.get(id = id)
except Jornada.DoesNotExist:
llamarmensaje = "fracaso_usuario"
mensaje = "La jornada #" + str(id)+ " no existe en el sistema."
request.session["llamarMensaje"] = llamarmensaje
request.session["mensaje"] = mensaje
return redirect("listar_jornadas")
if request.method == 'POST' and "btncreate" in request.POST:
form = FormularioEditarJornada(request.POST)
        # If the form is valid
if form.is_valid():
jornada.nombrejornada = form.cleaned_data["nombre_jornada"]
hora_completa = form.cleaned_data["fecha_jornada"] + " " + form.cleaned_data["hora_inicio"]
jornada.fecha_inicio_jornada = datetime.strptime(hora_completa, "%m/%d/%Y %I:%M %p")
jornada.fecha_inicio_jornada = timezone.make_aware(jornada.fecha_inicio_jornada,
timezone.get_current_timezone())
print(hora_completa)
hora_completa = form.cleaned_data["fecha_jornada"] + " "+ form.cleaned_data["hora_final"]
jornada.fecha_final_jornada = datetime.strptime(hora_completa, "%m/%d/%Y %I:%M %p")
jornada.fecha_final_jornada = timezone.make_aware(jornada.fecha_final_jornada,
timezone.get_current_timezone())
print(jornada.fecha_final_jornada)
            # Save the edited jornada and update its corporaciones.
corporaciones = form.cleaned_data["corporaciones"]
try:
jornada.save()
except Exception as e:
print(e)
            # Handle the corporaciones
jornadas_activas = Jornada_Corporacion.objects.filter(jornada_id = jornada.id , is_active=True)
for jornada_activa in jornadas_activas:
                # Check each corporacion currently linked to the jornada
if jornada_activa.corporacion not in corporaciones:
                    # Deactivate the candidates of that jornada and corporacion
candidatos_a_desactivar = Candidato.objects.filter(jornada_corporacion__jornada_id=jornada_activa.jornada.id ,
jornada_corporacion__corporacion__id=jornada_activa.corporacion.id,
is_active=True)
                    # Save the candidates
for candidato in candidatos_a_desactivar:
candidato.is_active = False
candidato.save()
                    # Deactivate the planchas of the jornada
planchas_a_desactivar = Plancha.objects.filter(jornada_corporacion__jornada_id=jornada_activa.jornada.id ,
jornada_corporacion__corporacion__id=jornada_activa.corporacion.id,
is_active=True)
                    # Save the planchas
for plancha in planchas_a_desactivar:
plancha.is_active = False
plancha.save()
                    # Deactivate the jornada-corporacion link
jornada_activa.is_active = False
try:
jornada_activa.save()
except Exception as e:
print(e)
            # Add the missing corporaciones
for corporacion in corporaciones:
if corporacion.id not in jornadas_activas.values_list('corporacion__id' , flat=True):
jornada_corporacion = Jornada_Corporacion(jornada=jornada , corporacion=corporacion , is_active=True)
try:
jornada_corporacion.save()
except Exception as e:
print(e)
ingresar_plancha_voto_blanco(jornada_corporacion)
jornada.corporaciones = corporaciones
try:
jornada.save()
except Exception as e:
print(e)
mensaje = "Se editó la jornada " + jornada.nombrejornada + " exitosamente "
llamarMensaje = "exito_usuario"
request.session["mensaje"] = mensaje
request.session["llamarMensaje"] = llamarMensaje
return redirect(listar_jornadas)
        # if the form is not valid, rebuild an empty form
else:
mensaje = "Datos incompleto para editar la jornada"
llamarMensaje = "fracaso_usuario"
            form = FormularioEditarJornada()
data = {'mensaje': mensaje, 'form': form, 'llamarMensaje':llamarMensaje}
return render(request, 'editar_jornada.html', data)
else:
form = FormularioEditarJornada()
        # corporaciones that belong to this jornada
corporaciones_de_jornada = Corporacion.objects.filter(id__in=Jornada_Corporacion.objects.filter(jornada_id= jornada.id, is_active=True, ).values_list("corporacion__id" , flat=True))
print(corporaciones_de_jornada)
        # ids of corporaciones already taken by an active jornada
corporaciones_ocupadas = Corporacion.objects.filter(id__in=Jornada_Corporacion.objects.filter(jornada__is_active=True).values_list("corporacion__id", flat=True))
        # corporaciones still available
corporaciones_libres= Corporacion.objects.all().exclude(id__in=corporaciones_ocupadas.exclude(id__in=corporaciones_de_jornada.values_list("id", flat=True)).values_list("id", flat=True))
        # Re-include this jornada's own corporaciones, which would otherwise appear as taken
form.fields["corporaciones"].queryset = corporaciones_libres
        # preload the edit form with the jornada's current data
form.initial = {'nombre_jornada' : jornada.nombrejornada, "fecha_jornada" : jornada.fecha_inicio_jornada.date().strftime("%m/%d/%Y"),
"hora_inicio" : timezone.localtime(jornada.fecha_inicio_jornada).strftime('%I:%M %p'),
"hora_final" : timezone.localtime(jornada.fecha_final_jornada).strftime('%I:%M %p'), "corporaciones": [o for o in corporaciones_de_jornada]}
return render(request, 'editar_jornada.html', {'form': form})
# This view does not delete from the database; it deactivates the jornada together with its dependencies (Planchas and Candidatos)
@permission_required("usuarios.Administrador", login_url="/")
def eliminar_jornada(request, idjornada=None):
if request.method == 'POST':
        # Deactivate the jornada in the jornada table
jornada=Jornada.objects.get(id=idjornada , is_active=True)
jornada.is_active = False
try:
jornada.save()
except Exception as e:
print(e)
        # Deactivate the planchas linked to this jornada
planchas_jornada = Plancha.objects.filter(jornada_corporacion__jornada_id=idjornada , is_active=True)
for plancha_jornada in planchas_jornada:
plancha_jornada.is_active = False
try:
plancha_jornada.save()
except Exception as e:
print(e)
        # Deactivate the candidates linked to this jornada
candidatos_jornada = Candidato.objects.filter(jornada_corporacion__jornada_id=idjornada , is_active=True)
for candidato_jornada in candidatos_jornada:
candidato_jornada.is_active = False
try:
candidato_jornada.save()
except Exception as e:
print(e)
llamarMensaje = "elimino_corporacion"
mensaje = "Se eliminó la jornada electoral " + str(jornada.nombrejornada) +" con sus candidatos y corporaciones asociadas sactisfactoriamente"
request.session['llamarMensaje'] = llamarMensaje
request.session['mensaje'] = mensaje
return redirect("listar_jornadas")
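# Minimal illustrative sketch (not used elsewhere in this module) of the
# datetime handling repeated in the views above: the form's date string and
# 12-hour time string are parsed with "%m/%d/%Y %I:%M %p" and then made
# timezone-aware with the active timezone. The helper name and the sample
# values in the docstring are assumptions for illustration only.
def _parse_fecha_hora(fecha, hora):
    """E.g. _parse_fecha_hora("05/20/2023", "08:30 PM") -> aware datetime."""
    naive = datetime.strptime(fecha + " " + hora, "%m/%d/%Y %I:%M %p")
    return timezone.make_aware(naive, timezone.get_current_timezone())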
| 46.981618
| 193
| 0.631818
|
fc26f27ae39373971d43c803000d13051fd7bdec
| 984
|
py
|
Python
|
Python3/0695-Max-Area-of-Island/soln-1.py
|
wyaadarsh/LeetCode-Solutions
|
3719f5cb059eefd66b83eb8ae990652f4b7fd124
|
[
"MIT"
] | 5
|
2020-07-24T17:48:59.000Z
|
2020-12-21T05:56:00.000Z
|
Python3/0695-Max-Area-of-Island/soln-1.py
|
zhangyaqi1989/LeetCode-Solutions
|
2655a1ffc8678ad1de6c24295071308a18c5dc6e
|
[
"MIT"
] | null | null | null |
Python3/0695-Max-Area-of-Island/soln-1.py
|
zhangyaqi1989/LeetCode-Solutions
|
2655a1ffc8678ad1de6c24295071308a18c5dc6e
|
[
"MIT"
] | 2
|
2020-07-24T17:49:01.000Z
|
2020-08-31T19:57:35.000Z
|
class Solution:
def maxAreaOfIsland(self, grid):
"""
:type grid: List[List[int]]
:rtype: int
"""
if not grid or not grid[0]:
return 0
def dfs(r, c):
m, n = len(grid), len(grid[0])
stack = [(r, c)]
grid[r][c] = 0
ans = 0
while stack:
i, j = stack.pop()
ans += 1
for di, dj in ((-1, 0), (1, 0), (0, -1), (0, 1)):
newi, newj = i + di, j + dj
if 0 <= newi < m and 0 <= newj < n and grid[newi][newj] == 1:
grid[newi][newj] = 0
stack.append((newi, newj))
return ans
ans = 0
for i, row in enumerate(grid):
for j, val in enumerate(row):
if val == 1:
area = dfs(i, j)
if area > ans:
ans = area
return ans
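# Illustrative usage sketch (assumed sample grid, not part of the original
# solution): the island covering (0, 1), (0, 2), (1, 0) and (1, 1) has area 4,
# larger than the single cell at (2, 2). Note the method mutates its input.
if __name__ == "__main__":
    demo_grid = [[0, 1, 1],
                 [1, 1, 0],
                 [0, 0, 1]]
    assert Solution().maxAreaOfIsland(demo_grid) == 4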
| 31.741935
| 81
| 0.356707
|
7b0bd118fe332b41c113ce6ca994d83e9581d615
| 6,955
|
py
|
Python
|
lib/airflow/tests/contrib/jobs/test_scheduler_client.py
|
ideax-business/ai-flow
|
0a7797a093beddf827f68cad7a11e0babf1b5059
|
[
"Apache-2.0",
"BSD-2-Clause",
"MIT",
"ECL-2.0",
"BSD-3-Clause"
] | null | null | null |
lib/airflow/tests/contrib/jobs/test_scheduler_client.py
|
ideax-business/ai-flow
|
0a7797a093beddf827f68cad7a11e0babf1b5059
|
[
"Apache-2.0",
"BSD-2-Clause",
"MIT",
"ECL-2.0",
"BSD-3-Clause"
] | null | null | null |
lib/airflow/tests/contrib/jobs/test_scheduler_client.py
|
ideax-business/ai-flow
|
0a7797a093beddf827f68cad7a11e0babf1b5059
|
[
"Apache-2.0",
"BSD-2-Clause",
"MIT",
"ECL-2.0",
"BSD-3-Clause"
] | null | null | null |
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
import unittest
from typing import List
from notification_service.base_notification import EventWatcher, BaseEvent
from notification_service.client import NotificationClient
from notification_service.event_storage import MemoryEventStorage
from notification_service.server import NotificationServer
from notification_service.service import NotificationService
from airflow.contrib.jobs.scheduler_client import EventSchedulerClient, ExecutionContext
from airflow.events.scheduler_events import SchedulerInnerEventType
from airflow.executors.scheduling_action import SchedulingAction
PORT = 50053
class MockScheduler(object):
def __init__(self):
self.client = NotificationClient(server_uri="localhost:{}".format(PORT),
default_namespace="scheduler")
def start(self, watcher):
self.client.start_listen_events(watcher=watcher)
def stop(self):
self.client.stop_listen_events()
class PassWatcher(EventWatcher):
def process(self, events: List[BaseEvent]):
pass
class TestSchedulerClient(unittest.TestCase):
@classmethod
def setUpClass(cls) -> None:
cls.storage = MemoryEventStorage()
cls.server = NotificationServer(NotificationService(cls.storage), PORT)
cls.server.run()
@classmethod
def tearDownClass(cls) -> None:
cls.server.stop()
def setUp(self) -> None:
self.client = EventSchedulerClient(ns_client=NotificationClient(server_uri="localhost:{}".format(PORT),
default_namespace="test_namespace"))
self.scheduler = MockScheduler()
def tearDown(self) -> None:
self.scheduler.stop()
def test_parse_dag(self):
class W(EventWatcher):
def process(self, events: List[BaseEvent]):
s_client = NotificationClient(server_uri="localhost:{}".format(PORT),
default_namespace="scheduler")
s_client.send_event(BaseEvent(key=events[0].key, value='',
event_type=SchedulerInnerEventType.PARSE_DAG_RESPONSE.value,
namespace='scheduler'))
self.scheduler.start(watcher=W())
result = self.client.trigger_parse_dag(file_path='/test')
self.assertTrue(result)
def test_parse_dag_timeout(self):
self.scheduler.start(watcher=PassWatcher())
with self.assertRaises(TimeoutError) as context:
result = self.client.trigger_parse_dag(file_path='/test', timeout=1)
self.assertTrue('Get response timeout' in str(context.exception))
def test_schedule_dag(self):
class W(EventWatcher):
def process(self, events: List[BaseEvent]):
s_client = NotificationClient(server_uri="localhost:{}".format(PORT),
default_namespace="scheduler")
s_client.send_event(BaseEvent(key=events[0].key, value='1',
event_type=SchedulerInnerEventType.RESPONSE.value,
namespace='scheduler'))
self.scheduler.start(watcher=W())
result = self.client.schedule_dag(dag_id='1', context='')
self.assertEqual('1', result.dagrun_id)
def test_schedule_dag_timeout(self):
self.scheduler.start(watcher=PassWatcher())
with self.assertRaises(TimeoutError) as context:
result = self.client.schedule_dag(dag_id='1', context='', timeout=1)
self.assertTrue('Get response timeout' in str(context.exception))
def test_stop_dag_run(self):
class W(EventWatcher):
def process(self, events: List[BaseEvent]):
s_client = NotificationClient(server_uri="localhost:{}".format(PORT),
default_namespace="scheduler")
s_client.send_event(BaseEvent(key=events[0].key, value='1',
event_type=SchedulerInnerEventType.RESPONSE.value,
namespace='scheduler'))
self.scheduler.start(watcher=W())
result = self.client.stop_dag_run(dag_id='1', context=ExecutionContext(dagrun_id='1'))
self.assertEqual('1', result.dagrun_id)
def test_stop_dag_run_timeout(self):
self.scheduler.start(watcher=PassWatcher())
with self.assertRaises(TimeoutError) as context:
result = self.client.stop_dag_run(dag_id='1', context=ExecutionContext(dagrun_id='1'), timeout=1)
self.assertTrue('Get response timeout' in str(context.exception))
def test_schedule_task(self):
class W(EventWatcher):
def process(self, events: List[BaseEvent]):
s_client = NotificationClient(server_uri="localhost:{}".format(PORT),
default_namespace="scheduler")
s_client.send_event(BaseEvent(key=events[0].key, value='1',
event_type=SchedulerInnerEventType.RESPONSE.value,
namespace='scheduler'))
self.scheduler.start(watcher=W())
result = self.client.schedule_task(dag_id='1', task_id='t_1',
action=SchedulingAction.START,
context=ExecutionContext(dagrun_id='1'))
self.assertEqual('1', result.dagrun_id)
def test_schedule_task_timeout(self):
self.scheduler.start(watcher=PassWatcher())
with self.assertRaises(TimeoutError) as context:
result = self.client.schedule_task(dag_id='1', task_id='t_1',
action=SchedulingAction.START,
context=ExecutionContext(dagrun_id='1'),
timeout=1)
self.assertTrue('Get response timeout' in str(context.exception))
if __name__ == '__main__':
unittest.main()
| 45.162338
| 111
| 0.627606
|
c0e9d38c3ca514b85f6e8c40422e60096bd2227a
| 2,312
|
py
|
Python
|
dit/pid/tests/test_idep.py
|
volpatto/dit
|
a8b0ffd72e0829c4b239419e85b9e0cfd20085dd
|
[
"BSD-3-Clause"
] | 1
|
2021-03-15T08:51:42.000Z
|
2021-03-15T08:51:42.000Z
|
dit/pid/tests/test_idep.py
|
volpatto/dit
|
a8b0ffd72e0829c4b239419e85b9e0cfd20085dd
|
[
"BSD-3-Clause"
] | null | null | null |
dit/pid/tests/test_idep.py
|
volpatto/dit
|
a8b0ffd72e0829c4b239419e85b9e0cfd20085dd
|
[
"BSD-3-Clause"
] | null | null | null |
"""
Tests for dit.pid.idep.
"""
import pytest
from dit.pid.ibroja import PID_BROJA
from dit.pid.idep import PID_dep, PID_RA
from dit.pid.distributions import bivariates, trivariates
def test_pid_dep1():
"""
Test idep on a generic distribution.
"""
d = bivariates['reduced or']
pid = PID_dep(d, ((0,), (1,)), (2,))
assert pid[((0,), (1,))] == pytest.approx(0.073761301440421256, abs=1e-4)
assert pid[((0,),)] == pytest.approx(0.23751682301871169, abs=1e-4)
assert pid[((1,),)] == pytest.approx(0.23751682301871169, abs=1e-4)
assert pid[((0, 1),)] == pytest.approx(0.45120505252215537, abs=1e-4)
def test_pid_dep2():
"""
Test idep on another generic distribution.
"""
d = trivariates['anddup']
pid = PID_dep(d, [[0], [1], [2]], [3])
for atom in pid._lattice:
if atom == ((0,), (1,), (2,)):
assert pid[atom] == pytest.approx(0.081704409646414788, abs=1e-4)
elif atom == ((0, 1), (1, 2)):
assert pid[atom] == pytest.approx(0.27042624480113808, abs=1e-4)
elif atom in [((0,), (2,)), ((1,),)]:
assert pid[atom] == pytest.approx(0.22957374150893717, abs=1e-4)
else:
assert pid[atom] == pytest.approx(0.0, abs=1e-4)
def test_pid_dep3():
"""
Test that idep and ibroja differ on reduced or.
"""
d = bivariates['reduced or']
pid1 = PID_BROJA(d)
pid2 = PID_dep(d)
assert pid1 != pid2
def test_pid_dep4():
"""
Test that anddup is complete.
"""
d = trivariates['anddup']
pid = PID_dep(d)
assert pid.complete
assert pid.nonnegative
assert pid.consistent
def test_pid_ra1():
"""
"""
d = bivariates['and']
pid = PID_RA(d, ((0,), (1,)), (2,))
assert pid[((0,), (1,))] == pytest.approx(-0.18872, abs=1e-4)
assert pid[((0,),)] == pytest.approx(0.5, abs=1e-4)
assert pid[((1,),)] == pytest.approx(0.5, abs=1e-4)
assert pid[((0, 1),)] == pytest.approx(0.0, abs=1e-4)
def test_pid_ra2():
"""
"""
d = trivariates['cat']
pid = PID_RA(d, ((0,), (1,), (2,)), (3,))
for atom in pid._lattice:
if atom in (((0,),), ((1,),), ((2,),)):
assert pid[atom] == pytest.approx(1.0, abs=1e-4)
else:
assert pid[atom] == pytest.approx(0.0, abs=1e-4)
| 28.54321
| 77
| 0.556661
|
6389ac77c43ecd4a95a843d1a40f83b191749021
| 4,385
|
py
|
Python
|
ops/nl/nonlocal_block.py
|
ancientmooner/CCNet
|
c914913ad523ec854f78c28f22044af204d4fbe6
|
[
"MIT"
] | null | null | null |
ops/nl/nonlocal_block.py
|
ancientmooner/CCNet
|
c914913ad523ec854f78c28f22044af204d4fbe6
|
[
"MIT"
] | null | null | null |
ops/nl/nonlocal_block.py
|
ancientmooner/CCNet
|
c914913ad523ec854f78c28f22044af204d4fbe6
|
[
"MIT"
] | null | null | null |
import torch
import torch.nn.functional as F
from libs import InPlaceABN, InPlaceABNSync
from torch import nn
from torch.nn import init
import math
class _NonLocalNd(nn.Module):
def __init__(self, dim, inplanes, planes, downsample, use_gn, lr_mult, use_out):
assert dim in [1, 2, 3], "dim {} is not supported yet".format(dim)
if dim == 3:
conv_nd = nn.Conv3d
if downsample:
max_pool = nn.MaxPool3d(kernel_size=(1, 2, 2), stride=(1, 2, 2))
else:
max_pool = None
bn_nd = nn.BatchNorm3d
elif dim == 2:
conv_nd = nn.Conv2d
if downsample:
max_pool = nn.MaxPool2d(kernel_size=(2, 2), stride=(2, 2))
else:
max_pool = None
bn_nd = nn.BatchNorm2d
else:
conv_nd = nn.Conv1d
if downsample:
max_pool = nn.MaxPool1d(kernel_size=2, stride=2)
else:
max_pool = None
bn_nd = nn.BatchNorm1d
super(_NonLocalNd, self).__init__()
self.conv_query = conv_nd(inplanes, planes, kernel_size=1)
self.conv_key = conv_nd(inplanes, planes, kernel_size=1)
if use_out:
self.conv_value = conv_nd(inplanes, planes, kernel_size=1)
self.conv_out = conv_nd(planes, inplanes, kernel_size=1, bias=False)
else:
self.conv_value = conv_nd(inplanes, inplanes, kernel_size=1, bias=False)
self.conv_out = None
self.softmax = nn.Softmax(dim=2)
self.downsample = max_pool
# self.norm = nn.GroupNorm(num_groups=32, num_channels=inplanes) if use_gn else InPlaceABNSync(num_features=inplanes)
self.gamma = nn.Parameter(torch.zeros(1))
self.scale = math.sqrt(planes)
self.reset_parameters()
self.reset_lr_mult(lr_mult)
def reset_parameters(self):
for m in self.modules():
if isinstance(m, nn.Conv3d) or isinstance(m, nn.Conv2d) or isinstance(m, nn.Conv1d):
init.normal_(m.weight, 0, 0.01)
if m.bias is not None:
init.zeros_(m.bias)
m.inited = True
#init.constant_(self.norm.weight, 0)
#init.constant_(self.norm.bias, 0)
#self.norm.inited = True
def reset_lr_mult(self, lr_mult):
if lr_mult is not None:
for m in self.modules():
m.lr_mult = lr_mult
else:
            print('lr_mult not changed')
def forward(self, x):
# [N, C, T, H, W]
residual = x
# [N, C, T, H', W']
if self.downsample is not None:
input_x = self.downsample(x)
else:
input_x = x
# [N, C', T, H, W]
query = self.conv_query(x)
# [N, C', T, H', W']
key = self.conv_key(input_x)
value = self.conv_value(input_x)
# [N, C', T x H x W]
query = query.view(query.size(0), query.size(1), -1)
# [N, C', T x H' x W']
key = key.view(key.size(0), key.size(1), -1)
value = value.view(value.size(0), value.size(1), -1)
# [N, T x H x W, T x H' x W']
sim_map = torch.bmm(query.transpose(1, 2), key)
sim_map = sim_map/self.scale
sim_map = self.softmax(sim_map)
# [N, T x H x W, C']
out = torch.bmm(sim_map, value.transpose(1, 2))
# [N, C', T x H x W]
out = out.transpose(1, 2)
# [N, C', T, H, W]
out = out.view(out.size(0), out.size(1), *x.size()[2:])
# [N, C, T, H, W]
if self.conv_out is not None:
out = self.conv_out(out)
# if self.norm is not None:
# out = self.norm(out)
out = self.gamma * out
out = residual + out
return out
class NonLocal2d(_NonLocalNd):
def __init__(self, inplanes, planes, downsample=True, use_gn=False, lr_mult=None, use_out=False):
super(NonLocal2d, self).__init__(dim=2, inplanes=inplanes, planes=planes, downsample=downsample, use_gn=use_gn, lr_mult=lr_mult, use_out=use_out)
class NonLocal3d(_NonLocalNd):
def __init__(self, inplanes, planes, downsample, use_gn, lr_mult, use_out):
super(NonLocal3d, self).__init__(dim=3, inplanes=inplanes, planes=planes, downsample=downsample, use_gn=use_gn, lr_mult=lr_mult, use_out=use_out)
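# Rough usage sketch with assumed shapes (not part of the original module): a
# 2D non-local block preserves the channel count and spatial size of its
# input, so a [N, C, H, W] tensor comes back with the same shape. This assumes
# the `libs` in-place ABN package imported above is importable.
if __name__ == "__main__":
    block = NonLocal2d(inplanes=64, planes=32)
    x = torch.randn(2, 64, 16, 16)
    assert block(x).shape == x.shape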
| 35.362903
| 153
| 0.566249
|
2bfd16b927d06d3900aa5c9d7c16ab9a7ec24963
| 5,142
|
py
|
Python
|
qa/rpc-tests/cfund-vote.py
|
oleksiilihai/Bitcoin-Oil
|
0c40c096fae0437adf8033e37a8a4f8e489551cb
|
[
"MIT"
] | null | null | null |
qa/rpc-tests/cfund-vote.py
|
oleksiilihai/Bitcoin-Oil
|
0c40c096fae0437adf8033e37a8a4f8e489551cb
|
[
"MIT"
] | null | null | null |
qa/rpc-tests/cfund-vote.py
|
oleksiilihai/Bitcoin-Oil
|
0c40c096fae0437adf8033e37a8a4f8e489551cb
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
# Copyright (c) 2018 The Bitcoinoil Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
from test_framework.test_framework import BitcoinOilTestFramework
from test_framework.cfund_util import *
import time
class CommunityFundVotesTest(BitcoinOilTestFramework):
"""Tests the voting procedures of the Community fund."""
def __init__(self):
super().__init__()
self.setup_clean_chain = True
self.num_nodes = 1
def setup_network(self, split=False):
self.nodes = self.setup_nodes()
self.is_network_split = split
def run_test(self):
self.nodes[0].staking(False)
activate_cfund(self.nodes[0])
self.nodes[0].donatefund(1000)
proposalid0 = self.nodes[0].createproposal(self.nodes[0].getnewaddress(), 1, 3600, "test")["hash"]
slow_gen(self.nodes[0], 1)
# Verify the proposal is now in the proposals list
assert(self.nodes[0].getproposal(proposalid0)["hash"] == proposalid0)
self.nodes[0].proposalvote(proposalid0, "yes")
blockhash_yes = slow_gen(self.nodes[0], 1)[0]
# Verify the vote has been counted
assert(self.nodes[0].getproposal(proposalid0)["votingCycle"] == 0)
assert(self.nodes[0].getproposal(proposalid0)["votesYes"] == 1 and self.nodes[0].getproposal(proposalid0)["votesNo"] == 0)
self.nodes[0].proposalvote(proposalid0, "no")
blockhash_no = slow_gen(self.nodes[0], 1)[0]
# Verify the vote has been counted
assert(self.nodes[0].getproposal(proposalid0)["votesYes"] == 1 and self.nodes[0].getproposal(proposalid0)["votesNo"] == 1)
self.nodes[0].invalidateblock(blockhash_no)
        # Verify the votes have been reset
assert(self.nodes[0].getproposal(proposalid0)["votesYes"] == 1 and self.nodes[0].getproposal(proposalid0)["votesNo"] == 0)
self.nodes[0].invalidateblock(blockhash_yes)
        # Verify the votes have been reset
assert(self.nodes[0].getproposal(proposalid0)["votesYes"] == 0 and self.nodes[0].getproposal(proposalid0)["votesNo"] == 0)
# Add dummy votes
slow_gen(self.nodes[0], 5)
self.nodes[0].proposalvote(proposalid0, "yes")
slow_gen(self.nodes[0], 5)
self.nodes[0].proposalvote(proposalid0, "remove")
# Check votes are added
assert(self.nodes[0].getproposal(proposalid0)["votesYes"] == 5 and self.nodes[0].getproposal(proposalid0)["votesNo"] == 5)
# Move to the end of the cycle
slow_gen(self.nodes[0], self.nodes[0].cfundstats()["votingPeriod"]["ending"] - self.nodes[0].cfundstats()["votingPeriod"]["current"])
# Check we are still in the first cycle
assert(self.nodes[0].getproposal(proposalid0)["votingCycle"] == 0)
slow_gen(self.nodes[0], 1)
# Check we are in a new cycle
assert(self.nodes[0].getproposal(proposalid0)["votingCycle"] == 1)
        # Check the vote counts were reset and check the proposal state
assert(self.nodes[0].getproposal(proposalid0)["votesYes"] == 0 and self.nodes[0].getproposal(proposalid0)["votesNo"] == 0)
assert(self.nodes[0].getproposal(proposalid0)["status"] == "pending" and self.nodes[0].getproposal(proposalid0)["state"] == 0)
slow_gen(self.nodes[0], self.nodes[0].cfundstats()["consensus"]["blocksPerVotingCycle"] - 10)
self.nodes[0].proposalvote(proposalid0, "yes")
        # Vote in blocks up to the limit of the cycle
slow_gen(self.nodes[0], 9)
# Check we are still in the same cycle
assert(self.nodes[0].getproposal(proposalid0)["votingCycle"] == 1)
        # Check the vote counts and the proposal state
assert(self.nodes[0].getproposal(proposalid0)["votesYes"] == 9 and self.nodes[0].getproposal(proposalid0)["votesNo"] == 0)
assert(self.nodes[0].getproposal(proposalid0)["status"] == "pending" and self.nodes[0].getproposal(proposalid0)["state"] == 0)
# Vote into the new cycle
firstblockofcycle = slow_gen(self.nodes[0], 1)[0]
# Check we are in the new cycle
assert(self.nodes[0].getproposal(proposalid0)["votingCycle"] == 2)
# Check the number of votes
assert(self.nodes[0].getproposal(proposalid0)["votesYes"] == 0 and self.nodes[0].getproposal(proposalid0)["votesNo"] == 0)
assert(self.nodes[0].getproposal(proposalid0)["status"] == "pending" and self.nodes[0].getproposal(proposalid0)["state"] == 0)
# Move back to the end of the previous cycle
self.nodes[0].invalidateblock(firstblockofcycle)
# Check we are again in the old cycle
assert(self.nodes[0].getproposal(proposalid0)["votingCycle"] == 1)
assert(self.nodes[0].getproposal(proposalid0)["votesYes"] == 9 and self.nodes[0].getproposal(proposalid0)["votesNo"] == 0)
assert(self.nodes[0].getproposal(proposalid0)["status"] == "pending" and self.nodes[0].getproposal(proposalid0)["state"] == 0)
if __name__ == '__main__':
CommunityFundVotesTest().main()
| 44.713043
| 141
| 0.666472
|
23d2b844f1168ced1452fc210177de9c78d9d949
| 2,010
|
py
|
Python
|
src/models.py
|
Salah-Zair/openctt-assistant
|
490c224456cf6e22f8e336d5e1eeeac93ff7ce59
|
[
"MIT"
] | null | null | null |
src/models.py
|
Salah-Zair/openctt-assistant
|
490c224456cf6e22f8e336d5e1eeeac93ff7ce59
|
[
"MIT"
] | null | null | null |
src/models.py
|
Salah-Zair/openctt-assistant
|
490c224456cf6e22f8e336d5e1eeeac93ff7ce59
|
[
"MIT"
] | null | null | null |
from datetime import datetime
class Teacher:
def __init__(self):
self.teacher_id: int
self.name: str
self.last_name: str
self.title: str
self.edu_rank: str
self.ext_id: int
class Room:
def __init__(self):
self.room_id: int
self.name: str
self.capacity: int
self.ext_id: int
class Day:
def __init__(self):
self.day_id: int
        self.name: str
self.day_index: int
class Term:
def __init__(self):
self.term_id: int
self.start_h: int
self.start_min: int
self.end_h: int
self.end_min: int
        self.term_index: int
class EducationProgrammeGroup:
def __init__(self):
self.epg_id: int
        self.name: str
self.ext_id: int
class EducationProgramme:
def __init__(self):
self.ep_id: int
self.name: str
self.code: str
self.semester: str
self.ext_id: int
self.epg_id: int
class TimeTableData:
def __init__(self):
self.tt_id: int
self.type: int
self.institution_name: str
self.school_year: str
self.last_change: str
class Course:
def __init__(self):
self.course_id: int
self.name: str
self.short_name: str
self.group_name: str
self.course_type: str
self.num_of_lesson_per_week: int
self.ext_id: int
self.ep_id: int
self.teacher_id: int
class AllocatedLesson:
def __init__(self):
self.allocless_id: int
self.course_id: int
self.room_id: int
self.day_id: int
self.term_id: int
class Session:
def __init__(self):
self.key: str
self.expired_date: datetime
self.user_id: int
class User:
def __init__(self):
self.user_id: int
self.user_name: str
self.email: str
self.password: str
self.is_logged_in: bool
self.last_logged_in: datetime
| 19.705882
| 40
| 0.58806
|
c504f35fef8cabbf9fc91db1c8e7c1d067b619e7
| 885
|
py
|
Python
|
tests/test_modify_contact.py
|
elpinkypie/python_training
|
110b84fa108894a703f07734b4b5e77f038a54ec
|
[
"Apache-2.0"
] | null | null | null |
tests/test_modify_contact.py
|
elpinkypie/python_training
|
110b84fa108894a703f07734b4b5e77f038a54ec
|
[
"Apache-2.0"
] | null | null | null |
tests/test_modify_contact.py
|
elpinkypie/python_training
|
110b84fa108894a703f07734b4b5e77f038a54ec
|
[
"Apache-2.0"
] | null | null | null |
from git.model.contact import ContactFormAttributes
from random import randrange
def test_modify_contact(fixt):
if fixt.contact.count_contacts() == 0:
fixt.contact.create(ContactFormAttributes(firstname="firstname", lastname="lastname"))
old_contacts = fixt.contact.get_contact_list()
    # randomly choose a contact
index = randrange(len(old_contacts))
contact = ContactFormAttributes(firstname="modified")
contact.id = old_contacts[index].id
fixt.contact.modify_contact_by_index(index, contact)
assert len(old_contacts) == fixt.contact.count_contacts()
new_contacts = fixt.contact.get_contact_list()
old_contacts[index] = contact
assert sorted(old_contacts, key=ContactFormAttributes.id_or_max) == sorted(new_contacts,
key=ContactFormAttributes.id_or_max)
| 42.142857
| 115
| 0.700565
|
6367d643d791a7ae23dfa0bb59986c3a07f5e4a3
| 3,707
|
py
|
Python
|
tests/test_execution_mode.py
|
ShellAddicted/pex
|
f1060b784fc9c4337a514ed21357ea9e8c2e4f41
|
[
"Apache-2.0"
] | 2,160
|
2015-01-06T17:57:39.000Z
|
2022-03-30T19:59:01.000Z
|
tests/test_execution_mode.py
|
sthagen/pex
|
9bd4c178c93556faad3c8a1e75989c9288d09416
|
[
"Apache-2.0"
] | 1,242
|
2015-01-22T14:56:46.000Z
|
2022-03-31T18:02:38.000Z
|
tests/test_execution_mode.py
|
Satertek/pex
|
64de1c4cf031118ef446ac98a8c164c91c23bb9b
|
[
"Apache-2.0"
] | 248
|
2015-01-15T13:34:50.000Z
|
2022-03-26T01:24:18.000Z
|
# Copyright 2021 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import absolute_import
import os.path
import subprocess
import sys
from subprocess import CalledProcessError
import pytest
from pex.layout import Layout
from pex.testing import run_pex_command
from pex.typing import TYPE_CHECKING
if TYPE_CHECKING:
import attr # vendor:skip
from typing import Any, Callable, Dict, Iterable, Tuple
CreateColorsPex = Callable[[Iterable[str]], str]
ExecuteColorsPex = Callable[[str, Dict[str, str]], Tuple[str, str]]
else:
from pex.third_party import attr
@pytest.fixture
def create_colors_pex(tmpdir):
# type: (Any) -> CreateColorsPex
def create(extra_args):
pex_file = os.path.join(str(tmpdir), "colors.pex")
results = run_pex_command(["ansicolors==1.1.8", "-o", pex_file] + list(extra_args))
results.assert_success()
return pex_file
return create
@pytest.fixture
def execute_colors_pex(tmpdir):
# type: (Any) -> ExecuteColorsPex
def execute(colors_pex, extra_env):
pex_root = os.path.join(str(tmpdir), "pex_root")
env = os.environ.copy()
env.update(extra_env)
env["PEX_ROOT"] = pex_root
args = [colors_pex] if os.path.isfile(colors_pex) else [sys.executable, colors_pex]
output = subprocess.check_output(
args=args + ["-c", "import colors; print(colors.__file__)"], env=env
)
return output.strip().decode("utf-8"), pex_root
return execute
@attr.s(frozen=True)
class ExecutionMode(object):
extra_args = attr.ib() # type: Iterable[str]
isort_code_dir = attr.ib() # type: Callable[[Layout.Value], str]
venv_exception_expected = attr.ib() # type: bool
def installed_wheels_or_deps(layout):
# type: (Layout.Value) -> str
return "{app_root}/.deps/" if layout == Layout.LOOSE else "{pex_root}/installed_wheels/"
@pytest.mark.parametrize(
"execution_mode",
[
pytest.param(
ExecutionMode(
extra_args=[],
isort_code_dir=installed_wheels_or_deps,
venv_exception_expected=True,
),
id="PEX",
),
pytest.param(
ExecutionMode(
extra_args=["--include-tools"],
isort_code_dir=installed_wheels_or_deps,
venv_exception_expected=False,
),
id="PEX --include-tools",
),
pytest.param(
ExecutionMode(
extra_args=["--venv"],
isort_code_dir=lambda _: "{pex_root}/venvs/",
venv_exception_expected=False,
),
id="VENV",
),
],
)
@pytest.mark.parametrize(
"layout", [pytest.param(layout, id=layout.value) for layout in Layout.values()]
)
def test_execution_mode(
create_colors_pex, # type: CreateColorsPex
execute_colors_pex, # type: ExecuteColorsPex
execution_mode, # type: ExecutionMode
layout, # type: Layout.Value
):
# type: (...) -> None
pex_app = create_colors_pex(list(execution_mode.extra_args) + ["--layout", layout.value])
output, pex_root = execute_colors_pex(pex_app, {})
assert output.startswith(
execution_mode.isort_code_dir(layout).format(app_root=pex_app, pex_root=pex_root),
)
if execution_mode.venv_exception_expected:
with pytest.raises(CalledProcessError):
execute_colors_pex(pex_app, {"PEX_VENV": "1"})
else:
output, pex_root = execute_colors_pex(pex_app, {"PEX_VENV": "1"})
assert output.startswith(os.path.join(pex_root, "venvs"))
| 30.891667
| 93
| 0.643108
|
f098c89e089c920c13e89a2802b30dc5cac833b5
| 235
|
py
|
Python
|
guitarfan/scrapy/settings.py
|
timgates42/GuitarFan
|
1a6d6bcc7708cbb214648e7f5657728f6c27c48a
|
[
"MIT"
] | 48
|
2015-02-02T02:25:07.000Z
|
2022-03-11T12:39:39.000Z
|
guitarfan/scrapy/settings.py
|
timgates42/GuitarFan
|
1a6d6bcc7708cbb214648e7f5657728f6c27c48a
|
[
"MIT"
] | 2
|
2015-09-13T14:00:41.000Z
|
2021-08-04T16:28:25.000Z
|
guitarfan/scrapy/settings.py
|
timgates42/GuitarFan
|
1a6d6bcc7708cbb214648e7f5657728f6c27c48a
|
[
"MIT"
] | 16
|
2015-01-09T08:15:13.000Z
|
2020-06-20T11:07:49.000Z
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
SPIDER_MODULES = ['guitarfan.spiders']
NEWSPIDER_MODULE = 'guitarfan.spiders'
DEFAULT_ITEM_CLASS = 'guitarfan.items.Artist'
# ITEM_PIPELINES = ['guitarfan.pipelines.FilterWordsPipeline']
| 26.111111
| 62
| 0.748936
|
8e3e1d5776999705797e2d801b590ebb2070b20c
| 3,459
|
py
|
Python
|
test/test_api_v2___ula_api.py
|
Fates-List/fateslist.py-autogen
|
0643434d9d0e71f781f99b2703a2ef52f49d8875
|
[
"MIT"
] | null | null | null |
test/test_api_v2___ula_api.py
|
Fates-List/fateslist.py-autogen
|
0643434d9d0e71f781f99b2703a2ef52f49d8875
|
[
"MIT"
] | null | null | null |
test/test_api_v2___ula_api.py
|
Fates-List/fateslist.py-autogen
|
0643434d9d0e71f781f99b2703a2ef52f49d8875
|
[
"MIT"
] | null | null | null |
# coding: utf-8
"""
Fates List
Current API: v2 beta 3 Default API: v2 API Docs: https://apidocs.fateslist.xyz Enum Reference: https://apidocs.fateslist.xyz/structures/enums.autogen # noqa: E501
OpenAPI spec version: 0.3.0
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import unittest
import swagger_client
from swagger_client.api.api_v2___ula_api import APIV2ULAApi # noqa: E501
from swagger_client.rest import ApiException
class TestAPIV2ULAApi(unittest.TestCase):
"""APIV2ULAApi unit test stubs"""
def setUp(self):
self.api = APIV2ULAApi() # noqa: E501
def tearDown(self):
pass
def test_delete_endpoint_api_v2_ula_user_id_list_url_endpoint_feature_delete(self):
"""Test case for delete_endpoint_api_v2_ula_user_id_list_url_endpoint_feature_delete
Delete Endpoint # noqa: E501
"""
pass
def test_delete_list_api_v2_ula_user_id_list_url_delete(self):
"""Test case for delete_list_api_v2_ula_user_id_list_url_delete
Delete List # noqa: E501
"""
pass
def test_edit_endpoint_api_v2_ula_user_id_list_url_endpoints_patch(self):
"""Test case for edit_endpoint_api_v2_ula_user_id_list_url_endpoints_patch
Edit Endpoint # noqa: E501
"""
pass
def test_edit_list_api_v2_ula_user_id_list_url_patch(self):
"""Test case for edit_list_api_v2_ula_user_id_list_url_patch
Edit List # noqa: E501
"""
pass
def test_get_all_lists_api_v2_ula_lists_get(self):
"""Test case for get_all_lists_api_v2_ula_lists_get
Get All Lists # noqa: E501
"""
pass
def test_get_bot_api_v2_ula_bots_bot_id_get(self):
"""Test case for get_bot_api_v2_ula_bots_bot_id_get
Get Bot # noqa: E501
"""
pass
def test_get_feature_by_id_api_v2_ula_feature_id_id_get(self):
"""Test case for get_feature_by_id_api_v2_ula_feature_id_id_get
Get Feature By Id # noqa: E501
"""
pass
def test_get_feature_by_internal_name_api_v2_ula_feature_iname_iname_get(self):
"""Test case for get_feature_by_internal_name_api_v2_ula_feature_iname_iname_get
Get Feature By Internal Name # noqa: E501
"""
pass
def test_get_list_api_v2_ula_list_url_get(self):
"""Test case for get_list_api_v2_ula_list_url_get
Get List # noqa: E501
"""
pass
def test_get_user_voted_api_v2_ula_bots_bot_id_votes_check_post(self):
"""Test case for get_user_voted_api_v2_ula_bots_bot_id_votes_check_post
Get User Voted # noqa: E501
"""
pass
def test_new_endpoint_api_v2_ula_user_id_list_url_endpoints_put(self):
"""Test case for new_endpoint_api_v2_ula_user_id_list_url_endpoints_put
New Endpoint # noqa: E501
"""
pass
def test_new_list_api_v2_ula_user_id_lists_put(self):
"""Test case for new_list_api_v2_ula_user_id_lists_put
New List # noqa: E501
"""
pass
def test_post_stats_api_v2_ula_bots_bot_id_stats_post(self):
"""Test case for post_stats_api_v2_ula_bots_bot_id_stats_post
Post Stats # noqa: E501
"""
pass
if __name__ == '__main__':
unittest.main()
| 27.672
| 225
| 0.681989
|
a7c2bc4f5d4b4c072bfdec49461054deba30fe90
| 1,160
|
py
|
Python
|
euctr/crawl/base/config.py
|
jeekim/euctr-tracker-code
|
5a041bcd188ec089cfadf3e97b64013408177033
|
[
"MIT"
] | 3
|
2018-09-11T11:35:48.000Z
|
2020-09-10T10:39:23.000Z
|
euctr/crawl/base/config.py
|
jeekim/euctr-tracker-code
|
5a041bcd188ec089cfadf3e97b64013408177033
|
[
"MIT"
] | 61
|
2018-08-06T14:03:16.000Z
|
2022-03-17T12:30:42.000Z
|
euctr/crawl/base/config.py
|
jeekim/euctr-tracker-code
|
5a041bcd188ec089cfadf3e97b64013408177033
|
[
"MIT"
] | 4
|
2018-09-11T11:35:53.000Z
|
2020-12-18T14:21:15.000Z
|
# -*- coding: utf-8 -*-
import os
import logging
import logging.config
# Environment
EUCTR_DEBUG = os.environ.get('EUCTR_DEBUG', 'development')
if EUCTR_DEBUG == "yes":
ENV = 'testing'
elif EUCTR_DEBUG == "no":
ENV = 'production'
else:
assert False, "yes or no for debug"
WAREHOUSE_URL = os.environ['EUCTR_OPENTRIALS_DB']
# Scrapy
CRAWLERA_APIKEY = os.getenv('EUCTR_CRAWLERA_APIKEY', None)
SCRAPY_SETTINGS = {
'SPIDER_MODULES': [
'crawl.spider',
],
    # no fixed delay; we rely on AutoThrottle below to limit the request rate
    'DOWNLOAD_DELAY': 0,  # seconds
'CONCURRENT_REQUESTS_PER_DOMAIN': 32,
'CONCURRENT_REQUESTS': 32,
'AUTOTHROTTLE_ENABLED': True,
'AUTOTHROTTLE_TARGET_CONCURRENCY': 10, # our actual crawling constraint
'AUTOTHROTTLE_DEBUG': True,
'AUTOTHROTTLE_START_DELAY': 0.01, # 10 milliseconds
'RETRY_ENABLED': True,
'RETRY_TIMES': 24,
'ITEM_PIPELINES': {
'crawl.base.pipelines.Warehouse': 100,
},
'DOWNLOADER_MIDDLEWARES': {'scrapy_crawlera.CrawleraMiddleware': 600},
'CRAWLERA_ENABLED': CRAWLERA_APIKEY or False,
'CRAWLERA_APIKEY': CRAWLERA_APIKEY
}
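# Hedged usage sketch (not part of the original config): a plain settings dict
# like SCRAPY_SETTINGS can be passed straight to Scrapy's CrawlerProcess. The
# spider name "euctr" below is illustrative only.
if __name__ == "__main__":
    from scrapy.crawler import CrawlerProcess
    process = CrawlerProcess(settings=SCRAPY_SETTINGS)
    process.crawl("euctr")
    process.start()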
| 26.363636
| 75
| 0.697414
|
73b1642a53228e3ec7f1652d046d497330ab2fa8
| 2,605
|
py
|
Python
|
coordinateparser.py
|
vkuhlmann/minecraft-proximity
|
34d2b867da308d7b8e4b2e4ea47ae82a0a88b84b
|
[
"MIT"
] | null | null | null |
coordinateparser.py
|
vkuhlmann/minecraft-proximity
|
34d2b867da308d7b8e4b2e4ea47ae82a0a88b84b
|
[
"MIT"
] | null | null | null |
coordinateparser.py
|
vkuhlmann/minecraft-proximity
|
34d2b867da308d7b8e4b2e4ea47ae82a0a88b84b
|
[
"MIT"
] | null | null | null |
from PIL import Image, ImageGrab
import positioner
import fontdecoder
import re
import logging
import threading
import time
# Dependency of coordinatereader.py
WHITE_COLOR = (252, 252, 252)
GRAY_COLOR = (221, 221, 221)
class CoordinateParser:
def __init__(self, snippeter):
self.snippeter = snippeter
self.debug = False
self.regex = re.compile(
r"\s*(Z:)?\s*(?P<x>[+-]?\d+(\.\d+)?)(\s|\s*/)\s*" +
r"(?P<y>[+-]?\d+(\.\d+)?)(\s|\s*/)\s*" +
r"(?P<z>[+-]?\d+(\.\d+)?).*")
def readCharacter(self, im, pixels, scale):
raster = []
nonZeroLength = 0
spaceLength = 0
for x in range(self.x, im.size[0] // scale):
val = 0
count = 0
for y in range(0, im.size[1] // scale):
col = pixels[x * scale, y * scale][:3]
if col == WHITE_COLOR or col == GRAY_COLOR:
count += 1
val += 2**y
if val != 0 or nonZeroLength > 0:
raster += [val]
if count == 0:
spaceLength += 1
if nonZeroLength != 0:
while raster[0] == 0:
raster = raster[1:]
symb = fontdecoder.decodeSymbol(raster)
raster = []
self.x = x + 1
return symb
# if superVal in DECIPHER:
# return DECIPHER[superVal]
# else:
# return f"[{superVal}]"
# print(nonZeroLength)
nonZeroLength = 0
if spaceLength > 3:
self.x = x
return " "
else:
nonZeroLength += 1
spaceLength = 0
return None
def getCoordinates(self):
im, pixels, scale = self.snippeter.snippet()
return self.parseCoordinates(im, pixels, scale)
def parseCoordinates(self, im, pixels, scale):
        if pixels is None:
return None
self.x = 0
outp = ""
while True:
ch = self.readCharacter(im, pixels, scale)
            if ch is None:
break
outp += ch
if self.debug:
logging.info(f"[{outp}]")
match = self.regex.fullmatch(outp)
        if match is not None:
x = float(match.group("x"))
y = float(match.group("y"))
z = float(match.group("z"))
return {"x": x, "y": y, "z": z}
return None
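# Standalone sketch of the coordinate pattern compiled above (the sample string
# is an assumption, not output from a real capture): coordinates may carry an
# optional "Z:" prefix and are separated by whitespace and/or slashes.
if __name__ == "__main__":
    sample = "123.45 / 64.0 / -789.12"
    pattern = re.compile(
        r"\s*(Z:)?\s*(?P<x>[+-]?\d+(\.\d+)?)(\s|\s*/)\s*"
        r"(?P<y>[+-]?\d+(\.\d+)?)(\s|\s*/)\s*"
        r"(?P<z>[+-]?\d+(\.\d+)?).*")
    m = pattern.fullmatch(sample)
    assert m is not None and float(m.group("z")) == -789.12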
| 28.944444
| 63
| 0.443762
|
30430ccd15fa515065e837890a8d8ca934172400
| 16,168
|
py
|
Python
|
sdks/python/apache_beam/transforms/window_test.py
|
dexterchan/beam
|
01e500c2dd0d699aea0434154b69fd59d824700f
|
[
"Apache-2.0"
] | 2
|
2019-12-14T04:24:33.000Z
|
2020-02-21T07:17:40.000Z
|
sdks/python/apache_beam/transforms/window_test.py
|
dexterchan/beam
|
01e500c2dd0d699aea0434154b69fd59d824700f
|
[
"Apache-2.0"
] | 6
|
2020-09-26T00:28:18.000Z
|
2022-02-10T01:07:25.000Z
|
sdks/python/apache_beam/transforms/window_test.py
|
dexterchan/beam
|
01e500c2dd0d699aea0434154b69fd59d824700f
|
[
"Apache-2.0"
] | 1
|
2020-02-09T02:51:50.000Z
|
2020-02-09T02:51:50.000Z
|
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Unit tests for the windowing classes."""
from __future__ import absolute_import
from __future__ import division
import unittest
from builtins import range
import apache_beam as beam
from apache_beam.coders import coders
from apache_beam.runners import pipeline_context
from apache_beam.testing.test_pipeline import TestPipeline
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import CombinePerKey
from apache_beam.transforms import Create
from apache_beam.transforms import FlatMapTuple
from apache_beam.transforms import GroupByKey
from apache_beam.transforms import Map
from apache_beam.transforms import MapTuple
from apache_beam.transforms import WindowInto
from apache_beam.transforms import combiners
from apache_beam.transforms import core
from apache_beam.transforms.core import Windowing
from apache_beam.transforms.trigger import AccumulationMode
from apache_beam.transforms.trigger import AfterCount
from apache_beam.transforms.window import FixedWindows
from apache_beam.transforms.window import GlobalWindow
from apache_beam.transforms.window import GlobalWindows
from apache_beam.transforms.window import IntervalWindow
from apache_beam.transforms.window import NonMergingWindowFn
from apache_beam.transforms.window import Sessions
from apache_beam.transforms.window import SlidingWindows
from apache_beam.transforms.window import TimestampCombiner
from apache_beam.transforms.window import TimestampedValue
from apache_beam.transforms.window import WindowedValue
from apache_beam.transforms.window import WindowFn
from apache_beam.utils.timestamp import MAX_TIMESTAMP
from apache_beam.utils.timestamp import MIN_TIMESTAMP
def context(element, timestamp):
return WindowFn.AssignContext(timestamp, element)
class ReifyWindowsFn(core.DoFn):
def process(self, element, window=core.DoFn.WindowParam):
key, values = element
yield "%s @ %s" % (key, window), values
reify_windows = core.ParDo(ReifyWindowsFn())
class TestCustomWindows(NonMergingWindowFn):
"""A custom non merging window fn which assigns elements into interval windows
[0, 3), [3, 5) and [5, element timestamp) based on the element timestamps.
"""
def assign(self, context):
timestamp = context.timestamp
if timestamp < 3:
return [IntervalWindow(0, 3)]
elif timestamp < 5:
return [IntervalWindow(3, 5)]
else:
return [IntervalWindow(5, timestamp)]
def get_window_coder(self):
return coders.IntervalWindowCoder()
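# Illustrative sketch of the assignment described in the docstring above (an
# assumed standalone check; the pipeline-level test below exercises the same
# behaviour): timestamps 2, 4 and 7 land in [0, 3), [3, 5) and [5, 7).
def _demo_custom_window_assignment():
  fn = TestCustomWindows()  # pylint: disable=abstract-class-instantiated
  assert fn.assign(context(None, 2)) == [IntervalWindow(0, 3)]
  assert fn.assign(context(None, 4)) == [IntervalWindow(3, 5)]
  assert fn.assign(context(None, 7)) == [IntervalWindow(5, 7)]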
class WindowTest(unittest.TestCase):
def test_timestamped_value_cmp(self):
self.assertEqual(TimestampedValue('a', 2), TimestampedValue('a', 2))
self.assertEqual(TimestampedValue('a', 2), TimestampedValue('a', 2.0))
self.assertNotEqual(TimestampedValue('a', 2), TimestampedValue('a', 2.1))
self.assertNotEqual(TimestampedValue('a', 2), TimestampedValue('b', 2))
def test_global_window(self):
self.assertEqual(GlobalWindow(), GlobalWindow())
self.assertNotEqual(GlobalWindow(),
IntervalWindow(MIN_TIMESTAMP, MAX_TIMESTAMP))
self.assertNotEqual(IntervalWindow(MIN_TIMESTAMP, MAX_TIMESTAMP),
GlobalWindow())
self.assertTrue(GlobalWindow().max_timestamp() < MAX_TIMESTAMP)
def test_fixed_windows(self):
# Test windows with offset: 2, 7, 12, 17, ...
windowfn = FixedWindows(size=5, offset=2)
self.assertEqual([IntervalWindow(7, 12)],
windowfn.assign(context('v', 7)))
self.assertEqual([IntervalWindow(7, 12)],
windowfn.assign(context('v', 11)))
self.assertEqual([IntervalWindow(12, 17)],
windowfn.assign(context('v', 12)))
# Test windows without offset: 0, 5, 10, 15, ...
windowfn = FixedWindows(size=5)
self.assertEqual([IntervalWindow(5, 10)],
windowfn.assign(context('v', 5)))
self.assertEqual([IntervalWindow(5, 10)],
windowfn.assign(context('v', 9)))
self.assertEqual([IntervalWindow(10, 15)],
windowfn.assign(context('v', 10)))
# Test windows with offset out of range.
windowfn = FixedWindows(size=5, offset=12)
self.assertEqual([IntervalWindow(7, 12)],
windowfn.assign(context('v', 11)))
def test_sliding_windows_assignment(self):
windowfn = SlidingWindows(size=15, period=5, offset=2)
expected = [IntervalWindow(7, 22),
IntervalWindow(2, 17),
IntervalWindow(-3, 12)]
self.assertEqual(expected, windowfn.assign(context('v', 7)))
self.assertEqual(expected, windowfn.assign(context('v', 8)))
self.assertEqual(expected, windowfn.assign(context('v', 11)))
def test_sliding_windows_assignment_fraction(self):
windowfn = SlidingWindows(size=3.5, period=2.5, offset=1.5)
self.assertEqual([IntervalWindow(1.5, 5.0), IntervalWindow(-1.0, 2.5)],
windowfn.assign(context('v', 1.7)))
self.assertEqual([IntervalWindow(1.5, 5.0)],
windowfn.assign(context('v', 3)))
def test_sliding_windows_assignment_fraction_large_offset(self):
windowfn = SlidingWindows(size=3.5, period=2.5, offset=4.0)
self.assertEqual([IntervalWindow(1.5, 5.0), IntervalWindow(-1.0, 2.5)],
windowfn.assign(context('v', 1.7)))
self.assertEqual([IntervalWindow(4.0, 7.5), IntervalWindow(1.5, 5.0)],
windowfn.assign(context('v', 4.5)))
def test_sessions_merging(self):
windowfn = Sessions(10)
def merge(*timestamps):
windows = [windowfn.assign(context(None, t)) for t in timestamps]
running = set()
class TestMergeContext(WindowFn.MergeContext):
def __init__(self):
super(TestMergeContext, self).__init__(running)
def merge(self, to_be_merged, merge_result):
for w in to_be_merged:
if w in running:
running.remove(w)
running.add(merge_result)
for ws in windows:
running.update(ws)
windowfn.merge(TestMergeContext())
windowfn.merge(TestMergeContext())
return sorted(running)
self.assertEqual([IntervalWindow(2, 12)], merge(2))
self.assertEqual([IntervalWindow(2, 12), IntervalWindow(19, 29)],
merge(2, 19))
self.assertEqual([IntervalWindow(2, 19)], merge(2, 9))
self.assertEqual([IntervalWindow(2, 19)], merge(9, 2))
self.assertEqual([IntervalWindow(2, 19), IntervalWindow(19, 29)],
merge(2, 9, 19))
self.assertEqual([IntervalWindow(2, 19), IntervalWindow(19, 29)],
merge(19, 9, 2))
self.assertEqual([IntervalWindow(2, 25)], merge(2, 15, 10))
def timestamped_key_values(self, pipeline, key, *timestamps):
return (pipeline | 'start' >> Create(timestamps)
| Map(lambda x: WindowedValue((key, x), x, [GlobalWindow()])))
def test_sliding_windows(self):
with TestPipeline() as p:
pcoll = self.timestamped_key_values(p, 'key', 1, 2, 3)
result = (pcoll
| 'w' >> WindowInto(SlidingWindows(period=2, size=4))
| GroupByKey()
| beam.MapTuple(lambda k, vs: (k, sorted(vs)))
| reify_windows)
expected = [('key @ [-2.0, 2.0)', [1]),
('key @ [0.0, 4.0)', [1, 2, 3]),
('key @ [2.0, 6.0)', [2, 3])]
assert_that(result, equal_to(expected))
def test_sessions(self):
with TestPipeline() as p:
pcoll = self.timestamped_key_values(p, 'key', 1, 2, 3, 20, 35, 27)
sort_values = Map(lambda k_vs: (k_vs[0], sorted(k_vs[1])))
result = (pcoll
| 'w' >> WindowInto(Sessions(10))
| GroupByKey()
| sort_values
| reify_windows)
expected = [('key @ [1.0, 13.0)', [1, 2, 3]),
('key @ [20.0, 45.0)', [20, 27, 35])]
assert_that(result, equal_to(expected))
def test_timestamped_value(self):
with TestPipeline() as p:
result = (p
| 'start' >> Create([(k, k) for k in range(10)])
| Map(lambda x_t: TimestampedValue(x_t[0], x_t[1]))
| 'w' >> WindowInto(FixedWindows(5))
| Map(lambda v: ('key', v))
| GroupByKey()
| beam.MapTuple(lambda k, vs: (k, sorted(vs))))
assert_that(result, equal_to([('key', [0, 1, 2, 3, 4]),
('key', [5, 6, 7, 8, 9])]))
def test_rewindow(self):
with TestPipeline() as p:
result = (p
| Create([(k, k) for k in range(10)])
| Map(lambda x_t1: TimestampedValue(x_t1[0], x_t1[1]))
| 'window' >> WindowInto(SlidingWindows(period=2, size=6))
# Per the model, each element is now duplicated across
# three windows. Rewindowing must preserve this duplication.
| 'rewindow' >> WindowInto(FixedWindows(5))
| 'rewindow2' >> WindowInto(FixedWindows(5))
| Map(lambda v: ('key', v))
| GroupByKey()
| beam.MapTuple(lambda k, vs: (k, sorted(vs))))
assert_that(result, equal_to([('key', sorted([0, 1, 2, 3, 4] * 3)),
('key', sorted([5, 6, 7, 8, 9] * 3))]))
def test_rewindow_regroup(self):
with TestPipeline() as p:
grouped = (p
| Create(range(5))
| Map(lambda t: TimestampedValue(('key', t), t))
| 'window' >> WindowInto(FixedWindows(5, offset=3))
| GroupByKey()
| MapTuple(lambda k, vs: (k, sorted(vs))))
# Both of these group-and-ungroup sequences should be idempotent.
regrouped1 = (grouped
| 'w1' >> WindowInto(FixedWindows(5, offset=3))
| 'g1' >> GroupByKey()
| FlatMapTuple(lambda k, vs: [(k, v) for v in vs]))
regrouped2 = (grouped
| FlatMapTuple(lambda k, vs: [(k, v) for v in vs])
| 'w2' >> WindowInto(FixedWindows(5, offset=3))
| 'g2' >> GroupByKey()
| MapTuple(lambda k, vs: (k, sorted(vs))))
with_windows = Map(lambda e, w=beam.DoFn.WindowParam: (e, w))
expected = [(('key', [0, 1, 2]), IntervalWindow(-2, 3)),
(('key', [3, 4]), IntervalWindow(3, 8))]
assert_that(grouped | 'ww' >> with_windows, equal_to(expected))
assert_that(
regrouped1 | 'ww1' >> with_windows, equal_to(expected), label='r1')
assert_that(
regrouped2 | 'ww2' >> with_windows, equal_to(expected), label='r2')
def test_timestamped_with_combiners(self):
with TestPipeline() as p:
result = (p
# Create some initial test values.
| 'start' >> Create([(k, k) for k in range(10)])
# The purpose of the WindowInto transform is to establish a
# FixedWindows windowing function for the PCollection.
# It does not bucket elements into windows since the timestamps
# from Create are not spaced 5 ms apart and very likely they all
# fall into the same window.
| 'w' >> WindowInto(FixedWindows(5))
# Generate timestamped values using the values as timestamps.
# Now there are values 5 ms apart and since Map propagates the
# windowing function from input to output the output PCollection
# will have elements falling into different 5ms windows.
| Map(lambda x_t2: TimestampedValue(x_t2[0], x_t2[1]))
# We add a 'key' to each value representing the index of the
# window. This is important since there is no guarantee of
# order for the elements of a PCollection.
| Map(lambda v: (v // 5, v)))
# Sum all elements associated with a key and window. Although it
# is called CombinePerKey it is really CombinePerKeyAndWindow the
# same way GroupByKey is really GroupByKeyAndWindow.
sum_per_window = result | CombinePerKey(sum)
# Compute mean per key and window.
mean_per_window = result | combiners.Mean.PerKey()
assert_that(sum_per_window, equal_to([(0, 10), (1, 35)]),
label='assert:sum')
assert_that(mean_per_window, equal_to([(0, 2.0), (1, 7.0)]),
label='assert:mean')
def test_custom_windows(self):
with TestPipeline() as p:
pcoll = self.timestamped_key_values(p, 'key', 0, 1, 2, 3, 4, 5, 6)
# pylint: disable=abstract-class-instantiated
result = (pcoll
| 'custom window' >> WindowInto(TestCustomWindows())
| GroupByKey()
| 'sort values' >> MapTuple(lambda k, vs: (k, sorted(vs))))
assert_that(result, equal_to([('key', [0, 1, 2]),
('key', [3, 4]),
('key', [5]),
('key', [6])]))
def test_window_assignment_idempotency(self):
with TestPipeline() as p:
pcoll = self.timestamped_key_values(p, 'key', 0, 2, 4)
result = (pcoll
| 'window' >> WindowInto(FixedWindows(2))
| 'same window' >> WindowInto(FixedWindows(2))
| 'same window again' >> WindowInto(FixedWindows(2))
| GroupByKey())
assert_that(result, equal_to([('key', [0]),
('key', [2]),
('key', [4])]))
def test_window_assignment_through_multiple_gbk_idempotency(self):
with TestPipeline() as p:
pcoll = self.timestamped_key_values(p, 'key', 0, 2, 4)
result = (pcoll
| 'window' >> WindowInto(FixedWindows(2))
| 'gbk' >> GroupByKey()
| 'same window' >> WindowInto(FixedWindows(2))
| 'another gbk' >> GroupByKey()
| 'same window again' >> WindowInto(FixedWindows(2))
| 'gbk again' >> GroupByKey())
assert_that(result, equal_to([('key', [[[0]]]),
('key', [[[2]]]),
('key', [[[4]]])]))
class RunnerApiTest(unittest.TestCase):
def test_windowfn_encoding(self):
for window_fn in (GlobalWindows(),
FixedWindows(37),
SlidingWindows(2, 389),
Sessions(5077)):
context = pipeline_context.PipelineContext()
self.assertEqual(
window_fn,
WindowFn.from_runner_api(window_fn.to_runner_api(context), context))
def test_windowing_encoding(self):
for windowing in (
Windowing(GlobalWindows()),
Windowing(FixedWindows(1, 3), AfterCount(6),
accumulation_mode=AccumulationMode.ACCUMULATING),
Windowing(SlidingWindows(10, 15, 21), AfterCount(28),
timestamp_combiner=TimestampCombiner.OUTPUT_AT_LATEST,
accumulation_mode=AccumulationMode.DISCARDING)):
context = pipeline_context.PipelineContext()
self.assertEqual(
windowing,
Windowing.from_runner_api(windowing.to_runner_api(context), context))
if __name__ == '__main__':
unittest.main()
| 43.114667
| 80
| 0.614485
|
1ec49dde5e99f895705294ef7166f8b72de0e292
| 1,556
|
py
|
Python
|
heisenbridge/appservice.py
|
pstn/heisenbridge
|
c01bb90315cb0fba8a439e40bbe076445e0b62b8
|
[
"MIT"
] | null | null | null |
heisenbridge/appservice.py
|
pstn/heisenbridge
|
c01bb90315cb0fba8a439e40bbe076445e0b62b8
|
[
"MIT"
] | null | null | null |
heisenbridge/appservice.py
|
pstn/heisenbridge
|
c01bb90315cb0fba8a439e40bbe076445e0b62b8
|
[
"MIT"
] | null | null | null |
from abc import ABC
from abc import abstractmethod
from typing import List
from heisenbridge.matrix import Matrix
from heisenbridge.matrix import MatrixNotFound
class Room:
pass
class AppService(ABC):
api: Matrix
user_id: str
server_name: str
config: dict
async def load(self):
try:
self.config.update(await self.api.get_user_account_data(self.user_id, "irc"))
except MatrixNotFound:
await self.save()
async def save(self):
await self.api.put_user_account_data(self.user_id, "irc", self.config)
async def create_room(self, name: str, topic: str, invite: List[str]) -> str:
resp = await self.api.post_room_create(
{
"visibility": "private",
"name": name,
"topic": topic,
"invite": invite,
"is_direct": False,
"power_level_content_override": {
"users_default": 0,
"invite": 100,
"kick": 100,
"redact": 100,
"ban": 100,
"events": {
"m.room.name": 0,
"m.room.avatar": 0, # these work as long as rooms are private
},
},
}
)
return resp["room_id"]
@abstractmethod
def register_room(self, room: Room):
pass
@abstractmethod
def find_rooms(self, type, user_id: str = None) -> List[Room]:
pass
| 26.372881
| 89
| 0.517352
|
c7f9df6f4ed5a889546007ca78f5afbc30b7a9f2
| 4,361
|
py
|
Python
|
app/app/settings/base.py
|
trevore23/tvr-23-gke
|
e16e9f0e8f734fee0b6a3a245de4ff9bab278b98
|
[
"MIT"
] | null | null | null |
app/app/settings/base.py
|
trevore23/tvr-23-gke
|
e16e9f0e8f734fee0b6a3a245de4ff9bab278b98
|
[
"MIT"
] | null | null | null |
app/app/settings/base.py
|
trevore23/tvr-23-gke
|
e16e9f0e8f734fee0b6a3a245de4ff9bab278b98
|
[
"MIT"
] | null | null | null |
"""
Django settings for app project.
Generated by 'django-admin startproject' using Django 2.2.1.
For more information on this file, see
https://docs.djangoproject.com/en/2.2/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/2.2/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
PROJECT_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
BASE_DIR = os.path.dirname(PROJECT_DIR)
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/2.2/howto/deployment/checklist/
# Application definition
INSTALLED_APPS = [
'core',
'home',
'search',
'wagtail.contrib.forms',
'wagtail.contrib.redirects',
'wagtail.embeds',
'wagtail.sites',
'wagtail.users',
'wagtail.snippets',
'wagtail.documents',
'wagtail.images',
'wagtail.search',
'wagtail.admin',
'wagtail.core',
'modelcluster',
'taggit',
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
]
MIDDLEWARE = [
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
'django.middleware.security.SecurityMiddleware',
'wagtail.core.middleware.SiteMiddleware',
'wagtail.contrib.redirects.middleware.RedirectMiddleware',
]
ROOT_URLCONF = 'app.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [
os.path.join(PROJECT_DIR, 'templates'),
],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'app.wsgi.application'
# Database
# https://docs.djangoproject.com/en/2.2/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/2.2/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/2.2/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/2.2/howto/static-files/
STATICFILES_FINDERS = [
'django.contrib.staticfiles.finders.FileSystemFinder',
'django.contrib.staticfiles.finders.AppDirectoriesFinder',
]
STATICFILES_DIRS = [
os.path.join(PROJECT_DIR, 'static'),
]
# ManifestStaticFilesStorage is recommended in production, to prevent outdated
# Javascript / CSS assets being served from cache (e.g. after a Wagtail upgrade).
# See https://docs.djangoproject.com/en/2.2/ref/contrib/staticfiles/#manifeststaticfilesstorage
STATICFILES_STORAGE = 'django.contrib.staticfiles.storage.ManifestStaticFilesStorage'
STATIC_ROOT = os.path.join(BASE_DIR, 'static')
STATIC_URL = '/static/'
MEDIA_ROOT = os.path.join(BASE_DIR, 'media')
MEDIA_URL = '/media/'
# Wagtail settings
WAGTAIL_SITE_NAME = "app"
# Base URL to use when referring to full URLs within the Wagtail admin backend -
# e.g. in notification emails. Don't include '/admin' or a trailing slash
BASE_URL = 'http://example.com'
# Use my custom user model
AUTH_USER_MODEL = 'core.User'
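# Sketch of the usual split-settings pattern (the dev module name and values
# below are illustrative assumptions, not part of this repository):
#   # app/settings/dev.py
#   from .base import *  # noqa
#   DEBUG = True
#   SECRET_KEY = 'insecure-dev-only-key'  # placeholder; never use in production
#   ALLOWED_HOSTS = ['*']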
| 25.958333
| 95
| 0.69961
|
7ed381e98fff28b3470088aed08c429eb5b66f41
| 194
|
py
|
Python
|
project/utils/mocks.py
|
iiii4966/django-test
|
b194bbedf86cb6e279558ab1647f3681f150a994
|
[
"BSD-3-Clause"
] | null | null | null |
project/utils/mocks.py
|
iiii4966/django-test
|
b194bbedf86cb6e279558ab1647f3681f150a994
|
[
"BSD-3-Clause"
] | null | null | null |
project/utils/mocks.py
|
iiii4966/django-test
|
b194bbedf86cb6e279558ab1647f3681f150a994
|
[
"BSD-3-Clause"
] | null | null | null |
from django.http import HttpRequest
class MockRequest(HttpRequest):
def __init__(self, *args, **kwargs):
super(MockRequest, self).__init__()
self.META = kwargs.get('META')
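# Illustrative usage in a test (header name and value are arbitrary):
#   request = MockRequest(META={'HTTP_USER_AGENT': 'pytest'})
#   assert request.META['HTTP_USER_AGENT'] == 'pytest'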
| 24.25
| 43
| 0.680412
|
6c1d48fa504809b1ac0eca8c2aa205c352d7d4fd
| 8,035
|
py
|
Python
|
examples/workloads/wrapper/wrapper/wrapper_main.py
|
Rajpratik71/workload-collocation-agent
|
6cf7bdab97ff61d85c21c3effecea632a225c668
|
[
"Apache-2.0"
] | 40
|
2019-05-16T16:42:33.000Z
|
2021-11-18T06:33:03.000Z
|
examples/workloads/wrapper/wrapper/wrapper_main.py
|
Rajpratik71/workload-collocation-agent
|
6cf7bdab97ff61d85c21c3effecea632a225c668
|
[
"Apache-2.0"
] | 72
|
2019-05-09T02:30:25.000Z
|
2020-11-17T09:24:44.000Z
|
examples/workloads/wrapper/wrapper/wrapper_main.py
|
Rajpratik71/workload-collocation-agent
|
6cf7bdab97ff61d85c21c3effecea632a225c668
|
[
"Apache-2.0"
] | 26
|
2019-05-20T09:13:38.000Z
|
2021-12-15T17:57:21.000Z
|
# Copyright (c) 2018 Intel Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import argparse
import json
import logging
import subprocess
import shlex
import threading
from functools import partial
from wca.storage import KafkaStorage, LogStorage
from wca.logger import TRACE
from wrapper.parser import (default_parse, parse_loop, DEFAULT_REGEXP,
ParseFunc, ServiceLevelArgs, append_service_level_metrics)
from wca.platforms import get_wca_version
log = logging.getLogger(__name__)
def main(parse: ParseFunc = default_parse):
"""
Launches workload and parser with processed arguments. Handles workload shutdown.
"""
arg_parser = prepare_argument_parser()
# It is assumed that unknown arguments should be passed to workload.
args = arg_parser.parse_args()
# Additional argparse checks.
if not ((args.load_metric_name is not None and args.peak_load is not None) or
(args.load_metric_name is None and args.peak_load is None)):
print("Both load_metric_name and peak_load have to be set, or none of them.")
exit(1)
# Needs to be passed to parse_loop
service_level_args = ServiceLevelArgs(args.slo, args.sli_metric_name,
args.inverse_sli_metric_value,
args.peak_load, args.load_metric_name)
# Configuring log
logging.basicConfig(
level=TRACE if args.log_level == 'TRACE' else args.log_level,
format="%(asctime)-15s %(levelname)s %(module)s %(message)s")
log.debug("Logger configured with {0}".format(args.log_level))
log.info("Starting wrapper version {}".format(get_wca_version()))
command_splited = shlex.split(args.command)
log.info("Running command: {}".format(command_splited))
workload_process = subprocess.Popen(command_splited,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
universal_newlines=True,
bufsize=1,
shell=args.subprocess_shell,
)
input = workload_process.stderr if args.stderr else workload_process.stdout
labels = json.loads(args.labels)
parse = partial(parse, regexp=args.regexp, separator=args.separator, labels=labels,
input=input, metric_name_prefix=args.metric_name_prefix)
append_service_level_metrics_func = partial(
append_service_level_metrics, labels=labels, service_level_args=service_level_args)
# create kafka storage with list of kafka brokers from arguments
kafka_brokers_addresses = args.kafka_brokers.replace(" ", "").split(',')
if kafka_brokers_addresses != [""]:
log.info("KafkaStorage {}".format(kafka_brokers_addresses))
storage = KafkaStorage(brokers_ips=kafka_brokers_addresses,
max_timeout_in_seconds=5.0,
topic=args.kafka_topic)
else:
storage = LogStorage(args.storage_output_filename, overwrite=True, include_timestamp=False)
t = threading.Thread(target=parse_loop, args=(parse, storage,
append_service_level_metrics_func))
t.start()
t.join()
# terminate all spawned processes
workload_process.terminate()
def prepare_argument_parser():
parser = argparse.ArgumentParser(
description='Wrapper that exposes APMs using Prometheus format.'
)
parser.add_argument(
'--command',
help='Workload run command',
dest='command',
required=True,
type=str
)
parser.add_argument(
'--stderr',
help='If 0, parser will use stdout, if 1 stderr',
dest='stderr',
default=0,
type=int
)
parser.add_argument(
'--regexp',
help='regexp used for parsing with the default parsing function\n'
             'Needs to contain 2 named groups "name" and "value".\n'
'Defaults to {0} that matches values in format "a=4.0"'.format(DEFAULT_REGEXP),
dest='regexp',
type=str,
default=DEFAULT_REGEXP
)
parser.add_argument(
'--metric_name_prefix',
help='metric name prefix (only relevant for default parse function)',
default=''
)
parser.add_argument(
'--separator',
help='String that separates workload outputs',
dest='separator',
type=str,
default=None
)
parser.add_argument(
'--log_level',
help='Logging level',
dest='log_level',
default='ERROR',
choices=['ERROR', 'WARNING', 'INFO', 'DEBUG', 'TRACE'],
type=str)
parser.add_argument(
'--labels',
help="Prometheus labels. Provide them in a dict format."
"Example: ""{'workload':'stress-ng','exper':'2'}""",
dest='labels',
type=str,
default="{}"
)
parser.add_argument(
'--kafka_brokers',
        help='list of addresses with ports of kafka brokers (kafka nodes). Comma separated',
dest='kafka_brokers',
default="",
type=str
)
parser.add_argument(
'--kafka_topic',
help='Kafka messages topic, passed to KafkaStorage',
dest='kafka_topic',
default='wca_apms',
type=str
)
parser.add_argument(
'--storage_output_filename',
        help='When Kafka storage is not used, allows redirecting metrics to a file',
dest='storage_output_filename',
default=None,
type=str
)
parser.add_argument(
'--peak_load',
help='Expected maximum load.',
default=None,
type=int
)
parser.add_argument(
'--load_metric_name',
help='Metric name parsed from the application stream '
             'used as the load level indicator. If set to `const` '
             'the behaviour is slightly different: the real load is treated '
             'as if it were always equal to peak_load (then load_normalized == 1).',
default=None,
type=str
)
parser.add_argument(
'--slo',
help='Service level objective. '
'Must be expressed in the same units as SLI. '
'Default value is +inf. '
'Being used only if sli_metric_name also defined.',
default=float("inf"),
type=float
)
parser.add_argument(
'--sli_metric_name',
help='Metric name parsed from the application stream '
'used as service level indicator.',
default=None,
type=str
)
parser.add_argument(
'--inverse_sli_metric_value',
help='Add this flag if value of a metric used to calculate service ' +
'level indicator should be inversed.',
action='store_true',
default=False,
)
parser.add_argument(
'--subprocess_shell',
help='Run subprocess command with full shell support.',
action='store_true',
default=False,
)
return parser
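# Illustrative invocation (the command, labels and output path below are
# hypothetical; only flags defined in prepare_argument_parser are used):
#   python -m wrapper.wrapper_main \
#       --command "stress-ng --cpu 4 --timeout 60" \
#       --labels '{"workload": "stress-ng"}' \
#       --log_level INFO \
#       --storage_output_filename /tmp/metrics.prom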
def debug():
"""Debug hook to allow entering debug mode in compiled pex.
Run it as PEX_MODULE=wrapper.wrapper_main:debug
"""
import warnings
try:
import ipdb as pdb
except ImportError:
warnings.warn('ipdb not available, using pdb')
import pdb
pdb.set_trace()
main()
if __name__ == "__main__":
main()
| 34.484979
| 99
| 0.619291
|
a0b15620180359fd9b42143ed2ec9e15860efe31
| 7,381
|
py
|
Python
|
dolphin/state_manager.py
|
Peachball/gym-dolphin
|
dedccd742079fa45cf0c5a8ca6e3a79d1529e324
|
[
"MIT"
] | 60
|
2017-02-22T15:04:13.000Z
|
2021-08-14T19:15:30.000Z
|
dolphin/state_manager.py
|
Peachball/gym-dolphin
|
dedccd742079fa45cf0c5a8ca6e3a79d1529e324
|
[
"MIT"
] | 1
|
2018-02-20T10:30:26.000Z
|
2018-02-20T10:30:26.000Z
|
dolphin/state_manager.py
|
Peachball/gym-dolphin
|
dedccd742079fa45cf0c5a8ca6e3a79d1529e324
|
[
"MIT"
] | 15
|
2017-02-23T05:12:52.000Z
|
2020-12-21T22:47:29.000Z
|
import struct
import attr
from . import ssbm, fields
def generic_wrapper(value, wrapper, default):
if wrapper is not None:
try:
value = wrapper(value)
except ValueError:
value = default
return value
intStruct = struct.Struct('>i')
byte_mask = 0xFF
short_mask = 0xFFFF
int_mask = 0xFFFFFFFF
@attr.s
class IntHandler:
shift = attr.ib(default=0)
mask = attr.ib(default=int_mask)
wrapper = attr.ib(default=None)
default = attr.ib(default=0)
def __call__(self, value):
transformed = (intStruct.unpack(value)[0] >> self.shift) & self.mask
return generic_wrapper(transformed, self.wrapper, self.default)
intHandler = IntHandler()
byteHandler = IntHandler(shift=24, mask=byte_mask)
shortHandler = IntHandler(shift=16, mask=short_mask)
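# Worked example of the handlers above (bytes chosen only for illustration):
# intStruct.unpack(b'\x2a\x00\x00\x01')[0] == 0x2A000001, so
# byteHandler(b'\x2a\x00\x00\x01') == 0x2A (top byte) and
# shortHandler(b'\x2a\x00\x00\x01') == 0x2A00 (top two bytes).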
floatStruct = struct.Struct('>f')
@attr.s
class FloatHandler:
wrapper = attr.ib(default=None)
default = attr.ib(default=0.0)
def __call__(self, value):
as_float = floatStruct.unpack(value)[0]
return generic_wrapper(as_float, self.wrapper, self.default)
floatHandler = FloatHandler()
@attr.s
class Handler:
path = attr.ib()
handler = attr.ib()
def __call__(self, obj, value):
fields.setPath(obj, self.path, self.handler(value))
# TODO: use numbers instead of strings to hash addresses
def add_address(x, y):
"""Returns a string representation of the sum of the two parameters.
x is a hex string address that can be converted to an int.
y is an int.
"""
return "{0:08X}".format(int(x, 16) + y)
# see https://docs.google.com/spreadsheets/d/1JX2w-r2fuvWuNgGb6D3Cs4wHQKLFegZe2jhbBuIhCG8
global_addresses = {}
global_addresses['80479D60'] = Handler(['frame'], intHandler)
global_addresses['80479D30'] = Handler(['menu'], IntHandler(mask=byte_mask))#, Menu, Menu.Characters)
global_addresses['804D6CAD'] = Handler(['stage'], shortHandler)#, Stage, Stage.Unselected)
def playerAddresses(player_id, addresses=None):
if addresses is None:
addresses = {}
player_path = ['players', player_id]
def playerHandler(field, handler):
return Handler(player_path + field.split('/'), handler)
cursor_x_address = add_address('81118DEC', -0xB80 * player_id)
cursor_y_address = add_address('81118DF0', -0xB80 * player_id)
addresses[cursor_x_address] = playerHandler('cursor_x', floatHandler)
addresses[cursor_y_address] = playerHandler('cursor_y', floatHandler)
type_address = add_address('803F0E08', 0x24 * player_id)
type_handler = playerHandler('type', byteHandler) #, PlayerType, PlayerType.Unselected)
character_handler = playerHandler('character', IntHandler(8, byte_mask)) #, Character, Character.Unselected)
addresses[type_address] = [character_handler]#, type_handler]
button_address = add_address('0x804C1FAC', 0x44 * player_id)
button_locs = dict(
Z = 4,
L = 5,
R = 6,
A = 8,
B = 9,
X = 10,
Y = 11,
START = 12
).items()
addresses[button_address] = [playerHandler('controller/button_%s' % b, IntHandler(mask=1<<i)) for b, i in button_locs]
stick_address = 0x804C1FCC
for stick in ['MAIN', 'C']:
for axis in ['x', 'y']:
address = "{0:08X}".format(stick_address + 0x44 * player_id)
addresses[address] = playerHandler("controller/stick_%s/%s" % (stick, axis), floatHandler)
stick_address += 4
static_pointer = 0x80453080 + 0xE90 * player_id
def add_static_address(offset, name, handler):
address = "{0:08X}".format(static_pointer + offset)
handle = playerHandler(name, handler)
if address not in addresses:
addresses[address] = [handle]
else:
addresses[address].append(handle)
add_static_address(0x60, 'percent', shortHandler)
# add_static_address(0x1890, 'percent', floatHandler)
add_static_address(0x8E, 'stock', byteHandler)
# nametag positions
add_static_address(0x10, 'x', floatHandler)
add_static_address(0x14, 'y', floatHandler)
add_static_address(0x18, 'z', floatHandler)
""" TODO: figure out why these don't work
#add_static_address(0x688, 'controller/stick_MAIN/x', floatHandler)
#add_static_address(0x68C, 'controller/stick_MAIN/y', floatHandler)
add_static_address(0x698, 'controller/stick_C/x', floatHandler)
add_static_address(0x69C, 'controller/stick_C/y', floatHandler)
add_static_address(0x6BC, 'controller/button_Z', IntHandler(mask=1<<4))
add_static_address(0x6BC, 'controller/button_L', IntHandler(mask=1<<5))
add_static_address(0x6BC, 'controller/button_R', IntHandler(mask=1<<6))
add_static_address(0x6BC, 'controller/button_A', IntHandler(mask=1<<8))
add_static_address(0x6BC, 'controller/button_B', IntHandler(mask=1<<9))
add_static_address(0x6BC, 'controller/button_X', IntHandler(mask=1<<10))
add_static_address(0x6BC, 'controller/button_Y', IntHandler(mask=1<<11))
"""
# hitbox positions
# add_static_address(0x18B4, 'x', floatHandler)
# add_static_address(0x18B8, 'y', floatHandler)
# add_static_address(0x18BC, 'z', floatHandler)
data_pointer = add_address('80453130', 0xE90 * player_id)
def add_data_address(offset, name, handler):
address = data_pointer + ' ' + offset
handle = playerHandler(name, handler)
if address not in addresses:
addresses[address] = [handle]
else:
addresses[address].append(handle)
add_data_address('70', 'action_state', intHandler)
add_data_address('20CC', 'action_counter', shortHandler)
add_data_address('8F4', 'action_frame', floatHandler)
add_data_address('19EC', 'invulnerable', intHandler)
add_data_address('19BC', 'hitlag_frames_left', floatHandler)
add_data_address('23A0', 'hitstun_frames_left', floatHandler)
    # TODO: make this an actual int
# 2 = charging, 3 = attacking, 0 = otherwise
add_data_address('2174', 'charging_smash', IntHandler(mask=0x2))
add_data_address('19F8', 'shield_size', floatHandler)
add_data_address('19C8', 'jumps_used', byteHandler)
add_data_address('140', 'in_air', intHandler)
add_data_address('E0', 'speed_air_x_self', floatHandler)
add_data_address('E4', 'speed_y_self', floatHandler)
add_data_address('EC', 'speed_x_attack', floatHandler)
add_data_address('F0', 'speed_y_attack', floatHandler)
add_data_address('14C', 'speed_ground_x_self', floatHandler)
add_data_address('8C', 'facing', floatHandler) # 1 is right, -1 is left
#add_data_address('1E4', 'speed_fastfall_self', floatHandler)
return addresses
class StateManager:
def __init__(self, player_ids=range(4)):
self.addresses = global_addresses.copy()
for player_id in player_ids:
playerAddresses(player_id, self.addresses)
def handle(self, obj, address, value):
"""Convert the raw address and value into changes in the State."""
assert address in self.addresses
handlers = self.addresses[address]
if isinstance(handlers, list):
for handler in handlers:
handler(obj, value)
else:
handlers(obj, value)
def locations(self):
"""Returns a list of addresses for exporting to Locations.txt."""
return self.addresses.keys()
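# Illustrative usage (the state object and the raw bytes are hypothetical; the
# object only has to expose the attribute paths that fields.setPath expects):
#   manager = StateManager(player_ids=[0, 1])
#   manager.handle(state, '80479D60', b'\x00\x00\x01\x2c')  # sets state.frame to 300
#   print(manager.locations())  # addresses to export to Locations.txt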
| 34.981043
| 122
| 0.686086
|
59a2a619406a60773e3713eac6495c7d6bc4ce55
| 20,517
|
py
|
Python
|
ProgettoLube/WebInspector/venv/Lib/site-packages/skimage/morphology/extrema.py
|
Lube-Project/ProgettoLube
|
cbf33971e2c2e865783ec1a2302625539186a338
|
[
"MIT"
] | 2
|
2022-03-19T09:45:18.000Z
|
2022-03-19T15:26:24.000Z
|
ProgettoLube/WebInspector/venv/Lib/site-packages/skimage/morphology/extrema.py
|
Lube-Project/ProgettoLube
|
cbf33971e2c2e865783ec1a2302625539186a338
|
[
"MIT"
] | 7
|
2021-06-08T21:46:24.000Z
|
2022-03-12T00:35:31.000Z
|
ProgettoLube/WebInspector/venv/Lib/site-packages/skimage/morphology/extrema.py
|
Lube-Project/ProgettoLube
|
cbf33971e2c2e865783ec1a2302625539186a338
|
[
"MIT"
] | null | null | null |
"""extrema.py - local minima and maxima
This module provides functions to find local maxima and minima of an image.
Here, local maxima (minima) are defined as connected sets of pixels with equal
gray level which is strictly greater (smaller) than the gray level of all
pixels in direct neighborhood of the connected set. In addition, the module
provides the related functions h-maxima and h-minima.
Soille, P. (2003). Morphological Image Analysis: Principles and Applications
(2nd ed.), Chapter 6. Springer-Verlag New York, Inc.
"""
import numpy as np
from ..util import dtype_limits, invert, crop
from .._shared.utils import warn
from . import greyreconstruct, _util
from ._extrema_cy import _local_maxima
def _add_constant_clip(image, const_value):
"""Add constant to the image while handling overflow issues gracefully.
"""
min_dtype, max_dtype = dtype_limits(image, clip_negative=False)
if const_value > (max_dtype - min_dtype):
raise ValueError("The added constant is not compatible"
"with the image data type.")
result = image + const_value
result[image > max_dtype-const_value] = max_dtype
return(result)
def _subtract_constant_clip(image, const_value):
"""Subtract constant from image while handling underflow issues.
"""
min_dtype, max_dtype = dtype_limits(image, clip_negative=False)
if const_value > (max_dtype-min_dtype):
raise ValueError("The subtracted constant is not compatible"
"with the image data type.")
result = image - const_value
result[image < (const_value + min_dtype)] = min_dtype
return(result)
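# Worked example (uint8 input chosen for illustration):
# _subtract_constant_clip(np.array([10, 200], dtype=np.uint8), 50) returns
# array([0, 150], dtype=uint8): entries that would underflow are clamped to
# the dtype minimum instead of wrapping around.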
def h_maxima(image, h, selem=None):
"""Determine all maxima of the image with height >= h.
The local maxima are defined as connected sets of pixels with equal
grey level strictly greater than the grey level of all pixels in direct
neighborhood of the set.
A local maximum M of height h is a local maximum for which
there is at least one path joining M with a higher maximum on which the
minimal value is f(M) - h (i.e. the values along the path are not
decreasing by more than h with respect to the maximum's value) and no
path for which the minimal value is greater.
Parameters
----------
image : ndarray
The input image for which the maxima are to be calculated.
h : unsigned integer
The minimal height of all extracted maxima.
selem : ndarray, optional
The neighborhood expressed as an n-D array of 1's and 0's.
Default is the ball of radius 1 according to the maximum norm
(i.e. a 3x3 square for 2D images, a 3x3x3 cube for 3D images, etc.)
Returns
-------
h_max : ndarray
The maxima of height >= h. The resulting image is a binary image, where
pixels belonging to the selected maxima take value 1, the others
take value 0.
See also
--------
skimage.morphology.extrema.h_minima
skimage.morphology.extrema.local_maxima
skimage.morphology.extrema.local_minima
References
----------
.. [1] Soille, P., "Morphological Image Analysis: Principles and
Applications" (Chapter 6), 2nd edition (2003), ISBN 3540429883.
Examples
--------
>>> import numpy as np
>>> from skimage.morphology import extrema
We create an image (quadratic function with a maximum in the center and
    4 additional constant maxima).
The heights of the maxima are: 1, 21, 41, 61, 81
>>> w = 10
>>> x, y = np.mgrid[0:w,0:w]
>>> f = 20 - 0.2*((x - w/2)**2 + (y-w/2)**2)
>>> f[2:4,2:4] = 40; f[2:4,7:9] = 60; f[7:9,2:4] = 80; f[7:9,7:9] = 100
>>> f = f.astype(np.int)
We can calculate all maxima with a height of at least 40:
>>> maxima = extrema.h_maxima(f, 40)
The resulting image will contain 3 local maxima.
"""
    # Check for h value that is larger than the range of the image. If this
# is True then there are no h-maxima in the image.
if h > np.ptp(image):
return np.zeros(image.shape, dtype=np.uint8)
# Check for floating point h value. For this to work properly
# we need to explicitly convert image to float64.
#
# FIXME: This could give incorrect results if image is int64 and
# has a very high dynamic range. The dtype of image is
# changed to float64, and different integer values could
# become the same float due to rounding.
#
# >>> ii64 = np.iinfo(np.int64)
# >>> a = np.array([ii64.max, ii64.max - 2])
# >>> a[0] == a[1]
# False
# >>> b = a.astype(np.float64)
# >>> b[0] == b[1]
# True
#
if np.issubdtype(type(h), np.floating) and \
np.issubdtype(image.dtype, np.integer):
if ((h % 1) != 0):
warn('possible precision loss converting image to '
'floating point. To silence this warning, '
'ensure image and h have same data type.',
stacklevel=2)
image = image.astype(np.float_)
else:
h = image.dtype.type(h)
if (h == 0):
raise ValueError("h = 0 is ambiguous, use local_maxima() "
"instead?")
if np.issubdtype(image.dtype, np.floating):
# The purpose of the resolution variable is to allow for the
# small rounding errors that inevitably occur when doing
# floating point arithmetic. We want shifted_img to be
# guaranteed to be h less than image. If we only subtract h
        # there may be pixels where shifted_img ends up being
# slightly greater than image - h.
#
# The resolution is scaled based on the pixel values in the
# image because floating point precision is relative. A
# very large value of 1.0e10 will have a large precision,
# say +-1.0e4, and a very small value of 1.0e-10 will have
# a very small precision, say +-1.0e-16.
#
resolution = 2 * np.finfo(image.dtype).resolution * np.abs(image)
shifted_img = image - h - resolution
else:
shifted_img = _subtract_constant_clip(image, h)
rec_img = greyreconstruct.reconstruction(shifted_img, image,
method='dilation', selem=selem)
residue_img = image - rec_img
return (residue_img >= h).astype(np.uint8)
def h_minima(image, h, selem=None):
"""Determine all minima of the image with depth >= h.
The local minima are defined as connected sets of pixels with equal
grey level strictly smaller than the grey levels of all pixels in direct
neighborhood of the set.
A local minimum M of depth h is a local minimum for which
there is at least one path joining M with a deeper minimum on which the
maximal value is f(M) + h (i.e. the values along the path are not
increasing by more than h with respect to the minimum's value) and no
path for which the maximal value is smaller.
Parameters
----------
image : ndarray
The input image for which the minima are to be calculated.
h : unsigned integer
The minimal depth of all extracted minima.
selem : ndarray, optional
The neighborhood expressed as an n-D array of 1's and 0's.
Default is the ball of radius 1 according to the maximum norm
(i.e. a 3x3 square for 2D images, a 3x3x3 cube for 3D images, etc.)
Returns
-------
h_min : ndarray
The minima of depth >= h. The resulting image is a binary image, where
pixels belonging to the selected minima take value 1, the other pixels
take value 0.
See also
--------
skimage.morphology.extrema.h_maxima
skimage.morphology.extrema.local_maxima
skimage.morphology.extrema.local_minima
References
----------
.. [1] Soille, P., "Morphological Image Analysis: Principles and
Applications" (Chapter 6), 2nd edition (2003), ISBN 3540429883.
Examples
--------
>>> import numpy as np
>>> from skimage.morphology import extrema
We create an image (quadratic function with a minimum in the center and
    4 additional constant minima).
The depth of the minima are: 1, 21, 41, 61, 81
>>> w = 10
>>> x, y = np.mgrid[0:w,0:w]
>>> f = 180 + 0.2*((x - w/2)**2 + (y-w/2)**2)
>>> f[2:4,2:4] = 160; f[2:4,7:9] = 140; f[7:9,2:4] = 120; f[7:9,7:9] = 100
>>> f = f.astype(np.int)
We can calculate all minima with a depth of at least 40:
>>> minima = extrema.h_minima(f, 40)
The resulting image will contain 3 local minima.
"""
if h > np.ptp(image):
return np.zeros(image.shape, dtype=np.uint8)
if np.issubdtype(type(h), np.floating) and \
np.issubdtype(image.dtype, np.integer):
if ((h % 1) != 0):
warn('possible precision loss converting image to '
'floating point. To silence this warning, '
'ensure image and h have same data type.',
stacklevel=2)
image = image.astype(np.float_)
else:
h = image.dtype.type(h)
if (h == 0):
raise ValueError("h = 0 is ambiguous, use local_minima() "
"instead?")
if np.issubdtype(image.dtype, np.floating):
resolution = 2 * np.finfo(image.dtype).resolution * np.abs(image)
shifted_img = image + h + resolution
else:
shifted_img = _add_constant_clip(image, h)
rec_img = greyreconstruct.reconstruction(shifted_img, image,
method='erosion', selem=selem)
residue_img = rec_img - image
return (residue_img >= h).astype(np.uint8)
def local_maxima(image, selem=None, connectivity=None, indices=False,
allow_borders=True):
"""Find local maxima of n-dimensional array.
The local maxima are defined as connected sets of pixels with equal gray
level (plateaus) strictly greater than the gray levels of all pixels in the
neighborhood.
Parameters
----------
image : ndarray
An n-dimensional array.
selem : ndarray, optional
A structuring element used to determine the neighborhood of each
evaluated pixel (``True`` denotes a connected pixel). It must be a
boolean array and have the same number of dimensions as `image`. If
neither `selem` nor `connectivity` are given, all adjacent pixels are
considered as part of the neighborhood.
connectivity : int, optional
A number used to determine the neighborhood of each evaluated pixel.
Adjacent pixels whose squared distance from the center is less than or
equal to `connectivity` are considered neighbors. Ignored if
`selem` is not None.
indices : bool, optional
If True, the output will be a tuple of one-dimensional arrays
representing the indices of local maxima in each dimension. If False,
the output will be a boolean array with the same shape as `image`.
allow_borders : bool, optional
If true, plateaus that touch the image border are valid maxima.
Returns
-------
maxima : ndarray or tuple[ndarray]
If `indices` is false, a boolean array with the same shape as `image`
is returned with ``True`` indicating the position of local maxima
(``False`` otherwise). If `indices` is true, a tuple of one-dimensional
arrays containing the coordinates (indices) of all found maxima.
Warns
-----
UserWarning
If `allow_borders` is false and any dimension of the given `image` is
shorter than 3 samples, maxima can't exist and a warning is shown.
See Also
--------
skimage.morphology.local_minima
skimage.morphology.h_maxima
skimage.morphology.h_minima
Notes
-----
This function operates on the following ideas:
1. Make a first pass over the image's last dimension and flag candidates
for local maxima by comparing pixels in only one direction.
If the pixels aren't connected in the last dimension all pixels are
flagged as candidates instead.
For each candidate:
2. Perform a flood-fill to find all connected pixels that have the same
gray value and are part of the plateau.
3. Consider the connected neighborhood of a plateau: if no bordering sample
has a higher gray level, mark the plateau as a definite local maximum.
Examples
--------
>>> from skimage.morphology import local_maxima
>>> image = np.zeros((4, 7), dtype=int)
>>> image[1:3, 1:3] = 1
>>> image[3, 0] = 1
>>> image[1:3, 4:6] = 2
>>> image[3, 6] = 3
>>> image
array([[0, 0, 0, 0, 0, 0, 0],
[0, 1, 1, 0, 2, 2, 0],
[0, 1, 1, 0, 2, 2, 0],
[1, 0, 0, 0, 0, 0, 3]])
Find local maxima by comparing to all neighboring pixels (maximal
connectivity):
>>> local_maxima(image)
array([[False, False, False, False, False, False, False],
[False, True, True, False, False, False, False],
[False, True, True, False, False, False, False],
[ True, False, False, False, False, False, True]])
>>> local_maxima(image, indices=True)
(array([1, 1, 2, 2, 3, 3]), array([1, 2, 1, 2, 0, 6]))
Find local maxima without comparing to diagonal pixels (connectivity 1):
>>> local_maxima(image, connectivity=1)
array([[False, False, False, False, False, False, False],
[False, True, True, False, True, True, False],
[False, True, True, False, True, True, False],
[ True, False, False, False, False, False, True]])
and exclude maxima that border the image edge:
>>> local_maxima(image, connectivity=1, allow_borders=False)
array([[False, False, False, False, False, False, False],
[False, True, True, False, True, True, False],
[False, True, True, False, True, True, False],
[False, False, False, False, False, False, False]])
"""
image = np.asarray(image, order="C")
if image.size == 0:
# Return early for empty input
if indices:
# Make sure that output is a tuple of 1 empty array per dimension
return np.nonzero(image)
else:
return np.zeros(image.shape, dtype=np.bool)
if allow_borders:
# Ensure that local maxima are always at least one smaller sample away
# from the image border
image = _util._fast_pad(image, image.min())
# Array of flags used to store the state of each pixel during evaluation.
# See _extrema_cy.pyx for their meaning
flags = np.zeros(image.shape, dtype=np.uint8)
_util._set_border_values(flags, value=3)
if any(s < 3 for s in image.shape):
# Warn and skip if any dimension is smaller than 3
# -> no maxima can exist & structuring element can't be applied
warn(
"maxima can't exist for an image with any dimension smaller 3 "
"if borders aren't allowed",
stacklevel=3
)
else:
selem = _util._resolve_neighborhood(selem, connectivity, image.ndim)
neighbor_offsets = _util._offsets_to_raveled_neighbors(
image.shape, selem, center=((1,) * image.ndim)
)
try:
_local_maxima(image.ravel(), flags.ravel(), neighbor_offsets)
except TypeError:
if image.dtype == np.float16:
# Provide the user with clearer error message
raise TypeError("dtype of `image` is float16 which is not "
"supported, try upcasting to float32")
else:
raise # Otherwise raise original message
if allow_borders:
# Revert padding performed at the beginning of the function
flags = crop(flags, 1)
else:
# No padding was performed but set edge values back to 0
_util._set_border_values(flags, value=0)
if indices:
return np.nonzero(flags)
else:
return flags.view(np.bool)
def local_minima(image, selem=None, connectivity=None, indices=False,
allow_borders=True):
"""Find local minima of n-dimensional array.
The local minima are defined as connected sets of pixels with equal gray
level (plateaus) strictly smaller than the gray levels of all pixels in the
neighborhood.
Parameters
----------
image : ndarray
An n-dimensional array.
selem : ndarray, optional
A structuring element used to determine the neighborhood of each
evaluated pixel (``True`` denotes a connected pixel). It must be a
boolean array and have the same number of dimensions as `image`. If
neither `selem` nor `connectivity` are given, all adjacent pixels are
considered as part of the neighborhood.
connectivity : int, optional
A number used to determine the neighborhood of each evaluated pixel.
Adjacent pixels whose squared distance from the center is less than or
equal to `connectivity` are considered neighbors. Ignored if
`selem` is not None.
indices : bool, optional
If True, the output will be a tuple of one-dimensional arrays
representing the indices of local minima in each dimension. If False,
the output will be a boolean array with the same shape as `image`.
allow_borders : bool, optional
If true, plateaus that touch the image border are valid minima.
Returns
-------
minima : ndarray or tuple[ndarray]
If `indices` is false, a boolean array with the same shape as `image`
is returned with ``True`` indicating the position of local minima
(``False`` otherwise). If `indices` is true, a tuple of one-dimensional
arrays containing the coordinates (indices) of all found minima.
See Also
--------
skimage.morphology.local_maxima
skimage.morphology.h_maxima
skimage.morphology.h_minima
Notes
-----
This function operates on the following ideas:
1. Make a first pass over the image's last dimension and flag candidates
for local minima by comparing pixels in only one direction.
If the pixels aren't connected in the last dimension all pixels are
flagged as candidates instead.
For each candidate:
2. Perform a flood-fill to find all connected pixels that have the same
gray value and are part of the plateau.
3. Consider the connected neighborhood of a plateau: if no bordering sample
has a smaller gray level, mark the plateau as a definite local minimum.
Examples
--------
>>> from skimage.morphology import local_minima
>>> image = np.zeros((4, 7), dtype=int)
>>> image[1:3, 1:3] = -1
>>> image[3, 0] = -1
>>> image[1:3, 4:6] = -2
>>> image[3, 6] = -3
>>> image
array([[ 0, 0, 0, 0, 0, 0, 0],
[ 0, -1, -1, 0, -2, -2, 0],
[ 0, -1, -1, 0, -2, -2, 0],
[-1, 0, 0, 0, 0, 0, -3]])
Find local minima by comparing to all neighboring pixels (maximal
connectivity):
>>> local_minima(image)
array([[False, False, False, False, False, False, False],
[False, True, True, False, False, False, False],
[False, True, True, False, False, False, False],
[ True, False, False, False, False, False, True]])
>>> local_minima(image, indices=True)
(array([1, 1, 2, 2, 3, 3]), array([1, 2, 1, 2, 0, 6]))
Find local minima without comparing to diagonal pixels (connectivity 1):
>>> local_minima(image, connectivity=1)
array([[False, False, False, False, False, False, False],
[False, True, True, False, True, True, False],
[False, True, True, False, True, True, False],
[ True, False, False, False, False, False, True]])
and exclude minima that border the image edge:
>>> local_minima(image, connectivity=1, allow_borders=False)
array([[False, False, False, False, False, False, False],
[False, True, True, False, True, True, False],
[False, True, True, False, True, True, False],
[False, False, False, False, False, False, False]])
"""
return local_maxima(
image=invert(image),
selem=selem,
connectivity=connectivity,
indices=indices,
allow_borders=allow_borders
)
| 38.206704
| 79
| 0.63016
|
779d73aa539551bc4900c328c3c726df75c5ef72
| 6,259
|
py
|
Python
|
src/config.py
|
AlbertSuarez/3d-net
|
6b1be62e224030e8c1d9d4695b114a64e8948b02
|
[
"MIT"
] | 1
|
2020-11-30T06:44:30.000Z
|
2020-11-30T06:44:30.000Z
|
src/config.py
|
AlbertSuarez/3d-net
|
6b1be62e224030e8c1d9d4695b114a64e8948b02
|
[
"MIT"
] | null | null | null |
src/config.py
|
AlbertSuarez/3d-net
|
6b1be62e224030e8c1d9d4695b114a64e8948b02
|
[
"MIT"
] | null | null | null |
import os
THINGIVERSE_FLASK_PORT = 8080
THINGIVERSE_FLASK_WAIT_PRE = 3
THINGIVERSE_FLASK_WAIT_POST = 300
THINGIVERSE_FLASK_WAIT_ENABLE = False
THINGIVERSE_FLASK_ENDPOINT = '/download'
THINGIVERSE_API_NUMBER_PAGES = 1000
THINGIVERSE_API_PER_PAGE = 500
THINGIVERSE_API_CONCURRENCY = 10
THINGIVERSE_API_CONCURRENCY_DOWNLOAD = 50
THINGIVERSE_API_AUTH = 'https://www.thingiverse.com/login/oauth/authorize'
THINGIVERSE_API_TOKEN = 'https://www.thingiverse.com/login/oauth/access_token'
THINGIVERSE_API_DONE = 'https://asuarez.dev/3d-net/docs/images/done.jpeg'
THINGIVERSE_API_PACKAGE = 'https://api.thingiverse.com/things/{}/package-url'
THINGIVERSE_API_SEARCH = 'https://api.thingiverse.com/search/' \
'?page={}&per_page={}&sort=popular&category_id={}&type=things'
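# Illustrative: THINGIVERSE_API_SEARCH.format(1, THINGIVERSE_API_PER_PAGE, 127)
# produces the search URL for page 1 with 500 results of category 127
# (3d__printer_accessories in DATASET_CATEGORIES below).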
DATASET_FOLDER = 'data'
DATASET_FOLDER_DOWNLOADED = os.path.join(DATASET_FOLDER, 'downloaded')
DATASET_FOLDER_STANDARDIZED = os.path.join(DATASET_FOLDER, 'standardized')
DATASET_FOLDER_PREPROCESSED = os.path.join(DATASET_FOLDER, 'preprocessed')
DATASET_FOLDER_WEIGHTS = os.path.join(DATASET_FOLDER, 'weights')
DATASET_FOLDER_WEIGHTS_FINAL = 'weights'
DATASET_SUB_FOLDER_TRAINING = 'training'
DATASET_SUB_FOLDER_VALIDATION = 'validation'
DATASET_CATEGORIES = {
'3d__printer_accessories': {'category_id': 127, 'main_category': '3d'},
'3d__printer_extruders': {'category_id': 152, 'main_category': '3d'},
'3d__printer_parts': {'category_id': 128, 'main_category': '3d'},
'3d__printers': {'category_id': 126, 'main_category': '3d'},
'3d__printing_tests': {'category_id': 129, 'main_category': '3d'},
'art__2d': {'category_id': 144, 'main_category': 'art'},
'art__tools': {'category_id': 75, 'main_category': 'art'},
'art__coins_badges': {'category_id': 143, 'main_category': 'art'},
'art__interactive': {'category_id': 78, 'main_category': 'art'},
'art__math': {'category_id': 79, 'main_category': 'art'},
'art__scans_replicas': {'category_id': 145, 'main_category': 'art'},
'art__sculptures': {'category_id': 80, 'main_category': 'art'},
'art__signs_logos': {'category_id': 76, 'main_category': 'art'},
'fashion__accessories': {'category_id': 81, 'main_category': 'fashion'},
'fashion__bracelets': {'category_id': 82, 'main_category': 'fashion'},
'fashion__costume': {'category_id': 142, 'main_category': 'fashion'},
'fashion__earrings': {'category_id': 139, 'main_category': 'fashion'},
'fashion__glasses': {'category_id': 83, 'main_category': 'fashion'},
'fashion__jewelry': {'category_id': 84, 'main_category': 'fashion'},
'fashion__keychains': {'category_id': 130, 'main_category': 'fashion'},
'fashion__rings': {'category_id': 85, 'main_category': 'fashion'},
'gadgets__audio': {'category_id': 141, 'main_category': 'gadgets'},
'gadgets__camera': {'category_id': 86, 'main_category': 'gadgets'},
'gadgets__computer': {'category_id': 87, 'main_category': 'gadgets'},
'gadgets__mobile_phone': {'category_id': 88, 'main_category': 'gadgets'},
'gadgets__tablet': {'category_id': 90, 'main_category': 'gadgets'},
'gadgets__video_games': {'category_id': 91, 'main_category': 'gadgets'},
'hobby__automotive': {'category_id': 155, 'main_category': 'hobby'},
'hobby__diy': {'category_id': 93, 'main_category': 'hobby'},
'hobby__electronics': {'category_id': 92, 'main_category': 'hobby'},
'hobby__music': {'category_id': 94, 'main_category': 'hobby'},
'hobby__rc_vehicles': {'category_id': 95, 'main_category': 'hobby'},
'hobby__robotics': {'category_id': 96, 'main_category': 'hobby'},
'hobby__sport_outdoors': {'category_id': 140, 'main_category': 'hobby'},
'household__bathroom': {'category_id': 147, 'main_category': 'household'},
'household__containers': {'category_id': 146, 'main_category': 'household'},
'household__decor': {'category_id': 97, 'main_category': 'household'},
'household__supplies': {'category_id': 99, 'main_category': 'household'},
'household__kitchen_dining': {'category_id': 100, 'main_category': 'household'},
'household__office_organization': {'category_id': 101, 'main_category': 'household'},
'household__outdoor_garden': {'category_id': 98, 'main_category': 'household'},
'household__pets': {'category_id': 103, 'main_category': 'household'},
'learning__biology': {'category_id': 106, 'main_category': 'learning'},
'learning__engineering': {'category_id': 104, 'main_category': 'learning'},
'learning__math': {'category_id': 105, 'main_category': 'learning'},
'learning__physics_astronomy': {'category_id': 148, 'main_category': 'learning'},
'models__animals': {'category_id': 107, 'main_category': 'models'},
'models__buildings_structures': {'category_id': 108, 'main_category': 'models'},
'models__creatures': {'category_id': 109, 'main_category': 'models'},
'models__food_drink': {'category_id': 110, 'main_category': 'models'},
'models__furniture': {'category_id': 111, 'main_category': 'models'},
'models__robots': {'category_id': 115, 'main_category': 'models'},
'models__people': {'category_id': 112, 'main_category': 'models'},
'models__props': {'category_id': 114, 'main_category': 'models'},
'models__vehicles': {'category_id': 116, 'main_category': 'models'},
'tools__hand': {'category_id': 118, 'main_category': 'tools'},
'tools__machine': {'category_id': 117, 'main_category': 'tools'},
'tools__holders_boxes': {'category_id': 120, 'main_category': 'tools'},
'toys__chess': {'category_id': 151, 'main_category': 'toys'},
'toys__construction': {'category_id': 121, 'main_category': 'toys'},
'toys__dice': {'category_id': 122, 'main_category': 'toys'},
'toys__games': {'category_id': 123, 'main_category': 'toys'},
'toys__mechanical': {'category_id': 124, 'main_category': 'toys'},
'toys__playsets': {'category_id': 113, 'main_category': 'toys'},
'toys__puzzles': {'category_id': 125, 'main_category': 'toys'},
'toys__accessories': {'category_id': 149, 'main_category': 'toys'}
}
REQUEST_TIMEOUT = 30
STANDARDIZE_CONCURRENCY = 30
TRAIN_EPOCHS = len(DATASET_CATEGORIES)
TRAIN_INPUT_SIZE = 256
TRAIN_STRATEGY = 'sigmoid' # (relu, sigmoid)
TRAIN_MODEL_FILE = 'model.h5'
| 59.609524
| 89
| 0.70155
|
ca4b595c1629e771acabdfff364656768cc081f3
| 7,323
|
py
|
Python
|
lib/python/treadmill/cli/ssh.py
|
ms-petrugarstea/treadmill
|
4c813baf8831918744b5af020938c8a51ed15c72
|
[
"Apache-2.0"
] | 1
|
2019-04-14T20:17:07.000Z
|
2019-04-14T20:17:07.000Z
|
lib/python/treadmill/cli/ssh.py
|
crazyrex/treadmill
|
75be287a808a4cbdacab67b3f62a3cb3eb1eab67
|
[
"Apache-2.0"
] | null | null | null |
lib/python/treadmill/cli/ssh.py
|
crazyrex/treadmill
|
75be287a808a4cbdacab67b3f62a3cb3eb1eab67
|
[
"Apache-2.0"
] | null | null | null |
"""Trace treadmill application events.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import logging
import os
import socket
import subprocess
import sys
import click
import gevent
from gevent import queue as g_queue
import six
from six.moves import urllib_parse
from treadmill import context
from treadmill import cli
from treadmill import restclient
from treadmill import utils
from treadmill.websocket import client as ws_client
_LOGGER = logging.getLogger(__name__)
if sys.platform == 'win32':
_DEFAULT_SSH = 'putty.exe'
else:
_DEFAULT_SSH = 'ssh'
def _connect(host, port):
"""Check host:port is up."""
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock.settimeout(1)
try:
sock.connect((host, int(port)))
sock.close()
return True
except socket.error:
return False
def _check_handle(handle):
"""Checks if provided file handle is valid."""
return handle is not None and handle.fileno() >= 0
def run_ssh(host, port, ssh, command):
"""Runs ssh."""
if sys.platform == 'win32':
run_putty(host, port, ssh, command)
else:
run_unix(host, port, ssh, command)
def run_unix(host, port, ssh, command):
"""Runs standard ssh (non-windows)."""
if not host or not port:
return -2
if not utils.which(ssh):
cli.bad_exit('{} cannot be found in the PATH'.format(ssh))
ssh = [ssh,
'-o', 'UserKnownHostsFile=/dev/null',
'-o', 'StrictHostKeyChecking=no',
'-p', port, host] + command
_LOGGER.debug('Starting ssh: %s', ssh)
return utils.sane_execvp(ssh[0], ssh)
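# For example (host and port are made up), run_unix('node1.example.com', '2222',
# 'ssh', ['uptime']) execs:
#   ssh -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no \
#       -p 2222 node1.example.com uptime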
def run_putty(host, port, sshcmd, command):
"""Runs plink/putty (windows)."""
if not host or not port:
return -2
# Trick putty into storing ssh key automatically.
plink = os.path.join(os.path.dirname(sshcmd), 'plink.exe')
if not utils.which(plink):
cli.bad_exit('{} cannot be found in the PATH'.format(plink))
store_key_cmd = [plink, '-P', port,
'%s@%s' % (os.environ['USERNAME'], host), 'exit']
_LOGGER.debug('Importing host key: %s', store_key_cmd)
store_key_proc = subprocess.Popen(store_key_cmd,
stdout=subprocess.PIPE,
stdin=subprocess.PIPE,
stderr=subprocess.PIPE)
out, err = store_key_proc.communicate(input='y\n\n\n\n\n\n\n\n\n'.encode())
_LOGGER.debug('plink STDOUT: %s', out)
_LOGGER.debug('plink STDERR: %s', err)
if command:
sshcmd = plink
ssh = [sshcmd, '-P', port, '%s@%s' % (os.environ['USERNAME'], host)]
if command:
ssh.extend(command)
devnull = {}
def _get_devnull():
"""Gets handle to the null device."""
if not devnull:
devnull['fd'] = os.open(os.devnull, os.O_RDWR)
return devnull['fd']
if not utils.which(sshcmd):
cli.bad_exit('{} cannot be found in the PATH'.format(sshcmd))
_LOGGER.debug('Starting ssh: %s', ssh)
try:
if os.path.basename(sshcmd).lower() == 'putty.exe':
utils.sane_execvp(ssh[0], ssh)
else:
# Call plink. Redirect to devnull if std streams are empty/invalid.
subprocess.call(
ssh,
stdin=None if _check_handle(sys.stdin) else _get_devnull(),
stdout=None if _check_handle(sys.stdout) else _get_devnull(),
stderr=None if _check_handle(sys.stderr) else _get_devnull()
)
except KeyboardInterrupt:
sys.exit(0)
finally:
if devnull:
os.close(devnull['fd'])
def _wait_for_ssh(queue, ssh, command, timeout=1, attempts=40):
"""Wait until a successful connection to the ssh endpoint can be made."""
try:
host, port = queue.get(timeout=timeout * attempts)
except g_queue.Empty:
cli.bad_exit('No SSH endpoint found.')
for _ in six.moves.range(attempts):
_LOGGER.debug('Checking SSH endpoint %s:%s', host, port)
if _connect(host, port):
run_ssh(host, port, ssh, list(command))
break # if run_ssh doesn't end with os.execvp()...
try:
host, port = queue.get(timeout=timeout)
queue.task_done()
except g_queue.Empty:
pass
# Either all the connection attempts failed or we're after run_ssh
# (not resulting in os.execvp) so let's "clear the queue" so the thread
# can join
queue.task_done()
def _wait_for_app(ssh, app, command, queue=None):
"""Use websockets to wait for the app to start"""
# JoinableQueue is filled with a dummy item otherwise queue.join() unblocks
    # immediately without actually letting the ws_loop and _wait_for_ssh run.
queue = queue or g_queue.JoinableQueue(items=[('dummy.host', 1234)])
def on_message(result, queue=queue):
"""Callback to process trace message."""
        _LOGGER.debug('Endpoint trace msg: %r', result)
queue.put((result['host'], result['port']))
return False
def on_error(result):
"""Callback to process errors."""
click.echo('Error: %s' % result['_error'], err=True)
try:
gevent.spawn(_wait_for_ssh, queue, ssh, command)
gevent.spawn(ws_client.ws_loop,
context.GLOBAL.ws_api(),
{'topic': '/endpoints',
'filter': app,
'proto': 'tcp',
'endpoint': 'ssh'},
False,
on_message,
on_error)
queue.join()
except ws_client.WSConnectionError:
cli.bad_exit('Could not connect to any Websocket APIs')
def init():
"""Return top level command handler."""
@click.command()
@click.option('--cell', required=True,
envvar='TREADMILL_CELL',
callback=cli.handle_context_opt,
expose_value=False)
@click.option('--wait', help='Wait until the app starts up',
is_flag=True, default=False)
@click.option('--ssh', help='SSH client to use.',
type=click.Path(exists=True, readable=True))
@click.argument('app')
@click.argument('command', nargs=-1)
def ssh(ssh, app, command, wait):
"""SSH into Treadmill container."""
if ssh is None:
ssh = _DEFAULT_SSH
if wait:
_wait_for_app(ssh, app, command)
else:
apis = context.GLOBAL.state_api()
url = '/endpoint/{}/tcp/ssh'.format(urllib_parse.quote(app))
response = restclient.get(apis, url)
endpoints = response.json()
_LOGGER.debug('endpoints: %r', endpoints)
if not endpoints:
cli.bad_exit('No ssh endpoint(s) found for %s', app)
            # Take the first endpoint; if more than one is returned, this
            # keeps the behaviour consistent with the single-endpoint case.
endpoint = endpoints[0]
run_ssh(
endpoint['host'],
str(endpoint['port']), ssh, list(command)
)
return ssh
| 30.135802
| 79
| 0.592107
|
9ed3db48a9e81fcf7596c34d54d0d0a658820341
| 152
|
py
|
Python
|
hackerrank/warmup/01-SolveMeFirst.py
|
MrSquanchee/ProblemSolving
|
309160f6a2fb43ae7673210b01957ffca9247d0d
|
[
"MIT"
] | 39
|
2020-09-27T05:32:05.000Z
|
2022-01-08T18:04:05.000Z
|
hackerrank/warmup/01-SolveMeFirst.py
|
MrSquanchee/ProblemSolving
|
309160f6a2fb43ae7673210b01957ffca9247d0d
|
[
"MIT"
] | 5
|
2020-10-02T13:33:00.000Z
|
2021-03-01T14:06:08.000Z
|
hackerrank/warmup/01-SolveMeFirst.py
|
MrSquanchee/ProblemSolving
|
309160f6a2fb43ae7673210b01957ffca9247d0d
|
[
"MIT"
] | 8
|
2021-01-31T10:31:12.000Z
|
2022-03-13T09:15:55.000Z
|
def solveMeFirst(a,b):
# Hint: Type return a+b below
return a+b
num1 = int(input())
num2 = int(input())
res = solveMeFirst(num1,num2)
print(res)
| 15.2
| 30
| 0.664474
|
40367478e6ecb717f451987758bce4df07736be3
| 4,076
|
py
|
Python
|
alipay/aop/api/request/AlipayOpenAppDeveloperCheckdevelopervalidQueryRequest.py
|
snowxmas/alipay-sdk-python-all
|
96870ced60facd96c5bce18d19371720cbda3317
|
[
"Apache-2.0"
] | 213
|
2018-08-27T16:49:32.000Z
|
2021-12-29T04:34:12.000Z
|
alipay/aop/api/request/AlipayOpenAppDeveloperCheckdevelopervalidQueryRequest.py
|
snowxmas/alipay-sdk-python-all
|
96870ced60facd96c5bce18d19371720cbda3317
|
[
"Apache-2.0"
] | 29
|
2018-09-29T06:43:00.000Z
|
2021-09-02T03:27:32.000Z
|
alipay/aop/api/request/AlipayOpenAppDeveloperCheckdevelopervalidQueryRequest.py
|
snowxmas/alipay-sdk-python-all
|
96870ced60facd96c5bce18d19371720cbda3317
|
[
"Apache-2.0"
] | 59
|
2018-08-27T16:59:26.000Z
|
2022-03-25T10:08:15.000Z
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import json
from alipay.aop.api.FileItem import FileItem
from alipay.aop.api.constant.ParamConstants import *
from alipay.aop.api.domain.AlipayOpenAppDeveloperCheckdevelopervalidQueryModel import AlipayOpenAppDeveloperCheckdevelopervalidQueryModel
class AlipayOpenAppDeveloperCheckdevelopervalidQueryRequest(object):
def __init__(self, biz_model=None):
self._biz_model = biz_model
self._biz_content = None
self._version = "1.0"
self._terminal_type = None
self._terminal_info = None
self._prod_code = None
self._notify_url = None
self._return_url = None
self._udf_params = None
self._need_encrypt = False
@property
def biz_model(self):
return self._biz_model
@biz_model.setter
def biz_model(self, value):
self._biz_model = value
@property
def biz_content(self):
return self._biz_content
@biz_content.setter
def biz_content(self, value):
if isinstance(value, AlipayOpenAppDeveloperCheckdevelopervalidQueryModel):
self._biz_content = value
else:
self._biz_content = AlipayOpenAppDeveloperCheckdevelopervalidQueryModel.from_alipay_dict(value)
@property
def version(self):
return self._version
@version.setter
def version(self, value):
self._version = value
@property
def terminal_type(self):
return self._terminal_type
@terminal_type.setter
def terminal_type(self, value):
self._terminal_type = value
@property
def terminal_info(self):
return self._terminal_info
@terminal_info.setter
def terminal_info(self, value):
self._terminal_info = value
@property
def prod_code(self):
return self._prod_code
@prod_code.setter
def prod_code(self, value):
self._prod_code = value
@property
def notify_url(self):
return self._notify_url
@notify_url.setter
def notify_url(self, value):
self._notify_url = value
@property
def return_url(self):
return self._return_url
@return_url.setter
def return_url(self, value):
self._return_url = value
@property
def udf_params(self):
return self._udf_params
@udf_params.setter
def udf_params(self, value):
if not isinstance(value, dict):
return
self._udf_params = value
@property
def need_encrypt(self):
return self._need_encrypt
@need_encrypt.setter
def need_encrypt(self, value):
self._need_encrypt = value
def add_other_text_param(self, key, value):
if not self.udf_params:
self.udf_params = dict()
self.udf_params[key] = value
def get_params(self):
params = dict()
params[P_METHOD] = 'alipay.open.app.developer.checkdevelopervalid.query'
params[P_VERSION] = self.version
if self.biz_model:
params[P_BIZ_CONTENT] = json.dumps(obj=self.biz_model.to_alipay_dict(), ensure_ascii=False, sort_keys=True, separators=(',', ':'))
if self.biz_content:
if hasattr(self.biz_content, 'to_alipay_dict'):
params['biz_content'] = json.dumps(obj=self.biz_content.to_alipay_dict(), ensure_ascii=False, sort_keys=True, separators=(',', ':'))
else:
params['biz_content'] = self.biz_content
if self.terminal_type:
params['terminal_type'] = self.terminal_type
if self.terminal_info:
params['terminal_info'] = self.terminal_info
if self.prod_code:
params['prod_code'] = self.prod_code
if self.notify_url:
params['notify_url'] = self.notify_url
if self.return_url:
params['return_url'] = self.return_url
if self.udf_params:
params.update(self.udf_params)
return params
def get_multipart_params(self):
multipart_params = dict()
return multipart_params
| 28.110345
| 148
| 0.653582
|
62e38d442a378acac4af22c599c5559181ba15b0
| 3,221
|
py
|
Python
|
Comparison.py
|
franreysaycon/BORKOV
|
238cf69d1e81bb522aacfc3bff48e63ff2a26f0a
|
[
"MIT"
] | 1
|
2019-09-17T06:51:38.000Z
|
2019-09-17T06:51:38.000Z
|
Comparison.py
|
franreysaycon/BORKOV
|
238cf69d1e81bb522aacfc3bff48e63ff2a26f0a
|
[
"MIT"
] | null | null | null |
Comparison.py
|
franreysaycon/BORKOV
|
238cf69d1e81bb522aacfc3bff48e63ff2a26f0a
|
[
"MIT"
] | null | null | null |
from __future__ import division
import os
import shutil
import numpy as np
import matplotlib.image as mpimg
import matplotlib.pyplot as plt
import SimpleITK as sitk
from tkFileDialog import askdirectory
from tkFileDialog import askopenfilename
from matplotlib import colors
from Tkinter import Tk
###
startslice = 290
endslice = 300
height = -1
width = -1
colormap = []
tp = 0 #true positive
tn = 0 #true negative
fp = 0 #false positive
fn = 0 #false negative
tani = 0
def compare(knnmrf, groundtruth):
global colormap, tp, tn, fp, fn
for i in range(height):
for j in range(width):
if knnmrf[i][j] != 0 and groundtruth[i][j] == 1:
colormap[i][j] = 3 #white for common area
tp += 1
elif groundtruth[i][j] == 1:
                colormap[i][j] = 2 #gold for ground truth only
fn += 1
elif knnmrf[i][j] != 0:
                colormap[i][j] = 1 #deepskyblue for knn mrf only
fp += 1
else:
colormap[i][j] = 0
tn += 1
def openMRI(title):
Tk().withdraw()
filename = askopenfilename(title = title)
image = sitk.ReadImage(str(filename))
return filename, sitk.GetArrayFromImage(image)
def writeStats(folder):
textfile = open(folder + "\\Stats.txt", "w")
textfile.write("True positive: %s\n" % tp)
textfile.write("True negative: %s\n" % tn)
textfile.write("False positive: %s\n" % fp)
textfile.write("False negative: %s\n" % fn)
textfile.write("Tanimoto Coefficient: " + str(tani) + "\n")
textfile.close()
def computeTani():
return tp/(tp+fp+fn)
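# The coefficient above is the Jaccard index of the two segmentations:
# |A intersection B| / |A union B| = tp / (tp + fp + fn). With made-up counts
# tp=80, fp=10, fn=10 it would be 80 / 100 = 0.8.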
if __name__ == "__main__" :
print "> Load KNN-MRF segmented image."
directory1, knnmrf = openMRI('Load KNN-MRF segmented image')
print "\tLoaded ", directory1
print "> Load corresponding ground truth."
directory2, groundtruth = openMRI('Load corresponding ground truth')
groundtruth = groundtruth[startslice:endslice+1]
print "\tLoaded ", directory2
height = len(groundtruth[0])
width = len(groundtruth[0][0])
folder = directory1.split("/")
folder = folder[len(folder)-2]
folder = "SegmentedMRI_Compared\\" + folder
if os.path.exists(folder): shutil.rmtree(folder)
try: os.makedirs(folder)
except OSError as exc: raise
print "> Start comparison. [ from slice", startslice, "to", endslice, "]"
for i in range(len(knnmrf)):
print "\tProcessing slice#", i+startslice, "...\r",
colormap = [[0 for j in range(width)] for k in range(height)]
compare(knnmrf[i], groundtruth[i])
fig, ax = plt.subplots()
frame = plt.gca()
frame.axes.get_xaxis().set_visible(False)
frame.axes.get_yaxis().set_visible(False)
cmap = colors.ListedColormap(['black', 'deepskyblue', 'gold', 'white'])
cax = ax.imshow(np.array(colormap), cmap=cmap, vmin=0, vmax=3)
bar = fig.colorbar(cax, ticks=[0, 1, 2, 3])
bar.ax.set_yticklabels(['Non-Lesion', 'KNN-MRF', 'Manual', 'Common'])
filename = folder + "\\Slice" + str(i+startslice) + ".png"
if os.path.isfile(filename): os.remove(filename)
plt.savefig(filename)
plt.close()
print "\tProcessing slice#", endslice, "...",
print "done"
tani = computeTani()
writeStats(folder)
print "> Stats.txt created."
print ">> Done! Your segmented MR image is compared."
| 26.841667
| 75
| 0.656628
|
4d1e17543772bf19d11e758410148cc5e5f17ca9
| 799
|
py
|
Python
|
test_autofit/database/query/internal/test_functionality.py
|
vishalbelsare/PyAutoFit
|
04b927419fa95c302e0eebab632fc7ebcdc3f06b
|
[
"MIT"
] | null | null | null |
test_autofit/database/query/internal/test_functionality.py
|
vishalbelsare/PyAutoFit
|
04b927419fa95c302e0eebab632fc7ebcdc3f06b
|
[
"MIT"
] | null | null | null |
test_autofit/database/query/internal/test_functionality.py
|
vishalbelsare/PyAutoFit
|
04b927419fa95c302e0eebab632fc7ebcdc3f06b
|
[
"MIT"
] | null | null | null |
import pytest
from autofit.database import query as q
def test_trivial():
assert (q.Q("a") & q.Q("a")).query == q.Q("a").query
def test_second():
assert (q.Q("a") & q.Q("a", q.Q("b"))).query == (q.Q("a", q.Q("b"))).query
def test_and_commutativity():
a_and_b = q.And(q.Q("a"), q.Q("b"))
combined = a_and_b & q.Q("c")
assert combined == q.And(q.Q("a"), q.Q("b"), q.Q("c"))
assert len(combined.conditions) == 3
def test_single_argument():
assert isinstance(
q.And(q.Q("a")),
q.Q
)
def test_already_compared(
aggregator
):
with pytest.raises(
AssertionError
):
print((aggregator.centre == 1) == 1)
with pytest.raises(
AttributeError
):
print((aggregator.centre == 1).intesity)
| 19.487805
| 78
| 0.550688
|
282bfdfe9601ca570e28bf74d992b0512979f5a9
| 3,422
|
py
|
Python
|
lib_youtube_cd_burner/cd.py
|
jfuruness/lib_youtube_cd_burner
|
30e0d19243a49a762f51cc58dddb691dea78fad0
|
[
"BSD-3-Clause"
] | null | null | null |
lib_youtube_cd_burner/cd.py
|
jfuruness/lib_youtube_cd_burner
|
30e0d19243a49a762f51cc58dddb691dea78fad0
|
[
"BSD-3-Clause"
] | null | null | null |
lib_youtube_cd_burner/cd.py
|
jfuruness/lib_youtube_cd_burner
|
30e0d19243a49a762f51cc58dddb691dea78fad0
|
[
"BSD-3-Clause"
] | null | null | null |
import logging
import subprocess
import time
import fcntl
import os
from contextlib import contextmanager
from .disk_values_enum import DiskValues
from .song_holder import SongHolder
class CDFullError(Exception):
pass
class CD(SongHolder):
"""CD class that adds songs and burns cds"""
def __init__(self, max_seconds):
"""initializes cd and max seconds a cd can hold"""
self.max_seconds = max_seconds
self.total_seconds = 0
super(CD, self).__init__()
def add_track(self, song):
"""Adds a song to a cd, returns false if over limit"""
# If the song is not over the limit of seconds
if self.total_seconds + song.seconds <= self.max_seconds:
# Add the song
self.songs.append(song)
# Increase total seconds
self.total_seconds += song.seconds
# Song too long return false, cd full
else:
raise CDFullError
def burn(self, times_to_burn=1):
"""Burns a cd times_to_burn times"""
for i in range(times_to_burn):
# Wait for disk insertion
if self._get_disk():
# args for bash command
args = [#"sudo",
"wodim",
"-v",
"dev=/dev/sr0",
"-dao", # sao????? same in wodim????
"-audio",
"-pad",
"speed=8" # for my cd player 10 is lowest
]
# Adds all the songs to burn in order
args.extend([x.path for x in self.songs])
# Actually burns the cd
output = subprocess.run(args)
logging.debug(output)
logging.info("Just burned {}".format(self))
# Pops the new cd out
CD.eject()
else:
logging.warning("Disk not inserted, exiting")
def _get_disk(self):
"""Waits for disk insertion"""
# Pops out cd
CD.eject()
logging.info("Insert cd!")
        while self._get_disk_val() == DiskValues.OPEN.value:
logging.info("Disk tray open\r")
time.sleep(1)
        while self._get_disk_val() == DiskValues.READING.value:
logging.info("Reading in disk\r")
time.sleep(1)
        if self._get_disk_val() == DiskValues.NO_DISK.value:
logging.warning("No disk inserted")
return False
        elif self._get_disk_val() == DiskValues.DISK_IN_TRAY.value:
logging.info("Disk in tray and read")
return True
def _get_disk_val(self):
# https://superuser.com/a/1367091
# 1 for no disk, 2 for open, 3 for reading, 4 for disk in tray
with self._open_disk_fd() as fd:
return fcntl.ioctl(fd, 0x5326)
@contextmanager
def _open_disk_fd(self):
fd = os.open('/dev/sr0', os.O_RDONLY | os.O_NONBLOCK)
yield fd
os.close(fd)
@staticmethod
    def eject():
"""Pops out CD"""
subprocess.run(["eject"])
def __str__(self):
"""For when cd's are printed"""
lines = ["cd is {} minutes".format(self.total_seconds/60),
"songs:"]
[lines.append(" " + x.__str__()) for x in self.songs]
lines.append("\n")
return "\n".join(lines)
| 31.394495
| 70
| 0.536821
|
6f70d91e2178389cf1cbba8aef9c6856c6e63081
| 642
|
py
|
Python
|
backend/ipproject/core/permissions/resources.py
|
FedotenkoM/ipsproject
|
f02ce8acd560b3e10e5357f0605e923396aaafa0
|
[
"MIT"
] | 1
|
2021-07-28T22:16:53.000Z
|
2021-07-28T22:16:53.000Z
|
backend/ipproject/core/permissions/resources.py
|
FedotenkoM/ipsproject
|
f02ce8acd560b3e10e5357f0605e923396aaafa0
|
[
"MIT"
] | 11
|
2021-05-14T12:34:18.000Z
|
2021-08-22T14:52:01.000Z
|
backend/ipproject/core/permissions/resources.py
|
FedotenkoM/ipsproject
|
f02ce8acd560b3e10e5357f0605e923396aaafa0
|
[
"MIT"
] | null | null | null |
from starlette.responses import JSONResponse
from ..utils import jwt_required
from ..models import PermissionModel
@jwt_required
async def get_apps(request, user):
sub = PermissionModel.alias()
apps = await PermissionModel.outerjoin(
sub, PermissionModel.id == sub.id
).select().where(
PermissionModel.role_id == user.role_id
).gino.load(
PermissionModel.distinct(PermissionModel.app_name).load(
actions=sub.action
)
).all()
result = {}
for app in apps:
_app = app.jsonify()
result[_app['appName']] = _app['actions']
return JSONResponse(result)
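# Note added for clarity: the JSON response maps each application name to the list of
# actions the current user's role may perform, e.g. {"users": ["read", "write"], ...}
# (the concrete app and action names are whatever rows exist in PermissionModel).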
| 24.692308
| 64
| 0.658879
|
dac5830b39ee5d68abbfe25be05adb74b482be71
| 2,375
|
py
|
Python
|
examples/model_fitting/plot_function_fit.py
|
maahn/meteo_si
|
821bf0e428f2d8793d30253cb41d71b7c621afef
|
[
"MIT"
] | null | null | null |
examples/model_fitting/plot_function_fit.py
|
maahn/meteo_si
|
821bf0e428f2d8793d30253cb41d71b7c621afef
|
[
"MIT"
] | null | null | null |
examples/model_fitting/plot_function_fit.py
|
maahn/meteo_si
|
821bf0e428f2d8793d30253cb41d71b7c621afef
|
[
"MIT"
] | null | null | null |
"""
================================
Fitting a function with meteo_si
================================
Meteo SI contains a number of tools for fitting functions to
data. This example shows us how to load data into python, fit
a function to our datapoints with meteo_si, and then plot the
result.
This example is meant to demonstrate the functionality of
sphinx-gallery, which allows you to generate narrative-style
documents from python files.
"""
import os.path as op
import numpy as np
import matplotlib.pyplot as plt
import meteo_si as sb
plt.style.use('ggplot')
###############################################################################
# Loading data
# ------------
#
# First, we'll load some data into meteo_si.
data_path = op.join(sb.__path__[0], 'data')
ortho_x, ortho_y, ortho_n = sb.transform_data(op.join(data_path, 'ortho.csv'))
para_x, para_y, para_n = sb.transform_data(op.join(data_path, 'para.csv'))
###############################################################################
# Fitting a model
# ---------------
#
# With meteo_si, models are created with the :class:`Model` class.
# This class has a `fit` method that returns the coefficients for the given
# input data.
# Instantiate our model and fit it on two datasets
model = sb.Model()
ortho_fit = model.fit(ortho_x, ortho_y)
para_fit = model.fit(para_x, para_y)
# These are the parameters that our model has discovered
print(ortho_fit.params)
print(para_fit.params)
###############################################################################
# Visualizing results
# -------------------
#
# Now we will visualize the results of our model fit. We'll generate
# a vector of input points, and use them to determine the model's output
# for each input. Then we'll plot what these curves look like.
# Create figure and generate input points
fig, ax = plt.subplots(1)
x_predict = np.linspace(0, 1, 100)
# Make the first plot
for x, y, n in zip(ortho_x, ortho_y, ortho_n):
ax.plot(x, y, 'bo', markersize=n)
ax.plot(x_predict, ortho_fit.predict(x_predict), 'b')
# Make the second plot
for x, y, n in zip(para_x, para_y, para_n):
ax.plot(x, y, 'go', markersize=n)
ax.plot(x_predict, para_fit.predict(x_predict), 'g')
ax.set_xlabel('Contrast in interval 1')
ax.set_ylabel("Proportion answers '1'")
ax.set_ylim([-0.1, 1.1])
ax.set_xlim([-0.1, 1.1])
fig.set_size_inches([8, 8])
| 31.25
| 79
| 0.626947
|
969403cac71be069946988ff305f6e5fa91e1b2c
| 5,639
|
py
|
Python
|
docs/conf.py
|
Sanjay-B/Pyblox-Docs
|
0ba8ec9515b799074b98d2abfd763abf4b23dd1a
|
[
"MIT"
] | null | null | null |
docs/conf.py
|
Sanjay-B/Pyblox-Docs
|
0ba8ec9515b799074b98d2abfd763abf4b23dd1a
|
[
"MIT"
] | 2
|
2018-02-01T10:56:25.000Z
|
2019-06-06T11:04:12.000Z
|
docs/conf.py
|
Sanjay-B/Pyblox-Docs
|
0ba8ec9515b799074b98d2abfd763abf4b23dd1a
|
[
"MIT"
] | 1
|
2018-03-11T16:59:24.000Z
|
2018-03-11T16:59:24.000Z
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# pybloxdocs documentation build configuration file, created by
# sphinx-quickstart on Sun Jul 16 10:17:27 2017.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
# import os
# import sys
# sys.path.insert(0, os.path.abspath('.'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = []
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
#
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = 'Pyblox'
copyright = '2017, Sanjay-B(Sanjay Bhadra)'
author = 'Sanjay-B(Sanjay Bhadra)'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '0.0.2'
# The full version, including alpha/beta/rc tags.
release = '0.0.2'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This patterns also effect to html_static_path and html_extra_path
exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = 'alabaster'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#
# html_theme_options = {}
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Custom sidebar templates, must be a dictionary that maps document names
# to template names.
#
# This is required for the alabaster theme
# refs: http://alabaster.readthedocs.io/en/latest/installation.html#sidebars
html_sidebars = {
'**': [
'about.html',
'navigation.html',
'relations.html', # needs 'show_related': True theme option to display
'searchbox.html',
'donate.html',
]
}
# -- Options for HTMLHelp output ------------------------------------------
# Output file base name for HTML help builder.
htmlhelp_basename = 'pybloxdocsdoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#
# 'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#
# 'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#
# 'preamble': '',
# Latex figure (float) alignment
#
# 'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, 'Pyblox.tex', 'Pyblox Documentation',
'Sanjay-B(Sanjay Bhadra)', 'manual'),
]
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(master_doc, 'Pyblox', 'Pyblox Documentation',
[author], 1)
]
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(master_doc, 'Pyblox', 'Pyblox Documentation',
author, 'Pyblox', 'One line description of project.',
'Miscellaneous'),
]
# Source Parser
from recommonmark.parser import CommonMarkParser
source_parsers = {
'.md': CommonMarkParser,
}
source_suffix = ['.rst', '.md']
# Themes
import sphinx_rtd_theme
html_theme = "sphinx_rtd_theme"
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
| 30.814208
| 80
| 0.659691
|
347afad0a6c92156f6b77ca2111749bd0171223d
| 1,795
|
py
|
Python
|
src/invdx.py
|
djamriska/project_light
|
69632023fee0f3164df5fef4dd204fd8f7ee7d34
|
[
"MIT"
] | null | null | null |
src/invdx.py
|
djamriska/project_light
|
69632023fee0f3164df5fef4dd204fd8f7ee7d34
|
[
"MIT"
] | null | null | null |
src/invdx.py
|
djamriska/project_light
|
69632023fee0f3164df5fef4dd204fd8f7ee7d34
|
[
"MIT"
] | null | null | null |
#invdx.py
# An inverted index
__author__ = 'Nick Hirakawa'
class InvertedIndex:
def __init__(self):
self.index = dict()
def __contains__(self, item):
return item in self.index
def __getitem__(self, item):
return self.index[item]
def add(self, word, docid):
if word in self.index:
if docid in self.index[word]:
self.index[word][docid] += 1
else:
self.index[word][docid] = 1
else:
d = dict()
d[docid] = 1
self.index[word] = d
# frequency of word in document
def get_document_frequency(self, word, docid):
if word in self.index:
if docid in self.index[word]:
return self.index[word][docid]
else:
raise LookupError('%s not in document %s' % (str(word), str(docid)))
else:
raise LookupError('%s not in index' % str(word))
# frequency of word in index, i.e. number of documents that contain word
def get_index_frequency(self, word):
if word in self.index:
return len(self.index[word])
else:
raise LookupError('%s not in index' % word)
class DocumentLengthTable:
def __init__(self):
self.table = dict()
def __len__(self):
return len(self.table)
def add(self, docid, length):
self.table[docid] = length
def get_length(self, docid):
if docid in self.table:
return self.table[docid]
else:
raise LookupError('%s not found in table' % str(docid))
def get_average_length(self):
sum = 0
for length in self.table.items():
sum += length[1]
return float(sum) / float(len(self.table))
def build_data_structures(corpus):
idx = InvertedIndex()
dlt = DocumentLengthTable()
for docid in corpus:
# build inverted index
for word in corpus[docid]:
idx.add(str(word), str(docid))
# build document length table
length = len(corpus[str(docid)])
dlt.add(docid, length)
return idx, dlt
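# A minimal usage sketch, added for illustration (not part of the original module).
# It assumes a corpus shaped like the one build_data_structures() expects: a dict
# mapping a document id to the list of tokens in that document.
if __name__ == '__main__':
    toy_corpus = {
        'doc1': ['the', 'quick', 'brown', 'fox'],
        'doc2': ['the', 'lazy', 'dog'],
    }
    idx, dlt = build_data_structures(toy_corpus)
    print(idx.get_document_frequency('the', 'doc1'))  # 1: 'the' occurs once in doc1
    print(idx.get_index_frequency('the'))             # 2: 'the' appears in both documents
    print(dlt.get_average_length())                   # 3.5 = (4 + 3) / 2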
| 21.369048
| 73
| 0.68078
|
8d20b6699a9e5d8891a10bdeb52e7332a15664c3
| 2,133
|
py
|
Python
|
checks/resolution/plot_resolution.py
|
ReynLieu/tf-pwa
|
f354b5036bc8c37ffba95849de5ec3367934eef8
|
[
"MIT"
] | 4
|
2021-05-10T15:17:24.000Z
|
2021-08-16T07:40:06.000Z
|
checks/resolution/plot_resolution.py
|
ReynLieu/tf-pwa
|
f354b5036bc8c37ffba95849de5ec3367934eef8
|
[
"MIT"
] | 45
|
2020-10-24T08:26:19.000Z
|
2022-03-20T06:14:58.000Z
|
checks/resolution/plot_resolution.py
|
ReynLieu/tf-pwa
|
f354b5036bc8c37ffba95849de5ec3367934eef8
|
[
"MIT"
] | 8
|
2020-10-24T06:41:06.000Z
|
2022-01-03T01:29:49.000Z
|
import matplotlib.pyplot as plt
import numpy as np
import tensorflow as tf
from tf_pwa.config_loader import ConfigLoader
from tf_pwa.data import data_index, data_split
from tf_pwa.histogram import Hist1D
def sum_resolution(amps, weights, size=1):
amps = tf.reshape(amps * weights, (-1, size))
amps = tf.reduce_sum(amps, axis=-1).numpy()
return amps
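# Note added for clarity: the reshape above assumes that "phsp_plot_re" stores the
# resolution-smeared copies of each phase-space event contiguously, `size` copies per
# event, so summing the weighted amplitudes over the last axis folds the smearing back
# into a single value per original event.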
def main():
config = ConfigLoader("config.yml")
config.set_params("final_params.json")
amp = config.get_amplitude()
data = config.get_data("data_origin")[0]
phsp = config.get_data("phsp_plot")[0]
phsp_re = config.get_data("phsp_plot_re")[0]
print("data loaded")
amps = amp(phsp_re)
pw = amp.partial_weight(phsp_re)
re_weight = phsp_re["weight"]
re_size = config.resolution_size
amps = sum_resolution(amps, re_weight, re_size)
pw = [sum_resolution(i, re_weight, re_size) for i in pw]
m_idx = config.get_data_index("mass", "R_BC")
m_phsp = data_index(phsp, m_idx).numpy()
m_data = data_index(data, m_idx).numpy()
m_min, m_max = np.min(m_phsp), np.max(m_phsp)
scale = m_data.shape[0] / np.sum(amps)
get_hist = lambda m, w: Hist1D.histogram(
m, weights=w, range=(m_min, m_max), bins=100
)
data_hist = get_hist(m_data, None)
phsp_hist = get_hist(m_phsp, scale * amps)
pw_hist = []
for i in pw:
pw_hist.append(get_hist(m_phsp, scale * i))
ax2 = plt.subplot2grid((4, 1), (3, 0), rowspan=1)
ax = plt.subplot2grid((4, 1), (0, 0), rowspan=3, sharex=ax2)
data_hist.draw_error(ax, label="data")
phsp_hist.draw(ax, label="fit")
for i, j in zip(pw_hist, config.get_decay()):
i.draw_kde(ax, label=str(j.inner[0]))
(data_hist - phsp_hist).draw_pull(ax2)
ax.set_ylim((1, None))
ax.legend()
ax.set_yscale("log")
ax.set_ylabel("Events/{:.1f} MeV".format((m_max - m_min) * 10))
ax2.set_xlabel("M( R_BC )")
ax2.set_ylabel("pull")
ax2.set_xlim((1.3, 1.7))
ax2.set_ylim((-5, 5))
plt.setp(ax.get_xticklabels(), visible=False)
plt.savefig("m_R_BC_fit.png")
if __name__ == "__main__":
main()
| 28.44
| 67
| 0.652602
|
1a9ab6b6f68386d61327db7dc4bc2efb30f9a927
| 5,541
|
py
|
Python
|
rpython/translator/translator.py
|
microvm/pypy-mu
|
6b03fbe93052d0eb3a4c67152c987c16837b3484
|
[
"Apache-2.0",
"OpenSSL"
] | null | null | null |
rpython/translator/translator.py
|
microvm/pypy-mu
|
6b03fbe93052d0eb3a4c67152c987c16837b3484
|
[
"Apache-2.0",
"OpenSSL"
] | null | null | null |
rpython/translator/translator.py
|
microvm/pypy-mu
|
6b03fbe93052d0eb3a4c67152c987c16837b3484
|
[
"Apache-2.0",
"OpenSSL"
] | null | null | null |
"""PyPy Translator Frontend
The Translator is a glue class putting together the various pieces of the
translation-related code. It can be used for interactive testing of the
translator; see pypy/bin/translatorshell.py.
"""
import sys
import types
from rpython.translator import simplify
from rpython.flowspace.model import FunctionGraph, checkgraph, Block
from rpython.flowspace.objspace import build_flow
from rpython.tool.ansi_print import AnsiLogger
from rpython.tool.sourcetools import nice_repr_for_func
from rpython.config.translationoption import get_platform
log = AnsiLogger("flowgraph")
class TranslationContext(object):
FLOWING_FLAGS = {
'verbose': False,
'list_comprehension_operations': False, # True, - not super-tested
}
def __init__(self, config=None, **flowing_flags):
if config is None:
from rpython.config.translationoption import get_combined_translation_config
config = get_combined_translation_config(translating=True)
# ZZZ should go away in the end
for attr in ['verbose', 'list_comprehension_operations']:
if attr in flowing_flags:
setattr(config.translation, attr, flowing_flags[attr])
self.config = config
self.platform = get_platform(config)
self.annotator = None
self.rtyper = None
self.exceptiontransformer = None
self.graphs = [] # [graph]
self.callgraph = {} # {opaque_tag: (caller-graph, callee-graph)}
self._prebuilt_graphs = {} # only used by the pygame viewer
def buildflowgraph(self, func, mute_dot=False):
"""Get the flow graph for a function."""
if not isinstance(func, types.FunctionType):
raise TypeError("buildflowgraph() expects a function, "
"got %r" % (func,))
if func in self._prebuilt_graphs:
graph = self._prebuilt_graphs.pop(func)
else:
if self.config.translation.verbose:
log(nice_repr_for_func(func))
graph = build_flow(func)
simplify.simplify_graph(graph)
if self.config.translation.list_comprehension_operations:
simplify.detect_list_comprehension(graph)
if not self.config.translation.verbose and not mute_dot:
log.dot()
self.graphs.append(graph) # store the graph in our list
return graph
def update_call_graph(self, caller_graph, callee_graph, position_tag):
# update the call graph
key = caller_graph, callee_graph, position_tag
self.callgraph[key] = caller_graph, callee_graph
def buildannotator(self, policy=None):
if self.annotator is not None:
raise ValueError("we already have an annotator")
from rpython.annotator.annrpython import RPythonAnnotator
self.annotator = RPythonAnnotator(self, policy=policy)
return self.annotator
def buildrtyper(self):
if self.annotator is None:
raise ValueError("no annotator")
if self.rtyper is not None:
raise ValueError("we already have an rtyper")
from rpython.rtyper.rtyper import RPythonTyper
self.rtyper = RPythonTyper(self.annotator)
return self.rtyper
def getexceptiontransformer(self):
if self.rtyper is None:
raise ValueError("no rtyper")
if self.exceptiontransformer is not None:
return self.exceptiontransformer
from rpython.translator.exceptiontransform import ExceptionTransformer
self.exceptiontransformer = ExceptionTransformer(self)
return self.exceptiontransformer
def checkgraphs(self):
for graph in self.graphs:
checkgraph(graph)
# debug aids
def about(self, x, f=None):
"""Interactive debugging helper """
if f is None:
f = sys.stdout
if isinstance(x, Block):
for graph in self.graphs:
if x in graph.iterblocks():
print >>f, '%s is a %s' % (x, x.__class__)
print >>f, 'in %s' % (graph,)
break
else:
print >>f, '%s is a %s at some unknown location' % (
x, x.__class__.__name__)
print >>f, 'containing the following operations:'
for op in x.operations:
print >>f, " ",op
print >>f, '--end--'
return
raise TypeError("don't know about %r" % x)
def view(self):
"""Shows the control flow graph with annotations if computed.
Requires 'dot' and pygame."""
from rpython.translator.tool.graphpage import FlowGraphPage
FlowGraphPage(self).display()
show = view
def viewcg(self, center_graph=None, huge=100):
"""Shows the whole call graph and the class hierarchy, based on
the computed annotations."""
from rpython.translator.tool.graphpage import TranslatorPage
TranslatorPage(self, center_graph=center_graph, huge=huge).display()
showcg = viewcg
# _______________________________________________________________
# testing helper
def graphof(translator, func):
if isinstance(func, FunctionGraph):
return func
result = []
for graph in translator.graphs:
if getattr(graph, 'func', None) is func:
result.append(graph)
assert len(result) == 1
return result[0]
TranslationContext._graphof = graphof
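# A rough usage sketch, added for illustration (the real interactive flow lives in
# pypy/bin/translatorshell.py; `snippet_function` below is a hypothetical RPython function):
#
#     t = TranslationContext()
#     graph = t.buildflowgraph(snippet_function)               # build and simplify the flow graph
#     t.buildannotator().build_types(snippet_function, [int])  # annotate for one int argument
#     t.buildrtyper().specialize()                             # turn annotations into low-level ops
#     t.view()                                                 # needs 'dot' and pygame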
| 37.187919
| 88
| 0.642844
|
3660b861f9cec4e958e149883f21a01c033e9355
| 459
|
py
|
Python
|
Mundo 3 Estruturas Compostas/ex083.py
|
costa53/curso_em_video_python3
|
4f859641324f8b35be56d807f40457d7dddc451f
|
[
"MIT"
] | 1
|
2022-02-17T16:23:52.000Z
|
2022-02-17T16:23:52.000Z
|
Mundo 3 Estruturas Compostas/ex083.py
|
costa53/curso_em_video_python3
|
4f859641324f8b35be56d807f40457d7dddc451f
|
[
"MIT"
] | null | null | null |
Mundo 3 Estruturas Compostas/ex083.py
|
costa53/curso_em_video_python3
|
4f859641324f8b35be56d807f40457d7dddc451f
|
[
"MIT"
] | null | null | null |
# CHALLENGE 083
# Write a program in which the user types any expression that uses parentheses. The application
# must check whether the parentheses in the given expression are opened and closed in the correct order.
exp = list(str(input('Type an expression: ')))
cont = 0
for c in exp:
    if c == "(":
        cont += 1
    elif c == ")":
        cont -= 1
        if cont < 0:  # a ")" showed up before its matching "(", e.g. ")(", so the order is wrong
            break
if cont == 0:
    print('Your expression is valid!')
else:
    print('Your expression is wrong!')
| 28.6875
| 100
| 0.660131
|
88442930916a328776fdf1d4f234c1b41220a067
| 1,864
|
py
|
Python
|
GPyOpt/acquisitions/EI.py
|
zhenwendai/GPyOpt
|
fd96875e7ec0cb0f78014d96813ece400648827d
|
[
"BSD-3-Clause"
] | 850
|
2015-05-31T21:12:41.000Z
|
2022-03-24T17:25:37.000Z
|
GPyOpt/acquisitions/EI.py
|
lakshaykc/GPyOpt
|
097ba66e81c7e22b5bf9fdbe64fd135753bc4a67
|
[
"BSD-3-Clause"
] | 340
|
2015-09-10T14:08:06.000Z
|
2022-03-28T20:35:26.000Z
|
GPyOpt/acquisitions/EI.py
|
lakshaykc/GPyOpt
|
097ba66e81c7e22b5bf9fdbe64fd135753bc4a67
|
[
"BSD-3-Clause"
] | 299
|
2015-07-30T13:18:37.000Z
|
2022-03-22T21:27:31.000Z
|
# Copyright (c) 2016, the GPyOpt Authors
# Licensed under the BSD 3-clause license (see LICENSE.txt)
from .base import AcquisitionBase
from ..util.general import get_quantiles
class AcquisitionEI(AcquisitionBase):
"""
Expected improvement acquisition function
:param model: GPyOpt class of model
:param space: GPyOpt class of domain
:param optimizer: optimizer of the acquisition. Should be a GPyOpt optimizer
:param cost_withGradients: function
:param jitter: positive value to make the acquisition more explorative.
.. Note:: allows to compute the Improvement per unit of cost
"""
analytical_gradient_prediction = True
def __init__(self, model, space, optimizer=None, cost_withGradients=None, jitter=0.01):
self.optimizer = optimizer
super(AcquisitionEI, self).__init__(model, space, optimizer, cost_withGradients=cost_withGradients)
self.jitter = jitter
@staticmethod
def fromConfig(model, space, optimizer, cost_withGradients, config):
return AcquisitionEI(model, space, optimizer, cost_withGradients, jitter=config['jitter'])
def _compute_acq(self, x):
"""
Computes the Expected Improvement per unit of cost
"""
m, s = self.model.predict(x)
fmin = self.model.get_fmin()
phi, Phi, u = get_quantiles(self.jitter, fmin, m, s)
f_acqu = s * (u * Phi + phi)
return f_acqu
def _compute_acq_withGradients(self, x):
"""
Computes the Expected Improvement and its derivative (has a very easy derivative!)
"""
fmin = self.model.get_fmin()
m, s, dmdx, dsdx = self.model.predict_withGradients(x)
phi, Phi, u = get_quantiles(self.jitter, fmin, m, s)
f_acqu = s * (u * Phi + phi)
df_acqu = dsdx * phi - Phi * dmdx
return f_acqu, df_acqu
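# Note added for clarity: _compute_acq above is the usual closed form of Expected
# Improvement. Writing u for the standardised improvement returned by get_quantiles
# (essentially (fmin - m - jitter) / s, up to the exact convention used there), and
# phi / Phi for the standard normal pdf / cdf evaluated at u:
#     EI(x) = s * (u * Phi(u) + phi(u))
# Differentiating through m(x) and s(x) gives the gradient used in
# _compute_acq_withGradients: dEI/dx = phi(u) * dsdx - Phi(u) * dmdx.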
| 35.846154
| 107
| 0.67221
|