| column | type | range / values |
|---|---|---|
| hexsha | stringlengths | 40 to 40 |
| size | int64 | 5 to 2.06M |
| ext | stringclasses | 10 values |
| lang | stringclasses | 1 value |
| max_stars_repo_path | stringlengths | 3 to 248 |
| max_stars_repo_name | stringlengths | 5 to 125 |
| max_stars_repo_head_hexsha | stringlengths | 40 to 78 |
| max_stars_repo_licenses | listlengths | 1 to 10 |
| max_stars_count | int64 | 1 to 191k, nullable |
| max_stars_repo_stars_event_min_datetime | stringlengths | 24 to 24, nullable |
| max_stars_repo_stars_event_max_datetime | stringlengths | 24 to 24, nullable |
| max_issues_repo_path | stringlengths | 3 to 248 |
| max_issues_repo_name | stringlengths | 5 to 125 |
| max_issues_repo_head_hexsha | stringlengths | 40 to 78 |
| max_issues_repo_licenses | listlengths | 1 to 10 |
| max_issues_count | int64 | 1 to 67k, nullable |
| max_issues_repo_issues_event_min_datetime | stringlengths | 24 to 24, nullable |
| max_issues_repo_issues_event_max_datetime | stringlengths | 24 to 24, nullable |
| max_forks_repo_path | stringlengths | 3 to 248 |
| max_forks_repo_name | stringlengths | 5 to 125 |
| max_forks_repo_head_hexsha | stringlengths | 40 to 78 |
| max_forks_repo_licenses | listlengths | 1 to 10 |
| max_forks_count | int64 | 1 to 105k, nullable |
| max_forks_repo_forks_event_min_datetime | stringlengths | 24 to 24, nullable |
| max_forks_repo_forks_event_max_datetime | stringlengths | 24 to 24, nullable |
| content | stringlengths | 5 to 2.06M |
| avg_line_length | float64 | 1 to 1.02M |
| max_line_length | int64 | 3 to 1.03M |
| alphanum_fraction | float64 | 0 to 1 |
| count_classes | int64 | 0 to 1.6M |
| score_classes | float64 | 0 to 1 |
| count_generators | int64 | 0 to 651k |
| score_generators | float64 | 0 to 1 |
| count_decorators | int64 | 0 to 990k |
| score_decorators | float64 | 0 to 1 |
| count_async_functions | int64 | 0 to 235k |
| score_async_functions | float64 | 0 to 1 |
| count_documentation | int64 | 0 to 1.04M |
| score_documentation | float64 | 0 to 1 |

Each record below lists its metadata fields, then the file content, then the per-file statistics.
hexsha: a7bbd5a285c6bb9093667ccc2d4fc99f883f732d | size: 1,573 | ext: py | lang: Python
max_stars: tests/fixtures.py | vfxetc/sgcache | head 670bfac2904373e19c2dac7504d2d7f87018833d | licenses ["BSD-3-Clause"] | count 13 | events 2017-09-06T21:48:57.000Z to 2022-02-08T20:50:52.000Z
max_issues: tests/fixtures.py | vfxetc/sgcache | head 670bfac2904373e19c2dac7504d2d7f87018833d | licenses ["BSD-3-Clause"] | count 1 | events 2021-04-04T18:07:04.000Z to 2021-04-04T18:07:04.000Z
max_forks: tests/fixtures.py | vfxetc/sgcache | head 670bfac2904373e19c2dac7504d2d7f87018833d | licenses ["BSD-3-Clause"] | count 1 | events 2019-07-19T01:23:19.000Z to 2019-07-19T01:23:19.000Z
content:
from . import uuid
def task_crud(self, shotgun, trigger_poll=lambda: None):
shot_name = uuid(8)
shot = shotgun.create('Shot', {'code': shot_name})
name = uuid(8)
task = shotgun.create('Task', {'content': name, 'entity': shot})
trigger_poll()
x = self.cached.find_one('Task', [('id', 'is', task['id'])], ['content'])
self.assertSameEntity(task, x)
# entity field
x = self.cached.find_one('Task', [('id', 'is', task['id'])], ['entity'])
self.assertSameEntity(shot, x['entity'])
# return through entity field
x = self.cached.find_one('Task', [('id', 'is', task['id'])], ['entity.Shot.code'])
self.assertEqual(shot_name, x['entity.Shot.code'])
# Updates
name += '-2'
shotgun.update('Task', task['id'], {'content': name})
trigger_poll()
x = self.cached.find_one('Task', [('id', 'is', task['id'])], ['content'])
self.assertEqual(x['content'], name)
# Delete
shotgun.delete('Task', task['id'])
trigger_poll()
x = self.cached.find_one('Task', [('id', 'is', task['id'])], ['content'])
self.assertIs(x, None)
x = self.cached.find_one('Task', [('id', 'is', task['id'])], ['content'], retired_only=True)
self.assertSameEntity(task, x)
# Revive
shotgun.revive('Task', task['id'])
trigger_poll()
x = self.cached.find_one('Task', [('id', 'is', task['id'])], ['content'])
self.assertSameEntity(task, x, 'Should get revived task.')
x = self.cached.find_one('Task', [('id', 'is', task['id'])], ['content'], retired_only=True)
self.assertIs(x, None)
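The fixture above drives a create/read/update/delete/revive cycle against a cached Shotgun connection and relies on helpers (`uuid`, `assertSameEntity`) defined elsewhere in the sgcache test suite. Purely as an illustration of what those helpers are assumed to do, here is a minimal sketch; the real sgcache implementations may well differ.

```python
import uuid as _uuid


def uuid(length=8):
    # Hypothetical stand-in: a short random token used as a unique entity name.
    return _uuid.uuid4().hex[:length]


class EntityAssertions(object):
    def assertSameEntity(self, a, b, msg=None):
        # Assumed semantics: two Shotgun entities are "the same" when their
        # type and id match, which is how the fixture compares created
        # entities against cached lookups.
        assert a is not None and b is not None, msg or 'expected an entity'
        assert (a['type'], a['id']) == (b['type'], b['id']), msg or 'entities differ'
```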
avg_line_length: 34.195652 | max_line_length: 96 | alphanum_fraction: 0.590591 | classes: 0 (score 0) | generators: 0 (score 0) | decorators: 0 (score 0) | async_functions: 0 (score 0) | documentation: 431 (score 0.273999)

hexsha: a7bbfcd8b016b1703e3c92ed7c1cef2bc74d76e6 | size: 3,098 | ext: py | lang: Python
max_stars: config.py | ricsonc/aptools | head c6ff775a7ae5a7ea7b27235748fd45769d037ae8 | licenses ["MIT"] | count 1 | events 2020-11-09T18:02:36.000Z to 2020-11-09T18:02:36.000Z
max_issues: config.py | ricsonc/aptools | head c6ff775a7ae5a7ea7b27235748fd45769d037ae8 | licenses ["MIT"] | count null | events null
max_forks: config.py | ricsonc/aptools | head c6ff775a7ae5a7ea7b27235748fd45769d037ae8 | licenses ["MIT"] | count null | events null
content:
from munch import Munch as M
cores = 20
demosaic_params = M(
# at most one of use_flat or use_lens_profile should be True
# strongly recommended to have at least 1 be True
use_flat = False,
use_lens_profile = True,
alg = 'DCB', #alternatively, use LMMSE
camera = 'auto', # alternatively, specify something like "Canon EOS 6D Mark II"
    lens_make = 'auto', # alternatively, specify something like 'Nikon'
lens = 'Canon EF 70-200mm f/2.8L IS II USM', #'Nikkor 80-200mm f/2.8 ED',
)
detection_params = M(
Nsig = 3, # number of kernel sizes to try
min_sig = 1.0, # smallest kernel in px/std
max_sig = 6.0, # largest kernel in px/std
# only consider star candidates above this percentile of pixel luminosity
# 99.5 good for HA images, 99 for dark skies, 90 for typical use
lum_pthresh = 99.5,
    # only consider candidates with an aspect ratio of no more than this threshold
unround_threshold = 2,
)
registration_params = M(
max_stars = 500, # use this many stars to register at most.
nneighbors = 500,
ba_max_ratio = 0.99,
cb_max_ratio = 0.99,
epsilon = 1E-3, # match tolerance.
min_abs_diff = 1, #abs and rel diff for match success
min_rel_diff = 1.4,
# we discard outliers from the registration via a ransac process
ransac_iters = 50,
ransac_keep_percentile = 99,
# a point is an outlier if it's more than this many pixels from the linear fit
linear_fit_tol = 2.0,
)
warping_params = M(
coarseness = 10,
use_thinplate = False, # recommend only for multi-spectra
thinplate_smoothing=0,
min_stars = 20, # don't attempt warp with fewer stars
)
stacking_params = M(
# higher noise mul = more denoising, less robust to registration errors
# lower noise mul = more robust, less denoising
noise_mul = 32.0, # could also try 4, 16, 64, usually looks the same
patch_size = 32,
cache_path = '.cache', # a large np array is temporarily stored here
)
postprocess_params = M(
# crop this many pixels from the edge of the image before any processing
border_crop = 400,
# parameters for removing background "gradient".
gradient_window = 32+1, # size of the median kernel (odd)
dilation = 16, # dilation factor for median kernel
gradient_max = 90, # all pixels more luminous than this threshold are not counted as bkg
# excl_box is either None, or a list of 4 integers [miny, maxy, minx, maxx]
# this region will be ignored for the purposes of estimating background
excl_box = None,
# alternatively, you can pass in a path to a mask file, to ignore non-box regions
mask_file = None, #
    # a list of (input, output) pairs for the tone curve.
tone_curve = [
(0.05, -0.02),
(0.3, 0.0),
],
curve_type = 'thin-plate', # can also choose "cubic" for less overshoot
# if output border is given, the saved output will be the excl box, plus output border
# otherwise, you can manually specify the [miny, maxy, minx, maxx] for the output
output_border = 400,
output_box = None,
)
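The file above is a plain Python configuration module built from nested `Munch` objects, so every block supports attribute access. A minimal usage sketch, assuming the module is importable as `config` (its path in this record is `config.py`):

```python
# Assumed import path, based on the record's file location at the repo root.
import config

# Munch supports both attribute access and dict-style access on the same object.
print(config.demosaic_params.alg)              # 'DCB'
print(config.detection_params['lum_pthresh'])  # 99.5

# Overriding a single field for a run, e.g. for dark skies:
config.detection_params.lum_pthresh = 99
```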
avg_line_length: 34.422222 | max_line_length: 92 | alphanum_fraction: 0.679471 | classes: 0 (score 0) | generators: 0 (score 0) | decorators: 0 (score 0) | async_functions: 0 (score 0) | documentation: 1,902 (score 0.613944)

hexsha: a7c5dad7ba38fc7552e7848f03173e599033cdb | size: 10,809 | ext: py | lang: Python
max_stars: src/cogs/ide/dialogs/navigated_saved.py | boopdev/Jarvide | head 10920d53e4193c7e5526012b572c00f26536cd6c | licenses ["MIT"] | count null | events null
max_issues: src/cogs/ide/dialogs/navigated_saved.py | boopdev/Jarvide | head 10920d53e4193c7e5526012b572c00f26536cd6c | licenses ["MIT"] | count null | events null
max_forks: src/cogs/ide/dialogs/navigated_saved.py | boopdev/Jarvide | head 10920d53e4193c7e5526012b572c00f26536cd6c | licenses ["MIT"] | count null | events null
content:
import disnake
import time
from disnake.ext import commands
from typing import Optional
from odmantic import Model
from src.utils import ExitButton, EmbedFactory, File, get_info
class FileModel(Model): # noqa
user_id: int
name: str
file_url: Optional[str] = None
folder: Optional[str] = None
create_epoch: float
last_edit_epoch: Optional[float] = None
class DefaultButtons(disnake.ui.View):
async def interaction_check(self, interaction: disnake.MessageInteraction) -> bool:
return (
interaction.author == self.ctx.author
and interaction.channel == self.ctx.channel
)
async def on_timeout(self) -> None:
for child in self.children:
if isinstance(child, disnake.ui.Button):
child.disabled = True
embed = EmbedFactory.ide_embed(
self.ctx, "Ide timed out. Feel free to make a new one!"
)
await self.bot_message.edit(view=self, embed=embed)
def __init__(self, ctx, bot_message):
self.ctx = ctx
self.bot_message = bot_message
self.bot = ctx.bot
self.path = "/"
self.SUDO = self.ctx.me.guild_permissions.manage_messages
super().__init__(timeout=300)
@disnake.ui.button(label="Move dir", style=disnake.ButtonStyle.green)
async def current_directory(
self, button: disnake.ui.Button, interaction: disnake.MessageInteraction
):
await interaction.response.send_message(
"What folder would you like to move into?", ephemeral=True
)
directory = await self.bot.wait_for(
"message",
check=lambda m: self.ctx.author == m.author
and m.channel == self.ctx.channel,
)
path = await self.bot.engine.find_one(
FileModel,
FileModel.user_id == self.ctx.author.id,
FileModel.folder == self.path,
FileModel.name == "folder: " + directory.content,
)
if not path:
if self.SUDO:
await directory.delete()
return await interaction.channel.send(
f"{directory.content} doesn't exist!", delete_after=15
)
self.path = f"{self.path}{path.name[8:]}/"
embed = EmbedFactory.ide_embed(
self.ctx,
f"Moved into dir: {self.path}\n"
f"{''.join(['-' for _ in range(len(self.path) + len('Moved into dir: '))])}",
)
await self.bot_message.edit(embed=embed)
@disnake.ui.button(label="View folder", style=disnake.ButtonStyle.green)
async def view_folder(
self, button: disnake.ui.Button, interaction: disnake.MessageInteraction
):
files = "\n - ".join(
[
f"{k.name}"
for k in await self.bot.engine.find(
FileModel,
FileModel.user_id == self.ctx.author.id,
FileModel.folder == self.path,
)
]
)
embed = EmbedFactory.ide_embed(
self.ctx,
f"""{self.path}:
- {files}""",
)
await interaction.response.defer()
await self.bot_message.edit(
embed=embed,
)
@disnake.ui.button(label="New folder", style=disnake.ButtonStyle.green)
async def create_folder(
self, button: disnake.ui.Button, interaction: disnake.MessageInteraction
):
await interaction.response.send_message(
"What is the name of the folder you would like to create?", ephemeral=True
)
folder = await self.bot.wait_for(
"message",
check=lambda m: self.ctx.author == m.author
and m.channel == self.ctx.channel,
)
if len(folder.content) >= 12:
if self.SUDO:
await folder.delete()
return await interaction.channel.send(
"The folder name has to be less than 12 characters long!",
delete_after=15,
)
dir_files = [
k.name
for k in await self.bot.engine.find(
FileModel,
FileModel.user_id == self.ctx.author.id,
FileModel.folder == self.path,
FileModel.name == folder.content,
)
]
if folder.content in dir_files:
return await interaction.response.send_message(
"You can't have a folder in with the same name!"
)
folder_ = FileModel(
name="folder: " + folder.content,
user_id=self.ctx.author.id,
create_epoch=int(time.time()),
folder=self.path,
)
embed = EmbedFactory.ide_embed(
self.ctx,
f"Created {folder.content}\n{''.join(['-' for _ in range(len(folder.content)+len('Created '))])}\nCurrent directory: {self.path}",
)
await self.bot.engine.save(folder_)
await self.bot_message.edit(embed=embed)
@disnake.ui.button(label="All files", style=disnake.ButtonStyle.green)
async def view_files(
self, button: disnake.ui.Button, interaction: disnake.MessageInteraction
):
files = "\n - ".join(
[
f"{k.name}"
for k in await self.bot.engine.find(
FileModel, FileModel.user_id == self.ctx.author.id
)
]
)
embed = EmbedFactory.ide_embed(
self.ctx,
f"""/:
- {files}""",
)
await interaction.response.defer()
await self.bot_message.edit(
embed=embed,
)
@disnake.ui.button(label="Delete", style=disnake.ButtonStyle.danger, row=2)
async def delete_button(
self, button: disnake.ui.Button, interaction: disnake.MessageInteraction
):
await interaction.response.send_message(
"What file/folder do you want to delete? Specify relative path",
ephemeral=True,
)
directory = await self.bot.wait_for(
"message",
check=lambda m: self.ctx.author == m.author
and m.channel == self.ctx.channel,
)
filename = directory.content.split("/")[-1]
file_ = await self.bot.engine.find_one(
FileModel,
FileModel.user_id == self.ctx.author.id,
FileModel.name == filename,
FileModel.folder == self.path,
)
if file_:
await self.bot.engine.delete(file_)
return await interaction.channel.send(f"Successfully deleted {file_.name}")
folder = directory.content
if directory.content.endswith("/"):
folder = directory.content.split("/")[-2]
folder_ = await self.bot.engine.find_one(
FileModel,
FileModel.user_id == self.ctx.author.id,
FileModel.name == "folder: " + folder,
FileModel.folder == self.path,
)
if folder_:
await self.bot.engine.delete(folder_)
await interaction.channel.send(f"Successfully deleted {file_.name}")
await interaction.channel.send(
f"I could not find a folder or file called {directory.content} in {self.path}"
)
class OpenFromSaved(DefaultButtons):
def __init__(self, ctx, bot_message):
super().__init__(ctx, bot_message)
self.ctx = ctx
self.bot = ctx.bot
self.bot_message = bot_message
self.add_item(ExitButton(self.ctx, self.bot_message, row=2))
@disnake.ui.button(label="Select file", style=disnake.ButtonStyle.danger, row=2)
async def select_button(
self, button: disnake.ui.Button, interaction: disnake.MessageInteraction
):
from . import FileView
await interaction.response.send_message(
"What file do you want to open?", ephemeral=True
)
filename = await self.bot.wait_for(
"message",
check=lambda m: self.ctx.author == m.author
and m.channel == self.ctx.channel,
)
file_model = await self.bot.engine.find_one(
FileModel,
FileModel.user_id == self.ctx.author.id,
FileModel.name == filename.content,
FileModel.folder == self.path,
)
if not file_model:
if self.SUDO:
await filename.delete()
return await interaction.channel.send(
f"{filename.content} doesnt exist!", delete_after=15
)
file_ = await File.from_url(bot=self.bot, url=file_model.file_url)
embed = EmbedFactory.ide_embed(
self.ctx,
f"Opened {filename.content}\n{''.join(['-' for _ in range(len(filename.content)+len('Opened '))])}\n{await get_info(file_)}",
)
await self.bot_message.edit(
embed=embed, view=FileView(self.ctx, file_, self.bot_message)
)
class SaveFile(DefaultButtons):
def __init__(
self, ctx: commands.Context, bot_message: disnake.Message, file_: File
):
super().__init__(ctx, bot_message)
self.ctx = ctx
self.bot = ctx.bot
self.bot_message = bot_message
self.file = file_
self.add_item(ExitButton(self.ctx, self.bot_message, row=2))
@disnake.ui.button(label="Save", style=disnake.ButtonStyle.danger, row=2)
async def save_button(
self, button: disnake.ui.Button, interaction: disnake.MessageInteraction
):
from . import FileView
attachment = await self.file.to_real()
dir_files = [
k.name
for k in await self.bot.engine.find(
FileModel,
FileModel.user_id == self.ctx.author.id,
FileModel.folder == self.path,
)
]
file_ = FileModel(
file_url=attachment.url,
name=self.file.filename,
user_id=self.ctx.author.id,
create_epoch=int(time.time()),
folder=self.path,
)
overwrote = (
f"Overwrote file {self.file.filename}"
+ "".join(["-" for _ in range(len(self.file.filename) + len("Saved "))])
+ "\n"
)
n = "\n"
embed = EmbedFactory.ide_embed(
self.ctx,
f"Saved {self.file.filename}\n{''.join(['-' for _ in range(len(self.file.filename)+len('Saved '))])}{overwrote if self.file.filename in dir_files else n}{await get_info(attachment)}",
)
await interaction.response.defer()
await self.bot.engine.save(file_)
await self.bot_message.edit(
embed=embed, view=FileView(self.ctx, self.file, self.bot_message)
)
avg_line_length: 33.258462 | max_line_length: 195 | alphanum_fraction: 0.569248 | classes: 10,617 (score 0.982237) | generators: 0 (score 0) | decorators: 8,867 (score 0.820335) | async_functions: 8,879 (score 0.821445) | documentation: 1,454 (score 0.134518)

hexsha: a7be5a9cf5c8b15026fbcff5a02db179d5654ed0 | size: 6,098 | ext: py | lang: Python
max_stars: BiBloSA/exp_SICK/src/evaluator.py | mikimaus78/ml_monorepo | head b2c2627ff0e86e27f6829170d0dac168d8e5783b | licenses ["BSD-3-Clause"] | count 116 | events 2018-02-01T08:33:35.000Z to 2021-08-04T05:28:04.000Z
max_issues: BiBloSA/exp_SICK/src/evaluator.py | mikimaus78/ml_monorepo | head b2c2627ff0e86e27f6829170d0dac168d8e5783b | licenses ["BSD-3-Clause"] | count 2 | events 2019-02-23T18:54:22.000Z to 2019-11-09T01:30:32.000Z
max_forks: BiBloSA/exp_SICK/src/evaluator.py | mikimaus78/ml_monorepo | head b2c2627ff0e86e27f6829170d0dac168d8e5783b | licenses ["BSD-3-Clause"] | count 35 | events 2019-02-08T02:00:31.000Z to 2022-03-01T23:17:00.000Z
content:
from configs import cfg
from src.utils.record_log import _logger
import numpy as np
import tensorflow as tf
import scipy.stats as stats
class Evaluator(object):
def __init__(self, model):
self.model = model
self.global_step = model.global_step
## ---- summary----
self.build_summary()
self.writer = tf.summary.FileWriter(cfg.summary_dir)
def get_evaluation(self, sess, dataset_obj, global_step=None):
_logger.add()
_logger.add('getting evaluation result for %s' % dataset_obj.data_type)
logits_list, loss_list = [], []
target_score_list, predicted_score_list = [], []
for sample_batch, _, _, _ in dataset_obj.generate_batch_sample_iter():
feed_dict = self.model.get_feed_dict(sample_batch, 'dev')
logits, loss, predicted_score = sess.run([self.model.logits, self.model.loss,
self.model.predicted_score], feed_dict)
logits_list.append(np.argmax(logits, -1))
loss_list.append(loss)
predicted_score_list.append(predicted_score)
for sample in sample_batch:
target_score_list.append(sample['relatedness_score'])
logits_array = np.concatenate(logits_list, 0)
loss_value = np.mean(loss_list)
target_scores = np.array(target_score_list)
predicted_scores = np.concatenate(predicted_score_list, 0)
# pearson, spearman, mse
pearson_value = stats.pearsonr(target_scores, predicted_scores)[0]
spearman_value = stats.spearmanr(target_scores, predicted_scores)[0]
mse_value = np.mean((target_scores - predicted_scores) ** 2)
# todo: analysis
# analysis_save_dir = cfg.mkdir(cfg.answer_dir, 'gs_%d' % global_step or 0)
# OutputAnalysis.do_analysis(dataset_obj, logits_array, accu_array, analysis_save_dir,
# cfg.fine_grained)
if global_step is not None:
if dataset_obj.data_type == 'train':
summary_feed_dict = {
self.train_loss: loss_value,
self.train_pearson: pearson_value,
self.train_spearman: spearman_value,
self.train_mse: mse_value,
}
summary = sess.run(self.train_summaries, summary_feed_dict)
self.writer.add_summary(summary, global_step)
elif dataset_obj.data_type == 'dev':
summary_feed_dict = {
self.dev_loss: loss_value,
self.dev_pearson: pearson_value,
self.dev_spearman: spearman_value,
self.dev_mse: mse_value,
}
summary = sess.run(self.dev_summaries, summary_feed_dict)
self.writer.add_summary(summary, global_step)
else:
summary_feed_dict = {
self.test_loss: loss_value,
self.test_pearson: pearson_value,
self.test_spearman: spearman_value,
self.test_mse: mse_value,
}
summary = sess.run(self.test_summaries, summary_feed_dict)
self.writer.add_summary(summary, global_step)
return loss_value, (pearson_value, spearman_value, mse_value)
# --- internal use ------
def build_summary(self):
with tf.name_scope('train_summaries'):
self.train_loss = tf.placeholder(tf.float32, [], 'train_loss')
self.train_pearson = tf.placeholder(tf.float32, [], 'train_pearson')
self.train_spearman = tf.placeholder(tf.float32, [], 'train_spearman')
self.train_mse = tf.placeholder(tf.float32, [], 'train_mse')
tf.add_to_collection('train_summaries_collection', tf.summary.scalar('train_loss', self.train_loss))
tf.add_to_collection('train_summaries_collection', tf.summary.scalar('train_pearson', self.train_pearson))
tf.add_to_collection('train_summaries_collection', tf.summary.scalar('train_spearman', self.train_spearman))
tf.add_to_collection('train_summaries_collection', tf.summary.scalar('train_mse', self.train_mse))
self.train_summaries = tf.summary.merge_all('train_summaries_collection')
with tf.name_scope('dev_summaries'):
self.dev_loss = tf.placeholder(tf.float32, [], 'dev_loss')
self.dev_pearson = tf.placeholder(tf.float32, [], 'dev_pearson')
self.dev_spearman = tf.placeholder(tf.float32, [], 'dev_spearman')
self.dev_mse = tf.placeholder(tf.float32, [], 'dev_mse')
tf.add_to_collection('dev_summaries_collection', tf.summary.scalar('dev_loss',self.dev_loss))
tf.add_to_collection('dev_summaries_collection', tf.summary.scalar('dev_pearson', self.dev_pearson))
tf.add_to_collection('dev_summaries_collection', tf.summary.scalar('dev_spearman', self.dev_spearman))
tf.add_to_collection('dev_summaries_collection', tf.summary.scalar('dev_mse', self.dev_mse))
self.dev_summaries = tf.summary.merge_all('dev_summaries_collection')
with tf.name_scope('test_summaries'):
self.test_loss = tf.placeholder(tf.float32, [], 'test_loss')
self.test_pearson = tf.placeholder(tf.float32, [], 'test_pearson')
self.test_spearman = tf.placeholder(tf.float32, [], 'test_spearman')
self.test_mse = tf.placeholder(tf.float32, [], 'test_mse')
tf.add_to_collection('test_summaries_collection', tf.summary.scalar('test_loss',self.test_loss))
tf.add_to_collection('test_summaries_collection', tf.summary.scalar('test_pearson', self.test_pearson))
tf.add_to_collection('test_summaries_collection', tf.summary.scalar('test_spearman', self.test_spearman))
tf.add_to_collection('test_summaries_collection', tf.summary.scalar('test_mse', self.test_mse))
self.test_summaries = tf.summary.merge_all('test_summaries_collection')
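The evaluator summarises each dataset split with Pearson correlation, Spearman correlation and mean squared error between target and predicted relatedness scores. A self-contained sketch of that metric computation on synthetic arrays, independent of the TensorFlow graph above:

```python
import numpy as np
import scipy.stats as stats

# Synthetic target and predicted relatedness scores for illustration only.
target_scores = np.array([1.0, 2.5, 3.0, 4.5, 5.0])
predicted_scores = np.array([1.2, 2.4, 3.5, 4.0, 4.8])

pearson_value = stats.pearsonr(target_scores, predicted_scores)[0]
spearman_value = stats.spearmanr(target_scores, predicted_scores)[0]
mse_value = np.mean((target_scores - predicted_scores) ** 2)

print(pearson_value, spearman_value, mse_value)
```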
avg_line_length: 51.677966 | max_line_length: 120 | alphanum_fraction: 0.642178 | classes: 5,955 (score 0.97655) | generators: 0 (score 0) | decorators: 0 (score 0) | async_functions: 0 (score 0) | documentation: 1,114 (score 0.182683)

hexsha: a7c14fda1fe9509c8caa57724445fdcaee4171b6 | size: 3,696 | ext: py | lang: Python
max_stars: pytorch_translate/tasks/translation_from_pretrained_xlm.py | dzhulgakov/translate | head 018d3eed8d93ff32e86c912e68045c7a3f4ed0b7 | licenses ["BSD-3-Clause"] | count 748 | events 2018-05-02T17:12:53.000Z to 2022-03-26T04:44:44.000Z
max_issues: pytorch_translate/tasks/translation_from_pretrained_xlm.py | dzhulgakov/translate | head 018d3eed8d93ff32e86c912e68045c7a3f4ed0b7 | licenses ["BSD-3-Clause"] | count 352 | events 2018-05-02T19:05:59.000Z to 2022-02-25T16:54:27.000Z
max_forks: pytorch_translate/tasks/translation_from_pretrained_xlm.py | dzhulgakov/translate | head 018d3eed8d93ff32e86c912e68045c7a3f4ed0b7 | licenses ["BSD-3-Clause"] | count 193 | events 2018-05-02T17:14:56.000Z to 2022-02-24T21:10:56.000Z
content:
#!/usr/bin/env python3
# Copyright (c) 2017-present, Facebook, Inc.
# All rights reserved.
#
# This source code is licensed under the license found in the LICENSE file in
# the root directory of this source tree. An additional grant of patent rights
# can be found in the PATENTS file in the same directory.
from fairseq import options, tokenizer
from fairseq.tasks import register_task
from pytorch_translate import constants
from pytorch_translate.data.masked_lm_dictionary import MaskedLMDictionary
from pytorch_translate.tasks.pytorch_translate_task import PytorchTranslateTask
@register_task("pytorch_translate_translation_from_pretrained_xlm")
class PytorchTranslateTranslationFromPretrainedXLMTask(PytorchTranslateTask):
"""
Same as TranslationTask except use the MaskedLMDictionary class so that
we can load data that was binarized with the MaskedLMDictionary class.
This task should be used for the entire training pipeline when we want to
train an NMT model from a pretrained XLM checkpoint: binarizing NMT data,
training NMT with the pretrained XLM checkpoint, and subsequent evaluation
of that trained model.
"""
@staticmethod
def add_args(parser):
PytorchTranslateTask.add_args(parser)
"""Add task-specific arguments to the parser."""
parser.add_argument(
"--save-only", action="store_true", help="skip eval and only do save"
)
@classmethod
def load_dictionary(cls, filename):
"""Load the masked LM dictionary from the filename
Args:
filename (str): the filename
"""
return MaskedLMDictionary.load(filename)
@classmethod
def build_dictionary(
cls, filenames, workers=1, threshold=-1, nwords=-1, padding_factor=8
):
"""Build the dictionary
Args:
filenames (list): list of filenames
workers (int): number of concurrent workers
threshold (int): defines the minimum word count
nwords (int): defines the total number of words in the final dictionary,
including special symbols
padding_factor (int): can be used to pad the dictionary size to be a
multiple of 8, which is important on some hardware (e.g., Nvidia
Tensor Cores).
"""
d = MaskedLMDictionary()
for filename in filenames:
MaskedLMDictionary.add_file_to_dictionary(
filename, d, tokenizer.tokenize_line, workers
)
d.finalize(threshold=threshold, nwords=nwords, padding_factor=padding_factor)
return d
@classmethod
def setup_task(cls, args, **kwargs):
args.left_pad_source = options.eval_bool(args.left_pad_source)
# Load dictionaries
source_dict = MaskedLMDictionary.load(args.source_vocab_file)
target_dict = MaskedLMDictionary.load(args.target_vocab_file)
source_lang = args.source_lang or "src"
target_lang = args.target_lang or "tgt"
print(f"| [{source_lang}] dictionary: {len(source_dict)} types")
print(f"| [{target_lang}] dictionary: {len(target_dict)} types")
use_char_source = (args.char_source_vocab_file != "") or (
getattr(args, "arch", "") in constants.ARCHS_FOR_CHAR_SOURCE
)
if use_char_source:
char_source_dict = MaskedLMDictionary.load(args.char_source_vocab_file)
# this attribute is used for CharSourceModel construction
args.char_source_dict_size = len(char_source_dict)
else:
char_source_dict = None
return cls(args, source_dict, target_dict, char_source_dict)
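The `build_dictionary` docstring notes that `padding_factor` pads the dictionary size to a multiple of 8, which matters on hardware such as Nvidia Tensor Cores. The padding arithmetic itself is simple; a standalone illustration of it (not fairseq code):

```python
def pad_vocab_size(n_words, padding_factor=8):
    # Round the vocabulary size up to the next multiple of padding_factor.
    if padding_factor <= 1:
        return n_words
    remainder = n_words % padding_factor
    return n_words if remainder == 0 else n_words + (padding_factor - remainder)


print(pad_vocab_size(31998))  # 32000
print(pad_vocab_size(32000))  # 32000, already a multiple of 8
```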
avg_line_length: 39.319149 | max_line_length: 85 | alphanum_fraction: 0.688582 | classes: 3,042 (score 0.823052) | generators: 0 (score 0) | decorators: 3,110 (score 0.84145) | async_functions: 0 (score 0) | documentation: 1,740 (score 0.470779)

hexsha: a7c26519485c15577e8b274b47dd3a678ef542d3 | size: 5,084 | ext: py | lang: Python
max_stars: drugresnet/seya/layers/memnn2.py | Naghipourfar/CCLE | head cd557928a003200c683861b29c607942029bffb1 | licenses ["MIT"] | count 429 | events 2015-08-11T09:48:34.000Z to 2021-07-31T15:13:23.000Z
max_issues: drugresnet/seya/layers/memnn2.py | Naghipourfar/CCLE | head cd557928a003200c683861b29c607942029bffb1 | licenses ["MIT"] | count 55 | events 2015-09-10T11:57:58.000Z to 2021-04-24T14:13:31.000Z
max_forks: drugresnet/seya/layers/memnn2.py | Naghipourfar/CCLE | head cd557928a003200c683861b29c607942029bffb1 | licenses ["MIT"] | count 135 | events 2015-08-31T17:52:26.000Z to 2022-02-07T05:31:12.000Z
content:
import theano.tensor as T
import keras.backend as K
from keras.layers.core import LambdaMerge
from keras import initializations
class MemN2N(LambdaMerge):
def __init__(self, layers, output_dim, input_dim, input_length,
memory_length, hops=3, bow_mode="bow", mode="adjacent",
emb_init="uniform", init="glorot_uniform", **kwargs):
self.output_dim = output_dim
self.input_dim = input_dim
self.input_length = input_length
self.memory_length = memory_length
self.hops = hops
self.bow_mode = bow_mode
self.mode = mode
self.init = initializations.get(init)
self.emb_init = initializations.get(emb_init)
output_shape = (self.output_dim, )
super(MemN2N, self).__init__(layers, lambda x: x, output_shape)
def build(self):
# list of embedding layers
self.outputs = []
self.memory = []
# self.Hs = [] # if self.mode == "rnn"
self.trainable_weights = []
for i in range(self.hops):
# memory embedding - A
if self.mode == "adjacent" and i > 0:
A = self.outputs[-1]
else:
A = self.emb_init((self.input_dim, self.output_dim),
name="{}_A_{}".format(self.name, i))
self.trainable_weights += [A]
self.memory.append(A)
# outputs embedding - C
# if self.mode == "adjacent" and i > 1:
# Wo = self.outputs[-1]
# elif self.mode == "untied" or i == 0:
C = self.emb_init((self.input_dim, self.output_dim),
name="{}_C_{}".format(self.name, i))
self.trainable_weights += [C]
self.outputs.append(C)
# if self.mode == "rnn"
# H = self.init((self.output_dim, self.output_dim),
# name="{}_H_{}".format(self.name, i))
# self.trainable_weights += [H]
# b = K.zeros((self.input_dim,),
# name="{}_b_{}".format(self.name, i))
# self.Hs += [H]
# self.trainable_weights += [H]
if self.mode == "adjacent":
self.W = self.outputs[-1].T
self.b = K.zeros((self.input_dim,), name="{}_b".format(self.name))
# self.trainable_weights += [self.b]
# question embedding - B
self.B = self.emb_init((self.input_dim, self.output_dim),
name="{}_B".format(self.name))
self.trainable_weights += [self.B]
# Temporal embedding
self.Te = self.emb_init((self.input_length, self.output_dim))
self.trainable_weights += [self.Te]
def get_output(self, train=False):
inputs = [layer.get_output(train) for layer in self.layers]
facts, question = inputs
# WARN make sure input layers are Embedding layers with identity init
# facts = K.argmax(facts, axis=-1)
# question = K.argmax(question, axis=-1)
u, mask_q = self.lookup(question, self.B, 1) # just 1 question
for A, C in zip(self.memory, self.outputs):
m, mask_m = self.lookup(facts, A, self.memory_length)
c, mask_c = self.lookup(facts, C, self.memory_length)
# attention weights
p = self.attention(m, u, mask_m)
# output
o = self.calc_output(c, p)
u = o + u
# u = K.dot(u[:, 0, :], self.W) + self.b
return u[:, 0, :] # K.softmax(u)
def lookup(self, x, W, memory_length):
# shape: (batch*memory_length, input_length)
x = K.cast(K.reshape(x, (-1, self.input_length)), 'int32')
mask = K.expand_dims(K.not_equal(x, 0.), dim=-1)
# shape: (batch*memory_length, input_length, output_dim)
X = K.gather(W, x)
if self.bow_mode == "bow":
# shape: (batch*memory_length, output_dim)
X = K.sum(X + K.expand_dims(self.Te, 0), axis=1)
# shape: (batch, memory_length, output_dim)
X = K.reshape(X, (-1, memory_length, self.output_dim))
return X, mask
def attention(self, m, q, mask):
# mask original shape is (batch*memory_length, input_length, 1)
# shape (batch, memory)
mask = K.reshape(mask[:, 0], (-1, self.memory_length))
# shape: (batch, memory_length, 1)
p = T.batched_tensordot(m, q, (2, 2))
# shape: (batch, memory_length)
p = K.softmax(p[:, :, 0]) # * K.cast(mask, 'float32')
# shape: (batch, 1, memory_length)
return K.expand_dims(p, dim=1)
def calc_output(self, c, p):
# shape: (batch, memory_length, 1)
p = K.permute_dimensions(p, (0, 2, 1))
# shape: (batch, output_dim)
o = K.sum(c * p, axis=1)
# if self.mode == "rnn":
# import theano
# W = theano.printing.Print('[Debug] W shape: ', attrs=("shape",))(W)
# o = K.dot(o, W) + b
# shape: (batch, 1, output_dim)
return K.expand_dims(o, dim=1)
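The heart of the layer is the end-to-end memory attention: memory embeddings `m` are scored against the question embedding `u`, softmax-normalised into weights `p`, and used to form a weighted sum of the output embeddings `c`, which then updates `u` at each hop. A small NumPy sketch of one hop for a single example, with arbitrarily chosen shapes:

```python
import numpy as np


def softmax(x):
    e = np.exp(x - x.max())
    return e / e.sum()


memory_length, output_dim = 4, 3
m = np.random.randn(memory_length, output_dim)  # memory embeddings (A)
c = np.random.randn(memory_length, output_dim)  # output embeddings (C)
u = np.random.randn(output_dim)                 # question embedding (B)

p = softmax(m @ u)            # attention weights over memory slots
o = (c * p[:, None]).sum(0)   # weighted sum of output embeddings
u_next = o + u                # hop update, as in get_output above
print(p, u_next)
```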
avg_line_length: 39.71875 | max_line_length: 78 | alphanum_fraction: 0.54288 | classes: 4,953 (score 0.974233) | generators: 0 (score 0) | decorators: 0 (score 0) | async_functions: 0 (score 0) | documentation: 1,542 (score 0.303304)

hexsha: a7c2682f5396348598753e892a05447ab558cc24 | size: 419 | ext: py | lang: Python
max_stars: nyan/utils/io.py | TWRogers/nyan | head c224279467475c9ce81e4709dd357897e0e5c963 | licenses ["Apache-2.0"] | count 2 | events 2019-10-11T16:41:16.000Z to 2019-10-11T16:42:08.000Z
max_issues: nyan/utils/io.py | TWRogers/nyan | head c224279467475c9ce81e4709dd357897e0e5c963 | licenses ["Apache-2.0"] | count null | events null
max_forks: nyan/utils/io.py | TWRogers/nyan | head c224279467475c9ce81e4709dd357897e0e5c963 | licenses ["Apache-2.0"] | count null | events null
content:
import cv2
from PIL import Image
import os
import numpy as np
IMAGE_BE = os.environ.get('NYAN_IMAGE_BE', 'PIL')
if IMAGE_BE == 'PIL':
def IMREAD_FN(x):
return np.array(Image.open(x).convert('RGB')).astype(np.uint8)
elif IMAGE_BE == 'cv2':
def IMREAD_FN(x):
return cv2.imread(x).astype(np.uint8)[:, :, ::-1]
else:
raise NotImplementedError('IMAGE_BE {} not implemented'.format(IMAGE_BE))
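The backend is chosen once, at import time, from the `NYAN_IMAGE_BE` environment variable; both branches return an RGB `uint8` array. A hedged usage sketch, assuming the module is importable as `nyan.utils.io` (per the record's path) and using a placeholder image path:

```python
import os

# Must be set before the module is imported, since selection happens at import time.
os.environ['NYAN_IMAGE_BE'] = 'cv2'

from nyan.utils.io import IMREAD_FN  # assumed import path

img = IMREAD_FN('some_image.jpg')    # placeholder path
print(img.shape, img.dtype)          # (H, W, 3) uint8, RGB channel order
```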
avg_line_length: 23.277778 | max_line_length: 77 | alphanum_fraction: 0.668258 | classes: 0 (score 0) | generators: 0 (score 0) | decorators: 0 (score 0) | async_functions: 0 (score 0) | documentation: 64 (score 0.152745)

hexsha: a7c28ef23aa63e38de4b879a5620a01103796308 | size: 164 | ext: py | lang: Python
max_stars: src/server.py | sqweelygig/a-pi-api | head 7c83bf5d1a00e01a45edc7fda9b4887bf02b064a | licenses ["Apache-2.0"] | count null | events null
max_issues: src/server.py | sqweelygig/a-pi-api | head 7c83bf5d1a00e01a45edc7fda9b4887bf02b064a | licenses ["Apache-2.0"] | count null | events null
max_forks: src/server.py | sqweelygig/a-pi-api | head 7c83bf5d1a00e01a45edc7fda9b4887bf02b064a | licenses ["Apache-2.0"] | count null | events null
content:
import RPi.GPIO as GPIO
import connexion
if __name__ == '__main__':
app = connexion.App('a-pi-api')
app.add_api('v0/spec.yml')
app.run(host='0.0.0.0', port=80)
avg_line_length: 20.5 | max_line_length: 33 | alphanum_fraction: 0.682927 | classes: 0 (score 0) | generators: 0 (score 0) | decorators: 0 (score 0) | async_functions: 0 (score 0) | documentation: 42 (score 0.256098)

hexsha: a7c34c83fcf273716cc8990f3dfde892e307229c | size: 661 | ext: py | lang: Python
max_stars: detect_fraud_email_enron/tools/k_best_selector.py | gotamist/other_machine_learning | head 70c7f5367ed5cf9b6fd4818cda16add24a2b468d | licenses ["MIT"] | count null | events null
max_issues: detect_fraud_email_enron/tools/k_best_selector.py | gotamist/other_machine_learning | head 70c7f5367ed5cf9b6fd4818cda16add24a2b468d | licenses ["MIT"] | count null | events null
max_forks: detect_fraud_email_enron/tools/k_best_selector.py | gotamist/other_machine_learning | head 70c7f5367ed5cf9b6fd4818cda16add24a2b468d | licenses ["MIT"] | count null | events null
content:
#!/usr/bin/env python2
# -*- coding: utf-8 -*-
"""
Created on Tue Sep 25 18:26:45 2018
@author: gotamist
"""
import sys
sys.path.append("../tools/")
from feature_format import featureFormat, targetFeatureSplit
from sklearn.feature_selection import SelectKBest
def KBestSelector(data_dict, features_list, k):
data_array = featureFormat(data_dict, features_list)
labels, features = targetFeatureSplit(data_array)
kbest = SelectKBest(k=k)
kbest.fit(features, labels)
scores = kbest.scores_
tuples = zip(features_list[1:], scores)
top_k_features = sorted(tuples, key=lambda x: x[1], reverse=True)
return top_k_features[:k]
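`KBestSelector` wraps scikit-learn's `SelectKBest`: fit on the features, read `scores_`, and return the k highest-scoring (feature, score) pairs. A self-contained sketch of the underlying scikit-learn call on toy data (feature names and values invented for illustration):

```python
import numpy as np
from sklearn.feature_selection import SelectKBest, f_classif

X = np.array([[1.0, 10.0, 0.1],
              [2.0, 20.0, 0.2],
              [1.5, 11.0, 0.9],
              [2.5, 21.0, 0.8]])
y = np.array([0, 1, 0, 1])

kbest = SelectKBest(score_func=f_classif, k=2)
kbest.fit(X, y)

feature_names = ['a', 'b', 'c']
ranked = sorted(zip(feature_names, kbest.scores_), key=lambda t: t[1], reverse=True)
print(ranked[:2])  # the two most informative features with their F-scores
```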
avg_line_length: 28.73913 | max_line_length: 69 | alphanum_fraction: 0.721634 | classes: 0 (score 0) | generators: 0 (score 0) | decorators: 0 (score 0) | async_functions: 0 (score 0) | documentation: 118 (score 0.178517)

hexsha: a7c3a82db91bce9cfb29cde148a1916bca6eaffc | size: 697 | ext: py | lang: Python
max_stars: Python/179.py | jaimeliew1/Project_Euler_Solutions | head 963c9c6d6571cade8f87341f97a6a2cd1af202bb | licenses ["MIT"] | count null | events null
max_issues: Python/179.py | jaimeliew1/Project_Euler_Solutions | head 963c9c6d6571cade8f87341f97a6a2cd1af202bb | licenses ["MIT"] | count 1 | events 2018-04-16T21:01:50.000Z to 2018-04-16T21:01:50.000Z
max_forks: Python/179.py | jaimeliew1/Project_Euler_Solutions | head 963c9c6d6571cade8f87341f97a6a2cd1af202bb | licenses ["MIT"] | count null | events null
content:
# -*- coding: utf-8 -*-
"""
Solution to Project Euler problem 179 - Consecutive positive divisors
Author: Jaime Liew
https://github.com/jaimeliew1/Project_Euler_Solutions
"""
def run():
N = int(1e7)
n_factors = [1 for _ in range(N + 1)]
    # can start at 2 because 1 is a divisor for all numbers and won't change the
# relative count.
# Factor counting loop
for i in range(2, N + 1):
n = i
while n < N:
n_factors[n] += 1
n += i
# Evaluate factor count array
count = 0
for i in range(N):
if n_factors[i] == n_factors[i + 1]:
count += 1
return count
if __name__ == "__main__":
print(run())
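The solution counts divisors with a sieve: each i from 2 to N increments the count of every multiple of i, so `n_factors[n]` ends up equal to the number of divisors of n. For a small N this can be cross-checked against a direct divisor count; a quick verification sketch:

```python
def divisor_count(n):
    # Direct O(sqrt(n)) divisor count, used only to sanity-check the sieve.
    count, d = 0, 1
    while d * d <= n:
        if n % d == 0:
            count += 2 if d * d != n else 1
        d += 1
    return count


N = 1000
sieve = [1] * (N + 1)
for i in range(2, N + 1):
    for n in range(i, N + 1, i):
        sieve[n] += 1

assert all(sieve[n] == divisor_count(n) for n in range(1, N + 1))
print(sum(1 for n in range(2, N) if sieve[n] == sieve[n + 1]))
```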
avg_line_length: 19.914286 | max_line_length: 79 | alphanum_fraction: 0.573888 | classes: 0 (score 0) | generators: 0 (score 0) | decorators: 0 (score 0) | async_functions: 0 (score 0) | documentation: 328 (score 0.470588)

hexsha: a7c3c43019bf1d3df920d62ddaf471e6b921d131 | size: 7,417 | ext: py | lang: Python
max_stars: tweetx/environment.py | Druid-of-Luhn/TweetX | head 15a2dc0ad619f846b40880ad5fc3ab690d835e0d | licenses ["MIT"] | count null | events null
max_issues: tweetx/environment.py | Druid-of-Luhn/TweetX | head 15a2dc0ad619f846b40880ad5fc3ab690d835e0d | licenses ["MIT"] | count null | events null
max_forks: tweetx/environment.py | Druid-of-Luhn/TweetX | head 15a2dc0ad619f846b40880ad5fc3ab690d835e0d | licenses ["MIT"] | count null | events null
content:
#!/usr/bin/env python3
import asyncio, entity, io, json, logging, queue, random, threading, time, websockets, whale
from bot import bot
from random import randrange
logging.basicConfig()
log = logging.getLogger('tweetx')
log.setLevel(logging.DEBUG)
class Event:
def __init__(self):
self.callbacks = []
def __call__(self, *a, **kw):
for callback in self.callbacks:
callback(*a, **kw)
def __iadd__(self, callback):
self.callbacks.append(callback)
return self
class Environment:
def __init__(self):
self.spaceship = entity.Spaceship(self, 0, 0)
self.entities = [self.spaceship]
self.entity_added = Event()
self.entity_removed = Event()
self.entity_moved = Event()
def add_entity(self, entity):
self.entities.append(entity)
self.entity_added(entity)
def remove_entity(self, entity):
self.entities.remove(entity)
self.entity_removed(entity)
def space_contains(self, x, y):
for entity in self.entities:
if (entity.x-(entity.width/2) <= x <= entity.x+(entity.width/2)) and (entity.y-(entity.height/2) <= y <= entity.y+(entity.height/2)):
return entity
return None
def update_positions(self):
for ent in self.entities:
ent.tick()
ent.x += ent.velocity_x
ent.y += ent.velocity_y
if ent.velocity_x != 0 or ent.velocity_y != 0:
self.entity_moved(ent)
collide_entity = self.space_contains(ent.x, ent.y)
            if collide_entity is not None:
ent.velocity_x = -1*ent.velocity_x
ent.velocity_y = -1*ent.velocity_y
collide_entity.velocity_x = -1*collide_entity.velocity_x
collide_entity.velocity_y = -1*collide_entity.velocity_y
if isinstance(collide_entity, entity.Spaceship):
collide_entity.health -= 1
if isinstance(collide_entity, entity.Spaceship):
collide_entity.health -= 1
def generate_entities(self):
appearance_probabilities = (
(0.15, lambda: random.choice([whale.Dolphin])),
(0.1, lambda: entity.Meteor),
(0.05, lambda: entity.Planet)
)
k = random.uniform(0, 1)
for p, choose in appearance_probabilities:
if p > k:
while True:
dx = (1 if random.uniform(0, 1) > 0.5 else -1) * max(0, random.uniform(80, 500))
dy = (1 if random.uniform(0, 1) > 0.5 else -1) * max(0, random.uniform(80, 500))
new_entity = choose()(self.spaceship.x + dx, self.spaceship.y + dy)
if self.space_contains(new_entity.x, new_entity.y) is None:
break
self.add_entity(new_entity)
log.debug('Generated a %s at (%s, %s)' % (type(new_entity).__name__, new_entity.x, new_entity.y))
break
class Game:
class Client:
def __init__(self, game, websocket, path):
self.game = game
self.websocket = websocket
self.path = path
self.queue = queue.Queue()
def push(self, change):
if self.game.active:
log.debug('Sending to %s: %s' % (self.websocket.remote_address, change))
self.queue.put(change)
async def loop(self):
while self.game.active:
change = self.queue.get()
await self.websocket.send(json.dumps(change))
self.game.clients.remove(self)
def handle_entity_added(self, e):
self.push({
'entity': e.id,
'type': type(e).__name__,
'pos': (e.x, e.y),
'velocity': (e.velocity_x, e.velocity_y),
'width': e.width,
'height': e.height,
'direction': e.direction_orientation,
'added': True
})
def handle_entity_removed(self, e):
self.push({
'entity': e.id,
'removed': True
})
def handle_entity_moved(self, e):
self.push({
'entity': e.id,
'pos': (e.x, e.y),
'width': e.width,
'height': e.height,
'velocity': (e.velocity_x, e.velocity_y),
'direction': e.direction_orientation,
})
# The internal tick length, in seconds
TICK_LENGTH = 0.5
# The number of internal ticks to a command tick
TICKS_PER_COMMAND_TICK = 10
def __init__(self, host = 'localhost', port = 17922):
self.active = False
self.environment = Environment()
self.host = host
self.port = port
self.clients = []
self.changes = queue.Queue()
self.ticks = 0
self.ticks_since_last_command = 0
self.bot = bot.TwitterBot(self)
self.exit_event = threading.Event()
async def start_server(self):
async def new_client(websocket, path):
log.info('New client! %s' % (websocket.remote_address,))
client = self.Client(self, websocket, path)
self.clients.append(client)
self.environment.entity_added += client.handle_entity_added
self.environment.entity_removed += client.handle_entity_removed
self.environment.entity_moved += client.handle_entity_moved
for entity in self.environment.entities:
client.handle_entity_added(entity)
await client.loop()
self.websocket = await websockets.serve(new_client, self.host, self.port)
log.info('Started listening on %s:%d' % (self.host, self.port))
def tick(self):
while self.active:
self.ticks += 1
log.debug('Tick!')
for client in self.clients:
client.push({
'power': self.environment.spaceship.reactor.power,
'shield': self.environment.spaceship.shield.charge,
'engines': self.environment.spaceship.engine_power.charge,
'weapon': self.environment.spaceship.weapon.charge
})
if self.ticks_since_last_command == 0:
log.debug('Performing a command tick...')
self.bot.tick()
self.ticks_since_last_command = self.TICKS_PER_COMMAND_TICK
else:
self.ticks_since_last_command -= 1
self.environment.update_positions()
self.environment.generate_entities()
time.sleep(self.TICK_LENGTH)
def run(self):
self.active = True
self.bot.start()
self.tick_thread = threading.Thread(target = self.tick)
self.tick_thread.start()
event_loop = asyncio.get_event_loop()
event_loop.run_until_complete(self.start_server())
event_loop.run_forever()
def stop(self, crashed=False):
self.active = False
self.exit_event.set()
self.bot.stop(crashed=crashed)
if __name__ == "__main__":
sim = Game()
try:
sim.run()
except KeyboardInterrupt:
sim.stop()
raise
except:
sim.stop(crashed=True)
raise
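The `Event` helper at the top of the file is a tiny observer pattern: callbacks are registered with `+=` and fired by calling the event object. A short standalone usage sketch (the class is re-declared here so the snippet runs on its own, independent of the game loop):

```python
class Event:
    def __init__(self):
        self.callbacks = []

    def __call__(self, *a, **kw):
        for callback in self.callbacks:
            callback(*a, **kw)

    def __iadd__(self, callback):
        self.callbacks.append(callback)
        return self


entity_moved = Event()
entity_moved += lambda e: print('moved:', e)
entity_moved({'id': 1, 'pos': (3, 4)})  # every registered callback receives the entity
```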
avg_line_length: 32.674009 | max_line_length: 145 | alphanum_fraction: 0.556829 | classes: 6,966 (score 0.939194) | generators: 0 (score 0) | decorators: 0 (score 0) | async_functions: 960 (score 0.129432) | documentation: 425 (score 0.057301)

hexsha: a7c3c5875178bbdc7bd8d2fd3aaff1ff122b86d9 | size: 682 | ext: py | lang: Python
max_stars: retired/example_process_discharge_simulation.py | changliao1025/pyswat | head cdcda1375be8c0f71459a78438b1e9f8a22a77bc | licenses ["MIT"] | count 2 | events 2021-12-11T01:39:00.000Z to 2022-02-15T17:57:45.000Z
max_issues: retired/example_process_discharge_simulation.py | changliao1025/pyswat | head cdcda1375be8c0f71459a78438b1e9f8a22a77bc | licenses ["MIT"] | count 5 | events 2022-03-10T16:38:30.000Z to 2022-03-28T17:31:20.000Z
max_forks: retired/example_process_discharge_simulation.py | changliao1025/pyswat | head cdcda1375be8c0f71459a78438b1e9f8a22a77bc | licenses ["MIT"] | count null | events null
content:
from swaty.simulation.swat_main import swat_main
from swaty.swaty_read_model_configuration_file import swat_read_model_configuration_file
from swaty.classes.pycase import swaty
from swaty.postprocess.extract.swat_extract_stream_discharge import swat_extract_stream_discharge
sFilename_configuration_in = '/global/homes/l/liao313/workspace/python/swaty/swaty/shared/swat_simulation.xml'
#step 1
aConfig = swat_read_model_configuration_file(sFilename_configuration_in)
# iCase_index_in=iCase_index_in, sJob_in=sJob_in, iFlag_mode_in=iFlag_mode_in)
aConfig['sFilename_model_configuration'] = sFilename_configuration_in
oModel = swaty(aConfig)
swat_extract_stream_discharge(oModel)
avg_line_length: 52.461538 | max_line_length: 110 | alphanum_fraction: 0.879765 | classes: 0 (score 0) | generators: 0 (score 0) | decorators: 0 (score 0) | async_functions: 0 (score 0) | documentation: 197 (score 0.288856)

hexsha: a7c4df68721fac6742030901c0c135b22a7c5979 | size: 861 | ext: py | lang: Python
max_stars: ThinkPython/chap9/ex9.py | sokolowskik/Tutorials | head d2681d4f18b03e00f90f9132c77f0b23b74d2629 | licenses ["MIT"] | count null | events null
max_issues: ThinkPython/chap9/ex9.py | sokolowskik/Tutorials | head d2681d4f18b03e00f90f9132c77f0b23b74d2629 | licenses ["MIT"] | count null | events null
max_forks: ThinkPython/chap9/ex9.py | sokolowskik/Tutorials | head d2681d4f18b03e00f90f9132c77f0b23b74d2629 | licenses ["MIT"] | count null | events null
content:
def is_reverse(i, j):
"""
Convert 2-digit numbers to strings and check if they are palindromic.
    If one of the numbers has less than 2 digits, fill with zeros.
"""
str_i = str(i)
str_j = str(j)
if len(str_i) < 2:
str_i = str_i.zfill(2)
if len(str_j) < 2:
str_j = str_j.zfill(2)
return str_j[::-1] == str_i
age_diff = 15
d_age = 0
while age_diff <= 50:
reversible = 0
for d_age in range(0,80):
m_age = d_age + age_diff
if is_reverse(d_age, m_age):
reversible += 1
if reversible == 6:
print 'The daughter is', d_age, 'years old'
if reversible == 8:
print 'At the 8th time the daughter will be', d_age, 'years old and the mother will be', m_age, 'years old'
break
d_age += 1
age_diff += 1
avg_line_length: 26.90625 | max_line_length: 123 | alphanum_fraction: 0.551684 | classes: 0 (score 0) | generators: 0 (score 0) | decorators: 0 (score 0) | async_functions: 0 (score 0) | documentation: 263 (score 0.305459)

hexsha: a7c5b09df90f26ab3c6cd5143e35bfba31e9f2b0 | size: 10,237 | ext: py | lang: Python
max_stars: Segnet/训练.py | 1044197988/- | head 5c3ee7c9431ae85c68f418901378326b91c6f00d | licenses ["Apache-2.0"] | count 186 | events 2019-03-20T08:54:08.000Z to 2022-03-30T04:34:37.000Z
max_issues: Segnet/训练.py | sunkaiyue0/Semantic-segmentation-of-remote-sensing-images | head 5c3ee7c9431ae85c68f418901378326b91c6f00d | licenses ["Apache-2.0"] | count 6 | events 2019-08-29T08:18:13.000Z to 2021-10-09T10:29:58.000Z
max_forks: Segnet/训练.py | sunkaiyue0/Semantic-segmentation-of-remote-sensing-images | head 5c3ee7c9431ae85c68f418901378326b91c6f00d | licenses ["Apache-2.0"] | count 60 | events 2019-10-23T03:50:36.000Z to 2022-03-25T03:16:25.000Z
content:
#coding=utf-8
import matplotlib
matplotlib.use("Agg")
import tensorflow as tf
import argparse
import numpy as np
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Conv2D,MaxPooling2D,UpSampling2D,BatchNormalization,Reshape,Permute,Activation
from tensorflow.keras.utils import to_categorical
from tensorflow.keras.preprocessing.image import img_to_array
from tensorflow.keras.callbacks import ModelCheckpoint
from sklearn.preprocessing import LabelEncoder
from PIL import Image
import matplotlib.pyplot as plt
import cv2
import random
import os
from tqdm import tqdm
seed = 7
np.random.seed(seed)
# image size
img_w = 32
img_h = 32
# number of classes
n_label=6
classes=[0.0,17.0,34.0,51.0,68.0,255.0]
labelencoder = LabelEncoder()
labelencoder.fit(classes)
# number of training epochs and batch size
EPOCHS = 5
BS = 32
# maximum image value
divisor=255.0
# image root path
filepath ='C:\\Users\Administrator\Desktop\Project\src\\'
# read an image
def load_img(path, grayscale=False):
if grayscale:
img = cv2.imread(path,cv2.IMREAD_GRAYSCALE)
else:
img = cv2.imread(path)
img = np.array(img,dtype="float") / divisor
return img
# get the file lists for training and validation data
def get_train_val(val_rate = 0.25):
train_url = []
train_set = []
val_set = []
for pic in os.listdir(filepath + 'train'):
train_url.append(pic)
random.shuffle(train_url)
total_num = len(train_url)
val_num = int(val_rate * total_num)
for i in range(len(train_url)):
if i < val_num:
val_set.append(train_url[i])
else:
train_set.append(train_url[i])
return train_set,val_set
# generate training data
def generateData(batch_size,data=[]):
while True:
train_data = []
train_label = []
batch = 0
for i in (range(len(data))):
url = data[i]
batch += 1
img = load_img(filepath + 'train/' + url)
img = img_to_array(img)
train_data.append(img)
label = load_img(filepath + 'label/' + url, grayscale=True)
label = img_to_array(label).reshape((img_w * img_h,))
train_label.append(label)
if batch % batch_size==0:
train_data = np.array(train_data)
                train_label = np.array(train_label).flatten()  # flatten
train_label = labelencoder.transform(train_label)
                train_label = to_categorical(train_label, num_classes=n_label)  # one-hot encode the output labels
train_label = train_label.reshape((batch_size,img_w,img_h,n_label))
yield (train_data,train_label)
train_data = []
train_label = []
batch = 0
# generate validation data
def generateValidData(batch_size,data=[]):
while True:
valid_data = []
valid_label = []
batch = 0
for i in (range(len(data))):
url = data[i]
batch += 1
img = load_img(filepath + 'train/' + url)
img = img_to_array(img)
valid_data.append(img)
label = load_img(filepath + 'label/' + url, grayscale=True)
label = img_to_array(label).reshape((img_w * img_h,))
valid_label.append(label)
if batch % batch_size==0:
valid_data = np.array(valid_data)
valid_label = np.array(valid_label).flatten()
valid_label = labelencoder.transform(valid_label)
valid_label = to_categorical(valid_label, num_classes=n_label)
valid_label = valid_label.reshape((batch_size,img_w,img_h,n_label))
yield (valid_data,valid_label)
valid_data = []
valid_label = []
batch = 0
# define the network model
def SegNet():
model = Sequential()
#encoder
model.add(Conv2D(64,(3,3),strides=(1,1),input_shape=(img_w,img_h,3),padding='same',activation='relu',data_format='channels_last'))
model.add(BatchNormalization())
model.add(Conv2D(64,(3,3),strides=(1,1),padding='same',activation='relu'))
model.add(BatchNormalization())
model.add(MaxPooling2D(pool_size=(2,2)))
#(128,128)
model.add(Conv2D(128, (3, 3), strides=(1, 1), padding='same', activation='relu'))
model.add(BatchNormalization())
model.add(Conv2D(128, (3, 3), strides=(1, 1), padding='same', activation='relu'))
model.add(BatchNormalization())
model.add(MaxPooling2D(pool_size=(2,2)))
#(64,64)
model.add(Conv2D(256, (3, 3), strides=(1, 1), padding='same', activation='relu'))
model.add(BatchNormalization())
model.add(Conv2D(256, (3, 3), strides=(1, 1), padding='same', activation='relu'))
model.add(BatchNormalization())
model.add(Conv2D(256, (3, 3), strides=(1, 1), padding='same', activation='relu'))
model.add(BatchNormalization())
model.add(MaxPooling2D(pool_size=(2, 2)))
#(32,32)
model.add(Conv2D(512, (3, 3), strides=(1, 1), padding='same', activation='relu'))
model.add(BatchNormalization())
model.add(Conv2D(512, (3, 3), strides=(1, 1), padding='same', activation='relu'))
model.add(BatchNormalization())
model.add(Conv2D(512, (3, 3), strides=(1, 1), padding='same', activation='relu'))
model.add(BatchNormalization())
model.add(MaxPooling2D(pool_size=(2, 2)))
#(16,16)
model.add(Conv2D(512, (3, 3), strides=(1, 1), padding='same', activation='relu'))
model.add(BatchNormalization())
model.add(Conv2D(512, (3, 3), strides=(1, 1), padding='same', activation='relu'))
model.add(BatchNormalization())
model.add(Conv2D(512, (3, 3), strides=(1, 1), padding='same', activation='relu'))
model.add(BatchNormalization())
model.add(MaxPooling2D(pool_size=(2, 2)))
#(8,8)
#decoder
model.add(UpSampling2D(size=(2,2)))
#(16,16)
model.add(Conv2D(512, (3, 3), strides=(1, 1), padding='same', activation='relu'))
model.add(BatchNormalization())
model.add(Conv2D(512, (3, 3), strides=(1, 1), padding='same', activation='relu'))
model.add(BatchNormalization())
model.add(Conv2D(512, (3, 3), strides=(1, 1), padding='same', activation='relu'))
model.add(BatchNormalization())
model.add(UpSampling2D(size=(2, 2)))
#(32,32)
model.add(Conv2D(512, (3, 3), strides=(1, 1), padding='same', activation='relu'))
model.add(BatchNormalization())
model.add(Conv2D(512, (3, 3), strides=(1, 1), padding='same', activation='relu'))
model.add(BatchNormalization())
model.add(Conv2D(512, (3, 3), strides=(1, 1), padding='same', activation='relu'))
model.add(BatchNormalization())
model.add(UpSampling2D(size=(2, 2)))
#(64,64)
model.add(Conv2D(256, (3, 3), strides=(1, 1), padding='same', activation='relu'))
model.add(BatchNormalization())
model.add(Conv2D(256, (3, 3), strides=(1, 1), padding='same', activation='relu'))
model.add(BatchNormalization())
model.add(Conv2D(256, (3, 3), strides=(1, 1), padding='same', activation='relu'))
model.add(BatchNormalization())
model.add(UpSampling2D(size=(2, 2)))
#(128,128)
model.add(Conv2D(128, (3, 3), strides=(1, 1), padding='same', activation='relu'))
model.add(BatchNormalization())
model.add(Conv2D(128, (3, 3), strides=(1, 1), padding='same', activation='relu'))
model.add(BatchNormalization())
model.add(UpSampling2D(size=(2, 2)))
#(256,256)
model.add(Conv2D(64, (3, 3), strides=(1, 1), input_shape=(img_w, img_h,3), padding='same', activation='relu',data_format='channels_last'))
model.add(BatchNormalization())
model.add(Conv2D(64, (3, 3), strides=(1, 1), padding='same', activation='relu'))
model.add(BatchNormalization())
model.add(Conv2D(n_label, (1, 1), strides=(1, 1), padding='same'))
model.add(Activation('softmax'))
model.compile(loss='categorical_crossentropy',optimizer='sgd',metrics=['accuracy'])
model.summary()
return model
# start training
def train(args):
model = SegNet()
modelcheck = ModelCheckpoint(args['model'],monitor='val_acc',save_best_only=True,mode='max')
callable = [modelcheck,tf.keras.callbacks.TensorBoard(log_dir='.')]
train_set,val_set = get_train_val()
train_numb = len(train_set)
valid_numb = len(val_set)
print ("the number of train data is",train_numb)
print ("the number of val data is",valid_numb)
H = model.fit(x=generateData(BS,train_set),steps_per_epoch=(train_numb//BS),epochs=EPOCHS,verbose=2,
validation_data=generateValidData(BS,val_set),validation_steps=(valid_numb//BS),callbacks=callable)
# plot the training loss and accuracy
plt.style.use("ggplot")
plt.figure()
N = EPOCHS
plt.plot(np.arange(0, N), H.history["loss"], label="train_loss")
plt.plot(np.arange(0, N), H.history["val_loss"], label="val_loss")
plt.plot(np.arange(0, N), H.history["acc"], label="train_acc")
plt.plot(np.arange(0, N), H.history["val_acc"], label="val_acc")
plt.title("Training Loss and Accuracy on SegNet Satellite Seg")
plt.xlabel("Epoch #")
plt.ylabel("Loss/Accuracy")
plt.legend(loc="lower left")
plt.savefig(args["plot"])
# get arguments
def args_parse():
# construct the argument parse and parse the arguments
ap = argparse.ArgumentParser()
ap.add_argument("-a", "--augment", help="using data augment or not",
action="store_true", default=False)
ap.add_argument("-m", "--model", required=False,default="segnet.h5",
help="path to output model")
ap.add_argument("-p", "--plot", type=str, default="plot.png",
help="path to output accuracy/loss plot")
args = vars(ap.parse_args())
return args
# run the program
if __name__=='__main__':
args = args_parse()
train(args)
print("完成")
#predict()
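A central detail of `generateData` and `generateValidData` is how grayscale label images (pixel values 0, 17, 34, 51, 68, 255) become one-hot targets: flatten, map each pixel value to a class index with `LabelEncoder`, one-hot encode with `to_categorical`, then reshape to (batch, img_w, img_h, n_label). A small standalone sketch of that transformation on a fake 2x2 label image:

```python
import numpy as np
from sklearn.preprocessing import LabelEncoder
from tensorflow.keras.utils import to_categorical

classes = [0.0, 17.0, 34.0, 51.0, 68.0, 255.0]
labelencoder = LabelEncoder()
labelencoder.fit(classes)

# A fake 2x2 label "image" for one sample.
label = np.array([[0.0, 17.0],
                  [255.0, 68.0]])

flat = label.flatten()                    # shape (4,)
indices = labelencoder.transform(flat)    # [0, 1, 5, 4]
onehot = to_categorical(indices, num_classes=6)
onehot = onehot.reshape((1, 2, 2, 6))     # (batch, img_w, img_h, n_label)
print(onehot.shape)
```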
avg_line_length: 40.623016 | max_line_length: 143 | alphanum_fraction: 0.602911 | classes: 0 (score 0) | generators: 2,195 (score 0.210794) | decorators: 0 (score 0) | async_functions: 0 (score 0) | documentation: 1,469 (score 0.141074)

hexsha: a7ca81e026978ae19cb5d85995f5f61a2647b878 | size: 1,465 | ext: py | lang: Python
max_stars: Python/295. FindMedianFromDataStream.py | nizD/LeetCode-Solutions | head 7f4ca37bab795e0d6f9bfd9148a8fe3b62aa5349 | licenses ["MIT"] | count 263 | events 2020-10-05T18:47:29.000Z to 2022-03-31T19:44:46.000Z
max_issues: Python/295. FindMedianFromDataStream.py | nizD/LeetCode-Solutions | head 7f4ca37bab795e0d6f9bfd9148a8fe3b62aa5349 | licenses ["MIT"] | count 1,264 | events 2020-10-05T18:13:05.000Z to 2022-03-31T23:16:35.000Z
max_forks: Python/295. FindMedianFromDataStream.py | nizD/LeetCode-Solutions | head 7f4ca37bab795e0d6f9bfd9148a8fe3b62aa5349 | licenses ["MIT"] | count 760 | events 2020-10-05T18:22:51.000Z to 2022-03-29T06:06:20.000Z
content:
"""
Problem:
--------
Design a data structure that supports the following two operations:
- `void addNum(int num)`: Add a integer number from the data stream to the data structure.
- `double findMedian()`: Return the median of all elements so far.
"""
class MedianFinder:
def __init__(self):
"""
Initialize your data structure here.
"""
self.list = []
def addNum(self, num: int) -> None:
# Traverse through the list and check if `num` > ith element
# If yes, insert `num` in that index
# This keeps the list sorted at all times
for i in range(len(self.list)):
if num > self.list[i]:
self.list.insert(i, num)
return
# If `num` is the largest element or is the first one to be added
self.list.append(num)
def findMedian(self) -> float:
# Find index of the middle element (floor division by 2)
mid_index = len(self.list) // 2
if len(self.list) % 2 == 0:
# If number of elements = EVEN
# Return average of the middle 2 elements
return (self.list[mid_index - 1] + self.list[mid_index]) / 2
else:
# If number of elements = ODD
# Return the middle element
return self.list[mid_index]
# Your MedianFinder object will be instantiated and called as such:
# obj = MedianFinder()
# obj.addNum(num)
# param_2 = obj.findMedian()
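A quick usage sketch matching the commented instantiation above; it assumes the `MedianFinder` class defined in this file. Note that `addNum` keeps the internal list sorted in descending order, which does not change which elements sit in the middle.

```python
finder = MedianFinder()
for value in [5, 1, 9, 3]:
    finder.addNum(value)

print(finder.findMedian())  # stream so far is {1, 3, 5, 9} -> (3 + 5) / 2 = 4.0

finder.addNum(7)
print(finder.findMedian())  # odd count -> middle element, 5
```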
avg_line_length: 29.897959 | max_line_length: 90 | alphanum_fraction: 0.595222 | classes: 1,068 (score 0.72901) | generators: 0 (score 0) | decorators: 0 (score 0) | async_functions: 0 (score 0) | documentation: 832 (score 0.567918)

hexsha: a7cab894219f6b6cb8d8b7237bfbc980ae9c6023 | size: 1,089 | ext: py | lang: Python
max_stars: openproblems/data/human_blood_nestorowa2016.py | bendemeo/SingleCellOpenProblems | head e4c009f8c232bdae4c9e20b8e435d0fe474b3daf | licenses ["MIT"] | count 134 | events 2020-08-19T07:35:56.000Z to 2021-05-19T11:37:50.000Z
max_issues: openproblems/data/human_blood_nestorowa2016.py | bendemeo/SingleCellOpenProblems | head e4c009f8c232bdae4c9e20b8e435d0fe474b3daf | licenses ["MIT"] | count 175 | events 2020-08-17T15:26:06.000Z to 2021-05-14T11:03:46.000Z
max_forks: openproblems/data/human_blood_nestorowa2016.py | LuckyMD/SingleCellOpenProblems | head 0ae39db494557e1dd9f28e59dda765527191eee1 | licenses ["MIT"] | count 46 | events 2020-10-08T21:11:37.000Z to 2021-04-25T07:05:28.000Z
content:
from . import utils
import os
import scanpy as sc
import scprep
import tempfile
URL = "https://ndownloader.figshare.com/files/25555751"
@utils.loader
def load_human_blood_nestorowa2016(test=False):
"""Download Nesterova data from Figshare."""
if test:
# load full data first, cached if available
adata = load_human_blood_nestorowa2016(test=False)
# Subsample data
adata = adata[:, :500].copy()
utils.filter_genes_cells(adata)
sc.pp.subsample(adata, n_obs=500)
# Note: could also use 200-500 HVGs rather than 200 random genes
# Ensure there are no cells or genes with 0 counts
utils.filter_genes_cells(adata)
return adata
else:
with tempfile.TemporaryDirectory() as tempdir:
filepath = os.path.join(tempdir, "human_blood_nestorowa2016.h5ad")
scprep.io.download.download_url(URL, filepath)
adata = sc.read(filepath)
# Ensure there are no cells or genes with 0 counts
utils.filter_genes_cells(adata)
return adata
avg_line_length: 27.225 | max_line_length: 78 | alphanum_fraction: 0.663912 | classes: 0 (score 0) | generators: 0 (score 0) | decorators: 948 (score 0.870523) | async_functions: 0 (score 0) | documentation: 348 (score 0.319559)

hexsha: a7cb29cc32a2319fccf961ffb48796199a5ff0d3 | size: 1,110 | ext: py | lang: Python
max_stars: jskparser/ast/stmt/ifstmt.py | natebragg/java-sketch | head f5ac26f2cc46ae4556f9a61c55afd37f55c961ff | licenses ["MIT"] | count 15 | events 2015-12-15T18:33:50.000Z to 2021-09-29T11:48:54.000Z
max_issues: jskparser/ast/stmt/ifstmt.py | natebragg/java-sketch | head f5ac26f2cc46ae4556f9a61c55afd37f55c961ff | licenses ["MIT"] | count 11 | events 2015-11-16T22:14:58.000Z to 2021-09-23T05:28:40.000Z
max_forks: jskparser/ast/stmt/ifstmt.py | natebragg/java-sketch | head f5ac26f2cc46ae4556f9a61c55afd37f55c961ff | licenses ["MIT"] | count 8 | events 2015-11-16T21:50:08.000Z to 2021-03-23T15:15:34.000Z
content:
#!/usr/bin/env python
from .statement import Statement
from . import _import
class IfStmt(Statement):
def __init__(self, kwargs={}):
super(IfStmt, self).__init__(kwargs)
locs = _import()
# Expression condition;
con = kwargs.get(u'condition', {})
self._condition = locs[con[u'@t']](con) if con else None
# Statement thenStmt;
then = kwargs.get(u'thenStmt', {})
self._thenStmt = locs[then[u'@t']](then) if then else None
# Statement elseStmt;
el = kwargs.get(u'elseStmt', {})
self._elseStmt = locs[el[u'@t']](el) if el else None
self.add_as_parent([self.condition, self.thenStmt, self.elseStmt])
@property
def condition(self): return self._condition
@condition.setter
def condition(self, v): self._condition = v
@property
def thenStmt(self): return self._thenStmt
@thenStmt.setter
def thenStmt(self, v): self._thenStmt = v
@property
def elseStmt(self): return self._elseStmt
@elseStmt.setter
def elseStmt(self, v): self._elseStmt = v
avg_line_length: 27.75 | max_line_length: 74 | alphanum_fraction: 0.621622 | classes: 1,029 (score 0.927027) | generators: 0 (score 0) | decorators: 356 (score 0.320721) | async_functions: 0 (score 0) | documentation: 135 (score 0.121622)

hexsha: a7cb442e6c3a091d70b52f85f03d36c21282e2fd | size: 8,501 | ext: py | lang: Python
max_stars: stdpages/profiling.py | nhartland/dashengine | head 7e9f68f0fb1c447fa438eb18b2430cd9095ab17a | licenses ["MIT"] | count 12 | events 2020-02-06T02:55:32.000Z to 2021-11-08T13:50:42.000Z
max_issues: stdpages/profiling.py | nhartland/dashengine | head 7e9f68f0fb1c447fa438eb18b2430cd9095ab17a | licenses ["MIT"] | count 1 | events 2020-01-31T10:20:51.000Z to 2020-01-31T10:20:51.000Z
max_forks: stdpages/profiling.py | nhartland/dashengine | head 7e9f68f0fb1c447fa438eb18b2430cd9095ab17a | licenses ["MIT"] | count 2 | events 2020-06-19T01:35:11.000Z to 2021-06-07T09:01:18.000Z
content:
""" Page for the monitoring of query performance characteristics. """
import json
# Plotly
import plotly.graph_objs as go
# Dash
import dash_table as dt
import dash_core_components as dcc
import dash_html_components as html
from dash.dependencies import Input, Output
# DashEngine
from dashengine.dashapp import dashapp
import dashengine.bigquery as bigquery
# Route for profiling page
ROUTE = "/profile"
# Name used when linking, for example in the navigation bar
LINKNAME = "Profiling"
# Helper functions #################################################
def __fetch_query_from_uuid(uuid: str) -> bigquery.BigQueryResult:
""" Fetches a cached BigQuery result from its UUID.
Args:
uuid (str): The UUID of the query to be retrieved.
Returns:
(BigQueryResult): The corresponding BigQuery result object.
"""
# Fetch cached queries
queries = bigquery.fetch_cached_queries()
selected_query = None
for query in queries:
if query.uuid == uuid:
# Select a query UUID
selected_query = query
if selected_query is None:
raise RuntimeError(f"Cannot find query with UUID {uuid}")
return selected_query
def __index_query(query, key: str) -> float:
""" Returns a property of the query class, keyed by a string.
The key must be one of:
['Memory', 'Duration', 'Bytes Processed', 'Bytes Billed']
Args:
query (BigQueryResult): A BigQuery result class
key (string): A key of the BigQueryResult object
Returns:
(float): The value in `query` corresponding to the key.
"""
ResultDict = {
"Memory": query.memory_usage(),
"Duration": query.duration,
"Bytes Processed": query.bytes_processed,
"Bytes Billed": query.bytes_billed,
}
return ResultDict[key]
def __normalising_constants(cached_queries: list):
""" Computes totals over the full set of cached queries to normalise the summary chart. """
totals = {
"Memory": 0.0,
"Duration": 0.0,
"Bytes Processed": 0.0,
"Bytes Billed": 0.0,
}
for query in cached_queries:
for key in totals:
totals[key] += __index_query(query, key)
# Avoid dividing by zero
for key in totals:
if totals[key] == 0:
totals[key] = 1
return totals
# Dash callbacks #################################################
@dashapp.callback(
Output("query-profile-summary-chart", "figure"),
[Input("profile-trigger", "children")],
)
def _query_profile_summary_chart(_) -> go.Figure:
""" Generates a set of bar charts for a single query. """
cached_queries = bigquery.fetch_cached_queries()
yvals = ["Memory", "Duration", "Bytes Processed", "Bytes Billed"]
totals = __normalising_constants(cached_queries)
def __bar(query):
""" Generate a single bar. """
return go.Bar(
y=yvals,
x=[100 * __index_query(query, key) / totals[key] for key in yvals],
name=query.uuid,
orientation="h",
)
bar_charts = [__bar(query) for query in cached_queries]
layout = go.Layout(barmode="stack")
return go.Figure(data=bar_charts, layout=layout)
@dashapp.callback(
Output("query-profile-table-div", "children"),
[Input("profile-trigger", "children")],
)
def _query_profile_table(_) -> dt.DataTable:
""" Generates a table profiling all cached queries. """
cached_queries = bigquery.fetch_cached_queries()
# Setup all data for the table
data = [
{
"ID": query.source.query_id,
"UUID": query.uuid,
"Parameters": json.dumps(query.parameters, default=str),
"Duration": query.duration,
"Memory Usage": query.memory_usage(),
"Bytes Processed": query.bytes_processed,
"Bytes Billed": query.bytes_billed,
}
for query in cached_queries
]
hidden_columns = ["Parameters"]
# Build list of columns from the data keys
columns = [{"name": i, "id": i} for i in data[0]]
# Build datatable
return dt.DataTable(
id="query-profile-table",
columns=columns,
data=data,
hidden_columns=hidden_columns,
sort_action="native",
sort_mode="single",
row_selectable="single",
# Used to hide the toggle button generated by using hidden_columns
css=[{"selector": ".show-hide", "rule": "display: none"}],
style_header={"backgroundColor": "white", "fontWeight": "bold"},
style_cell_conditional=[
{"if": {"column_id": c}, "textAlign": "left"} for c in ["ID", "UUID"]
],
style_as_list_view=True,
)
def _query_profile_body(selected_query) -> dcc.Markdown:
""" Returns the formatted SQL body of the selected query. """
# Build query body in markdown code block
query_code = " ``` \n " + selected_query.source.body + " \n ```"
return dcc.Markdown(query_code)
def _query_profile_parameters(selected_query):
""" Returns the parameters of the selected query. """
parameters = selected_query.parameters
if len(parameters) == 0:
return html.H6("No parameters")
# Build a table consisting of query parameters
columns = [
{"name": "Parameter", "id": "Parameter"},
{"name": "Value", "id": "Value"},
]
parameter_data = [
{"Parameter": key, "Value": str(value)} for key, value in parameters.items()
]
return dt.DataTable(
id="query-profile-parameter-table",
columns=columns,
data=parameter_data,
style_table={"margin-bottom": "30px"},
style_cell={"minWidth": "0px", "maxWidth": "180px", "whiteSpace": "normal"},
)
def _query_profile_preview(selected_query) -> dt.DataTable:
""" Returns the formatted SQL body of the selected query. """
df = selected_query.result.head()
return dt.DataTable(
id="query-profile-preview-table",
columns=[{"name": i, "id": i} for i in df.columns],
style_table={"margin-bottom": "30px"},
data=df.to_dict("records"),
)
@dashapp.callback(
Output("query-profile-details", "children"),
[
Input("query-profile-table", "derived_virtual_data"),
Input("query-profile-table", "derived_virtual_selected_rows"),
],
)
def _query_profile_details(rows, selected_row_indices) -> list:
""" Returns the details (SQL and parameters) of the selected query. """
if rows is None or len(selected_row_indices) != 1:
return [
html.H5(
"Select a query to view details",
style={"textAlign": "center", "margin-top": "30px"},
)
]
# Determine selected UUID
selected_queryID = rows[selected_row_indices[0]]["ID"]
selected_params = json.loads(rows[selected_row_indices[0]]["Parameters"])
selected_query = bigquery.run_query(selected_queryID, selected_params)
return [
html.H3("Query Details", style={"textAlign": "center", "margin-top": "30px"}),
html.H4("Query Body", style={"textAlign": "left"}),
html.Div(children=_query_profile_body(selected_query)),
html.H4("Query Parameters", style={"textAlign": "left"}),
html.Div(children=_query_profile_parameters(selected_query)),
html.H4("Query Preview", style={"textAlign": "left"}),
html.Div(children=_query_profile_preview(selected_query)),
]
# Layout #################################################################
def layout() -> list:
""" Generates the layout for the query profiling page. """
# No queries cached
if bigquery.fetch_num_cached_queries() == 0:
return html.H4(
"No queries in cache", style={"textAlign": "center", "margin-top": "30px"}
)
return [
html.H3(
"Cached Query Profiler", style={"textAlign": "center", "margin-top": "30px"}
),
dcc.Loading(
id="query-profile-loading",
children=[
html.Div(id="profile-trigger", children=[], style={"display": "none"}),
dcc.Graph(id="query-profile-summary-chart"),
],
type="graph",
fullscreen=True,
),
html.Div(id="query-profile-table-div"),
dcc.Loading(
id="query-details-loading", children=[html.Div(id="query-profile-details")]
),
]
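# A standalone, hedged sketch of the percentage normalisation behind the stacked
# summary chart above: each cached query's metric is plotted as its share of the
# total across all cached queries (the figures below are made up).
totals = {"Memory": 200.0, "Duration": 10.0, "Bytes Processed": 4096.0, "Bytes Billed": 4096.0}
one_query = {"Memory": 50.0, "Duration": 2.5, "Bytes Processed": 1024.0, "Bytes Billed": 1024.0}
bar_lengths = {key: 100 * one_query[key] / totals[key] for key in totals}
print(bar_lengths)  # every metric comes out as 25.0 - this query is a quarter of the load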
| 33.337255
| 95
| 0.606399
| 0
| 0
| 0
| 0
| 3,631
| 0.427126
| 0
| 0
| 3,432
| 0.403717
|
a7cd3abce5d928c3da35821e7b78b76d44e1ec29
| 2,465
|
py
|
Python
|
trial_inputs_pb2.py
|
adeandrade/bayesian-optimizer
|
30427943d69130179f7ccb32f63a08a1c57462f8
|
[
"Apache-2.0"
] | null | null | null |
trial_inputs_pb2.py
|
adeandrade/bayesian-optimizer
|
30427943d69130179f7ccb32f63a08a1c57462f8
|
[
"Apache-2.0"
] | null | null | null |
trial_inputs_pb2.py
|
adeandrade/bayesian-optimizer
|
30427943d69130179f7ccb32f63a08a1c57462f8
|
[
"Apache-2.0"
] | null | null | null |
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: trial_inputs.proto
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
DESCRIPTOR = _descriptor.FileDescriptor(
name='trial_inputs.proto',
package='com.wattpad.bayesian_optimizer',
syntax='proto3',
serialized_options=None,
serialized_pb=_b('\n\x12trial_inputs.proto\x12\x1e\x63om.wattpad.bayesian_optimizer\".\n\x0bTrialInputs\x12\x0f\n\x07version\x18\x01 \x01(\t\x12\x0e\n\x06inputs\x18\x02 \x03(\x01\x62\x06proto3')
)
_TRIALINPUTS = _descriptor.Descriptor(
name='TrialInputs',
full_name='com.wattpad.bayesian_optimizer.TrialInputs',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='version', full_name='com.wattpad.bayesian_optimizer.TrialInputs.version', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='inputs', full_name='com.wattpad.bayesian_optimizer.TrialInputs.inputs', index=1,
number=2, type=1, cpp_type=5, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=54,
serialized_end=100,
)
DESCRIPTOR.message_types_by_name['TrialInputs'] = _TRIALINPUTS
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
TrialInputs = _reflection.GeneratedProtocolMessageType('TrialInputs', (_message.Message,), dict(
DESCRIPTOR = _TRIALINPUTS,
__module__ = 'trial_inputs_pb2'
# @@protoc_insertion_point(class_scope:com.wattpad.bayesian_optimizer.TrialInputs)
))
_sym_db.RegisterMessage(TrialInputs)
# @@protoc_insertion_point(module_scope)
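# Hedged round-trip sketch for the generated TrialInputs message above (assumes the
# protobuf runtime is installed; the values are illustrative, not from the project).
msg = TrialInputs(version="v1", inputs=[0.25, 0.5, 0.75])
payload = msg.SerializeToString()                  # wire-format bytes
decoded = TrialInputs.FromString(payload)
print(decoded.version, list(decoded.inputs))       # v1 [0.25, 0.5, 0.75]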
| 32.012987
| 196
| 0.76146
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 725
| 0.294118
|
a7cdd2df8475207fb1659d9d8f61d9efb7105a47
| 22,348
|
py
|
Python
|
neutron_plugin_contrail/plugins/opencontrail/vnc_client/router_res_handler.py
|
alexelshamouty/tf-neutron-plugin
|
3effc5e80f3fa0d8d0252d5f994a36386b987f7f
|
[
"Apache-2.0"
] | 3
|
2021-09-07T05:02:24.000Z
|
2022-02-11T04:25:43.000Z
|
neutron_plugin_contrail/plugins/opencontrail/vnc_client/router_res_handler.py
|
alexelshamouty/tf-neutron-plugin
|
3effc5e80f3fa0d8d0252d5f994a36386b987f7f
|
[
"Apache-2.0"
] | 1
|
2021-09-27T08:05:08.000Z
|
2021-09-27T08:05:08.000Z
|
neutron_plugin_contrail/plugins/opencontrail/vnc_client/router_res_handler.py
|
alexelshamouty/tf-neutron-plugin
|
3effc5e80f3fa0d8d0252d5f994a36386b987f7f
|
[
"Apache-2.0"
] | 5
|
2020-07-14T07:52:05.000Z
|
2022-03-24T15:08:02.000Z
|
# Copyright 2015. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import netaddr
try:
from neutron_lib import constants
except ImportError:
from neutron.plugins.common import constants
from neutron_plugin_contrail.common.utils import get_tenant_id
from vnc_api import vnc_api
from vnc_api import exceptions as vnc_exc
from neutron_plugin_contrail.plugins.opencontrail.vnc_client.contrail_res_handler import (
ResourceCreateHandler,
ResourceDeleteHandler,
ResourceGetHandler,
ResourceUpdateHandler,
)
class LogicalRouterMixin(object):
@staticmethod
def _get_external_gateway_info(rtr_obj):
vn_refs = rtr_obj.get_virtual_network_refs()
if vn_refs:
return vn_refs[0]['uuid']
def _neutron_dict_to_rtr_obj(self, router_q, rtr_obj):
rtr_name = router_q.get('name')
id_perms = rtr_obj.get_id_perms()
if 'admin_state_up' in router_q:
id_perms.enable = router_q['admin_state_up']
rtr_obj.set_id_perms(id_perms)
if rtr_name:
rtr_obj.display_name = rtr_name
return rtr_obj
def _rtr_obj_to_neutron_dict(self, rtr_obj,
contrail_extensions_enabled=True,
fields=None):
rtr_q_dict = {}
rtr_q_dict['id'] = rtr_obj.uuid
if not rtr_obj.display_name:
rtr_q_dict['name'] = rtr_obj.get_fq_name()[-1]
else:
rtr_q_dict['name'] = rtr_obj.display_name
rtr_q_dict['tenant_id'] = self._project_id_vnc_to_neutron(
rtr_obj.parent_uuid)
rtr_q_dict['project_id'] = rtr_q_dict['tenant_id']
rtr_q_dict['admin_state_up'] = rtr_obj.get_id_perms().enable
rtr_q_dict['shared'] = False
rtr_q_dict['status'] = constants.NET_STATUS_ACTIVE
rtr_q_dict['gw_port_id'] = None
ext_net_uuid = self._get_external_gateway_info(rtr_obj)
if not ext_net_uuid:
rtr_q_dict['external_gateway_info'] = None
else:
rtr_q_dict['external_gateway_info'] = {'network_id': ext_net_uuid,
'enable_snat': True}
if contrail_extensions_enabled:
rtr_q_dict.update({'fq_name': rtr_obj.get_fq_name()})
if fields:
rtr_q_dict = self._filter_res_dict(rtr_q_dict, fields)
return rtr_q_dict
def _router_update_gateway(self, router_q, rtr_obj):
ext_gateway = router_q.get('external_gateway_info')
old_ext_gateway = self._get_external_gateway_info(rtr_obj)
if ext_gateway or old_ext_gateway:
network_id = None
if ext_gateway:
network_id = ext_gateway.get('network_id')
if network_id:
if old_ext_gateway and network_id == old_ext_gateway:
return
try:
vn_obj = self._vnc_lib.virtual_network_read(id=network_id)
if not vn_obj.get_router_external():
self._raise_contrail_exception(
'BadRequest', resource='router',
msg="Network %s is not a valid "
"external network" % network_id)
except vnc_exc.NoIdError:
self._raise_contrail_exception('NetworkNotFound',
net_id=network_id)
self._router_set_external_gateway(rtr_obj, vn_obj)
else:
self._router_clear_external_gateway(rtr_obj)
def _router_set_external_gateway(self, router_obj, ext_net_obj):
router_obj.set_virtual_network(ext_net_obj)
self._vnc_lib.logical_router_update(router_obj)
def _router_clear_external_gateway(self, router_obj):
router_obj.set_virtual_network_list([])
self._vnc_lib.logical_router_update(router_obj)
class LogicalRouterCreateHandler(ResourceCreateHandler, LogicalRouterMixin):
resource_create_method = 'logical_router_create'
def _create_router(self, router_q):
project_id = self._project_id_neutron_to_vnc(router_q['tenant_id'])
project_obj = self._project_read(proj_id=project_id)
id_perms = vnc_api.IdPermsType(enable=True)
return vnc_api.LogicalRouter(router_q.get('name'), project_obj,
id_perms=id_perms)
def resource_create(self, context, router_q):
rtr_obj = self._neutron_dict_to_rtr_obj(
router_q, self._create_router(router_q))
rtr_uuid = self._resource_create(rtr_obj)
contrail_extensions_enabled = self._kwargs.get(
'contrail_extensions_enabled', False)
# read it back to update id perms
rtr_obj = self._resource_get(id=rtr_uuid)
self._router_update_gateway(router_q, rtr_obj)
return self._rtr_obj_to_neutron_dict(
rtr_obj, contrail_extensions_enabled=contrail_extensions_enabled)
class LogicalRouterDeleteHandler(ResourceDeleteHandler, LogicalRouterMixin):
resource_delete_method = 'logical_router_delete'
def resource_delete(self, context, rtr_id):
try:
rtr_obj = self._resource_get(id=rtr_id)
if rtr_obj.get_virtual_machine_interface_refs():
self._raise_contrail_exception('RouterInUse',
router_id=rtr_id)
except vnc_exc.NoIdError:
self._raise_contrail_exception('RouterNotFound',
router_id=rtr_id)
self._router_clear_external_gateway(rtr_obj)
try:
self._resource_delete(id=rtr_id)
except vnc_exc.RefsExistError:
self._raise_contrail_exception('RouterInUse', router_id=rtr_id)
class LogicalRouterUpdateHandler(ResourceUpdateHandler, LogicalRouterMixin):
resource_update_method = 'logical_router_update'
def _get_rtr_obj(self, router_q):
return self._resource_get(id=router_q.get('id'))
def resource_update(self, context, rtr_id, router_q):
router_q['id'] = rtr_id
rtr_obj = self._neutron_dict_to_rtr_obj(
router_q, self._get_rtr_obj(router_q))
self._resource_update(rtr_obj)
self._router_update_gateway(router_q, rtr_obj)
return self._rtr_obj_to_neutron_dict(rtr_obj)
class LogicalRouterGetHandler(ResourceGetHandler, LogicalRouterMixin):
resource_get_method = 'logical_router_read'
resource_list_method = 'logical_routers_list'
def _router_list_project(self, project_id=None, detail=False):
resp = self._resource_list(parent_id=project_id, detail=detail)
if detail:
return resp
return resp['logical-routers']
def _get_router_list_for_ids(self, rtr_ids, extensions_enabled=True):
ret_list = []
for rtr_id in rtr_ids or []:
try:
rtr_obj = self._resource_get(id=rtr_id)
rtr_info = self._rtr_obj_to_neutron_dict(
rtr_obj,
contrail_extensions_enabled=extensions_enabled)
ret_list.append(rtr_info)
except vnc_exc.NoIdError:
pass
return ret_list
def _get_router_list_for_project(self, project_id=None):
project_rtrs = self._router_list_project(project_id=project_id)
rtr_uuids = [rtr['uuid'] for rtr in project_rtrs]
return self._get_router_list_for_ids(rtr_uuids)
def _fip_pool_ref_routers(self, project_id):
"""TODO."""
return []
def get_vmi_obj_router_id(self, vmi_obj, project_id=None):
from neutron_plugin_contrail.plugins.opencontrail.vnc_client.vmi_res_handler import VMInterfaceGetHandler
vmi_get_handler = VMInterfaceGetHandler(self._vnc_lib)
port_net_id = vmi_obj.get_virtual_network_refs()[0]['uuid']
# find router_id from port
router_list = self._router_list_project(project_id=project_id,
detail=True)
for router_obj in router_list or []:
for vmi in (router_obj.get_virtual_machine_interface_refs() or []):
vmi_obj = vmi_get_handler.get_vmi_obj(vmi['uuid'])
if (vmi_obj.get_virtual_network_refs()[0]['uuid'] ==
port_net_id):
return router_obj.uuid
def resource_get(self, context, rtr_uuid, fields=None):
try:
rtr_obj = self._resource_get(id=rtr_uuid)
except vnc_exc.NoIdError:
self._raise_contrail_exception('RouterNotFound',
router_id=rtr_uuid)
return self._rtr_obj_to_neutron_dict(rtr_obj, fields=fields)
def resource_list(self, context, filters, fields=None):
extensions_enabled = self._kwargs.get(
'contrail_extensions_enabled', False)
ret_list = []
if filters and 'shared' in filters:
if filters['shared'][0]:
# no support for shared routers
return ret_list
if not filters:
if context['is_admin']:
return self._get_router_list_for_project()
else:
proj_id = self._project_id_neutron_to_vnc(
get_tenant_id(context))
return self._get_router_list_for_project(project_id=proj_id)
all_rtrs = [] # all n/ws in all projects
if 'id' in filters:
return self._get_router_list_for_ids(filters['id'],
extensions_enabled)
if 'tenant_id' in filters:
# read all routers in project, and prune below
project_ids = self._validate_project_ids(
context, project_ids=filters['tenant_id'])
for p_id in project_ids:
if 'router:external' in filters:
all_rtrs.append(self._fip_pool_ref_routers(p_id))
else:
project_rtrs = self._router_list_project(p_id)
all_rtrs.append(project_rtrs)
else:
# read all routers in all projects
project_rtrs = self._router_list_project()
all_rtrs.append(project_rtrs)
# prune phase
for project_rtrs in all_rtrs:
for proj_rtr in project_rtrs:
proj_rtr_id = proj_rtr['uuid']
if not self._filters_is_present(filters, 'id', proj_rtr_id):
continue
proj_rtr_fq_name = str(proj_rtr['fq_name'])
if not self._filters_is_present(filters, 'fq_name',
proj_rtr_fq_name):
continue
try:
rtr_obj = self._resource_get(id=proj_rtr['uuid'])
if not self._filters_is_present(
filters, 'name',
rtr_obj.get_display_name() or rtr_obj.name):
continue
rtr_info = self._rtr_obj_to_neutron_dict(
rtr_obj,
contrail_extensions_enabled=extensions_enabled,
fields=fields)
ret_list.append(rtr_info)
except vnc_exc.NoIdError:
continue
return ret_list
def resource_count(self, context, filters=None):
count = self._resource_count_optimized(filters)
if count is not None:
return count
        rtrs_info = self.resource_list(context, filters=filters)
return len(rtrs_info)
class LogicalRouterInterfaceHandler(ResourceGetHandler,
ResourceUpdateHandler,
LogicalRouterMixin):
resource_get_method = 'logical_router_read'
resource_list_method = 'logical_routers_list'
resource_update_method = 'logical_router_update'
def __init__(self, vnc_lib):
super(LogicalRouterInterfaceHandler, self).__init__(vnc_lib)
from neutron_plugin_contrail.plugins.opencontrail.vnc_client.subnet_res_handler import SubnetHandler
from neutron_plugin_contrail.plugins.opencontrail.vnc_client.vmi_res_handler import VMInterfaceHandler
self._subnet_handler = SubnetHandler(self._vnc_lib)
self._vmi_handler = VMInterfaceHandler(self._vnc_lib)
def _get_subnet_cidr(self, subnet_id, subnet_dict):
for subnet in subnet_dict:
if subnet['id'] == subnet_id:
return subnet['cidr']
def _check_for_dup_router_subnet(self, router_obj, subnet_id, subnet_cidr):
from neutron_plugin_contrail.plugins.opencontrail.vnc_client.subnet_res_handler import SubnetHandler
try:
router_vmi_objs = []
if router_obj.get_virtual_machine_interface_refs():
vmis = [x['uuid']
for x in router_obj.virtual_machine_interface_refs]
router_vmi_objs = self._vnc_lib.virtual_machine_interfaces_list(
obj_uuids=vmis, detail=True,
fields=['instance_ip_back_refs'])
# It's possible router ports are on the same network, but
# different subnets.
new_ipnet = netaddr.IPNetwork(subnet_cidr)
port_req_memo = {'virtual-machines': {},
'instance-ips': {},
'subnets': {}}
for vmi_obj in router_vmi_objs:
net_id = self._vmi_handler.get_vmi_net_id(vmi_obj)
vn_obj = self._vnc_lib.virtual_network_read(id=net_id)
fixed_ips = self._vmi_handler.get_vmi_ip_dict(vmi_obj, vn_obj,
port_req_memo)
vn_subnets = (SubnetHandler.get_vn_subnets(vn_obj))
for ip in fixed_ips:
if ip['subnet_id'] == subnet_id:
msg = ("Router %s already has a port on subnet %s"
% (router_obj.uuid, subnet_id))
self._raise_contrail_exception(
'BadRequest', resource='router', msg=msg)
sub_id = ip['subnet_id']
cidr = self._get_subnet_cidr(sub_id, vn_subnets)
ipnet = netaddr.IPNetwork(cidr)
match1 = netaddr.all_matching_cidrs(new_ipnet, [cidr])
match2 = netaddr.all_matching_cidrs(ipnet, [subnet_cidr])
if match1 or match2:
data = {'subnet_cidr': subnet_cidr,
'subnet_id': subnet_id,
'cidr': cidr,
'sub_id': sub_id}
msg = (("Cidr %(subnet_cidr)s of subnet "
"%(subnet_id)s overlaps with cidr %(cidr)s "
"of subnet %(sub_id)s") % data)
self._raise_contrail_exception(
'BadRequest', resource='router', msg=msg)
except vnc_exc.NoIdError:
pass
def _get_router_iface_vnc_info(self, context, router_obj, port_id=None,
subnet_id=None):
if port_id:
vmi_obj, vn_obj, rtr_uuid, fixed_ips = self._get_vmi_info(port_id)
net_id = vn_obj.uuid
if rtr_uuid:
self._raise_contrail_exception('PortInUse',
net_id=net_id,
port_id=port_id,
device_id=rtr_uuid)
if len(fixed_ips) != 1:
self._raise_contrail_exception(
'BadRequest', resource='router',
msg='Router port must have exactly one fixed IP')
subnet_id = fixed_ips[0]['subnet_id']
subnet_vnc = self._subnet_handler._subnet_read(subnet_id=subnet_id)
if not subnet_vnc.default_gateway:
self._raise_contrail_exception(
'BadRequest', resource='router',
msg='Subnet for router interface must have a gateway IP')
subnet_cidr = '%s/%s' % (subnet_vnc.subnet.get_ip_prefix(),
subnet_vnc.subnet.get_ip_prefix_len())
self._check_for_dup_router_subnet(router_obj, subnet_id, subnet_cidr)
if not port_id:
vn_obj = self._subnet_handler.get_vn_obj_for_subnet_id(subnet_id)
fixed_ip = {'ip_address': subnet_vnc.default_gateway,
'subnet_id': subnet_id}
port_q = {
'tenant_id': self._project_id_vnc_to_neutron(
vn_obj.parent_uuid),
'network_id': vn_obj.uuid,
'fixed_ips': [fixed_ip],
'admin_state_up': True,
'device_id': router_obj.uuid,
'device_owner': constants.DEVICE_OWNER_ROUTER_INTF,
'name': ''}
port = self._vmi_handler.resource_create(context=context,
port_q=port_q)
vmi_obj = self._vmi_handler.get_vmi_obj(port['id'])
return vmi_obj, vn_obj, subnet_id
def _get_vmi_info(self, port_id):
vmi_obj = self._vmi_handler.get_vmi_obj(
port_id, fields=['logical_router_back_refs',
'instance_ip_back_refs'])
net_id = self._vmi_handler.get_vmi_net_id(vmi_obj)
port_req_memo = {'virtual-machines': {},
'instance-ips': {},
'subnets': {}}
router_refs = getattr(vmi_obj, 'logical_router_back_refs', None)
if router_refs:
rtr_uuid = router_refs[0]['uuid']
else:
vm_ref = vmi_obj.get_virtual_machine_refs()
if vm_ref:
rtr_uuid = self._vmi_handler.get_port_gw_id(vm_ref[0],
port_req_memo)
else:
rtr_uuid = None
vn_obj = self._vnc_lib.virtual_network_read(id=net_id)
fixed_ips = self._vmi_handler.get_vmi_ip_dict(vmi_obj, vn_obj,
port_req_memo)
return vmi_obj, vn_obj, rtr_uuid, fixed_ips
def add_router_interface(self, context, router_id, port_id=None,
subnet_id=None):
router_obj = self._resource_get(id=router_id)
if not port_id and not subnet_id:
self._raise_contrail_exception(
'BadRequest', resource='router',
msg='Either port or subnet must be specified')
vmi_obj, vn_obj, subnet_id = self._get_router_iface_vnc_info(
context, router_obj, port_id=port_id, subnet_id=subnet_id)
vmi_obj.set_virtual_machine_interface_device_owner(
constants.DEVICE_OWNER_ROUTER_INTF)
self._vnc_lib.virtual_machine_interface_update(vmi_obj)
router_obj.add_virtual_machine_interface(vmi_obj)
self._resource_update(router_obj)
info = {
'id': router_id,
'tenant_id': self._project_id_vnc_to_neutron(vn_obj.parent_uuid),
'port_id': vmi_obj.uuid,
'subnet_id': subnet_id}
return info
def remove_router_interface(self, context, router_id, port_id=None,
subnet_id=None):
router_obj = self._resource_get(id=router_id)
tenant_id = None
vmi_obj = None
if port_id:
vmi_obj, vn_obj, rtr_uuid, fixed_ips = self._get_vmi_info(port_id)
if not rtr_uuid:
self._raise_contrail_exception('RouterInterfaceNotFound',
router_id=router_id,
port_id=port_id)
port_subnet_id = fixed_ips[0]['subnet_id']
if subnet_id and (port_subnet_id != subnet_id):
self._raise_contrail_exception('SubnetMismatchForPort',
port_id=port_id,
subnet_id=subnet_id)
subnet_id = port_subnet_id
elif subnet_id:
vn_obj = self._subnet_handler.get_vn_obj_for_subnet_id(subnet_id)
for intf in router_obj.get_virtual_machine_interface_refs() or []:
port_id = intf['uuid']
_, _, _, fixed_ips = self._get_vmi_info(port_id)
if subnet_id == fixed_ips[0]['subnet_id']:
break
else:
msg = ("Subnet %s not connected to router %s "
% (router_id, subnet_id))
self._raise_contrail_exception('BadRequest',
resource='router', msg=msg)
tenant_id = self._project_id_vnc_to_neutron(vn_obj.parent_uuid)
if not vmi_obj:
vmi_obj = self._vnc_lib.virtual_machine_interface_read(id=port_id)
router_obj.del_virtual_machine_interface(vmi_obj)
self._vnc_lib.logical_router_update(router_obj)
self._vmi_handler.resource_delete(context, port_id=port_id)
info = {'id': router_id,
'tenant_id': tenant_id,
'port_id': port_id,
'subnet_id': subnet_id}
return info
class LogicalRouterHandler(LogicalRouterGetHandler,
LogicalRouterCreateHandler,
LogicalRouterDeleteHandler,
LogicalRouterUpdateHandler):
pass
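# A standalone sketch of the subnet-overlap test used in _check_for_dup_router_subnet
# above: netaddr.all_matching_cidrs() is evaluated in both directions, so the new
# subnet is rejected when either CIDR contains the other (addresses are illustrative).
import netaddr

new_cidr = '10.0.1.0/24'
existing_cidr = '10.0.0.0/16'
match1 = netaddr.all_matching_cidrs(netaddr.IPNetwork(new_cidr), [existing_cidr])
match2 = netaddr.all_matching_cidrs(netaddr.IPNetwork(existing_cidr), [new_cidr])
print(bool(match1 or match2))  # True: the /24 falls inside the /16, so they overlap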
| 43.05973
| 113
| 0.593297
| 21,253
| 0.951002
| 0
| 0
| 169
| 0.007562
| 0
| 0
| 2,692
| 0.120458
|
a7ce8d54807f93e96a382235bdf7d3f14bebe67b
| 467
|
py
|
Python
|
src/screenlogger.py
|
swbooking/RobotMaria
|
2553358629a870b10458564524315ff4cfda0bd1
|
[
"MIT"
] | null | null | null |
src/screenlogger.py
|
swbooking/RobotMaria
|
2553358629a870b10458564524315ff4cfda0bd1
|
[
"MIT"
] | null | null | null |
src/screenlogger.py
|
swbooking/RobotMaria
|
2553358629a870b10458564524315ff4cfda0bd1
|
[
"MIT"
] | null | null | null |
class ScreenLogger:
def __init__(self, loghandler=None, verbose = True):
self.LogMessage = None
self.LogHandler = loghandler
self.Verbose = verbose
return
def Log(self, message):
if self.LogMessage != message:
self.LogMessage = message
            if self.LogHandler is not None:
                self.LogHandler(self.LogMessage)
            if self.Verbose:
                print(self.LogMessage)
return
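# Hedged usage sketch for ScreenLogger above; the handler and messages are made up.
def file_handler(message):
    with open('robot.log', 'a') as fp:
        fp.write(message + '\n')

logger = ScreenLogger(loghandler=file_handler, verbose=True)
logger.Log('booking started')
logger.Log('booking started')    # identical consecutive message: handler is not called again
logger.Log('booking finished')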
| 31.133333
| 56
| 0.578158
| 466
| 0.997859
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
a7d11260063260bb345e5b5925deed0ee559e5c2
| 725
|
py
|
Python
|
ffmpeg-3.2.5/tools/zmqshell.py
|
huyu0415/FFmpeg
|
7a3f75791cb3255805bf17126d4074a328f46c8c
|
[
"Apache-2.0"
] | 3,645
|
2016-08-25T09:31:17.000Z
|
2022-03-25T06:28:34.000Z
|
ffmpeg-3.2.5/tools/zmqshell.py
|
huyu0415/FFmpeg
|
7a3f75791cb3255805bf17126d4074a328f46c8c
|
[
"Apache-2.0"
] | 395
|
2020-04-18T08:22:18.000Z
|
2021-12-08T13:04:49.000Z
|
ffmpeg-3.2.5/tools/zmqshell.py
|
huyu0415/FFmpeg
|
7a3f75791cb3255805bf17126d4074a328f46c8c
|
[
"Apache-2.0"
] | 764
|
2016-08-26T09:19:00.000Z
|
2022-03-22T12:07:16.000Z
|
#!/usr/bin/env python2
import sys, zmq, cmd
class LavfiCmd(cmd.Cmd):
prompt = 'lavfi> '
def __init__(self, bind_address):
context = zmq.Context()
self.requester = context.socket(zmq.REQ)
self.requester.connect(bind_address)
cmd.Cmd.__init__(self)
def onecmd(self, cmd):
if cmd == 'EOF':
sys.exit(0)
print 'Sending command:[%s]' % cmd
self.requester.send(cmd)
message = self.requester.recv()
print 'Received reply:[%s]' % message
try:
bind_address = sys.argv[1] if len(sys.argv) > 1 else "tcp://localhost:5555"
LavfiCmd(bind_address).cmdloop('FFmpeg libavfilter interactive shell')
except KeyboardInterrupt:
pass
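# A minimal, hedged sketch of the reply side this shell talks to. In practice that
# role is played by ffmpeg's zmq/azmq filters; this toy responder only illustrates
# the REQ/REP exchange and its reply text is made up.
import zmq

context = zmq.Context()
responder = context.socket(zmq.REP)
responder.bind("tcp://*:5555")
command = responder.recv()
print('Got command:[%s]' % command)
responder.send(b'OK')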
| 26.851852
| 79
| 0.627586
| 482
| 0.664828
| 0
| 0
| 0
| 0
| 0
| 0
| 139
| 0.191724
|
a7d18ac67c2f08b3162719b80096d5cd8f93412f
| 984
|
py
|
Python
|
minidoc/minidoc.py
|
ihgazni2/minidoc
|
b2859069bad5e718692b57d5498389473c66bd2e
|
[
"MIT"
] | null | null | null |
minidoc/minidoc.py
|
ihgazni2/minidoc
|
b2859069bad5e718692b57d5498389473c66bd2e
|
[
"MIT"
] | null | null | null |
minidoc/minidoc.py
|
ihgazni2/minidoc
|
b2859069bad5e718692b57d5498389473c66bd2e
|
[
"MIT"
] | null | null | null |
from minidoc import svg
from minidoc import tst
from efdir import fs
import shutil
import os
def creat_one_svg(k,v,i=None,**kwargs):
if("dst_dir" in kwargs):
dst_dir = kwargs['dst_dir']
else:
dst_dir = "./images"
screen_size = svg.get_screen_size(v,**kwargs)
kwargs['screen_size'] = screen_size
cmds_str = svg.cmds_arr2str(v,**kwargs)
output_path = svg.creat_svg(cmds_str,**kwargs)
#name = tst.get_svg_name(k) + "." + str(i) + ".svg"
name = tst.get_svg_name(k) + ".svg"
dst = os.path.join(dst_dir,name)
shutil.move(output_path,dst)
return(dst)
#still_frames
#rownums
#colnums
def creat_svgs(kl,vl,**kwargs):
if("dst_dir" in kwargs):
dst_dir = kwargs['dst_dir']
else:
dst_dir = "./images"
fs.mkdir(dst_dir)
arr = []
for i in range(kl.__len__()):
k = kl[i]
v = vl[i]
dst = creat_one_svg(k,v,i=i,**kwargs)
arr.append(dst)
return(arr)
####
####
| 20.081633
| 55
| 0.602642
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 163
| 0.16565
|
a7d2785f99402cef40bc5312be1781d2a6eaf683
| 3,843
|
py
|
Python
|
qinhaifang/src/evalTools/script/convert_label_map_to_geojson.py
|
SpaceNetChallenge/BuildingFootprintDetectors
|
3def3c44b5847c744cd2f3356182892d92496579
|
[
"Apache-2.0"
] | 161
|
2017-02-03T05:33:01.000Z
|
2022-03-31T02:11:21.000Z
|
qinhaifang/src/evalTools/script/convert_label_map_to_geojson.py
|
SpaceNetChallenge/BuildingFootprintDetectors
|
3def3c44b5847c744cd2f3356182892d92496579
|
[
"Apache-2.0"
] | 5
|
2017-02-03T05:51:38.000Z
|
2019-06-18T18:54:00.000Z
|
qinhaifang/src/evalTools/script/convert_label_map_to_geojson.py
|
SpaceNetChallenge/BuildingFootprintDetectors
|
3def3c44b5847c744cd2f3356182892d92496579
|
[
"Apache-2.0"
] | 76
|
2017-03-23T23:15:46.000Z
|
2022-02-10T21:58:18.000Z
|
#!/usr/bin/env python
# encoding=gbk
"""
Convert mask to geojson format
"""
import os
import os.path
import re
import logging
import logging.config
from multiprocessing import Pool
import skimage.io as sk
import numpy as np
import scipy.io as sio
import setting
from spaceNet import geoTools as gT
import spaceNet.image_util as img_util
def process_convert_mask_to_geojson():
"""docstring for process_convert_mask_to_geojson"""
if setting.CONVERT_RES == 1:
label_map_file_list = os.listdir(setting.PREDICT_LABEL_MAP_DIR)
else:
label_map_file_list = os.listdir(setting.LABEL_MAP_DIR_4X)
pool_size = 8
pool = Pool(pool_size)
case = 0
for convert_res in pool.imap_unordered(convert_worker, label_map_file_list):
case += 1
if case % 100 == 0:
logging.info('Convert {}'.format(case))
image_id, msg = convert_res
pool.close()
pool.join()
def convert_worker(mat_file):
"""docstring for convert_worker"""
try:
if setting.CONVERT_RES == 1:
image_id = '_'.join(mat_file.split('.')[0].split('_')[1:])
print('image_id:{}'.format(image_id))
mat_file = os.path.join(setting.PREDICT_LABEL_MAP_DIR, mat_file)
mat = sio.loadmat(mat_file)
#print(mat.keys())
#exit(0)
label_map = mat['inst_img']
building_list = img_util.create_buildinglist_from_label_map(image_id, label_map)
geojson_file = os.path.join(setting.PREDICT_PIXEL_GEO_JSON_DIR, '{}_predict.geojson'.format(image_id))
else:
#print('{}'.format(mat_file))
image_id = '_'.join(mat_file.split('.')[0].split('_')[:])
#print('{}'.format(image_id))
mat_file = os.path.join(setting.LABEL_MAP_DIR_4X, mat_file)
mat = sio.loadmat(mat_file)
label_map = mat['GTinst']['Segmentation'][0][0]
building_list = img_util.create_buildinglist_from_label_map(image_id, label_map)
geojson_file = os.path.join(setting.PIXEL_GEO_JSON_DIR_4X, '{}_Pixel.geojson'.format(image_id))
gT.exporttogeojson(geojson_file, building_list)
return image_id, 'Done'
except Exception as e:
logging.warning('Convert Exception[{}] image_id[{}]'.format(e, image_id))
return image_id, e
def test_geojson():
"""docstring for test_geojson"""
label_map_file_list = os.listdir(setting.PREDICT_LABEL_MAP_DIR)
for mat_file in label_map_file_list:
image_id = '_'.join(mat_file.split('.')[0].split('_')[1:])
predict_geojson_file = os.path.join(setting.PREDICT_PIXEL_GEO_JSON_DIR, '{}_predict.geojson'.format(image_id))
image_name = os.path.join(setting.PIC_3BAND_DIR, '3band_{}.tif'.format(image_id))
img = sk.imread(image_name, True)
label_map = np.zeros(img.shape, dtype=np.uint8)
label_map = img_util.create_label_map_from_polygons(gT.importgeojson(predict_geojson_file),
label_map)
label_img = img_util.create_label_img(img, label_map)
save_file = os.path.join(setting.TMP_DIR, '{}_predict.png'.format(image_id))
sk.imsave(save_file, label_img)
truth_geojson_file = os.path.join(setting.PIXEL_GEO_JSON_DIR, '{}_Pixel.geojson'.format(image_id))
print('{}'.format(truth_geojson_file))
label_map = np.zeros(img.shape, dtype=np.uint8)
print('label_map shape{}'.format(label_map.shape))
label_map = img_util.create_label_map_from_polygons(gT.importgeojson(truth_geojson_file), label_map)
label_img = img_util.create_label_img(img, label_map)
save_file = os.path.join(setting.TMP_DIR, '{}_Pixel.png'.format(image_id))
sk.imsave(save_file, label_img)
if __name__ == '__main__':
process_convert_mask_to_geojson()
#test_geojson()
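# A dependency-free sketch of the worker-pool pattern used in
# process_convert_mask_to_geojson above: imap_unordered yields results as soon as any
# worker finishes, so progress can be logged every 100 items (the worker here is a toy).
from multiprocessing import Pool

def toy_worker(item):
    return item, 'Done'

if __name__ == '__main__':
    with Pool(8) as pool:
        done = 0
        for image_id, msg in pool.imap_unordered(toy_worker, range(1000)):
            done += 1
            if done % 100 == 0:
                print('Convert {}'.format(done))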
| 40.03125
| 118
| 0.674993
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 568
| 0.147801
|
a7d285c6e1ae9ac1ca025fdba430e5dba345f5fd
| 412
|
py
|
Python
|
core/migrations/0008_touristspot_photo.py
|
isnardsilva/django-attractions-api
|
feade087d840b72b603d2a4bf538b8c362aa91bd
|
[
"MIT"
] | 1
|
2021-12-31T12:59:49.000Z
|
2021-12-31T12:59:49.000Z
|
core/migrations/0008_touristspot_photo.py
|
isnardsilva/django-attractions-api
|
feade087d840b72b603d2a4bf538b8c362aa91bd
|
[
"MIT"
] | null | null | null |
core/migrations/0008_touristspot_photo.py
|
isnardsilva/django-attractions-api
|
feade087d840b72b603d2a4bf538b8c362aa91bd
|
[
"MIT"
] | null | null | null |
# Generated by Django 3.0.7 on 2020-07-19 03:55
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('core', '0007_auto_20200614_0254'),
]
operations = [
migrations.AddField(
model_name='touristspot',
name='photo',
field=models.ImageField(blank=True, null=True, upload_to='core'),
),
]
| 21.684211
| 77
| 0.604369
| 319
| 0.774272
| 0
| 0
| 0
| 0
| 0
| 0
| 104
| 0.252427
|
a7d331c87b462d04d58d991edd7603c1f1659a3c
| 712
|
py
|
Python
|
tests/integration/test_user_invite.py
|
donovan-PNW/dwellinglybackend
|
448df61f6ea81f00dde7dab751f8b2106f0eb7b1
|
[
"MIT"
] | 15
|
2020-07-09T20:51:09.000Z
|
2021-11-28T21:59:02.000Z
|
tests/integration/test_user_invite.py
|
donovan-PNW/dwellinglybackend
|
448df61f6ea81f00dde7dab751f8b2106f0eb7b1
|
[
"MIT"
] | 148
|
2020-03-28T22:10:30.000Z
|
2021-12-19T09:22:59.000Z
|
tests/integration/test_user_invite.py
|
donovan-PNW/dwellinglybackend
|
448df61f6ea81f00dde7dab751f8b2106f0eb7b1
|
[
"MIT"
] | 30
|
2020-03-12T02:31:27.000Z
|
2021-07-29T02:40:36.000Z
|
import pytest
from models.user import RoleEnum
from unittest.mock import patch
from resources.email import Email
@pytest.mark.usefixtures("client_class", "empty_test_db")
class TestUserInvite:
def setup(self):
self.endpoint = "/api/user/invite"
@patch.object(Email, "send_user_invite_msg")
def test_invite_user(self, send_user_invite_msg, valid_header, user_attributes):
response = self.client.post(
self.endpoint,
headers=valid_header,
json=user_attributes(role=RoleEnum.STAFF.value),
)
send_user_invite_msg.assert_called()
assert response.status_code == 201
assert response.json == {"message": "User Invited"}
| 30.956522
| 84
| 0.695225
| 538
| 0.755618
| 0
| 0
| 596
| 0.837079
| 0
| 0
| 92
| 0.129213
|
a7d350ee5dfc4cbea31fc30d8d13f43745b214cc
| 778
|
py
|
Python
|
setup.py
|
Louis-Navarro/decorators
|
6339a32aae66608cbf7fd8a1221b47ae88992d53
|
[
"MIT"
] | null | null | null |
setup.py
|
Louis-Navarro/decorators
|
6339a32aae66608cbf7fd8a1221b47ae88992d53
|
[
"MIT"
] | null | null | null |
setup.py
|
Louis-Navarro/decorators
|
6339a32aae66608cbf7fd8a1221b47ae88992d53
|
[
"MIT"
] | null | null | null |
import setuptools
from decorators.__init__ import __version__ as v
with open('README.md') as fp:
long_description = fp.read()
setuptools.setup(
name='decorators-LOUIS-NAVARRO',
version=v,
author='Louis Navarro',
description='Function decorators I made',
long_description=long_description,
long_description_content_type="text/markdown",
url="https://github.com/Louis-Navarro/decorators",
packages=setuptools.find_packages(),
classifiers=[
"Programming Language :: Python :: 3.7",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Environment :: Plugins",
"Intended Audience :: Developers",
"Natural Language:: English",
],
python_requires='>=3.6',
)
| 29.923077
| 54
| 0.669666
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 347
| 0.446015
|
a7d4fbe6bb2f93758e5bbd91a20e5e655a876720
| 7,386
|
py
|
Python
|
test_undirect_graf.py
|
rodrigondec/Grafos
|
dd3bb7ffd56909395cc211c6d68f9e3eaa5fa9ba
|
[
"Unlicense"
] | null | null | null |
test_undirect_graf.py
|
rodrigondec/Grafos
|
dd3bb7ffd56909395cc211c6d68f9e3eaa5fa9ba
|
[
"Unlicense"
] | null | null | null |
test_undirect_graf.py
|
rodrigondec/Grafos
|
dd3bb7ffd56909395cc211c6d68f9e3eaa5fa9ba
|
[
"Unlicense"
] | null | null | null |
from grafo import Grafo, DiGrafo
from no import No
from aresta import Aresta
import unittest
class TestStringMethods(unittest.TestCase):
def setUp(self):
self.grafo = Grafo()
def test_atingivel(self):
self.grafo.insertNo(No(1))
self.grafo.insertNo(No(2))
self.grafo.insertNo(No(3))
self.grafo.insertNo(No(4))
self.grafo.insertNo(No(5))
self.grafo.insertNo(No(6))
self.grafo.insertNo(No(7))
self.assertEqual(len(self.grafo.nos), 7)
self.grafo.insertAresta(Aresta(1, 2))
self.grafo.insertAresta(Aresta(1, 5))
self.grafo.insertAresta(Aresta(5, 2))
self.grafo.insertAresta(Aresta(5, 4))
self.grafo.insertAresta(Aresta(2, 3))
self.grafo.insertAresta(Aresta(3, 4))
self.grafo.insertAresta(Aresta(4, 6))
self.assertEqual(len(self.grafo.arestas), 7)
self.assertEqual(self.grafo.atingivel(1, 6), True)
self.assertEqual(self.grafo.atingivel(1, 7), False)
def test_caminho(self):
self.grafo.insertNo(No(1))
self.grafo.insertNo(No(2))
self.grafo.insertNo(No(3))
self.grafo.insertNo(No(4))
self.grafo.insertNo(No(5))
self.grafo.insertNo(No(6))
self.grafo.insertNo(No(7))
self.assertEqual(len(self.grafo.nos), 7)
self.grafo.insertAresta(Aresta(1, 2))
self.grafo.insertAresta(Aresta(1, 5))
self.grafo.insertAresta(Aresta(5, 2))
self.grafo.insertAresta(Aresta(5, 4))
self.grafo.insertAresta(Aresta(2, 3))
self.grafo.insertAresta(Aresta(3, 4))
self.grafo.insertAresta(Aresta(4, 6))
self.assertEqual(len(self.grafo.arestas), 7)
self.assertEqual(self.grafo.caminho(1, 6), [1, 5, 4, 6])
self.assertEqual(self.grafo.caminho(1, 3), [1, 2, 3])
def test_conexo(self):
self.grafo.insertNo(No(1))
self.grafo.insertNo(No(2))
self.grafo.insertNo(No(3))
self.grafo.insertNo(No(4))
self.grafo.insertNo(No(5))
self.grafo.insertNo(No(6))
self.assertEqual(len(self.grafo.nos), 6)
self.grafo.insertAresta(Aresta(1, 2))
self.grafo.insertAresta(Aresta(1, 5))
self.grafo.insertAresta(Aresta(5, 2))
self.grafo.insertAresta(Aresta(5, 4))
self.grafo.insertAresta(Aresta(2, 3))
self.grafo.insertAresta(Aresta(3, 4))
self.grafo.insertAresta(Aresta(4, 6))
self.assertEqual(len(self.grafo.arestas), 7)
self.assertEqual(self.grafo.conexo(), True)
self.grafo.insertNo(No(7))
self.assertEqual(len(self.grafo.nos), 7)
self.assertEqual(self.grafo.conexo(), False)
def test_ciclico_true(self):
self.grafo.insertNo(No(1))
self.grafo.insertNo(No(2))
self.grafo.insertNo(No(3))
self.grafo.insertNo(No(4))
self.grafo.insertNo(No(5))
self.grafo.insertNo(No(6))
self.assertEqual(len(self.grafo.nos), 6)
self.grafo.insertAresta(Aresta(1, 2))
self.grafo.insertAresta(Aresta(1, 5))
self.grafo.insertAresta(Aresta(5, 2))
self.grafo.insertAresta(Aresta(5, 4))
self.grafo.insertAresta(Aresta(2, 3))
self.grafo.insertAresta(Aresta(3, 4))
self.grafo.insertAresta(Aresta(4, 6))
self.assertEqual(len(self.grafo.arestas), 7)
self.assertEqual(self.grafo.ciclico(), True)
def test_ciclico_false(self):
self.grafo.insertNo(No(1))
self.grafo.insertNo(No(2))
self.grafo.insertNo(No(3))
self.grafo.insertNo(No(4))
self.grafo.insertNo(No(5))
self.grafo.insertNo(No(6))
self.assertEqual(len(self.grafo.nos), 6)
self.grafo.insertAresta(Aresta(1, 2))
self.grafo.insertAresta(Aresta(1, 5))
self.grafo.insertAresta(Aresta(5, 4))
self.grafo.insertAresta(Aresta(2, 3))
self.grafo.insertAresta(Aresta(4, 6))
self.assertEqual(len(self.grafo.arestas), 5)
        print(self.grafo)
self.assertEqual(self.grafo.ciclico(), False)
def test_ciclico_n_conexo_true(self):
self.grafo.insertNo(No(1))
self.grafo.insertNo(No(2))
self.grafo.insertNo(No(3))
self.grafo.insertNo(No(4))
self.grafo.insertNo(No(5))
self.grafo.insertNo(No(6))
self.grafo.insertNo(No(7))
self.grafo.insertNo(No(8))
self.grafo.insertNo(No(9))
self.grafo.insertNo(No(10))
self.assertEqual(len(self.grafo.nos), 10)
self.grafo.insertAresta(Aresta(1, 2))
self.grafo.insertAresta(Aresta(1, 5))
self.grafo.insertAresta(Aresta(5, 4))
self.grafo.insertAresta(Aresta(2, 3))
self.grafo.insertAresta(Aresta(7, 6))
self.grafo.insertAresta(Aresta(8, 9))
self.grafo.insertAresta(Aresta(9, 10))
self.grafo.insertAresta(Aresta(8, 10))
self.assertEqual(len(self.grafo.arestas), 8)
self.assertEqual(self.grafo.ciclico(), True)
def test_ciclico_n_conexo_false(self):
self.grafo.insertNo(No(1))
self.grafo.insertNo(No(2))
self.grafo.insertNo(No(3))
self.grafo.insertNo(No(4))
self.grafo.insertNo(No(5))
self.grafo.insertNo(No(6))
self.grafo.insertNo(No(7))
self.grafo.insertNo(No(8))
self.grafo.insertNo(No(9))
self.grafo.insertNo(No(10))
self.assertEqual(len(self.grafo.nos), 10)
self.grafo.insertAresta(Aresta(1, 2))
self.grafo.insertAresta(Aresta(1, 5))
self.grafo.insertAresta(Aresta(5, 4))
self.grafo.insertAresta(Aresta(2, 3))
self.grafo.insertAresta(Aresta(7, 6))
self.grafo.insertAresta(Aresta(8, 9))
self.grafo.insertAresta(Aresta(9, 10))
self.assertEqual(len(self.grafo.arestas), 7)
self.assertEqual(self.grafo.ciclico(), False)
def test_num_componentes(self):
self.grafo.insertNo(No(1))
self.grafo.insertNo(No(2))
self.grafo.insertNo(No(3))
self.grafo.insertNo(No(4))
self.grafo.insertNo(No(5))
self.assertEqual(len(self.grafo.nos), 5)
self.grafo.insertAresta(Aresta(1, 2))
self.grafo.insertAresta(Aresta(1, 5))
self.grafo.insertAresta(Aresta(5, 4))
self.grafo.insertAresta(Aresta(2, 3))
self.assertEqual(len(self.grafo.arestas), 4)
self.assertEqual(self.grafo.num_componentes(), 1)
self.grafo.insertNo(No(6))
self.grafo.insertNo(No(7))
self.assertEqual(len(self.grafo.nos), 7)
self.grafo.insertAresta(Aresta(7, 6))
self.assertEqual(len(self.grafo.arestas), 5)
self.assertEqual(self.grafo.num_componentes(), 2)
self.grafo.insertNo(No(8))
self.grafo.insertNo(No(9))
self.grafo.insertNo(No(10))
self.assertEqual(len(self.grafo.nos), 10)
self.grafo.insertAresta(Aresta(8, 9))
self.grafo.insertAresta(Aresta(9, 10))
self.grafo.insertAresta(Aresta(8, 10))
self.assertEqual(len(self.grafo.arestas), 8)
self.assertEqual(self.grafo.num_componentes(), 3)
def test_bfs(self):
self.grafo.insertNo(No(1))
self.grafo.insertNo(No(2))
self.grafo.insertNo(No(3))
self.grafo.insertNo(No(4))
self.grafo.insertNo(No(5))
self.grafo.insertNo(No(6))
self.assertEqual(len(self.grafo.nos), 6)
self.grafo.insertAresta(Aresta(1, 2))
self.grafo.insertAresta(Aresta(1, 5))
self.grafo.insertAresta(Aresta(5, 2))
self.grafo.insertAresta(Aresta(5, 4))
self.grafo.insertAresta(Aresta(2, 3))
self.grafo.insertAresta(Aresta(3, 4))
self.grafo.insertAresta(Aresta(4, 6))
self.assertEqual(len(self.grafo.arestas), 7)
self.grafo.bfs(1)
def test_dfs(self):
self.grafo.insertNo(No(1))
self.grafo.insertNo(No(2))
self.grafo.insertNo(No(3))
self.grafo.insertNo(No(4))
self.grafo.insertNo(No(5))
self.grafo.insertNo(No(6))
self.assertEqual(len(self.grafo.nos), 6)
self.grafo.insertAresta(Aresta(1, 2))
self.grafo.insertAresta(Aresta(1, 5))
self.grafo.insertAresta(Aresta(5, 2))
self.grafo.insertAresta(Aresta(5, 4))
self.grafo.insertAresta(Aresta(2, 3))
self.grafo.insertAresta(Aresta(3, 4))
self.grafo.insertAresta(Aresta(4, 6))
self.assertEqual(len(self.grafo.arestas), 7)
self.grafo.dfs(1)
if __name__ == '__main__':
unittest.main()
| 29.544
| 58
| 0.716084
| 7,247
| 0.981181
| 0
| 0
| 0
| 0
| 0
| 0
| 10
| 0.001354
|
a7d69ea8dc41116a3648dee50adba54f390698c0
| 2,476
|
py
|
Python
|
fancy/config/option.py
|
susautw/fancy-config
|
25a3bd51a40df071d00327640caa05b6288bd970
|
[
"MIT"
] | 1
|
2022-03-17T04:47:52.000Z
|
2022-03-17T04:47:52.000Z
|
fancy/config/option.py
|
susautw/fancy-config
|
25a3bd51a40df071d00327640caa05b6288bd970
|
[
"MIT"
] | 9
|
2021-12-01T08:01:52.000Z
|
2022-03-16T13:05:07.000Z
|
fancy/config/option.py
|
susautw/fancy-config
|
25a3bd51a40df071d00327640caa05b6288bd970
|
[
"MIT"
] | 1
|
2022-03-07T09:29:06.000Z
|
2022-03-07T09:29:06.000Z
|
import warnings
from typing import Any, Callable, TYPE_CHECKING
from . import ConfigStructure
from .process import auto_process_typ
from ..config import identical
if TYPE_CHECKING:
from ..config import BaseConfig
class Option:
_type: Callable[[Any], Any]
_required: bool
_nullable: bool
_default: Any
_description: str
_config_name: str = None
def __init__(
self,
required=False,
nullable=False, default=None,
type=identical,
preprocess=identical,
description="",
name: str = None
):
self._config_name = name
self._required = required
self._nullable = nullable
self._default = default
self._description = description
if preprocess is not identical:
warnings.warn("preprocess has deprecated. use type to instead.", DeprecationWarning)
type = preprocess
self._type = auto_process_typ(type)
def __get__(self, instance: 'BaseConfig', owner):
if instance is None:
return self
# initialize value
if self._should_assign_default_value(instance):
if self._default is None and not self._nullable:
raise AttributeError("attribute must assign the value before access it.")
self.__set__(instance, self._default)
return vars(instance)[self.__name__]
def __set__(self, instance, raw_value):
if raw_value is None:
if not self._nullable:
raise ValueError('the value should not be none')
return None
value = self._type(raw_value)
if isinstance(value, ConfigStructure):
value.load_by_context(instance, raw_value)
vars(instance)[self.__name__] = value
def __delete__(self, instance):
del vars(instance)[self.__name__]
def __set_name__(self, owner, name):
self.__name__ = name
if self._config_name is None:
self._config_name = name
def is_assigned(self, instance) -> bool:
return self.__name__ in vars(instance)
def _should_assign_default_value(self, instance):
return not self.is_assigned(instance)
@property
def name(self) -> str:
return self._config_name
@property
def required(self) -> bool:
return self._required
@property
def description(self) -> str:
return self._description
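# Hedged usage sketch for the Option descriptor above. Dummy is a made-up host class
# (the descriptor is annotated for BaseConfig instances in the real package), and the
# sketch assumes auto_process_typ(float) behaves as a plain float converter.
class Dummy:
    threshold = Option(default=0.5, type=float, description="score cut-off", name="threshold")

cfg = Dummy()
print(cfg.threshold)     # 0.5 - the default is converted and stored on first access
cfg.threshold = "0.9"    # raw values pass through the configured type converter
print(cfg.threshold)     # 0.9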
| 26.913043
| 96
| 0.631664
| 2,254
| 0.910339
| 0
| 0
| 216
| 0.087237
| 0
| 0
| 162
| 0.065428
|
a7d7e86babb70ea3a37edccf05c803e8419f09d7
| 7,128
|
py
|
Python
|
data_loader_manual.py
|
Chen-Yifan/DEM_building_segmentation
|
1e9a41e87ec0ab1777a65146c5b31d88938480b7
|
[
"MIT"
] | null | null | null |
data_loader_manual.py
|
Chen-Yifan/DEM_building_segmentation
|
1e9a41e87ec0ab1777a65146c5b31d88938480b7
|
[
"MIT"
] | null | null | null |
data_loader_manual.py
|
Chen-Yifan/DEM_building_segmentation
|
1e9a41e87ec0ab1777a65146c5b31d88938480b7
|
[
"MIT"
] | null | null | null |
from PIL import Image
import numpy as np
import os
import re
import scipy.misc
import random
import sys
import csv
def is_feature_present(input_array):
    return (np.sum(input_array != 0) > 10)  # keep images with more than 10 labelled pixels
def load_feature_data(dataroot, frame_dir, mask_dir, feature_type='erosion', dim=128):
    '''load frames and masks into two numpy arrays respectively
-----
condition: with feature
arguments:
frame_dir, mask_dir,
feature_type: str, either erosion or building
dim: width and height of the image
    process: always resize to dim x dim (default 128x128) as model input,
             then min-max normalize each image using its own nonzero min and max
-----
'''
low=0.1
hi=1.0
test_frames = []
test_masks = []
test_masks_ext = []
test_masks_MS = []
frames = []
masks = []
name_list = []
frame_names = os.listdir(frame_dir)
frame_names.sort(key=lambda var:[int(x) if x.isdigit() else x
for x in re.findall(r'[^0-9]|[0-9]+', var)]) # sort frame_names
print("** load image from directory loop starts:")
for i in range(len(frame_names)):
frame_file = frame_names[i]
# if len(frames)>1000:
# break
"""find mapped frame and mask path"""
frame_path = os.path.join(frame_dir, frame_file)
"""load image from tif and remove useless data"""
if feature_type=='erosion':
mask_path = os.path.join(mask_dir, frame_file)
x = np.load(frame_path)
# frame_array = np.concatenate((x[:,:,0:2], np.expand_dims(x[:,:,-1], axis=2)),axis=-1)
frame_array = x[:,:,-1]
label_array = np.load(mask_path)
else: # building
mask_file = frame_file.replace('mclean_fillnodata_','')
mask_path = os.path.join(mask_dir, mask_file)
#### for 128_0ver
# mask_path = os.path.join(mask_dir, frame_file.replace('DEM','label'))
if(frame_file[-3:]=='tif'):
if not os.path.exists(mask_path):
print('rm mask_path', mask_path)
# os.remove(frame_path)
continue
frame_array = np.array(Image.open(frame_path))
label_array = np.array(Image.open(mask_path))
else:
# os.remove(frame_path)
# if os.path.exists(mask_path):
# os.remove(mask_path)
# print('remove1',frame_file)
continue
# check the dimension, if dimension wrong, remove
dims = frame_array.shape
if dims[0]!=dim or dims[1]!=dim or (len(np.unique(frame_array))<3): # remove the file if the frame has less than 3 unique data
os.remove(mask_path)
# os.remove(frame_path)
print('remove2',frame_file)
continue
# both erosion and builiding, we check if feature is present
if not is_feature_present(label_array):
continue
"""Resize to dim"""
if frame_array.shape[0]!=dim:
frame_array = np.array(Image.fromarray(frame_array).resize((dim,dim), Image.BILINEAR))
label_array = np.array(Image.fromarray(label_array).resize((dim,dim), Image.NEAREST))
"""Try preprocess : Normalization"""
try:
minn, maxx = np.min(frame_array[frame_array > 0]), np.max(frame_array[frame_array > 0])
frame_array[frame_array > 0] = low + (frame_array[frame_array > 0] - minn) * (hi - low) / (maxx - minn)
except:
continue
# check label 0 1 2
unique_labels = np.unique(label_array)
label_array = np.where(label_array==2, 1, label_array)
if 2 in unique_labels and 1 not in unique_labels:
# load the manual labels
manual_mask_path = os.path.join(dataroot, "label_manual_test/", mask_file)
if not os.path.exists(manual_mask_path):
continue
test_frames.append(frame_array)
# add the MS labels
test_masks_MS.append(label_array)
label_array = np.array(Image.open(manual_mask_path))
test_masks_ext.append(label_array)
label_array = np.where(label_array==2, 0, label_array) # only care the label 1
test_masks.append(label_array)
else:
frames.append(frame_array)
masks.append(label_array)
name_list.append(frame_names[i])
"""Form array and name_list"""
frames, masks, test_frames, test_masks, test_masks_ext, test_masks_MS = np.array(frames), np.array(masks), np.array(test_frames), np.array(test_masks), \
np.array(test_masks_ext), np.array(test_masks_MS)
print("meta data: training feature/bkground ratio",np.sum(masks), np.sum(1-masks))
"""Extend to 4 dimensions for training """
if(frames.ndim != 4):
frames = np.expand_dims(frames, -1)
test_frames = np.expand_dims(test_frames, -1)
masks = np.expand_dims(masks, -1)
test_masks = np.expand_dims(test_masks, -1)
test_masks_ext = np.expand_dims(test_masks_ext, -1)
test_masks_MS = np.expand_dims(test_masks_MS, -1)
assert(test_masks.shape == test_masks_ext.shape)
assert(test_masks.shape == test_masks_MS.shape)
print("test_masks.shape = ", test_masks.shape)
# split frames/masks to train:val = 5:1
a = int(len(frames)*5/6)
train_frames, train_masks = frames[:a], masks[:a]
val_frames, val_masks = frames[a:], masks[a:]
return train_frames, val_frames, test_frames, train_masks, val_masks, test_masks, test_masks_ext, test_masks_MS, name_list
def load_data(opt):
"""
Load data to a dictionary containing train, val, test
Return: Data_dict
"""
train_frames, val_frames, test_frames, train_masks, val_masks, test_masks, test_masks_ext, test_masks_MS, name_list = \
load_feature_data(opt.dataroot, opt.frame_path, opt.mask_path, opt.dataset, opt.dim)
n_train, n_test, n_val = len(train_frames), len(test_frames), len(val_frames)
print('***** #train: #test: #val = %d : %d :%d ******'%(n_train, n_test, n_val))
Data_dict = {
'train':[train_frames.astype('float32'),
train_masks.astype('float32')],
'val':[val_frames.astype('float32'),
val_masks.astype('float32')],
'test':[test_frames.astype('float32'),
test_masks.astype('float32')],
'test_MS':[None,
test_masks_MS.astype('float32')],
'test_ext':[None,
test_masks_ext.astype('float32')],
}
return Data_dict
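# A standalone sketch of the per-image normalisation used in load_feature_data above:
# nonzero (valid) pixels are rescaled into [low, hi] = [0.1, 1.0] using the image's own
# min and max, while zero (nodata) pixels stay at zero.
import numpy as np

low, hi = 0.1, 1.0
frame = np.array([[0.0, 120.0],
                  [150.0, 180.0]])
valid = frame > 0
minn, maxx = frame[valid].min(), frame[valid].max()
frame[valid] = low + (frame[valid] - minn) * (hi - low) / (maxx - minn)
print(frame)  # [[0.   0.1 ] [0.55 1.  ]]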
| 40.271186
| 157
| 0.571409
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1,801
| 0.252666
|
a7d7fa784412398969563696b9937bee43c267bd
| 5,010
|
py
|
Python
|
autotest/osr/osr_micoordsys.py
|
robe2/gdal
|
78573efe69f1506c112209501068c0b043438295
|
[
"MIT"
] | null | null | null |
autotest/osr/osr_micoordsys.py
|
robe2/gdal
|
78573efe69f1506c112209501068c0b043438295
|
[
"MIT"
] | null | null | null |
autotest/osr/osr_micoordsys.py
|
robe2/gdal
|
78573efe69f1506c112209501068c0b043438295
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
###############################################################################
# $Id$
#
# Project: GDAL/OGR Test Suite
# Purpose: Test some MITAB specific translation issues.
# Author: Even Rouault, <even dot rouault at mines dash paris dot org>
#
###############################################################################
# Copyright (c) 2010, Even Rouault <even dot rouault at mines-paris dot org>
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
###############################################################################
import sys
sys.path.append( '../pymod' )
import gdaltest
from osgeo import osr
###############################################################################
# Test the osr.SpatialReference.ImportFromMICoordSys() function.
#
def osr_micoordsys_1():
srs = osr.SpatialReference()
srs.ImportFromMICoordSys('Earth Projection 3, 62, "m", -117.474542888889, 33.7644620277778, 33.9036340277778, 33.6252900277778, 0, 0')
if abs(srs.GetProjParm(osr.SRS_PP_STANDARD_PARALLEL_1)-33.9036340277778)>0.0000005 \
or abs(srs.GetProjParm(osr.SRS_PP_STANDARD_PARALLEL_2)-33.6252900277778)>0.0000005 \
or abs(srs.GetProjParm(osr.SRS_PP_LATITUDE_OF_ORIGIN)-33.7644620277778)>0.0000005 \
or abs(srs.GetProjParm(osr.SRS_PP_CENTRAL_MERIDIAN)-(-117.474542888889))>0.0000005 \
or abs(srs.GetProjParm(osr.SRS_PP_FALSE_EASTING)-0.0)>0.0000005 \
or abs(srs.GetProjParm(osr.SRS_PP_FALSE_NORTHING)-0.0)>0.0000005:
print(srs.ExportToPrettyWkt())
        gdaltest.post_reason('Can not import Lambert Conformal Conic projection.')
return 'fail'
return 'success'
###############################################################################
# Test the osr.SpatialReference.ExportToMICoordSys() function.
#
def osr_micoordsys_2():
srs = osr.SpatialReference()
srs.ImportFromWkt("""PROJCS["unnamed",GEOGCS["NAD27",\
DATUM["North_American_Datum_1927",\
SPHEROID["Clarke 1866",6378206.4,294.9786982139006,\
AUTHORITY["EPSG","7008"]],AUTHORITY["EPSG","6267"]],\
PRIMEM["Greenwich",0],UNIT["degree",0.0174532925199433],\
AUTHORITY["EPSG","4267"]],PROJECTION["Lambert_Conformal_Conic_2SP"],\
PARAMETER["standard_parallel_1",33.90363402777778],\
PARAMETER["standard_parallel_2",33.62529002777778],\
PARAMETER["latitude_of_origin",33.76446202777777],\
PARAMETER["central_meridian",-117.4745428888889],\
PARAMETER["false_easting",0],PARAMETER["false_northing",0],\
UNIT["metre",1,AUTHORITY["EPSG","9001"]]]""")
proj = srs.ExportToMICoordSys()
if proj != 'Earth Projection 3, 62, "m", -117.474542888889, 33.7644620277778, 33.9036340277778, 33.6252900277778, 0, 0':
print(proj)
        gdaltest.post_reason('Can not export Lambert Conformal Conic projection.')
return 'fail'
return 'success'
###############################################################################
# Test EPSG:3857
#
def osr_micoordsys_3():
srs = osr.SpatialReference()
srs.ImportFromEPSG(3857)
proj = srs.ExportToMICoordSys()
if proj != 'Earth Projection 10, 157, "m", 0':
gdaltest.post_reason('failure')
print(proj)
return 'fail'
srs = osr.SpatialReference()
srs.ImportFromMICoordSys('Earth Projection 10, 157, "m", 0')
wkt = srs.ExportToWkt()
if wkt.find('EXTENSION["PROJ4"') < 0:
gdaltest.post_reason('failure')
print(wkt)
return 'fail'
# Transform again to MITAB (we no longer have the EPSG code, so we rely on PROJ4 extension node)
proj = srs.ExportToMICoordSys()
if proj != 'Earth Projection 10, 157, "m", 0':
gdaltest.post_reason('failure')
print(proj)
return 'fail'
return 'success'
gdaltest_list = [
osr_micoordsys_1,
osr_micoordsys_2,
osr_micoordsys_3 ]
if __name__ == '__main__':
gdaltest.setup_run( 'osr_micoordsys' )
gdaltest.run_tests( gdaltest_list )
gdaltest.summarize()
| 36.838235
| 138
| 0.644311
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 3,245
| 0.647705
|
a7d8ff63a50736d94af9bb52ce5984969f4467d7
| 2,194
|
py
|
Python
|
maintain.py
|
keioni/ink_01
|
5c87dcf16924dfa7bf9d0e3a7250ec1542045e72
|
[
"MIT"
] | null | null | null |
maintain.py
|
keioni/ink_01
|
5c87dcf16924dfa7bf9d0e3a7250ec1542045e72
|
[
"MIT"
] | null | null | null |
maintain.py
|
keioni/ink_01
|
5c87dcf16924dfa7bf9d0e3a7250ec1542045e72
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import sys
import json
from ink.maintainer import make_pickle, DatabaseMaintainer
from ink.sys.config import CONF
from ink.sys.database.connector.mysql import MySQLConnector
from ink.sys.database.connector.null import NullConnector
def _get_db_connector(dry_run: bool = False):
if dry_run:
db_connector = NullConnector()
else:
db_connector = MySQLConnector()
db_connector.connect(CONF.database.connect_config)
return db_connector
def cmd_mp():
conf_file = ''
pickle_file = ''
if len(args) > 1:
conf_file = args[1]
if len(args) > 2:
pickle_file = args[2]
print('>> Pickle Maker starting...')
make_pickle(conf_file, pickle_file)
print('>> Pickle Maker finished.')
def cmd_dbm():
db_connector = _get_db_connector(True)
dbman = DatabaseMaintainer(db_connector)
if len(args) > 1:
subcmd = args[1]
if subcmd == 's':
tables = dbman.get_defined_tables()
print(json.dumps(tables, indent=4))
elif subcmd == 'c':
dbman.create_tables()
elif subcmd == 'd':
dbman.destroy_tables()
def cmd_t_dbm():
db_connector = _get_db_connector()
dbman = DatabaseMaintainer(db_connector)
tables1 = dbman.get_defined_tables('tests/test_table_schema1.sql')
tables2 = dbman.get_defined_tables('tests/test_table_schema2.sql')
print(json.dumps(tables1, indent=4))
print(json.dumps(tables2, indent=4))
def cmd_dbrs():
name = ''
arg = ''
if len(args) > 1:
name = args[1]
if len(args) > 2:
arg = args[2]
db_connector = _get_db_connector()
dbman = DatabaseMaintainer(db_connector)
dbman.get_statement(name, arg)
def cmd_cc():
print(CONF)
print(CONF.database)
print(CONF.database.connect_string.host)
CONF.load()
if len(sys.argv) < 2:
    print('Usage: maintain.py <command> [args...]')
    sys.exit(1)
cmd = sys.argv[1]
args = sys.argv[1:]
if cmd == 'debug':
cmd = 'dbm'
args = [cmd, 'c']
if cmd == 'mp':
cmd_mp()
elif cmd == 'dbm':
cmd_dbm()
elif cmd == 't_dbm':
cmd_t_dbm()
elif cmd == 'dbrs':
cmd_dbrs()
elif cmd == 'cc':
cmd_cc()
else:
print('Bad command: {}'.format(cmd))
| 23.094737
| 70
| 0.635369
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 235
| 0.10711
|
a7da1b3b72d6032aefdb12dd63739b250c1938d8
| 5,261
|
py
|
Python
|
serpentmonkee/UtilsMonkee.py
|
anthromorphic-ai/serpentmonkee
|
505bbe03fd8a77bc7737e46b8e27e0d91b87835b
|
[
"MIT"
] | null | null | null |
serpentmonkee/UtilsMonkee.py
|
anthromorphic-ai/serpentmonkee
|
505bbe03fd8a77bc7737e46b8e27e0d91b87835b
|
[
"MIT"
] | null | null | null |
serpentmonkee/UtilsMonkee.py
|
anthromorphic-ai/serpentmonkee
|
505bbe03fd8a77bc7737e46b8e27e0d91b87835b
|
[
"MIT"
] | null | null | null |
import requests
from dateutil import parser
import json
from datetime import datetime, timezone
import time
import sys
import random
import uuid
import copy
# --------------------------------------------------------------------
class RoundTripEncoder(json.JSONEncoder):
DATE_FORMAT = "%Y-%m-%d"
TIME_FORMAT = "%H:%M:%S.%f"
def default(self, obj):
if isinstance(obj, datetime):
return {
"_type": "datetime",
"value": obj.strftime("%s %s" % (self.DATE_FORMAT, self.TIME_FORMAT)),
}
return super(RoundTripEncoder, self).default(obj)
class RoundTripDecoder(json.JSONDecoder):
def __init__(self, *args, **kwargs):
json.JSONDecoder.__init__(
self, object_hook=self.object_hook, *args, **kwargs)
def object_hook(self, obj):
if "_type" not in obj:
return obj
type = obj["_type"]
if type == "datetime":
return parser.parse(obj["value"])
return obj
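# A minimal round-trip sketch (not part of the original module): datetime values
# survive JSON serialization because RoundTripEncoder tags them with "_type" and
# RoundTripDecoder rebuilds them via dateutil's parser.
#   payload = {"created": datetime.now(timezone.utc), "count": 3}
#   text = json.dumps(payload, cls=RoundTripEncoder)
#   restored = json.loads(text, cls=RoundTripDecoder)
#   assert isinstance(restored["created"], datetime)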
def call_cloud_function(url, headers, data):
"""
Calls the cloud function at url with headers and data passed as part of the POST.
Returns JSON response, passed through RoundTripDecoder
"""
response_data = None
try:
response = requests.post(url=url, data=data, headers=headers)
response_data = json.loads(response.text, cls=RoundTripDecoder)
except Exception as e:
print("ERROR in call_cloud_function: {}".format(str(e)))
return response_data
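# Hypothetical usage sketch (the url and headers below are placeholders, not values
# taken from this module):
#   resp = call_cloud_function(
#       url="https://example.com/my-function",
#       headers={"Content-Type": "application/json"},
#       data=json.dumps({"ping": True}, cls=RoundTripEncoder),
#   )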
class MonkeeTracker: # --------------------------------------------------------------------
def __init__(self, db, app_name, function_name, human_uid):
self.tracker = []
self.db = db
self.app_name = app_name
self.human_uid = human_uid
self.function_name = function_name
def set_db(self, db_destination):
self.db = db_destination
def set_human_uid(self, human_uid):
self.human_uid = human_uid
def track(self, event_name):
self.tracker.append(
{"ts": datetime.now(timezone.utc), "e": event_name})
def persist(self, logUid=None):
self.track("persist_tracker")
if len(self.tracker) >= 1:
diff = self.tracker[-1]["ts"] - self.tracker[0]["ts"]
overall_diff_s = diff.seconds + diff.microseconds / 1000000
ti = 0
while ti < len(self.tracker) - 1:
next_time = self.tracker[ti + 1]["ts"]
elapsed_mcs = next_time - self.tracker[ti]["ts"]
self.tracker[ti]["elapsed_time"] = (
elapsed_mcs.seconds + elapsed_mcs.microseconds / 1000000
)
self.tracker[ti]["a_perc"] = (
str(
int(
round(
100 *
self.tracker[ti]["elapsed_time"] /
overall_diff_s, 0
)
)
)
+ "%"
)
ti += 1
track_dict = {
"humanUid": self.human_uid,
"calledFrom": self.function_name,
"duration": overall_diff_s,
"log": self.tracker,
}
        if logUid is None:
            # Generate a time-descending uid so newer log documents sort first.
            r = int(10000 * random.random())
            seconds = 9999999999 - time.time()
            logUid = str(seconds) + str(r)
        self.db.document(logUid).set(track_dict)
def get_size(json_obj):
"""
returns the size of the JSON object in bytes
"""
dumps = json.dumps(json_obj, cls=RoundTripEncoder)
size_bytes = sys.getsizeof(dumps)
return size_bytes
def dateDiff(unit, ts1, ts2):
"""
returns the time delta between ts1 and ts2 in the provided unit.
Unit in: ['second','minute','hour','day']
"""
elapsedTime = ts2 - ts1
totalSeconds = elapsedTime.total_seconds()
if unit in ["s", "sec", "second"]:
return totalSeconds
elif unit in ["mn", "min", "minute"]:
return totalSeconds / 60
elif unit in ["hr", "hour"]:
return totalSeconds / 60 / 60
elif unit in ["d", "day"]:
return totalSeconds / 60 / 60 / 24
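# Example (not in the original module):
#   t0 = datetime(2021, 1, 1, tzinfo=timezone.utc)
#   t1 = datetime(2021, 1, 2, tzinfo=timezone.utc)
#   dateDiff('hour', t0, t1)  # -> 24.0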
def getval(dictionary, key, default_value=None):
if dictionary is not None:
if key in dictionary:
ret = dictionary[key]
else:
ret = default_value
else:
ret = default_value
return ret
def get_uuid():
return str(uuid.uuid4())
def describe_time(hh_ago):
ret = ""
hh_ago = int(round(hh_ago))
if hh_ago == 0:
ret = "very recently"
elif hh_ago == 1:
ret = "an hour ago"
elif hh_ago <= 24:
ret = str(hh_ago) + " hours ago"
elif hh_ago <= 48:
ret = "yesterday"
else:
dd_ago = int(round(hh_ago / 24))
ret = str(dd_ago) + " days ago"
return ret
def makeAscendingUid():
"""
Creates a uid such that new uids are always alphabetically in front of older ones.
For typical use in creating FB doc UIDs such that new docs will show up at the top of the collection
"""
    docUid = str(1625607464 * 3 - int(time.time()))
    suffix = get_uuid()  # renamed so the local value does not shadow the imported uuid module
    return f'{docUid}_{suffix}'
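# Quick illustration (not in the original module): uids produced later start with a
# smaller leading number, so a plain lexicographic sort puts newer documents first,
# assuming at least one second elapses between calls.
#   first = makeAscendingUid()
#   second = makeAscendingUid()   # called some time later
#   assert second < first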
| 27.984043
| 104
| 0.553317
| 2,761
| 0.524805
| 0
| 0
| 0
| 0
| 0
| 0
| 1,053
| 0.200152
|
a7dac603aaf8f14d5ec261bf9ee335b205d9767b
| 201
|
py
|
Python
|
backend/app/bucket.py
|
thanet-s/subme-selected-topics-project
|
fac1630839c580bbd66b93f2dc9004c8637a7b15
|
[
"MIT"
] | null | null | null |
backend/app/bucket.py
|
thanet-s/subme-selected-topics-project
|
fac1630839c580bbd66b93f2dc9004c8637a7b15
|
[
"MIT"
] | null | null | null |
backend/app/bucket.py
|
thanet-s/subme-selected-topics-project
|
fac1630839c580bbd66b93f2dc9004c8637a7b15
|
[
"MIT"
] | null | null | null |
from minio import Minio
import os
minio_client = Minio(
os.environ['MINIO_HOST'],
access_key=os.environ['MINIO_ROOT_USER'],
secret_key=os.environ['MINIO_ROOT_PASSWORD'],
secure=False
)
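# Hedged usage sketch (bucket and object names are placeholders, not from this file);
# bucket_exists, make_bucket and fput_object are standard MinIO client methods.
#   if not minio_client.bucket_exists("uploads"):
#       minio_client.make_bucket("uploads")
#   minio_client.fput_object("uploads", "report.pdf", "/tmp/report.pdf")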
| 22.333333
| 49
| 0.731343
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 50
| 0.248756
|
a7db0d9490bcb10d7f38e66a3fcc8a697cd073d0
| 2,642
|
py
|
Python
|
publications/PrADA/experiments/income_census/train_config.py
|
UMDataScienceLab/research
|
279ee21444817903cb9ef9dc9d9583a502865336
|
[
"Apache-2.0"
] | 49
|
2020-11-04T03:15:59.000Z
|
2022-03-23T12:21:15.000Z
|
publications/PrADA/experiments/income_census/train_config.py
|
UMDataScienceLab/research
|
279ee21444817903cb9ef9dc9d9583a502865336
|
[
"Apache-2.0"
] | 2
|
2021-09-12T02:36:42.000Z
|
2021-11-25T13:19:58.000Z
|
publications/PrADA/experiments/income_census/train_config.py
|
UMDataScienceLab/research
|
279ee21444817903cb9ef9dc9d9583a502865336
|
[
"Apache-2.0"
] | 11
|
2020-11-11T12:14:49.000Z
|
2022-03-08T16:17:05.000Z
|
from data_process.census_process.census_data_creation_config import census_data_creation
fg_feature_extractor_architecture_list = [[28, 56, 28, 14],
[25, 50, 25, 12],
[56, 86, 56, 18],
[27, 54, 27, 13]]
intr_fg_feature_extractor_for_architecture_list = [[53, 78, 53, 15],
[84, 120, 84, 20],
[55, 81, 55, 15],
[81, 120, 81, 20],
[52, 78, 52, 15],
[83, 120, 83, 20]]
no_fg_feature_extractor_architecture = [136, 150, 60, 20]
pre_train_hyperparameters = {
"using_interaction": False,
"momentum": 0.99,
"weight_decay": 0.00001,
"lr": 5e-4,
"batch_size": 128,
"max_epochs": 600,
"epoch_patience": 2,
"valid_metric": ('ks', 'auc')
}
fine_tune_hyperparameters = {
"using_interaction": False,
"load_global_classifier": False,
"momentum": 0.99,
"weight_decay": 0.0,
"lr": 8e-4,
"batch_size": 128,
"valid_metric": ('ks', 'auc')
}
no_adaptation_hyperparameters = {
"apply_feature_group": False,
"train_data_tag": 'all', # can be either 'all' or 'tgt'
"momentum": 0.99,
"weight_decay": 0.00001,
"lr": 5e-4,
"batch_size": 128,
"max_epochs": 600,
"epoch_patience": 2,
"valid_metric": ('ks', 'auc')
}
data_dir = census_data_creation['processed_data_dir']
data_tag = 'all4000pos004'
data_hyperparameters = {
"source_ad_train_file_name": data_dir + f'undergrad_census9495_ad_{data_tag}_train.csv',
"source_ad_valid_file_name": data_dir + f'undergrad_census9495_ad_{data_tag}_valid.csv',
"src_tgt_train_file_name": data_dir + f'degree_src_tgt_census9495_{data_tag}_train.csv',
"target_ad_train_file_name": data_dir + f'grad_census9495_ad_{data_tag}_train.csv',
"target_ft_train_file_name": data_dir + f'grad_census9495_ft_{data_tag}_train.csv',
"target_ft_valid_file_name": data_dir + f'grad_census9495_ft_{data_tag}_valid.csv',
"target_ft_test_file_name": data_dir + f'grad_census9495_ft_{data_tag}_test.csv',
"census_fg_pretrained_model_dir": "census_fg_pretrained_model",
"census_fg_ft_target_model_dir": "census_fg_ft_target_model",
"census_no-fg_pretrained_model_dir": "census_no-fg_pretrained_model",
"census_no-fg_ft_target_model_dir": "census_no-fg_ft_target_model",
"census_no-ad_model_dir": "census_no-ad_model"
}
| 38.289855
| 92
| 0.608251
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1,202
| 0.454958
|
a7dbae6b6e0c89662cba5d9864585c9b7e89ef3a
| 444
|
py
|
Python
|
tools/create_transmit_grouped_command_cron.py
|
Vayel/GUCEM-BVC
|
e5645dec332756d3c9db083abf2c8f3625a10d4d
|
[
"WTFPL"
] | 2
|
2016-09-23T18:02:40.000Z
|
2017-04-28T18:35:59.000Z
|
tools/create_transmit_grouped_command_cron.py
|
Vayel/GUCEM-BVC
|
e5645dec332756d3c9db083abf2c8f3625a10d4d
|
[
"WTFPL"
] | 82
|
2016-09-26T14:38:31.000Z
|
2018-02-12T18:47:12.000Z
|
tools/create_transmit_grouped_command_cron.py
|
Vayel/GUCEM-BVC
|
e5645dec332756d3c9db083abf2c8f3625a10d4d
|
[
"WTFPL"
] | null | null | null |
import os
from cron_helper import create
JOB_COMMENT = 'BVC transmit grouped command reminder'
HERE = os.path.dirname(os.path.abspath(__file__))
def create_job(cron):
job = cron.new(
command=os.path.join(HERE, 'manage.sh transmit_grouped_command_reminder'),
comment=JOB_COMMENT,
)
job.day.every(1)
job.hour.on(2)
job.minute.on(10)
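    # Taken together, the three setters above are assumed (python-crontab semantics)
    # to schedule the job daily at 02:10.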
if __name__ == '__main__':
create(create_job, JOB_COMMENT)
| 21.142857
| 82
| 0.684685
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 94
| 0.211712
|
a7ddce06c356fe1ffbf7a25faa291f2561e6dd85
| 2,157
|
py
|
Python
|
network/plot_along_subunits.py
|
AspirinCode/MD-analysis-tools-scripts
|
dfc0d282c9a844f5b8b1935a3ae74b1aff577ff9
|
[
"MIT"
] | 5
|
2020-01-29T01:01:54.000Z
|
2022-02-11T09:19:20.000Z
|
network/plot_along_subunits.py
|
AspirinCode/MD-analysis-tools-scripts
|
dfc0d282c9a844f5b8b1935a3ae74b1aff577ff9
|
[
"MIT"
] | null | null | null |
network/plot_along_subunits.py
|
AspirinCode/MD-analysis-tools-scripts
|
dfc0d282c9a844f5b8b1935a3ae74b1aff577ff9
|
[
"MIT"
] | null | null | null |
import glob
import pickle
import numpy as np
import matplotlib.pyplot as plt
# NOTE: pickle_list is never defined in this script; the glob pattern below is an
# assumption so that the plotting loop has input files to iterate over.
pickle_list = sorted(glob.glob("*.pkl"))
i = 1
fig = plt.figure(figsize=(30, 30))
for pickle_file in pickle_list:
c_B = pickle.load(open(pickle_file, "rb"))
#plot_c_B(c_B, f"bet_centrality_with_{pickle_file[19:-4]}.png")
    ax = fig.add_subplot(5, 1, i)
    #label = figname[-6:-4].upper()
    cb, = ax.plot(c_B[:, 0], c_B[:, 1], label=f"frame-{i}")  # Label: 3-Angs
    i += 1  # advance to the next subplot row for the next pickle file
    #break
ss_range = {
"ANK-H1": (4, 13),
"ANK-H2": (17, 27),
"ANK-H3": (46, 53),
"ANK-H4": (61, 71),
"ANK-H5": (81, 86),
"ANK-H6": (103, 111),
"ANK-H7": (139, 146),
"ANK-H8": (151, 157),
"ANK-H9": (175, 181),
"ANK-H10": (187, 208),
"CP1": (223, 231),
"CP2": (235, 242),
"Beta-1": (255, 262),
"Beta-2": (264, 271),
"PreS1-H1": (282, 290),
"PreS1-H2": (296, 299),
"S1": (302, 336),
"S2": (357, 384),
"S3": (397, 418),
"S4": (424, 443),
"S5": (446, 485),
"S6": (545, 575),
"Turret": (486, 519),
"Pore-H": (520, 530),
"TRP-H": (579, 597),
"Beta-3": (613, 636)
}
helix = ["S1", "S2", "S3", "S4", "S5", "S6"]
auxillary_helix = ["PreS1-H1", "PreS1-H2", "Pore-H", "TRP-H"]
ank_repeat = ["ANK-H1", "ANK-H2", "ANK-H3", "ANK-H4", "ANK-H5", "ANK-H6", "ANK-H7",
"ANK-H8", "ANK-H9", "ANK-H10"]
beta_sheet = ["Beta-1", "Beta-2", "Beta-3"]
for hel in helix:
ax.axvspan(ss_range[hel][0], ss_range[hel][1], alpha=0.4, color='#8dd3c7')
for hel in auxillary_helix:
ax.axvspan(ss_range[hel][0], ss_range[hel][1], alpha=0.4, color='#ffffb3')
for repeat in ank_repeat:
ax.axvspan(ss_range[repeat][0], ss_range[repeat][1], alpha=0.4, color='#bebada')
for beta in beta_sheet:
ax.axvspan(ss_range[beta][0], ss_range[beta][1], alpha=0.4, color='#fb8072')
ax.set_xlim(0, 654)
ax.set_xlabel("Residue id of TRPV2", fontsize=20)
ax.set_ylabel("Betweenness centrality", fontsize=20)
plt.legend(fontsize="xx-large", handles=[cb], loc="upper right")
#break
# Render the assembled figure.
plt.show()
| 31.26087
| 88
| 0.51414
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 587
| 0.272137
|
38eb4d628bf96b1cec0ba5a9060d8732e87f164b
| 276
|
py
|
Python
|
runme.py
|
AndreWohnsland/Cocktailmaker_AW
|
30efdcb85d7fb58ac2980c873c611d7b9c2b37b1
|
[
"MIT"
] | 37
|
2019-07-06T11:54:08.000Z
|
2022-01-21T12:26:16.000Z
|
runme.py
|
AndreWohnsland/Cocktailmaker_AW
|
30efdcb85d7fb58ac2980c873c611d7b9c2b37b1
|
[
"MIT"
] | 5
|
2019-12-09T07:44:08.000Z
|
2022-02-01T12:00:24.000Z
|
runme.py
|
AndreWohnsland/Cocktailmaker_AW
|
30efdcb85d7fb58ac2980c873c611d7b9c2b37b1
|
[
"MIT"
] | 4
|
2019-07-06T12:45:01.000Z
|
2021-12-29T17:09:44.000Z
|
import sys
from PyQt5.QtWidgets import QApplication
import src_ui.setup_mainwindow as setupui
if __name__ == "__main__":
app = QApplication(sys.argv)
w = setupui.MainScreen()
w.showFullScreen()
w.setFixedSize(800, 480)
sys.exit(app.exec_())
| 21.230769
| 42
| 0.684783
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 10
| 0.036232
|
38ec2565ef6e55d70bfb76a776cd7b6192708820
| 8,271
|
py
|
Python
|
src/ellalgo/ell_stable.py
|
luk036/ellalgo
|
8e83587b271f35c906c0d0aa4175dac153e5e29b
|
[
"MIT"
] | null | null | null |
src/ellalgo/ell_stable.py
|
luk036/ellalgo
|
8e83587b271f35c906c0d0aa4175dac153e5e29b
|
[
"MIT"
] | null | null | null |
src/ellalgo/ell_stable.py
|
luk036/ellalgo
|
8e83587b271f35c906c0d0aa4175dac153e5e29b
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
import math
from typing import Tuple, Union
import numpy as np
from .cutting_plane import CUTStatus
Arr = Union[np.ndarray]
class ell_stable:
"""Ellipsoid Search Space
ell_stable = {x | (x − xc)' Q^−1 (x − xc) ≤ κ}
Returns:
[type] -- [description]
"""
# __slots__ = ('_n', '_c1', '_kappa', '_rho', '_sigma', '_delta', '_tsq',
# '_xc', '_Q', 'use_parallel_cut', 'no_defer_trick')
def __init__(self, val: Union[Arr, float], x: Arr):
"""Construct a new ell_stable object
Arguments:
val (Union[Arr, float]): [description]
x (Arr): [description]
"""
self.use_parallel_cut = True
self.no_defer_trick = False
self._n = n = len(x)
self._nSq = float(n * n)
self._nPlus1 = float(n + 1)
self._nMinus1 = float(n - 1)
self._halfN = float(n) / 2.0
self._halfNplus1 = self._nPlus1 / 2.0
self._halfNminus1 = self._nMinus1 / 2.0
self._c1 = self._nSq / (self._nSq - 1)
self._c2 = 2.0 / self._nPlus1
self._c3 = float(n) / self._nPlus1
self._xc = x
self._kappa = 1.0
if np.isscalar(val):
self._Q = np.eye(n)
if self.no_defer_trick:
self._Q *= val
else:
self._kappa = val
else:
self._Q = np.diag(val)
def copy(self):
"""[summary]
Returns:
ell_stable: [description]
"""
E = ell_stable(self._kappa, self.xc)
E._Q = self._Q.copy()
# E._c1 = self._c1
E.use_parallel_cut = self.use_parallel_cut
E.no_defer_trick = self.no_defer_trick
return E
@property
def xc(self):
"""copy the whole array anyway
Returns:
[type]: [description]
"""
return self._xc
@xc.setter
def xc(self, x: Arr):
"""Set the xc object
Arguments:
x ([type]): [description]
"""
self._xc = x
# @property
# def use_parallel_cut(self) -> bool:
# """[summary]
# Returns:
# bool: [description]
# """
# return self._use_parallel_cut
# @use_parallel_cut.setter
# def use_parallel_cut(self, b: bool):
# """[summary]
# Arguments:
# b (bool): [description]
# """
# self._use_parallel_cut = b
# Reference: Gill, Murray, and Wright, "Practical Optimization", p43.
# Author: Brian Borchers (borchers@nmt.edu)
def update(self, cut) -> Tuple[int, float]:
g, beta = cut
# calculate inv(L)*g: (n-1)*n/2 multiplications
invLg = g.copy() # initially
for i in range(1, self._n):
for j in range(i):
self._Q[i, j] = self._Q[j, i] * invLg[j]
# keep for rank-one update
invLg[i] -= self._Q[i, j]
# calculate inv(D)*inv(L)*g: n
invDinvLg = invLg.copy() # initially
for i in range(self._n):
invDinvLg[i] *= self._Q[i, i]
# calculate omega: n
gQg = invDinvLg * invLg
omega = sum(gQg)
self._tsq = self._kappa * omega
status = self._calc_ll(beta)
if status != CUTStatus.success:
return status, self._tsq
# calculate Q*g = inv(L')*inv(D)*inv(L)*g : (n-1)*n/2
Qg = invDinvLg.copy() # initially
for i in range(self._n - 1, 0, -1):
for j in range(i, self._n):
Qg[i - 1] -= self._Q[i, j] * Qg[j] # ???
# calculate xc: n
self._xc -= (self._rho / omega) * Qg
# rank-one update: 3*n + (n-1)*n/2
# r = self._sigma / omega
mu = self._sigma / (1.0 - self._sigma)
oldt = omega / mu # initially
m = self._n - 1
for j in range(m):
# p=sqrt(k)*vv(j)
# p = invLg[j]
# mup = mu * p
t = oldt + gQg[j]
# self._Q[j, j] /= t # update invD
beta2 = invDinvLg[j] / t
self._Q[j, j] *= oldt / t # update invD
for k in range(j + 1, self._n):
# v(k) -= p * self._Q[j, k]
self._Q[j, k] += beta2 * self._Q[k, j]
oldt = t
# p = invLg(n1)
# mup = mu * p
t = oldt + gQg[m]
self._Q[m, m] *= oldt / t # update invD
self._kappa *= self._delta
# if (self.no_defer_trick)
# {
# self._Q *= self._kappa
# self._kappa = 1.
# }
return status, self._tsq
def _calc_ll(self, beta) -> CUTStatus:
"""parallel or deep cut
Arguments:
beta ([type]): [description]
Returns:
int: [description]
"""
if np.isscalar(beta):
return self._calc_dc(beta)
if len(beta) < 2: # unlikely
return self._calc_dc(beta[0])
return self._calc_ll_core(beta[0], beta[1])
def _calc_ll_core(self, b0: float, b1: float) -> CUTStatus:
"""Calculate new ellipsoid under Parallel Cut
g' (x − xc) + β0 ≤ 0
g' (x − xc) + β1 ≥ 0
Arguments:
b0 (float): [description]
b1 (float): [description]
Returns:
int: [description]
"""
b1sqn = b1 * (b1 / self._tsq)
t1n = 1 - b1sqn
if t1n < 0 or not self.use_parallel_cut:
return self._calc_dc(b0)
bdiff = b1 - b0
if bdiff < 0:
return CUTStatus.nosoln # no sol'n
if b0 == 0:
self._calc_ll_cc(b1, b1sqn)
return CUTStatus.success
b0b1n = b0 * (b1 / self._tsq)
if self._n * b0b1n < -1: # unlikely
return CUTStatus.noeffect # no effect
# parallel cut
t0n = 1.0 - b0 * (b0 / self._tsq)
# t1 = self._tsq - b1sq
bsum = b0 + b1
bsumn = bsum / self._tsq
bav = bsum / 2.0
tempn = self._halfN * bsumn * bdiff
xi = math.sqrt(t0n * t1n + tempn * tempn)
self._sigma = self._c3 + (1.0 - b0b1n - xi) / (bsumn * bav * self._nPlus1)
self._rho = self._sigma * bav
self._delta = self._c1 * ((t0n + t1n) / 2 + xi / self._n)
return CUTStatus.success
def _calc_ll_cc(self, b1: float, b1sqn: float):
"""Calculate new ellipsoid under Parallel Cut, one of them is central
g' (x − xc) ≤ 0
g' (x − xc) + β1 ≥ 0
Arguments:
b1 (float): [description]
b1sq (float): [description]
"""
n = self._n
xi = math.sqrt(1 - b1sqn + (self._halfN * b1sqn) ** 2)
self._sigma = self._c3 + self._c2 * (1.0 - xi) / b1sqn
self._rho = self._sigma * b1 / 2.0
self._delta = self._c1 * (1.0 - b1sqn / 2.0 + xi / n)
def _calc_dc(self, beta: float) -> CUTStatus:
"""Calculate new ellipsoid under Deep Cut
g' (x − xc) + β ≤ 0
Arguments:
beta (float): [description]
Returns:
int: [description]
"""
try:
tau = math.sqrt(self._tsq)
except ValueError:
print("Warning: tsq is negative: {}".format(self._tsq))
self._tsq = 0.0
tau = 0.0
bdiff = tau - beta
if bdiff < 0.0:
return CUTStatus.nosoln # no sol'n
if beta == 0.0:
self._calc_cc(tau)
return CUTStatus.success
n = self._n
gamma = tau + n * beta
if gamma < 0.0:
return CUTStatus.noeffect # no effect, unlikely
self._mu = (bdiff / gamma) * self._halfNminus1
self._rho = gamma / self._nPlus1
self._sigma = 2.0 * self._rho / (tau + beta)
self._delta = self._c1 * (1.0 - beta * (beta / self._tsq))
return CUTStatus.success
def _calc_cc(self, tau: float):
"""Calculate new ellipsoid under Central Cut
Arguments:
tau (float): [description]
"""
self._mu = self._halfNminus1
self._sigma = self._c2
self._rho = tau / self._nPlus1
self._delta = self._c1
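# Minimal usage sketch (not part of the original module): start from a unit ball
# around the origin and apply one deep cut g'(x - xc) + beta <= 0.
#   x0 = np.zeros(4)
#   E = ell_stable(1.0, x0)           # kappa = 1, Q = identity
#   g = np.array([1.0, 0.0, 0.0, 0.0])
#   status, tsq = E.update((g, 0.1))  # deep cut with beta = 0.1
#   print(status, E.xc)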
| 28.42268
| 82
| 0.489784
| 8,172
| 0.981504
| 0
| 0
| 310
| 0.037233
| 0
| 0
| 2,908
| 0.349267
|
38ec28f788cf955ae54138334251cf48eba69a0b
| 855
|
py
|
Python
|
app/auth/forms.py
|
PhysicsUofRAUI/lifeLongLearning
|
36e098d4319d3500509861454fa3e27a67416802
|
[
"MIT"
] | null | null | null |
app/auth/forms.py
|
PhysicsUofRAUI/lifeLongLearning
|
36e098d4319d3500509861454fa3e27a67416802
|
[
"MIT"
] | 38
|
2020-06-09T00:07:09.000Z
|
2021-02-06T17:18:20.000Z
|
app/auth/forms.py
|
PhysicsUofRAUI/lifeLongLearning
|
36e098d4319d3500509861454fa3e27a67416802
|
[
"MIT"
] | null | null | null |
from flask_wtf import FlaskForm
from wtforms import PasswordField, StringField, SubmitField
from wtforms.validators import DataRequired
#
# Purpose: This form is used to collect the information needed for the user to log
# in and log out.
#
# Fields:
# Password: The password used to validate the user
# Username: The name that the user has chosen to represent them
# Submit: The field the user uses to signal that everything has been filled out.
#
# Returns:
# All the material that the user filled out (basically all the fields, but filled
# out).
#
class LoginForm(FlaskForm):
"""
Form for users to login
"""
username = StringField('Username', validators=[DataRequired()])
password = PasswordField('Password', validators=[DataRequired()])
submit = SubmitField('Login')
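# Hedged usage sketch (route and template names are illustrative, not from this file);
# this is the standard Flask-WTF pattern for consuming the form defined above.
#   @auth.route('/login', methods=['GET', 'POST'])
#   def login():
#       form = LoginForm()
#       if form.validate_on_submit():
#           ...  # check form.username.data / form.password.data
#       return render_template('auth/login.html', form=form)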
| 32.884615
| 85
| 0.71462
| 243
| 0.284211
| 0
| 0
| 0
| 0
| 0
| 0
| 526
| 0.615205
|
38ec8de985f6a5e8fed887f162aa2455ff186416
| 1,365
|
py
|
Python
|
netmiko/f5/f5_tmsh_ssh.py
|
josephwhite13/netmiko
|
c08c5ebb3484383f034e22b9576f88be07525f72
|
[
"MIT"
] | 1
|
2021-04-13T19:18:57.000Z
|
2021-04-13T19:18:57.000Z
|
netmiko/f5/f5_tmsh_ssh.py
|
josephwhite13/netmiko
|
c08c5ebb3484383f034e22b9576f88be07525f72
|
[
"MIT"
] | null | null | null |
netmiko/f5/f5_tmsh_ssh.py
|
josephwhite13/netmiko
|
c08c5ebb3484383f034e22b9576f88be07525f72
|
[
"MIT"
] | null | null | null |
import time
from netmiko.base_connection import BaseConnection
class F5TmshSSH(BaseConnection):
def session_preparation(self):
"""Prepare the session after the connection has been established."""
self._test_channel_read()
self.set_base_prompt()
self.tmsh_mode()
self.set_base_prompt()
self._config_mode = False
cmd = 'run /util bash -c "stty cols 255"'
self.set_terminal_width(command=cmd, pattern="run")
self.disable_paging(
command="modify cli preference pager disabled display-threshold 0"
)
self.clear_buffer()
def tmsh_mode(self, delay_factor=1):
"""tmsh command is equivalent to config command on F5."""
delay_factor = self.select_delay_factor(delay_factor)
self.clear_buffer()
command = f"{self.RETURN}tmsh{self.RETURN}"
self.write_channel(command)
time.sleep(1 * delay_factor)
self.clear_buffer()
return None
def check_config_mode(self, check_string="", pattern=""):
"""Checks if the device is in configuration mode or not."""
return True
def config_mode(self, config_command=""):
"""No config mode for F5 devices."""
return ""
def exit_config_mode(self, exit_config=""):
"""No config mode for F5 devices."""
return ""
| 33.292683
| 78
| 0.641026
| 1,299
| 0.951648
| 0
| 0
| 0
| 0
| 0
| 0
| 399
| 0.292308
|
38eda790aa9bd3615e42c068cced417ca94aa56a
| 2,099
|
py
|
Python
|
tools/database_tool.py
|
noahzhy/qumaishou
|
f776e5c750b350ca3b741fccf3e5dfd199c1296b
|
[
"Apache-2.0"
] | null | null | null |
tools/database_tool.py
|
noahzhy/qumaishou
|
f776e5c750b350ca3b741fccf3e5dfd199c1296b
|
[
"Apache-2.0"
] | null | null | null |
tools/database_tool.py
|
noahzhy/qumaishou
|
f776e5c750b350ca3b741fccf3e5dfd199c1296b
|
[
"Apache-2.0"
] | null | null | null |
import os
import pandas as pd
import sys
import glob
# Allow importing files from sibling directories at the same level
sys.path.append("./")
db_dir_path = 'database'
def db_save(db_name, df):
    # index controls whether row labels are written to the csv (default=True)
    df = remove_repetition(df)
    # DataFrame.to_csv returns None when a path is given, so success is reported explicitly.
    try:
        df.to_csv(os.path.join(db_dir_path, '{}.csv'.format(db_name)), index=False, sep=',')
        return True
    except OSError:
        return False
def remove_repetition(df, key=None):
return df.drop_duplicates(subset=key, keep='first', inplace=False)
def db_brand(db_name, df):
    # the dict keys become the column names in the csv
df = remove_repetition(df)
print('db_brand:', df.shape[0])
db_save(db_name, df)
return df
def db_brand_product(db_name, df):
dataframe = pd.DataFrame(df)
print('brand product:', dataframe.shape[0])
db_save('brand_product/brand_product_{}'.format(db_name), df)
return df
def merge_brand_product_in_one():
# print(os.getcwd())
frames = []
# print(glob.glob(r'database/brand_product_*.csv'))
for i in glob.glob('database/brand_product/brand_product_*.csv'):
df = pd.read_csv(i)
frames.append(df)
result = pd.concat(frames)
# result = remove_repetition(result, 'product_No')
db_save('db_total_product', result)
pass
def intersection_db_brand():
    '''Merge the brand databases into the final English version'''
d1 = pd.read_csv(os.path.join(db_dir_path, 'db_brand_eng.csv'))
d2 = pd.read_csv(os.path.join(db_dir_path, 'db_brand_chn.csv'))
df = pd.merge(d1, d2, how='left', on='brand_name')
df = remove_repetition(df, 'brand_name')
df = df.loc[:, ['dispShopNo_x', 'brand_name', 'brand_url_x']]
db_save('db_brand_final', df)
print('df_merged:', df.shape[0])
return df
def get_FileSize(filePath):
# filePath = unicode(filePath,'utf8')
fsize = os.path.getsize(filePath)
fsize = fsize / float(1024)
return round(fsize, 2)
def check_dir_with_brand_final():
('database/brand_product/brand_product_{}.csv')
pass
def main():
# db_brand_eng()
# db_brand_merge()
# intersection_db_brand()
merge_brand_product_in_one()
pass
if __name__ == "__main__":
main()
| 23.852273
| 92
| 0.666508
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 744
| 0.338644
|
38ef272433c8c121f27894e2882710bf38e90294
| 1,331
|
py
|
Python
|
flika/tests/test_settings.py
|
flika-org/flika
|
68b87e8f75f77f4b59344e418c7783b24184adaa
|
[
"MIT"
] | 19
|
2016-08-11T21:17:17.000Z
|
2021-04-30T19:21:03.000Z
|
flika/tests/test_settings.py
|
flika-org/flika
|
68b87e8f75f77f4b59344e418c7783b24184adaa
|
[
"MIT"
] | 28
|
2017-03-15T18:40:33.000Z
|
2021-06-01T20:35:50.000Z
|
flika/tests/test_settings.py
|
flika-org/flika
|
68b87e8f75f77f4b59344e418c7783b24184adaa
|
[
"MIT"
] | 2
|
2019-03-08T18:51:12.000Z
|
2019-05-05T16:31:15.000Z
|
from .. import global_vars as g
from ..window import Window
import numpy as np
from ..roi import makeROI
class TestSettings():
def test_random_roi_color(self):
initial = g.settings['roi_color']
g.settings['roi_color'] = 'random'
w1 = Window(np.random.random([10, 10, 10]))
roi1 = makeROI('rectangle', [[1, 1], [3, 3]])
roi2 = makeROI('rectangle', [[2, 2], [3, 3]])
assert roi1.pen.color().name() != roi2.pen.color().name(), 'Random ROI color is the same. This could be a random chance. Run repeatedly.'
g.settings['roi_color'] = '#00ff00'
roi3 = makeROI('rectangle', [[3, 3], [3, 3]])
assert roi3.pen.color().name() == "#00ff00", 'ROI color set. all rois are same color'
g.settings['roi_color'] = initial
def test_multitrace(self):
initial = g.settings['multipleTraceWindows']
g.settings['multipleTraceWindows'] = False
w1 = Window(np.random.random([10, 10, 10]))
roi1 = makeROI('rectangle', [[1, 1], [3, 3]])
roi1.plot()
roi2 = makeROI('rectangle', [[2, 2], [3, 3]])
roi2.plot()
assert roi1.traceWindow == roi2.traceWindow, 'Traces not plotted together.'
g.settings['multipleTraceWindows'] = True
roi3 = makeROI('rectangle', [[3, 3], [3, 3]])
roi3.plot()
assert roi3.traceWindow != roi1.traceWindow, 'Multiple trace windows'
g.settings['multipleTraceWindows'] = initial
| 34.128205
| 139
| 0.661908
| 1,224
| 0.919609
| 0
| 0
| 0
| 0
| 0
| 0
| 396
| 0.297521
|
38f003c85d91841bc389c08c6a91fa5429cad832
| 40,888
|
py
|
Python
|
tests/test_runner.py
|
varunvarma/panoptes
|
733e1b17e01d47fe0a399e2fe635f614cc5a0b88
|
[
"Apache-2.0"
] | null | null | null |
tests/test_runner.py
|
varunvarma/panoptes
|
733e1b17e01d47fe0a399e2fe635f614cc5a0b88
|
[
"Apache-2.0"
] | null | null | null |
tests/test_runner.py
|
varunvarma/panoptes
|
733e1b17e01d47fe0a399e2fe635f614cc5a0b88
|
[
"Apache-2.0"
] | null | null | null |
"""
Copyright 2018, Oath Inc.
Licensed under the terms of the Apache 2.0 license. See LICENSE file in project root for terms.
"""
import re
import unittest
from mock import patch, MagicMock, Mock, PropertyMock
from testfixtures import LogCapture
from yahoo_panoptes.framework.plugins.panoptes_base_plugin import PanoptesPluginInfo, PanoptesBasePlugin
from yahoo_panoptes.polling.polling_plugin import PanoptesPollingPlugin
from yahoo_panoptes.polling.polling_plugin_agent import polling_plugin_task, PanoptesPollingPluginKeyValueStore, \
PanoptesSecretsStore, PanoptesPollingPluginAgentKeyValueStore
from yahoo_panoptes.discovery.discovery_plugin_agent import PanoptesDiscoveryPluginAgentKeyValueStore, \
PanoptesDiscoveryPluginKeyValueStore, PanoptesSecretsStore, discovery_plugin_task
from yahoo_panoptes.framework.resources import PanoptesContext, PanoptesResource, PanoptesResourcesKeyValueStore
from yahoo_panoptes.framework.plugins.runner import PanoptesPluginRunner, PanoptesPluginWithEnrichmentRunner
from yahoo_panoptes.framework.metrics import PanoptesMetric, PanoptesMetricsGroupSet
from tests.mock_panoptes_producer import MockPanoptesMessageProducer
from test_framework import PanoptesTestKeyValueStore, panoptes_mock_kazoo_client, panoptes_mock_redis_strict_client
from helpers import get_test_conf_file
_TIMESTAMP = 1
def _callback(*args):
pass
def _callback_with_exception(*args):
raise Exception
class PanoptesTestPluginNoLock(PanoptesBasePlugin):
name = None
signature = None
data = {}
execute_now = True
plugin_object = None
def run(self, context):
pass
class PanoptesTestPluginRaisePluginReleaseException:
name = None
version = None
last_executed = None
last_executed_age = None
last_results = None
last_results_age = None
moduleMtime = None
configMtime = None
signature = None
data = {}
execute_now = True
lock = MagicMock(locked=True, release=MagicMock(side_effect=Exception))
def run(self, context):
raise Exception
class MockPluginExecuteNow:
execute_now = False
class MockPluginLockException:
name = None
signature = None
data = {}
execute_now = True
lock = MagicMock(side_effect=Exception)
class MockPluginLockNone:
name = None
signature = None
data = {}
execute_now = True
lock = None
class MockPluginLockIsNotLocked:
name = None
signature = None
data = {}
execute_now = True
lock = MagicMock(locked=False)
_, global_panoptes_test_conf_file = get_test_conf_file()
class TestPanoptesPluginRunner(unittest.TestCase):
@staticmethod
def extract(record):
message = record.getMessage()
match_obj = re.match(r'(?P<name>.*):\w+(?P<body>.*)', message)
if match_obj:
message = match_obj.group('name') + match_obj.group('body')
match_obj = re.match(r'(?P<start>.*[R|r]an in\s)\d+\.?\d*.*(?P<end>seconds.*)', message)
if match_obj:
return record.name, record.levelname, match_obj.group('start') + match_obj.group('end')
match_obj = re.match(r'(?P<start>.*took\s*)\d+\.?\d*.*(?P<seconds>seconds\D*)\d+\s(?P<end>garbage objects.*)',
message)
if match_obj:
return record.name, record.levelname, match_obj.group('start') + match_obj.group('seconds') + \
match_obj.group('end')
match_obj = re.match(
r'(?P<start>Attempting to get lock for plugin .*with lock path) \".*\".*(?P<id> and identifier).*'
r'(?P<in> in) \d\.?\d*(?P<seconds> seconds)',
message)
if match_obj:
return record.name, record.levelname, match_obj.group('start') + match_obj.group('id') + \
match_obj.group('in') + match_obj.group('seconds')
match_obj = re.match(
r'(?P<delete>Deleting module: yapsy_loaded_plugin_Test_Polling_Plugin_Second_Instance|'
r'Deleting module: yapsy_loaded_plugin_Test_Polling_Plugin).*',
message
)
if match_obj:
return record.name, record.levelname, match_obj.group('delete')
return record.name, record.levelname, message
@patch('redis.StrictRedis', panoptes_mock_redis_strict_client)
@patch('kazoo.client.KazooClient', panoptes_mock_kazoo_client)
def setUp(self):
self.my_dir, self.panoptes_test_conf_file = get_test_conf_file()
self._panoptes_context = PanoptesContext(self.panoptes_test_conf_file,
key_value_store_class_list=[PanoptesTestKeyValueStore,
PanoptesResourcesKeyValueStore,
PanoptesPollingPluginKeyValueStore,
PanoptesSecretsStore,
PanoptesPollingPluginAgentKeyValueStore,
PanoptesDiscoveryPluginAgentKeyValueStore,
PanoptesDiscoveryPluginKeyValueStore],
create_message_producer=False, async_message_producer=False,
create_zookeeper_client=True)
self._runner_class = PanoptesPluginRunner
self._log_capture = LogCapture(attributes=self.extract)
def tearDown(self):
self._log_capture.uninstall()
def test_logging_methods(self):
runner = self._runner_class("Test Polling Plugin", "polling", PanoptesPollingPlugin, PanoptesPluginInfo,
None, self._panoptes_context, PanoptesTestKeyValueStore,
PanoptesTestKeyValueStore, PanoptesTestKeyValueStore, "plugin_logger",
PanoptesMetricsGroupSet, _callback)
# Ensure logging methods run:
runner.info(PanoptesTestPluginNoLock(), "Test Info log message")
runner.warn(PanoptesTestPluginNoLock(), "Test Warning log message")
runner.error(PanoptesTestPluginNoLock(), "Test Error log message", Exception)
runner.exception(PanoptesTestPluginNoLock(), "Test Exception log message")
self._log_capture.check(('panoptes.tests.test_runner', 'INFO', '[None] [{}] Test Info log message'),
('panoptes.tests.test_runner', 'WARNING', '[None] [{}] Test Warning log message'),
('panoptes.tests.test_runner', 'ERROR',
"[None] [{}] Test Error log message: <type 'exceptions.Exception'>"),
('panoptes.tests.test_runner', 'ERROR', '[None] [{}] Test Exception log message:'))
def test_basic_operations(self):
runner = self._runner_class("Test Polling Plugin", "polling", PanoptesPollingPlugin, PanoptesPluginInfo,
None, self._panoptes_context, PanoptesTestKeyValueStore,
PanoptesTestKeyValueStore, PanoptesTestKeyValueStore, "plugin_logger",
PanoptesMetricsGroupSet, _callback)
runner.execute_plugin()
self._log_capture.check_present(('panoptes.tests.test_runner', 'INFO',
'Attempting to execute plugin "Test Polling Plugin"'),
('panoptes.tests.test_runner', 'DEBUG',
'''Starting Plugin Manager for "polling" plugins with the following '''
'''configuration: {'polling': <class'''
""" 'yahoo_panoptes.polling.polling_plugin.PanoptesPollingPlugin'>}, """
"""['tests/plugins/polling'], panoptes-plugin"""),
('panoptes.tests.test_runner', 'DEBUG', 'Found 3 plugins'),
('panoptes.tests.test_runner', 'DEBUG',
'Loaded plugin '
'"Test Polling Plugin", version "0.1" of type "polling"'
', category "polling"'),
('panoptes.tests.test_runner',
'DEBUG',
'Loaded plugin "Test Polling Plugin 2", '
'version "0.1" of type "polling", category "polling"'),
('panoptes.tests.test_runner', 'DEBUG',
'Loaded plugin "Test Polling Plugin Second Instance", '
'version "0.1" of type "polling", category "polling"'),
('panoptes.tests.test_runner', 'INFO',
'''[Test Polling Plugin] [None] '''
'''Attempting to get lock for plugin "Test Polling Plugin"'''),
('panoptes.tests.test_runner', 'DEBUG',
'Attempting to get lock for plugin "Test Polling Plugin", with lock path and '
'identifier in seconds'),
('panoptes.tests.test_runner',
'INFO',
'[Test Polling Plugin] [None] Acquired lock'),
('panoptes.tests.test_runner',
'INFO',
'[Test Polling Plugin] [None]'
' Ran in seconds'),
('panoptes.tests.test_runner',
'INFO',
'[Test Polling Plugin] [None] Released lock'),
('panoptes.tests.test_runner',
'INFO',
'[Test Polling Plugin] [None] Plugin returned'
' a result set with 1 members'),
('panoptes.tests.test_runner',
'INFO',
'[Test Polling Plugin] [None]'
' Callback function ran in seconds'),
('panoptes.tests.test_runner',
'INFO',
'[Test Polling Plugin] [None] GC took seconds. There are garbage objects.'),
('panoptes.tests.test_runner',
'DEBUG',
'Deleting module: yapsy_loaded_plugin_Test_Polling_Plugin'),
('panoptes.tests.test_runner',
'DEBUG',
'Deleting module: yapsy_loaded_plugin_Test_Polling_Plugin'),
('panoptes.tests.test_runner',
'DEBUG',
'Deleting module: yapsy_loaded_plugin_Test_Polling_Plugin_Second_Instance'),
order_matters=False
)
def test_nonexistent_plugin(self):
runner = self._runner_class("Non-existent Plugin", "polling", PanoptesPollingPlugin, PanoptesPluginInfo,
None, self._panoptes_context, PanoptesTestKeyValueStore,
PanoptesTestKeyValueStore, PanoptesTestKeyValueStore, "plugin_logger",
PanoptesMetricsGroupSet, _callback)
runner.execute_plugin()
self._log_capture.check_present(('panoptes.tests.test_runner', 'INFO',
'Attempting to execute plugin "Non-existent Plugin"'),
('panoptes.tests.test_runner', 'DEBUG',
'Starting Plugin Manager for "polling" plugins with the following '
"configuration: {'polling': <class 'yahoo_panoptes.polling.polling_plugin."
"PanoptesPollingPlugin'>}, "
"['tests/plugins/polling'], panoptes-plugin"),
('panoptes.tests.test_runner', 'DEBUG', 'Found 3 plugins'),
('panoptes.tests.test_runner', 'DEBUG',
'Loaded plugin "Test Polling Plugin", version "0.1" of type "polling", '
'category "polling"'),
('panoptes.tests.test_runner', 'DEBUG',
'Loaded plugin "Test Polling Plugin Second Instance", version "0.1" of type '
'"polling", category "polling"'),
('panoptes.tests.test_runner', 'WARNING',
'No plugin named "Non-existent Plugin" found in "'
'''['tests/plugins/polling']"'''),
order_matters=False)
def test_bad_plugin_type(self):
runner = self._runner_class("Test Polling Plugin", "bad", PanoptesPollingPlugin, PanoptesPluginInfo,
None, self._panoptes_context, PanoptesTestKeyValueStore,
PanoptesTestKeyValueStore, PanoptesTestKeyValueStore, "plugin_logger",
PanoptesMetricsGroupSet, _callback)
runner.execute_plugin()
self._log_capture.check_present(('panoptes.tests.test_runner', 'ERROR',
'''Error trying to load plugin "Test Polling Plugin": KeyError('bad',)'''))
def test_execute_now_false(self):
mock_get_plugin_by_name = MagicMock(return_value=MockPluginExecuteNow())
with patch('yahoo_panoptes.framework.plugins.runner.PanoptesPluginManager.getPluginByName',
mock_get_plugin_by_name):
runner = self._runner_class("Test Polling Plugin", "polling", PanoptesPollingPlugin, PanoptesPluginInfo,
None, self._panoptes_context, PanoptesTestKeyValueStore,
PanoptesTestKeyValueStore, PanoptesTestKeyValueStore, "plugin_logger",
PanoptesMetricsGroupSet, _callback)
runner.execute_plugin()
self._log_capture.check_present(('panoptes.tests.test_runner', 'INFO',
'Attempting to execute plugin "Test Polling Plugin"'),
('panoptes.tests.test_runner', 'DEBUG',
'''Starting Plugin Manager for '''
'''"polling" plugins with the '''
'''following configuration: {'polling': '''
"""<class 'yahoo_panoptes.polling.polling_plugin.PanoptesPollingPlugin'"""
""">}, ['tests/plugins/polling'], panoptes-plugin"""),
('panoptes.tests.test_runner', 'DEBUG', 'Found 3 plugins'),
('panoptes.tests.test_runner', 'DEBUG',
'Loaded plugin '
'"Test Polling Plugin", version "0.1" of type "polling"'
', category "polling"'),
('panoptes.tests.test_runner', 'DEBUG',
'Loaded plugin "Test Polling Plugin Second Instance", '
'version "0.1" of type "polling", category "polling"'),
order_matters=False)
def test_callback_failure(self):
runner = self._runner_class("Test Polling Plugin", "polling", PanoptesPollingPlugin, PanoptesPluginInfo,
None, self._panoptes_context, PanoptesTestKeyValueStore,
PanoptesTestKeyValueStore, PanoptesTestKeyValueStore, "plugin_logger",
PanoptesMetricsGroupSet, _callback_with_exception)
runner.execute_plugin()
self._log_capture.check_present(('panoptes.tests.test_runner', 'ERROR',
'[Test Polling Plugin] '
'[None] Results callback function failed: :'))
def test_lock_no_lock_object(self):
mock_plugin = MagicMock(return_value=PanoptesTestPluginNoLock)
mock_get_context = MagicMock(return_value=self._panoptes_context)
with patch('yahoo_panoptes.framework.plugins.runner.PanoptesPluginManager.getPluginByName',
mock_plugin):
with patch('yahoo_panoptes.framework.plugins.runner.PanoptesPluginRunner._get_context', mock_get_context):
runner = self._runner_class("Test Polling Plugin", "polling", PanoptesPollingPlugin, PanoptesPluginInfo,
None, self._panoptes_context, PanoptesTestKeyValueStore,
PanoptesTestKeyValueStore, PanoptesTestKeyValueStore, "plugin_logger",
PanoptesMetricsGroupSet, _callback)
runner.execute_plugin()
self._log_capture.check_present(('panoptes.tests.test_runner', 'ERROR',
'[None] [{}] Error in acquiring lock:'))
def test_lock_is_none(self):
mock_get_plugin_by_name = MagicMock(return_value=MockPluginLockNone())
mock_get_context = MagicMock(return_value=self._panoptes_context)
with patch('yahoo_panoptes.framework.plugins.runner.PanoptesPluginManager.getPluginByName',
mock_get_plugin_by_name):
with patch('yahoo_panoptes.framework.plugins.runner.PanoptesPluginRunner._get_context', mock_get_context):
runner = self._runner_class("Test Polling Plugin", "polling", PanoptesPollingPlugin,
PanoptesPluginInfo, None, self._panoptes_context, PanoptesTestKeyValueStore,
PanoptesTestKeyValueStore, PanoptesTestKeyValueStore, "plugin_logger",
PanoptesMetricsGroupSet, _callback)
runner.execute_plugin()
self._log_capture.check_present(('panoptes.tests.test_runner', 'INFO',
'[None] [{}] Attempting to get lock for plugin'
' "Test Polling Plugin"'))
def test_lock_is_not_locked(self):
mock_get_plugin_by_name = MagicMock(return_value=MockPluginLockIsNotLocked())
mock_get_context = MagicMock(return_value=self._panoptes_context)
with patch('yahoo_panoptes.framework.plugins.runner.PanoptesPluginManager.getPluginByName',
mock_get_plugin_by_name):
with patch('yahoo_panoptes.framework.plugins.runner.PanoptesPluginRunner._get_context', mock_get_context):
runner = self._runner_class("Test Polling Plugin", "polling", PanoptesPollingPlugin,
PanoptesPluginInfo, None, self._panoptes_context, PanoptesTestKeyValueStore,
PanoptesTestKeyValueStore, PanoptesTestKeyValueStore, "plugin_logger",
PanoptesMetricsGroupSet, _callback)
runner.execute_plugin()
self._log_capture.check_present(('panoptes.tests.test_runner', 'INFO',
'[None] [{}] Attempting to get lock for plugin'
' "Test Polling Plugin"'))
def test_plugin_failure(self):
mock_plugin = MagicMock(return_value=PanoptesTestPluginRaisePluginReleaseException)
mock_get_context = MagicMock(return_value=self._panoptes_context)
with patch('yahoo_panoptes.framework.plugins.runner.PanoptesPluginManager.getPluginByName',
mock_plugin):
with patch('yahoo_panoptes.framework.plugins.runner.PanoptesPluginRunner._get_context', mock_get_context):
runner = self._runner_class("Test Polling Plugin", "polling", PanoptesPollingPlugin, PanoptesPluginInfo,
None, self._panoptes_context, PanoptesTestKeyValueStore,
PanoptesTestKeyValueStore, PanoptesTestKeyValueStore, "plugin_logger",
PanoptesMetricsGroupSet, _callback)
runner.execute_plugin()
self._log_capture.check_present(('panoptes.tests.test_runner', 'ERROR',
'[None] [{}] Failed to execute plugin:'),
('panoptes.tests.test_runner', 'INFO',
'[None] [{}] Ran in seconds'),
('panoptes.tests.test_runner', 'ERROR',
'[None] [{}] Failed to release lock for plugin:'),
('panoptes.tests.test_runner', 'WARNING',
'[None] [{}] Plugin did not return any results'),
order_matters=False)
def test_plugin_wrong_result_type(self):
runner = self._runner_class("Test Polling Plugin 2", "polling", PanoptesPollingPlugin, PanoptesPluginInfo,
None, self._panoptes_context, PanoptesTestKeyValueStore,
PanoptesTestKeyValueStore, PanoptesTestKeyValueStore, "plugin_logger",
PanoptesMetricsGroupSet, _callback)
runner.execute_plugin()
self._log_capture.check_present(('panoptes.tests.test_runner', 'WARNING',
'[Test Polling Plugin 2] [None] Plugin returned an unexpected result type: '
'"PanoptesMetricsGroup"'))
class TestPanoptesPluginWithEnrichmentRunner(TestPanoptesPluginRunner):
@patch('redis.StrictRedis', panoptes_mock_redis_strict_client)
@patch('kazoo.client.KazooClient', panoptes_mock_kazoo_client)
def setUp(self):
super(TestPanoptesPluginWithEnrichmentRunner, self).setUp()
self._panoptes_resource = PanoptesResource(resource_site="test", resource_class="test",
resource_subclass="test", resource_type="test", resource_id="test",
resource_endpoint="test", resource_creation_timestamp=_TIMESTAMP,
resource_plugin="test")
self._runner_class = PanoptesPluginWithEnrichmentRunner
def test_basic_operations(self):
# Test where enrichment is None
mock_panoptes_enrichment_cache = Mock(return_value=None)
with patch('yahoo_panoptes.framework.plugins.runner.PanoptesEnrichmentCache', mock_panoptes_enrichment_cache):
runner = self._runner_class("Test Polling Plugin", "polling", PanoptesPollingPlugin, PanoptesPluginInfo,
self._panoptes_resource, self._panoptes_context, PanoptesTestKeyValueStore,
PanoptesTestKeyValueStore, PanoptesTestKeyValueStore, "plugin_logger",
PanoptesMetricsGroupSet, _callback)
runner.execute_plugin()
self._log_capture.check_present(('panoptes.tests.test_runner', 'ERROR',
'[Test Polling Plugin] [plugin|test|site|test|class|test|subclass|test|'
'type|test|id|test|endpoint|test] '
'Could not setup context for plugin:'),
order_matters=False
)
self._log_capture.uninstall()
self._log_capture = LogCapture(attributes=self.extract)
# Test with enrichment
runner = self._runner_class("Test Polling Plugin", "polling", PanoptesPollingPlugin, PanoptesPluginInfo,
self._panoptes_resource, self._panoptes_context, PanoptesTestKeyValueStore,
PanoptesTestKeyValueStore, PanoptesTestKeyValueStore, "plugin_logger",
PanoptesMetricsGroupSet, _callback)
runner.execute_plugin()
self._log_capture.check_present(('panoptes.tests.test_runner', 'INFO',
'Attempting to execute plugin "Test Polling Plugin"'),
('panoptes.tests.test_runner', 'DEBUG',
'''Starting Plugin Manager for "polling" plugins with the following '''
'''configuration: {'polling': <class'''
""" 'yahoo_panoptes.polling.polling_plugin.PanoptesPollingPlugin'>}, """
"""['tests/plugins/polling'], panoptes-plugin"""),
('panoptes.tests.test_runner', 'DEBUG', 'Found 3 plugins'),
('panoptes.tests.test_runner', 'DEBUG',
'Loaded plugin '
'"Test Polling Plugin", version "0.1" of type "polling"'
', category "polling"'),
('panoptes.tests.test_runner',
'DEBUG',
'Loaded plugin "Test Polling Plugin 2", '
'version "0.1" of type "polling", category "polling"'),
('panoptes.tests.test_runner', 'DEBUG',
'Loaded plugin "Test Polling Plugin Second Instance", '
'version "0.1" of type "polling", category "polling"'),
('panoptes.tests.test_runner',
'INFO',
'[Test Polling Plugin] [plugin|test|site|test|class|test|subclass|test|'
'type|test|id|test|endpoint|test] Attempting to get lock for plugin '
'"Test Polling Plugin"'),
('panoptes.tests.test_runner', 'DEBUG',
'Attempting to get lock for plugin "Test Polling Plugin", with lock path and '
'identifier in seconds'),
('panoptes.tests.test_runner',
'INFO',
'[Test Polling Plugin] [plugin|test|site|test|class|test|subclass|test|'
'type|test|id|test|endpoint|test] Acquired lock'),
('panoptes.tests.test_runner',
'INFO',
'[Test Polling Plugin] [plugin|test|site|test|class|test|subclass|test|'
'type|test|id|test|endpoint|test]'
' Ran in seconds'),
('panoptes.tests.test_runner',
'INFO',
'[Test Polling Plugin] [plugin|test|site|test|class|test|subclass|test|'
'type|test|id|test|endpoint|test] Released lock'),
('panoptes.tests.test_runner',
'INFO',
'[Test Polling Plugin] [plugin|test|site|test|class|test|subclass|test|'
'type|test|id|test|endpoint|test] Plugin returned'
' a result set with 1 members'),
('panoptes.tests.test_runner',
'INFO',
'[Test Polling Plugin] [plugin|test|site|test|class|test|subclass|test|'
'type|test|id|test|endpoint|test]'
' Callback function ran in seconds'),
('panoptes.tests.test_runner',
'INFO',
'[Test Polling Plugin] [plugin|test|site|test|class|test|subclass|test|type|'
'test|id|test|endpoint|test] GC took seconds. There are garbage objects.'),
('panoptes.tests.test_runner',
'ERROR',
'No enrichment data found on KV store for plugin Test Polling Plugin '
'resource test namespace test using key test'),
('panoptes.tests.test_runner',
'DEBUG',
'Successfully created PanoptesEnrichmentCache enrichment_data {} for plugin '
'Test Polling Plugin'),
order_matters=False
)
def test_callback_failure(self):
runner = self._runner_class("Test Polling Plugin", "polling", PanoptesPollingPlugin, PanoptesPluginInfo,
self._panoptes_resource, self._panoptes_context, PanoptesTestKeyValueStore,
PanoptesTestKeyValueStore, PanoptesTestKeyValueStore, "plugin_logger",
PanoptesMetricsGroupSet, _callback_with_exception)
runner.execute_plugin()
self._log_capture.check_present(('panoptes.tests.test_runner', 'ERROR',
'[Test Polling Plugin] '
'[plugin|test|site|test|class|test|subclass|test|'
'type|test|id|test|endpoint|test] Results callback function failed: :'))
# 'pass' is needed for these methods because the only difference in their logging output from
# TestPanoptesPluginRunner is the presence of the PanoptesResource in some log messages.
def test_lock_no_lock_object(self):
pass
def test_lock_is_none(self):
pass
def test_lock_is_not_locked(self):
pass
def test_plugin_failure(self):
pass
def test_plugin_wrong_result_type(self):
runner = self._runner_class("Test Polling Plugin 2", "polling", PanoptesPollingPlugin, PanoptesPluginInfo,
None, self._panoptes_context, PanoptesTestKeyValueStore,
PanoptesTestKeyValueStore, PanoptesTestKeyValueStore, "plugin_logger",
PanoptesMetric, _callback)
runner.execute_plugin()
self._log_capture.check_present(('panoptes.tests.test_runner',
'ERROR',
'[Test Polling Plugin 2] [None] Could not setup context for plugin:'))
class TestPanoptesPollingPluginRunner(TestPanoptesPluginWithEnrichmentRunner):
@patch('yahoo_panoptes.framework.metrics.time')
@patch('yahoo_panoptes.framework.context.PanoptesContext._get_message_producer')
@patch('yahoo_panoptes.framework.context.PanoptesContext.message_producer', new_callable=PropertyMock)
@patch('yahoo_panoptes.polling.polling_plugin_agent.PanoptesPollingTaskContext')
@patch('yahoo_panoptes.framework.resources.PanoptesResourceStore.get_resource')
def test_polling_plugin_agent(self, resource, panoptes_context, message_producer, message_producer_property, time):
producer = MockPanoptesMessageProducer()
time.return_value = 1
message_producer.return_value = producer
message_producer_property.return_value = producer
resource.return_value = self._panoptes_resource
panoptes_context.return_value = self._panoptes_context
polling_plugin_task('Test Polling Plugin', 'polling')
log_prefix = '[Test Polling Plugin] [plugin|test|site|test|class|test|' \
'subclass|test|type|test|id|test|endpoint|test]'
self._log_capture.check_present(
('panoptes.tests.test_runner', 'INFO', 'Attempting to execute plugin "Test Polling Plugin"'),
('panoptes.tests.test_runner', 'DEBUG',
'''Starting Plugin Manager for "polling" plugins with the following '''
'''configuration: {'polling': <class'''
""" 'yahoo_panoptes.polling.polling_plugin.PanoptesPollingPlugin'>}, """
"""['tests/plugins/polling'], panoptes-plugin"""),
('panoptes.tests.test_runner', 'DEBUG', 'Loaded plugin "Test Polling Plugin", '
'version "0.1" of type "polling", category "polling"'),
('panoptes.tests.test_runner', 'DEBUG', 'Loaded plugin "Test Polling Plugin 2", '
'version "0.1" of type "polling", category "polling"'),
('panoptes.tests.test_runner', 'ERROR', 'No enrichment data found on KV store for plugin Test'
' Polling Plugin resource test namespace test using key test'),
('panoptes.tests.test_runner', 'DEBUG', 'Successfully created PanoptesEnrichmentCache enrichment_data '
'{} for plugin Test Polling Plugin'),
('panoptes.tests.test_runner', 'DEBUG', 'Attempting to get lock for plugin "Test Polling Plugin", '
'with lock path and identifier in seconds'),
('panoptes.tests.test_runner', 'INFO', '{} Acquired lock'.format(log_prefix)),
('panoptes.tests.test_runner', 'INFO', '{} Plugin returned a result set with 1 members'.format(log_prefix)),
('panoptes.tests.test_runner', 'INFO', '{} Callback function ran in seconds'.format(log_prefix)),
('panoptes.tests.test_runner', 'INFO', '{} Ran in seconds'.format(log_prefix)),
('panoptes.tests.test_runner', 'INFO', '{} Released lock'.format(log_prefix)),
('panoptes.tests.test_runner', 'INFO', '{} GC took seconds. There are garbage objects.'.format(log_prefix)),
('panoptes.tests.test_runner', 'DEBUG', 'Deleting module: yapsy_loaded_plugin_Test_Polling_Plugin'),
('panoptes.tests.test_runner', 'DEBUG', 'Deleting module: yapsy_loaded_plugin_Test_Polling_Plugin'),
('panoptes.tests.test_runner', 'DEBUG', 'Deleting module: '
'yapsy_loaded_plugin_Test_Polling_Plugin_Second_Instance'),
order_matters=False
)
kafka_push_log = '"{"metrics_group_interval": 60, "resource": {"resource_site": "test", ' \
'"resource_class": "test", "resource_subclass": "test", "resource_type": ' \
'"test", "resource_id": "test", "resource_endpoint": "test", "resource_metadata":' \
' {"_resource_ttl": "604800"}, "resource_creation_timestamp": 1.0, ' \
'"resource_plugin": "test"}, "dimensions": [], "metrics_group_type": "Test", ' \
'"metrics": [{"metric_creation_timestamp": 1.0, "metric_type": "gauge", ' \
'"metric_name": "test", "metric_value": 0.0}], "metrics_group_creation_timestamp": ' \
'1.0, "metrics_group_schema_version": "0.2"}" to topic "test-processed" ' \
'with key "test:test" and partitioning key "test|Test|"'
# Timestamps need to be removed to check Panoptes Metrics
metric_groups_seen = 0
for line in self._log_capture.actual():
_, _, log = line
            if 'metric group' in log:  # str.find() returns -1 (truthy) when absent, so use a membership test
log = re.sub(r"resource_creation_timestamp\": \d+\.\d+,",
"resource_creation_timestamp\": 1.0,",
log)
if log.startswith('Sent metric group'):
metric_groups_seen += 1
self.assertEqual(log.strip(), "Sent metric group {}".format(kafka_push_log))
if log.startswith('Going to send metric group'):
metric_groups_seen += 1
self.assertEqual(log.strip(), "Going to send metric group {}".format(kafka_push_log))
self.assertEqual(metric_groups_seen, 2)
class TestPanoptesDiscoveryPluginRunner(TestPanoptesPluginRunner):
@patch('yahoo_panoptes.framework.context.PanoptesContext._get_message_producer')
@patch('yahoo_panoptes.framework.context.PanoptesContext.message_producer', new_callable=PropertyMock)
@patch('yahoo_panoptes.discovery.discovery_plugin_agent.PanoptesDiscoveryTaskContext')
def test_discovery_plugin_task(self, panoptes_context, message_producer_property, message_producer):
producer = MockPanoptesMessageProducer()
message_producer_property.return_value = message_producer.return_value = producer
panoptes_context.return_value = self._panoptes_context
discovery_plugin_task("Test Discovery Plugin")
plugin_result = producer.messages
self.assertEqual(len(plugin_result), 1)
plugin_result = plugin_result[0]
self.assertTrue('Test_Discovery_Plugin' in plugin_result['key'])
plugin_result['key'] = 'Test_Discovery_Plugin'
expected_result = {
'topic': 'test_site-resources',
'message': '{"resource_set_creation_timestamp": 1.0, '
'"resource_set_schema_version": "0.1", "resources": '
'[{"resource_site": "test_site", "resource_class": '
'"test_class", "resource_subclass": "test_subclass", '
'"resource_type": "test_type", "resource_id": '
'"test_resource_id", "resource_endpoint": '
'"test_resource_endpoint", "resource_metadata": '
'{"_resource_ttl": "604800"},'
' "resource_creation_timestamp": 1.0,'
' "resource_plugin": "test_resource_plugin"}]}',
'key': 'Test_Discovery_Plugin'}
plugin_result['message'] = re.sub(
r"resource_set_creation_timestamp\": \d+\.\d+,",
"resource_set_creation_timestamp\": 1.0,",
plugin_result['message'])
plugin_result['message'] = re.sub(
r"resource_creation_timestamp\": \d+\.\d+,",
"resource_creation_timestamp\": 1.0,",
plugin_result['message'])
self.assertEqual(plugin_result, expected_result)
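# Illustrative note (not part of the original tests): the re.sub calls above pin
# floating-point creation timestamps to a fixed value so that the serialized JSON
# payloads can be compared verbatim. For example (the sample payload is an assumption):
#   re.sub(r"resource_creation_timestamp\": \d+\.\d+,",
#          "resource_creation_timestamp\": 1.0,",
#          '"resource_creation_timestamp": 1591234567.89, "resource_plugin": "test"')
#   -> '"resource_creation_timestamp": 1.0, "resource_plugin": "test"'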
| 60.574815
| 123
| 0.531183
| 39,358
| 0.962581
| 0
| 0
| 11,389
| 0.278541
| 0
| 0
| 14,233
| 0.348097
|
38f0f18dc070774e4c59dd082f508779d0e46e34
| 940
|
py
|
Python
|
root/tpd_near_trainstops_per_line.py
|
transitanalystisarel/TransitAnalystIsrael
|
341de9272b352c18333ff136a00de0b97cd82216
|
[
"MIT"
] | null | null | null |
root/tpd_near_trainstops_per_line.py
|
transitanalystisarel/TransitAnalystIsrael
|
341de9272b352c18333ff136a00de0b97cd82216
|
[
"MIT"
] | null | null | null |
root/tpd_near_trainstops_per_line.py
|
transitanalystisarel/TransitAnalystIsrael
|
341de9272b352c18333ff136a00de0b97cd82216
|
[
"MIT"
] | 3
|
2019-05-08T04:36:03.000Z
|
2020-11-23T19:46:52.000Z
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# collect a set of trip_ids at all stops in a GTFS file over the selected week of the service period starting at serviceweekstartdate
# filter stops near trainstations based on input txt file - stopsneartrainstop_post_edit
# merge sets of trips at stops near each trainstation to count trips per hour and per day
#
#
import transitanalystisrael_config as cfg
import process_date
import trip_ids_at_stops_merge_near_trainstops_perday_v3
import stopswtrainstopidsandtpdperline_v1
import time
#
print("Local current time :", time.asctime( time.localtime(time.time()) ))
#
processdate = process_date.get_date_now()
trip_ids_at_stops_merge_near_trainstops_perday_v3.main(processdate, cfg.gtfspath, cfg.gtfsdirbase, cfg.processedpath, processdate)
stopswtrainstopidsandtpdperline_v1.main(processdate, cfg.processedpath)
print("Local current time :", time.asctime( time.localtime(time.time()) ))
| 40.869565
| 134
| 0.811702
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 404
| 0.429787
|
38f18e910ceb0b6a8c30ade6eeea28431583e6f1
| 5,252
|
py
|
Python
|
merlin/modules/normalization.py
|
ethereon/merlin
|
0babfed51e65197086d74479a1ca9150259b4f7f
|
[
"BSD-3-Clause"
] | 1
|
2019-08-15T16:22:20.000Z
|
2019-08-15T16:22:20.000Z
|
merlin/modules/normalization.py
|
ethereon/merlin
|
0babfed51e65197086d74479a1ca9150259b4f7f
|
[
"BSD-3-Clause"
] | null | null | null |
merlin/modules/normalization.py
|
ethereon/merlin
|
0babfed51e65197086d74479a1ca9150259b4f7f
|
[
"BSD-3-Clause"
] | null | null | null |
from typing import Callable, Optional, Union
import tensorflow as tf
from merlin.initializers import Init
from merlin.modules.keras import KerasAdapter
from merlin.shape import Axis
from merlin.spec import DynamicSpec, Spec
class BatchNormalization(KerasAdapter, tf.keras.layers.BatchNormalization):
class Config(Spec):
# The axis along which the normalization will be performed.
# If unspecified, the active context's channel axis is used.
axis: Optional[int] = None
# Momentum for the moving average.
momentum: float = 0.99
# Small float added to variance to avoid dividing by zero.
epsilon: float = 1e-3
# Whether to include the bias term "beta"
center: bool = True
        # Whether to include the scaling term "gamma"
scale: bool = True
# Bias initializer
beta_initializer: Init.Descriptor = 'zeros'
# Scale initializer
gamma_initializer: Init.Descriptor = 'ones'
# Moving mean initializer
moving_mean_initializer: Init.Descriptor = 'zeros'
# Moving variance initializer
moving_variance_initializer: Init.Descriptor = 'ones'
# Whether to use Batch Renormalization
# See: https://arxiv.org/abs/1702.03275
# This adds extra variables during training.
# Inference remains the same.
renorm: bool = False
# A dictionary that may map keys {rmax, rmin, dmax} to
# scalar Tensors used to clip the renorm correction. The correction
# (r, d) is used as:
# corrected_value = normalized_value * r + d
# with r clipped to [rmin, rmax], and d to [-dmax, dmax].
# Missing {rmax, rmin, dmax} are set to {inf, 0, inf} respectively.
renorm_clipping: Optional[dict] = None
# Momentum used to update the moving means and standard
# deviations with renorm. Unlike `momentum`, this affects training
# and should be neither too small (which would add noise) nor too large
# (which would give stale estimates). Note that `momentum` is still applied
# to get the means and variances for inference.
renorm_momentum: float = 0.99
# Whether to use the (faster) fused batch normalization implementation.
# If None, uses the fused implementation whenever possible.
fused: Optional[bool] = None
# Whether the batch norm parameters are "trainable".
# This also switches the batch norm to inference mode.
trainable: bool = True
# By default, `virtual_batch_size` is `None`,
# which means batch normalization is performed across the whole batch. When
# `virtual_batch_size` is not `None`, instead perform "Ghost Batch
# Normalization", which creates virtual sub-batches which are each
# normalized separately (with shared gamma, beta, and moving statistics).
# Must divide the actual batch size during execution.
virtual_batch_size: Optional[int] = None
# A function taking the Tensor containing the (dynamic) shape of
# the input tensor and returning a pair (scale, bias) to apply to the
# normalized values (before gamma and beta), only during training.
# For example, if axis is -1, then:
# adjustment = lambda shape: (
# tf.random.uniform(shape[-1:], 0.93, 1.07),
# tf.random.uniform(shape[-1:], -0.1, 0.1))
# will scale the normalized value by up to 7% up or down, then shift the
# result by up to 0.1 (with independent scaling and bias for each feature
# but shared across all examples), and finally apply gamma and/or beta.
# If None, no adjustment is applied.
# Cannot be specified if virtual_batch_size is specified.
adjustment: Optional[Callable] = None
# An optional module name
name: Optional[str] = None
def __init__(self, *args, **kwargs):
config = self.Config(*args, **kwargs)
if config.axis is None:
# Auto-set the normalization axis based on the currently active context
config.axis = Axis.channel
super().__init__(**config)
class Normalization:
# Mapping of supported normalizer layer names to types
_NAME_TO_NORMALIZER = {
'batch_norm': BatchNormalization,
'batch_normalization': BatchNormalization
}
class Config(DynamicSpec):
"""
Partial configuration for a normalization layer.
Any additional fields are forwarded to the specified normalization layer.
"""
# The type of normalization to use
kind: Union[str, Callable]
def __new__(cls, kind: Union[str, Callable], **normalizer_kwargs):
factory = kind if callable(kind) else cls.by_name(name=kind)
return factory(**normalizer_kwargs)
@classmethod
def by_name(cls, name):
"""
Returns the normalization module corresponding to the given name.
Raises a ValueError if no matching module is found.
"""
try:
return cls._NAME_TO_NORMALIZER[name]
except KeyError as err:
raise ValueError(f'Unknown normalizer: {name}') from err
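# --- Illustrative usage sketch (not part of the original module) ---
# Shows how the Normalization factory above resolves a layer by name; assumes the
# merlin package shown in this file (and TensorFlow) are importable. The axis,
# momentum and epsilon values are placeholders.
if __name__ == '__main__':
    # Resolve by registered name; extra keyword arguments are forwarded to the layer.
    bn = Normalization('batch_norm', axis=-1, momentum=0.9, epsilon=1e-5)
    assert isinstance(bn, BatchNormalization)
    # A callable (e.g. BatchNormalization itself) can be passed instead of a name.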
| 41.354331
| 83
| 0.653465
| 5,020
| 0.955826
| 0
| 0
| 361
| 0.068736
| 0
| 0
| 3,032
| 0.577304
|
38f2cb2b1272511384ed4a1d9e959be7afec0fff
| 3,095
|
py
|
Python
|
Plots/Bar/NCL_bar_2.py
|
learn2free/GeoCAT-examples
|
3ac152a767e78a362a8ebb6f677005f3de320ca6
|
[
"Apache-2.0"
] | 1
|
2021-05-09T02:54:10.000Z
|
2021-05-09T02:54:10.000Z
|
Plots/Bar/NCL_bar_2.py
|
learn2free/GeoCAT-examples
|
3ac152a767e78a362a8ebb6f677005f3de320ca6
|
[
"Apache-2.0"
] | null | null | null |
Plots/Bar/NCL_bar_2.py
|
learn2free/GeoCAT-examples
|
3ac152a767e78a362a8ebb6f677005f3de320ca6
|
[
"Apache-2.0"
] | null | null | null |
"""
NCL_bar_2.py
===============
This script illustrates the following concepts:
- Drawing bars instead of curves in an XY plot
- Changing the aspect ratio of a bar plot
- Drawing filled bars up or down based on a Y reference value
- Setting the minimum/maximum value of the Y axis in a bar plot
- Using named colors to indicate a fill color
- Creating array of dates to use as x-axis tick labels
- Creating a main title
See following URLs to see the reproduced NCL plot & script:
- Original NCL script: https://www.ncl.ucar.edu/Applications/Scripts/bar_2.ncl
- Original NCL plot: https://www.ncl.ucar.edu/Applications/Images/bar_2_lg.png
"""
import geocat.datafiles as gdf
import matplotlib.pyplot as plt
###############################################################################
# Import packages:
import numpy as np
import xarray as xr
from geocat.viz import util as gvutil
###############################################################################
# Read in data:
# Open a netCDF data file using xarray default engine and load the data into xarrays
ds = xr.open_dataset(gdf.get("netcdf_files/soi.nc"))
dsoik = ds.DSOI_KET
date = ds.date
num_months = np.shape(date)[0]
# Dates in the file are represented by year and month (YYYYMM)
# representing them fractionally will make plotting the data easier
# This produces the same results as NCL's yyyymm_to_yyyyfrac() function
date_frac = np.empty_like(date)
for n in np.arange(0, num_months, 1):
yyyy = int(date[n] / 100)
mon = (date[n] / 100 - yyyy) * 100
date_frac[n] = yyyy + (mon - 1) / 12
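# Worked example of the YYYYMM -> fractional-year conversion above (illustrative,
# the sample value is an assumption): for date[n] = 198004 (April 1980),
#   yyyy      = int(198004 / 100)            -> 1980
#   mon       = (198004 / 100 - 1980) * 100  -> ~4.0
#   date_frac = 1980 + (4 - 1) / 12          -> 1980.25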
###############################################################################
# Plot
# Generate figure (set its size (width, height) in inches) and axes
plt.figure(figsize=(12, 6))
ax = plt.axes()
# Create a list of colors based on the color bar values
colors = ['red' if (value > 0) else 'blue' for value in dsoik[::8]]
plt.bar(date_frac[::8],
dsoik[::8],
align='edge',
edgecolor='black',
color=colors,
width=8 / 12,
linewidth=.6)
# Use geocat.viz.util convenience function to add minor and major tick lines
gvutil.add_major_minor_ticks(ax,
x_minor_per_major=4,
y_minor_per_major=5,
labelsize=20)
# Use geocat.viz.util convenience function to set axes parameters
gvutil.set_axes_limits_and_ticks(ax,
ylim=(-3, 3),
yticks=np.linspace(-3, 3, 7),
yticklabels=np.linspace(-3, 3, 7),
xlim=(date_frac[40], date_frac[-16]),
xticks=np.linspace(1900, 1980, 5))
# Use geocat.viz.util convenience function to set titles and labels
gvutil.set_titles_and_labels(ax,
maintitle="Darwin Southern Oscillation Index",
ylabel='Anomalies',
maintitlefontsize=28,
labelfontsize=20)
plt.show()
| 37.289157
| 84
| 0.577383
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1,652
| 0.533764
|
38f35744d4413bb5e881ac526e9d7f661c57bec0
| 5,248
|
py
|
Python
|
gabbi/tests/test_driver.py
|
scottwallacesh/gabbi
|
5e76332ac06cd075c11477b384ad5da1d2eaa9d5
|
[
"Apache-2.0"
] | 145
|
2015-01-16T23:19:35.000Z
|
2022-03-15T00:21:54.000Z
|
gabbi/tests/test_driver.py
|
scottwallacesh/gabbi
|
5e76332ac06cd075c11477b384ad5da1d2eaa9d5
|
[
"Apache-2.0"
] | 250
|
2015-01-02T11:20:06.000Z
|
2022-03-22T19:55:18.000Z
|
gabbi/tests/test_driver.py
|
scottwallacesh/gabbi
|
5e76332ac06cd075c11477b384ad5da1d2eaa9d5
|
[
"Apache-2.0"
] | 49
|
2015-01-14T16:14:52.000Z
|
2022-03-21T11:37:26.000Z
|
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Test that the driver can build tests effectively."""
import os
import unittest
from gabbi import driver
TESTS_DIR = 'test_gabbits'
class DriverTest(unittest.TestCase):
def setUp(self):
super(DriverTest, self).setUp()
self.loader = unittest.defaultTestLoader
self.test_dir = os.path.join(os.path.dirname(__file__), TESTS_DIR)
def test_driver_loads_three_tests(self):
suite = driver.build_tests(self.test_dir, self.loader,
host='localhost', port=8001)
self.assertEqual(1, len(suite._tests),
'top level suite contains one suite')
self.assertEqual(3, len(suite._tests[0]._tests),
'contained suite contains three tests')
the_one_test = suite._tests[0]._tests[0]
self.assertEqual('test_driver_sample_one',
the_one_test.__class__.__name__,
'test class name maps')
self.assertEqual('one',
the_one_test.test_data['name'])
self.assertEqual('/', the_one_test.test_data['url'])
def test_driver_prefix(self):
suite = driver.build_tests(self.test_dir, self.loader,
host='localhost', port=8001,
prefix='/mountpoint')
the_one_test = suite._tests[0]._tests[0]
the_two_test = suite._tests[0]._tests[1]
self.assertEqual('/mountpoint', the_one_test.prefix)
self.assertEqual('/mountpoint', the_two_test.prefix)
def test_build_requires_host_or_intercept(self):
with self.assertRaises(AssertionError):
driver.build_tests(self.test_dir, self.loader)
def test_build_with_url_provides_host(self):
"""This confirms that url provides the required host."""
suite = driver.build_tests(self.test_dir, self.loader,
url='https://foo.example.com')
first_test = suite._tests[0]._tests[0]
full_url = first_test._parse_url(first_test.test_data['url'])
ssl = first_test.test_data['ssl']
self.assertEqual('https://foo.example.com/', full_url)
self.assertTrue(ssl)
def test_build_require_ssl(self):
suite = driver.build_tests(self.test_dir, self.loader,
host='localhost',
require_ssl=True)
first_test = suite._tests[0]._tests[0]
full_url = first_test._parse_url(first_test.test_data['url'])
self.assertEqual('https://localhost:8001/', full_url)
suite = driver.build_tests(self.test_dir, self.loader,
host='localhost',
require_ssl=False)
first_test = suite._tests[0]._tests[0]
full_url = first_test._parse_url(first_test.test_data['url'])
self.assertEqual('http://localhost:8001/', full_url)
def test_build_url_target(self):
suite = driver.build_tests(self.test_dir, self.loader,
host='localhost', port='999',
url='https://example.com:1024/theend')
first_test = suite._tests[0]._tests[0]
full_url = first_test._parse_url(first_test.test_data['url'])
self.assertEqual('https://example.com:1024/theend/', full_url)
def test_build_url_target_forced_ssl(self):
suite = driver.build_tests(self.test_dir, self.loader,
host='localhost', port='999',
url='http://example.com:1024/theend',
require_ssl=True)
first_test = suite._tests[0]._tests[0]
full_url = first_test._parse_url(first_test.test_data['url'])
self.assertEqual('https://example.com:1024/theend/', full_url)
def test_build_url_use_prior_test(self):
suite = driver.build_tests(self.test_dir, self.loader,
host='localhost',
use_prior_test=True)
for test in suite._tests[0]._tests:
if test.test_data['name'] != 'use_prior_false':
expected_use_prior = True
else:
expected_use_prior = False
self.assertEqual(expected_use_prior,
test.test_data['use_prior_test'])
suite = driver.build_tests(self.test_dir, self.loader,
host='localhost',
use_prior_test=False)
for test in suite._tests[0]._tests:
self.assertEqual(False, test.test_data['use_prior_test'])
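# Illustrative sketch (not part of the original tests): a gabbi test module usually
# hands the generated suite to unittest discovery through the standard load_tests
# protocol, using the same build_tests call exercised above (host/port are placeholders):
#   def load_tests(loader, tests, pattern):
#       test_dir = os.path.join(os.path.dirname(__file__), TESTS_DIR)
#       return driver.build_tests(test_dir, loader, host='localhost', port=8001)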
| 44.10084
| 75
| 0.598895
| 4,560
| 0.868902
| 0
| 0
| 0
| 0
| 0
| 0
| 1,254
| 0.238948
|
38f399316a049f820f54f6ac2696a2ab3406ec0f
| 4,779
|
py
|
Python
|
mindspore/ops/operations/_inner_ops.py
|
ZephyrChenzf/mindspore
|
8f191847cf71e12715ced96bc3575914f980127a
|
[
"Apache-2.0"
] | null | null | null |
mindspore/ops/operations/_inner_ops.py
|
ZephyrChenzf/mindspore
|
8f191847cf71e12715ced96bc3575914f980127a
|
[
"Apache-2.0"
] | null | null | null |
mindspore/ops/operations/_inner_ops.py
|
ZephyrChenzf/mindspore
|
8f191847cf71e12715ced96bc3575914f980127a
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2020 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Inner operators."""
from ..._checkparam import Validator as validator
from ...common import dtype as mstype
from ..primitive import PrimitiveWithInfer, prim_attr_register
class ExtractImagePatches(PrimitiveWithInfer):
"""
Extract patches from images.
The input tensor must be a 4-D tensor and the data format is NHWC.
Args:
ksizes (Union[tuple[int], list[int]]): The size of sliding window, should be a tuple or list of int,
and the format is [1, ksize_row, ksize_col, 1].
strides (Union[tuple[int], list[int]]): Distance between the centers of the two consecutive patches,
should be a tuple or list of int, and the format is [1, stride_row, stride_col, 1].
rates (Union[tuple[int], list[int]]): In each extracted patch, the gap between the corresponding dim
pixel positions, should be a tuple or list of int, and the format is [1, rate_row, rate_col, 1].
        padding (str): The type of padding algorithm; a string whose value is "same" or "valid",
            not case sensitive. Default: "valid".
- same: Means that the patch can take the part beyond the original image, and this part is filled with 0.
- valid: Means that the patch area taken must be completely contained in the original image.
Inputs:
- **input_x** (Tensor) - A 4-D tensor whose shape is [in_batch, in_row, in_col, in_depth] and
data type is number.
Outputs:
        Tensor, a 4-D tensor whose data type is the same as 'input_x',
        and whose shape is [out_batch, out_row, out_col, out_depth]; out_batch is the same as in_batch.
"""
@prim_attr_register
def __init__(self, ksizes, strides, rates, padding="valid"):
"""init"""
def _check_tuple_or_list(arg_name, arg_val, prim_name):
validator.check_value_type(f"{arg_name}s", ksizes, [tuple, list], self.name)
if len(arg_val) != 4 or arg_val[0] != 1 or arg_val[3] != 1:
raise ValueError(f"For \'{prim_name}\' the format of {arg_name}s should be [1, {arg_name}_row, "
f"{arg_name}_col, 1], but got {arg_val}.")
if not isinstance(arg_val[1], int) or not isinstance(arg_val[2], int) or arg_val[1] < 1 or arg_val[2] < 1:
                raise ValueError(f"For '{prim_name}' the {arg_name}_row and {arg_name}_col in {arg_name}s should be a "
f"positive integer number, but got {arg_name}_row is {arg_val[1]}, {arg_name}_col "
f"is {arg_val[2]}")
_check_tuple_or_list("ksize", ksizes, self.name)
_check_tuple_or_list("stride", strides, self.name)
_check_tuple_or_list("rate", rates, self.name)
self.padding = validator.check_string('padding', padding.upper(), ['VALID', 'SAME'], self.name)
self.add_prim_attr("padding", self.padding)
def infer_shape(self, input_x):
"""infer shape"""
in_batch, in_row, in_col, in_depth = input_x
_, ksize_row, ksize_col, _ = self.ksizes
_, stride_row, stride_col, _ = self.strides
_, rate_row, rate_col, _ = self.rates
if len(input_x) != 4:
raise ValueError("The `input_x` should be a 4-D tensor, "
f"but got a {len(input_x)}-D tensor whose shape is {input_x}")
out_batch = in_batch
out_depth = ksize_row * ksize_col * in_depth
if self.padding == "VALID":
out_row = \
(in_row - (ksize_row + (ksize_row - 1) * (rate_row - 1))) // stride_row + 1
out_col = \
(in_col - (ksize_col + (ksize_col - 1) * (rate_col - 1))) // stride_col + 1
else:
out_row = (in_row - 1) // stride_row + 1
out_col = (in_col - 1) // stride_col + 1
out_shape = [out_batch, out_row, out_col, out_depth]
return out_shape
def infer_dtype(self, input_x):
"""infer dtype"""
validator.check_tensor_type_same({"input_x": input_x}, mstype.number_type, self.name)
return input_x
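# Illustrative shape example (not part of the original operator), following the
# "valid" padding branch implemented in infer_shape above; the numbers are assumptions:
#   input_x = [1, 10, 10, 3] (NHWC), ksizes = [1, 3, 3, 1],
#   strides = [1, 2, 2, 1], rates = [1, 1, 1, 1]
#   out_row = out_col = (10 - (3 + (3 - 1) * (1 - 1))) // 2 + 1 = 4
#   out_depth = 3 * 3 * 3 = 27  ->  output shape [1, 4, 4, 27]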
| 48.272727
| 120
| 0.623352
| 3,932
| 0.822766
| 0
| 0
| 1,255
| 0.262607
| 0
| 0
| 2,643
| 0.553045
|
38f39bc63224a405d7dddb6afc4bd67e4b1fcae8
| 434
|
py
|
Python
|
migrations/versions/7f447c94347a_.py
|
tipabu/jazzband-website
|
30102e87348924eb56b610e74609a3475d3a14de
|
[
"MIT"
] | null | null | null |
migrations/versions/7f447c94347a_.py
|
tipabu/jazzband-website
|
30102e87348924eb56b610e74609a3475d3a14de
|
[
"MIT"
] | null | null | null |
migrations/versions/7f447c94347a_.py
|
tipabu/jazzband-website
|
30102e87348924eb56b610e74609a3475d3a14de
|
[
"MIT"
] | null | null | null |
"""
Revision ID: 7f447c94347a
Revises: a78f4b5d7dee
Create Date: 2017-11-17 14:59:36.177805
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = "7f447c94347a"
down_revision = "a78f4b5d7dee"
def upgrade():
op.add_column(
"projects", sa.Column("uploads_count", sa.SmallInteger(), nullable=True)
)
def downgrade():
op.drop_column("projects", "uploads_count")
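# Illustrative usage (not part of the original migration): with Alembic configured
# for this project, the revision above is applied or rolled back with the standard CLI:
#   alembic upgrade 7f447c94347a
#   alembic downgrade a78f4b5d7dee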
| 18.083333
| 80
| 0.714286
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 214
| 0.493088
|
38f4002d6f513a5cbe6493011e435271c5396522
| 5,308
|
py
|
Python
|
abcvoting/preferences.py
|
pbatko/abcvoting
|
55a8e7e23e35a3620921e3f5426a09925e83640e
|
[
"MIT"
] | null | null | null |
abcvoting/preferences.py
|
pbatko/abcvoting
|
55a8e7e23e35a3620921e3f5426a09925e83640e
|
[
"MIT"
] | null | null | null |
abcvoting/preferences.py
|
pbatko/abcvoting
|
55a8e7e23e35a3620921e3f5426a09925e83640e
|
[
"MIT"
] | null | null | null |
"""
Dichotomous (approval) preferences and preference profiles
Voters are indexed by 0, ..., len(profile)
Candidates are indexed by 0, ..., profile.num_cand
"""
from abcvoting.misc import str_candset
from collections import OrderedDict
class Profile(object):
"""
Preference profiles
"""
def __init__(self, num_cand, names=None):
if num_cand <= 0:
raise ValueError(str(num_cand) +
" is not a valid number of candidates")
self.num_cand = num_cand
self.preferences = []
self.names = [str(c) for c in range(num_cand)]
if names:
if len(names) < num_cand:
raise ValueError("names " + str(names) + " has length "
+ str(len(names)) + " < num_cand ("
+ str(num_cand) + ")")
self.names = [str(names[i]) for i in range(num_cand)]
def __len__(self):
return len(self.preferences)
def add_preferences(self, pref):
if type(pref) in [list, tuple]:
if len(pref) == 0:
return
if type(pref[0]) is int:
# list of integers
self.preferences.append(DichotomousPreferences(pref))
else:
# list of integer-lists or DichotomousPreferences
for p in pref:
if type(p) in [list, tuple]:
newpref = DichotomousPreferences(p)
newpref.is_valid(self.num_cand)
self.preferences.append(newpref)
elif isinstance(p, DichotomousPreferences):
p.is_valid(self.num_cand)
self.preferences.append(p)
else:
raise TypeError("Object of type " + str(type(p)) +
" not suitable as preferences")
elif isinstance(pref, DichotomousPreferences):
pref.is_valid(self.num_cand)
self.preferences.append(pref)
else:
raise TypeError("Object of type " + str(type(pref)) +
" not suitable as preferences")
def totalweight(self):
return sum(pref.weight for pref in self.preferences)
def has_unit_weights(self):
for p in self.preferences:
if p.weight != 1:
return False
return True
def __iter__(self):
return iter(self.preferences)
def __getitem__(self, i):
return self.preferences[i]
def __str__(self):
if self.has_unit_weights():
output = ("profile with %d votes and %d candidates:\n"
% (len(self.preferences), self.num_cand))
for p in self.preferences:
output += " " + str_candset(p.approved, self.names) + ",\n"
else:
output = ("weighted profile with %d votes and %d candidates:\n"
% (len(self.preferences), self.num_cand))
for p in self.preferences:
output += (" " + str(p.weight) + " * "
+ str_candset(p.approved, self.names) + ",\n")
return output[:-2]
def party_list(self):
"""
        Is this profile a party-list profile?
In a party-list profile all approval sets are either
disjoint or equal (see https://arxiv.org/abs/1704.02453).
"""
for pref1 in self.preferences:
for pref2 in self.preferences:
if ((len(pref1.approved & pref2.approved)
not in [0, len(pref1.approved)])):
return False
return True
def str_compact(self):
compact = OrderedDict()
for p in self.preferences:
if tuple(p.approved) in compact:
compact[tuple(p.approved)] += p.weight
else:
compact[tuple(p.approved)] = p.weight
if self.has_unit_weights():
output = ""
else:
output = "weighted "
output += ("profile with %d votes and %d candidates:\n"
% (len(self.preferences), self.num_cand))
for apprset in compact:
output += (" " + str(compact[apprset]) + " x "
+ str_candset(apprset, self.names) + ",\n")
output = output[:-2]
if not self.has_unit_weights():
output += "\ntotal weight: " + str(self.totalweight())
output += "\n"
return output
def aslist(self):
return [list(pref.approved) for pref in self.preferences]
class DichotomousPreferences():
def __init__(self, approved, weight=1):
self.approved = set(approved)
if approved: # empty approval sets are fine
self.is_valid(max(approved) + 1)
self.weight = weight
def __str__(self):
return str(list(self.approved))
def __len__(self):
return len(self.approved)
def __iter__(self):
return iter(self.approved)
def is_valid(self, num_cand):
for c in self.approved:
if c < 0 or c >= num_cand:
raise ValueError(str(self) + " not valid for num_cand = " +
str(num_cand))
return True
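# --- Illustrative usage sketch (not part of the original module) ---
# Builds a small profile with the classes above; candidate indices and the number
# of candidates are placeholders.
if __name__ == '__main__':
    profile = Profile(num_cand=4)
    profile.add_preferences([[0, 1], [0, 1], [2, 3]])  # three voters' approval sets
    assert len(profile) == 3 and profile.has_unit_weights()
    # {0, 1} and {2, 3} are pairwise disjoint or equal, so this is a party-list profile
    assert profile.party_list()
    print(profile.str_compact())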
| 35.152318
| 75
| 0.527882
| 5,064
| 0.954032
| 0
| 0
| 0
| 0
| 0
| 0
| 888
| 0.167295
|
38f53f3c9f560f0765e57f3e8c5f7abb2dffb240
| 1,035
|
py
|
Python
|
java/create_solution.py
|
hermantai/kata
|
1b1d7af2f46bca994bede3f8a937dff96015e415
|
[
"Apache-2.0"
] | null | null | null |
java/create_solution.py
|
hermantai/kata
|
1b1d7af2f46bca994bede3f8a937dff96015e415
|
[
"Apache-2.0"
] | null | null | null |
java/create_solution.py
|
hermantai/kata
|
1b1d7af2f46bca994bede3f8a937dff96015e415
|
[
"Apache-2.0"
] | null | null | null |
import os
import sys
templ = """package kata;
import static kata.Printer.*;
import java.util.*;
/**
* Cracking the coding interview 6th ed. p.XX(TODO)
*/
public class %(classname)s {
static int %(methodname)s(String str) {
return 0;
}
public static void main(String args[]) {
        runSample("abcabcdd", 0);  // TODO: replace 0 with the expected answer
}
static void runSample(String s, int ans) {
System.out.printf(
"%%s = %%s(%%s)\\n",
s,
%(methodname)s(s),
ans);
}
}
"""
def main():
if len(sys.argv) != 2:
print("Usage: create_solution.py ClassName")
return
classname = sys.argv[1]
methodname = classname[0].lower() + classname[1:]
filepath = os.path.join(os.path.join(os.path.dirname(__file__), "kata"),
classname + ".java")
if os.path.exists(filepath):
print("%s already exists" % filepath)
return
with open(filepath, "w") as f:
f.write(templ % locals())
print("Written to %s" % filepath)
if __name__ == '__main__':
main()
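# Illustrative usage (not part of the original script): running
#   python create_solution.py LongestSubstring
# writes kata/LongestSubstring.java next to this script, with a stub method named
# longestSubstring generated from the template above.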
| 19.903846
| 76
| 0.575845
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 541
| 0.522705
|
38f5e5531d57aca7c42b9394241ccc224319e068
| 310
|
py
|
Python
|
tests/unit/helpers_test/test_password.py
|
alefeans/flask-base
|
e3daa4ce1020ba3711908c3ba5ef88b0cc599dfe
|
[
"MIT"
] | 11
|
2019-10-03T18:47:49.000Z
|
2022-02-01T10:42:02.000Z
|
tests/unit/helpers_test/test_password.py
|
alefeans/flask-base
|
e3daa4ce1020ba3711908c3ba5ef88b0cc599dfe
|
[
"MIT"
] | null | null | null |
tests/unit/helpers_test/test_password.py
|
alefeans/flask-base
|
e3daa4ce1020ba3711908c3ba5ef88b0cc599dfe
|
[
"MIT"
] | 8
|
2019-10-03T18:47:53.000Z
|
2021-06-07T14:47:51.000Z
|
import pytest
from app.helpers import check_password, encrypt_password
@pytest.mark.parametrize('sent', [
('test'),
('changeme'),
('1234123'),
])
def test_if_check_password_and_encrypt_password_works_properly(sent):
expected = encrypt_password(sent)
assert check_password(sent, expected)
| 23.846154
| 69
| 0.745161
| 0
| 0
| 0
| 0
| 236
| 0.76129
| 0
| 0
| 31
| 0.1
|
38f71f66b4d3452d83d78d02e5968d474fc84f07
| 1,717
|
py
|
Python
|
tests/output/test_pdf_to_png.py
|
ynikitenko/lena
|
d0fbae47f21007685edbd4e77bc91413421bebd1
|
[
"Apache-2.0"
] | 4
|
2020-03-01T14:01:48.000Z
|
2021-02-23T19:33:36.000Z
|
tests/output/test_pdf_to_png.py
|
ynikitenko/lena
|
d0fbae47f21007685edbd4e77bc91413421bebd1
|
[
"Apache-2.0"
] | 1
|
2021-05-09T15:47:17.000Z
|
2021-05-09T16:12:03.000Z
|
tests/output/test_pdf_to_png.py
|
ynikitenko/lena
|
d0fbae47f21007685edbd4e77bc91413421bebd1
|
[
"Apache-2.0"
] | null | null | null |
from __future__ import print_function
import os
import pytest
import subprocess
import sys
import lena.core
from lena.output import PDFToPNG
def test_pdf_to_png(mocker):
mocker.patch("subprocess.Popen.communicate", return_value=("stdout", "stderr"))
mocker.patch("subprocess.Popen.returncode", return_value=True, create=True)
mocker.patch("subprocess.Popen", return_value=subprocess.Popen)
pdf_to_png = PDFToPNG()
data = [
("output/file.csv", {"output": {"filename": "y", "filetype": "csv"}}),
("output/file.pdf", {"output": {"filename": "y", "filetype": "pdf"}}),
]
res = list(pdf_to_png.run(data))
assert res == [
('output/file.csv', {'output': {'filename': 'y', 'filetype': 'csv'}}),
# since no png file exists,
# mocker imitates creation of a new one, thus changed is True
('output/file.png', {'output': {'changed': True,
'filename': 'y', 'filetype': 'png'}})
]
command = ['pdftoppm', 'output/file.pdf', 'output/file', '-png', '-singlefile']
subprocess.Popen.assert_called_once_with(command)
# test "existing" png
mocker.patch("subprocess.Popen", return_value=subprocess.Popen)
def _os_path_exists(filepath):
return filepath == "output/file.png"
mocker.patch("os.path.exists", _os_path_exists)
pdf_data = [("output/file.pdf", {"output": {"filename": "y", "filetype": "pdf"}})]
assert list(pdf_to_png.run(pdf_data)) == [
('output/file.png',
{'output':
{'changed': False,
'filename': 'y',
'filetype': 'png'}})
]
# command was not called
assert not subprocess.Popen.called
| 35.040816
| 86
| 0.608037
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 672
| 0.39138
|
38f9a10a36632913442b3f03652d751ae087dae5
| 4,162
|
py
|
Python
|
third_party/maya/lib/usdMaya/testenv/testUsdExportSkeleton.py
|
navefx/YuksUSD
|
56c2e1def36ee07121f4ecb349c1626472b3c338
|
[
"AML"
] | 6
|
2018-08-26T13:27:22.000Z
|
2021-08-14T23:57:38.000Z
|
third_party/maya/lib/usdMaya/testenv/testUsdExportSkeleton.py
|
navefx/YuksUSD
|
56c2e1def36ee07121f4ecb349c1626472b3c338
|
[
"AML"
] | 1
|
2021-08-14T23:57:51.000Z
|
2021-08-14T23:57:51.000Z
|
third_party/maya/lib/usdMaya/testenv/testUsdExportSkeleton.py
|
navefx/YuksUSD
|
56c2e1def36ee07121f4ecb349c1626472b3c338
|
[
"AML"
] | 4
|
2018-06-14T18:14:59.000Z
|
2021-09-13T22:20:50.000Z
|
#!/pxrpythonsubst
#
# Copyright 2018 Pixar
#
# Licensed under the Apache License, Version 2.0 (the "Apache License")
# with the following modification; you may not use this file except in
# compliance with the Apache License and the following modification to it:
# Section 6. Trademarks. is deleted and replaced with:
#
# 6. Trademarks. This License does not grant permission to use the trade
# names, trademarks, service marks, or product names of the Licensor
# and its affiliates, except as required to comply with Section 4(c) of
# the License and to reproduce the content of the NOTICE file.
#
# You may obtain a copy of the Apache License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the Apache License with the above modification is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the Apache License for the specific
# language governing permissions and limitations under the Apache License.
#
import os
import unittest
from maya import cmds
from maya import standalone
from maya.api import OpenMaya as OM
from pxr import Gf, Usd, UsdSkel, Vt
class testUsdExportSkeleton(unittest.TestCase):
@classmethod
def setUpClass(cls):
standalone.initialize('usd')
cmds.file(os.path.abspath('UsdExportSkeleton.ma'), open=True,
force=True)
cmds.loadPlugin('pxrUsd', quiet=True)
@classmethod
def tearDownClass(cls):
standalone.uninitialize()
def _AssertMatricesClose(self, gfm1, gfm2):
for i in xrange(0, 4):
for j in xrange(0, 4):
self.assertAlmostEqual(gfm1[i][j], gfm2[i][j], places=3)
def testSkeletonTopology(self):
"""Tests that the joint topology is correct."""
usdFile = os.path.abspath('UsdExportSkeleton.usda')
cmds.usdExport(mergeTransformAndShape=True, file=usdFile,
shadingMode='none')
stage = Usd.Stage.Open(usdFile)
skeleton = UsdSkel.Skeleton.Get(stage, '/skeleton_Hip')
self.assertTrue(skeleton)
joints = skeleton.GetJointsAttr().Get()
self.assertEqual(joints, Vt.TokenArray([
"Hip",
"Hip/Spine",
"Hip/Spine/Neck",
"Hip/Spine/Neck/Head",
"Hip/Spine/Neck/LArm",
"Hip/Spine/Neck/LArm/LHand",
# note: skips ExtraJoints because it's not a joint
"Hip/Spine/Neck/LArm/LHand/ExtraJoints/ExtraJoint1",
"Hip/Spine/Neck/LArm/LHand/ExtraJoints/ExtraJoint1/ExtraJoint2",
"Hip/Spine/Neck/RArm",
"Hip/Spine/Neck/RArm/RHand",
"Hip/RLeg",
"Hip/RLeg/RFoot",
"Hip/LLeg",
"Hip/LLeg/LFoot"
]))
def testSkelTransformDecomposition(self):
"""
Tests that the decomposed transform values, when recomposed, recreate
the correct Maya transformation matrix.
"""
usdFile = os.path.abspath('UsdExportSkeleton.usda')
cmds.usdExport(mergeTransformAndShape=True, file=usdFile,
shadingMode='none', frameRange=[1, 30])
stage = Usd.Stage.Open(usdFile)
anim = UsdSkel.PackedJointAnimation.Get(stage,
'/skeleton_Hip/Animation')
self.assertEqual(anim.GetJointsAttr().Get()[8],
"Hip/Spine/Neck/RArm")
animT = anim.GetTranslationsAttr()
animR = anim.GetRotationsAttr()
animS = anim.GetScalesAttr()
selList = OM.MSelectionList()
selList.add("RArm")
rArmDagPath = selList.getDagPath(0)
fnTransform = OM.MFnTransform(rArmDagPath)
for i in xrange(1, 31):
cmds.currentTime(i, edit=True)
mayaXf = fnTransform.transformation().asMatrix()
usdT = animT.Get(i)[8]
usdR = animR.Get(i)[8]
usdS = animS.Get(i)[8]
usdXf = UsdSkel.MakeTransform(usdT, usdR, usdS)
self._AssertMatricesClose(usdXf, Gf.Matrix4d(*mayaXf))
if __name__ == '__main__':
unittest.main(verbosity=2)
| 34.683333
| 77
| 0.645603
| 2,870
| 0.689572
| 0
| 0
| 290
| 0.069678
| 0
| 0
| 1,778
| 0.427198
|
38fa54c4a5025900fd457356bdca81cf5e7db815
| 92
|
py
|
Python
|
datamux/src/datamux/simulate_mode.py
|
nirdslab/streaminghub
|
a0d9f5f8be0ee6f090bd2b48b9f596695497c2bf
|
[
"MIT"
] | null | null | null |
datamux/src/datamux/simulate_mode.py
|
nirdslab/streaminghub
|
a0d9f5f8be0ee6f090bd2b48b9f596695497c2bf
|
[
"MIT"
] | null | null | null |
datamux/src/datamux/simulate_mode.py
|
nirdslab/streaminghub
|
a0d9f5f8be0ee6f090bd2b48b9f596695497c2bf
|
[
"MIT"
] | 1
|
2020-01-22T15:35:29.000Z
|
2020-01-22T15:35:29.000Z
|
class SimulateMode:
@staticmethod
def start_simulation(device, guide=None):
return
| 15.333333
| 43
| 0.75
| 91
| 0.98913
| 0
| 0
| 68
| 0.73913
| 0
| 0
| 0
| 0
|
38fb3ca62ed5924a18e726aa270114cbccbf7a59
| 10,062
|
py
|
Python
|
tests/integration/test_k8s.py
|
lslebodn/conu
|
dee6fd958471f77d1c0511b031ea136dfaf8a77a
|
[
"MIT"
] | 95
|
2018-05-19T14:35:08.000Z
|
2022-01-08T23:31:40.000Z
|
tests/integration/test_k8s.py
|
lslebodn/conu
|
dee6fd958471f77d1c0511b031ea136dfaf8a77a
|
[
"MIT"
] | 179
|
2017-09-12T11:14:30.000Z
|
2018-04-26T05:36:13.000Z
|
tests/integration/test_k8s.py
|
lslebodn/conu
|
dee6fd958471f77d1c0511b031ea136dfaf8a77a
|
[
"MIT"
] | 16
|
2018-05-09T14:15:32.000Z
|
2021-08-02T21:11:33.000Z
|
# -*- coding: utf-8 -*-
#
# Copyright Contributors to the Conu project.
# SPDX-License-Identifier: MIT
#
"""
Tests for Kubernetes backend
"""
import urllib3
import pytest
from conu import DockerBackend, \
K8sBackend, K8sCleanupPolicy
from conu.backend.k8s.pod import Pod, PodPhase
from conu.backend.k8s.service import Service
from conu.backend.k8s.deployment import Deployment
from conu.backend.k8s.client import get_core_api
from conu.utils import get_oc_api_token, oc_command_exists, is_oc_cluster_running
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
@pytest.mark.skipif(not oc_command_exists(), reason="OpenShift is not installed!")
@pytest.mark.skipif(not is_oc_cluster_running(), reason="OpenShift cluster is not running!")
class TestK8s(object):
def test_pod(self):
api_key = get_oc_api_token()
with K8sBackend(api_key=api_key) as k8s_backend:
namespace = k8s_backend.create_namespace()
with DockerBackend() as backend:
image = backend.ImageClass("openshift/hello-openshift")
pod = image.run_in_pod(namespace=namespace)
try:
pod.wait(200)
assert pod.is_ready()
assert pod.get_phase() == PodPhase.RUNNING
finally:
pod.delete()
assert pod.get_phase() == PodPhase.TERMINATING
k8s_backend.delete_namespace(namespace)
def test_pod_from_template(self):
template = {
"apiVersion": "v1",
"kind": "Pod",
"metadata": {
"name": "myapp-pod",
"labels": {
"app": "myapp"
}
},
"spec": {
"containers": [
{
"name": "myapp-container",
"image": "busybox",
"command": [
"sh",
"-c",
"echo Hello Kubernetes! && sleep 3600"
]
}
]
}
}
api_key = get_oc_api_token()
with K8sBackend(api_key=api_key) as k8s_backend:
namespace = k8s_backend.create_namespace()
pod = Pod(namespace=namespace, from_template=template)
try:
pod.wait(200)
assert pod.is_ready()
assert pod.get_phase() == PodPhase.RUNNING
finally:
pod.delete()
assert pod.get_phase() == PodPhase.TERMINATING
k8s_backend.delete_namespace(namespace)
def test_database_deployment(self):
api_key = get_oc_api_token()
with K8sBackend(api_key=api_key) as k8s_backend:
namespace = k8s_backend.create_namespace()
with DockerBackend() as backend:
postgres_image = backend.ImageClass("centos/postgresql-10-centos7")
postgres_image_metadata = postgres_image.get_metadata()
# set up env variables
db_env_variables = {"POSTGRESQL_USER": "user",
"POSTGRESQL_PASSWORD": "pass",
"POSTGRESQL_DATABASE": "db"}
postgres_image_metadata.env_variables.update(db_env_variables)
db_labels = {"app": "postgres"}
db_service = Service(name="database", ports=["5432"], selector=db_labels,
namespace=namespace,
create_in_cluster=True)
db_deployment = Deployment(name="database", selector=db_labels, labels=db_labels,
image_metadata=postgres_image_metadata,
namespace=namespace,
create_in_cluster=True)
try:
db_deployment.wait(200)
assert db_deployment.all_pods_ready()
finally:
db_deployment.delete()
db_service.delete()
k8s_backend.delete_namespace(namespace)
def test_list_pods(self):
api_key = get_oc_api_token()
with K8sBackend(api_key=api_key) as k8s_backend:
namespace = k8s_backend.create_namespace()
with DockerBackend() as backend:
image = backend.ImageClass("openshift/hello-openshift")
pod = image.run_in_pod(namespace=namespace)
try:
pod.wait(200)
assert any(pod.name == p.name for p in k8s_backend.list_pods())
finally:
pod.delete()
k8s_backend.delete_namespace(namespace)
def test_list_services(self):
api_key = get_oc_api_token()
with K8sBackend(api_key=api_key) as k8s_backend:
namespace = k8s_backend.create_namespace()
labels = {"app": "postgres"}
service = Service(name="database", ports=["5432"], selector=labels, namespace=namespace,
create_in_cluster=True)
try:
assert any(service.name == s.name for s in k8s_backend.list_services())
finally:
service.delete()
k8s_backend.delete_namespace(namespace)
def test_list_deployments(self):
api_key = get_oc_api_token()
with K8sBackend(api_key=api_key) as k8s_backend:
namespace = k8s_backend.create_namespace()
with DockerBackend() as backend:
postgres_image = backend.ImageClass("centos/postgresql-10-centos7")
postgres_image_metadata = postgres_image.get_metadata()
# set up env variables
db_env_variables = {"POSTGRESQL_USER": "user",
"POSTGRESQL_PASSWORD": "pass",
"POSTGRESQL_DATABASE": "db"}
postgres_image_metadata.env_variables.update(db_env_variables)
db_labels = {"app": "postgres"}
db_deployment = Deployment(name="database", selector=db_labels, labels=db_labels,
image_metadata=postgres_image_metadata,
namespace=namespace,
create_in_cluster=True)
try:
db_deployment.wait(200)
assert db_deployment.all_pods_ready()
assert any(db_deployment.name == d.name for d in k8s_backend.list_deployments())
finally:
db_deployment.delete()
k8s_backend.delete_namespace(namespace)
def test_list_pod_for_namespace(self):
api_key = get_oc_api_token()
with K8sBackend(api_key=api_key) as k8s_backend:
namespace1 = k8s_backend.create_namespace()
namespace2 = k8s_backend.create_namespace()
with DockerBackend() as backend:
image = backend.ImageClass("openshift/hello-openshift")
pod1 = image.run_in_pod(namespace=namespace1)
try:
pod1.wait(200)
assert any(pod1.name == p.name for p in k8s_backend.list_pods(namespace1))
assert not any(pod1.name == p.name for p in k8s_backend.list_pods(namespace2))
finally:
pod1.delete()
k8s_backend.delete_namespace(namespace1)
k8s_backend.delete_namespace(namespace2)
def test_deployment_from_template(self):
api_key = get_oc_api_token()
with K8sBackend(api_key=api_key) as k8s_backend:
namespace = k8s_backend.create_namespace()
template = """
apiVersion: apps/v1
kind: Deployment
metadata:
name: hello-world
labels:
app: hello-world
spec:
replicas: 3
selector:
matchLabels:
app: hello-world
template:
metadata:
labels:
app: hello-world
spec:
containers:
- name: hello-openshift
image: openshift/hello-openshift
"""
test_deployment = Deployment(namespace=namespace, from_template=template,
create_in_cluster=True)
try:
test_deployment.wait(200)
assert test_deployment.all_pods_ready()
finally:
test_deployment.delete()
k8s_backend.delete_namespace(namespace)
def test_cleanup(self):
api = get_core_api()
# take just namespaces that are not in terminating state
number_of_namespaces = len(
[item for item in api.list_namespace().items if item.status.phase != "Terminating"])
api_key = get_oc_api_token()
with K8sBackend(api_key=api_key, cleanup=[K8sCleanupPolicy.NAMESPACES]) as k8s_backend:
# create two namespaces
k8s_backend.create_namespace()
k8s_backend.create_namespace()
# cleanup should delete two namespaces created with k8s backend
assert len(
[item for item in api.list_namespace().items
if item.status.phase != "Terminating"]) == number_of_namespaces
with K8sBackend(api_key=api_key) as k8s_backend:
# create two namespaces
k8s_backend.create_namespace()
k8s_backend.create_namespace()
# no cleanup - namespaces are not deleted after work with backend is finished
assert len(
[item for item in api.list_namespace().items
if item.status.phase != "Terminating"]) == number_of_namespaces + 2
| 35.305263
| 100
| 0.550487
| 9,286
| 0.922878
| 0
| 0
| 9,462
| 0.94037
| 0
| 0
| 1,680
| 0.166965
|
38fbd974e03682bea68c5248fd58ad877e5fa2e2
| 747
|
py
|
Python
|
fanyi.py
|
smithgoo/python3Learn
|
d0c066c10887db3942ca285b86ce464463998aad
|
[
"MIT"
] | 1
|
2019-05-30T08:08:34.000Z
|
2019-05-30T08:08:34.000Z
|
fanyi.py
|
smithgoo/python3Learn
|
d0c066c10887db3942ca285b86ce464463998aad
|
[
"MIT"
] | null | null | null |
fanyi.py
|
smithgoo/python3Learn
|
d0c066c10887db3942ca285b86ce464463998aad
|
[
"MIT"
] | null | null | null |
#!/usr/bin/python
#coding:utf-8
import requests
import json
headers ={"User-Agent": "Mozilla/5.0 (iPhone; CPU iPhone OS 11_0 like Mac OS X) AppleWebKit/604.1.38 (KHTML, like Gecko) Version/11.0 Mobile/15A372 Safari/604.1","Referer": "http://fanyi.baidu.com/translate?aldtype=16047&query=&keyfrom=baidu&smartresult=dict&lang=auto2zh"}
url = "http://fanyi.baidu.com/basetrans"
words = raw_input("English to Chinese: ")  # prompt aligned with the en->zh request below
requestdic ={"query":words,"from":"en","to":"zh"}
response = requests.post(url,data=requestdic,headers =headers)
# response.encoding = "utf-8"
print(response)
print(response.content.decode())
htmlstr = response.content.decode()
str1 = json.loads(htmlstr)
print(str1)
print(type(str1))
str2 = str1["trans"][0]["dst"]
print(str2)
| 20.75
| 273
| 0.716198
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 399
| 0.52988
|
38fbddf88e24f691af151a049dc107ebe16a7e13
| 13,228
|
py
|
Python
|
frads/radmtx.py
|
LBNL-ETA/frads
|
dbd9980c7cfebd363089180d8fb1b7107e73ec92
|
[
"BSD-3-Clause-LBNL"
] | 8
|
2019-11-13T22:26:45.000Z
|
2022-03-23T15:30:37.000Z
|
frads/radmtx.py
|
LBNL-ETA/frads
|
dbd9980c7cfebd363089180d8fb1b7107e73ec92
|
[
"BSD-3-Clause-LBNL"
] | null | null | null |
frads/radmtx.py
|
LBNL-ETA/frads
|
dbd9980c7cfebd363089180d8fb1b7107e73ec92
|
[
"BSD-3-Clause-LBNL"
] | 2
|
2021-08-10T18:22:04.000Z
|
2021-08-30T23:16:27.000Z
|
""" Support matrices generation.
radmtx module contains two class objects: sender and receiver, representing
the ray sender and receiver in the rfluxmtx operation. A sender object can
be instantiated as a surface, a list of points, or a view, and these are
typical forms of a sender. Similarly, a receiver object can be instantiated as
a surface, sky, or suns.
"""
from __future__ import annotations
import os
import copy
import subprocess as sp
import tempfile as tf
import logging
from frads import makesky
from frads import radgeom
from frads import radutil, util
from typing import Optional
logger = logging.getLogger('frads.radmtx')
class Sender:
"""Sender object for matrix generation with the following attributes:
Attributes:
form(str): types of sender, {surface(s)|view(v)|points(p)}
sender(str): the sender object
xres(int): sender x dimension
yres(int): sender y dimension
"""
def __init__(self, *, form: str, sender: bytes,
xres: Optional[int], yres: Optional[int]):
"""Instantiate the instance.
Args:
form(str): Sender as (s, v, p) for surface, view, and points;
path(str): sender file path;
sender(str): content of the sender file;
xres(int): x resolution of the image;
            yres(int): y resolution or line count if form is pts;
"""
self.form = form
self.sender = sender
self.xres = xres
self.yres = yres
logger.debug("Sender: %s", sender)
@classmethod
def as_surface(cls, *, prim_list: list, basis: str,
offset=None, left=None):
"""
Construct a sender from a surface.
Args:
prim_list(list): a list of primitives
basis(str): sender sampling basis
offset(float): move the sender surface in its normal direction
left(bool): Use left-hand rule instead for matrix generation
Returns:
A sender object (Sender)
"""
prim_str = prepare_surface(prims=prim_list, basis=basis, offset=offset,
left=left, source=None, out=None)
return cls(form='s', sender=prim_str.encode(), xres=None, yres=None)
@classmethod
def as_view(cls, *, vu_dict: dict, ray_cnt: int, xres: int, yres: int) -> Sender:
"""
Construct a sender from a view.
Args:
vu_dict: a dictionary containing view parameters;
ray_cnt: ray count;
xres, yres: image resolution
            (fisheye views, vt = 'a', have their corner rays trimmed automatically)
Returns:
A sender object
"""
if None in (xres, yres):
raise ValueError("Need to specify resolution")
vcmd = f"vwrays {radutil.opt2str(vu_dict)} -x {xres} -y {yres} -d"
res_eval = util.spcheckout(vcmd.split()).decode().split()
xres, yres = int(res_eval[1]), int(res_eval[3])
logger.info("Changed resolution to %s %s", xres, yres)
cmd = f"vwrays -ff -x {xres} -y {yres} "
if ray_cnt > 1:
vu_dict['c'] = ray_cnt
vu_dict['pj'] = 0.7 # placeholder
logger.debug("Ray count is %s", ray_cnt)
cmd += radutil.opt2str(vu_dict)
if vu_dict['vt'] == 'a':
cmd += "|" + Sender.crop2circle(ray_cnt, xres)
vrays = sp.run(cmd, shell=True, check=True, stdout=sp.PIPE).stdout
return cls(form='v', sender=vrays, xres=xres, yres=yres)
@classmethod
def as_pts(cls, *, pts_list: list, ray_cnt=1) -> Sender:
"""Construct a sender from a list of points.
Args:
pts_list(list): a list of list of float
ray_cnt(int): sender ray count
Returns:
A sender object
"""
if pts_list is None:
raise ValueError("pts_list is None")
if not all(isinstance(item, list) for item in pts_list):
raise ValueError("All grid points has to be lists.")
pts_list = [i for i in pts_list for _ in range(ray_cnt)]
grid_str = os.linesep.join(
[' '.join(map(str, li)) for li in pts_list]) + os.linesep
return cls(form='p', sender=grid_str.encode(), xres=None, yres=len(pts_list))
@staticmethod
def crop2circle(ray_cnt: int, xres: int) -> str:
"""Flush the corner rays from a fisheye view
Args:
ray_cnt: ray count;
xres: resolution of the square image;
Returns:
Command to generate cropped rays
"""
cmd = "rcalc -if6 -of "
cmd += f'-e "DIM:{xres};CNT:{ray_cnt}" '
cmd += '-e "pn=(recno-1)/CNT+.5" '
cmd += '-e "frac(x):x-floor(x)" '
cmd += '-e "xpos=frac(pn/DIM);ypos=pn/(DIM*DIM)" '
cmd += '-e "incir=if(.25-(xpos-.5)*(xpos-.5)-(ypos-.5)*(ypos-.5),1,0)" '
cmd += ' -e "$1=$1;$2=$2;$3=$3;$4=$4*incir;$5=$5*incir;$6=$6*incir"'
if os.name == "posix":
cmd = cmd.replace('"', "'")
return cmd
class Receiver:
"""Receiver object for matrix generation."""
def __init__(self, receiver: str, basis: str, modifier=None) -> None:
"""Instantiate the receiver object.
Args:
receiver(str): receiver string which can be appended to one another
basis(str): receiver basis, usually kf, r4, r6;
modifier(str): modifiers to the receiver objects;
"""
self.receiver = receiver
self.basis = basis
self.modifier = modifier
logger.debug("Receivers: %s", receiver)
def __add__(self, other: Receiver) -> Receiver:
self.receiver += '\n' + other.receiver
return self
@classmethod
def as_sun(cls, *, basis, smx_path, window_normals, full_mod=False) -> Receiver:
"""Instantiate a sun receiver object.
Args:
basis: receiver sampling basis {kf | r1 | sc25...}
smx_path: sky/sun matrix file path
window_paths: window file paths
Returns:
A sun receiver object
"""
gensun = makesky.Gensun(int(basis[-1]))
if (smx_path is None) and (window_normals is None):
str_repr = gensun.gen_full()
return cls(receiver=str_repr, basis=basis, modifier=gensun.mod_str)
str_repr, mod_str = gensun.gen_cull(smx_path=smx_path, window_normals=window_normals)
if full_mod:
return cls(receiver=str_repr, basis=basis, modifier=gensun.mod_str)
return cls(receiver=str_repr, basis=basis, modifier=mod_str)
@classmethod
def as_sky(cls, basis) -> Receiver:
"""Instantiate a sky receiver object.
Args:
basis: receiver sampling basis {kf | r1 | sc25...}
Returns:
A sky receiver object
"""
assert basis.startswith('r'), 'Sky basis need to be Treganza/Reinhart'
sky_str = makesky.basis_glow(basis)
logger.debug(sky_str)
return cls(receiver=sky_str, basis=basis)
@classmethod
def as_surface(cls, prim_list: list, basis: str, out: str,
offset=None, left=False, source='glow') -> Receiver:
"""Instantiate a surface receiver object.
Args:
prim_list: list of primitives(dict)
basis: receiver sampling basis {kf | r1 | sc25...}
out: output path
offset: offset the surface in its normal direction
            left: use the left-hand rule instead for matrix generation
source: light source for receiver object {glow|light}
Returns:
A surface receiver object
"""
rcvr_str = prepare_surface(prims=prim_list, basis=basis, offset=offset,
left=left, source=source, out=out)
return cls(receiver=rcvr_str, basis=basis)
def prepare_surface(*, prims, basis, left, offset, source, out) -> str:
"""Prepare the sender or receiver surface, adding appropriate tags.
Args:
prims(list): list of primitives
basis(str): sampling basis
left(bool): use instead the left-hand rule
offset(float): offset surface in its normal direction
source(str): surface light source for receiver
out: output path
Returns:
The receiver as string
"""
if basis is None:
raise ValueError('Sampling basis cannot be None')
upvector = str(radutil.up_vector(prims)).replace(' ', ',')
upvector = "-" + upvector if left else upvector
modifier_set = {p.modifier for p in prims}
if len(modifier_set) != 1:
logger.warning("Primitives don't share modifier")
src_mod = f"rflx{prims[0].modifier}"
header = f'#@rfluxmtx h={basis} u={upvector}\n'
if out is not None:
header += f'#@rfluxmtx o="{out}"\n\n'
if source is not None:
source_line = f"void {source} {src_mod}\n0\n0\n4 1 1 1 0\n\n"
header += source_line
modifiers = [p.modifier for p in prims]
content = ''
for prim in prims:
if prim.identifier in modifiers:
_identifier = 'discarded'
else:
_identifier = prim.identifier
_modifier = src_mod
if offset is not None:
poly = radutil.parse_polygon(prim.real_arg)
offset_vec = poly.normal().scale(offset)
moved_pts = [pt + offset_vec for pt in poly.vertices]
_real_args = radgeom.Polygon(moved_pts).to_real()
else:
_real_args = prim.real_arg
new_prim = radutil.Primitive(
_modifier, prim.ptype, _identifier, prim.str_arg, _real_args)
content += str(new_prim) + '\n'
return header + content
def rfluxmtx(*, sender, receiver, env, opt=None, out=None):
"""Calling rfluxmtx to generate the matrices.
Args:
sender: Sender object
receiver: Receiver object
env: model environment, basically anything that's not the
sender or receiver
opt: option string
out: output path
Returns:
return the stdout of the command
"""
if None in (sender, receiver):
raise ValueError("Sender/Receiver object is None")
opt = '' if opt is None else opt
with tf.TemporaryDirectory() as tempd:
receiver_path = os.path.join(tempd, 'receiver')
with open(receiver_path, 'w') as wtr:
wtr.write(receiver.receiver)
if isinstance(env[0], dict):
env_path = os.path.join(tempd, 'env')
with open(env_path, 'w') as wtr:
                for prim in env:
                    wtr.write(str(prim))
env_paths = [env_path]
else:
env_paths = env
cmd = ['rfluxmtx'] + opt.split()
stdin = None
if sender.form == 's':
sender_path = os.path.join(tempd, 'sender')
with open(sender_path, 'wb') as wtr:
wtr.write(sender.sender)
cmd.extend([sender_path, receiver_path])
elif sender.form == 'p':
cmd.extend(['-I+', '-faa', '-y', str(sender.yres), '-', receiver_path])
stdin = sender.sender
elif sender.form == 'v':
cmd.extend(["-ffc", "-x", str(sender.xres), "-y", str(sender.yres), "-ld-"])
if out is not None:
util.mkdir_p(out)
out = os.path.join(out, '%04d.hdr')
cmd.extend(["-o", out])
cmd.extend(['-', receiver_path])
stdin = sender.sender
cmd.extend(env_paths)
return util.spcheckout(cmd, inp=stdin)
def rcvr_oct(receiver, env, oct_path):
"""Generate an octree of the environment and the receiver.
Args:
receiver: receiver object
env: environment file paths
oct_path: Path to write the octree to
"""
with tf.TemporaryDirectory() as tempd:
receiver_path = os.path.join(tempd, 'rcvr_path')
with open(receiver_path, 'w') as wtr:
wtr.write(receiver.receiver)
ocmd = ['oconv', '-f'] + env + [receiver_path]
octree = util.spcheckout(ocmd)
with open(oct_path, 'wb') as wtr:
wtr.write(octree)
def rcontrib(*, sender, modifier: str, octree, out, opt) -> None:
"""Calling rcontrib to generate the matrices.
Args:
sender: Sender object
modifier: modifier str listing the receivers in octree
octree: the octree that includes the environment and the receiver
opt: option string
out: output path
Returns:
None
"""
lopt = opt.split()
lopt.append('-fo+')
with tf.TemporaryDirectory() as tempd:
modifier_path = os.path.join(tempd, 'modifier')
with open(modifier_path, 'w') as wtr:
wtr.write(modifier)
cmd = ['rcontrib'] + lopt
stdin = sender.sender
if sender.form == 'p':
cmd += ['-I+', '-faf', '-y', str(sender.yres)]
elif sender.form == 'v':
util.mkdir_p(out)
out = os.path.join(out, '%04d.hdr')
cmd += ['-ffc', '-x', str(sender.xres), '-y', str(sender.yres)]
cmd += ['-o', out, '-M', modifier_path, octree]
util.spcheckout(cmd, inp=stdin)
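# A minimal workflow sketch tying the two helpers above together, assuming
# `snd` is a Sender object, `rcv` a Receiver object, and `modifier_str` lists
# the receiver modifiers baked into the octree; paths and options are placeholders.
def _example_rcontrib_usage(snd, rcv, modifier_str):
    rcvr_oct(rcv, ["materials.mat", "room.rad"], "scene.oct")
    rcontrib(sender=snd, modifier=modifier_str, octree="scene.oct",
             out="results", opt="-ab 4 -ad 1024")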
| 36.043597
| 93
| 0.584971
| 7,237
| 0.547097
| 0
| 0
| 5,618
| 0.424705
| 0
| 0
| 5,663
| 0.428107
|
38ffdd9e33324c5571bb8c9331725ff628bfcf97
| 7,354
|
py
|
Python
|
QGrain/ui/NNResolverSettingWidget.py
|
yuriok/QGrain
|
39a136d4e89716a26a88d68ffa00d36ef6036279
|
[
"MIT"
] | 4
|
2021-09-04T11:00:07.000Z
|
2022-01-06T12:50:09.000Z
|
QGrain/ui/NNResolverSettingWidget.py
|
yuriok/QGrain
|
39a136d4e89716a26a88d68ffa00d36ef6036279
|
[
"MIT"
] | null | null | null |
QGrain/ui/NNResolverSettingWidget.py
|
yuriok/QGrain
|
39a136d4e89716a26a88d68ffa00d36ef6036279
|
[
"MIT"
] | 2
|
2021-08-23T02:39:07.000Z
|
2021-12-30T03:04:07.000Z
|
__all__ = ["NNResolverSettingWidget"]
import pickle
import numpy as np
import torch
from PySide2.QtCore import QSettings, Qt
from PySide2.QtWidgets import (QCheckBox, QComboBox, QDialog, QDoubleSpinBox,
QGridLayout, QLabel, QSpinBox)
from QGrain.models.NNResolverSetting import (NNResolverSetting,
built_in_distances)
class NNResolverSettingWidget(QDialog):
def __init__(self, parent=None, filename=None, group=None):
super().__init__(parent=parent, f=Qt.Window)
self.setWindowTitle(self.tr("NN Resolver Setting"))
if filename is not None:
self.setting_file = QSettings(filename, QSettings.Format.IniFormat)
if group is not None:
self.setting_file.beginGroup(group)
else:
self.setting_file = None
self.setAttribute(Qt.WA_StyledBackground, True)
self.initialize_ui()
def initialize_ui(self):
self.main_layout = QGridLayout(self)
self.device_label = QLabel(self.tr("Device"))
        self.device_label.setToolTip(self.tr("The neural network framework, PyTorch, can also use an Nvidia GPU for its calculations."))
self.device_combo_box = QComboBox()
self.device_combo_box.addItem("cpu")
if torch.cuda.is_available():
self.device_combo_box.addItem("cuda")
self.main_layout.addWidget(self.device_label, 0, 0)
self.main_layout.addWidget(self.device_combo_box, 0, 1)
self.distance_label = QLabel(self.tr("Distance (Loss) Function"))
        self.distance_label.setToolTip(self.tr("The function used to calculate the difference (or, conversely, the similarity) between two samples."))
self.distance_combo_box = QComboBox()
self.distance_combo_box.addItems(built_in_distances)
# self.distance_combo_box.setCurrentText("log10MSE")
self.distance_combo_box.currentTextChanged.connect(self.on_distance_changed)
self.main_layout.addWidget(self.distance_label, 1, 0)
self.main_layout.addWidget(self.distance_combo_box, 1, 1)
self.min_niter_label = QLabel(self.tr("Minimum N<sub>iteration</sub>"))
self.min_niter_label.setToolTip(self.tr("Minimum number of iterations to perform"))
self.min_niter_input = QSpinBox()
self.min_niter_input.setRange(10, 10000)
self.min_niter_input.setValue(2000)
self.main_layout.addWidget(self.min_niter_label, 2, 0)
self.main_layout.addWidget(self.min_niter_input, 2, 1)
self.max_niter_label = QLabel(self.tr("Maximum N<sub>iteration</sub>"))
self.max_niter_label.setToolTip(self.tr("Maximum number of iterations to perform"))
self.max_niter_input = QSpinBox()
self.max_niter_input.setRange(100, 100000)
self.max_niter_input.setValue(5000)
self.main_layout.addWidget(self.max_niter_label, 3, 0)
self.main_layout.addWidget(self.max_niter_input, 3, 1)
self.tol_label = QLabel(self.tr("-lg(loss<sub>tolerance</sub>)"))
self.tol_label.setToolTip(self.tr("Controls the tolerance of the loss function for termination."))
self.tol_input = QSpinBox()
self.tol_input.setRange(1, 100)
self.tol_input.setValue(10)
self.main_layout.addWidget(self.tol_label, 4, 0)
self.main_layout.addWidget(self.tol_input, 4, 1)
self.ftol_label = QLabel(self.tr("-lg(δ<sub>loss</sub>)"))
self.ftol_label.setToolTip(self.tr("Controls the precision goal for the value of loss function in the stopping criterion."))
self.ftol_input = QSpinBox()
self.ftol_input.setRange(1, 100)
self.ftol_input.setValue(10)
self.main_layout.addWidget(self.ftol_label, 5, 0)
self.main_layout.addWidget(self.ftol_input, 5, 1)
self.lr_label = QLabel(self.tr("Learning Rate (x10<sup>-3</sup>)"))
self.lr_label.setToolTip(self.tr("The learning rate of the neural network to update its weights from gradient."))
self.lr_input = QDoubleSpinBox()
self.lr_input.setDecimals(3)
self.lr_input.setRange(0.001, 1000)
self.lr_input.setValue(15)
self.main_layout.addWidget(self.lr_label, 6, 0)
self.main_layout.addWidget(self.lr_input, 6, 1)
self.eps_label = QLabel(self.tr("-lg(δ<sub>eps</sub>)"))
self.eps_label.setToolTip(self.tr("Controls the step size used for numerical approximation of the jacobian"))
self.eps_input = QSpinBox()
self.eps_input.setRange(1, 100)
self.eps_input.setValue(8)
self.main_layout.addWidget(self.eps_label, 7, 0)
self.main_layout.addWidget(self.eps_input, 7, 1)
def on_distance_changed(self, distance: str):
if distance == "log10MSE":
self.tol_label.setText(self.tr("-loss<sub>tolerance</sub>"))
else:
self.tol_label.setText(self.tr("-lg(loss<sub>tolerance</sub>)"))
@property
def setting(self):
devices = ["cpu", "cuda"]
device = devices[self.device_combo_box.currentIndex()]
distance = self.distance_combo_box.currentText()
min_niter = self.min_niter_input.value()
max_niter = self.max_niter_input.value()
# when using Lg(MSE) distance
tol = -self.tol_input.value() if distance == "log10MSE" else 10**(-self.tol_input.value())
ftol = 10**(-self.ftol_input.value())
lr = self.lr_input.value() / 1000.0
eps = 10**(-self.eps_input.value())
setting = NNResolverSetting(device=device, distance=distance,
min_niter=min_niter, max_niter=max_niter,
tol=tol, ftol=ftol, lr=lr, eps=eps)
return setting
@setting.setter
def setting(self, setting: NNResolverSetting):
self.device_combo_box.setCurrentText(setting.device)
self.distance_combo_box.setCurrentText(setting.distance)
self.min_niter_input.setValue(setting.min_niter)
self.max_niter_input.setValue(setting.max_niter)
if setting.distance == "log10MSE":
self.tol_input.setValue(-setting.tol)
else:
self.tol_input.setValue(-np.log10(setting.tol))
self.ftol_input.setValue(-np.log10(setting.ftol))
self.lr_input.setValue(setting.lr*1000.0)
self.eps_input.setValue(-np.log10(setting.eps))
def save(self):
if self.setting_file is not None:
setting_bytes = pickle.dumps(self.setting)
self.setting_file.setValue("nn_resolver_setting", setting_bytes)
def restore(self):
if self.setting_file is not None:
setting_bytes = self.setting_file.value("nn_resolver_setting", defaultValue=None)
if setting_bytes is not None:
setting = pickle.loads(setting_bytes)
self.setting = setting
else:
self.setting = NNResolverSetting()
if __name__ == "__main__":
import sys
from QGrain.entry import setup_app
app, splash = setup_app()
main = NNResolverSettingWidget()
main.show()
splash.finish(main)
setting = main.setting
setting.device = "cuda"
setting.tol = 1e-9
setting.ftol = 1e-23
setting.lr = 3e-2
setting.eps = 1e-12
main.setting = setting
sys.exit(app.exec_())
| 43.77381
| 147
| 0.663992
| 6,562
| 0.892061
| 0
| 0
| 1,436
| 0.195215
| 0
| 0
| 1,073
| 0.145867
|
ac012aecb07834c5fb29cd6c1e9f0c6f5ac9d379
| 284
|
py
|
Python
|
week3_greedy_algorithms/6_maximum_number_of_prizes/different_summands.py
|
thegautamkumarjaiswal/Algorithm-s_ToolBox_Solutions
|
bb265647ed183f44e0d56f14a4b8b966af73dfd2
|
[
"Apache-2.0"
] | null | null | null |
week3_greedy_algorithms/6_maximum_number_of_prizes/different_summands.py
|
thegautamkumarjaiswal/Algorithm-s_ToolBox_Solutions
|
bb265647ed183f44e0d56f14a4b8b966af73dfd2
|
[
"Apache-2.0"
] | null | null | null |
week3_greedy_algorithms/6_maximum_number_of_prizes/different_summands.py
|
thegautamkumarjaiswal/Algorithm-s_ToolBox_Solutions
|
bb265647ed183f44e0d56f14a4b8b966af73dfd2
|
[
"Apache-2.0"
] | null | null | null |
# Uses python3
n = int(input())
if n == 1:
print(1)
print(1)
quit()
W = n
prizes = []
for i in range(1, n):
if W>2*i:
prizes.append(i)
W -= i
else:
prizes.append(W)
break
print(len(prizes))
print(' '.join([str(i) for i in prizes]))
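# Worked example of the greedy rule above: for n = 8 the loop keeps i while the
# remaining budget W exceeds 2*i, so it takes 1 and 2 and then the leftover 5,
# and the program prints "3" followed by "1 2 5".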
| 15.777778
| 41
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 17
| 0.059859
|
ac015c8aae392f0c5e8f71cda13e428662656402
| 1,158
|
py
|
Python
|
town/migrations/0002_shop.py
|
Snowman88/DODOTOWN
|
a817d645b3002d75a25dc543e8f253648f007ca7
|
[
"Apache-2.0"
] | 1
|
2015-01-07T14:54:19.000Z
|
2015-01-07T14:54:19.000Z
|
town/migrations/0002_shop.py
|
Snowman88/DODOTOWN
|
a817d645b3002d75a25dc543e8f253648f007ca7
|
[
"Apache-2.0"
] | 2
|
2015-01-12T13:48:52.000Z
|
2015-01-12T13:49:45.000Z
|
town/migrations/0002_shop.py
|
Snowman88/DODOTOWN
|
a817d645b3002d75a25dc543e8f253648f007ca7
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import django.utils.timezone
from django.conf import settings
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('town', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='Shop',
fields=[
('id', models.AutoField(auto_created=True, verbose_name='ID', serialize=False, primary_key=True)),
('name', models.CharField(max_length=255, verbose_name='Shop')),
('size', models.IntegerField()),
('address', models.CharField(blank=True, max_length=255, verbose_name='住所', null=True)),
('created', models.DateTimeField(default=django.utils.timezone.now)),
('modified', models.DateTimeField(default=django.utils.timezone.now)),
('owner', models.ForeignKey(to=settings.AUTH_USER_MODEL, verbose_name='所有者')),
],
options={
},
bases=(models.Model,),
),
]
| 35.090909
| 114
| 0.603627
| 997
| 0.853596
| 0
| 0
| 0
| 0
| 0
| 0
| 129
| 0.110445
|
ac01a8777ade5c22566c19425f199dbb6101a624
| 8,700
|
py
|
Python
|
testing/scipy_distutils-0.3.3_34.586/command/build_clib.py
|
fireballpoint1/fortranTOpy
|
55843a62c6f0a2f8e2a777ef70193940d3d2d141
|
[
"Apache-2.0"
] | 1
|
2018-08-26T05:10:56.000Z
|
2018-08-26T05:10:56.000Z
|
testing/scipy_distutils-0.3.3_34.586/command/build_clib.py
|
fireballpoint1/fortranTOpy
|
55843a62c6f0a2f8e2a777ef70193940d3d2d141
|
[
"Apache-2.0"
] | null | null | null |
testing/scipy_distutils-0.3.3_34.586/command/build_clib.py
|
fireballpoint1/fortranTOpy
|
55843a62c6f0a2f8e2a777ef70193940d3d2d141
|
[
"Apache-2.0"
] | 1
|
2018-06-26T18:06:44.000Z
|
2018-06-26T18:06:44.000Z
|
""" Modified version of build_clib that handles fortran source files.
"""
import os
import string
import sys
import re
from glob import glob
from types import *
from distutils.command.build_clib import build_clib as old_build_clib
from distutils.command.build_clib import show_compilers
from scipy_distutils import log, misc_util
from distutils.dep_util import newer_group
from scipy_distutils.misc_util import filter_sources, \
has_f_sources, has_cxx_sources
def get_headers(directory_list):
# get *.h files from list of directories
headers = []
for dir in directory_list:
head = glob(os.path.join(dir,"*.h")) #XXX: *.hpp files??
headers.extend(head)
return headers
def get_directories(list_of_sources):
# get unique directories from list of sources.
direcs = []
for file in list_of_sources:
dir = os.path.split(file)
if dir[0] != '' and not dir[0] in direcs:
direcs.append(dir[0])
return direcs
class build_clib(old_build_clib):
description = "build C/C++/F libraries used by Python extensions"
user_options = old_build_clib.user_options + [
('fcompiler=', None,
"specify the Fortran compiler type"),
]
def initialize_options(self):
old_build_clib.initialize_options(self)
self.fcompiler = None
return
def finalize_options(self):
old_build_clib.finalize_options(self)
self.set_undefined_options('build_ext',
('fcompiler', 'fcompiler'))
#XXX: This is hackish and probably unnecessary,
# could we get rid of this?
from scipy_distutils import misc_util
extra_includes = misc_util.get_environ_include_dirs()
if extra_includes:
print "XXX: are you sure you'll need PYTHONINCLUDES env. variable??"
self.include_dirs.extend(extra_includes)
return
def have_f_sources(self):
for (lib_name, build_info) in self.libraries:
if has_f_sources(build_info.get('sources',[])):
return 1
return 0
def have_cxx_sources(self):
for (lib_name, build_info) in self.libraries:
if has_cxx_sources(build_info.get('sources',[])):
return 1
return 0
def run(self):
if not self.libraries:
return
# Make sure that library sources are complete.
for (lib_name, build_info) in self.libraries:
if not misc_util.all_strings(build_info.get('sources',[])):
raise TypeError,'Library "%s" sources contains unresolved'\
' items (call build_src before built_clib).' % (lib_name)
from distutils.ccompiler import new_compiler
self.compiler = new_compiler(compiler=self.compiler,
dry_run=self.dry_run,
force=self.force)
self.compiler.customize(self.distribution,need_cxx=self.have_cxx_sources())
libraries = self.libraries
self.libraries = None
self.compiler.customize_cmd(self)
self.libraries = libraries
self.compiler.show_customization()
if self.have_f_sources():
from scipy_distutils.fcompiler import new_fcompiler
self.fcompiler = new_fcompiler(compiler=self.fcompiler,
verbose=self.verbose,
dry_run=self.dry_run,
force=self.force)
self.fcompiler.customize(self.distribution)
libraries = self.libraries
self.libraries = None
self.fcompiler.customize_cmd(self)
self.libraries = libraries
self.fcompiler.show_customization()
self.build_libraries(self.libraries)
return
def get_source_files(self):
from build_ext import is_local_src_dir
self.check_library_list(self.libraries)
filenames = []
def visit_func(filenames,dirname,names):
if os.path.basename(dirname) in ['CVS','.svn']:
names[:] = []
return
for name in names:
if name[-1] in "#~":
continue
fullname = os.path.join(dirname,name)
if os.path.isfile(fullname):
filenames.append(fullname)
for (lib_name, build_info) in self.libraries:
sources = build_info.get('sources',[])
sources = filter(lambda s:type(s) is StringType,sources)
filenames.extend(sources)
filenames.extend(get_headers(get_directories(sources)))
depends = build_info.get('depends',[])
for d in depends:
if is_local_src_dir(d):
os.path.walk(d,visit_func,filenames)
elif os.path.isfile(d):
filenames.append(d)
return filenames
def build_libraries(self, libraries):
compiler = self.compiler
fcompiler = self.fcompiler
for (lib_name, build_info) in libraries:
sources = build_info.get('sources')
if sources is None or type(sources) not in (ListType, TupleType):
raise DistutilsSetupError, \
("in 'libraries' option (library '%s'), " +
"'sources' must be present and must be " +
"a list of source filenames") % lib_name
sources = list(sources)
lib_file = compiler.library_filename(lib_name,
output_dir=self.build_clib)
depends = sources + build_info.get('depends',[])
if not (self.force or newer_group(depends, lib_file, 'newer')):
log.debug("skipping '%s' library (up-to-date)", lib_name)
continue
else:
log.info("building '%s' library", lib_name)
macros = build_info.get('macros')
include_dirs = build_info.get('include_dirs')
extra_postargs = build_info.get('extra_compiler_args') or []
c_sources, cxx_sources, f_sources, fmodule_sources \
= filter_sources(sources)
if self.compiler.compiler_type=='msvc':
# this hack works around the msvc compiler attributes
# problem, msvc uses its own convention :(
c_sources += cxx_sources
cxx_sources = []
if fmodule_sources:
print 'XXX: Fortran 90 module support not implemented or tested'
f_sources.extend(fmodule_sources)
objects = []
if c_sources:
log.info("compiling C sources")
objects = compiler.compile(c_sources,
output_dir=self.build_temp,
macros=macros,
include_dirs=include_dirs,
debug=self.debug,
extra_postargs=extra_postargs)
if cxx_sources:
log.info("compiling C++ sources")
old_compiler = self.compiler.compiler_so[0]
self.compiler.compiler_so[0] = self.compiler.compiler_cxx[0]
cxx_objects = compiler.compile(cxx_sources,
output_dir=self.build_temp,
macros=macros,
include_dirs=include_dirs,
debug=self.debug,
extra_postargs=extra_postargs)
objects.extend(cxx_objects)
self.compiler.compiler_so[0] = old_compiler
if f_sources:
log.info("compiling Fortran sources")
f_objects = fcompiler.compile(f_sources,
output_dir=self.build_temp,
macros=macros,
include_dirs=include_dirs,
debug=self.debug,
extra_postargs=[])
objects.extend(f_objects)
self.compiler.create_static_lib(objects, lib_name,
output_dir=self.build_clib,
debug=self.debug)
return
| 38.666667
| 83
| 0.545057
| 7,715
| 0.886782
| 0
| 0
| 0
| 0
| 0
| 0
| 1,113
| 0.127931
|
ac024dc71c56bdd789c2d35559cf132d917d749c
| 1,405
|
py
|
Python
|
mayan/apps/web_links/migrations/0004_make_labes_unique.py
|
nattangwiwat/Mayan-EDMS-recitation
|
fcf16afb56eae812fb99144d65ae1ae6749de0b7
|
[
"Apache-2.0"
] | 343
|
2015-01-05T14:19:35.000Z
|
2018-12-10T19:07:48.000Z
|
mayan/apps/web_links/migrations/0004_make_labes_unique.py
|
nattangwiwat/Mayan-EDMS-recitation
|
fcf16afb56eae812fb99144d65ae1ae6749de0b7
|
[
"Apache-2.0"
] | 191
|
2015-01-03T00:48:19.000Z
|
2018-11-30T09:10:25.000Z
|
mayan/apps/web_links/migrations/0004_make_labes_unique.py
|
nattangwiwat/Mayan-EDMS-recitation
|
fcf16afb56eae812fb99144d65ae1ae6749de0b7
|
[
"Apache-2.0"
] | 257
|
2019-05-14T10:26:37.000Z
|
2022-03-30T03:37:36.000Z
|
from django.db import migrations
def operation_make_labels_unique(apps, schema_editor):
WebLink = apps.get_model(app_label='web_links', model_name='WebLink')
for web_link in WebLink.objects.using(schema_editor.connection.alias).all():
# Look for instances with the same label
duplicate_queryset = WebLink.objects.using(
schema_editor.connection.alias
).filter(label=web_link.label).exclude(pk=web_link.pk)
if duplicate_queryset:
# If a duplicate is found, append the id to the original instance
# label
web_link.label = '{}__{}'.format(web_link.label, web_link.pk)
web_link.save()
def operation_make_labels_unique_reverse(apps, schema_editor):
WebLink = apps.get_model(app_label='web_links', model_name='WebLink')
for web_link in WebLink.objects.using(schema_editor.connection.alias).all():
if web_link.label.endswith('__{}'.format(web_link.pk)):
web_link.label = web_link.label.replace(
'__{}'.format(web_link.pk), ''
)
web_link.save()
class Migration(migrations.Migration):
dependencies = [
('web_links', '0003_auto_20191211_0233'),
]
operations = [
migrations.RunPython(
code=operation_make_labels_unique,
reverse_code=operation_make_labels_unique_reverse
),
]
| 34.268293
| 80
| 0.661922
| 291
| 0.207117
| 0
| 0
| 0
| 0
| 0
| 0
| 210
| 0.149466
|
ac0252914b7769682c26f81c801eeba081b42d28
| 6,251
|
py
|
Python
|
ansible-devel/test/lib/ansible_test/_data/sanity/validate-modules/validate_modules/module_args.py
|
satishcarya/ansible
|
ed091e174c26316f621ac16344a95c99f56bdc43
|
[
"MIT"
] | null | null | null |
ansible-devel/test/lib/ansible_test/_data/sanity/validate-modules/validate_modules/module_args.py
|
satishcarya/ansible
|
ed091e174c26316f621ac16344a95c99f56bdc43
|
[
"MIT"
] | null | null | null |
ansible-devel/test/lib/ansible_test/_data/sanity/validate-modules/validate_modules/module_args.py
|
satishcarya/ansible
|
ed091e174c26316f621ac16344a95c99f56bdc43
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
#
# Copyright (C) 2016 Matt Martz <matt@sivel.net>
# Copyright (C) 2016 Rackspace US, Inc.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import runpy
import json
import os
import subprocess
import sys
from contextlib import contextmanager
from ansible.executor.powershell.module_manifest import PSModuleDepFinder
from ansible.module_utils.basic import FILE_COMMON_ARGUMENTS
from ansible.module_utils.six import reraise
from ansible.module_utils._text import to_bytes, to_text
from .utils import CaptureStd, find_executable, get_module_name_from_filename
class AnsibleModuleCallError(RuntimeError):
pass
class AnsibleModuleImportError(ImportError):
pass
class AnsibleModuleNotInitialized(Exception):
pass
class _FakeAnsibleModuleInit:
def __init__(self):
self.args = tuple()
self.kwargs = {}
self.called = False
def __call__(self, *args, **kwargs):
self.args = args
self.kwargs = kwargs
self.called = True
raise AnsibleModuleCallError('AnsibleModuleCallError')
def _fake_load_params():
pass
@contextmanager
def setup_env(filename):
# Used to clean up imports later
pre_sys_modules = list(sys.modules.keys())
fake = _FakeAnsibleModuleInit()
module = __import__('ansible.module_utils.basic').module_utils.basic
_original_init = module.AnsibleModule.__init__
_original_load_params = module._load_params
setattr(module.AnsibleModule, '__init__', fake)
setattr(module, '_load_params', _fake_load_params)
try:
yield fake
finally:
setattr(module.AnsibleModule, '__init__', _original_init)
setattr(module, '_load_params', _original_load_params)
# Clean up imports to prevent issues with mutable data being used in modules
for k in list(sys.modules.keys()):
# It's faster if we limit to items in ansible.module_utils
# But if this causes problems later, we should remove it
if k not in pre_sys_modules and k.startswith('ansible.module_utils.'):
del sys.modules[k]
def get_ps_argument_spec(filename, collection):
fqc_name = get_module_name_from_filename(filename, collection)
pwsh = find_executable('pwsh')
if not pwsh:
raise FileNotFoundError('Required program for PowerShell arg spec inspection "pwsh" not found.')
module_path = os.path.join(os.getcwd(), filename)
b_module_path = to_bytes(module_path, errors='surrogate_or_strict')
with open(b_module_path, mode='rb') as module_fd:
b_module_data = module_fd.read()
ps_dep_finder = PSModuleDepFinder()
ps_dep_finder.scan_module(b_module_data, fqn=fqc_name)
# For ps_argspec.ps1 to compile Ansible.Basic it also needs the AddType module_util.
ps_dep_finder._add_module((b"Ansible.ModuleUtils.AddType", ".psm1", None), wrapper=False)
util_manifest = json.dumps({
        'module_path': to_text(module_path, errors='surrogate_or_strict'),
'ansible_basic': ps_dep_finder.cs_utils_module["Ansible.Basic"]['path'],
'ps_utils': dict([(name, info['path']) for name, info in ps_dep_finder.ps_modules.items()]),
})
script_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'ps_argspec.ps1')
proc = subprocess.Popen([script_path, util_manifest], stdout=subprocess.PIPE, stderr=subprocess.PIPE,
shell=False)
stdout, stderr = proc.communicate()
if proc.returncode != 0:
raise AnsibleModuleImportError("STDOUT:\n%s\nSTDERR:\n%s" % (stdout.decode('utf-8'), stderr.decode('utf-8')))
kwargs = json.loads(stdout)
# the validate-modules code expects the options spec to be under the argument_spec key not options as set in PS
kwargs['argument_spec'] = kwargs.pop('options', {})
return kwargs['argument_spec'], (), kwargs
def get_py_argument_spec(filename, collection):
name = get_module_name_from_filename(filename, collection)
with setup_env(filename) as fake:
try:
with CaptureStd():
runpy.run_module(name, run_name='__main__', alter_sys=True)
except AnsibleModuleCallError:
pass
except BaseException as e:
# we want to catch all exceptions here, including sys.exit
reraise(AnsibleModuleImportError, AnsibleModuleImportError('%s' % e), sys.exc_info()[2])
if not fake.called:
raise AnsibleModuleNotInitialized()
try:
# for ping kwargs == {'argument_spec':{'data':{'type':'str','default':'pong'}}, 'supports_check_mode':True}
if 'argument_spec' in fake.kwargs:
argument_spec = fake.kwargs['argument_spec']
else:
argument_spec = fake.args[0]
# If add_file_common_args is truish, add options from FILE_COMMON_ARGUMENTS when not present.
# This is the only modification to argument_spec done by AnsibleModule itself, and which is
# not caught by setup_env's AnsibleModule replacement
if fake.kwargs.get('add_file_common_args'):
for k, v in FILE_COMMON_ARGUMENTS.items():
if k not in argument_spec:
argument_spec[k] = v
return argument_spec, fake.args, fake.kwargs
except (TypeError, IndexError):
return {}, (), {}
def get_argument_spec(filename, collection):
if filename.endswith('.py'):
return get_py_argument_spec(filename, collection)
else:
return get_ps_argument_spec(filename, collection)
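# A hedged usage sketch: the module path below is a placeholder, and
# `collection` may be None for modules that live outside an Ansible collection.
def _example_get_argument_spec():
    argument_spec, args, kwargs = get_argument_spec(
        'lib/ansible/modules/ping.py', collection=None)
    # e.g. the option names declared by the module ('data' for ping)
    return sorted(argument_spec)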
| 36.555556
| 117
| 0.699408
| 479
| 0.076628
| 982
| 0.157095
| 998
| 0.159654
| 0
| 0
| 2,098
| 0.335626
|
ac0406d097b2c425817270a16cec9aaa0dab57d1
| 425
|
py
|
Python
|
events/migrations/0003_invitation_detail.py
|
ebar0n/mishteh
|
dd025add9b80dff2253c1ee976fc656dff3abc03
|
[
"MIT"
] | null | null | null |
events/migrations/0003_invitation_detail.py
|
ebar0n/mishteh
|
dd025add9b80dff2253c1ee976fc656dff3abc03
|
[
"MIT"
] | null | null | null |
events/migrations/0003_invitation_detail.py
|
ebar0n/mishteh
|
dd025add9b80dff2253c1ee976fc656dff3abc03
|
[
"MIT"
] | null | null | null |
# Generated by Django 2.2 on 2019-10-13 19:09
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [("events", "0002_auto_20191013_1712")]
operations = [
migrations.AddField(
model_name="invitation",
name="detail",
field=models.TextField(default="", verbose_name="detail"),
preserve_default=False,
)
]
| 23.611111
| 70
| 0.623529
| 334
| 0.785882
| 0
| 0
| 0
| 0
| 0
| 0
| 108
| 0.254118
|
ac045942a364b8e9223f019c563338e05ffde92d
| 1,954
|
py
|
Python
|
pygitscrum/print.py
|
thib1984/pygitscrum
|
4f5f726e5b3c95f93da33c21da51953657bd0349
|
[
"MIT"
] | 2
|
2021-04-23T11:10:32.000Z
|
2021-04-23T11:10:41.000Z
|
pygitscrum/print.py
|
thib1984/pygitscrum
|
4f5f726e5b3c95f93da33c21da51953657bd0349
|
[
"MIT"
] | 2
|
2021-11-23T09:26:50.000Z
|
2021-11-23T09:27:02.000Z
|
pygitscrum/print.py
|
thib1984/pygitscrum
|
4f5f726e5b3c95f93da33c21da51953657bd0349
|
[
"MIT"
] | null | null | null |
"""
print scripts
"""
from termcolor import colored
from pygitscrum.args import compute_args
import colorama
def print_resume_list(list_to_print, message):
"""
print list summary
"""
if len(list_to_print) > 0:
print("")
print(
my_colored(
message + " : ",
"green",
)
)
print(
my_colored(
"\n".join(map(str, list_to_print)),
"yellow",
)
)
print(
my_colored(
"total : " + str(len(list_to_print)),
"green",
)
)
def print_resume_map(dict_to_print, message):
"""
print dict summary
"""
if len(dict_to_print) > 0:
print("")
print(my_colored(message + " : ", "green"))
for key in dict_to_print:
print(
my_colored(
key
+ " --> "
+ str(dict_to_print[key])
+ " elements",
"yellow",
)
)
print(
my_colored(
"total : "
+ str(len(dict_to_print))
+ " --> "
+ str(sum(dict_to_print.values()))
+ " elements ",
"green",
)
)
def print_debug(message):
"""
print debug message
"""
if compute_args().debug:
print("debug : " + message)
def print_y(message):
"""
print yellow message
"""
print(my_colored(message, "yellow"))
def print_g(message):
"""
print green message
"""
print(my_colored(message, "green"))
def print_r(message):
"""
print red message
"""
print(my_colored(message, "red"))
def my_colored(message, color):
    """
    return the message wrapped in color, unless --nocolor is set
    """
if compute_args().nocolor:
return message
return colored(message, color)
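# A small usage sketch of the summary helpers above; the list/dict contents are
# placeholders, and colouring still goes through my_colored/compute_args.
def _example_summaries():
    print_resume_list(["repo-a", "repo-b"], "repos with pending commits")
    print_resume_map({"repo-a": 3, "repo-b": 1}, "commits per repo")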
| 20.14433
| 53
| 0.449335
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 377
| 0.192938
|
ac062225c63cd5c3323bbc8f4dcab95e8e43641a
| 261
|
py
|
Python
|
test/_test_compute_m.py
|
yiruiliu110/eegnn
|
253773c301681bb00b4789c34f48c82468ad16da
|
[
"MIT"
] | null | null | null |
test/_test_compute_m.py
|
yiruiliu110/eegnn
|
253773c301681bb00b4789c34f48c82468ad16da
|
[
"MIT"
] | null | null | null |
test/_test_compute_m.py
|
yiruiliu110/eegnn
|
253773c301681bb00b4789c34f48c82468ad16da
|
[
"MIT"
] | null | null | null |
import torch
from estimation import compute_m
i = [[0, 1, 1, 2],
[2, 0, 2, 1]]
v_z = [3, 4, 5, 2]
v_c = [0, 1, 1, 0]
z = torch.sparse_coo_tensor(i, v_z, (3, 3))
c = torch.sparse_coo_tensor(i, v_c, (3, 3))
max_K = 10
m = compute_m(z, c, max_K)
print(m)
| 15.352941
| 43
| 0.578544
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
ac07185d13ca3e632e2ca4e17fcc91869d099238
| 3,677
|
py
|
Python
|
test/SMSGateway_test.py
|
S2Innovation/ds-s2i-smsgateway
|
eed5ce3d630c26b0fd73117d79c84606a12bc783
|
[
"MIT"
] | null | null | null |
test/SMSGateway_test.py
|
S2Innovation/ds-s2i-smsgateway
|
eed5ce3d630c26b0fd73117d79c84606a12bc783
|
[
"MIT"
] | null | null | null |
test/SMSGateway_test.py
|
S2Innovation/ds-s2i-smsgateway
|
eed5ce3d630c26b0fd73117d79c84606a12bc783
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# This file is part of the SMSGateway project
#
#
#
# Distributed under the terms of the MIT license.
# See LICENSE.txt for more info.
"""Contain the tests for the SMSGateway for PANIC."""
# Path
import sys
import os
path = os.path.join(os.path.dirname(__file__), os.pardir)
sys.path.insert(0, os.path.abspath(path))
# Imports
from time import sleep
from mock import MagicMock
from PyTango import DevFailed, DevState
from devicetest import DeviceTestCase, main
from SMSGateway import SMSGateway
# Note:
#
# Since the device uses an inner thread, it is necessary to
# wait during the tests in order to let the device update itself.
# Hence, the sleep calls have to be long enough not to produce
# any inconsistent behavior. However, the unittests need to run fast.
# Here, we use a factor 3 between the read period and the sleep calls.
#
# Look at devicetest examples for more advanced testing
# Device test case
class SMSGatewayDeviceTestCase(DeviceTestCase):
"""Test case for packet generation."""
# PROTECTED REGION ID(SMSGateway.test_additionnal_import) ENABLED START #
# PROTECTED REGION END # // SMSGateway.test_additionnal_import
device = SMSGateway
properties = {'IP': '', 'PIN': '9044',
}
empty = None # Should be []
@classmethod
def mocking(cls):
"""Mock external libraries."""
# Example : Mock numpy
# cls.numpy = SMSGateway.numpy = MagicMock()
# PROTECTED REGION ID(SMSGateway.test_mocking) ENABLED START #
# PROTECTED REGION END # // SMSGateway.test_mocking
def test_properties(self):
# test the properties
# PROTECTED REGION ID(SMSGateway.test_properties) ENABLED START #
# PROTECTED REGION END # // SMSGateway.test_properties
pass
def test_State(self):
"""Test for State"""
# PROTECTED REGION ID(SMSGateway.test_State) ENABLED START #
self.device.State()
# PROTECTED REGION END # // SMSGateway.test_State
def test_Status(self):
"""Test for Status"""
# PROTECTED REGION ID(SMSGateway.test_Status) ENABLED START #
self.device.Status()
# PROTECTED REGION END # // SMSGateway.test_Status
def test_Reset(self):
"""Test for Reset"""
# PROTECTED REGION ID(SMSGateway.test_Reset) ENABLED START #
self.device.Reset()
# PROTECTED REGION END # // SMSGateway.test_Reset
def test_Connect(self):
"""Test for Connect"""
# PROTECTED REGION ID(SMSGateway.test_Connect) ENABLED START #
self.device.Connect()
# PROTECTED REGION END # // SMSGateway.test_Connect
def test_SendSMS(self):
"""Test for SendSMS"""
# PROTECTED REGION ID(SMSGateway.test_SendSMS) ENABLED START #
self.device.SendSMS()
# PROTECTED REGION END # // SMSGateway.test_SendSMS
def test_SetPin(self):
"""Test for SetPin"""
# PROTECTED REGION ID(SMSGateway.test_SetPin) ENABLED START #
self.device.SetPin()
# PROTECTED REGION END # // SMSGateway.test_SetPin
def test_TextMessage(self):
"""Test for TextMessage"""
# PROTECTED REGION ID(SMSGateway.test_TextMessage) ENABLED START #
self.device.TextMessage
# PROTECTED REGION END # // SMSGateway.test_TextMessage
def test_Phone(self):
"""Test for Phone"""
# PROTECTED REGION ID(SMSGateway.test_Phone) ENABLED START #
self.device.Phone
# PROTECTED REGION END # // SMSGateway.test_Phone
# Main execution
if __name__ == "__main__":
main()
| 32.830357
| 77
| 0.661409
| 2,650
| 0.720696
| 0
| 0
| 292
| 0.079413
| 0
| 0
| 2,344
| 0.637476
|
ac07adb5420f61816fe1726ed429cadf16e37902
| 505
|
py
|
Python
|
simfile/_private/serializable.py
|
garcia/simfile
|
4e15660c964d8d3c0e6d1f69431138e7eb4db288
|
[
"MIT"
] | 22
|
2017-04-24T05:37:13.000Z
|
2022-03-08T00:41:37.000Z
|
simfile/_private/serializable.py
|
garcia/simfile
|
4e15660c964d8d3c0e6d1f69431138e7eb4db288
|
[
"MIT"
] | 10
|
2021-05-31T01:21:56.000Z
|
2022-03-17T04:26:54.000Z
|
simfile/_private/serializable.py
|
garcia/simfile
|
4e15660c964d8d3c0e6d1f69431138e7eb4db288
|
[
"MIT"
] | 3
|
2019-06-05T15:23:53.000Z
|
2021-09-11T02:39:36.000Z
|
from abc import ABCMeta, abstractmethod
from io import StringIO
from typing import TextIO
class Serializable(metaclass=ABCMeta):
@abstractmethod
def serialize(self, file: TextIO) -> None:
"""
Write the object to provided text file object as MSD.
"""
pass
def __str__(self) -> str:
"""
Convert the object to an MSD string.
"""
serialized = StringIO()
self.serialize(serialized)
return serialized.getvalue()
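# A minimal concrete subclass sketch (the key/value tag format is illustrative):
# it shows how __str__ funnels serialize() through an in-memory buffer.
class _ExampleTag(Serializable):
    def __init__(self, key: str, value: str):
        self.key = key
        self.value = value

    def serialize(self, file: TextIO) -> None:
        file.write(f"#{self.key}:{self.value};\n")

# str(_ExampleTag("TITLE", "My Song")) -> '#TITLE:My Song;\n'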
| 24.047619
| 61
| 0.615842
| 413
| 0.817822
| 0
| 0
| 161
| 0.318812
| 0
| 0
| 137
| 0.271287
|
ac07f9a51ba5bae2e9b9b9afd0ca35481fa33be3
| 214
|
py
|
Python
|
Flask/Lezione4/webapp/project/serate/templates/serate/forms.py
|
nick87ds/MaterialeSerate
|
51627e47ff1d3c3ecfc9ce6741c04b91b3295359
|
[
"MIT"
] | 12
|
2021-12-12T22:19:52.000Z
|
2022-03-18T11:45:17.000Z
|
Flask/Lezione4/webapp/project/serate/templates/serate/forms.py
|
nick87ds/MaterialeSerate
|
51627e47ff1d3c3ecfc9ce6741c04b91b3295359
|
[
"MIT"
] | 1
|
2022-03-23T13:58:33.000Z
|
2022-03-23T14:05:08.000Z
|
Flask/Lezione4/webapp/project/serate/templates/serate/forms.py
|
nick87ds/MaterialeSerate
|
51627e47ff1d3c3ecfc9ce6741c04b91b3295359
|
[
"MIT"
] | 7
|
2021-02-01T22:09:14.000Z
|
2021-06-22T08:30:16.000Z
|
from time import strftime
from flask_wtf import FlaskForm
from wtforms import (
Form,
validators,
StringField,
IntegerField,
SubmitField,
BooleanField,
SelectField,
TextAreaField,
)
| 16.461538
| 31
| 0.705607
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
ac0aebc29b01daccd8a1302b366f51ee3eb40958
| 190
|
py
|
Python
|
intered/bin/django-admin.py
|
allenallen/interedregistration
|
d6b93bfc33d7bb9bfbabdcdb27b685f3a6be3ea9
|
[
"MIT"
] | null | null | null |
intered/bin/django-admin.py
|
allenallen/interedregistration
|
d6b93bfc33d7bb9bfbabdcdb27b685f3a6be3ea9
|
[
"MIT"
] | 6
|
2020-02-11T23:05:13.000Z
|
2021-06-10T20:43:51.000Z
|
intered/bin/django-admin.py
|
allenallen/interedregistration
|
d6b93bfc33d7bb9bfbabdcdb27b685f3a6be3ea9
|
[
"MIT"
] | null | null | null |
#!/home/allen/Documents/TamarawTechProjects/interedregistration/intered/bin/python3
from django.core import management
if __name__ == "__main__":
management.execute_from_command_line()
| 31.666667
| 83
| 0.821053
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 93
| 0.489474
|
ac0b33a69404bee3fc2c70f72e63ffeda7d74b5d
| 746
|
py
|
Python
|
generate_hamming_command.py
|
zoeleeee/mnist_challenge
|
8a98f7dde35ee1d7a1fb77e85ca931000fb71631
|
[
"MIT"
] | null | null | null |
generate_hamming_command.py
|
zoeleeee/mnist_challenge
|
8a98f7dde35ee1d7a1fb77e85ca931000fb71631
|
[
"MIT"
] | null | null | null |
generate_hamming_command.py
|
zoeleeee/mnist_challenge
|
8a98f7dde35ee1d7a1fb77e85ca931000fb71631
|
[
"MIT"
] | null | null | null |
import numpy as np
import os
path = 'preds'
files = os.listdir(path)
lst = []
for f in files:
if f.find('_0_HASH') == -1:
continue
if f.find('CW') == -1:
continue
if f.find('low')==-1 and f.find('high')==-1 and f.find('mix')==-1:
continue
if f.endswith('show.npy'):
lst.append(f)
for f in lst:
strs = f.split('_0_HASH_')
print(strs)
a = np.load(os.path.join(path, strs[0]+'_0_HASH_'+strs[1]))
b = np.load(os.path.join(path, strs[0]+'_20_HASH_'+strs[1]))
c = np.load(os.path.join(path, strs[0]+'_40_HASH_'+strs[1]))
d = np.load(os.path.join(path, strs[0]+'_60_HASH_'+strs[1]))
np.save(os.path.join(path, strs[0]+'_80_HASH_'+strs[1]), np.hstack((a,b,c,d)))
| 25.724138
| 82
| 0.567024
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 110
| 0.147453
|
ac0bb6d2bc717836589dada86f31d1c73d9161be
| 3,416
|
py
|
Python
|
benchmarks/benchmarks/reflect.py
|
dcortie/refnx
|
037434fa0a64755f72c540d75063986bd517ab10
|
[
"BSD-3-Clause"
] | 32
|
2016-04-18T15:29:59.000Z
|
2022-03-27T08:35:29.000Z
|
benchmarks/benchmarks/reflect.py
|
dcortie/refnx
|
037434fa0a64755f72c540d75063986bd517ab10
|
[
"BSD-3-Clause"
] | 116
|
2015-10-27T04:33:09.000Z
|
2022-02-22T02:02:47.000Z
|
benchmarks/benchmarks/reflect.py
|
dcortie/refnx
|
037434fa0a64755f72c540d75063986bd517ab10
|
[
"BSD-3-Clause"
] | 22
|
2015-09-29T23:21:15.000Z
|
2022-02-27T18:12:18.000Z
|
import os.path
import numpy as np
import pickle
from .common import Benchmark
from refnx.analysis import CurveFitter, Objective, Parameter
import refnx.reflect
from refnx.reflect._creflect import abeles as c_abeles
from refnx.reflect._reflect import abeles
from refnx.reflect import SLD, Slab, Structure, ReflectModel, reflectivity
from refnx.dataset import ReflectDataset as RD
class Abeles(Benchmark):
def setup(self):
self.q = np.linspace(0.005, 0.5, 50000)
self.layers = np.array([[0, 2.07, 0, 3],
[50, 3.47, 0.0001, 4],
[200, -0.5, 1e-5, 5],
[50, 1, 0, 3],
[0, 6.36, 0, 3]])
self.repeat = 20
self.number = 10
def time_cabeles(self):
c_abeles(self.q, self.layers)
def time_abeles(self):
abeles(self.q, self.layers)
def time_reflectivity_constant_dq_q(self):
reflectivity(self.q, self.layers)
def time_reflectivity_pointwise_dq(self):
reflectivity(self.q, self.layers, dq=0.05 * self.q)
class Reflect(Benchmark):
timeout = 120.
# repeat = 2
def setup(self):
pth = os.path.dirname(os.path.abspath(refnx.reflect.__file__))
e361 = RD(os.path.join(pth, 'test', 'e361r.txt'))
sio2 = SLD(3.47, name='SiO2')
si = SLD(2.07, name='Si')
d2o = SLD(6.36, name='D2O')
polymer = SLD(1, name='polymer')
# e361 is an older dataset, but well characterised
structure361 = si | sio2(10, 4) | polymer(200, 3) | d2o(0, 3)
model361 = ReflectModel(structure361, bkg=2e-5)
model361.scale.vary = True
model361.bkg.vary = True
model361.scale.range(0.1, 2)
model361.bkg.range(0, 5e-5)
model361.dq = 5.
# d2o
structure361[-1].sld.real.vary = True
structure361[-1].sld.real.range(6, 6.36)
self.p = structure361[1].thick
structure361[1].thick.vary = True
structure361[1].thick.range(5, 20)
structure361[2].thick.vary = True
structure361[2].thick.range(100, 220)
structure361[2].sld.real.vary = True
structure361[2].sld.real.range(0.2, 1.5)
self.structure361 = structure361
self.model361 = model361
# e361.x_err = None
self.objective = Objective(self.model361,
e361)
self.fitter = CurveFitter(self.objective, nwalkers=200)
self.fitter.initialise('jitter')
def time_reflect_emcee(self):
# test how fast the emcee sampler runs in serial mode
self.fitter.sampler.run_mcmc(self.fitter._state, 30)
def time_reflect_sampling_parallel(self):
# discrepancies in different runs may be because of different numbers
# of processors
self.model361.threads = 1
self.fitter.sample(30, pool=-1)
def time_pickle_objective(self):
# time taken to pickle an objective
s = pickle.dumps(self.objective)
pickle.loads(s)
def time_pickle_model(self):
# time taken to pickle a model
s = pickle.dumps(self.model361)
pickle.loads(s)
    def time_pickle_parameter(self):
# time taken to pickle a parameter
s = pickle.dumps(self.p)
pickle.loads(s)
def time_structure_slabs(self):
self.structure361.slabs()
| 30.774775
| 77
| 0.603923
| 3,029
| 0.88671
| 0
| 0
| 0
| 0
| 0
| 0
| 371
| 0.108607
|
ac0d30f40fdb142e0b5b6ff9a6caa98ff58e125e
| 1,257
|
py
|
Python
|
app/__init__.py
|
annerachael/fifth_year_project
|
3843b4e6315e9a5374f80a2aabc0bcb8423fd0d9
|
[
"Apache-2.0"
] | null | null | null |
app/__init__.py
|
annerachael/fifth_year_project
|
3843b4e6315e9a5374f80a2aabc0bcb8423fd0d9
|
[
"Apache-2.0"
] | null | null | null |
app/__init__.py
|
annerachael/fifth_year_project
|
3843b4e6315e9a5374f80a2aabc0bcb8423fd0d9
|
[
"Apache-2.0"
] | null | null | null |
# app/__init__.py
from flask import Flask
from redis import Redis
from rq_scheduler import Scheduler
from flask_migrate import Migrate
from flask_login import LoginManager
from flask_bootstrap import Bootstrap
from flask_sqlalchemy import SQLAlchemy
"""
This file shall contain configurations for the web app
"""
# create app
app = Flask(__name__)
db = SQLAlchemy()
migrate = Migrate()
bootstrap = Bootstrap()
# Handles login functionality, e.g. creating and removing login sessions
login = LoginManager()
def create_app():
global app, db, migrate, login, bootstrap
import instance.config as cfg
app.config['DEBUG'] = cfg.DEBUG
app.config['SECRET_KEY'] = 'secretkey'
# database set up
app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///Info.db'
app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False
# Initialize Redis and RQ
app.config['REDIS_URL'] = 'redis://'
app.redis = Redis.from_url(app.config['REDIS_URL'])
# The queue where periodic tasks are submitted
queue_name = 'ann_tasks'
app.scheduler = Scheduler(queue_name, connection=app.redis)
db.init_app(app)
login.init_app(app)
migrate.init_app(app, db)
bootstrap.init_app(app)
from app import models, views
return app
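# A minimal entry-point sketch (names are illustrative, never called here);
# create_app() already sets DEBUG from instance/config, so we just echo it.
def _example_entry_point():
    flask_app = create_app()
    flask_app.run(debug=flask_app.config['DEBUG'])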
| 25.14
| 69
| 0.731106
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 397
| 0.315831
|
ac0e680fa5ad08e1900fc7ebe2eb246aebdc7e1d
| 148
|
py
|
Python
|
automation/openwebsites.py
|
abrahammachuki/dnav3-code
|
d278bf4facbc0702342f9c86a3845f0fb1c247bf
|
[
"MIT"
] | null | null | null |
automation/openwebsites.py
|
abrahammachuki/dnav3-code
|
d278bf4facbc0702342f9c86a3845f0fb1c247bf
|
[
"MIT"
] | null | null | null |
automation/openwebsites.py
|
abrahammachuki/dnav3-code
|
d278bf4facbc0702342f9c86a3845f0fb1c247bf
|
[
"MIT"
] | null | null | null |
import webbrowser
website = ['site1', 'site2', 'site3', 'site4']
for name in website:
    site = 'http://' + name
    webbrowser.open(site)
| 29.6
| 46
| 0.662162
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 37
| 0.25
|
ac105d162c447186bd1f92785b821628a3aa1ff5
| 1,865
|
py
|
Python
|
hedger/tournament.py
|
dmalison/hedger
|
8db634a484769fb4f3feb945c1847ef50803fafe
|
[
"MIT"
] | null | null | null |
hedger/tournament.py
|
dmalison/hedger
|
8db634a484769fb4f3feb945c1847ef50803fafe
|
[
"MIT"
] | null | null | null |
hedger/tournament.py
|
dmalison/hedger
|
8db634a484769fb4f3feb945c1847ef50803fafe
|
[
"MIT"
] | null | null | null |
import hedger
from hedger import Result
class Tournament:
def __init__(self, entries):
self._entries = entries
self._brackets = self._get_brackets()
self._brackets_info = self._get_brackets_info()
@property
def entries(self):
return self._entries
@property
def brackets(self):
return self._brackets
@property
def brackets_info(self):
return self._brackets_info
def _get_brackets(self):
n_brackets = self._get_n_brackets()
brackets = list()
for code in range(n_brackets):
results = self._get_results_from_code(code)
bracket = self._make_bracket(results)
brackets.append(bracket)
return brackets
def _get_brackets_info(self):
return {
bracket.code: (bracket.prob, bracket.winner_names)
for bracket in self._brackets
}
def _get_n_brackets(self):
n_matches = self._get_n_matches()
return 2 ** n_matches
def _get_n_matches(self):
n_entries = len(self._entries)
return n_entries - 1
def _get_results_from_code(self, bracket_index):
binary = self._convert_to_binary(bracket_index)
results = [self._decode_bit_as_result(b) for b in binary]
return results
def _convert_to_binary(self, bracket_index):
n_digits = self._get_n_matches()
binary_fmt = "{" + "0:0{}b".format(n_digits) + "}"
return binary_fmt.format(bracket_index)
def _decode_bit_as_result(self, bit):
if int(bit) == Result.TOP_WINS.value:
return Result.TOP_WINS
else:
return Result.BOTTOM_WINS
def _make_bracket(self, results):
bracket_builder = hedger.BracketBuilder(self, results)
bracket = bracket_builder.get_bracket()
return bracket
| 27.835821
| 65
| 0.641287
| 1,822
| 0.976944
| 0
| 0
| 197
| 0.10563
| 0
| 0
| 14
| 0.007507
|
ac110f6329cbb307af9cb472a6aa5c74e249a2ef
| 5,294
|
py
|
Python
|
src/tentaclio/clients/sqla_client.py
|
datavaluepeople/tentaclio
|
eb6920a0e115c6c08043063a8c1013d812ec34c8
|
[
"MIT"
] | 12
|
2019-04-30T16:07:42.000Z
|
2021-12-08T08:02:09.000Z
|
src/tentaclio/clients/sqla_client.py
|
octoenergy/tentaclio
|
eb6920a0e115c6c08043063a8c1013d812ec34c8
|
[
"MIT"
] | 74
|
2019-04-25T11:18:22.000Z
|
2022-01-18T11:31:14.000Z
|
src/tentaclio/clients/sqla_client.py
|
datavaluepeople/tentaclio
|
eb6920a0e115c6c08043063a8c1013d812ec34c8
|
[
"MIT"
] | 4
|
2019-05-05T13:13:21.000Z
|
2022-01-14T00:33:07.000Z
|
"""Provide sql connection using sqlalchemy.
This client is used for convenience when using different sql
providers and unifying the client creation. We do not intend to rewrite sqlalchemy.
"""
import contextlib
from typing import Container, Generator, Optional, Union
import pandas as pd
from sqlalchemy.engine import Connection, create_engine, result
from sqlalchemy.engine import url as sqla_url
from sqlalchemy.orm import session, sessionmaker
from sqlalchemy.sql.schema import MetaData
from tentaclio import urls
from . import base_client, decorators
__all__ = ["SQLAlchemyClient", "bound_session", "atomic_session"]
SessionGenerator = Generator[None, session.Session, None]
class _TrueContainer(Container[str]):
"""String container that always returns true.
As we don't have control over the protocols that sqlalchemy is able to
accept. We shouldn't try to limit which urls can be used here or not.
So this container will play well with the super class checks to allow
schemes for different clients.
"""
def __contains__(self, obj: object) -> bool:
"""Return true."""
return True
class SQLAlchemyClient(base_client.BaseClient["SQLAlchemyClient"]):
"""SQLAlchemy based client."""
# The allowed drivers depend on the dependencies installed.
allowed_schemes: Container[str] = _TrueContainer()
# Default connect_args
connect_args_default: dict = {}
conn: Connection
engine = None
execution_options: dict
connect_args: dict
database: str
drivername: str
username: Optional[str]
password: Optional[str]
host: Optional[str]
port: Optional[int]
def __init__(
self, url: Union[str, urls.URL], execution_options: dict = None, connect_args: dict = None
) -> None:
"""Create sqlalchemy client based on the passed url.
This is a wrapper for sqlalchemy engine/connection creation.
"""
self.execution_options = execution_options or {}
self.connect_args = connect_args or self.connect_args_default
super().__init__(url)
self._extract_url_params()
def _extract_url_params(self) -> None:
"""Extract the database parameters from the url."""
# the database doesn't start with /
database = self.url.path[1:]
self.database = database
self.drivername = self.url.scheme
self.username = self.url.username
self.password = self.url.password
self.host = self.url.hostname
self.port = self.url.port
self.url_query = self.url.query
# Connection methods:
def _connect(self) -> Connection:
parsed_url = sqla_url.URL(
drivername=self.drivername,
username=self.username,
password=self.password,
host=self.host,
port=self.port,
database=self.database,
query=self.url_query,
)
if self.engine is None:
self.engine = create_engine(
parsed_url,
execution_options=self.execution_options,
connect_args=self.connect_args,
)
return self.engine.connect()
def _get_raw_conn(self):
"""Acquire raw DBAPI connection from the pool."""
return self.conn.engine.raw_connection()
# Schema methods:
def set_schema(self, meta_data: MetaData) -> None:
"""Create tables based on the metadata object."""
meta_data.create_all(bind=self.conn)
def delete_schema(self, meta_data: MetaData) -> None:
"""Delete tables based on the metadata object."""
meta_data.drop_all(bind=self.conn)
# Query methods:
@decorators.check_conn
def query(self, sql_query: str, **kwargs) -> result.ResultProxy:
"""Execute a read-only SQL query, and return results.
This will not commit any changes to the database.
"""
return self.conn.execute(sql_query, **kwargs)
@decorators.check_conn
def execute(self, sql_query: str, **kwargs) -> None:
"""Execute a raw SQL query command."""
trans = self.conn.begin()
try:
self.conn.execute(sql_query, **kwargs)
except Exception:
trans.rollback()
raise
else:
trans.commit()
# Dataframe methods:
@decorators.check_conn
def get_df(self, sql_query: str, params: dict = None, **kwargs) -> pd.DataFrame:
"""Run a raw SQL query and return a data frame."""
return pd.read_sql(sql_query, self.conn, params=params, **kwargs)
# Session context managers:
@contextlib.contextmanager
def bound_session(connection: Connection) -> SessionGenerator:
"""Context manager for a sqlalchemy session."""
Session = sessionmaker()
sess = Session(bind=connection)
try:
yield sess
finally:
sess.close()
@contextlib.contextmanager
def atomic_session(connection: Connection) -> SessionGenerator:
"""Context manager for a session that will rollback in case of an exception."""
Session = sessionmaker()
sess = Session(bind=connection)
try:
yield sess
except Exception:
sess.rollback()
raise
else:
sess.commit()
finally:
sess.close()
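# A hedged usage sketch for the session helpers above; the SQL statement is a
# placeholder and `connection` is assumed to come from an already-connected
# SQLAlchemyClient (its public connect/close lifecycle lives in BaseClient).
def _example_atomic_write(connection: Connection) -> None:
    """Insert a row and commit, rolling back automatically on error."""
    with atomic_session(connection) as sess:
        sess.execute("INSERT INTO audit_log (event) VALUES ('backfill')")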
| 29.909605
| 98
| 0.657537
| 3,903
| 0.73725
| 607
| 0.114658
| 1,517
| 0.286551
| 0
| 0
| 1,508
| 0.284851
|
ac119758f2d8245405f37028d9f047dacd5dfbff
| 1,678
|
py
|
Python
|
polls/views.py
|
agustinsuana/mysite
|
03e196470520d5b93f60d6be40358332490f349e
|
[
"MIT"
] | null | null | null |
polls/views.py
|
agustinsuana/mysite
|
03e196470520d5b93f60d6be40358332490f349e
|
[
"MIT"
] | null | null | null |
polls/views.py
|
agustinsuana/mysite
|
03e196470520d5b93f60d6be40358332490f349e
|
[
"MIT"
] | null | null | null |
from django.shortcuts import render, get_object_or_404
from django.http import HttpResponse
from django.template import loader
from .models import Question
from django.http import Http404
def index(request):
#last_questions_list = Question.objects.order_by('-pub_date')[:5]
#template = loader.get_template("polls/index.html")
#context = {
# 'last_question_list' : last_questions_list,
#}
#output = ', '.join([q.question_text for q in last_questions_list])
#return HttpResponse(template.render(context, request))
last_questions_list = Question.objects.order_by("-pub_date")[:5]
context = {"last_questions_list" : last_questions_list}
return render(request, 'polls/index.html', context)
def edit(request):
return HttpResponse("Hola mundo, esta es el edit de Polls.")
def delete(request):
return HttpResponse("Hola mundo, esta es el delete de Polls.")
#def detail(request, question_id):
# return HttpResponse("Estas viendo el detalle de %s." % question_id)
#def detail(request, question_id):
# try:
# question = Question.objects.get(pk = question_id)
# except Question.DoesNotExist:
# raise Http404("La pagina no existe")
# return render(request, 'polls/detail.html', {"question" : question})
def detail(request, question_id):
question = get_object_or_404(Question, pk = question_id)
return render(request, 'polls/detail.html', {'question' : question})
def results(request, question_id):
response = "Estas buscando los resultados de %s."
return HttpResponse(response % question_id)
def vote(request, question_id):
return HttpResponse("Tu has votado por %s" % question_id)
| 34.958333
| 73
| 0.722288
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 879
| 0.523838
|
ac12e55df64cb0e22fe581ffef24c9ea5ad38135
| 2,343
|
py
|
Python
|
cctk/file.py
|
ekwan/cctk
|
85cb8d0b714a80e8e353987dc24006695f1d0532
|
[
"Apache-2.0"
] | 10
|
2020-01-16T15:26:57.000Z
|
2022-01-15T23:12:00.000Z
|
cctk/file.py
|
ekwan/cctk
|
85cb8d0b714a80e8e353987dc24006695f1d0532
|
[
"Apache-2.0"
] | 2
|
2020-05-27T21:04:36.000Z
|
2020-09-26T20:49:53.000Z
|
cctk/file.py
|
ekwan/cctk
|
85cb8d0b714a80e8e353987dc24006695f1d0532
|
[
"Apache-2.0"
] | 2
|
2020-09-24T18:44:18.000Z
|
2021-08-05T20:35:51.000Z
|
import os
from abc import ABC, abstractmethod
class File(ABC):
"""
Abstract class representing text files.
"""
@abstractmethod
def __init__(self):
pass
@staticmethod
def write_file(filename, text, overwrite_existing=True):
"""
Writes output text to a file.
Args:
filename (str): path to file, including name (e.g. ``path/to/input.gjf``)
text (str): desired contents of file
overwrite_existing (Bool): whether any existing files should be overwritten or not
Returns:
``True`` if write succeeded, ``False`` otherwise
"""
if not isinstance(text, str):
raise TypeError("cannot write non-string to file!")
if not overwrite_existing and os.path.exists(filename):
raise ValueError(f"{filename} already exists but not allowed to overwrite")
else:
try:
with open(filename, "w+") as output_file:
output_file.write(text)
return True
except OSError as e:
print(e)
return False
@staticmethod
def append_to_file(filename, text):
"""
Appends output text to a file.
Args:
filename (str): path to file, including name (e.g. ``path/to/input.gjf``)
text (str): desired contents of file
Returns:
``True`` if write succeeded, ``False`` otherwise
"""
if not isinstance(text, str):
raise TypeError("cannot write non-string to file!")
if os.path.exists(filename):
try:
with open(filename, "a+") as output_file:
output_file.write(text)
return True
except OSError as e:
print(e)
return False
else:
raise ValueError(f"{filename} does not exist")
@staticmethod
def read_file(filename, lazy=False):
"""
Reads a file and parses into lines.
Args:
filename (str): The path to the file.
Returns:
A list containing all the lines in the file.
"""
with open(filename, "r") as filehandle:
lines = filehandle.read().splitlines()
return lines
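# A short usage sketch of the static helpers above; the path and contents are
# placeholders (File itself is abstract and never instantiated here).
def _example_roundtrip():
    File.write_file("notes.txt", "energy = -76.4\n", overwrite_existing=True)
    File.append_to_file("notes.txt", "converged = True\n")
    return File.read_file("notes.txt")  # ['energy = -76.4', 'converged = True']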
| 28.573171
| 94
| 0.545881
| 2,293
| 0.97866
| 0
| 0
| 2,193
| 0.93598
| 0
| 0
| 1,078
| 0.460094
|
ac14c5baab8284824cd35d4e64729e5b1523569f
| 582
|
py
|
Python
|
elif_bayindir/phase_1/python_basic_1/day_4/q8.py
|
CodedLadiesInnovateTech/-python-challenge-solutions
|
430cd3eb84a2905a286819eef384ee484d8eb9e7
|
[
"MIT"
] | 6
|
2020-05-23T19:53:25.000Z
|
2021-05-08T20:21:30.000Z
|
elif_bayindir/phase_1/python_basic_1/day_4/q8.py
|
CodedLadiesInnovateTech/-python-challenge-solutions
|
430cd3eb84a2905a286819eef384ee484d8eb9e7
|
[
"MIT"
] | 8
|
2020-05-14T18:53:12.000Z
|
2020-07-03T00:06:20.000Z
|
elif_bayindir/phase_1/python_basic_1/day_4/q8.py
|
CodedLadiesInnovateTech/-python-challenge-solutions
|
430cd3eb84a2905a286819eef384ee484d8eb9e7
|
[
"MIT"
] | 39
|
2020-05-10T20:55:02.000Z
|
2020-09-12T17:40:59.000Z
|
# Question 8
# Print even numbers in a list, stop printing when the number is 237
numbers = [
386, 462, 47, 418, 907, 344, 236, 375, 823, 566, 597, 978, 328, 615, 953, 345,
399, 162, 758, 219, 918, 237, 412, 566, 826, 248, 866, 950, 626, 949, 687, 217,
815, 67, 104, 58, 512, 24, 892, 894, 767, 553, 81, 379, 843, 831, 445, 742, 717,
958,743, 527
]
for i in range(len(numbers)):
if numbers[i] % 2 == 0:
print(numbers[i])
elif numbers[i] == 237:
break
# Alternative,
""" for x in numbers:
if x % 2 == 0:
print(x)
elif x == 237:
break """
| 22.384615
| 85
| 0.573883
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 170
| 0.292096
|
ac1751b7ad47eb3e921543a2e5f6b1310543b55f
| 1,377
|
py
|
Python
|
encode_morse.py
|
cmanagoli/github-upload
|
9759b8ee905e1fd37b169231d2150de31e143191
|
[
"MIT"
] | null | null | null |
encode_morse.py
|
cmanagoli/github-upload
|
9759b8ee905e1fd37b169231d2150de31e143191
|
[
"MIT"
] | 4
|
2020-10-14T21:30:35.000Z
|
2020-10-14T21:43:06.000Z
|
encode_morse.py
|
cmanagoli/github-upload
|
9759b8ee905e1fd37b169231d2150de31e143191
|
[
"MIT"
] | null | null | null |
# Author: Chinmai Managoli
import sys as sys
# Morse code dictionary
char_to_dots = {
'A': '.-', 'B': '-...', 'C': '-.-.', 'D': '-..', 'E': '.', 'F': '..-.',
'G': '--.', 'H': '....', 'I': '..', 'J': '.---', 'K': '-.-', 'L': '.-..',
'M': '--', 'N': '-.', 'O': '---', 'P': '.--.', 'Q': '--.-', 'R': '.-.',
'S': '...', 'T': '-', 'U': '..-', 'V': '...-', 'W': '.--', 'X': '-..-',
'Y': '-.--', 'Z': '--..', ' ': ' ', '0': '-----',
'1': '.----', '2': '..---', '3': '...--', '4': '....-', '5': '.....',
'6': '-....', '7': '--...', '8': '---..', '9': '----.',
'&': '.-...', "'": '.----.', '@': '.--.-.', ')': '-.--.-', '(': '-.--.',
':': '---...', ',': '--..--', '=': '-...-', '!': '-.-.--', '.': '.-.-.-',
'-': '-....-', '+': '.-.-.', '"': '.-..-.', '?': '..--..', '/': '-..-.'
}
def encode_morse(message):
message = str(message)
message = message.upper()
try:
for x in message:
print(char_to_dots[x], end=" ")
print("\nMessage was encoded successfully")
# Exceptions
except KeyError:
print("\n" + x + " is an invalid character")
except:
print("\nThere was an error")
if __name__ == "__main__":
print("This program will encode a string into Morse. Unicode characters are not supported.")
string = input("Enter the message to be encoded: ")
encode_morse(string)
sys.exit()
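# For example, encode_morse("SOS") prints "... --- ..." (dot/dash codes
# separated by spaces) followed by the success message.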
| 34.425
| 96
| 0.336964
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 760
| 0.551924
|
ac18c099c076caa8b9713e7d997b71df79627791
| 22,442
|
py
|
Python
|
self_implement_learning_to_adapt/maml_rl.py
|
chi6/Model-based-meta-learning-rl
|
fda134dcbd87ef3e91f339ea2f836f28ec5f7784
|
[
"MIT"
] | 2
|
2019-09-10T10:13:20.000Z
|
2020-07-17T01:37:27.000Z
|
self_implement_learning_to_adapt/maml_rl.py
|
chi6/Model-based-meta-learning-rl
|
fda134dcbd87ef3e91f339ea2f836f28ec5f7784
|
[
"MIT"
] | null | null | null |
self_implement_learning_to_adapt/maml_rl.py
|
chi6/Model-based-meta-learning-rl
|
fda134dcbd87ef3e91f339ea2f836f28ec5f7784
|
[
"MIT"
] | null | null | null |
import tensorflow as tf
import numpy as np
from self_implement_learning_to_adapt.model import construct_fc_weights,construct_inputs,construct_loss,forward_fc
from self_implement_learning_to_adapt.batch_sampler import ParrallelSampler
from self_implement_learning_to_adapt.vectorized_sampler import VectorizedSampler
from rllab.misc import ext
import matplotlib.pyplot as plt
import scipy.signal as signal
from rllab.sampler.stateful_pool import singleton_pool
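# MAML-style meta-learning of a forward dynamics model: the graph below adapts the model
# per task with one gradient step, and actions are selected by MPC over the learned model.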
class MAML(object):
def __init__(self,
step_size,
env,
batch_size,
meta_batch_size,
seed,
n_itr,
max_path_length,
num_grad_updates,
baseline,
policy,
num_samples = 1000,
scope = None,
sess = None,
center_adv=True,
positive_adv=False,
store_paths=False,
whole_paths=True,
fixed_horizon=False,
load_policy = False,
fake_env = None,
save_video = False,
fast_lr = 0.1,
lr = 0.001,
discount = 0.99,
gae_lambda = 1,
):
self.step_size = step_size
self.env = env
self.fake_env = fake_env
self.batch_size = batch_size
self.meta_batch_size = meta_batch_size
self.seed = seed
self.n_itr = n_itr
self.max_path_length = max_path_length
self.num_grad_updates = num_grad_updates
self.discount = discount
self.baseline = baseline
self.gae_lambda = gae_lambda
self.policy = policy
self.center_adv = center_adv
self.positive_adv = positive_adv
self.store_paths = store_paths
self.whole_paths = whole_paths
self.fixed_horizon = fixed_horizon
self.load_policy = load_policy
self.scope = scope
self.num_samples = num_samples
self.s_size = self.env.observation_space.shape[0]
self.a_size = self.env.action_space.shape[0]
print(self.s_size, self.a_size)
self.lr = lr
self.fast_lr = fast_lr
self.loss_list = []
self.reward_list = []
self.fig = None
self.save_video = save_video
self.train_action_inputs, self.train_state_inputs, self.train_goal_inputs = [], [], []
self.test_action_inputs, self.test_state_inputs, self.test_goal_inputs = [], [], []
# select sampler
if singleton_pool.n_parallel >1:
self.sampler = ParrallelSampler(self, n_envs= self.meta_batch_size)
else:
self.sampler = VectorizedSampler(self, n_envs= self.meta_batch_size)
# define trainer
self.trainer = tf.train.AdamOptimizer(learning_rate=self.lr)
        # NOTE: this is a hack: a separate single-task graph (inputs, loss, optimizer) used for MPC and online adaptation
self.f_action_inputs, self.f_state_inputs, self.f_goal = construct_inputs(self.s_size, self.a_size, "first_test")
with tf.variable_scope("meta_rl_global"):
self.old_params = construct_fc_weights(self.s_size, self.s_size+ self.a_size, num_hidden= 512)
self.first_outputs = forward_fc(self.f_action_inputs, self.f_state_inputs, self.old_params, reuse= False)
self.f_loss = construct_loss(self.first_outputs, self.f_goal)
self.f_optimizer = self.trainer.minimize(self.f_loss)
# construct input tensors
self.construct_tensor_graph()
self.saver = tf.train.Saver()
def construct_tensor_graph(self):
'''
        Build the MAML meta-graph: per-task adapted parameters from one gradient step, then directly optimize the initial (prior) model through the post-update losses.
:return:
'''
self.test_outputs, self.train_outputs, self.new_params, self.train_goal_inputs = [], [], [], []
# construct inputs and network for each meta task
for i in range(self.meta_batch_size):
tensor_action_inputs, tensor_state_inputs, tensor_goal_inputs = construct_inputs(a_size=self.a_size, s_size=self.s_size,
scpoe="train_inputs" + str(i))
outputs = forward_fc(tensor_action_inputs, tensor_state_inputs, weights=self.old_params,
reuse=True)
self.train_action_inputs.append(tensor_action_inputs)
self.train_state_inputs.append(tensor_state_inputs)
self.train_goal_inputs.append(tensor_goal_inputs)
self.train_outputs.append(outputs)
# maml train case, do first gradients
for i in range(self.meta_batch_size):
loss = construct_loss(self.train_outputs[i], self.train_goal_inputs[i])
grads = tf.gradients(loss, list(self.old_params.values()))
gradients = dict(zip(self.old_params.keys(), grads))
# save the params
self.new_params.append(dict(zip(self.old_params.keys(),
[self.old_params[key] - self.fast_lr * gradients[key] for key in
self.old_params.keys()])))
# maml test case, second order gradients
for i in range(self.meta_batch_size):
tensor_action_inputs, tensor_state_inputs, tensor_goal_inputs = construct_inputs(a_size=self.a_size, s_size=self.s_size,
scpoe="test_inputs" + str(i))
outputs = forward_fc(tensor_action_inputs, tensor_state_inputs, weights=self.new_params[i],
reuse=True)
self.test_action_inputs.append(tensor_action_inputs)
self.test_state_inputs.append(tensor_state_inputs)
self.test_goal_inputs.append(tensor_goal_inputs)
self.test_outputs.append(outputs)
self.cur_params = [self.old_params for i in range(self.meta_batch_size)]
# define total loss
self.total_loss_list = []
for i in range(self.meta_batch_size):
# save the params
self.total_loss_list.append(construct_loss(self.test_outputs[i], self.test_goal_inputs[i]))
with tf.variable_scope("total_loss"):
self.total_loss_before = tf.reduce_mean(tf.stack(self.total_loss_list))
self.second_gradients = self.trainer.minimize(self.total_loss_before, var_list= self.old_params)
def obtain_samples(self, itr, init_state, reset_args ):
paths = self.sampler.obtain_samples(itr,init_state = init_state,reset_args= reset_args, return_dict= True)
return paths
def process_samples(self, itr, path):
return self.sampler.process_samples(itr, path, log = False)
def update_target_graph(self, params, to_scope):
to_vars = tf.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES, to_scope)
op_holder = []
for from_var, to_var in zip(params, to_vars):
op_holder.append(to_var.assign(from_var))
return op_holder
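    # Hand-crafted HalfCheetah scoring function: adds penalties when the front leg/shin/foot are
    # tilted too far, then offsets the score by the estimated forward progress (change in state
    # dimension 17 over the 0.01 s timestep) and an action-magnitude term.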
def cheetah_cost_fn(self,state, action, next_state):
if len(state.shape) > 1:
heading_penalty_factor = 10
scores = np.zeros((state.shape[0],))
            # don't move front shin back so far that you tilt forward
front_leg = state[:, 5]
my_range = 0.2
scores[front_leg >= my_range] += heading_penalty_factor
front_shin = state[:, 6]
my_range = 0
scores[front_shin >= my_range] += heading_penalty_factor
front_foot = state[:, 7]
my_range = 0
scores[front_foot >= my_range] += heading_penalty_factor
scores -= (next_state[:, 17] - state[:, 17]) / 0.01 + 0.1 * (np.sum(action**2, axis=1))
return scores
heading_penalty_factor = 10
score = 0
        # don't move front shin back so far that you tilt forward
front_leg = state[5]
my_range = 0.2
if front_leg >= my_range:
score += heading_penalty_factor
front_shin = state[6]
my_range = 0
if front_shin >= my_range:
score += heading_penalty_factor
front_foot = state[7]
my_range = 0
if front_foot >= my_range:
score += heading_penalty_factor
score -= (next_state[17] - state[17]) / 0.01 + 0.1 * (np.sum(action**2))
return score
def MPC(self,itr, num_samples, init_state, goal):
'''
# disable multiple joints
adv_list = np.zeros([num_samples])
old_obs = np.asarray([init_state for i in range(num_samples)])
new_obs = old_obs
for i in range(self.batch_size):
action = (np.random.rand(num_samples, self.a_size)-0.5)*2
action[:, goal] = 0.0
if i == 0:
action_list = action
diff = self.sess.run(self.first_outputs, feed_dict={self.f_state_inputs: np.asarray(new_obs).reshape([-1,self.s_size]),
self.f_action_inputs: np.asarray(action).reshape([-1,self.a_size])})
new_obs = diff + old_obs
rewards = diff[:,17]/0.01 - 0.05 * np.sum(np.square(action),axis=1)
adv_list[:] += rewards
index = np.argmax(adv_list)
return action_list[index]
'''
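        # Random-shooting MPC: sample num_samples random action sequences of length batch_size,
        # roll them out through the learned dynamics model, and return the first action of the
        # best-scoring sequence.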
# multi friction
adv_list = np.zeros([num_samples])
old_obs = np.asarray([init_state for i in range(num_samples)])
new_obs = old_obs
for i in range(self.batch_size):
action = (np.random.rand(num_samples, self.a_size)-0.5)*2
if i == 0:
action_list = action
diff = self.sess.run(self.first_outputs, feed_dict={self.f_state_inputs: np.asarray(new_obs).reshape([-1,self.s_size]),
self.f_action_inputs: np.asarray(action).reshape([-1,self.a_size])})
new_obs = diff + old_obs
#angle = np.arccos(old_obs[:,0]/goal)
#rewards = -((((angle+np.pi) % (2*np.pi)) - np.pi) **2 + old_obs[:,2]**2*0.1 + 0.001* np.sum((action)**2))
rewards = diff[:,17]/0.01 - 0.05 * np.sum(np.square(action), axis=1)#self.cheetah_cost_fn(old_obs, action, new_obs)
adv_list[:] += rewards
index = np.argmax(adv_list)
return action_list[index]
def meta_online_train(self, goal):
'''
        Meta online adaptation: load the prior meta-model, select actions via MPC, and adapt the model at each step.
:param goal: sample task
:return:
'''
self.goal = goal
self.sess = tf.Session()
with self.sess as sess:
self.summary_writer = tf.summary.FileWriter("./graph/", self.sess.graph)
loss_plot = None
loss_summary = tf.Summary()
loss_summary.value.add(tag='loss', simple_value=loss_plot)
reward_plot = None
reward_summary = tf.Summary()
reward_summary.value.add(tag = 'reward', simple_value = reward_plot)
diff_plot = None
diff_summary = tf.Summary()
diff_summary.value.add(tag='state_difference', simple_value=diff_plot)
if self.load_policy:
sess.run(tf.global_variables_initializer())
self.saver.restore(sess, tf.train.latest_checkpoint('./half_cheetah_model/'))
self.sampler.start_worker()
else:
sess.run(tf.global_variables_initializer())
self.sampler.start_worker()
self.env = self.env.wrapped_env
self.env.reset(reset_args=goal) # set the goal for env
nstep = 0
for itr in range(self.n_itr):
rewards = []
obs, act, diffs, images = [], [], [], []
new_state = self.env.reset()
for step in range(self.max_path_length):
#if step>int(self.max_path_length)*0.7:
# self.env.render()
if len(act) > 0:
indices = np.random.randint(0, len(act), len(act))
_ = sess.run([ self.f_optimizer],
feed_dict={self.f_action_inputs: np.asarray(act)[indices,:],
self.f_state_inputs: np.asarray(obs)[indices,:],
self.f_goal: np.asarray(diffs)[indices,:]})
loss, output = sess.run([self.f_loss,self.first_outputs], feed_dict={self.f_action_inputs: np.asarray(act)[indices,:],
self.f_state_inputs: np.asarray(obs)[indices,:],
self.f_goal: np.asarray(diffs)[indices,:]})
#diff = np.mean(abs(np.asarray(obs[1:-1])-np.asarray(obs[0:-2]) - output[0:-2]))
#diff_summary.value[0].simple_value = diff
loss_summary.value[0].simple_value = loss
self.summary_writer.add_summary(loss_summary, nstep)
self.summary_writer.add_summary(diff_summary, nstep)
obs.append(new_state)
if step%100 == 0:
print("Doing MPC, step:", step)
action = self.MPC(itr = itr, num_samples= self.num_samples, goal= goal, init_state= new_state)
new_obs, reward, done,_= self.env.step(action)
act.append(action)
diffs.append(new_obs - new_state)
rewards.append(reward)
nstep +=1
new_state = new_obs
if done:
break
if self.save_video:
from PIL import Image
image = self.env.wrapped_env.get_viewer().get_image()
pil_image = Image.frombytes('RGB', (image[1], image[2]), image[0])
images.append(np.flipud(np.array(pil_image)))
if self.save_video and itr == self.n_itr -1 :
import moviepy.editor as mpy
clip = mpy.ImageSequenceClip(images, fps=20 * 1)
clip.write_videofile("./video/half_cheetah/", fps=20 * 1)
self.saver.save(sess, './MPC_model/mpc_model.cpkt', global_step=itr)
if itr >= 0:
sum_rewards = np.sum(np.asarray(rewards))
print(sum_rewards)
self.reward_list.append(sum_rewards)
reward_summary.value[0].simple_value = sum_rewards
self.summary_writer.add_summary(reward_summary, itr)
if self.fig == None :
self.fig = plt.figure()
self.fig.set_size_inches(12, 6)
self.fig1= plt.figure()
else:
self.show_rewards(self.reward_list, self.fig, "rewards")
def train(self):
'''
        Meta-training of the transition model: sample trajectories for different tasks, then optimize the prior parameters with the meta objective.
:return:
'''
self.sess = tf.Session()
with self.sess as sess:
self.summary_writer = tf.summary.FileWriter("./graph/", self.sess.graph)
if self.load_policy:
sess.run(tf.global_variables_initializer())
self.saver.restore(sess, tf.train.latest_checkpoint('./half_cheetah_model/'))
self.sampler.start_worker()
else:
sess.run(tf.global_variables_initializer())
self.sampler.start_worker()
self.env = self.env.wrapped_env
loss_plot = None
loss_summary = tf.Summary()
loss_summary.value.add(tag='loss', simple_value=loss_plot)
reward_plot = None
reward_summary = tf.Summary()
reward_summary.value.add(tag = 'reward', simple_value = reward_plot)
for itr in range(self.n_itr):
if itr>0:
print("------------------ total loss: %f" % total_loss_before)
print("------------------ total loss: %f" % total_loss)
# set goals of meta tasks
learner_goals = self.env.sample_goals(self.meta_batch_size)
obs_list, action_list, adv_list, newobs_list, newaction_list, newadv_list = [], [], [], [], [], []
for step in range(self.num_grad_updates+1):
print("-------------------- step: " + str(step))
print("-------------------- obtaining samples :")
paths = self.obtain_samples(itr, reset_args= learner_goals,init_state= None)
print("-------------------- processing samples :")
samples = {}
for key in paths.keys():
samples[key] = self.process_samples(itr, paths[key])
if step == 0:
for i in range(self.meta_batch_size):
inputs = ext.extract(
samples[i],
"observations", "actions", "rewards"
)
obs_list.append(inputs[0])
action_list.append(inputs[1])
adv_list.append(np.asarray(inputs[2]).reshape([-1,1]))
else:
for i in range(self.meta_batch_size):
inputs = ext.extract(
samples[i],
"observations", "actions", "rewards"
)
newobs_list.append(inputs[0])
newaction_list.append(inputs[1])
newadv_list.append(np.asarray(inputs[2]).reshape([-1,1]))
#if step == 0:
# print("-------------------- Compute local gradients : ")
# # apply first gradients, optimize original params
# assign_op = []
print("-------------------------- optimize policy :")
feedict = {}
for i in range(self.meta_batch_size):
feedict.update({self.train_action_inputs[i]: action_list[i][0:-1]})
feedict.update({self.train_state_inputs[i]: obs_list[i][0:-1]})
feedict.update({self.train_goal_inputs[i]: obs_list[i][1::] - obs_list[i][0:-1]})
feedict.update({self.test_action_inputs[i]: newaction_list[i][0:-1]})
feedict.update({self.test_state_inputs[i]: newobs_list[i][0:-1]})
feedict.update({self.test_goal_inputs[i]: newobs_list[i][1::] - newobs_list[i][0:-1] })
total_loss_before= sess.run(self.total_loss_before, feed_dict= feedict)
_ = sess.run([ self.second_gradients], feed_dict= feedict)
total_loss = sess.run(self.total_loss_before,
feed_dict=feedict)
if itr > 0:
self.loss_list.append(total_loss_before)
reward_summary.value[0].simple_value = total_loss_before
self.summary_writer.add_summary(reward_summary, itr)
if self.fig == None :
self.fig = plt.figure()
self.fig.set_size_inches(12, 6)
else:
self.show_rewards(self.loss_list, self.fig, "loss")
if itr%1 == 0:
save_path = self.saver.save(sess, './half_cheetah_model/maml_model.ckpt', global_step = itr)
print("-------------save model : %s " % save_path)
self.sampler.shutdown_worker()
def show_rewards(self, rewards, fig, name,width=12, height=6, window_size=1000):
# sanity checks for plotting
assert (fig is not None)
#if len(rewards) == 0:
# return
plt.figure(fig.number)
plt.clf()
moving_avg = self.compute_moving_average(rewards, window_size)
gcf = plt.gcf()
ax = plt.gca()
gcf.set_size_inches(width, height)
plt.xlim((0, len(rewards)))
r, = plt.plot(rewards, color='red', linestyle='-', linewidth=0.5, label=name, alpha=0.5)
ave_r, = plt.plot(moving_avg, color='blue', linestyle='-', linewidth=0.8, label='avg_' + name)
# e, = plt.plot(epsilons, color='blue', linestyle='--', alpha=0.5, label='epsilon')
plt.legend([r, ave_r], [name, 'average '+ name])
plt.ylabel(name)
plt.xlabel('Episode #')
plt.savefig(name+' fig')
#plt.pause(0.1)
def compute_moving_average(self, rewards, window):
cur_window_size = 1
moving_average = []
for i in range(len(rewards) - 1):
lower_idx = max(0, i - cur_window_size)
average = sum(rewards[lower_idx:i + 1]) / cur_window_size
moving_average.append(average)
cur_window_size += 1
if cur_window_size > window:
cur_window_size = window
return moving_average
def get_param_values(self):
all_params = self.old_params
param_values = tf.get_default_session().run(all_params)
return param_values
def set_param_values(self, params):
        tf.get_default_session().run(self.update_target_graph(params, "meta_rl_global"))
def _discount(self, x, gamma):
return signal.lfilter([1.0], [1.0, gamma], x[::-1])[::-1]
def add_params(self, param_1, param_2):
if len(param_1) == 0:
return param_2
return [param_1[i] + param_2[i] for i in range(len(param_1))]
def sub_params(self, param_1, param_2):
return [param_1[i] - param_2[i] for i in range(len(param_1))]
def mult_params(self, param_1, param_2 ):
        return [param_1[i] * param_2[i] for i in range(len(param_1))]
def divide_nums(self, param_1, num):
return [param_1[i]/num for i in range(len(param_1))]
| 43.832031
| 142
| 0.549238
| 21,975
| 0.979191
| 0
| 0
| 0
| 0
| 0
| 0
| 3,097
| 0.138
|
ac1910c0faa98f5af77a97256be74c749f17725a
| 279
|
py
|
Python
|
DeepRTS/__init__.py
|
cair/deep-rts
|
7aa5dde0c5df10ae3a3d057e7b89641aec58e115
|
[
"MIT"
] | 144
|
2018-07-13T07:47:50.000Z
|
2022-03-31T06:29:50.000Z
|
DeepRTS/__init__.py
|
cair/DeepRTS
|
2ea4de0993ea0ca2677fdb36a172779db4ce7868
|
[
"MIT"
] | 18
|
2019-03-29T10:37:01.000Z
|
2022-03-02T12:47:34.000Z
|
DeepRTS/__init__.py
|
cair/DeepRTS
|
2ea4de0993ea0ca2677fdb36a172779db4ce7868
|
[
"MIT"
] | 23
|
2018-11-02T18:12:51.000Z
|
2022-02-15T20:32:18.000Z
|
try:
from DeepRTS import Engine
except ImportError:
import Engine
try:
    from DeepRTS.Engine import Map, UnitManager, Constants, Player
except ImportError:
    from Engine import Map, UnitManager, Constants, Player
| 23.25
| 69
| 0.763441
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
ac19348529e2bf02c00c7ac8ed17b55456b351e7
| 4,196
|
py
|
Python
|
openprocurement/auctions/swiftsure/utils.py
|
bdmbdsm/openprocurement.auctions.swiftsure
|
f5b93555eb12212c69c8168f861376eae85f4648
|
[
"Apache-2.0"
] | null | null | null |
openprocurement/auctions/swiftsure/utils.py
|
bdmbdsm/openprocurement.auctions.swiftsure
|
f5b93555eb12212c69c8168f861376eae85f4648
|
[
"Apache-2.0"
] | null | null | null |
openprocurement/auctions/swiftsure/utils.py
|
bdmbdsm/openprocurement.auctions.swiftsure
|
f5b93555eb12212c69c8168f861376eae85f4648
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
from logging import getLogger
from pkg_resources import get_distribution
from openprocurement.auctions.core.plugins.contracting.base.utils import (
check_auction_status
)
from openprocurement.auctions.core.utils import (
cleanup_bids_for_cancelled_lots, check_complaint_status,
remove_draft_bids,
context_unpack,
get_now,
TZ,
)
PKG = get_distribution(__package__)
LOGGER = getLogger(PKG.project_name)
def check_bids(request):
auction = request.validated['auction']
if auction.lots:
[setattr(i.auctionPeriod, 'startDate', None) for i in auction.lots if i.numberOfBids < 2 and i.auctionPeriod and i.auctionPeriod.startDate]
[setattr(i, 'status', 'unsuccessful') for i in auction.lots if i.numberOfBids < 2 and i.status == 'active']
cleanup_bids_for_cancelled_lots(auction)
if not set([i.status for i in auction.lots]).difference(set(['unsuccessful', 'cancelled'])):
auction.status = 'unsuccessful'
else:
if auction.auctionPeriod:
if auction.numberOfBids < auction.minNumberOfQualifiedBids:
auction.auctionPeriod.startDate = None
auction.status = 'unsuccessful'
elif auction.numberOfBids == 1:
auction.auctionPeriod.startDate = None
request.content_configurator.start_awarding()
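# Advance the auction through its lifecycle: switch it to 'active.auction' once tenderPeriod
# has ended, and run check_auction_status once every award's stand-still period has elapsed.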
def check_status(request):
auction = request.validated['auction']
now = get_now()
for complaint in auction.complaints:
check_complaint_status(request, complaint, now)
for award in auction.awards:
request.content_configurator.check_award_status(request, award, now)
for complaint in award.complaints:
check_complaint_status(request, complaint, now)
if not auction.lots and auction.status == 'active.tendering' and auction.tenderPeriod.endDate <= now:
LOGGER.info('Switched auction {} to {}'.format(auction['id'], 'active.auction'),
extra=context_unpack(request, {'MESSAGE_ID': 'switched_auction_active.auction'}))
auction.status = 'active.auction'
remove_draft_bids(request)
check_bids(request)
return
elif auction.lots and auction.status == 'active.tendering' and auction.tenderPeriod.endDate <= now:
LOGGER.info('Switched auction {} to {}'.format(auction['id'], 'active.auction'),
extra=context_unpack(request, {'MESSAGE_ID': 'switched_auction_active.auction'}))
auction.status = 'active.auction'
remove_draft_bids(request)
check_bids(request)
[setattr(i.auctionPeriod, 'startDate', None) for i in auction.lots if i.numberOfBids < 2 and i.auctionPeriod]
return
elif not auction.lots and auction.status == 'active.awarded':
standStillEnds = [
a.complaintPeriod.endDate.astimezone(TZ)
for a in auction.awards
if a.complaintPeriod.endDate
]
if not standStillEnds:
return
standStillEnd = max(standStillEnds)
if standStillEnd <= now:
check_auction_status(request)
elif auction.lots and auction.status in ['active.qualification', 'active.awarded']:
if any([i['status'] in auction.block_complaint_status and i.relatedLot is None for i in auction.complaints]):
return
for lot in auction.lots:
if lot['status'] != 'active':
continue
lot_awards = [i for i in auction.awards if i.lotID == lot.id]
standStillEnds = [
a.complaintPeriod.endDate.astimezone(TZ)
for a in lot_awards
if a.complaintPeriod.endDate
]
if not standStillEnds:
continue
standStillEnd = max(standStillEnds)
if standStillEnd <= now:
check_auction_status(request)
return
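# Mark every bid below value.amount + minimalStep.amount as invalid.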
def invalidate_bids_under_threshold(auction):
value_threshold = round(auction['value']['amount'] + auction['minimalStep']['amount'], 2)
for bid in auction['bids']:
if bid['value']['amount'] < value_threshold:
bid['status'] = 'invalid'
| 42.383838
| 147
| 0.651335
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 550
| 0.131077
|
ac1a66d846bb2dcb096e3c89c166dcb12db849c3
| 12,989
|
py
|
Python
|
src/plugins/bilibili/__init__.py
|
shinnenijou/JiBot
|
c468a67e301597ca702170aded96c5145d864115
|
[
"MIT"
] | null | null | null |
src/plugins/bilibili/__init__.py
|
shinnenijou/JiBot
|
c468a67e301597ca702170aded96c5145d864115
|
[
"MIT"
] | null | null | null |
src/plugins/bilibili/__init__.py
|
shinnenijou/JiBot
|
c468a67e301597ca702170aded96c5145d864115
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
# Python STL
from time import time, sleep
import asyncio
from collections import deque
# Third-party
from bilibili_api import Credential, comment
import nonebot
from nonebot.log import logger
from nonebot import on_command, require
from nonebot.permission import SUPERUSER, USER
from nonebot.adapters.onebot.v11 import GROUP_ADMIN, GROUP_OWNER
from nonebot.adapters.onebot.v11 import GroupMessageEvent, Message, MessageSegment
# Self
import src.plugins.bilibili.dynamics as dynamics
import src.plugins.bilibili.db as db
import src.plugins.bilibili.users as users
from src.plugins.bilibili.live import LiveStatus, Room
# Initiate Database
db.init()
# Credential
SESSDATA = nonebot.get_driver().config.dict()['bili_sessdata']
BILI_JCT = nonebot.get_driver().config.dict()['bili_jct']
BUVID3 = nonebot.get_driver().config.dict()['bili_buvid3']
CREDENTIAL = Credential(SESSDATA, BILI_JCT, BUVID3)
# CONSTANT
BILI_SOURCE = nonebot.get_driver().config.dict()['bili_source']
BILI_TARGET = nonebot.get_driver().config.dict()['bili_target']
DYNAMIC_LISTEN_INTERVAL = nonebot.get_driver().config.dict()['dynamic_listen_interval']
LIVE_LISTEN_INTERVAL = nonebot.get_driver().config.dict()['live_listen_interval']
COMMENT_EXPIRATION = nonebot.get_driver().config.dict()['dynamic_comment_expiration']
# GLOBAL VARIABLES
#UID_LIST, ROOM_LIST, NAME_LIST, NEWEST_DYNAMICS = db.get_user_list()
USER_LIST = db.get_user_list()
for uid, info in USER_LIST.items(): # Initialize Room list
info['room'] = Room(uid, info['room'], info['name'], CREDENTIAL)
TRANSLATOR_LIST = db.get_translator_list()
DYNAMIC_QUEUE = deque()
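# Recently pushed dynamics are kept in memory so whitelisted translators can reply to them with
# the comment command below; entries older than COMMENT_EXPIRATION are dropped on each poll.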
##########################
######### Command help #########
helper = on_command(cmd='bili帮助', priority=2, temp=False, block=True,
permission=GROUP_OWNER|GROUP_ADMIN|SUPERUSER)
@helper.handle()
async def help():
menu = 'bilibili模块目前支持的功能:\n\n'\
+ '/bili关注列表\n'\
+ '/bili关注 ID\n'\
+ '/bili取关 ID\n'\
+ '/开启动态翻译 ID\n'\
+ '/关闭动态翻译 ID\n'\
+ '/评论白名单\n'\
+ '/添加评论白名单 ID\n'\
           + '/移除评论白名单 ID'
await helper.finish(Message(menu))
# Scheduler object for periodic jobs
scheduler = require('nonebot_plugin_apscheduler').scheduler
###########################
######### Dynamic push #########
@scheduler.scheduled_job('interval', seconds=DYNAMIC_LISTEN_INTERVAL,
id='bili_dynamic_pusher')
@logger.catch
async def push_dynamic():
global USER_LIST
    # Clean up the expired queue: pop dynamics whose publish timestamp is more than COMMENT_EXPIRATION seconds old
while len(DYNAMIC_QUEUE):
front = DYNAMIC_QUEUE.popleft()
if time() - front.timestamp < COMMENT_EXPIRATION:
DYNAMIC_QUEUE.appendleft(front)
break
if not USER_LIST:
        return  # no targets on the watch list
bot = nonebot.get_bot()
timelines = await dynamics.get_users_timeline(CREDENTIAL, *USER_LIST.keys())
    # Handle each user's latest dynamics separately
    # index i: which user
for uid, timeline in timelines.items():
        # read the groups subscribed to this user
groups = db.get_user_groups(uid)
        # scan from oldest to newest
        # index j: the j-th dynamic of this user
for dynamic_data in reversed(timeline):
            # skip dynamics no newer than the recorded timestamp
if dynamic_data['desc']['timestamp'] <= USER_LIST[uid]['newest_timestamp']:
continue
logger.success(f'成功检测到{USER_LIST[uid]["name"]}发布新动态, 准备推送')
            # instantiate the corresponding dynamic class
dynamic = dynamics.CLASS_MAP[dynamic_data['desc']['type']](dynamic_data, CREDENTIAL)
await dynamic.translate(BILI_SOURCE, BILI_TARGET)
            # push to the subscribed groups
            # index k: the groups subscribed to this user
tasks = []
for group_id, need_transtale in groups.items():
message = dynamic.get_message(need_transtale)
task = asyncio.create_task(
bot.send_group_msg(
group_id=group_id,
message=message
)
)
tasks.append(task)
try:
await asyncio.gather(*tasks)
                # update the in-memory timestamp after a successful send
USER_LIST[uid]['newest_timestamp'] = dynamic_data['desc']['timestamp']
                # keep this dynamic in memory so it can be replied to later
DYNAMIC_QUEUE.append(dynamic)
except:
logger.error(f'发送{uid}群消息失败, 请检查网络连接或qq账号状态')
            # write the updated timestamp back to the database
db.update_timestamp(uid, USER_LIST[uid]['newest_timestamp'])
###########################
######### Live-stream push #########
@scheduler.scheduled_job('interval', seconds=LIVE_LISTEN_INTERVAL,
id='bili_live_pusher')
@logger.catch
async def push_live():
global USER_LIST
if not USER_LIST:
return
bot = nonebot.get_bot()
tasks = []
for info in USER_LIST.values():
tasks.append(asyncio.create_task(info['room'].update_live()))
updates = dict(zip(USER_LIST.keys(), await asyncio.gather(*tasks)))
for uid, update in updates.items():
        # live status changed (went live or went offline); prepare to push a notification
if update:
logger.success(f'成功检测到{USER_LIST[uid]["name"]}({uid})直播状态变化, 准备推送')
await USER_LIST[uid]['room'].update_key_info()
message = USER_LIST[uid]['room'].get_message()
groups = db.get_user_groups(uid)
tasks = []
for group_id in groups.keys():
task = asyncio.create_task(
bot.send_group_msg(
group_id=group_id,
message=message
)
)
tasks.append(task)
await asyncio.gather(*tasks)
###########################
######### Send comments #########
send_comment = on_command(cmd='评论', priority=2, temp=False, block=True,
permission=USER(*TRANSLATOR_LIST.keys()))
@send_comment.handle()
async def send(event:GroupMessageEvent):
args = event.get_plaintext().partition(' ')[2]
dynamic_id = args.split()[0]
msg = '命令格式错误, 请按照命令格式: "/评论 动态id 评论内容"'
if not dynamic_id.isdigit():
return
text = args[len(dynamic_id):].strip()
dynamic_id = int(dynamic_id)
for dynamic in DYNAMIC_QUEUE:
if dynamic.dynamic_id == dynamic_id:
try:
await comment.send_comment(
text=text,
oid=dynamic.reply_id,
type_=dynamics.REPLY_MAP[dynamic.type],
credential=CREDENTIAL
)
msg = '评论发送成功'
except:
logger.error('发送评论失败, 请检查网络状况或Bili账号配置')
break
else:
msg = '发送失败, 请检查动态id'
await send_comment.finish(Message(msg))
###########################
######### Subscription management #########
# Show the follow list of the current group
userlist = on_command(cmd='bili关注列表', priority=2, temp=False, block=True,
permission=GROUP_ADMIN|GROUP_OWNER|SUPERUSER)
@userlist.handle()
async def get_list(event: GroupMessageEvent):
group_id = event.get_session_id().split('_')[1]
msg = '本群已关注以下用户:\n'
uid_list, name_list, translate_list = db.get_group_sub(group_id)
for i in range(len(name_list)):
translate_text = '开启' if translate_list[i] else '关闭'
msg += f'\n[{i + 1}]{name_list[i]}({uid_list[i]}) 翻译已{translate_text}'
await userlist.finish(Message(msg))
# Follow a user
follow_user = on_command(cmd='bili关注', priority=2, temp=False, block=True,
permission=GROUP_OWNER|GROUP_ADMIN|SUPERUSER)
@follow_user.handle()
async def follow(event:GroupMessageEvent):
global USER_LIST
cmd = event.get_plaintext().split()
group_id = event.get_session_id().split('_')[1]
msg = '命令格式错误, 请按照命令格式: "/bili关注 数字uid"'
if len(cmd) != 2 or not cmd[1].isdigit():
await follow_user.finish(Message(msg))
uid = int(cmd[1])
user_info = (await users.get_users_info(CREDENTIAL, uid))[0]
if user_info:
name = user_info['name']
room_id = 0
if user_info['live_room']:
room_id = user_info['live_room']['roomid']
        if db.add_user(uid, room_id, name, int(time())):  # set the newest-dynamic timestamp to now
            # update the global state
USER_LIST[uid] = {
'name': name,
'room': Room(uid, room_id, name, CREDENTIAL),
'newest_timestamp': int(time())
}
if db.add_group_sub(uid, group_id):
msg = f'{name}({uid}) 关注成功!'
else:
msg = f'{name}({uid})已经在关注列表中!'
else:
msg = f'用户{uid}不存在, 请确认id无误'
await follow_user.finish(Message(msg))
# Unfollow a user
unfollow_user = on_command('bili取关', priority=2, temp=False, block=True,
permission=GROUP_ADMIN|GROUP_OWNER|SUPERUSER)
@unfollow_user.handle()
async def unfollow(event:GroupMessageEvent):
global USER_LIST
group_id = event.get_session_id().split('_')[1]
cmd = event.get_plaintext().split()
msg = '命令格式错误, 请按照命令格式: "/bili取关 数字uid"'
if len(cmd) == 2 and cmd[1].isdigit():
uid = int(cmd[1])
name = db.get_user_name(uid)
if db.delete_group_sub(uid, group_id):
msg = f"{name}({uid})取关成功"
            # update the global state
if db.delete_user(uid):
del USER_LIST[uid]
else:
msg = f"{uid}不在本群关注列表中"
await unfollow_user.finish(Message(msg))
# Enable dynamic translation
translate_on = on_command('开启动态翻译', priority=2, temp=False, block=True,
permission=GROUP_ADMIN|GROUP_OWNER|SUPERUSER)
@translate_on.handle()
async def on(event: GroupMessageEvent):
group_id = int(event.get_session_id().split('_')[1])
cmd = event.get_plaintext().split()
msg = '命令格式错误, 请按照命令格式: "/开启动态翻译 数字uid"'
if len(cmd) == 2 and cmd[1].isdigit():
uid = int(cmd[1])
name = db.get_user_name(uid)
if db.translate_on(uid, group_id):
msg = f'{name}({uid})开启动态翻译成功!'
else:
msg = f'{uid}不在当前关注列表!'
await translate_on.finish(Message(msg))
# Disable dynamic translation
translate_off = on_command('关闭动态翻译', priority=2, temp=False, block=True,
permission=GROUP_ADMIN|GROUP_OWNER|SUPERUSER)
@translate_off.handle()
async def off(event: GroupMessageEvent):
group_id = int(event.get_session_id().split('_')[1])
cmd = event.get_plaintext().split()
msg = '命令格式错误, 请按照命令格式: "/开启动态翻译 数字uid"'
if len(cmd) == 2 and cmd[1].isdigit():
uid = int(cmd[1])
name = db.get_user_name(uid)
if db.translate_off(uid, group_id):
msg = f'{name}({uid})关闭动态翻译成功!'
else:
msg = f'{uid}不在当前关注列表!'
await translate_off.finish(Message(msg))
###########################
######### Comment management #########
# View the comment whitelist
show_translator = on_command(cmd='评论白名单', priority=2, temp=False, block=True,
permission=SUPERUSER)
@show_translator.handle()
async def show():
msg = '以下用户已加入评论白名单:\n'
i = 0
for session_id, name in TRANSLATOR_LIST.items():
i += 1
group_id = session_id.split('_')[1]
qq_id = session_id.split('_')[2]
msg += f'\n[{i}]群{group_id}: {name}({qq_id})'
await show_translator.finish(Message(msg))
# Add to the comment whitelist
add_translator = on_command(cmd='添加评论白名单', priority=2, temp=False, block=True,
permission=SUPERUSER)
@add_translator.handle()
async def add(event:GroupMessageEvent):
global TRANSLATOR_LIST
cmd = event.get_plaintext().split()
msg = '命令格式错误, 请按照命令格式: "/添加评论白名单 群号 qqid"'
if len(cmd) == 3 and cmd[1].isdigit() and cmd[2].isdigit():
group_id = int(cmd[1])
qq_id = int(cmd[2])
try:
qq_user_info = await nonebot.get_bot().get_group_member_info(
group_id=group_id, user_id=qq_id, nocache=False
)
qq_name = qq_user_info['card'] if qq_user_info['card'] else qq_user_info['nickname']
except:
qq_user_info = {}
if qq_user_info and db.add_translator_list(qq_id, group_id, qq_name):
msg = f'群{group_id}: {qq_name}({qq_id})添加成功'
TRANSLATOR_LIST = db.get_translator_list()
send_comment.permission = USER(*TRANSLATOR_LIST.keys())
else:
msg = '查无此人, 请确认群号 QQ号无误'
await add_translator.finish(Message(msg))
# Remove from the comment whitelist
remove_translator = on_command(cmd='移除评论白名单', priority=2, temp=False, block=True,
permission=SUPERUSER)
@remove_translator.handle()
async def remove(event:GroupMessageEvent):
global TRANSLATOR_LIST
cmd = event.get_plaintext().split()
msg = '命令格式错误, 请按照命令格式: "/移除评论白名单 群号 qq号"'
if len(cmd) == 3 and cmd[1].isdigit() and cmd[2].isdigit():
group_id = int(cmd[1])
qq_id = int(cmd[2])
try:
qq_user_info = await nonebot.get_bot().get_group_member_info(
group_id=group_id, user_id=qq_id, nocache=False
)
qq_name = qq_user_info['card'] if qq_user_info['card'] else qq_user_info['nickname']
except:
qq_user_info = {}
if qq_user_info and db.remove_translator_list(qq_id, group_id):
msg = f'群{group_id}: {qq_name}({qq_id})移除成功'
TRANSLATOR_LIST = db.get_translator_list()
send_comment.permission = USER(*TRANSLATOR_LIST.keys())
else:
msg = '查无此人, 请确认群号 QQ号无误'
await remove_translator.finish(Message(msg))
| 37.111429
| 96
| 0.614212
| 0
| 0
| 0
| 0
| 10,919
| 0.755379
| 10,465
| 0.723971
| 3,892
| 0.269249
|
ac1aafb69f1a23988f2e6e269f2b272b5e40a652
| 6,593
|
py
|
Python
|
rlzoo/common/build_rlbench_env.py
|
tensorlayer/RLzoo
|
9a587b97f706b2a59ac98555945822bf3987b1d1
|
[
"Apache-2.0"
] | 750
|
2019-07-26T10:56:28.000Z
|
2022-03-25T08:36:38.000Z
|
rlzoo/common/build_rlbench_env.py
|
tensorlayer/RLzoo
|
9a587b97f706b2a59ac98555945822bf3987b1d1
|
[
"Apache-2.0"
] | 29
|
2019-10-25T02:46:19.000Z
|
2022-02-09T23:41:24.000Z
|
rlzoo/common/build_rlbench_env.py
|
tensorlayer/RLzoo
|
9a587b97f706b2a59ac98555945822bf3987b1d1
|
[
"Apache-2.0"
] | 101
|
2019-08-04T12:21:25.000Z
|
2022-03-18T18:06:50.000Z
|
import sys
from collections import OrderedDict
import numpy as np
from gym import spaces
from pyrep.const import RenderMode
from pyrep.objects.dummy import Dummy
from pyrep.objects.vision_sensor import VisionSensor
from rlbench.environment import Environment
from rlbench.action_modes import ArmActionMode, ActionMode
from rlbench.observation_config import ObservationConfig
from rlbench.tasks import *
# Don't forget to add: export PYTHONPATH=PATH_TO_YOUR_LOCAL_RLBENCH_REPO
# list of state types
state_types = ['left_shoulder_rgb',
'left_shoulder_depth',
'left_shoulder_mask',
'right_shoulder_rgb',
'right_shoulder_depth',
'right_shoulder_mask',
'wrist_rgb',
'wrist_depth',
'wrist_mask',
'joint_velocities',
'joint_velocities_noise',
'joint_positions',
'joint_positions_noise',
'joint_forces',
'joint_forces_noise',
'gripper_pose',
'gripper_touch_forces',
'task_low_dim_state']
class RLBenchEnv():
""" make RLBench env to have same interfaces as openai.gym """
def __init__(self, task_name: str, state_type: list = 'state', ):
# render_mode=None):
"""
create RL Bench environment
:param task_name: task names can be found in rlbench.tasks
:param state_type: state or vision or a sub list of state_types list like ['left_shoulder_rgb']
"""
if state_type == 'state' or state_type == 'vision' or isinstance(state_type, list):
self._state_type = state_type
else:
raise ValueError('State type value error, your value is {}'.format(state_type))
# self._render_mode = render_mode
self._render_mode = None
obs_config = ObservationConfig()
obs_config.set_all(True)
action_mode = ActionMode(ArmActionMode.ABS_JOINT_VELOCITY)
self.env = Environment(
action_mode, obs_config=obs_config, headless=True)
self.env.launch()
try:
self.task = self.env.get_task(getattr(sys.modules[__name__], task_name))
except:
raise NotImplementedError
_, obs = self.task.reset()
self.spec = Spec(task_name)
if self._state_type == 'state':
self.observation_space = spaces.Box(
low=-np.inf, high=np.inf, shape=obs.get_low_dim_data().shape)
elif self._state_type == 'vision':
space_dict = OrderedDict()
space_dict["state"] = spaces.Box(
low=-np.inf, high=np.inf, shape=obs.get_low_dim_data().shape)
for i in ["left_shoulder_rgb", "right_shoulder_rgb", "wrist_rgb", "front_rgb"]:
space_dict[i] = spaces.Box(
low=0, high=1, shape=getattr(obs, i).shape)
self.observation_space = spaces.Dict(space_dict)
else:
space_dict = OrderedDict()
for name in self._state_type:
if name.split('_')[-1] in ('rgb', 'depth', 'mask'):
space_dict[name] = spaces.Box(
low=0, high=1, shape=getattr(obs, name).shape)
else:
space_dict[name] = spaces.Box(
low=-np.inf, high=np.inf,
shape=getattr(obs, name).shape)
self.observation_space = spaces.Dict(space_dict)
self.action_space = spaces.Box(low=-1.0, high=1.0, shape=(self.env.action_size,), dtype=np.float32)
# if render_mode is not None:
# # Add the camera to the scene
# cam_placeholder = Dummy('cam_cinematic_placeholder')
# self._gym_cam = VisionSensor.create([640, 360])
# self._gym_cam.set_pose(cam_placeholder.get_pose())
# if render_mode == 'human':
# self._gym_cam.set_render_mode(RenderMode.OPENGL3_WINDOWED)
# else:
# self._gym_cam.set_render_mode(RenderMode.OPENGL3)
def _extract_obs(self, obs):
if self._state_type == 'state':
return np.array(obs.get_low_dim_data(), np.float32)
elif self._state_type == 'vision':
return np.array([np.array(obs.get_low_dim_data(), np.float32),
np.array(obs.left_shoulder_rgb, np.float32),
np.array(obs.right_shoulder_rgb, np.float32),
np.array(obs.wrist_rgb, np.float32),
np.array(obs.front_rgb, np.float32), ])
else:
result = ['tag']
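            # 'tag' appears to be a placeholder entry; it is stripped off again by the
            # np.delete call below.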
for name in self._state_type:
result.append(np.array(getattr(obs, name), np.float32))
return np.delete(np.array(result,), 0, 0)
def seed(self, seed_value):
# set seed as in openai.gym env
pass
def render(self, mode='human'):
# todo render available at any time
if self._render_mode is None:
self._render_mode = mode
# Add the camera to the scene
cam_placeholder = Dummy('cam_cinematic_placeholder')
self._gym_cam = VisionSensor.create([640, 360])
self._gym_cam.set_pose(cam_placeholder.get_pose())
if mode == 'human':
self._gym_cam.set_render_mode(RenderMode.OPENGL3_WINDOWED)
else:
self._gym_cam.set_render_mode(RenderMode.OPENGL3)
if mode != self._render_mode:
raise ValueError(
'The render mode must match the render mode selected in the '
'constructor. \nI.e. if you want "human" render mode, then '
'create the env by calling: '
'gym.make("reach_target-state-v0", render_mode="human").\n'
'You passed in mode %s, but expected %s.' % (
mode, self._render_mode))
if mode == 'rgb_array':
return self._gym_cam.capture_rgb()
def reset(self):
descriptions, obs = self.task.reset()
return self._extract_obs(obs)
def step(self, action):
obs, reward, terminate = self.task.step(action)
return self._extract_obs(obs), reward, terminate, None
def close(self):
self.env.shutdown()
class Spec():
""" a fake spec """
def __init__(self, id_name):
self.id = id_name
| 40.447853
| 108
| 0.574245
| 5,405
| 0.819809
| 0
| 0
| 0
| 0
| 0
| 0
| 1,786
| 0.270893
|
ac1ab705364244ba4229d2c3d2d14676f947e30d
| 4,117
|
py
|
Python
|
scripts/generate/xgboost_.py
|
thomas-young-2013/automl_space
|
f6267dac27ae6b17123ec17c4a6c843a40e4e963
|
[
"MIT"
] | 2
|
2021-04-23T10:31:47.000Z
|
2021-11-25T07:59:05.000Z
|
scripts/generate/xgboost_.py
|
thomas-young-2013/automl_space
|
f6267dac27ae6b17123ec17c4a6c843a40e4e963
|
[
"MIT"
] | null | null | null |
scripts/generate/xgboost_.py
|
thomas-young-2013/automl_space
|
f6267dac27ae6b17123ec17c4a6c843a40e4e963
|
[
"MIT"
] | 1
|
2021-08-11T15:03:13.000Z
|
2021-08-11T15:03:13.000Z
|
import numpy as np
from ConfigSpace import ConfigurationSpace
from ConfigSpace.hyperparameters import UniformFloatHyperparameter, \
UniformIntegerHyperparameter, UnParametrizedHyperparameter
import argparse
import pickle as pkl
import os
import sys
sys.path.insert(0, '.')
from scripts.utils import check_none, check_for_bool
from scripts.generate.generate_utils import run_exp
parser = argparse.ArgumentParser()
parser.add_argument('--datasets', type=str, default='None')
parser.add_argument('--rep_num', type=int, default=1000)
class XGBoost:
def __init__(self, n_estimators, learning_rate, max_depth, min_child_weight,
subsample, colsample_bytree, gamma=None, reg_alpha=None, reg_lambda=None,
n_jobs=4, seed=1):
self.n_estimators = int(n_estimators)
self.learning_rate = learning_rate
self.max_depth = max_depth
self.subsample = subsample
self.min_child_weight = min_child_weight
self.colsample_bytree = colsample_bytree
self.gamma = gamma
self.reg_alpha = reg_alpha
self.reg_lambda = reg_lambda
self.n_jobs = n_jobs
self.random_state = np.random.RandomState(seed)
self.estimator = None
def fit(self, X, y):
from xgboost import XGBClassifier
# objective is set automatically in sklearn interface of xgboost
self.estimator = XGBClassifier(
use_label_encoder=False,
max_depth=self.max_depth,
learning_rate=self.learning_rate,
n_estimators=self.n_estimators,
min_child_weight=self.min_child_weight,
subsample=self.subsample,
colsample_bytree=self.colsample_bytree,
gamma=self.gamma,
reg_alpha=self.reg_alpha,
reg_lambda=self.reg_lambda,
random_state=self.random_state,
n_jobs=self.n_jobs,
)
self.estimator.fit(X, y)
return self
def predict(self, X):
if self.estimator is None:
raise NotImplementedError()
return self.estimator.predict(X)
@staticmethod
def get_hyperparameter_search_space():
"""
['n_estimators', 'learning_rate', 'max_depth', 'colsample_bytree', 'gamma',
'min_child_weight', 'reg_alpha', 'reg_lambda', 'subsample']
"""
cs = ConfigurationSpace()
n_estimators = UniformIntegerHyperparameter("n_estimators", 100, 1000, q=10, default_value=500)
learning_rate = UniformFloatHyperparameter("learning_rate", 1e-3, 0.9, log=True, default_value=0.1)
max_depth = UniformIntegerHyperparameter("max_depth", 1, 12)
colsample_bytree = UniformFloatHyperparameter("colsample_bytree", 0.1, 1, q=0.1, default_value=1)
gamma = UniformFloatHyperparameter("gamma", 0, 10, q=0.1, default_value=0)
min_child_weight = UniformFloatHyperparameter("min_child_weight", 0, 10, q=0.1, default_value=1)
reg_alpha = UniformFloatHyperparameter("reg_alpha", 0, 10, q=0.1, default_value=0)
reg_lambda = UniformFloatHyperparameter("reg_lambda", 1, 10, q=0.1, default_value=1)
subsample = UniformFloatHyperparameter("subsample", 0.1, 1, q=0.1, default_value=1)
cs.add_hyperparameters([n_estimators, max_depth, learning_rate, min_child_weight, subsample,
colsample_bytree, gamma, reg_alpha, reg_lambda])
return cs
cs = XGBoost.get_hyperparameter_search_space()
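# Objective used by the search: fit XGBoost with the sampled configuration and return the
# negative balanced accuracy on the validation split (lower is better).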
def objective_func(config, x_train, x_val, y_train, y_val):
conf_dict = config.get_dictionary()
model = XGBoost(**conf_dict, n_jobs=4)
model.fit(x_train, y_train)
from sklearn.metrics import balanced_accuracy_score
# evaluate on validation data
y_pred = model.predict(x_val)
perf = -balanced_accuracy_score(y_val, y_pred) # minimize
return perf
if __name__ == '__main__':
args = parser.parse_args()
datasets = args.datasets.split(',')
rep_num = args.rep_num
algo_id = 'xgboost'
run_exp(datasets, cs, rep_num, objective_func, algo_id, data_dir='../soln-ml/')
| 37.770642
| 107
| 0.683022
| 2,911
| 0.707068
| 0
| 0
| 1,333
| 0.323779
| 0
| 0
| 467
| 0.113432
|
ac1adabe581fd30a1766857374cb20cf0b69b1b2
| 2,362
|
py
|
Python
|
OnStage/player_chair.py
|
IanDCarroll/xox
|
38feac84e81e8c00a397f7f976efee15756cd3ac
|
[
"MIT"
] | null | null | null |
OnStage/player_chair.py
|
IanDCarroll/xox
|
38feac84e81e8c00a397f7f976efee15756cd3ac
|
[
"MIT"
] | 30
|
2016-11-25T05:34:34.000Z
|
2017-02-11T00:10:17.000Z
|
OnStage/player_chair.py
|
IanDCarroll/tik-tak-toe
|
38feac84e81e8c00a397f7f976efee15756cd3ac
|
[
"MIT"
] | 1
|
2016-11-26T01:41:37.000Z
|
2016-11-26T01:41:37.000Z
|
import sys
from Training.observer_abilities import *
from Training.cortex_3x3_caddy import *
class Player(Observer):
def __init__(self, marker_code):
self.ui = None
self.marker_code = marker_code
def get_enemy_code(self):
if self.marker_code == 10:
return 1
return 10
def move(self, table_top):
choice = self.choose(table_top)
table_top.board[choice] = self.marker_code
return table_top.board
def choose(self, table_top):
options = self.get_legal_moves(table_top.board)
return options[0]
def get_legal_moves(self, board):
legal_moves = []
for i in range(0, len(board)):
if board[i] != 1 and board[i] != 10:
legal_moves.append(i)
return legal_moves
class Human(Player):
name = 'human'
strikes = 0
def choose(self, table_top):
choice = self.get_good_input(table_top)
if self.check_conscience(choice, table_top.board):
return self.redo_move(table_top)
else:
self.reset_strikes()
return choice
def get_good_input(self, board):
try:
return int(self.ui.ask_human()) -1
except(ValueError):
return self.redo_move(board)
def check_conscience(self, choice, board):
if choice not in self.get_legal_moves(board):
return True
def redo_move(self, table_top):
self.add_a_strike(table_top)
table_top.error = True
self.ui.refresh()
return self.choose(table_top)
def add_a_strike(self, table_top):
self.strikes += 1
if self.strikes == 3:
table_top.exit = True
self.ui.refresh()
sys.exit()
def reset_strikes(self):
self.strikes = 0
class Computer(Player):
name = 'computer'
cortex = Cortex_3x3()
def choose(self, table_top):
intel = self.get_intelligence(table_top.board)
choice = self.cortex.direct_move(intel)
return choice
def get_intelligence(self, board):
return { 'board': board,
'options': self.get_legal_moves(board),
'analysis': self.scan_board(board),
'marker_code': self.marker_code,
'enemy_code': self.get_enemy_code() }
| 27.149425
| 58
| 0.595682
| 2,263
| 0.958086
| 0
| 0
| 0
| 0
| 0
| 0
| 68
| 0.028789
|
ac1b3ebd8ffb64179ceb128585149b4d27bf039c
| 575
|
py
|
Python
|
content_f_conditions/ex45_multiple_conditions.py
|
Alyssonmach/learning-python-with-codes
|
e5ef70f3b56712e98449b3053eb34416b8025cb1
|
[
"MIT"
] | 3
|
2020-11-28T08:26:54.000Z
|
2020-12-23T18:37:37.000Z
|
content_f_conditions/ex45_multiple_conditions.py
|
Alyssonmach/learning-python-with-codes
|
e5ef70f3b56712e98449b3053eb34416b8025cb1
|
[
"MIT"
] | 1
|
2021-02-12T12:17:49.000Z
|
2021-02-12T12:17:49.000Z
|
content_f_conditions/ex45_multiple_conditions.py
|
Alyssonmach/learning-python-with-codes
|
e5ef70f3b56712e98449b3053eb34416b8025cb1
|
[
"MIT"
] | null | null | null |
requested_toppings = ['mushrooms', 'extra cheese']
if 'mushrooms' in requested_toppings:
print("Adding mushrooms.")
if 'pepperoni' in requested_toppings:
print("Adding pepperoni.")
if 'extra cheese' in requested_toppings:
print("Adding extra cheese.")
print("\nFinished making your first pizza!")
if 'mushrooms' in requested_toppings:
print("Adding mushrooms.")
elif 'pepperoni' in requested_toppings:
print("Adding pepperoni.")
elif 'extra cheese' in requested_toppings:
print("Adding extra cheese.")
print("\nFinished making your second pizza!")
| 30.263158
| 50
| 0.73913
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 292
| 0.507826
|
ac1cc4d7aa81ff92764789ce626b2f4acf382ec9
| 4,000
|
py
|
Python
|
blog/models.py
|
njuaplusplus/j0shua
|
d14c657c72df157aaf2e471010b06bd85f415296
|
[
"Apache-2.0"
] | null | null | null |
blog/models.py
|
njuaplusplus/j0shua
|
d14c657c72df157aaf2e471010b06bd85f415296
|
[
"Apache-2.0"
] | null | null | null |
blog/models.py
|
njuaplusplus/j0shua
|
d14c657c72df157aaf2e471010b06bd85f415296
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/local/bin/python
# coding=utf-8
from django.db import models
from django.utils.translation import ugettext as _
from markdown import markdown
from django.contrib.auth.models import User
from uuslug import uuslug
from django import forms
from pagedown.widgets import PagedownWidget
# from bootstrap3_datetime.widgets import DateTimePicker
from datetimewidget.widgets import DateTimeWidget
class Category(models.Model) :
"""Category Model"""
title = models.CharField(
verbose_name = _('名称'),
help_text = _(' '),
max_length = 255
)
slug = models.SlugField(
verbose_name = _('Slug'),
help_text = _('Uri identifier.'),
max_length = 255,
unique = True
)
class Meta:
app_label = _('blog')
verbose_name = _("Category")
verbose_name_plural = _("Categories")
ordering = ['title',]
def save(self, *args, **kwargs):
if not self.slug.strip():
# slug is null or empty
self.slug = uuslug(self.title, instance=self, max_length=32, word_boundary=True)
super(Category, self).save(*args, **kwargs)
def __str__(self):
return "%s" % (self.title,)
class Article(models.Model) :
"""Article Model"""
title = models.CharField(
verbose_name = _('标题'),
help_text = _(' '),
max_length = 255
)
slug = models.SlugField(
verbose_name = _('固定链接'),
help_text = _('本文章的短网址(Uri identifier).'),
max_length = 255,
unique = True
)
cover = models.ImageField(
verbose_name = _('封面'),
help_text = _('若留空, 则使用默认图片'),
upload_to='blogs/images/%Y/%m/%d',
null = True,
blank = True
)
excerpt = models.TextField(
verbose_name = _('摘要'),
help_text = _(' '),
null = True,
blank = True
)
author = models.ForeignKey(User, verbose_name=_('作者'))
content_markdown = models.TextField(
verbose_name = _('内容 (Markdown)'),
help_text = _(' '),
)
content_markup = models.TextField(
verbose_name = _('内容 (Markup)'),
help_text = _(' '),
)
categories = models.ManyToManyField(
Category,
verbose_name = _('分类'),
help_text = _(' '),
blank = True
)
date_publish = models.DateTimeField(
verbose_name = _('发布日期'),
help_text = _(' ')
)
is_approved = models.BooleanField(
verbose_name = _('通过审核'),
default = False
)
class Meta:
app_label = _('blog')
verbose_name = _("Article")
verbose_name_plural = _("Articles")
ordering = ['-date_publish']
def save(self, *args, **kwargs):
if not self.slug.strip():
# slug is null or empty
self.slug = uuslug(self.title, instance=self, max_length=32, word_boundary=True)
if self.is_approved is None:
self.is_approved = False
self.content_markup = markdown(self.content_markdown, ['codehilite', 'attr_list'])
super(Article, self).save(*args, **kwargs)
def __str__(self):
return "%s" % (self.title,)
class ArticleForm(forms.ModelForm):
class Meta:
model = Article
dateTimeOptions = {
'todayBtn' : 'true',
}
widgets = {
'content_markdown' : PagedownWidget(),
# 'date_publish' : DateTimePicker(options={"format": "YYYY-MM-DD HH:mm", "pickSeconds": False, "language": 'zh-cn', }),
'date_publish' : DateTimeWidget(usel10n=True, bootstrap_version=3, options = dateTimeOptions),
'title' : forms.TextInput(attrs={'class':'form-control'}),
'slug' : forms.TextInput(attrs={'class':'form-control'}),
'excerpt' : forms.Textarea(attrs={'class':'form-control'}),
'categories' : forms.SelectMultiple(attrs={'class':'form-control'}),
}
exclude = ['content_markup', 'author', 'is_approved', ]
| 31.007752
| 131
| 0.58325
| 3,687
| 0.901467
| 0
| 0
| 0
| 0
| 0
| 0
| 873
| 0.213447
|
ac1cf53bf0793269cb8c9b3fe1ee4967ef2b9385
| 4,788
|
py
|
Python
|
sw_edit.py
|
nellore/deidentify
|
e82befbf9d45e70df739ab2aaafaa1a5513e4aeb
|
[
"MIT"
] | null | null | null |
sw_edit.py
|
nellore/deidentify
|
e82befbf9d45e70df739ab2aaafaa1a5513e4aeb
|
[
"MIT"
] | null | null | null |
sw_edit.py
|
nellore/deidentify
|
e82befbf9d45e70df739ab2aaafaa1a5513e4aeb
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
"""
sw_edit.py
Deidentifies SW_SUMMARY.csv and SW_MINUTE.csv in LABS 2 data; these files
cannot be deidentified properly by date_eliminator.py. This script replaces
dates with days since first day.
This software is licensed under the MIT License.
Copyright (c) 2016 Abhinav Nellore
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""
import argparse
import sys
import os
import csv
import datetime
import errno
import itertools
_date_formats = ['%m/%d/%Y', '%d/%m/%Y']
if __name__ == '__main__':
# Print file's docstring if -h is invoked
parser = argparse.ArgumentParser(description=__doc__,
formatter_class=argparse.RawDescriptionHelpFormatter)
parser.add_argument('--input-dir', '-i', type=str, required=True,
help=('input directory; should contain SW_MINUTE.csv and '
'SW_SUMMARY.csv')
)
parser.add_argument('--output-dir', '-o', type=str, required=True,
help='output directory'
)
args = parser.parse_args()
try:
os.makedirs(args.output_dir)
except OSError as e:
if e.errno != errno.EEXIST:
raise
# Use sorted input file list to ensure reproducibility
with open(
os.path.join(args.input_dir, 'SW_MINUTE.csv')
) as minute_stream, open(
os.path.join(args.output_dir, 'SW_MINUTE.csv'), 'w'
) as output_stream:
try:
dialect = csv.Sniffer().sniff(minute_stream.read(1000000))
except csv.Error as e:
print >>sys.stderr, (
'Could not determine delimiter for SW_MINUTE.csv; '
'skipping....'
)
minute_stream.seek(0)
csv_reader = csv.reader(minute_stream, dialect)
# Print header
print >>output_stream, ','.join(csv_reader.next())
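        # itertools.groupby only groups consecutive rows, so this assumes SW_MINUTE.csv is
        # already ordered by the ID in column 0.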
for key, group in itertools.groupby(csv_reader, lambda x:x[0]):
zero_date = None
for tokens in group:
if zero_date is None:
zero_date = datetime.datetime.strptime(tokens[7],
'%m/%d/%Y')
print >>output_stream, ','.join(tokens[:6] + [
tokens[6].partition('-')[0] + (
(' ' + ' '.join(tokens[6].split(' ')[-2:]))
if tokens[6].endswith('M') else ''), str(
(datetime.datetime.strptime(tokens[7],
'%m/%d/%Y')
- zero_date).days
)
] + tokens[8:])
with open(
os.path.join(args.input_dir, 'SW_SUMMARY.csv')
) as summary_stream, open(
os.path.join(args.output_dir, 'SW_SUMMARY.csv'), 'w'
) as output_stream:
try:
dialect = csv.Sniffer().sniff(summary_stream.read(1000000))
except csv.Error as e:
print >>sys.stderr, (
'Could not determine delimiter for SW_SUMMARY.csv; '
'skipping....'
)
summary_stream.seek(0)
csv_reader = csv.reader(summary_stream, dialect)
''' Print header; note field 8 is excluded because it's day of week,
which is more specific than year.'''
print >>output_stream, ','.join([token for i, token in enumerate(
csv_reader.next()
) if i != 8])
for tokens in csv_reader:
print >>output_stream, ','.join(tokens[:6] + [
tokens[6].rpartition('/')[-1],
tokens[7].rpartition('/')[-1]
] + tokens[9:]
)
| 41.634783
| 78
| 0.581662
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1,962
| 0.409774
|
ac1d0576b9d96127b532e1ac5e9548932d7f9611
| 39
|
py
|
Python
|
pwas/__init__.py
|
cgreencode/pwas
|
e65901e115491ad9661832c7b622b01b1e81c934
|
[
"MIT"
] | 19
|
2020-06-22T02:39:25.000Z
|
2022-02-21T14:37:33.000Z
|
pwas/__init__.py
|
cgreencode/pwas
|
e65901e115491ad9661832c7b622b01b1e81c934
|
[
"MIT"
] | 5
|
2020-09-28T11:26:01.000Z
|
2021-05-06T15:34:16.000Z
|
pwas/__init__.py
|
cgreencode/pwas
|
e65901e115491ad9661832c7b622b01b1e81c934
|
[
"MIT"
] | 4
|
2020-06-25T18:19:58.000Z
|
2022-01-29T04:02:20.000Z
|
from .genotype import GenotypingManager
| 39
| 39
| 0.897436
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
ac1ea04b12bfdc49c4c9cf7624e0e9869351d1bb
| 6,191
|
py
|
Python
|
CNN_freq.py
|
vishnubk/ml_tutorial_pulsars
|
1a1b1eabbce43c39222b32974e29dfff5a722601
|
[
"MIT"
] | null | null | null |
CNN_freq.py
|
vishnubk/ml_tutorial_pulsars
|
1a1b1eabbce43c39222b32974e29dfff5a722601
|
[
"MIT"
] | null | null | null |
CNN_freq.py
|
vishnubk/ml_tutorial_pulsars
|
1a1b1eabbce43c39222b32974e29dfff5a722601
|
[
"MIT"
] | null | null | null |
from keras.preprocessing.image import ImageDataGenerator
from keras.models import Sequential
from keras.layers import Conv2D, MaxPooling2D
from keras.layers import Activation, Dropout, Flatten, Dense
from sklearn.metrics import confusion_matrix, f1_score, precision_score, recall_score
from keras import backend as K
from keras import optimizers
import numpy as np
import math
from sklearn.model_selection import train_test_split
from keras.callbacks import TensorBoard
#x, x_test, y, y_test = train_test_split(xtrain,labels,test_size=0.2,train_size=0.8)
img_width, img_height = 48, 48
input_shape = (48, 48, 1)
batch_size = 200
tensor_board = TensorBoard(log_dir='./Graph', histogram_freq=0, write_graph=True, write_images=True)
if K.image_data_format() == 'channels_first':
input_shape = (1, img_width, img_height)
else:
input_shape = (img_width, img_height, 1)
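# Simple batching generator that cycles over the data indefinitely, as expected by
# fit_generator / predict_generator below.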
def generator(batch_size,from_list_x,from_list_y):
assert len(from_list_x) == len(from_list_y)
total_size = len(from_list_x)
while True:
for i in range(0,total_size,batch_size):
yield np.array(from_list_x[i:i+batch_size]), np.array(from_list_y[i:i+batch_size])
# Load all data
time_phase_pulsars = np.load('time_phase_data_pulsars.npy')
time_phase_nonpulsars = np.load('time_phase_data_nonpulsars.npy')
freq_phase_pulsars = np.load('freq_phase_data_pulsars.npy')
freq_phase_nonpulsars = np.load('freq_phase_data_nonpulsars.npy')
pulse_profile_pulsars = np.load('pulse_profile_data_pulsars.npy')
pulse_profile_nonpulsars = np.load('pulse_profile_data_nonpulsars.npy')
dm_curve_pulsars = np.load('dm_curve_data_pulsars.npy')
dm_curve_nonpulsars = np.load('dm_curve_data_nonpulsars.npy')
reshaped_time_phase_pulsars = [np.reshape(f,(48,48,1)) for f in time_phase_pulsars]
reshaped_time_phase_nonpulsars = [np.reshape(f,(48,48,1)) for f in time_phase_nonpulsars]
reshaped_freq_phase_pulsars = [np.reshape(f,(48,48,1)) for f in freq_phase_pulsars]
reshaped_freq_phase_nonpulsars = [np.reshape(f,(48,48,1)) for f in freq_phase_nonpulsars]
model = Sequential()
model.add(Conv2D(32, (3, 3), input_shape=input_shape))
model.add(Activation('relu'))
model.add(MaxPooling2D(pool_size=(2, 2)))
model.add(Conv2D(32, (3, 3)))
model.add(Activation('relu'))
model.add(MaxPooling2D(pool_size=(2, 2)))
model.add(Conv2D(64, (3, 3)))
model.add(Activation('relu'))
model.add(MaxPooling2D(pool_size=(2, 2)))
# After these layers, we convert our 3D feature maps to 1D feature vectors with the help of 'flatten'. We use a 'dropout' layer to prevent overfitting
model.add(Flatten())
model.add(Dense(64))
model.add(Activation('relu'))
model.add(Dropout(0.5))
model.add(Dense(1))
model.add(Activation('sigmoid'))
model.compile(loss='binary_crossentropy',
optimizer='rmsprop',
metrics=['accuracy'])
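# Architecture recap: three conv + max-pool blocks (32, 32 and 64 filters, 3x3 kernels,
# ReLU), flattened into a 64-unit dense layer with dropout, and a single sigmoid unit
# for the binary pulsar / non-pulsar decision.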
#print(model.summary())
label_reshaped_time_phase_pulsars = np.ones(len(reshaped_time_phase_pulsars))
label_reshaped_time_phase_nonpulsars = np.zeros(len(reshaped_time_phase_nonpulsars))
time_phase_data_combined = np.concatenate((reshaped_time_phase_pulsars, reshaped_time_phase_nonpulsars), axis = 0)
time_phase_label_combined = np.concatenate((label_reshaped_time_phase_pulsars, label_reshaped_time_phase_nonpulsars), axis = 0)
time_phase_train, time_phase_test, time_phase_label_train, time_phase_label_test = train_test_split(time_phase_data_combined, time_phase_label_combined, test_size=0.2, random_state=42)
model.fit_generator(generator(batch_size, time_phase_train, time_phase_label_train), steps_per_epoch=len(time_phase_train)//batch_size, epochs=100, callbacks=[tensor_board])
model.save_weights('first_try.h5')
#number_of_examples = len(time_phase_test)
#number_of_generator_calls = math.ceil(number_of_examples / (1.0 * batch_size))
predict = model.predict_generator(generator(batch_size, time_phase_test, time_phase_label_test), steps=math.ceil(len(time_phase_test)/batch_size))
np.save('predictions.npy', predict)
classified_results = np.rint(predict)
f_score = f1_score(time_phase_label_test, classified_results, average='binary')
precision = precision_score(time_phase_label_test, classified_results, average='binary')
recall = recall_score(time_phase_label_test, classified_results, average='binary')
print('F Score:', f_score, 'Precision:', precision, 'Recall:', recall)
print('Start testing GBNCC data')
gbncc_pulsars = np.load('time_phase_gbncc_test_data_pulsars.npy')
gbncc_nonpulsars = np.load('time_phase_gbncc_test_data_nonpulsars_part3.npy')
reshaped_time_phase_gbncc_pulsars = [np.reshape(f,(48,48,1)) for f in gbncc_pulsars]
reshaped_time_phase_gbncc_nonpulsars = [np.reshape(f,(48,48,1)) for f in gbncc_nonpulsars]
label_reshaped_time_phase_gbncc_pulsars = np.ones(len(gbncc_pulsars), dtype=np.int8)
label_reshaped_time_phase_gbncc_nonpulsars = np.zeros(len(gbncc_nonpulsars), dtype=np.int8)
time_phase_gbncc_data_combined = np.concatenate((reshaped_time_phase_gbncc_pulsars, reshaped_time_phase_gbncc_nonpulsars), axis = 0)
time_phase_gbncc_label_combined = np.concatenate((label_reshaped_time_phase_gbncc_pulsars, label_reshaped_time_phase_gbncc_nonpulsars), axis = 0)
predict = model.predict_generator(generator(batch_size, time_phase_gbncc_data_combined, time_phase_gbncc_label_combined), steps=math.ceil(len(time_phase_gbncc_data_combined)/batch_size))
np.save('predictions_gbncc.npy', predict)
test = np.rint(predict)
test = np.reshape(test, (len(time_phase_gbncc_label_combined),))
#test = np.rint(np.random.uniform(0, 1, len(time_phase_gbncc_label_combined)))  # random baseline, kept for comparison
f_score = f1_score(time_phase_gbncc_label_combined, test, average='binary')
precision = precision_score(time_phase_gbncc_label_combined, test, average='binary')
recall = recall_score(time_phase_gbncc_label_combined, test, average='binary')
print('F Score:', f_score, 'Precision:', precision, 'Recall:', recall)
accuracy = np.mean(test == time_phase_gbncc_label_combined)
print('Accuracy:', accuracy)
#generator(batch_size, time_phase_data_combined, time_phase_label_combined)
#train_datagen = ImageDataGenerator(rotation_range = 0)
#train_generator = train_datagen.flow_from_directory('train/', target_size=(img_width, img_height), batch_size=batch_size, class_mode='binary')
#print(train_generator)
| 43.598592
| 186
| 0.800517
| 0
| 0
| 296
| 0.047811
| 0
| 0
| 0
| 0
| 1,379
| 0.222743
|
ac1eac77532b97e37684d7282cd7c2a9da13f188
| 1,332
|
py
|
Python
|
src/config/contents.py
|
miloszowi/everyone-mention-telegram-bot
|
a6b441b197b743f57e089dbe32d262b87a155140
|
[
"MIT"
] | 13
|
2021-09-20T17:04:28.000Z
|
2022-03-15T09:27:25.000Z
|
src/config/contents.py
|
miloszowi/everyone-mention-telegram-bot
|
a6b441b197b743f57e089dbe32d262b87a155140
|
[
"MIT"
] | null | null | null |
src/config/contents.py
|
miloszowi/everyone-mention-telegram-bot
|
a6b441b197b743f57e089dbe32d262b87a155140
|
[
"MIT"
] | null | null | null |
# markdownv2 python-telegram-bot specific
joined = '{} joined group `{}`'
not_joined = '{} is already in group `{}`'
left = '{} left group `{}`'
not_left = '{} did not join group `{}` before'
mention_failed = 'There are no users to mention'
no_groups = 'There are no groups for this chat'
# html python-telegram-bot specific
start_text = """
Hello!
@everyone_mention_bot here.
I am here to help you with multiple user mentions.
<b>Usage</b>:
Users that joined the group with the <code>/join</code> command
can be mentioned by typing one of these in your message:
<code>@all</code>, <code>@channel</code>, <code>@chat</code>, <code>@everyone</code>, <code>@group</code> or <code>@here</code>.
If you created a group named <code>gaming</code>, simply use <code>@gaming</code> to mention the users from that group.
You can also use <code>/everyone</code> command.
<b>Commands</b>:
<pre>/join {group-name}</pre>
Joins the group (or creates it if it did not exist before).
<pre>/leave {group-name}</pre>
Leaves the group (or deletes it if no other users are left).
<pre>/everyone {group-name}</pre>
Mentions everyone that joined the group.
<pre>/groups</pre>
Shows all created groups in this chat.
<pre>/start</pre>
Shows this start & help text.
<b>Please note</b>
<code>{group-name}</code> is not required; <code>default</code> is used if not given.
"""
| 30.976744
| 128
| 0.701201
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1,240
| 0.930931
|
ac202e2b18e2572dfa3962aaeb16577bcf9a3ce4
| 3,298
|
py
|
Python
|
portal/libs/utils.py
|
Artis-Physis/utopia-cms
|
5cb8d941d0b2df53fddc566a52e9d3baee4a007e
|
[
"BSD-3-Clause"
] | 8
|
2020-12-15T17:11:08.000Z
|
2021-12-13T22:08:33.000Z
|
portal/libs/utils.py
|
Artis-Physis/utopia-cms
|
5cb8d941d0b2df53fddc566a52e9d3baee4a007e
|
[
"BSD-3-Clause"
] | 28
|
2020-12-15T17:34:03.000Z
|
2022-02-01T04:09:10.000Z
|
portal/libs/utils.py
|
Artis-Physis/utopia-cms
|
5cb8d941d0b2df53fddc566a52e9d3baee4a007e
|
[
"BSD-3-Clause"
] | 7
|
2020-12-15T19:59:17.000Z
|
2021-11-24T16:47:06.000Z
|
# -*- coding: utf-8 -*-
from hashlib import md5
import re
import smtplib
from django.conf import settings
from django.http import HttpResponseBadRequest
def remove_spaces(s):
inline_tags = 'a|b|i|u|em|span|strong|sup|sub|tt|font|small|big'
inlines_with_spaces = r'</(%s)>[\s\n\t]+<(%s)\b' % (
inline_tags, inline_tags)
re_inline = re.compile(inlines_with_spaces)
s = re_inline.sub(r'</\1>&#preservespace;<\2', s)
re_tags = re.compile(r'>[\n\s]+<')
s = re_tags.sub('><', s)
re_spaces = re.compile(r'\n\s+')
s = re_spaces.sub('\n', s)
re_to_space = re.compile(r'[\t\n\s]+')
s = re_to_space.sub(' ', s)
s = s.replace('&#preservespace;', ' ')
return s
def remove_shorttags(s):
return s.replace(' />', '>')
def next(request):
next = '/'
if 'next' in request.GET:
next = request.GET.get('next', '/')
elif 'next' in request.POST:
next = request.POST.get('next', '/')
# path = request.META.get('PATH_INFO', '/')
if next.startswith('/usuarios'):
next = '/'
return next
def do_gonzo(*args, **kwargs):
hash_this = ''
for arg in args:
hash_this += '%s$' % str(arg)
for arg in kwargs:
hash_this += '%s$' % str(kwargs.get(arg))
hash_this += settings.SECRET_KEY
return md5(hash_this).hexdigest()
def md5file(filename):
"""
Re-implementation of md5sum in python. Return the hex digest of a file
without loading it all into memory.
By Nick Craig-Wood <nick@craig-wood.com>
"""
    fh = open(filename, 'rb')
    digest = md5()
    while True:
        buf = fh.read(4096)
        if not buf:
            break
        digest.update(buf)
    fh.close()
    return digest.hexdigest()
def set_amp_cors_headers(request, response):
try:
amp_source_origin = request.GET['__amp_source_origin']
except KeyError:
return HttpResponseBadRequest()
if request.META.get('HTTP_AMP_SAME_ORIGIN') == 'true':
access_control_allow_origin = amp_source_origin
else:
try:
access_control_allow_origin = request.META['HTTP_ORIGIN']
except KeyError:
return HttpResponseBadRequest()
amp_access_main_header_name = 'AMP-Access-Control-Allow-Source-Origin'
response[amp_access_main_header_name] = amp_source_origin
response['Access-Control-Allow-Origin'] = access_control_allow_origin
response['Access-Control-Allow-Credentials'] = 'true'
response['Access-Control-Expose-Headers'] = amp_access_main_header_name
return response
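# Minimal usage sketch from a view (the view and JsonResponse usage below are
# hypothetical, not part of this module):
#   def amp_endpoint(request):
#       response = JsonResponse({'ok': True})
#       return set_amp_cors_headers(request, response)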
def smtp_connect(alternative=False):
"""
Authenticate to SMTP (if any auth needed) and return the conn instance.
If alternative is True, connect to the alternative SMTP instead of the default.
"""
email_conf = {}
for setting in ('HOST', 'PORT', 'HOST_USER', 'HOST_PASSWORD', 'USE_TLS'):
        email_conf[setting] = getattr(settings, 'EMAIL_%s%s' % ('ALTERNATIVE_' if alternative else '', setting), None)
s = smtplib.SMTP(email_conf['HOST'], email_conf['PORT'])
if email_conf['USE_TLS']:
s.starttls()
if email_conf['HOST_USER']:
try:
s.login(email_conf['HOST_USER'], email_conf['HOST_PASSWORD'])
except smtplib.SMTPException:
pass
return s
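# Minimal usage sketch (addresses are hypothetical; the settings names are the ones
# read above):
#   conn = smtp_connect()
#   conn.sendmail('noreply@example.com', ['reader@example.com'], message.as_string())
#   conn.quit()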
| 28.678261
| 119
| 0.632201
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 978
| 0.296543
|
ac20aa316bc8bb6155930c3ea46cc8a431427a33
| 382
|
py
|
Python
|
Python_OO/DelegarAcessoAtributos.py
|
Madara701/Python_OO
|
8d67569a8c4771dd82f5259c2ed5e782cd4e4036
|
[
"Apache-2.0"
] | null | null | null |
Python_OO/DelegarAcessoAtributos.py
|
Madara701/Python_OO
|
8d67569a8c4771dd82f5259c2ed5e782cd4e4036
|
[
"Apache-2.0"
] | null | null | null |
Python_OO/DelegarAcessoAtributos.py
|
Madara701/Python_OO
|
8d67569a8c4771dd82f5259c2ed5e782cd4e4036
|
[
"Apache-2.0"
] | null | null | null |
class A:
def fazer_algo(self):
print("Palmeiras")
def outro(self):
print("campeão")
class B:
def __init__(self):
self.a = A()
def fazer_algo(self):
        # delegates to self.a
return self.a.fazer_algo()
def outro(self):
        # delegating again
return self.a.outro()
b = B()
b.fazer_algo()
b.outro()
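# A more general sketch of attribute-access delegation (not part of the original
# example): __getattr__ forwards any attribute the wrapper does not define itself
# to the wrapped A instance.
class C:
    def __init__(self):
        self._a = A()
    def __getattr__(self, name):
        # only called when normal lookup on C fails, so C's own attributes win
        return getattr(self._a, name)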
| 17.363636
| 34
| 0.565445
| 330
| 0.861619
| 0
| 0
| 0
| 0
| 0
| 0
| 60
| 0.156658
|
ac20ce5e35b873341e2f84f5642a7ee91537325e
| 777
|
py
|
Python
|
app/main.py
|
sebastian-hatala-leanix/chain-reaction-event
|
15e34ce450b2c267eb9dd2ec5981fbfc51caac23
|
[
"Apache-2.0"
] | null | null | null |
app/main.py
|
sebastian-hatala-leanix/chain-reaction-event
|
15e34ce450b2c267eb9dd2ec5981fbfc51caac23
|
[
"Apache-2.0"
] | null | null | null |
app/main.py
|
sebastian-hatala-leanix/chain-reaction-event
|
15e34ce450b2c267eb9dd2ec5981fbfc51caac23
|
[
"Apache-2.0"
] | null | null | null |
import cv2
import numpy as np
from numpy.linalg import norm
import requests
def _get_image_frame(camera) -> np.ndarray:
_, frame = camera.read()
return frame
def _convert_frame_to_hsv(frame: np.ndarray) -> np.ndarray:
return cv2.cvtColor(frame, cv2.COLOR_BGR2HSV)
def _post_to_michi() -> None:
    # Retry until the webhook call succeeds; a loop avoids unbounded recursion
    while True:
        try:
            requests.post("https://tbaum.duckdns.org/api/webhook/awesome-leanix")
            return
        except Exception:
            continue
def main() -> None:
camera = cv2.VideoCapture(0)
while True:
frame = _get_image_frame(camera)
hsv_img = _convert_frame_to_hsv(frame)
if np.average(norm(hsv_img, axis=2)) / np.sqrt(3) > 110:
_post_to_michi()
break
print("Success!")
if __name__ == "__main__":
main()
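# How the trigger works: each frame is converted to HSV, the L2 norm of the three
# channel values is taken per pixel, averaged over the image and scaled by 1/sqrt(3);
# once that brightness-like score exceeds 110 (presumably a light turning on in front
# of the camera), the webhook is fired and the loop ends.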
| 22.852941
| 77
| 0.655084
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 74
| 0.095238
|
ac2256d9966602bb7091a7d1aec1644daf7e0196
| 8,134
|
py
|
Python
|
habitat_baselines/utils/gym_adapter.py
|
srama2512/habitat-api
|
bc85d0961cef3b4a08bc9263869606109fb6ff0a
|
[
"MIT"
] | 355
|
2020-08-18T03:48:26.000Z
|
2022-03-30T00:22:50.000Z
|
habitat_baselines/utils/gym_adapter.py
|
srama2512/habitat-api
|
bc85d0961cef3b4a08bc9263869606109fb6ff0a
|
[
"MIT"
] | 328
|
2020-08-12T21:25:09.000Z
|
2022-03-31T10:39:21.000Z
|
habitat_baselines/utils/gym_adapter.py
|
srama2512/habitat-api
|
bc85d0961cef3b4a08bc9263869606109fb6ff0a
|
[
"MIT"
] | 159
|
2020-08-12T22:23:36.000Z
|
2022-03-30T22:56:52.000Z
|
#!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates.
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
from typing import Any, Dict, Optional, Union
import gym
import numpy as np
from gym import spaces
from habitat.core.simulator import Observations
from habitat.utils.visualizations.utils import observations_to_image
def flatten_dict(d, parent_key=""):
# From https://stackoverflow.com/questions/6027558/flatten-nested-dictionaries-compressing-keys
items = []
for k, v in d.items():
new_key = parent_key + str(k) if parent_key else str(k)
if isinstance(v, dict):
items.extend(flatten_dict(v, new_key).items())
else:
items.append((new_key, v))
return dict(items)
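# e.g. flatten_dict({"nav": {"success": 1.0}}) == {"navsuccess": 1.0}: sub-keys are
# concatenated onto their parent key with no separator.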
def smash_observation_space(obs_space, limit_keys):
obs_shapes = [obs_space.spaces[k].shape for k in limit_keys]
def transform_shape(shape):
if len(shape) == 2:
return (np.prod(shape),)
return shape
obs_shapes = [transform_shape(shape) for shape in obs_shapes]
obs_dims = [len(shape) for shape in obs_shapes]
if len(set(obs_dims)) == 1 and obs_dims[0] == 1:
# Smash together
total_dim = sum([shape[0] for shape in obs_shapes])
return spaces.Box(
shape=(total_dim,), low=-1.0, high=1.0, dtype=np.float32
)
return obs_space
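# e.g. sub-spaces of shapes (3,) and (5,) selected by limit_keys are smashed into a
# single Box of shape (8,); 2-D shapes are flattened via np.prod first, and if any
# selected sub-space is still not 1-D, the original space is returned unchanged.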
class HabGymWrapper(gym.Env):
"""
Wraps a Habitat RLEnv into a format compatible with the standard OpenAI Gym
interface. Currently does not support discrete actions. This wrapper
therefore changes the behavior so that:
- The action input to `.step(...)` is always a numpy array
    - The returned value of `.step(...)` and `.reset()` is either a numpy array or a
      dictionary consisting of string keys and numpy array values.
    - The action space is converted to a `gym.spaces.Box`; action spaces from the RLEnv
      are flattened into one Box space.
- The observation space is either a `gym.spaces.Box` or a `gym.spaces.Dict`
where the spaces of the Dict are `gym.spaces.Box`.
Configuration allows filtering the included observations, specifying goals,
or filtering actions. Listed below are the
config keys:
- `RL.GYM_OBS_KEYS`: Which observation names from the wrapped environment
to include. The order of the key names is kept in the output observation array.
- `RL.GYM_DESIRED_GOAL_KEYS`: By default is an empty list. If not empty,
any observations are returned in the `desired_goal` returned key of the
observation.
- `RL.GYM_FIX_INFO_DICT`: By default False, but if specified as true, this
flattens the returned info dictionary to have depth 1 where sub-keys are
concatenated to parent keys.
- `RL.GYM_ACTION_KEYS`: Include a subset of the allowed actions in the
wrapped environment. If not specified or empty, all actions are included.
Example usage:
```
config = baselines_get_config(hab_cfg_path)
env_class = get_env_class(config.ENV_NAME)
env = habitat_baselines.utils.env_utils.make_env_fn(
env_class=env_class, config=config
)
env = HabGymWrapper(env)
env = HabRenderWrapper(env)
```
"""
def __init__(self, env, save_orig_obs: bool = False):
self._gym_goal_keys = env._rl_config.get("GYM_DESIRED_GOAL_KEYS", [])
self._gym_achieved_goal_keys = env._rl_config.get(
"GYM_ACHIEVED_GOAL_KEYS", []
)
self._fix_info_dict = env._rl_config.get("GYM_FIX_INFO_DICT", False)
self._gym_action_keys = env._rl_config.get("GYM_ACTION_KEYS", None)
self._gym_obs_keys = env._rl_config.get("GYM_OBS_KEYS", None)
action_space = env.action_space
action_space = spaces.Dict(
{
k: v
for k, v in action_space.spaces.items()
if (
(self._gym_action_keys is None)
or (k in self._gym_action_keys)
)
}
)
self._last_obs: Optional[Observations] = None
self.action_mapping = {}
self._save_orig_obs = save_orig_obs
self.orig_obs = None
if len(action_space.spaces) != 1:
raise ValueError(
"Cannot convert this action space, more than one action"
)
self.orig_action_name = list(action_space.spaces.keys())[0]
action_space = action_space.spaces[self.orig_action_name]
if not isinstance(action_space, spaces.Dict):
raise ValueError("Cannot convert this action space")
all_box = True
for sub_space in action_space.spaces.values():
if not isinstance(sub_space, spaces.Box):
all_box = False
break
if not all_box:
raise ValueError("Cannot convert this action space")
start_i = 0
for name, sub_space in action_space.spaces.items():
end_i = start_i + sub_space.shape[0]
self.action_mapping[name] = (start_i, end_i)
self.action_space = spaces.Box(
shape=(end_i,), low=-1.0, high=1.0, dtype=np.float32
)
self.observation_space = smash_observation_space(
env.observation_space, self._gym_obs_keys
)
dict_space = {
"observation": self.observation_space,
}
if len(self._gym_goal_keys) > 0:
dict_space["desired_goal"] = smash_observation_space(
env.observation_space, self._gym_goal_keys
)
if len(self._gym_achieved_goal_keys) > 0:
dict_space["achieved_goal"] = smash_observation_space(
env.observation_space, self._gym_achieved_goal_keys
)
if len(dict_space) > 1:
self.observation_space = spaces.Dict(dict_space)
self._env = env
def step(self, action: np.ndarray):
action_args = {}
for k, (start_i, end_i) in self.action_mapping.items():
action_args[k] = action[start_i:end_i]
action = {
"action": self.orig_action_name,
"action_args": action_args,
}
return self.direct_hab_step(action)
def direct_hab_step(self, action: Union[int, str, Dict[str, Any]]):
obs, reward, done, info = self._env.step(action=action)
self._last_obs = obs
obs = self._transform_obs(obs)
if self._fix_info_dict:
info = flatten_dict(info)
info = {k: float(v) for k, v in info.items()}
return obs, reward, done, info
def _is_space_flat(self, space_name):
if isinstance(self.observation_space, spaces.Box):
return True
return isinstance(
self.observation_space.spaces[space_name], spaces.Box
)
def _transform_obs(self, obs):
if self._save_orig_obs:
self.orig_obs = obs
observation = {"observation": [obs[k] for k in self._gym_obs_keys]}
if len(self._gym_goal_keys) > 0:
observation["desired_goal"] = [obs[k] for k in self._gym_goal_keys]
if len(self._gym_achieved_goal_keys) > 0:
observation["achieved_goal"] = [
obs[k] for k in self._gym_achieved_goal_keys
]
for k, v in observation.items():
if self._is_space_flat(k):
observation[k] = np.concatenate(v)
if len(observation) == 1:
return observation["observation"]
return observation
def reset(self) -> Union[np.ndarray, Dict[str, np.ndarray]]:
obs = self._env.reset()
self._last_obs = obs
return self._transform_obs(obs)
def render(self, mode: str = "rgb_array") -> np.ndarray:
frame = None
if mode == "rgb_array":
frame = observations_to_image(
self._last_obs, self._env._env.get_metrics()
)
else:
raise ValueError(f"Render mode {mode} not currently supported.")
return frame
| 36.475336
| 99
| 0.630194
| 6,679
| 0.821121
| 0
| 0
| 0
| 0
| 0
| 0
| 2,532
| 0.311286
|
ac25119082b547f7c1b12e27563aa843d253ac6b
| 1,279
|
py
|
Python
|
TopicExtractor/src/SampleFlaskApp/app.py
|
npnkbabu/mymlproject
|
9b9aaeef4a5dac2d967262166ca8cdf4fa09cd5d
|
[
"Apache-2.0"
] | null | null | null |
TopicExtractor/src/SampleFlaskApp/app.py
|
npnkbabu/mymlproject
|
9b9aaeef4a5dac2d967262166ca8cdf4fa09cd5d
|
[
"Apache-2.0"
] | null | null | null |
TopicExtractor/src/SampleFlaskApp/app.py
|
npnkbabu/mymlproject
|
9b9aaeef4a5dac2d967262166ca8cdf4fa09cd5d
|
[
"Apache-2.0"
] | null | null | null |
from flask import Flask, render_template, Response
from topicsconsumer import TopicsConsumer
import math
import time
import queue
import threading
import json
app = Flask(__name__)
@app.route('/', methods=['GET', 'POST'])
def searchTopic():
return render_template('base.html')
@app.route('/topics', methods=['GET', 'POST'])
def getTopics():
return render_template('topics.html')
@app.route('/newsandtopics', methods=['GET', 'POST'])
def newsandtopics():
try:
def inner():
newsq = queue.Queue()
cosumerObj = TopicsConsumer(newsq)
cosumerObj.startConsumer()
time.sleep(10)
while True:
obj = json.loads(newsq.get())
# content and topics
content = json.loads(obj[0])
topics = obj[1]
yield '***********************START*********************' + '\r\n' +'News : ' + '\r\n' +content['content'] + '\r\n' + '\r\n' +'Topics : ' + '\r\n' +topics +'\r\n'+'***********************END*********************'+ '\r\n'
time.sleep(10)
return Response(inner(), mimetype='text/event-stream')
except Exception as ex:
print(ex)
if __name__ == '__main__':
app.run(debug=True,port=5050)
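# A quick way to watch the stream from a shell while the app is running
# (host and port assumed from app.run above; -N disables curl's output buffering):
#   curl -N http://localhost:5050/newsandtopics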
| 31.975
| 249
| 0.532447
| 0
| 0
| 771
| 0.602815
| 1,029
| 0.804535
| 0
| 0
| 313
| 0.244722
|