hexsha stringlengths 40 40 | size int64 5 2.06M | ext stringclasses 10 values | lang stringclasses 1 value | max_stars_repo_path stringlengths 3 248 | max_stars_repo_name stringlengths 5 125 | max_stars_repo_head_hexsha stringlengths 40 78 | max_stars_repo_licenses listlengths 1 10 | max_stars_count int64 1 191k ⌀ | max_stars_repo_stars_event_min_datetime stringlengths 24 24 ⌀ | max_stars_repo_stars_event_max_datetime stringlengths 24 24 ⌀ | max_issues_repo_path stringlengths 3 248 | max_issues_repo_name stringlengths 5 125 | max_issues_repo_head_hexsha stringlengths 40 78 | max_issues_repo_licenses listlengths 1 10 | max_issues_count int64 1 67k ⌀ | max_issues_repo_issues_event_min_datetime stringlengths 24 24 ⌀ | max_issues_repo_issues_event_max_datetime stringlengths 24 24 ⌀ | max_forks_repo_path stringlengths 3 248 | max_forks_repo_name stringlengths 5 125 | max_forks_repo_head_hexsha stringlengths 40 78 | max_forks_repo_licenses listlengths 1 10 | max_forks_count int64 1 105k ⌀ | max_forks_repo_forks_event_min_datetime stringlengths 24 24 ⌀ | max_forks_repo_forks_event_max_datetime stringlengths 24 24 ⌀ | content stringlengths 5 2.06M | avg_line_length float64 1 1.02M | max_line_length int64 3 1.03M | alphanum_fraction float64 0 1 | count_classes int64 0 1.6M | score_classes float64 0 1 | count_generators int64 0 651k | score_generators float64 0 1 | count_decorators int64 0 990k | score_decorators float64 0 1 | count_async_functions int64 0 235k | score_async_functions float64 0 1 | count_documentation int64 0 1.04M | score_documentation float64 0 1 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
4f701be6860630646ec1eab6b85fb174acb20f92 | 813 | py | Python | api/routers/fetch.py | ThisIsBrainDamage/OSC-API | af361ebe72cd282d1a3e69924dde1a215816f0bd | [
"MIT"
] | 2 | 2022-03-16T04:09:53.000Z | 2022-03-16T09:38:52.000Z | api/routers/fetch.py | ThisIsBrainDamage/OSC-API | af361ebe72cd282d1a3e69924dde1a215816f0bd | [
"MIT"
] | null | null | null | api/routers/fetch.py | ThisIsBrainDamage/OSC-API | af361ebe72cd282d1a3e69924dde1a215816f0bd | [
"MIT"
] | null | null | null | # Standard library imports
import os
# Third party imports
from fastapi import Depends, APIRouter, HTTPException
from dotenv import load_dotenv
# Local imports
from ..auth.classes import User
from ..auth.authenticate import get_current_active_user
from ..data import fetch_all, fetch_item
load_dotenv()
fetch = APIRouter()
@fetch.get("/find")
async def get_by_name(name : str, current_user: User = Depends(get_current_active_user)):
    """
    Looks for an item in the database.

    :param name: name of the item to look up
    :param current_user: injected authenticated user (request is rejected by
        FastAPI before reaching this body if authentication fails)
    :raises HTTPException: 404 when no item matches ``name``
    :return: the matching item
    """
    data = await fetch_item(name)
    if data is None:
        # Must be *raised*, not returned: returning the exception object would
        # serialize it in a 200 response instead of producing a 404.
        raise HTTPException(status_code=404, detail="Item not found")
    return data
@fetch.get("/get_all")
async def get_all(current_user: User = Depends(get_current_active_user)):
    """
    Gets all items from the database.

    :param current_user: injected authenticated user
    :return: every item returned by the data layer
    """
    return await fetch_all()
4f70b4a59c8c55fded3663f5ef0848de16c340f1 | 2,990 | py | Python | driver/helper/SYNHelper_Kumar.py | qgking/FRGAN | b6a250c46981707c43c3889f80d8cc3ec31edaaf | [
"MIT"
] | 2 | 2021-08-10T02:38:23.000Z | 2021-08-10T03:04:22.000Z | driver/helper/SYNHelper_Kumar.py | qgking/FRGAN | b6a250c46981707c43c3889f80d8cc3ec31edaaf | [
"MIT"
] | 1 | 2022-02-21T15:57:03.000Z | 2022-02-21T15:57:03.000Z | driver/helper/SYNHelper_Kumar.py | qgking/FRGAN | b6a250c46981707c43c3889f80d8cc3ec31edaaf | [
"MIT"
] | 2 | 2021-11-07T13:25:51.000Z | 2022-01-18T07:09:30.000Z | from tumor_data.SYNDataLoader import *
from tensorboardX import SummaryWriter
from torchsummaryX import summary
from driver.helper.base_syn_helper import BaseTrainHelper
class SYNHelper_Kumar(BaseTrainHelper):
    """Training helper using only an adversarial loss and a pixel-wise loss.

    All other loss entries in the returned dicts are fixed to 0 — presumably
    placeholders so the dict layout matches other SYN helpers; confirm against
    the BaseTrainHelper callers.
    """
    def __init__(self, generator, discriminator,
                 criterions, config):
        super(SYNHelper_Kumar, self).__init__(generator, discriminator,
                                              criterions, config)
    def out_put_shape(self):
        # Creates the TensorBoard writer; the torchsummaryX calls below are
        # kept (commented) as a reference for printing model summaries.
        self.summary_writer = SummaryWriter(self.config.tensorboard_dir)
        # summary(self.generator.cpu(),
        #         torch.zeros((1, 1, self.config.patch_x, self.config.patch_y, self.config.patch_z)),
        #         torch.zeros((1, 1, self.config.patch_x, self.config.patch_y, self.config.patch_z)))
        # summary(self.discriminator.cpu(),
        #         torch.zeros((1, 2, self.config.patch_x, self.config.patch_y, self.config.patch_z)))
    def test_one_batch(self, batch_gen):
        # Evaluation step: synthesize fake_B from (real_A, tumor_B) and report
        # only the pixel-wise reconstruction loss; every other key is a
        # 0/None placeholder.
        real_A = batch_gen['real_A']
        real_B = batch_gen['real_B']
        tumor_B = batch_gen['tumor_B']
        fake_B = self.generator(real_A, tumor_B)
        loss_pixel = self.criterions['criterion_pixelwise'](fake_B, real_B)
        losses = loss_pixel.item()
        return {
            "loss_pixel": losses,
            "fake_B": fake_B,
            "loss_boundary": 0,
            "loss_tumor": 0,
            "loss_style": 0,
            "loss_percep": 0,
            "loss_pixel_coarse": 0,
            "loss_boundary_coarse": 0,
            "fake_B_coarse": None,
            "boundary": None,
            "boundary_coarse": None,
            "loss_tumor_coarse": 0,
        }
    def train_generator_one_batch(self, batch_gen):
        # One generator update: adversarial loss + pixel-wise loss.
        # Gradients are accumulated by loss_G.backward(); the optimizer step
        # is performed by the caller.
        real_A = batch_gen['real_A']
        real_B = batch_gen['real_B']
        tumor_B = batch_gen['tumor_B']
        # GAN loss
        fake_B = self.generator(real_A, tumor_B)
        # loss gan
        # NOTE(review): fake_B is detached here, so the adversarial term
        # contributes no gradient to the generator (only loss_pixel does).
        # For a standard generator update fake_B should NOT be detached —
        # confirm whether this is intentional.
        pred_fake = self.discriminator(torch.cat((fake_B.detach(), tumor_B), 1))
        out_shape = (pred_fake.size(0), 1, pred_fake.size(2), pred_fake.size(3), pred_fake.size(4))
        # Generator tries to make the discriminator output "real" (all ones).
        valid_label = self.FloatTensor(np.ones(out_shape))
        loss_fake = self.criterions['criterion_GAN'](pred_fake, valid_label)
        loss_GAN_final = loss_fake
        losses_GAN = loss_GAN_final.item()
        # # Pixel-wise loss
        loss_pixel = self.criterions['criterion_pixelwise'](fake_B, real_B)
        losses_pixel = loss_pixel.item()
        # Total loss
        loss_G = loss_GAN_final + loss_pixel
        losses_G = loss_G.item()
        loss_G.backward()
        return {
            "loss_GAN": losses_GAN,
            "loss_pixel": losses_pixel,
            "loss_boundary": 0,
            "loss_G": losses_G,
            "fake_B": fake_B,
            "loss_tumor": 0,
            "loss_percep": 0,
            "loss_style": 0,
            "loss_pixel_coarse": 0,
            "loss_boundary_coarse": 0,
            "loss_tumor_coarse": 0,
        }
| 36.024096 | 101 | 0.593311 | 2,816 | 0.941806 | 0 | 0 | 0 | 0 | 0 | 0 | 827 | 0.276589 |
4f7163d43490c92cd3148ad70a2d3f997e7a8d13 | 1,769 | py | Python | models/wrf_hydro/ensemble_config_files/ens_setup_croton_gridded.py | hkershaw-brown/feature-preprocess | fe2bd77b38c63fa0566c83ebc4d2fac1623aef66 | [
"Apache-2.0"
] | 65 | 2019-10-16T13:31:06.000Z | 2022-03-14T11:52:58.000Z | models/wrf_hydro/ensemble_config_files/ens_setup_croton_gridded.py | hkershaw-brown/feature-preprocess | fe2bd77b38c63fa0566c83ebc4d2fac1623aef66 | [
"Apache-2.0"
] | 283 | 2019-09-23T15:48:34.000Z | 2022-03-31T21:44:41.000Z | models/wrf_hydro/ensemble_config_files/ens_setup_croton_gridded.py | hkershaw-brown/feature-preprocess | fe2bd77b38c63fa0566c83ebc4d2fac1623aef66 | [
"Apache-2.0"
] | 67 | 2019-09-19T22:13:24.000Z | 2022-03-20T15:58:26.000Z |
# m=wrf_hydro_ens_sim.members[0]
# dir(m)
# NOTE(review): this configuration script appears to be exec'd by the
# wrf_hydro ensemble framework with `wrf_hydro_ens_sim` already bound in the
# execution namespace — confirm against the framework's run code.
# Change restart frequency to hourly in hydro namelist
att_tuple = ('base_hydro_namelist', 'hydro_nlist', 'rst_dt')
# The values can be a scalar (uniform across the ensemble) or a list of length N (ensemble size).
values = 60
wrf_hydro_ens_sim.set_member_diffs(att_tuple, values)
wrf_hydro_ens_sim.member_diffs # wont report any values uniform across the ensemble
# but this will:
[mm.base_hydro_namelist['hydro_nlist']['rst_dt'] for mm in wrf_hydro_ens_sim.members]
# Change restart frequency to hourly in hrldas namelist
att_tuple = ('base_hrldas_namelist', 'noahlsm_offline', 'restart_frequency_hours')
values = 1
wrf_hydro_ens_sim.set_member_diffs(att_tuple, values)
[mm.base_hrldas_namelist['noahlsm_offline']['restart_frequency_hours'] for mm in wrf_hydro_ens_sim.members]
# There are multiple restart files in the domain and the default is on 2018-06-01
# Change restart frequency to hourly in hydro namelist.
# att_tuple = ('base_hydro_namelist', 'hydro_nlist', 'restart_file')
# values = '/glade/work/jamesmcc/domains/public/croton_NY/Gridded/RESTART/HYDRO_RST.2011-08-26_00:00_DOMAIN1'
# wrf_hydro_ens_sim.set_member_diffs(att_tuple, values)
# att_tuple = ('base_hrldas_namelist', 'noahlsm_offline', 'restart_filename_requested')
# values = '/glade/work/jamesmcc/domains/public/croton_NY/Gridded/RESTART/RESTART.2011082600_DOMAIN1'
# wrf_hydro_ens_sim.set_member_diffs(att_tuple, values)
# Change model advance to 1 hour in hrldas namelist
# This is governed by the configuration namelist setting:
# run_experiment: time: advance_model_hours:
# No other differences across the ensemble, only the FORCING dir for each
# will be set at run time by the noise_model.
# We could do parameter differences here.
| 45.358974 | 109 | 0.79706 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1,408 | 0.79593 |
4f71b59f7fa424504c4850aad52c0fb59243156e | 1,475 | py | Python | graphium/graph_management/model/access.py | graphium-project/graphium-qgis-plugin | 480e90dc874522b4d4d36b0d7b909ef3144da8b2 | [
"Apache-2.0"
] | 1 | 2020-07-11T10:28:33.000Z | 2020-07-11T10:28:33.000Z | graphium/graph_management/model/access.py | graphium-project/graphium-qgis-plugin | 480e90dc874522b4d4d36b0d7b909ef3144da8b2 | [
"Apache-2.0"
] | null | null | null | graphium/graph_management/model/access.py | graphium-project/graphium-qgis-plugin | 480e90dc874522b4d4d36b0d7b909ef3144da8b2 | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
"""
/***************************************************************************
QGIS plugin 'Graphium'
/***************************************************************************
*
* Copyright 2020 Simon Gröchenig @ Salzburg Research
* eMail graphium@salzburgresearch.at
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
***************************************************************************/
"""
from enum import Enum
class Access(Enum):
    """Vehicle / transport mode types for Graphium graph access restrictions.

    The integer values are the wire/storage encoding; NOTE(review): they are
    presumably fixed by the Graphium server API — do not renumber.
    """
    PEDESTRIAN = 1
    BIKE = 2
    PRIVATE_CAR = 3
    PUBLIC_BUS = 4
    RAILWAY = 5
    TRAM = 6
    SUBWAY = 7
    FERRY_BOAT = 8
    HIGH_OCCUPATION_CAR = 9
    TRUCK = 10
    TAXI = 11
    EMERGENCY_VEHICLE = 12
    MOTOR_COACH = 13
    TROLLY_BUS = 14
    MOTORCYCLE = 15
    RACK_RAILWAY = 16
    CABLE_RAILWAY = 17
    CAR_FERRY = 18
    CAMPER = 19
    COMBUSTIBLES = 20
    HAZARDOUS_TO_WATER = 21
    GARBAGE_COLLECTION_VEHICLE = 22
    ELECTRIC_CAR = 23
    # Sentinel meaning "no access type".
    NONE = -1
| 27.314815 | 77 | 0.568814 | 500 | 0.338753 | 0 | 0 | 0 | 0 | 0 | 0 | 947 | 0.641599 |
4f72fe87f0f555011e0abddaa2ff010aaadedc6e | 2,591 | py | Python | aea/cli/utils/formatting.py | bryanchriswhite/agents-aea | d3f177a963eb855d9528555167255bf2b478f4ba | [
"Apache-2.0"
] | 126 | 2019-09-07T09:32:44.000Z | 2022-03-29T14:28:41.000Z | aea/cli/utils/formatting.py | salman6049/agents-aea | d3f177a963eb855d9528555167255bf2b478f4ba | [
"Apache-2.0"
] | 1,814 | 2019-08-24T10:08:07.000Z | 2022-03-31T14:28:36.000Z | aea/cli/utils/formatting.py | salman6049/agents-aea | d3f177a963eb855d9528555167255bf2b478f4ba | [
"Apache-2.0"
] | 46 | 2019-09-03T22:13:58.000Z | 2022-03-22T01:25:16.000Z | # -*- coding: utf-8 -*-
# ------------------------------------------------------------------------------
#
# Copyright 2018-2020 Fetch.AI Limited
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# ------------------------------------------------------------------------------
"""Module with formatting utils of the aea cli."""
from typing import Dict, List
from aea.configurations.base import AgentConfig
from aea.configurations.loader import ConfigLoader
from aea.exceptions import enforce
from aea.helpers.io import open_file
def format_items(items: List[Dict]) -> str:
    """Format list of items (protocols/connections) to a string for CLI output.

    Each item is rendered as a block of "Key: value" lines framed by dashed
    separator lines.

    :param items: dicts with "name", "public_id", "description", "author"
        and "version" keys.
    :return: concatenated human-readable listing (empty string for no items).
    """
    separator = "-" * 30
    blocks = []
    for entry in items:
        blocks.append(
            f"{separator}\n"
            f"Public ID: {entry['public_id']}\n"
            f"Name: {entry['name']}\n"
            f"Description: {entry['description']}\n"
            f"Author: {entry['author']}\n"
            f"Version: {entry['version']}\n"
            f"{separator}\n"
        )
    return "".join(blocks)
def retrieve_details(name: str, loader: ConfigLoader, config_filepath: str) -> Dict:
    """Return description of a protocol, skill, connection.

    Loads the configuration file, checks that the loaded item's name matches
    the expected one, and returns its metadata as a plain dict.

    :param name: expected item name (enforced against the loaded config)
    :param loader: configuration loader for the item type
    :param config_filepath: path to the configuration file
    :return: dict with public_id, name, author, description and version
    """
    with open_file(str(config_filepath)) as fp:
        config = loader.load(fp)
    # Agent configs expose their name as `agent_name`; packages use `name`.
    if isinstance(config, AgentConfig):
        item_name = config.agent_name
    else:
        item_name = config.name
    enforce(item_name == name, "Item names do not match!")
    return {
        "public_id": str(config.public_id),
        "name": item_name,
        "author": config.author,
        "description": config.description,
        "version": config.version,
    }
def sort_items(items: List[Dict]) -> List[Dict]:
    """
    Sort a list of dict items associated with packages.

    Sorting is stable and ascending by each item's "name" key.

    :param items: list of dicts that represent items.
    :return: sorted list.
    """
    def by_name(item: Dict) -> str:
        # Sort key: the package name.
        return item["name"]

    return sorted(items, key=by_name)
| 33.649351 | 85 | 0.592821 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1,377 | 0.531455 |
4f746f8a5d2093a7b2129bda73802e19923f5043 | 851 | py | Python | ocean_utils/agreements/utils.py | oceanprotocol/common-utils-py | f577f4762841496584e114baaec0d476e73c700e | [
"Apache-2.0"
] | null | null | null | ocean_utils/agreements/utils.py | oceanprotocol/common-utils-py | f577f4762841496584e114baaec0d476e73c700e | [
"Apache-2.0"
] | 2 | 2019-12-16T11:26:21.000Z | 2021-03-18T13:06:31.000Z | ocean_utils/agreements/utils.py | oceanprotocol/common-utils-py | f577f4762841496584e114baaec0d476e73c700e | [
"Apache-2.0"
] | null | null | null | """Agreements module."""
# Copyright 2018 Ocean Protocol Foundation
# SPDX-License-Identifier: Apache-2.0
from ocean_utils.agreements.access_sla_template import ACCESS_SLA_TEMPLATE
from ocean_utils.agreements.compute_sla_template import COMPUTE_SLA_TEMPLATE
from ocean_utils.agreements.service_types import ServiceTypes
def get_sla_template(service_type=ServiceTypes.ASSET_ACCESS):
    """
    Get the template for a ServiceType.

    :param service_type: ServiceTypes
    :raises ValueError: for service types without an SLA template
    :return: template dict (a fresh shallow copy)
    """
    template_by_type = {
        ServiceTypes.ASSET_ACCESS: ACCESS_SLA_TEMPLATE,
        ServiceTypes.CLOUD_COMPUTE: COMPUTE_SLA_TEMPLATE,
    }
    template = template_by_type.get(service_type)
    if template is None:
        raise ValueError(f'Invalid/unsupported service agreement type {service_type}')
    # Copy so callers can mutate the result without touching the module constant.
    return template['serviceAgreementTemplate'].copy()
| 35.458333 | 86 | 0.780259 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 334 | 0.392479 |
4f74e6b364f85cdca76783ea050b55c4b0e2bf27 | 1,750 | py | Python | mobilenetv1.py | Monster880/pytorch_py | 9c5ac5974f48edb5ea3d897a1100a63d488c61d9 | [
"MIT"
] | null | null | null | mobilenetv1.py | Monster880/pytorch_py | 9c5ac5974f48edb5ea3d897a1100a63d488c61d9 | [
"MIT"
] | null | null | null | mobilenetv1.py | Monster880/pytorch_py | 9c5ac5974f48edb5ea3d897a1100a63d488c61d9 | [
"MIT"
] | null | null | null | import torch
import torch.nn as nn
import torch.nn.functional as F
class mobilenet(nn.Module):
    """Small MobileNetV1-style classifier with 10 output classes.

    The final ``view(-1, 512)`` assumes the feature map is 1x1 after the
    2x2 average pool, which holds for 32x32 inputs (e.g. CIFAR-sized images).

    Fixes over the original (which could not run):
    * ``conv_dw`` now takes ``self`` implicitly via ``@staticmethod`` removal
      issues avoided, returns the ``nn.Sequential`` (it previously built it,
      discarded it, rebound ``self.conv_dw`` and ended in a bare ``raise``);
    * depthwise conv is in->in with ``groups=in``; pointwise conv is in->out;
      batch norms match the channel counts of the tensors they normalize;
    * stem outputs 32 channels to match ``conv_dw2``'s expected input;
    * ``forward`` uses ``self.conv_1`` (it previously called the undefined
      ``self.conv1``).
    """

    def __init__(self):
        super(mobilenet, self).__init__()
        # Stem: full conv, 3 -> 32 channels (conv_dw2 expects 32 in).
        self.conv_1 = nn.Sequential(
            nn.Conv2d(3, 32, kernel_size=3, stride=1, padding=1),
            nn.BatchNorm2d(32),
            nn.ReLU()
        )
        self.conv_dw2 = self.conv_dw(32, 32, 1)
        self.conv_dw3 = self.conv_dw(32, 64, 2)
        self.conv_dw4 = self.conv_dw(64, 64, 1)
        self.conv_dw5 = self.conv_dw(64, 128, 2)
        self.conv_dw6 = self.conv_dw(128, 128, 1)
        self.conv_dw7 = self.conv_dw(128, 256, 2)
        self.conv_dw8 = self.conv_dw(256, 256, 1)
        self.conv_dw9 = self.conv_dw(256, 512, 2)
        self.fc = nn.Linear(512, 10)

    @staticmethod
    def conv_dw(in_channel, out_channel, stride):
        """Depthwise-separable block: depthwise 3x3 conv then pointwise 1x1
        conv, each followed by batch norm and ReLU."""
        return nn.Sequential(
            # Depthwise: per-channel 3x3 conv (groups == channels).
            nn.Conv2d(in_channel, in_channel, kernel_size=3, stride=stride,
                      padding=1, groups=in_channel, bias=False),
            nn.BatchNorm2d(in_channel),
            nn.ReLU(),
            # Pointwise: 1x1 conv mixing channels, in -> out.
            nn.Conv2d(in_channel, out_channel, kernel_size=1, stride=1,
                      padding=0, bias=False),
            nn.BatchNorm2d(out_channel),
            nn.ReLU(),
        )

    def forward(self, x):
        out = self.conv_1(x)
        out = self.conv_dw2(out)
        out = self.conv_dw3(out)
        out = self.conv_dw4(out)
        out = self.conv_dw5(out)
        out = self.conv_dw6(out)
        out = self.conv_dw7(out)
        out = self.conv_dw8(out)
        out = self.conv_dw9(out)
        out = F.avg_pool2d(out, 2)
        out = out.view(-1, 512)
        out = self.fc(out)
        return out
def mobilenetv1_small():
    """Factory returning a small MobileNetV1 instance with default settings."""
    model = mobilenet()
    return model
4f7749d7fdeb213a71c50fac2777773a4cae4cde | 982 | py | Python | sdk/ml/azure-ai-ml/azure/ai/ml/_schema/_sweep/search_space/randint.py | dubiety/azure-sdk-for-python | 62ffa839f5d753594cf0fe63668f454a9d87a346 | [
"MIT"
] | 1 | 2022-02-01T18:50:12.000Z | 2022-02-01T18:50:12.000Z | sdk/ml/azure-ai-ml/azure/ai/ml/_schema/_sweep/search_space/randint.py | ellhe-blaster/azure-sdk-for-python | 82193ba5e81cc5e5e5a5239bba58abe62e86f469 | [
"MIT"
] | null | null | null | sdk/ml/azure-ai-ml/azure/ai/ml/_schema/_sweep/search_space/randint.py | ellhe-blaster/azure-sdk-for-python | 82193ba5e81cc5e5e5a5239bba58abe62e86f469 | [
"MIT"
] | null | null | null | # ---------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# ---------------------------------------------------------
from azure.ai.ml.constants import SearchSpace
from marshmallow import fields, post_load, pre_dump, ValidationError
from azure.ai.ml._schema.core.fields import StringTransformedEnum
from azure.ai.ml._schema.core.schema import PatchedSchemaMeta
class RandintSchema(metaclass=PatchedSchemaMeta):
    """Marshmallow schema for the sweep `randint` search-space expression.

    Serialized form: {"type": "randint", "upper": <int>}.
    """
    type = StringTransformedEnum(required=True, allowed_values=SearchSpace.RANDINT)
    type: StringTransformedEnum
    upper = fields.Integer(required=True)
    @post_load
    def make(self, data, **kwargs):
        # Turn the validated dict into the SDK's Randint object. Imported
        # locally — presumably to avoid a circular import with
        # azure.ai.ml.sweep; confirm before hoisting.
        from azure.ai.ml.sweep import Randint
        return Randint(**data)
    @pre_dump
    def predump(self, data, **kwargs):
        from azure.ai.ml.sweep import Randint
        # Only Randint instances may be dumped through this schema.
        if not isinstance(data, Randint):
            raise ValidationError("Cannot dump non-Randint object into RandintSchema")
        return data
| 35.071429 | 86 | 0.649695 | 555 | 0.565173 | 0 | 0 | 368 | 0.374745 | 0 | 0 | 228 | 0.232179 |
4f792214bd4390f63e5a1db2e2943cc2db998be9 | 3,189 | py | Python | unittest_reinvent/scoring_tests/scoring_components/pip/test_pip_caco2_efflux.py | MolecularAI/reinvent-scoring | f7e052ceeffd29e17e1672c33607189873c82a45 | [
"MIT"
] | null | null | null | unittest_reinvent/scoring_tests/scoring_components/pip/test_pip_caco2_efflux.py | MolecularAI/reinvent-scoring | f7e052ceeffd29e17e1672c33607189873c82a45 | [
"MIT"
] | 2 | 2021-11-01T23:19:42.000Z | 2021-11-22T23:41:39.000Z | unittest_reinvent/scoring_tests/scoring_components/pip/test_pip_caco2_efflux.py | MolecularAI/reinvent-scoring | f7e052ceeffd29e17e1672c33607189873c82a45 | [
"MIT"
] | 2 | 2021-11-18T13:14:22.000Z | 2022-03-16T07:52:57.000Z | import os
import shutil
import unittest
from unittest.mock import MagicMock
import numpy as np
import numpy.testing as npt
import pytest
from reinvent_scoring.scoring.enums import ComponentSpecificParametersEnum
from reinvent_scoring.scoring.enums import ScoringFunctionComponentNameEnum
from reinvent_scoring.scoring.enums import TransformationParametersEnum
from reinvent_scoring.scoring.score_components.pip.pip_prediction_component import PiPPredictionComponent
from unittest_reinvent.fixtures.paths import MAIN_TEST_PATH
from unittest_reinvent.scoring_tests.fixtures.predictive_model_fixtures import create_c_lab_component
from unittest_reinvent.scoring_tests.scoring_components.fixtures import score
from unittest_reinvent.fixtures.test_data import CELECOXIB, BUTANE, PENTANE
from unittest_reinvent.scoring_tests.scoring_components.pip.utils import patch_pip_response
class Test_pip_caco2_efflux(unittest.TestCase):
    """Tests for the Caco-2 efflux PiP component with transformation disabled."""
    @classmethod
    def setUpClass(cls):
        enum = ScoringFunctionComponentNameEnum()
        csp_enum = ComponentSpecificParametersEnum()
        parameters = create_c_lab_component(enum.CACO2_EFFLUX_PIP)
        # Clear any transformation parameters for this test class.
        parameters.specific_parameters[csp_enum.TRANSFORMATION] = {}
        if not os.path.isdir(MAIN_TEST_PATH):
            os.makedirs(MAIN_TEST_PATH)
        cls.query_smiles = [CELECOXIB, BUTANE, PENTANE]
        cls.expected_scores = [0.7, 2.2, 2.1]
        cls.component = PiPPredictionComponent(parameters)
    @classmethod
    def tearDownClass(cls):
        # Remove the scratch directory created by setUpClass.
        if os.path.isdir(MAIN_TEST_PATH):
            shutil.rmtree(MAIN_TEST_PATH)
    def test_pip_1(self):
        # With no transformation, the mocked pip response is returned as-is.
        with patch_pip_response(self.expected_scores):
            npt.assert_almost_equal(score(self.component, self.query_smiles), self.expected_scores, decimal=1)
    def test_pip_empty_response(self):
        # An empty service response yields a zero score for every molecule.
        with patch_pip_response([]):
            npt.assert_almost_equal(score(self.component, self.query_smiles), [0, 0, 0], 3)
class Test_pip_caco2_efflux_transformation(unittest.TestCase):
    """Tests for the Caco-2 efflux PiP component with a score transformation.

    Raw predictions are transformed into [0, 1] scores using the configured
    HIGH/LOW bounds.
    """

    def setUp(self):
        # Fix: this is an instance method (runs per test), so the first
        # parameter is `self`, not `cls` as in the original.
        enum = ScoringFunctionComponentNameEnum()
        csp_enum = ComponentSpecificParametersEnum()
        parameters = create_c_lab_component(enum.CACO2_EFFLUX_PIP)
        parameters.specific_parameters[csp_enum.TRANSFORMATION].update({
            TransformationParametersEnum.HIGH: 3,
            TransformationParametersEnum.LOW: 0,
        })
        if not os.path.isdir(MAIN_TEST_PATH):
            os.makedirs(MAIN_TEST_PATH)
        self.query_smiles = [CELECOXIB, BUTANE, PENTANE]
        self.expected_raw_scores = [0.795, 1.587, 1.55]
        self.expected_scores = [0.17, 0.19, 0.19]
        self.component = PiPPredictionComponent(parameters)

    @classmethod
    def tearDownClass(cls):
        # Fix: classmethods conventionally name their first parameter `cls`
        # (it receives the class, not an instance).
        if os.path.isdir(MAIN_TEST_PATH):
            shutil.rmtree(MAIN_TEST_PATH)

    def test_clab_transformed_1(self):
        with patch_pip_response(self.expected_raw_scores):
            result = score(self.component, self.query_smiles)
            has_null = any(np.isnan(result))
            npt.assert_(not has_null, "returned array has Null values")
            # All transformed scores must be distinct (no collapsed values).
            unique = np.unique(result)
            npt.assert_equal(np.sort(unique), np.sort(result))
4f7a49d4d9b2c7c57d26803d6ea3fbe28b85a380 | 9,344 | py | Python | variation/normalize.py | cancervariants/variant-normalization | e89a9f8366a659c82b2042aeb7effe339851bfb4 | [
"MIT"
] | 1 | 2022-01-19T18:17:49.000Z | 2022-01-19T18:17:49.000Z | variation/normalize.py | cancervariants/variation-normalization | 9c8fbab1562591ae9445d82ddd15df29f1ea1f5a | [
"MIT"
] | 99 | 2021-06-07T12:50:34.000Z | 2022-03-23T13:38:29.000Z | variation/normalize.py | cancervariants/variant-normalization | e89a9f8366a659c82b2042aeb7effe339851bfb4 | [
"MIT"
] | null | null | null | """Module for Variation Normalization."""
from typing import Optional, List, Tuple, Dict
from ga4gh.vrsatile.pydantic.vrs_models import Text
from ga4gh.vrsatile.pydantic.vrsatile_models import VariationDescriptor, \
GeneDescriptor
from ga4gh.vrs import models
from ga4gh.core import ga4gh_identify
from variation.data_sources import SeqRepoAccess, UTA
from urllib.parse import quote
from variation import logger
from gene.query import QueryHandler as GeneQueryHandler
from variation.schemas.token_response_schema import GeneMatchToken, Token
from variation.schemas.validation_response_schema import ValidationSummary, \
ValidationResult
class Normalize:
    """The Normalize class used to normalize a given variation."""
    def __init__(self, seqrepo_access: SeqRepoAccess, uta: UTA,
                 gene_normalizer: GeneQueryHandler) -> None:
        """Initialize Normalize class.
        :param SeqRepoAccess seqrepo_access: Access to SeqRepo data queries
        :param UTA uta: Access to UTA database and queries
        :param QueryHandler gene_normalizer: Access to gene-normalizer queries
        """
        self.seqrepo_access = seqrepo_access
        self.uta = uta
        # Warnings accumulated during the most recent `normalize` call.
        self.warnings = list()
        # Cache of gene query string -> GeneDescriptor to avoid repeated
        # gene-normalizer lookups.
        self._gene_norm_cache = dict()
        self.gene_normalizer = gene_normalizer
    @staticmethod
    def get_valid_result(q: str, validations: ValidationSummary,
                         warnings: List) -> ValidationResult:
        """Get valid result from ValidationSummary
        :param str q: Query string
        :param ValidationSummary validations: Validation summary for query
        :param List warnings: List of warnings (mutated in place on fallback)
        :return: Valid Validation Result
        """
        # For now, only use first valid result
        valid_result = None
        for r in validations.valid_results:
            # Prefer the first result that maps to a MANE transcript.
            if r.is_mane_transcript and r.variation:
                valid_result = r
                break
        if not valid_result:
            # Fall back to the first valid result and warn the caller.
            warning = f"Unable to find MANE Transcript for {q}."
            logger.warning(warning)
            warnings.append(warning)
            valid_result = validations.valid_results[0]
        return valid_result
    def normalize(self, q: str, validations: ValidationSummary,
                  warnings: List) -> Optional[VariationDescriptor]:
        """Normalize a given variation.
        :param str q: The variation to normalize
        :param ValidationSummary validations: Invalid and valid results
        :param List warnings: List of warnings
        :return: A variation descriptor for a valid result if one exists.
            Else, None.
        """
        if not q:
            resp, warnings = self._no_variation_entered()
        else:
            # Descriptor id: whitespace-normalized, URL-quoted query.
            _id = f"normalize.variation:{quote(' '.join(q.strip().split()))}"
            if len(validations.valid_results) > 0:
                valid_result = self.get_valid_result(q, validations, warnings)
                resp, warnings = self.get_variation_descriptor(
                    valid_result.variation, valid_result, _id, warnings)
            else:
                if not q.strip():
                    resp, warnings = self._no_variation_entered()
                else:
                    # No valid results: fall back to a Text variation.
                    resp, warnings = self.text_variation_resp(q, _id, warnings)
        # Persist warnings on the instance for the caller to inspect.
        self.warnings = warnings
        return resp
    @staticmethod
    def text_variation_resp(
            q: str, _id: str,
            warnings: List) -> Tuple[VariationDescriptor, List]:
        """Return text variation for queries that could not be normalized
        :param str q: query
        :param str _id: _id field for variation descriptor
        :param List warnings: List of warnings
        :return: Variation descriptor, warnings
        """
        warning = f"Unable to normalize {q}"
        text = models.Text(definition=q)
        text._id = ga4gh_identify(text)
        resp = VariationDescriptor(
            id=_id,
            variation=Text(**text.as_dict())
        )
        # Only add the generic warning when no more specific one exists.
        if not warnings:
            warnings.append(warning)
        logger.warning(warning)
        return resp, warnings
    def get_variation_descriptor(
            self, variation: Dict, valid_result: ValidationResult,
            _id: str, warnings: List, gene: Optional[str] = None
    ) -> Tuple[VariationDescriptor, List]:
        """Return variation descriptor and warnings
        :param Dict variation: VRS variation object
        :param ValidationResult valid_result: Valid result for query
        :param str _id: _id field for variation descriptor
        :param List warnings: List of warnings
        :param Optional[str] gene: Gene symbol
        :return: Variation descriptor, warnings
        """
        variation_id = variation['_id']
        identifier = valid_result.identifier
        token_type = \
            valid_result.classification_token.token_type.lower()
        vrs_ref_allele_seq = None
        if 'uncertain' in token_type:
            warnings = ['Ambiguous regions cannot be normalized']
        elif 'range' not in token_type:
            if variation['type'] == 'Allele':
                vrs_ref_allele_seq = self.get_ref_allele_seq(
                    variation, identifier
                )
            elif variation['type'] == 'CopyNumber':
                # For CopyNumber, the located object is under `subject`.
                vrs_ref_allele_seq = self.get_ref_allele_seq(
                    variation['subject'], identifier
                )
        if valid_result.gene_tokens:
            # Prefer the gene token found during validation.
            gene_token = valid_result.gene_tokens[0]
            gene_context = self.get_gene_descriptor(gene_token=gene_token)
        else:
            if gene:
                gene_context = self.get_gene_descriptor(gene=gene)
            else:
                gene_context = None
        return VariationDescriptor(
            id=_id,
            variation_id=variation_id,
            variation=variation,
            molecule_context=valid_result.classification_token.molecule_context, # noqa: E501
            structural_type=valid_result.classification_token.so_id,
            vrs_ref_allele_seq=vrs_ref_allele_seq if vrs_ref_allele_seq else None, # noqa: E501
            gene_context=gene_context
        ), warnings
    def _no_variation_entered(self) -> Tuple[None, List[str]]:
        """Return response when no variation queried.
        :return: None, list of warnings
        """
        warnings = ["No variation was entered to normalize"]
        logger.warning(warnings)
        return None, warnings
    def get_gene_descriptor(
            self, gene_token: Optional[GeneMatchToken] = None,
            gene: Optional[str] = None) -> Optional[GeneDescriptor]:
        """Return a GA4GH Gene Descriptor using Gene Normalization.
        :param Optional[GeneMatchToken] gene_token: A gene token
        :param Optional[str] gene: Gene query
        :return: A gene descriptor for a given gene if a record exists in
            gene-normalizer.
        """
        if gene_token:
            gene_query = gene_token.matched_value
        elif gene:
            gene_query = gene
        else:
            return None
        # Serve from cache when the same gene was normalized before.
        if gene_query in self._gene_norm_cache:
            return self._gene_norm_cache[gene_query]
        else:
            response = self.gene_normalizer.normalize(gene_query)
            if response.gene_descriptor:
                gene_descriptor = response.gene_descriptor
                self._gene_norm_cache[gene_query] = gene_descriptor
                return gene_descriptor
            return None
    def get_ref_allele_seq(self, allele: Dict,
                           identifier: str) -> Optional[str]:
        """Return ref allele seq for transcript.
        :param Dict allele: VRS Allele object
        :param str identifier: Identifier for allele
        :return: Ref seq allele
        """
        start = None
        end = None
        interval = allele['location']['interval']
        ival_type = interval['type']
        if ival_type == 'SequenceInterval':
            if interval['start']['type'] == 'Number':
                # Convert inter-residue start to residue coordinates (+1).
                start = interval['start']['value'] + 1
                end = interval['end']['value']
        if start is None and end is None:
            return None
        return self.seqrepo_access.get_sequence(identifier, start, end)
    def _is_token_type(self, valid_result_tokens: List,
                       token_type: str) -> bool:
        """Return whether or not token_type is in valid_result_tokens.
        :param List valid_result_tokens: Valid token matches
        :param str token_type: The token's type
        :return: Whether or not token_type is in valid_result_tokens
        """
        for t in valid_result_tokens:
            if t.token_type == token_type:
                return True
        return False
    def _get_instance_type_token(self, valid_result_tokens: List,
                                 instance_type: Token) -> Optional[Token]:
        """Return the tokens for a given instance type.
        :param List valid_result_tokens: A list of valid tokens for the input
            string
        :param Token instance_type: The instance type to check
        :return: Token for a given instance type
        """
        for t in valid_result_tokens:
            if isinstance(t, instance_type):
                return t
        return None
| 38.933333 | 96 | 0.622967 | 8,696 | 0.930651 | 0 | 0 | 1,651 | 0.176691 | 0 | 0 | 3,074 | 0.328981 |
4f7ac73509e6e84e42327cc7eb2cf60206c29356 | 478 | py | Python | blender/arm/logicnode/math/LN_separate_xyz.py | onelsonic/armory | 55cfead0844923d419d75bf4bd677ebed714b4b5 | [
"Zlib"
] | 2,583 | 2016-07-27T08:25:47.000Z | 2022-03-31T10:42:17.000Z | blender/arm/logicnode/math/LN_separate_xyz.py | N8n5h/armory | 5b4d24f067a2354bafd3ab417bb8e30ee0c5aff8 | [
"Zlib"
] | 2,122 | 2016-07-31T14:20:04.000Z | 2022-03-31T20:44:14.000Z | blender/arm/logicnode/math/LN_separate_xyz.py | N8n5h/armory | 5b4d24f067a2354bafd3ab417bb8e30ee0c5aff8 | [
"Zlib"
] | 451 | 2016-08-12T05:52:58.000Z | 2022-03-31T01:33:07.000Z | from arm.logicnode.arm_nodes import *
class SeparateVectorNode(ArmLogicTreeNode):
    """Splits the given vector into X, Y and Z."""
    bl_idname = 'LNSeparateVectorNode'
    bl_label = 'Separate XYZ'
    arm_section = 'vector'
    arm_version = 1

    def arm_init(self, context):
        # One vector input, one float output per component axis.
        self.add_input('ArmVectorSocket', 'Vector')
        for axis_name in ('X', 'Y', 'Z'):
            self.add_output('ArmFloatSocket', axis_name)
4f7b4269a2cfc7be43ae83b5c540838d9d6ebafb | 1,023 | py | Python | setup.py | Mikfr83/skyhook | 1af5afdd8ad77a46ea918be487b100d376642db8 | [
"Apache-2.0",
"MIT"
] | null | null | null | setup.py | Mikfr83/skyhook | 1af5afdd8ad77a46ea918be487b100d376642db8 | [
"Apache-2.0",
"MIT"
] | null | null | null | setup.py | Mikfr83/skyhook | 1af5afdd8ad77a46ea918be487b100d376642db8 | [
"Apache-2.0",
"MIT"
] | null | null | null | import sys
import setuptools
with open("README.md", "r") as fh:
long_description = fh.read()
# any data files that match this pattern will be included
data_files_to_include = ["*.png"]
if sys.version_info.major == 2:
try:
import PySide
requires = ['requests']
except:
requires = ['requests', 'pyside']
else:
try:
import PySide2
requires = ['requests']
except:
requires = ['requests', 'pyside2']
setuptools.setup(
name="skyhook",
version="2.2.8",
author="Niels Vaes",
author_email="niels.vaes@embark-studios.com",
description="Engine and DCC communication system",
long_description="Engine and DCC communication system",
long_description_content_type="text/markdown",
url="https://github.com/EmbarkStudios/skyhook",
install_requires=requires,
packages=setuptools.find_packages(),
package_data={
"": data_files_to_include,
},
classifiers=[
"Operating System :: OS Independent",
]
) | 25.575 | 59 | 0.652004 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 363 | 0.354839 |
4f7b5a243af90599473c06dcec0b2e2ed8c76eb5 | 2,432 | py | Python | ml3d/torch/utils/helper_torch.py | inkyusa/Open3D-ML | 40b5f7ff45577bcc6fd451cf63cc366324730849 | [
"MIT"
] | 447 | 2020-10-14T23:16:41.000Z | 2021-07-27T06:57:45.000Z | ml3d/torch/utils/helper_torch.py | inkyusa/Open3D-ML | 40b5f7ff45577bcc6fd451cf63cc366324730849 | [
"MIT"
] | 118 | 2020-10-14T10:20:37.000Z | 2021-07-27T12:23:18.000Z | ml3d/torch/utils/helper_torch.py | inkyusa/Open3D-ML | 40b5f7ff45577bcc6fd451cf63cc366324730849 | [
"MIT"
] | 80 | 2020-10-14T17:35:48.000Z | 2021-07-23T08:48:17.000Z | import torch
import torch.nn as nn
import torch.nn.functional as F
class conv2d_transpose(nn.Module):
    """Transposed 2-D convolution, optional batch norm, optional LeakyReLU.

    Mirrors `conv2d` below but upsamples via nn.ConvTranspose2d.
    """

    def __init__(self,
                 batchNorm,
                 in_planes,
                 out_planes,
                 kernel_size=1,
                 stride=1,
                 activation=True):
        super(conv2d_transpose, self).__init__()
        # (kernel_size - 1) // 2 gives 'same'-style padding for odd kernels.
        self.conv = nn.ConvTranspose2d(in_planes,
                                       out_planes,
                                       kernel_size=kernel_size,
                                       stride=stride,
                                       padding=(kernel_size - 1) // 2)
        # Convenience aliases to the conv parameters.
        self.biases = self.conv.bias
        self.weights = self.conv.weight
        self.batchNorm = batchNorm
        # NOTE: the norm layer is created unconditionally (even when batchNorm
        # is False) to preserve the original parameter/state-dict layout;
        # forward() only applies it when batchNorm is truthy.
        self.batch_normalization = nn.BatchNorm2d(out_planes,
                                                  momentum=0.01,
                                                  eps=1e-6)
        self.activation_fn = nn.LeakyReLU(0.2) if activation else nn.Identity()

    def forward(self, x):
        out = self.conv(x)
        if self.batchNorm:
            out = self.batch_normalization(out)
        return self.activation_fn(out)
class conv2d(nn.Module):
    """2-D convolution, optional batch norm, optional in-place LeakyReLU."""

    def __init__(self,
                 batchNorm,
                 in_planes,
                 out_planes,
                 kernel_size=1,
                 stride=1,
                 activation=True):
        super(conv2d, self).__init__()
        # (kernel_size - 1) // 2 gives 'same'-style padding for odd kernels.
        self.conv = nn.Conv2d(in_planes,
                              out_planes,
                              kernel_size=kernel_size,
                              stride=stride,
                              padding=(kernel_size - 1) // 2)
        # Convenience aliases to the conv parameters.
        self.biases = self.conv.bias
        self.weights = self.conv.weight
        self.batchNorm = batchNorm
        if self.batchNorm:
            # Unlike conv2d_transpose above, the norm layer only exists
            # when batch normalization is requested.
            self.batch_normalization = nn.BatchNorm2d(out_planes,
                                                      momentum=0.01,
                                                      eps=1e-6)
        self.activation_fn = (nn.LeakyReLU(0.2, inplace=True)
                              if activation else nn.Identity())

    def forward(self, x):
        out = self.conv(x)
        if self.batchNorm:
            out = self.batch_normalization(out)
        return self.activation_fn(out)
| 32 | 70 | 0.45847 | 2,359 | 0.969984 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
4f7dcd2be58eb42137bca30cd766b4e6d48d031b | 13,986 | py | Python | 2020/aoc_2020_20/aoc_2020_20.py | ericcolton/AdventOfCode | 58bdf8886d1d6cea5faeee74248c10ddaf97a93b | [
"MIT"
] | null | null | null | 2020/aoc_2020_20/aoc_2020_20.py | ericcolton/AdventOfCode | 58bdf8886d1d6cea5faeee74248c10ddaf97a93b | [
"MIT"
] | null | null | null | 2020/aoc_2020_20/aoc_2020_20.py | ericcolton/AdventOfCode | 58bdf8886d1d6cea5faeee74248c10ddaf97a93b | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
"""
Advent of Code 2020 Day 20: Jurassic Jigsaw
https://adventofcode.com/2020/day/20
Solution by Eric Colton
"""
import re
from functools import reduce
SEA_MONSTER_PROFILE = """
..................#.
#....##....##....###
.#..#..#..#..#..#..."""
class Tile:
    """One square puzzle tile: a 0/1 pixel grid plus integer edge signatures.

    `orient_to(o)` selects one of 4 flip combinations and `rotate_to(r)` one
    of 4 rotations, together exposing all 8 dihedral orientations.

    Fixes over the original:
    - orient_to worked on `base_content` *in place*, so the base grid (and
      the caller's list) was corrupted and orientation indices were not
      deterministic across calls; it now flips a copy.
    - rotate_to early-returned when `current_rotation == rotation` but never
      updated `current_rotation`, so e.g. rotate_to(2); rotate_to(0) left
      stale rotated content; it now always rebuilds and records the rotation.
    """

    def __init__(self, id: int, content):
        self.id = id
        self.base_content = content             # pristine pixel grid (rows of 0/1)
        self.reoriented_base_content = content  # grid after the current flip combo
        self.content = content                  # grid after flips + rotation
        self.corner_locations = None
        self.current_rotation = 0
        self.build_signatures()

    def build_signatures(self):
        """Recompute the integer signature of each edge from `content`."""
        self.top = self._build_signature(self.content[0])
        self.bottom = self._build_signature(self.content[-1])
        self.left = self._build_signature([entry[0] for entry in self.content])
        self.right = self._build_signature([entry[-1] for entry in self.content])

    def orient_to(self, orientation):
        """Apply flip combination `orientation` (bit 0: horizontal, bit 1: vertical)
        to a fresh copy of the pristine grid, then reset the rotation state."""
        self.content = [row.copy() for row in self.base_content]
        if orientation & 1:
            self.flip_horizontally()
        if orientation & 2:
            self.flip_vertically()
        self.reoriented_base_content = self.content
        self.current_rotation = 0
        self.build_signatures()

    def rotate_to(self, rotation: int):
        """Set the grid to the flipped base rotated by `rotation` * 90 degrees.

        Always rebuilds from `reoriented_base_content`, so rotate_to(0)
        reliably restores the unrotated grid after a previous rotation.
        """
        rotation %= 4
        content = [row.copy() for row in self.reoriented_base_content]
        for _ in range(rotation):
            rotated = [row.copy() for row in content]
            for i in range(len(content)):
                for j in range(len(content[i])):
                    rotated[i][j] = content[-j - 1][i]
            content = rotated
        self.content = content
        self.current_rotation = rotation
        self.build_signatures()

    def flip_vertically(self):
        """Reverse the row order of `content` in place."""
        for i in range(len(self.content) // 2):
            self.content[i], self.content[-i - 1] = self.content[-i - 1], self.content[i]

    def flip_horizontally(self):
        """Reverse each row of `content` in place."""
        for i in range(len(self.content)):
            for j in range(len(self.content[i]) // 2):
                self.content[i][j], self.content[i][-j - 1] = self.content[i][-j - 1], self.content[i][j]

    def linkable_with_edges(self, available_edges):
        """True if any current edge signature appears in `available_edges`."""
        return (self.top in available_edges or
                self.bottom in available_edges or
                self.left in available_edges or
                self.right in available_edges)

    def _build_signature(self, elements: list):
        """Interpret `elements` as big-endian bits and return the integer value."""
        total = 0
        for i, value in enumerate(reversed(list(elements))):
            if value:
                total += 2 ** i
        return total
class PuzzleBuilder:
    """Assembles a set of square `Tile`s into a complete grid by backtracking.

    Tiles are placed on an unbounded integer lattice starting from (0, 0);
    `tile_for_location` / `location_for_tile` hold the current partial layout.
    """

    def __init__(self, tiles: set):
        self.initial_tiles = tiles
        self.build_success = False
        self.corner_locations = None
        # The finished puzzle must be a perfect square of tiles.
        self.side_length = int(len(tiles) ** 0.5)
        if self.side_length ** 2 != len(tiles):
            raise Exception("Number of pieces must be square")

    def build(self):
        """Anchor the lowest-id tile at (0, 0), then backtrack-place the rest.

        Returns True (and sets `build_success`) if a full layout is found.
        """
        initial_tile = reduce(lambda a, b: a if a.id < b.id else b, self.initial_tiles)
        unlinked_tiles = self.initial_tiles.copy()
        unlinked_tiles.remove(initial_tile)
        self.tile_for_location = {}
        self.tile_for_location[(0, 0)] = initial_tile
        self.location_for_tile = {}
        self.location_for_tile[initial_tile] = (0, 0)
        if self._build(set([initial_tile]), unlinked_tiles):
            self.build_success = True
            return True
        return False

    def ordered_tiles(self):
        """Return the solved layout as a list of rows of tiles."""
        if not self.build_success:
            raise Exception("Cannot fetched ordered tiles without build success")
        min_x, min_y, _, _ = self._find_corner_locations()
        ordered_tiles = []
        for y in range(min_y, min_y + self.side_length):
            row = []
            for x in range(min_x, min_x + self.side_length):
                row.append(self.tile_for_location[(x, y)])
            ordered_tiles.append(row)
        # y grows upward internally, so reverse to get top-to-bottom rows.
        return list(reversed(ordered_tiles))

    def can_be_placed(self, candidate_tile: 'Tile', candidate_location: tuple):
        """Check every already-placed neighbour's edge against the candidate."""
        hits = 0
        x, y = candidate_location
        above = x, y + 1
        if above in self.tile_for_location:
            hits += 1
            above_tile = self.tile_for_location[above]
            if above_tile.bottom != candidate_tile.top:
                return False
        below = x, y - 1
        if below in self.tile_for_location:
            hits += 1
            below_tile = self.tile_for_location[below]
            if below_tile.top != candidate_tile.bottom:
                return False
        to_the_left = x - 1, y
        if to_the_left in self.tile_for_location:
            hits += 1
            to_the_left_tile = self.tile_for_location[to_the_left]
            if to_the_left_tile.right != candidate_tile.left:
                return False
        to_the_right = x + 1, y
        if to_the_right in self.tile_for_location:
            hits += 1
            to_the_right_tile = self.tile_for_location[to_the_right]
            if to_the_right_tile.left != candidate_tile.right:
                return False
        # Candidate cells always come from _side_available, so at least one
        # placed neighbour must exist; zero hits indicates a logic error.
        if hits == 0:
            raise Exception("Location {} should link to at least one tile".format(candidate_location))
        return True

    def corner_tile_ids_product(self):
        """Product of the ids of the four corner tiles (AoC part 1 answer)."""
        if not self.build_success:
            raise Exception("Corners can only be queried once built")
        min_x, min_y, max_x, max_y = self._find_corner_locations()
        corner_tiles = [\
            self.tile_for_location[(min_x, min_y)], \
            self.tile_for_location[(min_x, max_y)], \
            self.tile_for_location[(max_x, min_y)], \
            self.tile_for_location[(max_x, max_y)], \
            ]
        return reduce(lambda a, b: a * b, map(lambda a: a.id, corner_tiles))

    def _calculate_available_edges(self, linked_tiles: set):
        """Collect edge signatures of placed tiles that face an empty cell."""
        available_edges = set()
        for tile in linked_tiles:
            x, y = self.location_for_tile[tile]
            if (x, y + 1) not in self.tile_for_location:
                available_edges.add(tile.top)
            if (x, y - 1) not in self.tile_for_location:
                available_edges.add(tile.bottom)
            if (x - 1, y) not in self.tile_for_location:
                available_edges.add(tile.left)
            if (x + 1, y) not in self.tile_for_location:
                available_edges.add(tile.right)
        return available_edges

    def _build(self, linked_tiles: set, unlinked_tiles: set):
        """Backtracking core: try each unlinked tile in every flip/rotation
        against every free side of every placed tile.

        Returns True when all tiles are placed; None when this branch fails
        (callers only test truthiness).
        """
        if len(unlinked_tiles) == 0:
            return True
        available_edges = self._calculate_available_edges(linked_tiles)
        for linked_tile in linked_tiles:
            linked_location = self.location_for_tile[linked_tile]
            for unlinked_tile in unlinked_tiles:
                for orientation in range(4):
                    unlinked_tile.orient_to(orientation)
                    for rotation in range(4):
                        unlinked_tile.rotate_to(rotation)
                        # Cheap pre-filter: skip orientations whose edges
                        # cannot match any exposed edge at all.
                        if not unlinked_tile.linkable_with_edges(available_edges):
                            continue
                        for side in range(4):
                            new_location = self._side_available(linked_location, side)
                            if new_location:
                                if self.can_be_placed(unlinked_tile, new_location):
                                    self.location_for_tile[unlinked_tile] = new_location
                                    self.tile_for_location[new_location] = unlinked_tile
                                    self._reset_corner_locations()
                                    linked_tiles_copy = linked_tiles.copy()
                                    unlinked_tiles_copy = unlinked_tiles.copy()
                                    linked_tiles_copy.add(unlinked_tile)
                                    unlinked_tiles_copy.remove(unlinked_tile)
                                    rv = self._build(linked_tiles_copy, unlinked_tiles_copy)
                                    if rv:
                                        return True
                                    else:
                                        # Backtrack: undo the placement before
                                        # trying the next candidate.
                                        del self.location_for_tile[unlinked_tile]
                                        del self.tile_for_location[new_location]
                                        self._reset_corner_locations()
        return None

    def _reset_corner_locations(self):
        # Invalidate the cached bounding box after any layout change.
        self.corner_locations = None

    def _find_corner_locations(self):
        """Return (and cache) the layout bounding box (min_x, min_y, max_x, max_y)."""
        if self.corner_locations:
            return self.corner_locations
        min_x, min_y, max_x, max_y = 0, 0, 0, 0
        for location in self.tile_for_location.keys():
            x, y = location
            min_x, min_y = min(min_x, x), min(min_y, y)
            max_x, max_y = max(max_x, x), max(max_y, y)
        self.corner_locations = min_x, min_y, max_x, max_y
        return self.corner_locations

    def _side_available(self, location, side):
        """Return the free neighbouring cell on `side` of `location`, or None.

        side: 0 = above, 1 = right, 2 = below, 3 = left.
        """
        x, y = location
        if side == 0:
            candidate = x, y + 1
        elif side == 1:
            candidate = x + 1, y
        elif side == 2:
            candidate = x, y - 1
        elif side == 3:
            candidate = x - 1, y
        else:
            raise Exception("Illegal side: '{}'".format(side))
        min_x, min_y, max_x, max_y = self._find_corner_locations()
        # Loose bound keeping the layout within the final square's extent.
        # NOTE(review): the check uses the *current* location rather than the
        # candidate cell - looks intentional as a loose bound, but verify.
        if max_x - x > self.side_length or \
           min_x + x > self.side_length or \
           max_y - y > self.side_length or \
           min_y + y > self.side_length:
            return None
        return None if candidate in self.tile_for_location else candidate
def generate_combined_tile(ordered_tiles: list):
    """Merge a solved grid of tiles into one big Tile, stripping the
    1-pixel border of every tile (the borders only exist for matching)."""
    merged_rows = []
    for tile_row in ordered_tiles:
        # All tiles share the same dimensions; use the first for the row count.
        inner_height = len(tile_row[0].content) - 1
        for y_char in range(1, inner_height):
            combined = []
            for tile in tile_row:
                combined.extend(tile.content[y_char][1:-1])
            merged_rows.append(combined)
    return Tile(0, merged_rows)
def parse_sea_monster():
iterator = iter(SEA_MONSTER_PROFILE.split("\n"))
next(iterator) # ignore first line
rows = []
for line in iterator:
rows.append([1 if c == '#' else 0 for c in line.rstrip("\n")])
return rows
def count_non_sea_monster_positives(tile: Tile):
    """Count '#' pixels not covered by any sea monster, over all 16
    flip/rotation combinations of `tile`, and return the lowest count
    (AoC part 2: water roughness).

    NOTE(review): the low-watermark starts at -1 and any iteration yielding
    a count of 0 makes the `> 0` guard overwrite it unconditionally later;
    harmless for real inputs (counts are positive) but verify if reused.
    """
    sea_monster = parse_sea_monster()
    non_sea_monster_count_low_watermark = -1
    for orientation in range(4):
        tile.orient_to(orientation)
        for rotation in range(4):
            tile.rotate_to(rotation)
            sm_counter = 0
            sm_marked = set()
            # Slide the monster mask over every anchor position (y, x).
            for y in range(len(tile.content) - len(sea_monster)):
                tile_row = tile.content[y]
                for x in range(len(tile_row) - len(sea_monster[0])):
                    sm_found = True
                    for sm_y in range(len(sea_monster)):
                        for sm_x in range(len(sea_monster[sm_y])):
                            # Every 1 in the mask must line up with a 1 pixel.
                            if sea_monster[sm_y][sm_x] == 1:
                                if not tile.content[y + sm_y][x + sm_x] == 1:
                                    sm_found = False
                                    break
                        if not sm_found:
                            break
                    if sm_found:
                        sm_counter += 1
                        # Remember covered pixels so they are excluded below.
                        for sm_y in range(len(sea_monster)):
                            for sm_x in range(len(sea_monster[sm_y])):
                                if sea_monster[sm_y][sm_x] == 1:
                                    sm_marked.add((y + sm_y, x + sm_x))
            # Count all set pixels that no monster covered in this orientation.
            non_sea_monster_count = 0
            for y in range(len(tile.content)):
                for x in range(len(tile.content[y])):
                    if tile.content[y][x] and not (y, x) in sm_marked:
                        non_sea_monster_count += 1
            if non_sea_monster_count_low_watermark > 0:
                non_sea_monster_count_low_watermark = min(non_sea_monster_count_low_watermark, non_sea_monster_count)
            else:
                non_sea_monster_count_low_watermark = non_sea_monster_count
    return non_sea_monster_count_low_watermark
def parse_input(input):
    """Parse 'Tile <id>:' blocks of '#'/'.' rows into a set of Tile objects.

    A blank line terminates the current tile. Fixes over the original:
    leading or consecutive blank lines are now skipped instead of raising
    "unexpected empty line", and the in-progress check uses `is None` so a
    (hypothetical) tile id of 0 would not be mis-treated as "no tile".
    Raises on any line that is neither a header, a grid row, nor blank.
    """
    tiles = set()
    current_tile_id, current_tile_contents = None, None
    line_count = -1
    for line in input:
        line = line.rstrip()
        line_count += 1
        header_match = re.search(r'Tile (\d+):', line)
        if header_match:
            current_tile_id = int(header_match.group(1))
            current_tile_contents = []
            continue
        elif line == '':
            if current_tile_id is None:
                # Extra blank lines between tiles are harmless; skip them.
                continue
            tiles.add(Tile(current_tile_id, current_tile_contents))
            current_tile_id, current_tile_contents = None, None
        else:
            content_match = re.fullmatch(r'([\#\.]+)', line)
            if content_match:
                current_tile_contents.append([1 if c == '#' else 0 for c in line])
            else:
                raise Exception("unexpected content: '{}' on line {}".format(line, line_count))
    # Flush a trailing tile that was not followed by a blank line.
    if current_tile_id is not None and current_tile_contents:
        tiles.add(Tile(current_tile_id, current_tile_contents))
    return tiles
# Script entry point: solve both parts against the adjacent input file and
# assert the author's known-correct answers as a regression check.
if __name__ == '__main__':
    # NOTE(review): rstrip('.py') strips *characters* ('.', 'p', 'y'), not the
    # suffix; it works for this filename but would over-strip names ending in
    # 'p'/'y' - confirm before reusing.
    input_filename = __file__.rstrip('.py') + '_input.txt'
    with open(input_filename, 'r') as file:
        data = parse_input(file)
    puzzle_builder = PuzzleBuilder(data)
    puzzle_builder.build()
    part_1 = puzzle_builder.corner_tile_ids_product()
    assert part_1 == 32287787075651
    print("The solution to Part 1 is {}".format(part_1))
    ordered_tiles = puzzle_builder.ordered_tiles()
    combined_tile = generate_combined_tile(ordered_tiles)
    part_2 = count_non_sea_monster_positives(combined_tile)
    assert part_2 == 1939
    print("The solution to Part 2 is {}".format(part_2))
| 41.256637 | 117 | 0.568712 | 9,317 | 0.666166 | 0 | 0 | 0 | 0 | 0 | 0 | 613 | 0.04383 |
4f7eadeff29e825eecd011fae2e486b757106c64 | 7,752 | py | Python | experiments/SciTail_NLI/benchmark_paraphrases_generation.py | matejklemen/paraphrase-nli | d31ecba7e54eea4c3b098a2de7bc35f538e4e6dc | [
"MIT"
] | null | null | null | experiments/SciTail_NLI/benchmark_paraphrases_generation.py | matejklemen/paraphrase-nli | d31ecba7e54eea4c3b098a2de7bc35f538e4e6dc | [
"MIT"
] | null | null | null | experiments/SciTail_NLI/benchmark_paraphrases_generation.py | matejklemen/paraphrase-nli | d31ecba7e54eea4c3b098a2de7bc35f538e4e6dc | [
"MIT"
] | null | null | null | import json
import logging
import os
import sys
from argparse import ArgumentParser
import re
import numpy as np
import pandas as pd
import torch
from transformers import GPT2Tokenizer
from src.data.cleaning import mask_not_na, inds_unique, mask_long_enough
from src.data.nli import TransformersSeqPairDataset
from src.models.pg_trainer import AutoregressivePGTrainer
# Command-line configuration for the paraphrase-generation benchmark.
parser = ArgumentParser()
parser.add_argument("--experiment_dir", type=str, default="debug")
parser.add_argument("--paraphrase_path", type=str,
                    default="/home/matej/Documents/paraphrase-nli/experiments/SciTail_NLI/PARAPHRASE_IDENTIFICATION/id-scitail-roberta-base-argmax/all_para_id.csv")
parser.add_argument("--pretrained_name_or_path", type=str, default="gpt2")
parser.add_argument("--model_type", type=str, default="gpt2",
                    choices=["gpt2"])
parser.add_argument("--num_epochs", type=int, default=10)
parser.add_argument("--max_seq_len", type=int, default=79)
parser.add_argument("--batch_size", type=int, default=8)
parser.add_argument("--learning_rate", type=float, default=2e-5)
parser.add_argument("--early_stopping_rounds", type=int, default=5)
parser.add_argument("--validate_every_n_examples", type=int, default=5000)
parser.add_argument("--random_seed", type=int, default=17)
parser.add_argument("--use_cpu", action="store_true")
# Script entry point: fine-tune a GPT-2 paraphrase generator on identified
# paraphrase pairs, then decode dev/test paraphrases with several strategies.
if __name__ == "__main__":
    args = parser.parse_args()
    # NOTE(review): DEVICE is computed but the trainer below receives the
    # device as a plain string - confirm DEVICE is needed at all.
    DEVICE = torch.device("cpu") if args.use_cpu else torch.device("cuda")
    if not os.path.exists(args.experiment_dir):
        os.makedirs(args.experiment_dir)
    if args.random_seed is not None:
        np.random.seed(args.random_seed)
        torch.manual_seed(args.random_seed)
    # Persist the run configuration for reproducibility.
    with open(os.path.join(args.experiment_dir, "experiment_config.json"), "w") as f:
        json.dump(vars(args), fp=f, indent=4)

    # Set up logging to file and stdout
    logger = logging.getLogger()
    logger.setLevel(logging.INFO)
    for curr_handler in [logging.StreamHandler(sys.stdout),
                         logging.FileHandler(os.path.join(args.experiment_dir, "experiment.log"))]:
        curr_handler.setFormatter(logging.Formatter("%(asctime)s [%(levelname)-5.5s] %(message)s"))
        logger.addHandler(curr_handler)

    # Log the effective configuration; long values are truncated to 50 chars.
    for k, v in vars(args).items():
        v_str = str(v)
        v_str = f"...{v_str[-(50 - 3):]}" if len(v_str) > 50 else v_str
        logging.info(f"|{k:30s}|{v_str:50s}|")

    # No AutoTokenizerFast at the moment?
    if args.model_type == "gpt2":
        tokenizer_cls = GPT2Tokenizer
    else:
        raise NotImplementedError(f"Model_type '{args.model_type}' is not supported")

    # Extend the vocabulary with EOS/PAD and the <PARA> separator token.
    tokenizer = tokenizer_cls.from_pretrained(args.pretrained_name_or_path)
    tokenizer.add_special_tokens({
        "eos_token": "<EOS>",
        "pad_token": "<PAD>",
        "additional_special_tokens": ["<PARA>"]
    })
    tokenizer.save_pretrained(args.experiment_dir)
    SEPARATOR_ID = int(tokenizer.encode("<PARA>", add_special_tokens=False)[0])

    df = pd.read_csv(args.paraphrase_path)
    # Basic data cleaning - remove NAs (?), duplicate pairs, pairs with one sequence very short
    df = df.loc[mask_not_na(df["sequence1"], df["sequence2"])]
    df = df.iloc[inds_unique(df["sequence1"], df["sequence2"])]
    df = df.loc[mask_long_enough(df["sequence1"], df["sequence2"])]
    # Keep only positive (paraphrase) pairs.
    df = df.loc[df["label"] == 1].reset_index(drop=True)

    # Training text layout: "<seq1> <PARA> <seq2> <EOS>".
    df["formatted"] = list(map(
        lambda pair: f"{pair[0]} <PARA> {pair[1]} {tokenizer.eos_token}",
        zip(df["sequence1"].tolist(), df["sequence2"].tolist())
    ))

    # Random 70/15/15 train/dev/test split.
    num_ex = df.shape[0]
    indices = np.random.permutation(num_ex)
    train_df = df.iloc[indices[:int(0.7 * num_ex)]]
    dev_df = df.iloc[indices[int(0.7 * num_ex): int(0.85 * num_ex)]]
    test_df = df.iloc[indices[int(0.85 * num_ex):]]

    train_df.drop("formatted", axis=1).to_csv(os.path.join(args.experiment_dir, "train.csv"), sep=",", index=False)
    dev_df.drop("formatted", axis=1).to_csv(os.path.join(args.experiment_dir, "dev.csv"), sep=",", index=False)
    test_df.drop("formatted", axis=1).to_csv(os.path.join(args.experiment_dir, "test.csv"), sep=",", index=False)

    _encoded_train = tokenizer.batch_encode_plus(
        train_df["formatted"].tolist(),
        max_length=args.max_seq_len, padding="max_length", truncation="longest_first", return_tensors="pt"
    )
    # Mask the prompt (every token up to and including <PARA>) with -100 so
    # the LM loss is computed only on the paraphrase part.
    _train_labels = _encoded_train["input_ids"].clone()
    for idx_ex in range(_train_labels.shape[0]):
        for idx_token in range(args.max_seq_len):
            _train_labels[idx_ex, idx_token] = -100
            if _encoded_train["input_ids"][idx_ex, idx_token] == SEPARATOR_ID:
                break
    _encoded_train["labels"] = _train_labels

    _encoded_dev = tokenizer.batch_encode_plus(
        dev_df["formatted"].tolist(),
        max_length=args.max_seq_len, padding="max_length", truncation="longest_first", return_tensors="pt"
    )
    # Same prompt-masking as for the training labels.
    _dev_labels = _encoded_dev["input_ids"].clone()
    for idx_ex in range(_dev_labels.shape[0]):
        for idx_token in range(args.max_seq_len):
            _dev_labels[idx_ex, idx_token] = -100
            if _encoded_dev["input_ids"][idx_ex, idx_token] == SEPARATOR_ID:
                break
    _encoded_dev["labels"] = _dev_labels

    _encoded_test = tokenizer.batch_encode_plus(
        test_df["formatted"].tolist(),
        max_length=args.max_seq_len, padding="max_length", truncation="longest_first", return_tensors="pt"
    )
    # Same prompt-masking as for the training labels.
    _test_labels = _encoded_test["input_ids"].clone()
    for idx_ex in range(_test_labels.shape[0]):
        for idx_token in range(args.max_seq_len):
            _test_labels[idx_ex, idx_token] = -100
            if _encoded_test["input_ids"][idx_ex, idx_token] == SEPARATOR_ID:
                break
    _encoded_test["labels"] = _test_labels

    train_set = TransformersSeqPairDataset(**_encoded_train)
    dev_set = TransformersSeqPairDataset(**_encoded_dev)
    test_set = TransformersSeqPairDataset(**_encoded_test)
    logging.info(f"Loaded {len(train_set)} training examples, {len(dev_set)} dev examples and "
                 f"{len(test_set)} test examples")

    pg_trainer = AutoregressivePGTrainer(args.experiment_dir,
                                         pretrained_model_name_or_path=args.pretrained_name_or_path,
                                         tokenizer_path=args.experiment_dir,
                                         batch_size=args.batch_size,
                                         learning_rate=args.learning_rate,
                                         validate_every_n_steps=args.validate_every_n_examples,
                                         early_stopping_tol=args.early_stopping_rounds,
                                         device=("cuda" if not args.use_cpu else "cpu"))
    pg_trainer.run(train_dataset=train_set, val_dataset=dev_set, num_epochs=args.num_epochs)

    # Reload best model
    pg_trainer = AutoregressivePGTrainer.from_pretrained(args.experiment_dir)

    # Prompts end with the separator; the model continues with a paraphrase.
    dev_prompts = dev_df["sequence1"].apply(lambda s: f"{s} <PARA>")
    test_prompts = test_df["sequence1"].apply(lambda s: f"{s} <PARA>")

    # Write references and copied inputs for external evaluation tooling.
    dev_df["sequence2"].to_csv(os.path.join(args.experiment_dir, "dev_ref.txt"), sep=",", index=False, header=False)
    test_df["sequence2"].to_csv(os.path.join(args.experiment_dir, "test_ref.txt"), sep=",", index=False, header=False)

    dev_df["sequence1"].to_csv(os.path.join(args.experiment_dir, "dev_input_copy.txt"), sep=",", index=False, header=False)
    test_df["sequence1"].to_csv(os.path.join(args.experiment_dir, "test_input_copy.txt"), sep=",", index=False, header=False)

    # Decoding strategies to benchmark; kwargs are forwarded to generate().
    strategies = {
        "greedy": {},
        "beam": {"num_beams": 5, "early_stopping": True},
        "top_p": {"do_sample": True, "top_p": 0.9, "top_k": 0},
        "top_k": {"do_sample": True, "top_k": 10}
    }

    for curr_strat, strat_kwargs in strategies.items():
        dev_pred_para = pg_trainer.generate(dev_prompts.tolist(), max_seq_len=args.max_seq_len, strategy=strat_kwargs)
        # One hypothesis per line; newlines inside a prediction are flattened.
        with open(os.path.join(args.experiment_dir, f"dev_{curr_strat}_hyp.txt"), "w", encoding="utf-8") as f:
            for _txt in dev_pred_para:
                print(re.sub(r"(\n)+", " ", _txt.strip()), file=f)

        test_pred_para = pg_trainer.generate(test_prompts.tolist(), max_seq_len=args.max_seq_len, strategy=strat_kwargs)
        with open(os.path.join(args.experiment_dir, f"test_{curr_strat}_hyp.txt"), "w", encoding="utf-8") as f:
            for _txt in test_pred_para:
                print(re.sub(r"(\n)+", " ", _txt.strip()), file=f)
4f7ef52ff38ec211c689a86a821876bea757fb2f | 4,838 | py | Python | test.py | conema/distributed-TransE | 07de643bf8733e524e17a0ae4656ffdfd281b3a5 | [
"MIT"
] | 3 | 2021-02-11T11:22:16.000Z | 2021-05-27T03:22:57.000Z | test.py | conema/distributed-TransE | 07de643bf8733e524e17a0ae4656ffdfd281b3a5 | [
"MIT"
] | null | null | null | test.py | conema/distributed-TransE | 07de643bf8733e524e17a0ae4656ffdfd281b3a5 | [
"MIT"
] | 1 | 2021-04-21T15:13:47.000Z | 2021-04-21T15:13:47.000Z | import numpy as np
from pyspark.sql import SparkSession
from pyspark import SparkContext, SparkConf
from TransEmodule import utils
def check_entities(x, map):
    """Look up `x` in the mapping, returning None when it is absent."""
    return map.get(x)
def calculate_rankings(rank_list):
    """Reduce an RDD of 1-element rank arrays to (mean rank, hits@10 fraction)."""
    # Unwrap the single rank value from each array; cache since it is
    # traversed by two separate reduce jobs.
    ranks = rank_list.map(lambda entry: entry[0]).persist()

    rank_sum, rank_count = ranks.map(lambda r: (r, 1)).reduce(
        lambda a, b: (a[0] + b[0], a[1] + b[1]))
    mean = rank_sum / rank_count

    hit_sum, hit_count = ranks.map(lambda r: (1 if r <= 10 else 0, 1)).reduce(
        lambda a, b: (a[0] + b[0], a[1] + b[1]))
    hits = hit_sum / hit_count

    return mean, hits
def testing(partition, test_entities_to_id, test_labels_to_id,
            entities_to_id_map, label_to_id_map, entity_embedding,
            label_embedding):
    """Rank the true head/tail of each test triple against all entities.

    Runs on one Spark partition of (head, label, tail) test triples; all
    other arguments are Spark broadcast variables (hence the `.value`
    accesses). Returns a list of 1-element numpy arrays, each holding the
    0-based rank of the true entity under the squared-L2 TransE score.
    """
    rank_list = []
    i = 0
    for (h, l, t) in partition:
        # get train ids from testset ids; entities/labels unseen during
        # training map to None and the triple is skipped below.
        h_train = check_entities(utils.get_id_by_value(test_entities_to_id.value, h)[0],
                                 entities_to_id_map.value)
        l_train = check_entities(utils.get_id_by_value(test_labels_to_id.value, l)[0],
                                 label_to_id_map.value)
        t_train = check_entities(utils.get_id_by_value(test_entities_to_id.value, t)[0],
                                 entities_to_id_map.value)
        if h_train is None or l_train is None or t_train is None:
            continue
        # head prediction: score every candidate head e via ||e + l - t||^2
        corrupted_entities = entity_embedding.value.vector + label_embedding.value.vector[l_train] - entity_embedding.value.vector[t_train]
        distances = np.apply_along_axis(lambda x: np.sum(np.square(x)), 1, corrupted_entities)
        indices = np.argsort(distances)
        rank = np.where(indices == h_train)
        rank_list.append(rank[0])
        # tail prediction: score every candidate tail e via ||h + l - e||^2
        corrupted_entities = entity_embedding.value.vector[h_train] + label_embedding.value.vector[l_train]
        distances = np.apply_along_axis(lambda x: np.sum(np.square(corrupted_entities - x)), 1, entity_embedding.value.vector)
        indices = np.argsort(distances)
        rank = np.where(indices == t_train)
        rank_list.append(rank[0])
        # Periodic progress report: running mean rank and hits@10 (percent).
        if i % 50 == 0:
            rank_list_baby = np.concatenate(rank_list, axis=0)
            print("Mean: " + str(np.mean(rank_list_baby)))
            print("Hit: " + str(np.mean(rank_list_baby <= 10)*100))
            print(i)
        i += 1
    return rank_list
def test(testset, test_entities_to_id, test_labels_to_id,
         entities_to_id_map, label_to_id_map, entity_embedding,
         label_embedding):
    """Distribute TransE link-prediction evaluation over the cluster.

    Broadcasts the id maps and embeddings, ranks every test triple in
    parallel via `testing`, and returns (mean rank, hits@10 fraction).

    NOTE(review): relies on the module-global SparkContext `sc` created in
    the __main__ block below - this function is not usable standalone.
    """
    testset_rdd = sc.parallelize(testset).persist()

    # Broadcast read-only lookup data to all executors once.
    test_entities_BC = sc.broadcast(test_entities_to_id)
    test_labels_BC = sc.broadcast(test_labels_to_id)

    entities_embedding_BC = sc.broadcast(entity_embedding)
    labels_embedding_BC = sc.broadcast(label_embedding)

    entities_map_BC = sc.broadcast(entities_to_id_map)
    labels_map_BC = sc.broadcast(label_to_id_map)

    rank_list = testset_rdd.mapPartitions(lambda x: testing(x,
                                                            test_entities_BC,
                                                            test_labels_BC,
                                                            entities_map_BC,
                                                            labels_map_BC,
                                                            entities_embedding_BC,
                                                            labels_embedding_BC)
                                          )

    mean, hits = calculate_rankings(rank_list)

    return mean, hits
# Script entry point: connect to the Spark cluster, restore trained
# embeddings, load train/test id maps from HDFS and print evaluation metrics.
if __name__ == "__main__":
    # change the paths if you are not using
    # our terraform project!
    # create the session
    conf = SparkConf().setAll([("spark.worker.cleanup.enabled", True),
                               ("spark.serializer",
                                "org.apache.spark.serializer.KryoSerializer"),
                               ("spark.kryo.registrationRequired", "false"),
                               ("spark.master", "spark://s01:7077")])

    sc = SparkContext(conf=conf).getOrCreate()
    # Ship the packaged TransE module to the executors.
    sc.addPyFile('TransEmodule.zip')

    entity_embedding, label_embedding = utils.restore('/home/ubuntu/entity_embedding_999.pkl',
                                                      '/home/ubuntu/label_embedding_999.pkl')

    # The train set supplies the entity/label -> id maps used by the model;
    # the test set supplies the triples to rank.
    ds_to_id, entities_to_id_map, label_to_id_map = utils.load_dataset(sc, "hdfs://s01:9000/train2.tsv")
    testset, test_entities_to_id, test_labels_to_id = utils.load_dataset(sc, "hdfs://s01:9000/test2.tsv")

    mean, hits = test(testset, test_entities_to_id, test_labels_to_id,
                      entities_to_id_map, label_to_id_map,
                      entity_embedding, label_embedding)

    print("Mean: " + str(mean) + "\nHits@10: " + str(hits))
| 38.094488 | 139 | 0.590327 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 487 | 0.100661 |
4f80040acf6642b4b3a8529a3483b2576449af0d | 169 | py | Python | Test_cases/Generated_Python/loops.py | TY-Projects/Code-Converter | 9fee4e186e0be94741a38bb95abe32adb8957fde | [
"MIT"
] | null | null | null | Test_cases/Generated_Python/loops.py | TY-Projects/Code-Converter | 9fee4e186e0be94741a38bb95abe32adb8957fde | [
"MIT"
] | null | null | null | Test_cases/Generated_Python/loops.py | TY-Projects/Code-Converter | 9fee4e186e0be94741a38bb95abe32adb8957fde | [
"MIT"
def main():
    """Doubles `fib` (target - 1) times starting from 1, prints the result (512)."""
    fib = 1
    target = 10
    for _ in range(1, target):
        # Equivalent to the original temp/while loop: fib doubles each pass.
        fib += fib
    print(fib)
    return 0
# Script entry point.
if __name__ == '__main__':
    main()
| 10.5625 | 26 | 0.538462 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 10 | 0.059172 |
4f804d5a0e36342ad2a4e48d3e16c64fd9394410 | 746 | py | Python | unifrakturmaguntia/src/generate.py | graffitiMSX/msxpower-googlefontdirectory | 1e201a68c8181698c143279734c4677f194855d8 | [
"Apache-2.0"
] | 1 | 2016-06-05T07:51:16.000Z | 2016-06-05T07:51:16.000Z | unifrakturmaguntia/src/generate.py | graffitiMSX/msxpower-googlefontdirectory | 1e201a68c8181698c143279734c4677f194855d8 | [
"Apache-2.0"
] | null | null | null | unifrakturmaguntia/src/generate.py | graffitiMSX/msxpower-googlefontdirectory | 1e201a68c8181698c143279734c4677f194855d8 | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/python
#
# With the SFDs in the current directory, run this with
# $ python generate.py
#
# Builds a .ttf for each FontForge .sfd source listed in `files`.
# Fixes: use print() calls throughout (the old `print fontforge.version()`
# statement was Python-2-only, and `print ("Building ") + f.fullname + ...`
# printed only "Building " and then raised TypeError on Python 3); call
# f.close() properly (bare `f.close` was a no-op) and only after the font
# is no longer accessed.

import fontforge, sys

required_version = "20090923"

# Warn (but do not abort) when the FontForge build is too old.
if fontforge.version() < required_version:
    print("Your version of FontForge is too old - %s or newer is required" % (required_version))
    print("Current fontforge version:")
    print(fontforge.version())

files = [
    'UnifrakturMaguntia.sfd',
]

# smart features in fea/gdl/mif sources to be integrated into the buildpath

for font in files:
    f = fontforge.open(font)
    print("Building " + f.fullname + " " + f.weight + " from sfd sources with fontforge")
    f.generate(f.fontname + '-' + f.weight + '.ttf')
    print("font version:")
    print(f.version)
    f.close()  # release the font only after all attribute accesses

print("Done")
| 24.064516 | 98 | 0.686327 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 377 | 0.505362 |
4f80e45d4e32c8be3ef561b50ff2444d16c0afd8 | 1,790 | py | Python | xgboost-0.6-py3.6.egg/xgboost/dmlc-core/tracker/dmlc_tracker/sge.py | EnjoyLifeFund/macHighSierra-py36-pkgs | 5668b5785296b314ea1321057420bcd077dba9ea | [
"BSD-3-Clause",
"BSD-2-Clause",
"MIT"
] | null | null | null | xgboost-0.6-py3.6.egg/xgboost/dmlc-core/tracker/dmlc_tracker/sge.py | EnjoyLifeFund/macHighSierra-py36-pkgs | 5668b5785296b314ea1321057420bcd077dba9ea | [
"BSD-3-Clause",
"BSD-2-Clause",
"MIT"
] | null | null | null | xgboost-0.6-py3.6.egg/xgboost/dmlc-core/tracker/dmlc_tracker/sge.py | EnjoyLifeFund/macHighSierra-py36-pkgs | 5668b5785296b314ea1321057420bcd077dba9ea | [
"BSD-3-Clause",
"BSD-2-Clause",
"MIT"
] | null | null | null | """Submit jobs to Sun Grid Engine."""
# pylint: disable=invalid-name
import os
import subprocess
from . import tracker
def submit(args):
"""Job submission script for SGE."""
if args.jobname is None:
args.jobname = ('dmlc%d.' % args.num_workers) + args.command[0].split('/')[-1]
if args.sge_log_dir is None:
args.sge_log_dir = args.jobname + '.log'
if os.path.exists(args.sge_log_dir):
if not os.path.isdir(args.sge_log_dir):
raise RuntimeError('specified --sge-log-dir %s is not a dir' % args.sge_log_dir)
else:
os.mkdir(args.sge_log_dir)
runscript = '%s/rundmlc.sh' % args.logdir
fo = open(runscript, 'w')
fo.write('source ~/.bashrc\n')
fo.write('export DMLC_TASK_ID=${SGE_TASK_ID}\n')
fo.write('export DMLC_JOB_CLUSTER=sge\n')
fo.write('\"$@\"\n')
fo.close()
def sge_submit(nworker, nserver, pass_envs):
"""Internal submission function."""
env_arg = ','.join(['%s=\"%s\"' % (k, str(v)) for k, v in list(pass_envs.items())])
cmd = 'qsub -cwd -t 1-%d -S /bin/bash' % (nworker + nserver)
if args.queue != 'default':
cmd += '-q %s' % args.queue
cmd += ' -N %s ' % args.jobname
cmd += ' -e %s -o %s' % (args.logdir, args.logdir)
cmd += ' -pe orte %d' % (args.vcores)
cmd += ' -v %s,PATH=${PATH}:.' % env_arg
cmd += ' %s %s' % (runscript, ' '.join(args.command))
print(cmd)
subprocess.check_call(cmd, shell=True)
print('Waiting for the jobs to get up...')
# call submit, with nslave, the commands to run each job and submit function
tracker.submit(args.num_workers, args.num_servers,
fun_submit=sge_submit,
pscmd=' '.join(args.command))
| 36.530612 | 92 | 0.581006 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 561 | 0.313408 |
4f83b5b21aeb93464b101c16597a50e5af097412 | 443 | py | Python | Week 3/id_755/LeetCode_455_755.py | larryRishi/algorithm004-05 | e60d0b1176acd32a9184b215e36d4122ba0b6263 | [
"Apache-2.0"
] | 1 | 2019-10-12T06:48:45.000Z | 2019-10-12T06:48:45.000Z | Week 3/id_755/LeetCode_455_755.py | larryRishi/algorithm004-05 | e60d0b1176acd32a9184b215e36d4122ba0b6263 | [
"Apache-2.0"
] | 1 | 2019-12-01T10:02:03.000Z | 2019-12-01T10:02:03.000Z | Week 3/id_755/LeetCode_455_755.py | larryRishi/algorithm004-05 | e60d0b1176acd32a9184b215e36d4122ba0b6263 | [
"Apache-2.0"
] | null | null | null | class Solution:
def findContentChildren(self, g: List[int], s: List[int]) -> int:
g.sort()
s.sort()
greed_p = 0
size_p = 0
count = 0
while greed_p < len(g) and size_p < len(s):
if g[greed_p] <= s[size_p]:
count += 1
greed_p += 1
size_p += 1
elif g[greed_p] > s[size_p]:
size_p += 1
return count
| 27.6875 | 69 | 0.435666 | 442 | 0.997743 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
4f854d880deadf16219033650ff85578caf3e5b5 | 6,473 | py | Python | MindElec/examples/physics_driven/time_domain_maxwell/src/utils.py | mindspore-ai/mindscience | b5269245915695de2d99fb290fef662c241db189 | [
"Apache-2.0"
] | 3 | 2021-11-10T06:17:50.000Z | 2022-03-21T14:25:30.000Z | tests/st/mindelec/networks/test_time_domain_maxwell/src/utils.py | mindspore-ai/mindscience | b5269245915695de2d99fb290fef662c241db189 | [
"Apache-2.0"
] | null | null | null | tests/st/mindelec/networks/test_time_domain_maxwell/src/utils.py | mindspore-ai/mindscience | b5269245915695de2d99fb290fef662c241db189 | [
"Apache-2.0"
] | 1 | 2021-12-05T11:41:29.000Z | 2021-12-05T11:41:29.000Z | # Copyright 2021 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""visualization of field quantities"""
import os
import copy
import io
import cv2
import PIL
import numpy as np
import matplotlib.gridspec as gridspec
import matplotlib.pyplot as plt
from mpl_toolkits.axes_grid1 import make_axes_locatable, axes_size
plt.rcParams['figure.dpi'] = 300
def visual_result(input_data, label, predict, path, name):
    """Render the raw field and a label-normalized version of it.

    Produces two videos via ``visual``: one with the original field values
    and one where each channel of ``label`` and ``predict`` is rescaled to
    roughly [-1, 1] using the label's min/max, so both fields share the same
    color scale.

    Args:
        input_data: sampled coordinate tensor, shape (t, x, y, dims).
        label: ground-truth field, shape (t, x, y, 3) for (Ex, Ey, Hz).
        predict: predicted field, same shape as ``label``.
        path: output directory for the rendered videos.
        name: tag appended to the output file names.
    """
    # Deep-copy so the in-place rescaling below never mutates caller data.
    input_data = copy.deepcopy(input_data)
    label = copy.deepcopy(label)
    predict = copy.deepcopy(predict)
    visual(input_data, label, predict, path, name)

    # Normalize each of the three channels (Ex, Ey, Hz) to [-1, 1] based on
    # the label's value range; the identical affine map is applied to the
    # prediction so the two stay directly comparable.
    for ch in range(3):
        ch_min = label[:, :, :, ch].min()
        ch_max = label[:, :, :, ch].max()
        if ch_min == ch_max:
            # Constant channel: widen the range to avoid division by zero.
            ch_min = ch_min - 1
            ch_max = ch_max + 1
        mid = np.mean([ch_max, ch_min])
        span = ch_max - ch_min
        label[:, :, :, ch] = 2 * (label[:, :, :, ch] - mid) / span
        predict[:, :, :, ch] = 2 * (predict[:, :, :, ch] - mid) / span

    # Output-name suffix kept as-is (historic typo) so file names stay stable.
    visual(input_data, label, predict, path, str(name) + "_normlize")
def visual(input_data, label, predict, path, name):
    """Render label/prediction/error frames for Ex/Ey/Hz into an AVI video.

    For each selected time step a 3x3 matplotlib grid is drawn
    (rows: label, predict, error; columns: Ex, Ey, Hz) and appended as one
    frame to ``EH_<name>.avi`` under ``path`` (directory created if missing).

    Args:
        input_data: sampled tensor of shape (t, x, y, dims); only its shape
            is used here, to size the reshaped 2-D slices.
        label: ground-truth field, shape (t, x, y, 3).
        predict: predicted field, same shape as ``label``.
        path: output directory for the video.
        name: tag used in the output file name.
    """
    [sample_t, sample_x, sample_y, _] = np.shape(input_data)
    # Clip the color scale to the 0.5-99.5 percentile range of the labels so
    # a few outliers do not wash out the color map.
    ex_vmin, ex_vmax = np.percentile(label[:, :, :, 0], [0.5, 99.5])
    ey_vmin, ey_vmax = np.percentile(label[:, :, :, 1], [0.5, 99.5])
    hz_vmin, hz_vmax = np.percentile(label[:, :, :, 2], [0.5, 99.5])
    vmin_list = [ex_vmin, ey_vmin, hz_vmin]
    vmax_list = [ex_vmax, ey_vmax, hz_vmax]
    # Error denominators fixed at 1.0, so the "error" rows below are plain
    # absolute errors (the names suggest a mean-|label| normalization was
    # once intended).
    mean_abs_ex_label = 1.0
    mean_abs_ey_label = 1.0
    mean_abs_hz_label = 1.0
    output_names = ["Ex", "Ey", "Hz"]
    if not os.path.isdir(path):
        os.makedirs(path)
    fourcc = cv2.VideoWriter_fourcc('D', 'I', 'V', 'X')
    fps = 10
    # NOTE(review): frame size is hard-coded; it must match the pixel size of
    # the saved matplotlib figure (figure.dpi is set to 300 at module level)
    # or OpenCV may silently drop frames - verify if output videos are empty.
    size = (1920, 1440)
    video = cv2.VideoWriter(os.path.join(path, "EH_" + str(name) + ".avi"), fourcc, fps, size)
    # Select which time steps become frames: all of them for short runs,
    # otherwise roughly 20 evenly spaced steps plus the final one.
    t_set = []
    if sample_t < 100:
        t_set = np.arange(sample_t, dtype=np.int32)
    else:
        for t in range(sample_t):
            if t % int(sample_t / 20) == 0 or t == sample_t - 1:
                t_set.append(t)
    for t in t_set:
        # Slice out each channel at this time step.
        ex_label = label[t, :, :, 0]
        ey_label = label[t, :, :, 1]
        hz_label = label[t, :, :, 2]
        ex_predict = predict[t, :, :, 0]
        ey_predict = predict[t, :, :, 1]
        hz_predict = predict[t, :, :, 2]
        # Reshape to explicit (x, y) grids for imshow.
        ex_label_2d = np.reshape(np.array(ex_label), (sample_x, sample_y))
        ey_label_2d = np.reshape(np.array(ey_label), (sample_x, sample_y))
        hz_label_2d = np.reshape(np.array(hz_label), (sample_x, sample_y))
        ex_predict_2d = np.reshape(np.array(ex_predict), (sample_x, sample_y))
        ey_predict_2d = np.reshape(np.array(ey_predict), (sample_x, sample_y))
        hz_predict_2d = np.reshape(np.array(hz_predict), (sample_x, sample_y))
        ex_error_2d = np.abs(ex_predict_2d - ex_label_2d) / mean_abs_ex_label
        ey_error_2d = np.abs(ey_predict_2d - ey_label_2d) / mean_abs_ey_label
        hz_error_2d = np.abs(hz_predict_2d - hz_label_2d) / mean_abs_hz_label
        label_2d = [ex_label_2d, ey_label_2d, hz_label_2d]
        predict_2d = [ex_predict_2d, ey_predict_2d, hz_predict_2d]
        error_2d = [ex_error_2d, ey_error_2d, hz_error_2d]
        lpe_2d = [label_2d, predict_2d, error_2d]
        lpe_names = ["label", "predict", "error"]
        # One 3x3 figure per frame: rows = label/predict/error, cols = Ex/Ey/Hz.
        fig = plt.figure()
        gs = gridspec.GridSpec(3, 3)
        title = "t={:d}".format(t)
        plt.suptitle(title, fontsize=14)
        gs_idx = int(0)
        for i, data_2d in enumerate(lpe_2d):
            for j, data in enumerate(data_2d):
                ax = fig.add_subplot(gs[gs_idx])
                gs_idx += 1
                # Error panels use a fixed [0, 1] scale; label/predict panels
                # share the percentile-clipped scale computed above.
                if lpe_names[i] == "error":
                    img = ax.imshow(data.T, vmin=0, vmax=1,
                                    cmap=plt.get_cmap("jet"), origin='lower')
                else:
                    img = ax.imshow(data.T, vmin=vmin_list[j], vmax=vmax_list[j],
                                    cmap=plt.get_cmap("jet"), origin='lower')
                ax.set_title(output_names[j] + " " + lpe_names[i], fontsize=4)
                plt.xticks(size=4)
                plt.yticks(size=4)
                # Attach a thin colorbar whose height tracks the axes height.
                aspect = 20
                pad_fraction = 0.5
                divider = make_axes_locatable(ax)
                width = axes_size.AxesY(ax, aspect=1/aspect)
                pad = axes_size.Fraction(pad_fraction, width)
                cax = divider.append_axes("right", size=width, pad=pad)
                cb = plt.colorbar(img, cax=cax)
                cb.ax.tick_params(labelsize=4)
        gs.tight_layout(fig, pad=0.4, w_pad=0.4, h_pad=0.4)
        # Save the figure into an in-memory JPEG, decode it with PIL and feed
        # the pixel array to the video writer (avoids temp files on disk).
        buffer_ = io.BytesIO()
        fig.savefig(buffer_, format="jpg")
        buffer_.seek(0)
        image = PIL.Image.open(buffer_)
        video.write(np.asarray(image))
        buffer_.close()
        plt.close()
    video.release()
| 37.201149 | 100 | 0.560482 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1,000 | 0.15425 |
4f86096694d847caa47a97bddc557947b0f8dce5 | 6,968 | py | Python | Code/CBEExamples.py | andreschristen/BA_Dilution_Series | b9de412f5e71770a056c0412249ef8ce7764cf15 | [
"MIT"
] | null | null | null | Code/CBEExamples.py | andreschristen/BA_Dilution_Series | b9de412f5e71770a056c0412249ef8ce7764cf15 | [
"MIT"
] | null | null | null | Code/CBEExamples.py | andreschristen/BA_Dilution_Series | b9de412f5e71770a056c0412249ef8ce7764cf15 | [
"MIT"
] | 1 | 2021-04-16T15:01:33.000Z | 2021-04-16T15:01:33.000Z | # -*- coding: utf-8 -*-
"""
Created on Mon 2018.02.28:00:00:00
@author: Nigel Orlando Santillán Morales
Abstrac:
Examples of experiments from different labs.
"""
######################################################################################################################################################################################
from pickle import load, dump
from pandas import read_excel
from pylab import plot, rc, figure, close, savefig, xlim, ylabel, xlabel, tight_layout, legend
rc('font', size=18)
from CBEDataAnalysis import AnaTimeData, PlotIndPost, AnaBF
from DilExp import GetKDE
######################################################################################################################################################################################
"""
EXAMPLES: Intra Lab Experiment.
"""
##################### CBE data #######################
### Three repetitions, ten plated drops in two petri dishes, default values.
# Experiment: Times:
# Each entry: [spreadsheet sheet name, exposure times (minutes) analyzed for
# that temperature, matplotlib color used in the activation-probability plot].
CBE =\
[['RoomTemp', [ 1, 10, 15], "blue"],\
[ '65CTemp', [15, 30, 45, 60, 90], "yellow"],\
[ '70CTemp', [10, 20, 30, 40, 60], "orange"],\
[ '75CTemp', [10, 20], "red"],\
[ '80CTemp', [ 1, 2], "firebrick"]]
def ExpPlots( CBEData, experiment, time, plot_ind=False, K=None, betabinom=False):
    """Run the time-series analysis for one experiment/time and save plots.

    Args:
        CBEData: dict of DataFrames keyed by spreadsheet sheet name
            (as returned by ``read_excel`` with a sheet list).
        experiment: sheet/experiment name, e.g. '70CTemp'.
        time: exposure time in minutes (used in the output file names).
        plot_ind: when True, also plot the individual posteriors.
        K: repetition indices passed to ``PlotIndPost``; defaults to [0, 1].
            (Default is built inside the function to avoid the shared
            mutable-default-argument pitfall.)
        betabinom: when True, use the beta-binomial variant and tag the
            results file name with '_bbinom'.

    Returns:
        The MultiDilExp object produced by ``AnaTimeData``.
    """
    if K is None:
        K = [0, 1]
    md = AnaTimeData( CBEData=CBEData, experiment=experiment, time=time, T=500000, fig=[0,1], betabinom=betabinom)
    figure(0)
    tight_layout()
    savefig("../Images/%s_%dmin_TS.png" % (experiment, time))
    if plot_ind:
        if betabinom:
            # Green marks results obtained without the hierarchical model.
            PlotIndPost(md, fig=[1,2], K=K, color="green")
        else:
            PlotIndPost(md, fig=[1,2], K=K)
        figure(2)
        tight_layout()
        savefig("../Images/%s_%dmin_IndPosts.jpg" % (experiment, time))
    figure(1)
    tight_layout()
    if betabinom:
        savefig("../Images/%s_%dmin_bbinom_Results.jpg" % (experiment, time))
    else:
        savefig("../Images/%s_%dmin_Results.jpg" % (experiment, time))
    return md
if __name__ == '__main__':
    # Load every experiment sheet at once; CBEData maps sheet name -> DataFrame.
    CBEData = read_excel( './Data/CBE_BiofilmHotWaterStudies.xls',\
        ['RoomTemp', '65CTemp', '70CTemp', '75CTemp', '80CTemp'])
    ### Data taken from spreadsheet: "Biofilm Hot Water Studies.xlsx"
    ### NOTE: The RoomTemp experiment corresponds to the control of the 80C experiment.
    ### md = ExpPlots(...) is a MultiDilExp object, see DilExp
    ### md.d[0].y is the data for repetition 0 etc.
    ### The simulated values of E are available in md.TwalkE()
    ### Run all data BF's with beta-binomial
    rt_CBE =[]
    print("%16s, %2s, %16s, %16s, %16s, %16s" %\
        ( "experiment", "k" , "bbinom" , "binom" , "Prob binom", "BF"))
    # Bayes-factor table over every (experiment, time) combination in CBE.
    for item in CBE:
        experiment= item[0]
        for time in item[1]:
            rt_CBE += AnaBF( CBEData=CBEData, experiment=experiment, time=time)
    dump( rt_CBE, open("CBE_rt_CBE.pkl", "wb")) #To be used by InterLabExamples in the BF plot
    All = {} ### Dictionary to hold MCMC iterations of E for all experiments
    # 70C / 10 min example run (figures 0-2 are reused across runs, hence the
    # close() calls before each analysis).
    experiment='70CTemp'
    time=10
    close(1)
    close(2)
    md = ExpPlots( CBEData=CBEData, experiment=experiment, time=time,\
        betabinom=False)#, plot_ind=True, K=[0,1,2])
    #md_bb = ExpPlots( CBEData=CBEData, experiment=experiment, time=time, betabinom=True)#, plot_ind=True, K=[0,1,2])
    All[experiment] = {}
    All[experiment][time.__str__() + 'min'] = md.TwalkE()
    ###### Figs 3 and 4 take some 5 min to run
    ### Plots for Fig. 3
    experiment='65CTemp'
    time=15
    close(1)
    close(2)
    md = ExpPlots( CBEData=CBEData, experiment=experiment, time=time)
    All[experiment] = {}
    All[experiment][time.__str__() + 'min'] = md.TwalkE()
    experiment='75CTemp'
    time=10
    close(1)
    close(2)
    md = ExpPlots( CBEData=CBEData, experiment=experiment, time=time)
    All[experiment] = {}
    All[experiment][time.__str__() + 'min'] = md.TwalkE()
    ### This last one is also needed for fig. 4
    experiment='RoomTemp'
    time=15
    close(1)
    close(2)
    # mdControl is the room-temperature control, used below for log reduction.
    mdControl = ExpPlots( CBEData=CBEData, experiment=experiment, time=time)
    All[experiment] = {}
    All[experiment][time.__str__() + 'min'] = mdControl.TwalkE()
    ### Plots for Fig. 4
    experiment='80CTemp'
    time=2
    close(1)
    close(2)
    md = ExpPlots( CBEData=CBEData, experiment=experiment, time=time, plot_ind=True)
    All[experiment] = {}
    All[experiment][time.__str__() + 'min'] = md.TwalkE()
    # Re-save figures 1 and 2 with tighter axis limits for publication.
    figure(1)
    xlim((-0.5,3.5))
    tight_layout()
    savefig("../Images/%s_%dmin_Results.jpg" % (experiment, time))
    figure(2)
    xlim((0,900))
    tight_layout()
    savefig("../Images/%s_%dmin_IndPosts.jpg" % (experiment, time))
    ### Log reduction wrt RoomTemp experiment
    close(1)
    figure(1)
    # LR: posterior samples of the log10 reduction (control minus treatment).
    LR = mdControl.TwalkE() - md.TwalkE()
    e, kde = GetKDE( LR, alpha=0.0000001)
    plot( e, kde, 'k-')
    ylabel("Density")
    xlabel(r"$log_{10}\left(\frac{CFU_0 + 1}{CFU + 1}\right)$")
    xlim(( 4, 9))
    tight_layout()
    savefig("../Images/%s_%dmin_LR.jpg" % (experiment, time))
    print("%s_%s, $P[ LR > 3 ] = %6.4f" % (experiment, time, sum(LR > 3)/len(LR)))
    ### Activation threshold figure: Takes longer, some 15 min
    ### We load the data from "CBEallE.pkl" below ########
    ### Remaining experiments:
    # CBE_R lists the (experiment, times) combinations not analyzed above.
    CBE_R=[\
    ['RoomTemp', [ 10, 15]],\
    [ '65CTemp', [ 30, 45, 60, 90]],\
    [ '70CTemp', [ 20, 30, 40, 60]],\
    [ '75CTemp', [ 20]],\
    [ '80CTemp', [ 1]]]
    for ex in CBE_R:
        experiment = ex[0]
        for time in ex[1]:
            md = ExpPlots( CBEData=CBEData, experiment=experiment, time=time)
            All[experiment][time.__str__() + 'min'] = md.TwalkE()
            #print("All[%s][%s]" % ( experiment, time.__str__() + 'min'))
    # Cache all posterior E samples so the figure can be rebuilt without MCMC.
    dump( All, open("CBEallE.pkl", "wb"))
    # Rebind CBE (shadows the module-level constant) with the full time lists
    # and plot colors for the activation-probability figure.
    CBE =\
    [['RoomTemp', [ 1, 10, 15], "blue"],\
    [ '65CTemp', [15, 30, 45, 60, 90], "yellow"],\
    [ '70CTemp', [10, 20, 30, 40, 60], "orange"],\
    [ '75CTemp', [10, 20], "red"],\
    [ '80CTemp', [ 1, 2], "firebrick"]]
    #All = load(open("CBEallE.pkl", "rb"))
    close(2)
    figure(2)
    eh = 2 ### Threshold for E
    # Activation probability = posterior P[E < eh] for each experiment/time;
    # the result list is appended onto each CBE row (ex becomes 4 elements).
    for ex in CBE:
        experiment = ex[0]
        act_prob = []
        for time in ex[1]:
            act_prob += [sum(All[experiment][time.__str__() + 'min'] < eh)/len(All[experiment][time.__str__() + 'min'])]
        ex += [act_prob]
    # Skip CBE[0] (RoomTemp control) in the final temperature plot.
    for ex in CBE[1:]:
        experiment, time, color, act_prob = ex
        plot( time, act_prob, '-o', color=color, label=experiment[:2] + r" $^o$C")
    legend(fontsize=14) #(loc=( 61, 0.3))
    xlabel("min")
    ylabel("Act. Probability")
    tight_layout()
    savefig("../Images/ActivationProbability.jpg")
| 35.015075 | 183 | 0.53961 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2,742 | 0.393457 |
4f867e1c4eeed274461bae0c3be4c88d43b70d6d | 6,985 | py | Python | org_manager_form.py | MervmessInc/sfdx_scratch_org_builder | c592e5d1842a2567c2ea57e4671dd8bb317ef619 | [
"MIT"
] | null | null | null | org_manager_form.py | MervmessInc/sfdx_scratch_org_builder | c592e5d1842a2567c2ea57e4671dd8bb317ef619 | [
"MIT"
] | null | null | null | org_manager_form.py | MervmessInc/sfdx_scratch_org_builder | c592e5d1842a2567c2ea57e4671dd8bb317ef619 | [
"MIT"
] | null | null | null | from asciimatics.widgets import (
Button,
Divider,
Frame,
Layout,
RadioButtons,
Text,
)
from asciimatics.event import KeyboardEvent, MouseEvent
from asciimatics.scene import Scene
from asciimatics.screen import Screen
from asciimatics.exceptions import (
ResizeScreenError,
StopApplication,
)
# from asciimatics.parsers import AsciimaticsParser
import json
import os
import sys
import logging
import threading
import console_mode as console_mode
from sf_org_manager import sfdx_cli_utils as sfdx
logging.basicConfig(filename="org_manager_form.log", level=logging.WARN)
# Config
#
# ANSI escape sequences used to colorize the org status column in the
# radio-button labels built by get_org_options().
TGREEN = "\033[1;32m"  # bright green: active orgs
TRED = "\033[1;31m"  # bright red: non-active orgs
ENDC = "\033[m"  # reset all attributes
#
#
def clean_org_data(org):
    """Fill in missing optional fields on an sfdx org record, in place.

    The sfdx org list omits some keys depending on org type and CLI version;
    downstream code indexes them unconditionally, so supply safe defaults.

    Args:
        org: dict describing one org, mutated in place.

    Returns:
        The same dict, for convenient chaining.
    """
    defaults = {
        "alias": "",
        "isDevHub": False,
        "defaultMarker": "",
        "status": "Active",
        "expirationDate": "",
    }
    # setdefault only writes keys that are absent, matching the original
    # "if key not in org: org.update(...)" blocks without the repetition.
    for key, value in defaults.items():
        org.setdefault(key, value)
    return org
def update_org_list():
    """Fetch the org list from the sfdx CLI, cache it to disk and return it.

    Exits the process when the CLI reports an error (status == 1); otherwise
    writes the raw response to ``org_list.json`` for get_org_list() to reuse.
    """
    org_list = sfdx.org_list()
    if org_list["status"] == 1:
        message = org_list["message"]
        logging.error(f"MESSAGE: {message}")
        logging.warning(f"{org_list}")
        sys.exit(1)
    # Context manager guarantees the cache file is flushed and closed; the
    # original json.dump(org_list, open(...)) leaked the file handle.
    with open("org_list.json", "w") as cache_file:
        json.dump(org_list, cache_file)
    return org_list
def get_org_list():
    """Return the org list, preferring the on-disk cache for a fast start.

    When ``org_list.json`` exists it is returned immediately and a background
    thread refreshes it for the next run; otherwise the list is fetched
    synchronously via update_org_list().
    """
    if os.path.isfile("org_list.json"):
        # Context manager closes the cache file promptly; the original
        # json.load(open(...)) leaked the file handle.
        with open("org_list.json", "r") as cache_file:
            org_list = json.load(cache_file)
        # Refresh the cache asynchronously so the UI is not blocked.
        t = threading.Thread(target=update_org_list)
        t.start()
    else:
        org_list = update_org_list()
    return org_list
def get_orgs_map(org_list):
    """Index all orgs by 1-based position and locate the default username.

    Args:
        org_list: raw sfdx org-list response; its 'result' entry may carry
            non-scratch orgs under 'nonScratchOrgs' (newer CLI) or
            'salesforceOrgs' (older CLI), plus 'scratchOrgs'.

    Returns:
        (orgs, defaultusername): ``orgs`` maps index -> cleaned org record;
        ``defaultusername`` is the index of the scratch org marked as the
        sfdx default ('(U)'), or 1 when none is marked.
    """
    result = org_list.get("result", {})
    # Fall back through the possible key names instead of the original
    # try/except-pass blocks, which left these names unbound (NameError)
    # when neither key was present. 'salesforceOrgs' keeps precedence over
    # 'nonScratchOrgs', matching the original assignment order.
    non_scratch_orgs = result.get("salesforceOrgs",
                                  result.get("nonScratchOrgs", []))
    scratch_orgs = result.get("scratchOrgs", [])

    orgs = {}
    defaultusername = 1
    index = 1
    for o in non_scratch_orgs:
        orgs[index] = clean_org_data(o)
        index = index + 1
    for o in scratch_orgs:
        clean_org = clean_org_data(o)
        # '(U)' marks the org the sfdx CLI treats as the default username.
        if clean_org["defaultMarker"] == "(U)":
            defaultusername = index
        orgs[index] = clean_org
        index = index + 1
    return orgs, defaultusername
def get_org_options(orgs):
    """Build the (label, index) choices for the org RadioButtons widget."""
    options = []
    for idx, o in orgs.items():
        # Non-active orgs are rendered in red, active ones in green.
        color = TRED if o["status"] != "Active" else TGREEN
        label = f'{o["alias"]:<30} {o["username"]:<45} {o["expirationDate"]:<12} {color}{o["status"]:<10}{ENDC}'
        options.append((label, idx))
    return options
class org_list_frame(Frame):
    """Full-screen asciimatics frame listing Salesforce orgs.

    Shows a radio-button list of orgs (labels from the module-level
    ``org_options``), read-only detail fields, and Login/Quit buttons.
    Reads the module globals ``form_data``, ``org_options`` and ``orgs``
    set up in the ``__main__`` block.
    """

    # Accumulates typed characters so whole words like "login" can be
    # recognized in process_event(). Class attribute: shared until the first
    # instance assignment rebinds it per instance.
    cmd_string = ""
    def __init__(self, screen):
        super(org_list_frame, self).__init__(
            screen,
            int(screen.height),
            int(screen.width),
            data=form_data,
            has_shadow=False,
            name="Org List",
            has_border=False,
            hover_focus=False,
        )
        # Main layout: narrow gutters on both sides, widgets in the middle.
        layout = Layout([1, 18, 1])
        self.add_layout(layout)
        layout.add_widget(
            RadioButtons(
                options=org_options,
                label="Orgs List",
                name="radio",
                on_change=self._on_change,
            ),
            1,
        )
        layout.add_widget(Divider(height=3), 1)
        # Read-only detail fields, populated by get_org_details().
        layout.add_widget(Text(label="Org Id", name="org_id", readonly=True), 1)
        layout.add_widget(Text(label="Username", name="username", readonly=True), 1)
        layout.add_widget(Text(label="Url", name="url", readonly=True), 1)
        layout.add_widget(
            Text(label="Access Token", name="access_token", readonly=True), 1
        )
        layout.add_widget(Divider(height=3), 1)
        # Button row: Login on the left, Quit on the right.
        layout2 = Layout([1, 1, 1])
        self.add_layout(layout2)
        layout2.add_widget(Button("Login", self.sf_login, name="login"), 0)
        layout2.add_widget(Button("(Q)uit", self._quit, name="quit"), 2)
        # self.set_theme("default")
        self.set_theme("bright")
        self.fix()
    def _on_change(self):
        # Persist widget values into self.data on every change.
        self.save()
    def process_event(self, event):
        """Intercept keyboard shortcuts before default Frame handling.

        'q'/'Q'/Ctrl-C quit, typing "login" opens the selected org, and
        Enter on the radio list fetches the org's details.
        """
        # Do the key handling for this Frame.
        if isinstance(event, KeyboardEvent):
            # Accumulate printable characters into the command buffer.
            if event.key_code >= 0 and event.key_code <= 255:
                self.cmd_string = self.cmd_string + chr(event.key_code)
                # print(f"{self.cmd_string}")
            if event.key_code in [ord("q"), ord("Q"), Screen.ctrl("c")]:
                self.cmd_string = ""
                # Restore Windows console QuickEdit mode before exiting.
                console_mode.quickedit(1)
                raise StopApplication("User quit")
            elif self.cmd_string.lower() == "login":
                self.cmd_string = ""
                self.sf_login()
            elif event.key_code in (ord("\n"), ord("\r")):
                self.cmd_string = ""
                if self.focussed_widget.name == "radio":
                    self.get_org_details()
        elif isinstance(event, MouseEvent):
            # MouseEvent !!!
            print("MouseEvent")
        # Let the Frame/widgets handle everything else (navigation, clicks).
        return super(org_list_frame, self).process_event(event)
    def get_org_details(self):
        """Fetch details for the selected org and fill the read-only fields."""
        radio = self.find_widget("radio")
        org_id = self.find_widget("org_id")
        username = self.find_widget("username")
        url = self.find_widget("url")
        access_token = self.find_widget("access_token")
        # radio.value is the org index; orgs is the module-level index map.
        org = orgs.get(radio.value)
        usr_detail = sfdx.user_details(org["username"])
        org_id.value = f"{usr_detail['result']['orgId']}"
        username.value = f"{usr_detail['result']['username']}"
        url.value = f"{usr_detail['result']['instanceUrl']}"
        access_token.value = f"{usr_detail['result']['accessToken']}"
    def sf_login(self):
        """Open the selected org in a browser via sfdx, then exit the UI."""
        radio = self.find_widget("radio")
        org = orgs.get(radio.value)
        sfdx.org_open(org["username"])
        console_mode.quickedit(1)
        raise StopApplication("User requested exit")
    def _quit(self):
        """Quit-button handler: restore console mode and stop the app."""
        console_mode.quickedit(1)
        raise StopApplication("User requested exit")
def main(screen, scene):
    """Clear the terminal and run the org-list UI until it stops or resizes."""
    screen.clear()
    scenes = [Scene([org_list_frame(screen)], -1)]
    screen.play(
        scenes,
        stop_on_resize=True,
        start_scene=scene,
        allow_int=True,
    )
if __name__ == "__main__":
    # Disable Windows console QuickEdit mode while the TUI runs; every exit
    # path inside org_list_frame re-enables it.
    console_mode.quickedit(0)
    # Build the data the frame reads as module globals.
    org_list = get_org_list()
    orgs, defaultusername = get_orgs_map(org_list)
    # NOTE(review): defaultusername is computed but never used to preselect
    # the radio button - confirm whether that was intended.
    org_options = get_org_options(orgs)
    form_data = {"radio": org_options}
    last_scene = None
    # Standard asciimatics resize loop: replay the UI after a terminal
    # resize, restoring the scene that was active when the resize happened.
    while True:
        try:
            Screen.wrapper(main, catch_interrupt=False, arguments=[last_scene])
            sys.exit(0)
        except ResizeScreenError as e:
            last_scene = e.scene
4f86cee26e324a8d48afb573b6c1c7561b27084a | 10,087 | py | Python | src/environmentbase/scripts/environmentutil.py | ion-channel/cloudformation-environmentbase | 2dc32fb3fa1aa659bd9efc4350e6962092dfbc19 | [
"BSD-2-Clause"
] | 1 | 2019-04-23T09:02:42.000Z | 2019-04-23T09:02:42.000Z | src/environmentbase/scripts/environmentutil.py | ion-channel/cloudformation-environmentbase | 2dc32fb3fa1aa659bd9efc4350e6962092dfbc19 | [
"BSD-2-Clause"
] | null | null | null | src/environmentbase/scripts/environmentutil.py | ion-channel/cloudformation-environmentbase | 2dc32fb3fa1aa659bd9efc4350e6962092dfbc19 | [
"BSD-2-Clause"
] | 3 | 2017-05-24T23:26:46.000Z | 2018-04-19T01:50:19.000Z | #!/usr/bin/env python
"""environmentutil.py
Utility tool helps to manage mappings and gathering data from across multiple AWS Availability zones.
Usage:
environmentutil amimap get [--aws_region <AWS_REGION>]
[--config_file <CONFIG_FILE>]
environmentutil amimap write [<OUTPUT_FILE>] [--aws_region <AWS_REGION>]
[--config_file <CONFIG_FILE>]
environmentutil deploy <CLOUDFORMATION_TEMPLATE> [<PARAMETER_JSON_FILE>]
[--aws_region <AWS_REGION>] [--config_file <CONFIG_FILE>]
Options:
-h --help Show this screen.
-v --version Show version.
--aws_region <AWS_REGION> Region to start queries to AWS API from [default: us-east-1].
--config_file <CONFIG_FILE> JSON Config file holding the extended configuration for this toolset [default: config_args.json].
"""
from docopt import docopt
import boto
import json
import logging
from .version import __version__
import time
class EnvironmentUtil(object):
    """
    EnvironmentUtil class holds common task methods for deploying, managing or
    building CloudFormation templates with the environmentbase toolset.
    """
    def __init__(self,
                 config_args):
        """
        Init for EnvironmentUtil class which persists config args as a dictionary
        @param config_args [dict] - dictionary of configuration values
        """
        self.configuration = config_args
    def get_ami_map(self,
                    aws_region=None, image_names=None):
        """
        Method iterates on all AWS regions for a given set of AMI names to gather AMI IDs and
        to create a regionmap for CloudFormation templates.
        @param aws_region [string] - optionally provides the region to start querying when gathering the list of regions globally.
        @param image_names [iterable of (key, name) pairs] - AMI name filters to resolve per region.
        """
        if aws_region is None:
            aws_region = self.configuration.get('boto', {}).get('region_name', 'us-east-1')
            logging.debug('Setting default AWS Region for API access from overall configuration [' + aws_region + ']')
        region_map = {}
        vpc_conn = boto.connect_vpc(aws_region)
        logging.debug('Connected to VPC in region [' + aws_region + ']')
        # Enumerate every region visible from the initial connection and
        # resolve each requested image name within it.
        for region in vpc_conn.get_all_regions():
            if region.name not in list(region_map.keys()):
                logging.debug('Adding region [' + region.name + '] to region map.')
                region_map[region.name] = {}
            ec2_conn = boto.connect_ec2(region.name)
            logging.debug('Connected to EC2 API in region [' + region.name + ']')
            # NOTE(review): when image_names is None (its default, and how
            # write_ami_map calls this method) this loop raises TypeError.
            # The unpacking also implies image_names is (key, name) pairs,
            # not a plain dict - confirm the intended input shape.
            for k, v in image_names:
                logging.debug('Looking for Image [' + k + ': ' + v + '] in region [' + region.name + ']')
                images = ec2_conn.get_all_images(filters={'name': v})
                # Only an unambiguous single match is recorded in the map.
                if len(images) == 0:
                    logging.warn('No image found for [' + k + ': ' + v + '] in region [' + region.name + ']')
                elif len(images) > 1:
                    logging.warn('Found ' + str(len(images)) + ' images for [' + k + ': ' + v + '] in region [' + region.name + ']')
                else:
                    logging.debug('Adding image [' + images[0].id + '] to region [' + region.name + '] for key [' + k + ']')
                    region_map[region.name][k] = images[0].id
        logging.debug('AMI Region Map Contents: ' + json.dumps(region_map))
        return region_map
    def write_ami_map(self,
                      aws_region,
                      output_file):
        """
        Utility and convenience method for wrapping the get_ami_map method and subsequently
        writing the output to a file for use as an ami id cache.
        @param aws_region [string] - AWS-specific region name to start when querying the AWS APIs
        @param output_file [string] - file location where the ami cache is to be saved locally
        """
        with open(output_file, 'w') as f:
            logging.debug('Writing ami cache file to [' + output_file + ']')
            f.write(json.dumps(self.get_ami_map(aws_region)))
    def get_stack_status(self,
                         cf_conn,
                         stack_name):
        """
        Helper method handles edge cases when stack status doesn't exist yet or any more.
        @param cf_conn [Boto.CloudFormation.Connection] - Connection object to CloudFormation via Boto
        @param stack_name [string] - Name of the stack to check status on
        """
        api_result = cf_conn.describe_stacks(stack_name_or_id=stack_name)
        if len(api_result) == 0:
            # No matching stack: report a synthetic status instead of failing.
            return 'NOT_CREATED'
        else:
            return api_result[0].stack_status
    def wait_for_stack(self,
                       cf_conn,
                       stack_name,
                       sleep_time=20):
        """
        Method handles a wait loop for stack deploys to AWS. Sleep time should be ramped up (longer polls)
        when deploying multiple sets of stacks at the same time.
        Returns true when deploy is successful, false when errors occur.
        @param cf_conn [Boto.CloudFormation.Connection] - Connection object to CloudFormation via Boto
        @param stack_name [string] - Name of the stack to check status on
        @param sleep_time [int] - number of seconds to wait between polls of the AWS API for status on the specified CloudFormation stack
        """
        stack_status = self.get_stack_status(cf_conn, stack_name)
        loop_id = 0
        # Poll while any *_IN_PROGRESS status is reported; the first poll is
        # immediate (no sleep/log on loop_id == 0).
        # NOTE(review): the log message hard-codes "20 sec" even though
        # sleep_time is configurable.
        while 'IN_PROGRESS' in stack_status:
            if loop_id != 0:
                message = 'Stack %s is not yet completely deployed. Waiting 20 sec until next polling interval. Update query count [ %s ]' % (stack_name, str(loop_id))
                logging.info(message)
                time.sleep(sleep_time)
            stack_status = self.get_stack_status(cf_conn, stack_name)
            loop_id += 1
        if cf_conn.describe_stacks(stack_name_or_id=stack_name)[0].stack_status in ['CREATE_COMPLETE', 'UPDATE_COMPLETE']:
            return True
        else:
            return False
    def deploy_stack(self,
                     stack_name,
                     template_string_or_url,
                     capabilities=['CAPABILITY_IAM'],
                     parameters=None,
                     aws_region=None,
                     wait_for_complete=True):
        """
        Method takes a CloudFormation template string or S3 url and deploys the stack to the specified AWS region.
        @param stack_name [string] - name to use when deploying the CloudFormation stack.
        @param template_string_or_url [string] - S3 URL or CloudFormation template body to be deployed.
        @param capabilities [list(str)] - List of CloudFormation template capabilities to be granted to the deployed stack.
        @param parameters [dict] - dictionary of key value pairs containing overrides to template parameter defaults.
        @param aws_region [string] - AWS-specific region name to start when querying the AWS APIs
        @param wait_for_complete [boolean] - boolean indicating whether to poll for success or failure before completing the deploy process.
        """
        # NOTE(review): 'capabilities' uses a mutable default list (benign
        # here since it is never mutated) and 'parameters' is accepted but
        # never forwarded to create_stack - confirm whether that is intended.
        if aws_region is None:
            aws_region = self.configuration.get('boto', {}).get('region_name', 'us-east-1')
            logging.debug('Setting default AWS Region for API access from overall configuration [' + aws_region + ']')
        logging.info('Connecting to CloudFormation in region [' + aws_region + ']')
        cf_conn = boto.connect_cloudformation(aws_region)
        logging.info('Starting deploy of stack [' + stack_name + '] to AWS in region [' + aws_region + ']')
        command_args = {'capabilities': capabilities}
        try:
            # Dicts are serialized; anything else is passed through as the
            # template body text.
            if type(template_string_or_url) == dict:
                command_args['template_body'] = json.dumps(template_string_or_url)
            else:
                # template_dict = json.loads(template_string_or_url)
                command_args['template_body'] = template_string_or_url
        except TypeError:
            # NOTE(review): with the json.loads call above commented out,
            # nothing in the try body raises TypeError for a plain string,
            # so this S3-URL fallback appears unreachable - verify before
            # relying on template_s3_url deploys.
            command_args['template_s3_url'] = template_string_or_url
        logging.debug('Calling stack deploy for [' + stack_name + '] with arguments: ' + json.dumps(command_args))
        cf_conn.create_stack(stack_name, **command_args)
        if wait_for_complete:
            if self.wait_for_stack(cf_conn, stack_name):
                logging.info('Stack [' + stack_name + '] successfully deployed to AWS in region [' + aws_region + ']')
                return True
            else:
                message = 'Stack [%s] failed to deploy to AWS in region [%s] with status [%s]' % (stack_name, aws_region, self.get_stack_status(cf_conn, stack_name))
                logging.warn(message)
                return False
        else:
            # Fire-and-forget deploy: report success without polling.
            return True
if __name__ == '__main__':
    # Parse the CLI per the module docstring's usage specification.
    arguments = docopt(__doc__, version='environmentbase-cfn environment_util %s' % __version__)
    # NOTE(review): '--debug' is not declared in the usage string above, so
    # this lookup always falls back to False - logging stays at INFO.
    if arguments.get('--debug', False):
        level = 'DEBUG'
    else:
        level = 'INFO'
    logging.basicConfig(format='%(asctime)s %(levelname)s:%(message)s', level=level)
    # docopt supplies the default 'config_args.json', so this path is
    # normally always truthy.
    config_file_path = arguments.get('--config_file', 'config_args.json')
    if config_file_path:
        with open(config_file_path, 'r') as f:
            # NOTE(review): this log message is truncated (it never names
            # the file being read).
            logging.info('Reading configuration in from ')
            json_data = json.loads(f.read())
    else:
        json_data = {}
    if arguments.get('amimap'):
        env_util = EnvironmentUtil(json_data)
        if arguments.get('get'):
            logging.info('Getting AMI Map and printing to console.')
            logging.info(
                json.dumps(
                    env_util.get_ami_map(arguments.get('--aws_region', 'us-east-1'))))
        elif arguments.get('write'):
            file_location = arguments.get('<OUTPUT_FILE>', 'ami_cache.json')
            logging.info('Getting AMI Map and writing to file [' + file_location + ']')
            env_util.write_ami_map(arguments.get('--aws_region', 'us-east-1'), file_location)
    elif arguments.get('deploy'):
        # The 'deploy' subcommand is advertised in the usage string but not
        # implemented here (EnvironmentUtil.deploy_stack is never invoked).
        pass
| 48.263158 | 167 | 0.619312 | 7,863 | 0.779518 | 0 | 0 | 0 | 0 | 0 | 0 | 5,011 | 0.496778 |
4f879069ef98ad7ff738cf144fe1fd75fdd1089e | 859 | py | Python | service-workers/service-worker/resources/update-max-aged-worker.py | meyerweb/wpt | f04261533819893c71289614c03434c06856c13e | [
"BSD-3-Clause"
] | 14,668 | 2015-01-01T01:57:10.000Z | 2022-03-31T23:33:32.000Z | service-workers/service-worker/resources/update-max-aged-worker.py | meyerweb/wpt | f04261533819893c71289614c03434c06856c13e | [
"BSD-3-Clause"
] | 7,642 | 2018-05-28T09:38:03.000Z | 2022-03-31T20:55:48.000Z | service-workers/service-worker/resources/update-max-aged-worker.py | meyerweb/wpt | f04261533819893c71289614c03434c06856c13e | [
"BSD-3-Clause"
] | 5,941 | 2015-01-02T11:32:21.000Z | 2022-03-31T16:35:46.000Z | import time
import json
from wptserve.utils import isomorphic_decode, isomorphic_encode
def main(request, response):
    """wptserve handler: serve a cacheable service-worker script.

    The script body embeds the server time at which it was fetched, so the
    test can tell a cached copy (max-age=86400) from a freshly fetched one.
    The 'test' query parameter is echoed back as a JSON string literal.
    """
    last_modified = time.strftime(u"%a, %d %b %Y %H:%M:%S GMT", time.gmtime())
    headers = [
        (b'Content-Type', b'application/javascript'),
        (b'Cache-Control', b'max-age=86400'),
        (b'Last-Modified', isomorphic_encode(last_modified)),
    ]

    test_name = json.dumps(isomorphic_decode(request.GET[b'test']))

    body = u'''
    const mainTime = {time:8f};
    const testName = {test};
    importScripts('update-max-aged-worker-imported-script.py');
    addEventListener('message', event => {{
        event.source.postMessage({{
            mainTime,
            importTime,
            test: {test}
        }});
    }});
    '''.format(
        time=time.time(),
        test=test_name
    )

    return headers, body
| 27.709677 | 113 | 0.559953 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 474 | 0.551804 |
4f87ac6ce4b524e13bfbf144a4213783caf6da68 | 4,344 | py | Python | src/infrastructure/clients/provider/exchange_rate_api/drivers.py | sdediego/forex-django-clean-architecture | 915a8d844a8db5a40c726fe4cf9f6d50f7c95275 | [
"MIT"
] | 8 | 2021-11-09T16:43:38.000Z | 2022-03-25T16:04:26.000Z | src/infrastructure/clients/provider/exchange_rate_api/drivers.py | sdediego/forex-django-clean-architecture | 915a8d844a8db5a40c726fe4cf9f6d50f7c95275 | [
"MIT"
] | null | null | null | src/infrastructure/clients/provider/exchange_rate_api/drivers.py | sdediego/forex-django-clean-architecture | 915a8d844a8db5a40c726fe4cf9f6d50f7c95275 | [
"MIT"
] | 2 | 2021-11-16T21:17:31.000Z | 2022-02-11T11:15:29.000Z | # coding: utf-8
import asyncio
from itertools import chain, repeat
from typing import List
from requests import Response
from requests.exceptions import RequestException
from src.domain.exchange_rate import CurrencyEntity, CurrencyExchangeRateEntity
from src.domain.provider import ProviderEntity
from src.infrastructure.clients.provider.base import ProviderBaseDriver
from src.infrastructure.clients.provider.decorators import async_event_loop
from src.infrastructure.clients.provider.utils import (
get_business_days, get_last_business_day)
from src.infrastructure.clients.provider.exchange_rate_api.exceptions import (
ExchangeRateAPIDriverError)
from src.infrastructure.clients.provider.exchange_rate_api.serializers import (
CurrencySerializer, ExchangeRateSerializer)
class ExchangeRateAPIDriver(ProviderBaseDriver):
    """Provider driver for the exchangerate-api.com style REST API.

    Relies on the base class for the HTTP plumbing (``_request``,
    ``_deserialize_response``) and supplies the endpoint map, auth header
    and error handling specific to this provider.
    """

    # Endpoint identifiers used as keys into ENDPOINTS and by _request.
    CURRENCIES = 'currencies'
    HISTORICAL_RATE = 'historical'
    # Per-endpoint HTTP method, URL path template and response serializer.
    ENDPOINTS = {
        CURRENCIES: {
            'method': 'get',
            'path': 'codes',
            'serializer_class': CurrencySerializer,
        },
        HISTORICAL_RATE: {
            'method': 'get',
            'path': 'history/{currency}/{year}/{month}/{day}',
            'serializer_class': ExchangeRateSerializer,
        }
    }
    def __init__(self, provider: ProviderEntity):
        """Cache the provider's api_url/api_key settings for request building."""
        super().__init__(provider)
        self.api_url = provider.settings.get('api_url').value
        self.api_key = provider.settings.get('api_key').value
    def _get_headers(self) -> dict:
        """Extend the base headers with this provider's bearer token."""
        headers = super()._get_headers()
        headers.update({'Authorization': f'Bearer {self.api_key}'})
        return headers
    def _has_response_error(self, response: Response) -> bool:
        """Return True when the JSON payload carries an 'error-type' field."""
        try:
            data = response.json()
        except ValueError:
            # Non-JSON body: treat as not being a provider-level error.
            return False
        return data.get('error-type') is not None
    def _handle_response_error(self, error: RequestException):
        """Translate a transport-level requests error into the driver error."""
        has_response = error.response is not None
        message = error.response.reason if has_response else str(error)
        status_code = error.response.status_code if has_response else None
        raise ExchangeRateAPIDriverError(message=message, code=status_code)
    def _process_response_error(self, data: dict, status_code: int):
        """Raise the driver error built from the API's 'error-type' payload."""
        message = data.get('error-type', '')
        raise ExchangeRateAPIDriverError(message=message, code=status_code)
    def get_currencies(self) -> List[CurrencyEntity]:
        """Fetch and deserialize the full list of supported currencies."""
        response = self._request(self.CURRENCIES)
        currencies = self._deserialize_response(self.CURRENCIES, response)
        return currencies
    def get_exchange_rate(self, source_currency: str, exchanged_currency: str,
                          date: str = None) -> CurrencyExchangeRateEntity:
        """Fetch the rate for one currency pair on one day.

        ``date`` is an ISO 'YYYY-MM-DD' string; it defaults to the last
        business day. Returns None when the API yields no rate.
        """
        date = date or get_last_business_day()
        year, month, day = date.split('-')
        url_params = {
            'currency': source_currency,
            'year': year,
            'month': month,
            'day': day,
        }
        response = self._request(self.HISTORICAL_RATE, url_params=url_params)
        # Inject the target currency so the serializer can filter the rates.
        response.update({'symbols': exchanged_currency})
        exchange_rate = self._deserialize_response(self.HISTORICAL_RATE, response)
        return exchange_rate[0] if len(exchange_rate) > 0 else None
    @async_event_loop
    async def get_time_series(self, source_currency: str, exchanged_currency: str,
                              date_from: str, date_to: str) -> List[CurrencyExchangeRateEntity]:
        """Fetch rates for every business day in [date_from, date_to].

        One request per business day is issued concurrently via
        asyncio.gather; @async_event_loop runs the coroutine to completion
        so callers can invoke this synchronously.
        """
        async def request(endpoint: str, params: dict, url_params: dict) -> dict:
            # Per-day fetch, tagged with the target currency for the
            # serializer (same trick as get_exchange_rate).
            response = self._request(endpoint, url_params=url_params)
            response.update(params)
            return response
        url_params = []
        for business_day in get_business_days(date_from, date_to):
            year, month, day = business_day.split('-')
            url_params.append({
                'currency': source_currency,
                'year': year,
                'month': month,
                'day': day,
            })
        params = {'symbols': exchanged_currency}
        responses = await asyncio.gather(*list(
            map(request, repeat(self.HISTORICAL_RATE), repeat(params), url_params)))
        # Each response deserializes to a list; flatten them into one series.
        timeseries = list(chain(*map(
            self._deserialize_response, repeat(self.HISTORICAL_RATE), responses)))
        return timeseries
| 40.222222 | 96 | 0.665516 | 3,552 | 0.81768 | 0 | 0 | 1,071 | 0.246547 | 1,049 | 0.241483 | 324 | 0.074586 |
4f88238f777db0f4e648169070250612e0996459 | 1,885 | py | Python | Client/utils/listFileLumis.py | vkuznet/DBS | 14df8bbe8ee8f874fe423399b18afef911fe78c7 | [
"Apache-2.0"
] | 8 | 2015-08-14T04:01:32.000Z | 2021-06-03T00:56:42.000Z | Client/utils/listFileLumis.py | yuyiguo/DBS | 14df8bbe8ee8f874fe423399b18afef911fe78c7 | [
"Apache-2.0"
] | 162 | 2015-01-07T21:34:47.000Z | 2021-10-13T09:42:41.000Z | Client/utils/listFileLumis.py | yuyiguo/DBS | 14df8bbe8ee8f874fe423399b18afef911fe78c7 | [
"Apache-2.0"
] | 16 | 2015-01-22T15:27:29.000Z | 2021-04-28T09:23:28.000Z | from __future__ import print_function
#DBS-3 imports
import time
from dbs.apis.dbsClient import *
# DBSReader endpoint of the production global instance.
url="https://cmsweb.cern.ch/dbs/prod/global/DBSReader/"
#url="https://dbs3-test2.cern.ch/dbs/dev/global/DBSReader/"
# API Object
dbs3api = DbsApi(url=url)
# will throw an error because neither lfn nor block_name is provided
run_num = 297723
#print (dbs3api.listFileLumis(run_num=run_num))
lfn = '/store/relval/CMSSW_9_2_3_patch2/DoubleEG/RECO/2017_07_11_19_22_PRref_92X_dataRun2_Prompt_RefGT_week28_2017-v1/00000/B6DEF099-6366-E711-94F9-0025905A6104.root'
#print (dbs3api.listFileLumis(logical_file_name=lfn))
#print (dbs3api.listFileLumis(block_name="/DoubleEG/CMSSW_9_2_3_patch2-2017_07_11_19_22_PRref_92X_dataRun2_Prompt_RefGT_week28_2017-v1/RECO#69d88304-6678-11e7-ab2c-02163e00d7b3"))
# We are testing listFileLumiArray: it accepts a *list* of LFNs.
lfn_list = ['/store/relval/CMSSW_9_3_0_pre2/RelValMinBias_13/GEN-SIM-RECO/92X_upgrade2017_design_IdealBS_v7-v1/00000/FC2FFB7B-BF68-E711-9779-0CC47A4D75EC.root',
            '/store/relval/CMSSW_9_3_0_pre2/RelValMinBias_13/GEN-SIM-RECO/92X_upgrade2017_design_IdealBS_v7-v1/00000/388DDB84-BF68-E711-BB04-0CC47A4D76D2.root',
            '/store/relval/CMSSW_9_2_3_patch2/DoubleEG/RECO/2017_07_11_19_22_PRref_92X_dataRun2_Prompt_RefGT_week28_2017-v1/00000/B6DEF099-6366-E711-94F9-0025905A6104.root']
print(dbs3api.listFileLumiArray(logical_file_name=lfn_list, validFileOnly=0))
#print(dbs3api.listFileLumiArray(run_num=[297723, 100], logical_file_name=lfn, validFileOnly=0))
# will throw an exception because run_num and logical_file_name cannot both be lists.
#print(dbs3api.listFileLumiArray(run_num=[297723, 100], logical_file_name=lfn_list, validFileOnly=0))
# will throw an exception because block_name is not supported by listFileLumiArray.
#print(dbs3api.listFileLumiArray(block_name="/DoubleEG/CMSSW_9_2_3_patch2-2017_07_11_19_22_PRref_92X_dataRun2_Prompt_RefGT_week28_2017-v1/RECO#69d88304-6678-11e7-ab2c-02163e00d7b3", validFileOnly=0))
4f89b62bf25997382973f0cd09db16afd2027eee | 795 | py | Python | src/py-opentimelineio/opentimelineio/schema/generator_reference.py | eric-with-a-c/OpenTimelineIO | 9f55adfc78293b4f508c4a27a34e110ccd92fca6 | [
"Apache-2.0"
] | 1 | 2022-03-26T12:59:59.000Z | 2022-03-26T12:59:59.000Z | src/py-opentimelineio/opentimelineio/schema/generator_reference.py | eric-with-a-c/OpenTimelineIO | 9f55adfc78293b4f508c4a27a34e110ccd92fca6 | [
"Apache-2.0"
] | 40 | 2021-09-13T03:09:21.000Z | 2022-03-28T03:11:10.000Z | src/py-opentimelineio/opentimelineio/schema/generator_reference.py | eric-with-a-c/OpenTimelineIO | 9f55adfc78293b4f508c4a27a34e110ccd92fca6 | [
"Apache-2.0"
] | 1 | 2022-03-09T03:37:24.000Z | 2022-03-09T03:37:24.000Z | from .. core._core_utils import add_method
from .. import _otio
@add_method(_otio.GeneratorReference)
def __str__(self):
    """Human-readable one-line summary of the generator reference."""
    return (
        f'GeneratorReference("{self.name}", "{self.generator_kind}", '
        f'{self.parameters}, {self.available_image_bounds}, {self.metadata})'
    )
@add_method(_otio.GeneratorReference)
def __repr__(self):
    """Unambiguous repr mirroring the otio.schema constructor call."""
    return (
        f'otio.schema.GeneratorReference('
        f'name={self.name!r}, '
        f'generator_kind={self.generator_kind!r}, '
        f'parameters={self.parameters!r}, '
        f'available_image_bounds={self.available_image_bounds!r}, '
        f'metadata={self.metadata!r}'
        f')'
    )
| 24.090909 | 63 | 0.56478 | 0 | 0 | 0 | 0 | 725 | 0.91195 | 0 | 0 | 171 | 0.215094 |
4f89bcf2d852f2ef8eb07b1d90b312478a835bbc | 4,696 | py | Python | ftests/nuxeo-server-gatling-tests/scripts/injector.py | aTiKhan/nuxeo | 05753c13d97d599b87acf8710ca523dbd89738f4 | [
"Apache-2.0"
] | 1 | 2021-02-15T19:07:59.000Z | 2021-02-15T19:07:59.000Z | ftests/nuxeo-server-gatling-tests/scripts/injector.py | aTiKhan/nuxeo | 05753c13d97d599b87acf8710ca523dbd89738f4 | [
"Apache-2.0"
] | 3 | 2021-07-03T21:32:41.000Z | 2022-03-23T13:15:18.000Z | ftests/nuxeo-server-gatling-tests/scripts/injector.py | aTiKhan/nuxeo | 05753c13d97d599b87acf8710ca523dbd89738f4 | [
"Apache-2.0"
] | 1 | 2022-03-17T14:55:30.000Z | 2022-03-17T14:55:30.000Z | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# (C) Copyright 2015 Nuxeo SA (http://nuxeo.com/) and contributors.
#
# All rights reserved. This program and the accompanying materials
# are made available under the terms of the GNU Lesser General Public License
# (LGPL) version 2.1 which accompanies this distribution, and is available at
# http://www.gnu.org/licenses/lgpl-2.1.html
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# Contributors:
# Delbosc Benoit
#
import os
import sys
import logging
import argparse
from abc import ABCMeta, abstractmethod
from redis import RedisWriter
from nuxeo import NuxeoWriter
from utils import download
__version__ = "0.1.0"
# Name of the running script, used as the logger name in Injector.run().
module = sys.modules['__main__'].__file__
DESC = """By default read input from stdin and output Redis pipe command on stdout.
To download the data file see the -d option.
"""
class Injector(object):
    """Abstract base class for an injector.

    Handles command-line parsing, optional download of the input data file,
    and wiring the parsed documents into a Redis-formatted Nuxeo writer.
    Subclasses implement parse() and downloadInfo().
    """
    __metaclass__ = ABCMeta

    @abstractmethod
    def parse(self, input, writer):
        """Parse an input file, use Nuxeo writer to output document in redis format."""
        pass

    @abstractmethod
    def downloadInfo(self):
        """Return a tuple (archive_name, download_url).

        Note: element [1] is used as the default --download-url and the
        tuple is unpacked as ``archive_name, url`` in get_input().
        """
        pass

    def run(self):
        """Run the injector; returns 0 on success, -1 on user interrupt."""
        self.log = logging.getLogger(module)
        logging.basicConfig(stream=sys.stderr, level=logging.DEBUG,
                            format='%(name)s (%(levelname)s): %(message)s')
        try:
            args = self.parse_command_line()
            self.set_log_level()
            output = args.output
            writer = NuxeoWriter(RedisWriter(out=output,
                                             prefix=args.redis_ns,
                                             usePipeProtocol=not args.no_pipe))
            self.parse(self.get_input(), writer)
            output.flush()
            output.close()
            return 0
        except KeyboardInterrupt:
            self.log.error('Program interrupted!')
            return -1
        finally:
            logging.shutdown()

    def parse_command_line(self):
        """Build the argparse parser, parse sys.argv and cache the result."""
        argv = sys.argv
        formatter_class = argparse.ArgumentDefaultsHelpFormatter
        parser = argparse.ArgumentParser(description=DESC,
                                         formatter_class=formatter_class)
        parser.add_argument('--version', action='version',
                            version='%(prog)s {}'.format(__version__))
        parser.add_argument('-v', '--verbose', dest='verbose_count',
                            action='count', default=0,
                            help='Increases log verbosity for each occurence.')
        parser.add_argument('-o', '--output', metavar='output',
                            type=argparse.FileType('w'), default=sys.stdout,
                            help='Redirect output to a file')
        parser.add_argument('-i', '--input', metavar='input',
                            type=argparse.FileType('r'),
                            default=sys.stdin,
                            help='Input file')
        parser.add_argument('--no-pipe', action='store_true',
                            help='Output Redis command in clear not using pipe mode protocol.')
        parser.add_argument('-d', '--download', action='store_true', dest='download',
                            help='Download input if not already done.')
        parser.add_argument('-u', '--download-url', dest='url', default=self.downloadInfo()[1],
                            help='URL used to download the data file.')
        parser.add_argument('-O', '--data-directory', dest='data_dir', default=os.path.join('~', 'data'),
                            help='Data directory to store downloaded file.')
        parser.add_argument('-p', '--redis-namespace', dest='redis_ns', default="imp",
                            help='Redis key prefix.')
        arguments = parser.parse_args(argv[1:])
        self.arguments = arguments
        return arguments

    def set_log_level(self):
        # Sets log level to WARN going more verbose for each new -v.
        self.log.setLevel(max(3 - self.arguments.verbose_count, 0) * 10)

    def get_input(self):
        """Return the open input stream, downloading the data file first if -d."""
        args = self.arguments
        if args.download:
            self.log.info("downloading")
            archive_name, url = self.downloadInfo()
            return open(download(args.data_dir, archive_name, url), 'r')
        return args.input
| 40.482759 | 105 | 0.59753 | 3,638 | 0.774702 | 0 | 0 | 267 | 0.056857 | 0 | 0 | 1,738 | 0.370102 |
4f8a3f1a9a10f7b445cddfc325c3345386840c0c | 1,781 | py | Python | python/network-security/column.py | shivekkhurana/learning | d871343a30cf4db85f3f938a2ca0e419997ca84e | [
"MIT"
] | null | null | null | python/network-security/column.py | shivekkhurana/learning | d871343a30cf4db85f3f938a2ca0e419997ca84e | [
"MIT"
] | null | null | null | python/network-security/column.py | shivekkhurana/learning | d871343a30cf4db85f3f938a2ca0e419997ca84e | [
"MIT"
] | null | null | null | from random import randint
class Column(object):
    """Columnar-transposition cipher with random lower-case padding.

    encrypt() writes the payload (spaces stripped) row by row into a
    matrix of ``columns`` columns, pads the last row with random
    lower-case letters, then reads the matrix column by column; each
    column is followed by a single space in the cipher text.
    decrypt() inverts the transposition; padding letters remain at the
    end of the recovered text (the receiver cannot distinguish them).
    """

    def __init__(self, columns=5):
        super(Column, self).__init__()
        self.columns = columns

    def encrypt(self, payload):
        """Return the space-separated, column-major cipher text of *payload*."""
        payload = payload.replace(' ', '')
        # Cut the payload into rows of `columns` characters; an empty
        # payload still yields one (fully padded) row, matching the
        # historical behaviour.
        rows = [payload[i:i + self.columns]
                for i in range(0, len(payload), self.columns)] or ['']
        matrix = []
        for row in rows:
            chars = list(row)
            # Pad short rows with random lower-case letters. randint is
            # inclusive on both ends, so 97..122 is exactly 'a'..'z'
            # (the previous upper bound 26+97 = 123 could emit '{').
            while len(chars) < self.columns:
                chars.append(chr(randint(97, 122)))
            matrix.append(chars)
        # Read column by column; every column is terminated by a space,
        # including the last one (decrypt() relies on this).
        columns_text = [''.join(row[col] for row in matrix)
                        for col in range(self.columns)]
        return ' '.join(columns_text) + ' '

    def decrypt(self, payload):
        """Invert encrypt(): read the space-separated columns row by row."""
        grid = [list(column) for column in payload.split(' ') if column]
        n_cols = len(grid)
        n_rows = len(grid[0])
        return ''.join(grid[i][j]
                       for j in range(n_rows)
                       for i in range(n_cols))
def main():
    """Interactive demo: read a payload and a column count, round-trip them."""
    payload = raw_input("Enter Payload : ")
    c = raw_input("Enter Columns : ")
    width = int(c)
    cipher = Column(width).encrypt(payload)
    print(cipher)
    plain = Column(width).decrypt(cipher)
    print(plain)


if __name__ == '__main__':
    main()
4f8bacf5cc8d683ea814fa649f93bbfbdf8b2af2 | 67 | py | Python | SoftUni-Basic/first_steps_in_coding/lab/04_inches_to_centimeters.py | Darkartt/SoftUni | 23d65ddb9f1e454c9b1338a60dc52f5a64c30bc9 | [
"MIT"
] | null | null | null | SoftUni-Basic/first_steps_in_coding/lab/04_inches_to_centimeters.py | Darkartt/SoftUni | 23d65ddb9f1e454c9b1338a60dc52f5a64c30bc9 | [
"MIT"
] | null | null | null | SoftUni-Basic/first_steps_in_coding/lab/04_inches_to_centimeters.py | Darkartt/SoftUni | 23d65ddb9f1e454c9b1338a60dc52f5a64c30bc9 | [
"MIT"
] | null | null | null | inch = float(input())
centimetars = inch * 2.54
print(centimetars) | 16.75 | 25 | 0.716418 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
4f8c5c029193d738ae427dd4616e827e5ddb3129 | 1,119 | py | Python | api/load_db.py | Build-Week-Post-Here-1/Data-Science | d9920656e853898034989fc04fdc37ea13bb73af | [
"MIT"
] | null | null | null | api/load_db.py | Build-Week-Post-Here-1/Data-Science | d9920656e853898034989fc04fdc37ea13bb73af | [
"MIT"
] | 2 | 2021-09-08T01:48:39.000Z | 2022-01-13T02:16:26.000Z | api/load_db.py | Build-Week-Post-Here-1/Data-Science | d9920656e853898034989fc04fdc37ea13bb73af | [
"MIT"
] | 1 | 2020-02-04T17:20:23.000Z | 2020-02-04T17:20:23.000Z | import os
from dotenv import load_dotenv
import sqlite3
import praw
# Create db schema (drop-and-recreate: the table is rebuilt on every run)
conn = sqlite3.connect('db.sqlite3')
c = conn.cursor()
c.execute('drop table if exists submissions')
c.execute('''create table submissions (
                subreddit text,
                subreddit_subs int,
                title text,
                text text
            )
''')
# Load from Reddit; credentials come from a .env file via python-dotenv
load_dotenv()
reddit = praw.Reddit(client_id=os.getenv('REDDIT_CLIENT_ID'),
                     client_secret=os.getenv('REDDIT_CLIENT_SECRET'),
                     user_agent='lambda/posthere1')
subreddit_count = 0
# For each of the most popular subreddits, insert its top submissions
# in one batched executemany per subreddit.
for subreddit in reddit.subreddits.popular(limit=1000):
    subreddit_count += 1
    print(subreddit_count, subreddit)
    records = []
    for submission in subreddit.top(limit=500):
        records.append(
            [subreddit.display_name, subreddit.subscribers, submission.title, submission.selftext])
    c.executemany('''insert into submissions
        (subreddit, subreddit_subs, title, text)
        values (?, ?, ?, ?)
        ''', records)
# Single commit at the end; a crash mid-run loses all inserted rows.
conn.commit()
| 30.243243 | 99 | 0.613941 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 464 | 0.414656 |
4f8c777594f9d7410f30000c4abbaca642fbcd09 | 887 | py | Python | Exemple/exemple.py | GouaiedYosra/mask_rcnn | 1e2bf2a2cbafbec7e890e770b32c256fbe20519f | [
"MIT"
] | null | null | null | Exemple/exemple.py | GouaiedYosra/mask_rcnn | 1e2bf2a2cbafbec7e890e770b32c256fbe20519f | [
"MIT"
] | null | null | null | Exemple/exemple.py | GouaiedYosra/mask_rcnn | 1e2bf2a2cbafbec7e890e770b32c256fbe20519f | [
"MIT"
] | null | null | null | # example of extracting bounding boxes from an annotation file
from xml.etree import ElementTree
# function to extract bounding boxes from an annotation file
def extract_boxes(filename):
# load and parse the file
tree = ElementTree.parse(filename)
# get the root of the document
root = tree.getroot()
# extract each bounding box
boxes = list()
for box in root.findall('.//bndbox'):
xmin = int(box.find('xmin').text)
ymin = int(box.find('ymin').text)
xmax = int(box.find('xmax').text)
ymax = int(box.find('ymax').text)
coors = [xmin, ymin, xmax, ymax]
boxes.append(coors)
# extract image dimensions
width = int(root.find('.//size/width').text)
height = int(root.find('.//size/height').text)
return boxes, width, height
# extract details from an annotation file (demo; requires the kangaroo dataset)
boxes, w, h = extract_boxes('kangaroo/annots/00001.xml')
# summarize extracted details
print(boxes, w, h)
4f8d150b4ae88cb9a27ba7e46fc92e9ece8c3e04 | 11,435 | py | Python | 14_plot_target-list.py | kuntzer/SALSA-public | 79fd601d3999ac977bbc97be010b2c4ef81e4c35 | [
"BSD-3-Clause"
] | 1 | 2021-07-30T09:59:41.000Z | 2021-07-30T09:59:41.000Z | 14_plot_target-list.py | kuntzer/SALSA-public | 79fd601d3999ac977bbc97be010b2c4ef81e4c35 | [
"BSD-3-Clause"
] | null | null | null | 14_plot_target-list.py | kuntzer/SALSA-public | 79fd601d3999ac977bbc97be010b2c4ef81e4c35 | [
"BSD-3-Clause"
] | 1 | 2021-07-30T10:38:54.000Z | 2021-07-30T10:38:54.000Z | ''' 14-plot_target-list.py
===============================================
AIM: Given a catalogue of objects, plots when the targets are visible according to their magnitude for a given period of time.
INPUT: files: - <orbit_id>_misc/orbits.dat
- <orbit_id>_flux/flux_*.dat
variables: see section PARAMETERS (below)
OUTPUT: in <orbit_id>_figures/ : (see below for file name definition)
CMD: python 14-plot_target-list.py
ISSUES: <none known>
REQUIRES:- standard python libraries, specific libraries in resources/ (+ SciPy)
- BaseMap --> http://matplotlib.org/basemap/
- Structure of the root folder:
* <orbit_id>_flux/ --> flux files
* <orbit_id>_figures/ --> figures
* <orbit_id>_misc/ --> storages of data
* all_figures/ --> comparison figures
REMARKS: based on 11-<...>.py, but has a better way of saving appearance and disapperance of the targets, using the class in resources/targets.py
'''
###########################################################################
### INCLUDES
import numpy as np
import pylab as plt
import matplotlib.cm as cm
import time
from resources.routines import *
from resources.TimeStepping import *
from resources.targets import *
import parameters as param
import resources.constants as const
import resources.figures as figures
import time
from matplotlib import dates
from matplotlib.ticker import MaxNLocator, MultipleLocator, FormatStrFormatter
###########################################################################
### PARAMETERS
# orbit_id
orbit_id = 701
# Apogee/perigee altitudes (presumably km — TODO confirm against altitude2period)
apogee=700
perigee=700
# First minute analysis
minute_ini = 0
# Last minute to look for
minute_end = 1440
# Include SAA (South Atlantic Anomaly table) ?
SAA = False
# Show plots
show = True
# Save the picture ?
save = True
# Fancy plots ?
fancy = True
# Take into account the stray light?
straylight = True
# Minimum observable time for plots [minutes]
threshold_obs_time = 50
# Time to acquire a target [minutes]; added to threshold_obs_time below
t_acquisition = 6
# Catalogue name (in resources/)
catalogue = 'cheops_target_list_v0.1.dat'
# Maximum magnitude that can be seen by CHEOPS, only for cosmetics purposes
CHEOPS_mag_max = 12
# File name for the list of orbit file
orbits_file = 'orbits.dat'
# Factor in the SL post treatment correction ?
SL_post_treat = True
# Factor in mirror efficiency for the equivalent star magnitude ?
mirror_correction = True
#####################################################################################################################
# CONSTANTS AND PHYSICAL PARAMETERS
# Orbital period in minutes derived from the apogee/perigee altitudes
period = altitude2period(apogee,perigee)
###########################################################################
### INITIALISATION
# Prefix of the per-minute stray-light flux files in <orbit_id>_flux/
file_flux = 'flux_'
# changes the threshold by addition the acquisition time:
threshold_obs_time += t_acquisition
# Formatted folders definitions
folder_flux, folder_figures, folder_misc = init_folders(orbit_id)
## Prepare grid (cell-centred RA/DEC grid covering the whole sky, in radians)
n_alpha = param.resx
n_delta = param.resy
ra_i = 0
ra_f = 2.*np.pi
dec_i = -np.pi/2.
dec_f = np.pi/2.
ra_step = (ra_f-ra_i)/n_alpha
dec_step = (dec_f-dec_i)/n_delta
# NOTE(review): np.float is removed in NumPy >= 1.24; use float instead.
iterable = (ra_i + ra_step/2+ i*ra_step for i in range(n_alpha))
ras = np.fromiter(iterable, np.float)
iterable = (dec_i + dec_step/2+ i*dec_step for i in range(n_delta))
decs = np.fromiter(iterable, np.float)
ra_grid, dec_grid = np.meshgrid(ras, decs)
# Restrict the SAA table to the analysed time span
if SAA:
	SAA_data = np.loadtxt('resources/SAA_table_%d.dat' % orbit_id, delimiter=',')
	SAA_data = SAA_data[SAA_data[:,0]>= minute_ini]
	SAA_data = SAA_data[SAA_data[:,0]<= minute_end]
computed_orbits = np.loadtxt(folder_misc+orbits_file)[:,0]
############################################################################
### Load catalogue and assign them to the nearest grid point
name_cat, ra_cat, dec_cat, mag_cat = load_catalogue(catalogue)
index_ra_cat = np.zeros(np.shape(ra_cat))
index_dec_cat= np.zeros(np.shape(ra_cat))
targets = []
# Catalogue coordinates are in degrees; convert with const.RAD to radians
for name, ra, dec, mag in zip(name_cat, ra_cat, dec_cat, mag_cat):
	id_ra = find_nearest(ras, ra/const.RAD)
	id_dec = find_nearest(decs, dec/const.RAD)
	targets.append(target_list(name, ra/const.RAD, id_ra, dec/const.RAD, id_dec, mag, int(period+3)))
# Apply the flux correction (SL post-treatment removal and the mirror efficiency)
corr_fact = 1.0
if mirror_correction: corr_fact /= param.mirror_efficiency
if SL_post_treat: corr_fact *= (1.0 - param.SL_post_treat_reduction)
############################################################################
### Start the anaylsis
start = time.time()
# Prepare the arrays
visibility = np.zeros(np.shape(ra_grid))
#observations = np.zeros(len(name_cat)*)
workspace = np.zeros(np.shape(ra_grid))
#data = np.zeros(np.shape(ra_grid))
# Load the reference times
orbits = np.loadtxt(folder_misc+orbits_file,dtype='i4')
minutes_orbit_iditude = np.loadtxt('resources/minute_table_%d.dat' % orbit_id, delimiter=',',dtype='Int32')
# Set variables for printing the advance
numberofminutes = minute_end+1 - minute_ini
# lo/fo: last/first orbit of the requested minute span
lo = fast_minute2orbit(minutes_orbit_iditude,minute_end, orbit_id)
fo = fast_minute2orbit(minutes_orbit_iditude,minute_ini, orbit_id)
lp = -1
junk, junk, at_ini, junk = fast_orbit2times(minutes_orbit_iditude, fo, orbit_id)
first_computed = computed_orbits[computed_orbits<=fo][-1]
first_minute = minute_ini
last_minute = minute_end
# If the first wanted orbit has no computed flux file, rewind to the last
# computed orbit so that visible_save has a full reference orbit to copy.
if not fo == first_computed:
	junk, junk, minute_ini, junk = fast_orbit2times(minutes_orbit_iditude, first_computed, orbit_id)
#	print '1st referenced orbit: %d\twanted orbit: %d' % (first_computed, fo)
try:
	for minute in range(minute_ini,minute_end+1+int(period)):
		minute = int(minute)
		if SAA and fast_SAA(SAA_data, minute): SAA_at_minute = True
		else: SAA_at_minute = False
		orbit_current = fast_minute2orbit(minutes_orbit_iditude, minute, orbit_id)
		# New orbit reached: refresh the progress message on stdout
		if orbit_current > lp:
			lp = orbit_current
			message = "Analysing orbit %d on %d...\t" % (lp,lo)
			sys.stdout.write( '\r'*len(message) )
			sys.stdout.write(message)
			sys.stdout.flush()
		junk, len_orbit, atc_ini, junk = fast_orbit2times(minutes_orbit_iditude, orbit_current, orbit_id)
		try:
			ra, dec, S_sl = load_flux_file(minute, file_flux, folder=folder_flux)
			load = True
			minute_to_load = minute-atc_ini#+shift
		except IOError:
		# if there is nothing then well, do nothing ie we copy the past values
		# in which orbit are we ?
		# get the previous orbit computed and copy the stray light data of this orbit :
		#orbit_previous = orbits[orbits[:,0] < orbit_current][-1,0]
		#minute_replacement = minute - atc_ini + shift #+ at_ini
			minute_to_load = minute-atc_ini
			for obj in targets:
				if SAA_at_minute:
					obj.current_visibility = 0
				else:
					obj.current_visibility = obj.visible_save[minute_to_load]
			load = False
		# populate the visbility matrix
#		for ii in range(0, targets[0].CountObjects()):
		if load:
			for obj in targets:
				ra_ = obj.ra
				dec_ = obj.dec
				# grid cell containing the target; empty INT means off-grid
				a = np.where(np.abs(ra_-ra)<ra_step/2)[0]
				b = np.where(np.abs(dec_-dec)<dec_step/2)[0]
				INT = np.intersect1d(a,b)
				# not visible if off-grid or stray light exceeds the target's flux limit
				if np.shape(INT)[0] == 0 or (straylight and S_sl[INT]*corr_fact > obj.maximum_flux()):
					obj.visible_save[minute_to_load] = 0
					obj.current_visibility = 0
					continue
				else:
					obj.visible_save[minute_to_load] = 1
					if SAA_at_minute: obj.current_visibility = 0
					else: obj.current_visibility = 1
		if minute == minute_ini:
			for obj in targets:
				obj.workspace=obj.current_visibility
			continue
		for obj in targets: obj.Next(minute,threshold_obs_time)
# NOTE(review): the inline `except ...: print` followed by an indented loop
# below looks syntactically inconsistent — verify against the original file.
except KeyboardInterrupt: print hilite('\nWARNING! USER STOPPED LOADING AT MINUTE %d' % minute,False,False)
	for ii in range(0, targets[0].CountObjects()): targets[ii].Next(minute,threshold_obs_time)
### #TODO if first minute look for past orbits anyways
print
# Keep only the targets that were visible at least once
worthy_targets = []
for ii in range(0, targets[0].CountObjects()):
	if np.shape(targets[ii].visible)[0] > 0:
		worthy_targets.append(targets[ii])
############################################################################
end = time.time()
elapsed_time = round((end-start)/60.,2)
sys.stdout.write( '\r'*len(message) )
sys.stdout.flush()
print "Time needed: %2.2f min" % elapsed_time
### Plot a few things
if fancy: figures.set_fancy()
### Plot time line (one horizontal bar per target over the analysed span)
figures.set_fancy()
minute_ini = first_minute
minute_end = last_minute
maxy = len(worthy_targets)
print 'Number of star visible in period selected: %d' % maxy
# Figure height grows with the number of targets to keep rows readable
size = 2 + maxy/3
figsize = (17.,size) # fig size in inches (width,height)
fig = plt.figure(figsize=figsize)
ax = plt.subplot(111)
ii = 0
ax.yaxis.set_major_locator(MultipleLocator(1))
plt.grid(True)
# Each (visibility start, invisibility start) pair becomes one hline segment
for ii in range (0, len(worthy_targets)):
	y = float(ii)
	visi = worthy_targets[ii].Visibility()
	invi = worthy_targets[ii].Invisibility()
	for vis, ini in zip(visi, invi):
		plt.hlines(y, vis, ini, lw=3, color=cm.Dark2(y/(maxy+5)))
	if ii > maxy: break
	else: ii+=1
# y tick labels: target name plus its magnitude
labels = ['%s (%2.1f)' % (wt.name, wt.mag) for wt in worthy_targets[0:maxy]]
ax.set_yticklabels(labels)
ax.set_ylim(-0.5,maxy-0.5)
# convert epoch to matplotlib float format
labels = np.linspace(minute_ini, minute_end+1, 12) * 60. + const.timestamp_2018_01_01
plt.xlim([minute_ini, minute_end+1])
ax.xaxis.set_major_locator(MultipleLocator((minute_end-minute_ini+1)/11))
# to human readable date
pre = map (time.gmtime, labels)
labels = map(figures.format_second, pre)
ax.set_xticklabels(labels)
fig.autofmt_xdate()
if save:
	# report the threshold without the acquisition time in the file name
	threshold_obs_time -= t_acquisition
	if SAA: note = '_SAA'
	else: note = ''
	fname = '%svisibility_stars_obs_%d_o_%d_to_%d%s' % (folder_figures,threshold_obs_time,fo,lo, note)
	figures.savefig(fname,fig,fancy)
### A spatial plot of the targets (Mollweide projection, coloured by magnitude)
fig = plt.figure()
ax = plt.subplot(111, projection='mollweide')
plt.scatter((ra_cat-180)/const.RAD,dec_cat/const.RAD, c=mag_cat, marker='*', s=50, edgecolor='none', vmin=param.magnitude_min,vmax=param.magnitude_max+0.2)
v = np.linspace(param.magnitude_min,param.magnitude_max, (param.magnitude_max-param.magnitude_min+1), endpoint=True)
t = map(figures.format_mag, v)
cbar = plt.colorbar(ticks=v, orientation='horizontal',shrink=.8)
cbar.set_ticklabels(t)
# Nudge the colorbar upwards so it does not overlap the projection
l,b,w,h = plt.gca().get_position().bounds
ll,bb,ww,hh = cbar.ax.get_position().bounds
cbar.ax.set_position([ll, bb+0.1, ww, hh])
ax.grid(True)
ax.set_xticklabels([r'$30^{\circ}$',r'$60^{\circ}$',r'$90^{\circ}$',r'$120^{\circ}$',r'$150^{\circ}$',r'$180^{\circ}$',r'$210^{\circ}$',r'$240^{\circ}$',r'$270^{\circ}$',r'$300^{\circ}$',r'$330^{\circ}$']) #,r'$360^{\circ}$'
ax.set_xlabel(r'$\alpha$')
ax.set_ylabel(r'$\delta$')
if save:
	fname = '%stargets_distribution' % folder_figures
	figures.savefig(fname,fig,fancy)
### A histogram of the magnitudes
fig = plt.figure(dpi=100)
ax = fig.add_subplot(111)
bins=np.linspace(np.amin(mag_cat),np.amax(mag_cat), 50)
n, bins, patches = plt.hist(mag_cat,bins=bins)
plt.setp(patches, 'edgecolor', 'black', 'linewidth', 2, 'facecolor','blue','alpha',1)
ax.xaxis.set_major_locator(MultipleLocator(2))
ax.xaxis.set_minor_locator(MultipleLocator(1))
ax.yaxis.set_major_locator(MultipleLocator(5))
ax.yaxis.set_minor_locator(MultipleLocator(1))
ax.xaxis.grid(True,'minor')
ax.yaxis.grid(True,'minor')
ax.xaxis.grid(True,'major',linewidth=2)
ax.yaxis.grid(True,'major',linewidth=2)
plt.xlim([np.amin(mag_cat)*0.95, 1.05*np.amax(mag_cat)])
plt.xlabel(r'$m_V$')
plt.ylabel(r'$\mathrm{distribution}$')
x1,x2,y1,y2 = plt.axis()
# Mark the instrument's limiting magnitude
plt.axvline(CHEOPS_mag_max, lw=2, color='r')
if save:
	fname = '%stargets_hist_mag' % folder_figures
	figures.savefig(fname,fig,fancy)
if show: plt.show()
| 29.778646 | 224 | 0.689899 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 3,933 | 0.343944 |
4f8d86e6a28fdb9feabc081905b63e6018146084 | 5,829 | py | Python | pymontecarlo_gui/results/base.py | pymontecarlo/pymontecarlo-gui | 1b3c37d4b634a85c63f23d27ea8bd79bf5a43a2f | [
"Apache-2.0"
] | null | null | null | pymontecarlo_gui/results/base.py | pymontecarlo/pymontecarlo-gui | 1b3c37d4b634a85c63f23d27ea8bd79bf5a43a2f | [
"Apache-2.0"
] | 2 | 2016-05-16T10:19:56.000Z | 2021-12-29T15:16:20.000Z | pymontecarlo_gui/results/base.py | pymontecarlo/pymontecarlo-gui | 1b3c37d4b634a85c63f23d27ea8bd79bf5a43a2f | [
"Apache-2.0"
] | null | null | null | """"""
# Standard library modules.
import csv
import io
import functools
# Third party modules.
from qtpy import QtCore, QtGui, QtWidgets, QtWebEngineWidgets
import xlsxwriter
# Local modules.
from pymontecarlo_gui.settings import SettingsBasedField
from pymontecarlo_gui.widgets.dialog import ExecutionProgressDialog
from pymontecarlo.formats.document import publish_html, DocumentBuilder
# Globals and constants variables.
class ResultWidgetBase(QtWidgets.QWidget):
    """Base class for widgets displaying a single result.

    Stores the result object and the application settings given at
    construction and exposes them through read-only accessors.
    """
    def __init__(self, result, settings, parent=None):
        super().__init__(parent)
        # Variables
        self._result = result
        self._settings = settings
    def result(self):
        """Result object shown by this widget."""
        return self._result
    def settings(self):
        """Application settings used by this widget."""
        return self._settings
class ResultTableWidgetBase(ResultWidgetBase):
    """Widget showing a result as a sortable table plus an HTML analysis tab.

    Subclasses provide the table model via :meth:`_create_model`. The
    toolbar offers copy-to-clipboard (tab-separated) and save-to-file
    (CSV or XLSX) actions over the table contents.
    """

    def __init__(self, result, settings, parent=None):
        super().__init__(result, settings, parent)

        # Actions
        self.action_copy = QtWidgets.QAction("Copy to clipboard")
        self.action_copy.setIcon(QtGui.QIcon.fromTheme("edit-copy"))
        self.action_copy.setShortcut(QtGui.QKeySequence.Copy)
        self.action_copy.triggered.connect(self._on_copy)

        self.action_save = QtWidgets.QAction("Save")
        self.action_save.setIcon(QtGui.QIcon.fromTheme("document-save"))
        self.action_save.triggered.connect(self._on_save)

        # Widgets
        self.table_view = QtWidgets.QTableView()
        self.table_view.setModel(self._create_model(result, settings))
        self.table_view.horizontalHeader().setSectionResizeMode(
            QtWidgets.QHeaderView.Stretch
        )
        self.table_view.setSortingEnabled(True)

        self.web_widget = QtWebEngineWidgets.QWebEngineView()
        self.web_widget.setHtml(self._render_html(result, settings))

        self.toolbar = QtWidgets.QToolBar()
        self.toolbar.addAction(self.action_copy)
        self.toolbar.addAction(self.action_save)

        # Layouts
        widget = QtWidgets.QTabWidget()
        widget.addTab(self.table_view, "Results")
        widget.addTab(self.web_widget, "Analysis")

        layout = QtWidgets.QVBoxLayout()
        layout.addWidget(widget)
        layout.addWidget(self.toolbar)
        self.setLayout(layout)

        # Signals: re-render the table when the application settings change.
        settings.settings_changed.connect(self._on_settings_changed)

    def _create_model(self, result, settings):
        """Return the QAbstractTableModel for *result* (abstract)."""
        raise NotImplementedError

    def _render_html(self, result, settings):
        """Render the result's analysis document as an HTML string."""
        builder = DocumentBuilder(settings)
        result.analysis.convert_document(builder)
        return publish_html(builder).decode("utf8")

    def _on_settings_changed(self):
        # Forcing a model reset makes the view re-query all formatted data.
        model = self.table_view.model()
        model.modelReset.emit()

    def _get_data(self):
        """Return the table contents (header row first) as a list of rows.

        Values are taken with Qt.UserRole, i.e. the raw, unformatted data.
        """
        model = self.table_view.model()
        rows = []

        # Header
        header = []
        for icol in range(model.columnCount()):
            header.append(
                model.headerData(icol, QtCore.Qt.Horizontal, QtCore.Qt.UserRole)
            )
        rows.append(header)

        # Data
        for irow in range(model.rowCount()):
            row = []
            for icol in range(model.columnCount()):
                index = model.createIndex(irow, icol)
                row.append(model.data(index, QtCore.Qt.UserRole))
            rows.append(row)

        return rows

    def _on_copy(self):
        """Copy the table to the clipboard as tab-separated text."""
        data = self._get_data()

        buffer = io.StringIO()
        writer = csv.writer(buffer, lineterminator="\n", delimiter="\t")
        writer.writerows(data)

        data = QtCore.QMimeData()
        data.setText(buffer.getvalue())
        QtGui.QGuiApplication.instance().clipboard().setMimeData(data)

    def _save_csv(self, filepath):
        """Write the table to *filepath* as UTF-8 CSV."""
        data = self._get_data()
        with open(filepath, "w", encoding="utf8") as fp:
            writer = csv.writer(fp, lineterminator="\n")
            writer.writerows(data)

    def _save_xlsx(self, filepath):
        """Write the table to *filepath* as an Excel workbook."""
        data = self._get_data()
        workbook = xlsxwriter.Workbook(filepath)
        try:
            format_header = workbook.add_format({"bold": True})
            worksheet = workbook.add_worksheet(self.result().getname())
            worksheet.write_row(0, 0, data[0], format_header)
            for irow, row in enumerate(data[1:], 1):
                for icol, value in enumerate(row):
                    worksheet.write(irow, icol, value)
        finally:
            workbook.close()

    def _on_save(self):
        """Prompt for a destination and save the table as CSV or XLSX."""
        caption = "Save result"
        dirpath = self.settings().savedir
        namefilters = "Excel spreadsheet (*.xlsx);;CSV text file (*.csv)"
        filepath, namefilter = QtWidgets.QFileDialog.getSaveFileName(
            self, caption, dirpath, namefilters
        )

        if not namefilter:
            return False

        if not filepath:
            return False

        if namefilter == "CSV text file (*.csv)":
            ext = ".csv"
            function = functools.partial(self._save_csv, filepath)
        elif namefilter == "Excel spreadsheet (*.xlsx)":
            # BUGFIX: was "xlsx" (no leading dot), which turned a bare
            # file name "foo" into "fooxlsx" instead of "foo.xlsx".
            ext = ".xlsx"
            function = functools.partial(self._save_xlsx, filepath)

        if not filepath.endswith(ext):
            filepath += ext

        dialog = ExecutionProgressDialog(
            "Save result", "Saving result...", "Result saved", function
        )
        dialog.exec_()
class ResultSummaryWidgetBase(QtWidgets.QWidget):
    """Base class for widgets summarising results over a whole project."""
    def setProject(self, project):
        """Set the project whose results are summarised (abstract)."""
        raise NotImplementedError
class ResultFieldBase(SettingsBasedField):
    """Field wrapping a single result for display."""
    def __init__(self, result, settings):
        # NOTE: _result is assigned before super().__init__ on purpose —
        # the base initialiser may invoke title()/icon(), which read it
        # (TODO confirm against SettingsBasedField).
        self._result = result
        super().__init__(settings)
    def title(self):
        """Title shown for this field: the wrapped result's name."""
        return self.result().getname()
    def icon(self):
        """Icon displayed next to the title."""
        return QtGui.QIcon.fromTheme("format-justify-fill")
    def result(self):
        """Wrapped result object."""
        return self._result
| 29 | 80 | 0.635615 | 5,387 | 0.924172 | 0 | 0 | 0 | 0 | 0 | 0 | 462 | 0.079259 |
4f8dc869ef1dc9d22cc9f3f4842b852a13377f7b | 14,586 | py | Python | tests/test.py | rfsaliev/light-model-transformer | 550a357db8f0c6765328cd42d312fa1be62eae91 | [
"Apache-2.0"
] | 67 | 2018-08-13T02:57:35.000Z | 2021-09-20T05:47:46.000Z | tests/test.py | Mengjintao/light-model-transformer | 9e0c7d5c3ec4042d30e6d8ee67a9c856837573c8 | [
"Apache-2.0"
] | 3 | 2018-08-14T05:54:10.000Z | 2019-09-06T20:50:02.000Z | tests/test.py | Mengjintao/light-model-transformer | 9e0c7d5c3ec4042d30e6d8ee67a9c856837573c8 | [
"Apache-2.0"
] | 18 | 2018-08-13T03:02:00.000Z | 2021-12-10T01:57:28.000Z | # -*- coding: UTF-8 -*-
import os
import sys
import subprocess
import argparse
import re
import configparser
import logging
import copy
import psutil
if sys.version_info[0] < 3:
import struct
import tensorflow as tf
from tensorflow.core.framework import graph_pb2
# level=logging.INFO prints only info messages; level=logging.DEBUG prints
# both debug and info messages.
logging.basicConfig(level=logging.INFO)
# Global configuration mainly got from test.cfg
TRANSFORMER_PATH = ''
TF_SRC_PATH = ''
TF_SLIM_PATH= ''
NUM_THREADS = 1
EPSILON = 0.0
LOOPS = 1
# Parsed TestCase instances, filled in from the config file.
testcases = []
class TestCase(object):
    """Description of one model-conversion test case.

    The identification fields mirror entries in the configuration file;
    the file-path fields are filled in later by the pipeline (download,
    graph export, freezing and model transformation steps).
    """

    def __init__(self):
        self.model_name = ''
        self.model_type = ''
        self.url = ''
        self.output_node = ''
        # Some checkpoints cannot be frozen without a graph fix.
        self.fix_graph = False
        # Path of the downloaded checkpoint (.ckpt).
        self.ckpt_file = None
        # Path of the inference graph generated by export_inference_graph.py.
        self.graph_file = None
        # Path of the frozen graph generated by freeze_graph.
        self.frozen_file = None
        # Directory where the transformed model is saved.
        self.save_model_dir = None

    def __repr__(self):
        return '[{}]\ntype={}\nurl={}\noutput_node={}\nfix_graph={!r}'.format(
            self.model_name, self.model_type, self.url,
            self.output_node, self.fix_graph)
def exec_cmd(cmd, title, check_output=True):
    """Log *title*, run shell command *cmd* and return its output.

    With check_output=True the output is returned as bytes and a non-zero
    exit raises CalledProcessError; otherwise os.popen is used and the
    output is returned as text with the exit status ignored.
    """
    logging.info(title)
    logging.debug(cmd)
    if not check_output:
        return os.popen(cmd).read()
    return subprocess.check_output(cmd, stderr=subprocess.STDOUT, shell=True)
def get_extract_command(filename, target_path):
    """Build the ``tar`` command line that extracts *filename* into
    *target_path*, choosing the decompression flag from the extension."""
    if filename.endswith('.tar.gz'):
        flags = 'xz'
    elif filename.endswith('.tar.bz2'):
        flags = 'xj'
    else:
        flags = 'x'
    return 'tar -%sf %s -C %s' % (flags, filename, target_path)
def decode_string(str):
    # Decode a UTF-8 byte string to text (subprocess output is bytes on py3).
    # NOTE(review): the parameter shadows the builtin ``str``; kept as-is to
    # preserve the public signature.
    str = str.decode('UTF-8')
    return str
# Find a ckpt file in a directory, and return the path of ckpt file
def find_ckpt(ckpt_dir):
    """Return the path of the first .ckpt file found in *ckpt_dir*, or None."""
    for name in os.listdir(ckpt_dir):
        if name.endswith('.ckpt'):
            return os.path.join(ckpt_dir, name)
    return None
def download_ckpt(tc):
    """Download and extract the checkpoint archive for *tc* via wget/tar,
    filling in tc.ckpt_file / graph_file / frozen_file / save_model_dir.

    If a .ckpt file is already present under checkpoints/<model>, only the
    path fields are set and no download happens.
    """
    ckpt_dir = '%s/checkpoints/%s' % (TRANSFORMER_PATH, tc.model_name)
    tc.graph_file = '%s/%s_inf_graph.pb' % (ckpt_dir, tc.model_name)
    tc.frozen_file = '%s/frozen_%s.pb' % (ckpt_dir, tc.model_name)
    tc.save_model_dir = '%s/saved_model/%s' % (TRANSFORMER_PATH, tc.model_name)
    # Look for existing ckpt file
    if os.path.exists(ckpt_dir):
        tc.ckpt_file = find_ckpt(ckpt_dir)
    else:
        os.makedirs(ckpt_dir)
    # Already exist
    if tc.ckpt_file:
        logging.debug("ckpt file (%s) already exist!" % tc.ckpt_file)
        return
    # Fetch the archive named by the last URL segment, unpack it, clean up.
    tar_name = tc.url.split('/')[-1]
    download_cmd = 'wget -c %s' % tc.url
    extract_cmd = get_extract_command(tar_name, ckpt_dir)
    rm_cmd = 'rm -f %s' % tar_name
    exec_cmd(download_cmd, "download ckpt file ...")
    exec_cmd(extract_cmd, "untar ckpt file ...")
    exec_cmd(rm_cmd, "delete ckpt archieve ...")
    logging.debug("ckpt file has been download!")
    tc.ckpt_file = find_ckpt(ckpt_dir)
def export_inference_graph(tc):
    """Export the model's inference graph .pb with TF-slim's
    export_inference_graph.py (skipped when tc.graph_file already exists)."""
    if os.path.isfile(tc.graph_file):
        logging.debug("graph file of %s already exist!" % tc.model_name)
    else:
        export_cmd = 'cd %s && python export_inference_graph.py --alsologtostderr --model_name=%s --output_file=%s' % (TF_SLIM_PATH, tc.model_name, tc.graph_file)
        exec_cmd(export_cmd, "export graph.pb")
    logging.debug("%s has been exported!" % tc.graph_file)
def summarize_graph(tc):
    """Run TensorFlow's summarize_graph tool on tc.graph_file (builds the
    tool with bazel first); useful for discovering output node names."""
    bazel_build = 'cd %s && bazel build tensorflow/tools/graph_transforms:summarize_graph' % TF_SRC_PATH
    summarize_graph = 'cd %s && bazel-bin/tensorflow/tools/graph_transforms/summarize_graph --in_graph=%s' % (TF_SRC_PATH, tc.graph_file)
    exec_cmd(bazel_build, "bazel build ...")
    exec_cmd(summarize_graph, "summarize graph ...")
    logging.debug("summarize graph has been done!")
def load_graph(filename):
    """Parse and return a binary GraphDef protobuf from *filename*."""
    graph_def = tf.GraphDef()
    with tf.gfile.FastGFile(filename, 'rb') as f:
        graph_def.ParseFromString(f.read())
    return graph_def
def change_tensor_shape(tensor_shape):
    """Shrink a 1001-wide class dimension to 1000 in-place on a protobuf
    TensorShape (drops the extra background class of TF-slim models)."""
    dims = len(tensor_shape.dim)
    if dims == 4 and tensor_shape.dim[3].size == 1001:
        tensor_shape.dim[3].size = 1000
        #print("shape changed, shape=%s" % str(tensor_shape))
    if dims == 1 and tensor_shape.dim[0].size == 1001:
        tensor_shape.dim[0].size = 1000
        #print("shape changed, shape=%s" % str(tensor_shape))
def int_to_bytes(val):
    """Serialize *val* as 4 little-endian bytes (Python 2/3 compatible)."""
    if sys.version_info[0] < 3:
        # Python 2 ints have no to_bytes(); fall back to struct.
        return struct.pack("<L", val)
    return val.to_bytes(4, 'little')
def int_from_bytes(barray):
    """Deserialize 4 little-endian bytes into an int (Python 2/3 compatible)."""
    if sys.version_info[0] < 3:
        # Python 2 ints have no from_bytes(); fall back to struct.
        return struct.unpack("<L", barray)[0]
    return int.from_bytes(barray, byteorder='little')
# Designed to fix the error like:
# Assign requires shapes of both tensors to match. lhs shape= [1,1,4096,1001] rhs shape= [1,1,4096,1000]
def fix_graph_1001_to_1000(tc):
    """Rewrite int Const nodes and shape attributes in tc.graph_file so any
    1001-wide class dimension becomes 1000, then save the graph in place.

    Designed to fix errors like:
    Assign requires shapes of both tensors to match.
    lhs shape= [1,1,4096,1001] rhs shape= [1,1,4096,1000]
    """
    pb_file = tc.graph_file
    graph_def = load_graph(pb_file)
    new_graph_def = graph_pb2.GraphDef()
    for node in graph_def.node:
        # Check the value of const node
        if node.op == 'Const':
            tensor = node.attr.get('value').tensor
            # DataType value got from https://github.com/tensorflow/tensorflow/blob/r1.10/tensorflow/core/framework/types.proto
            if tensor and tensor.dtype >= 3 and tensor.dtype <= 6: # data type is int
                tensor_shape = tensor.tensor_shape
                # Change tensor like: {"tensor":{"dtype":"DT_INT32","tensor_shape":{"dim":[{"size":4}]},"tensor_content":"\\001\\000\\000\\000\\001\\000\\000\\000\\000\\020\\000\\000\\350\\003\\000\\000"}}
                if len(tensor_shape.dim) == 1 and tensor_shape.dim[0].size == 4:
                    element_size = (int)(len(tensor.tensor_content) / 4)
                    shape = [0, 0, 0, 0]
                    for i in range(4):
                        shape[i] = int_from_bytes(tensor.tensor_content[i*element_size: (i+1)*element_size])
                    # 1x1x2048x1001 -> 1x1x2048x1000, etc.
                    if shape[3] == 1001:
                        shape[3] = 1000
                        content = int_to_bytes(shape[0]) + int_to_bytes(shape[1]) + int_to_bytes(shape[2]) + int_to_bytes(shape[3]);
                        tensor.tensor_content = content
                # Change tensor like: {"tensor":{"dtype":"DT_INT32","tensor_shape":{"dim":[{"size":2}]},"tensor_content":"\\377\\377\\377\\377\\351\\003\\000\\000"}}
                if len(tensor_shape.dim) == 1 and tensor_shape.dim[0].size == 2:
                    element_size = (int)(len(tensor.tensor_content) / 2)
                    shape = [0, 0]
                    for i in range(2):
                        shape[i] = int_from_bytes(tensor.tensor_content[i*element_size: (i+1)*element_size])
                    # -1x1001 -> -1x1000, etc.
                    if shape[1] == 1001:
                        shape[1] = 1000
                        content = int_to_bytes(shape[0]) + int_to_bytes(shape[1]);
                        tensor.tensor_content = content
                # Change tensor like: {"tensor":{"dtype":"DT_INT32","tensor_shape":{"dim":[{"size":1}]},"int_val":1000}}
                if len(tensor_shape.dim) == 1 and tensor_shape.dim[0].size == 1 and len(tensor.int_val) == 1:
                    if tensor.int_val[0] == 1001:
                        tensor.int_val[0] = 1000
        # Check shape attribute
        shape_value = node.attr.get('shape')
        if shape_value:
            change_tensor_shape(shape_value.shape)
        new_graph_def.node.extend([copy.deepcopy(node)])
    # save new graph
    with tf.gfile.GFile(pb_file, "wb") as f:
        f.write(new_graph_def.SerializeToString())
def frozen_pb(tc):
    """Freeze the inference graph together with the checkpoint weights into
    tc.frozen_file via TensorFlow's freeze_graph tool (skipped if present)."""
    if os.path.exists(tc.frozen_file):
        logging.debug("frozen pb file exist")
    else:
        bazel_build = 'cd %s && bazel build tensorflow/python/tools:freeze_graph' % TF_SRC_PATH
        frozen_cmd = 'cd %s && bazel-bin/tensorflow/python/tools/freeze_graph --input_graph=%s --input_checkpoint=%s --input_binary=true --output_graph=%s --output_node_names=%s' % \
                     (TF_SRC_PATH, tc.graph_file, tc.ckpt_file, tc.frozen_file, tc.output_node)
        exec_cmd(bazel_build, "bazel build ...")
        exec_cmd(frozen_cmd, "frozen pb ...")
    logging.debug("frozen has been done!")
def tf_inference(tc, inference_input):
    """Run TensorFlow inference through tests/pb_inference.py and parse its
    stdout.

    inference_input: 'data' for emulated input, otherwise a picture path.
    Returns (outputs, times): regex-captured output values and per-loop
    times in ms, both as lists of strings.
    """
    if inference_input == 'data':
        do_inference = 'cd %s/tests && OMP_NUM_THREADS=%d python pb_inference.py --pb_file=%s --output_node=%s --batch_size=1 --loop=%d' % \
                   (TRANSFORMER_PATH, NUM_THREADS, tc.frozen_file, tc.output_node, LOOPS)
    else:
        # BUG FIX: '--output_node=%' was missing the 's' conversion character,
        # which made this %-formatting raise on the picture-input path.
        do_inference = 'cd %s/tests && OMP_NUM_THREADS=%d python pb_inference.py --pb_file=%s --output_node=%s --batch_size=1 --loop=%d --picture=%s' % \
                   (TRANSFORMER_PATH, NUM_THREADS, tc.frozen_file, tc.output_node, LOOPS, inference_input)
    output = exec_cmd(do_inference,"tensorflow do inference!")
    output = decode_string(output)
    # Parse output info from cmd out
    tf_info = "tensorflow output:\s\[([\-?\d+\.?\d*e?-?\d*?\s]+)"
    tf_result = re.findall(tf_info, output)
    logging.debug("tensorflow output: %s" % tf_result)
    tf_time_info = "TF time used per loop is: (\d+\.?\d*) ms"
    tf_time_used = re.findall(tf_time_info, output)
    return tf_result, tf_time_used
def mkldnn_inference(tc, inference_input):
    """Convert the frozen graph to generated MKL-DNN inference code, build and
    run it, and parse outputs/timings from its stdout.

    Returns (outputs, times) as lists of regex-captured strings.
    """
    if not os.path.exists(tc.save_model_dir):
        os.makedirs(tc.save_model_dir)
    # pb -> topology + weights, then topology -> generated C++ code
    tf2topo = 'cd %s/ && python tf2topo.py --input_model_filename=%s --weights_file=%s/weights.bin --pkl_file=%s/weights.pkl --topo_file=%s/topo.txt' % \
              (TRANSFORMER_PATH, tc.frozen_file, tc.save_model_dir, tc.save_model_dir, tc.save_model_dir)
    exec_cmd(tf2topo, "convert tf pb file to topo")
    topo2mkldnn = 'cd %s/ && python topo2code.py --topo=%s/topo.txt' % (TRANSFORMER_PATH, tc.save_model_dir)
    exec_cmd(topo2mkldnn, "convert topo to inference code")
    run_mkldnn = 'cd %s/inference_code/ && sh build.sh && OMP_NUM_THREADS=%d ./test -W %s/weights.bin -b 1 -l %d' % \
                 (TRANSFORMER_PATH, NUM_THREADS, tc.save_model_dir, LOOPS)
    output = exec_cmd(run_mkldnn, "build and run inference code")
    output = decode_string(output)
    #search info from cmd ouput
    out_info = "Last_output >> \[([\-?\d+\.?\d*e?-?\d*?\s]+)"
    inference_result = re.findall(out_info, output)
    logging.debug("mkldnn output:%s" % inference_result)
    mkldnn_time_info = "AVG Time: (\d+\.?\d*) ms"
    mkldnn_time_used = re.findall(mkldnn_time_info, output)
    return inference_result,mkldnn_time_used
def str2list(input_str):
    """Split a whitespace-separated string into a list of tokens."""
    return input_str.split()
def compare(list1, list2, epsilon=None):
    """Element-wise compare two sequences of numeric strings/values.

    Returns True when every pair (up to the shorter length) differs by at
    most the tolerance; logs the outcome at debug level.

    epsilon: optional tolerance override (backward-compatible addition);
    defaults to the module-level EPSILON read from test.cfg.
    """
    if epsilon is None:
        epsilon = EPSILON
    tol = float(epsilon)
    length = min(len(list1), len(list2))
    num = sum(1 for i in range(length)
              if abs(float(list1[i]) - float(list2[i])) <= tol)
    if num == length:
        logging.debug("mkldnn inference outputs equal tensorflow outputs, num=%d" % num)
        return True
    else:
        logging.debug("mkldnn inference outputs are different from tensorflow outputs!")
        return False
def init_config():
    """Populate the module-level configuration globals and the *testcases*
    list from test.cfg.

    Reads tool paths from [path], tolerance/loop count from [control], and
    builds one TestCase per model listed under [models] names.
    """
    global TRANSFORMER_PATH, TF_SRC_PATH, TF_SLIM_PATH, NUM_THREADS, EPSILON, LOOPS
    TRANSFORMER_PATH = os.path.abspath(os.path.join(os.path.dirname(__file__), ".."))
    # Use the physical core count for OMP_NUM_THREADS
    NUM_THREADS = psutil.cpu_count(logical = False)
    config = configparser.ConfigParser()
    config.read('test.cfg')
    TF_SRC_PATH = config.get('path', 'tensorflow')
    TF_SLIM_PATH = config.get('path', 'tensorflow_slim')
    EPSILON = config.get('control', 'epsilon')
    LOOPS = int(config.get('control', 'loops'))
    str_models = config.get('models', 'names')
    model_list = str_models.split(',')
    for model in model_list:
        tc = TestCase()
        tc.model_name = model
        tc.model_type = config.get(model, 'type')
        tc.url = config.get(model, 'url')
        tc.output_node = config.get(model, 'output_node')
        try:
            tc.fix_graph = config.get(model, 'fix_graph')
        except (configparser.NoSectionError, configparser.NoOptionError):
            # BUG FIX: was a bare ``except:`` which also swallowed
            # KeyboardInterrupt/SystemExit; fix_graph is simply optional.
            # NOTE(review): when present, config.get returns a *string*, so
            # any non-empty value (even "False") is truthy -- confirm intended.
            pass
        testcases.append(tc)
def model_test(tc):
    """Run the full pipeline for one TestCase: fetch and freeze the model,
    run TensorFlow and MKL-DNN inference, then compare outputs and timings.

    Relies on the module-level ``args`` parsed in the __main__ block.
    """
    print(" Test model: %s start ............" % tc.model_name)
    if tc.model_type == "ckpt":
        download_ckpt(tc)
        export_inference_graph(tc)
        if tc.fix_graph:
            fix_graph_1001_to_1000(tc)
        # if need to get the output name, could call this func
        summarize_graph(tc)
        frozen_pb(tc)
    else:
        # BUG FIX: 'logger' was never defined (NameError at runtime);
        # use the logging module like the rest of the file.
        logging.debug("model type error!")
        exit()
    tf_output, tf_time = tf_inference(tc, args.inference_input)
    mkldnn_output, mkldnn_time = mkldnn_inference(tc, args.inference_input)
    tf_output_list = str2list(tf_output[0])
    mkldnn_output_list = str2list(mkldnn_output[0])
    logging.debug("tensorflow ouput: %s" % tf_output_list)
    logging.debug("mkldnn output: %s" % mkldnn_output_list)
    result = compare(mkldnn_output_list, tf_output_list)
    if result:
        print(" %s passed! tensorflow used time: %s ms, mkldnn used time: %s ms." % (tc.model_name, tf_time, mkldnn_time))
    else:
        print(" %s failed! tensorflow used time: %s ms, mkldnn used time: %s ms." % (tc.model_name, tf_time, mkldnn_time))
if __name__ == '__main__':
    # Command-line entry point: pick which model(s) to run and what input to use.
    parser = argparse.ArgumentParser()
    parser.add_argument("--model_name", "-n", default="all", type=str, help="which model to test, default means to test all configured models in test.cfg.")
    parser.add_argument("--inference_input", "-i", default="data", type=str, help="input: 'data' or an image file path. Currently only support 'data', which means to use emulated data.")
    args = parser.parse_args()
    init_config()
    # Run either every configured model or just the requested one.
    if args.model_name == "all":
        for tc in testcases:
            model_test(tc)
    else:
        for tc in testcases:
            if tc.model_name == args.model_name:
                model_test(tc)
    print(" All tests done!")
| 36.648241 | 205 | 0.627794 | 804 | 0.055121 | 0 | 0 | 0 | 0 | 0 | 0 | 4,358 | 0.29878 |
4f8dd0d57f577449e61da2e80c2e713182dfefec | 3,912 | py | Python | tests/test_jenkins.py | lakiyi/aiojenkins | 12bd43c6d79be664855a011fa86aedc735db307b | [
"MIT"
] | 19 | 2020-04-26T21:45:21.000Z | 2022-03-07T16:13:12.000Z | tests/test_jenkins.py | lakiyi/aiojenkins | 12bd43c6d79be664855a011fa86aedc735db307b | [
"MIT"
] | 24 | 2020-04-23T20:05:10.000Z | 2021-12-03T22:39:22.000Z | tests/test_jenkins.py | lakiyi/aiojenkins | 12bd43c6d79be664855a011fa86aedc735db307b | [
"MIT"
] | 5 | 2020-08-05T11:04:46.000Z | 2022-01-10T01:45:43.000Z | import asyncio
import time
from collections import namedtuple
from http import HTTPStatus
import pytest
from aiojenkins.exceptions import JenkinsError
from aiojenkins.jenkins import Jenkins
from tests import CreateJob, get_host, get_login, get_password, is_ci_server
@pytest.mark.asyncio
async def test_invalid_host(jenkins):
    """A malformed host URL must surface as JenkinsError."""
    with pytest.raises(JenkinsError):
        jenkins = Jenkins('@#$')
        await jenkins.get_version()
@pytest.mark.asyncio
async def test_get_status(jenkins):
    """Smoke test: fetching the server status must not raise."""
    await jenkins.get_status()
@pytest.mark.asyncio
async def test_quiet_down(jenkins):
    """quiet_down() must set the server's quietingDown flag and
    cancel_quiet_down() must clear it again."""
    await jenkins.quiet_down()
    server_status = await jenkins.get_status()
    assert server_status['quietingDown'] is True
    await jenkins.cancel_quiet_down()
    server_status = await jenkins.get_status()
    assert server_status['quietingDown'] is False
@pytest.mark.asyncio
async def test_restart(jenkins):
    """Both safe_restart() and restart() must leave the server ready again."""
    # restarting a live server takes ~40 seconds, so only run it on CI
    if not is_ci_server():
        pytest.skip('takes too much time +40 seconds')
    await jenkins.safe_restart()
    # give the server a moment to actually go down before polling readiness
    await asyncio.sleep(5)
    await jenkins.wait_until_ready()
    assert (await jenkins.is_ready()) is True
    await jenkins.restart()
    await jenkins.wait_until_ready()
    assert (await jenkins.is_ready()) is True
@pytest.mark.asyncio
async def test_tokens(jenkins):
    """A generated API token must authenticate build starts until revoked."""
    version = await jenkins.get_version()
    # token API only exists on sufficiently new Jenkins versions
    if not (version.major >= 2 and version.minor >= 129):
        pytest.skip('Version isn`t support API tokens')
    async with CreateJob(jenkins) as job_name:
        token_value, token_uuid = await jenkins.generate_token('')
        token_name = str(time.time())
        token_value, token_uuid = await jenkins.generate_token(token_name)
        await jenkins.nodes.enable('master')
        # instance without credentials
        jenkins_tokened = Jenkins(get_host(), get_login(), token_value)
        await jenkins_tokened.builds.start(job_name)
        await jenkins.revoke_token(token_uuid)
        # the revoked token must no longer authenticate
        with pytest.raises(JenkinsError):
            await jenkins_tokened.builds.start(job_name)
@pytest.mark.asyncio
async def test_run_groovy_script(jenkins):
    """run_groovy_script() must return the script's output, and the server's
    error text for an invalid script."""
    # TC: compare with expected result
    text = 'test'
    response = await jenkins.run_groovy_script('print("{}")'.format(text))
    assert response == text
    # TC: invalid script
    response = await jenkins.run_groovy_script('xxx')
    assert 'No such property' in response
@pytest.mark.asyncio
async def test_retry_client(monkeypatch):
    """The retry policy must survive a timeout and 5xx responses before OK."""
    attempts = 0
    async def text():
        return 'error'
    async def request(*args, **kwargs):
        # Fake aiohttp request: 1st call times out, 2nd returns 500, then 200.
        nonlocal attempts
        attempts += 1
        response = namedtuple(
            'response', ['status', 'cookies', 'text', 'json']
        )
        if attempts == 1:
            raise asyncio.TimeoutError
        elif attempts < 3:
            response.status = HTTPStatus.INTERNAL_SERVER_ERROR
        else:
            response.status = HTTPStatus.OK
        response.text = text
        response.json = text
        return response
    retry = dict(total=5, statuses=[HTTPStatus.INTERNAL_SERVER_ERROR])
    try:
        jenkins = Jenkins(get_host(), get_login(), get_password(), retry=retry)
        await jenkins.get_status()
        monkeypatch.setattr('aiohttp.client.ClientSession.request', request)
        await jenkins.get_status()
    finally:
        await jenkins.close()
@pytest.mark.asyncio
async def test_retry_validation():
    """An invalid retry specification must raise JenkinsError.

    NOTE(review): presumably 'attempts' is not an accepted key (the valid
    config in test_retry_client uses 'total') -- confirm against the client.
    """
    retry = dict(attempts=5, statuses=[HTTPStatus.INTERNAL_SERVER_ERROR])
    with pytest.raises(JenkinsError):
        jenkins = Jenkins(get_host(), get_login(), get_password(), retry=retry)
        await jenkins.get_status()
def test_session_close():
    """Creating a client and dropping it without close() must not raise,
    even when the garbage collector finalizes it."""
    def do():
        Jenkins(
            get_host(),
            get_login(),
            get_password(),
            retry=dict(enabled=True)
        )
    do()
    # just check for no exceptions
    import gc
    gc.collect()
| 25.568627 | 79 | 0.676636 | 0 | 0 | 0 | 0 | 3,357 | 0.858129 | 3,189 | 0.815184 | 350 | 0.089468 |
4f8de99ba5448f45940e47db59549a0a5e04b962 | 1,790 | py | Python | api.py | hale6666/drink-cli | e10b4a2ecba09c53ef3a34b0bd9d60938ae434d0 | [
"MIT"
] | null | null | null | api.py | hale6666/drink-cli | e10b4a2ecba09c53ef3a34b0bd9d60938ae434d0 | [
"MIT"
] | null | null | null | api.py | hale6666/drink-cli | e10b4a2ecba09c53ef3a34b0bd9d60938ae434d0 | [
"MIT"
] | null | null | null | import requests
import json
# Load the Square/CSH API key from a local, non-committed file.
with open("api.key") as f:
    API_KEY = f.read().strip()
"""
This is for security online, so the API key is not publicized.
"""
# NOTE(review): redundant -- the ``with`` block already closed the file.
f.close()
# Base URL of the webdrink API; every helper below hits this endpoint.
url='https://webdrink.csh.rit.edu/api/index.php'
def test():
    """
    This is not usually called, other than for testing the API.
    """
    # Hit the API's test endpoint and dump the raw and parsed responses.
    head = {"request": "test/api/{}".format(API_KEY), "api_key": API_KEY}
    ret=requests.get(url,params=head)
    print(ret.text)
    print(ret.json())
    print(ret.json()['message'])
def get_credits(uid):
    """Return the drink-credit balance of user *uid*.

    Raises ValueError when the API does not answer with HTTP 200.
    """
    params = {"request": "users/credits/{}".format(uid), "uid": uid, "api_key": API_KEY}
    resp = requests.get(url, params=params)
    if resp.status_code == 200:
        return resp.json()['data']
    raise ValueError
def get_user_info():
    """Fetch the caller's user info (name, credits, etc.).

    Raises ValueError when the API does not answer with HTTP 200.
    """
    params = {"request": "users/info/", "api_key": API_KEY}
    resp = requests.get(url, params=params)
    if resp.status_code == 200:
        return resp.json()['data']
    raise ValueError
def get_machine_info():
    """Fetch the current stock of each drink machine.

    Raises ValueError when the API does not answer with HTTP 200.
    """
    params = {"request": "machines/stock/"}
    resp = requests.get(url, params=params)
    if resp.status_code == 200:
        return resp.json()['data']
    raise ValueError
def drop_drink(ib, mach, slot, delay):
    """Drop the drink in *slot* of machine *mach* for iButton *ib* after
    *delay*; returns the API's status message.

    Raises ValueError when the API does not answer with HTTP 200.
    """
    payload = {"request": "drops/drop/{}/{}/{}/{}".format(ib, mach, slot, delay), "ibutton": ib,
               "machine_id": mach, "slot_num": slot, "delay": delay, "api_key": API_KEY}
    resp = requests.post(url, data=payload)
    if resp.status_code == 200:
        return resp.json()['message']
    raise ValueError
| 26.716418 | 93 | 0.591061 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 668 | 0.373184 |
4f8e402da6a278939229244bc051662a74bdbb7f | 1,099 | py | Python | app/main/views.py | jfairf01/OrgoWebsite | 62f25faa1cc174f91377a281e1cea1dd898b0b30 | [
"MIT"
] | null | null | null | app/main/views.py | jfairf01/OrgoWebsite | 62f25faa1cc174f91377a281e1cea1dd898b0b30 | [
"MIT"
] | null | null | null | app/main/views.py | jfairf01/OrgoWebsite | 62f25faa1cc174f91377a281e1cea1dd898b0b30 | [
"MIT"
] | null | null | null | from flask import render_template, jsonify
from sqlalchemy import func
from . import main
from .. import db
from ..models import User, HighScore
@main.route('/highScore')
def highScore():
    """Return the stored high score as JSON ({username, score})."""
    # NOTE(review): ordering by username to pick the "high" score only works
    # if the table holds at most one row (newhighScore clears it) -- confirm.
    highScore = HighScore.query.order_by(HighScore.username).first()
    if highScore is None:
        return jsonify(username="None", score=0)
    return jsonify(username=highScore.username, score=highScore.highScore)
@main.route('/newhighScore/<userName>/<newScore>')
def newhighScore(userName, newScore):
    """Replace the stored high score with (userName, newScore) and echo it."""
    # delete-then-insert keeps the table at exactly one row
    HighScore.query.delete()
    newHighScore = HighScore(username=userName, highScore=newScore)
    db.session.add(newHighScore)
    db.session.commit()
    return jsonify(username=newHighScore.username, score=newHighScore.highScore)
@main.route('/')
def index():
    """Render the landing page."""
    return render_template('main/index.html')
@main.route('/Categories')
def Categories():
    """Render the categories page."""
    return render_template('main/Categories.html')
@main.route('/FourthPage')
def fourthPage():
    """Render the fourth page."""
    return render_template('main/FourthPage.html')
@main.route('/FifthPage')
def fifthPage():
    """Render the fifth page."""
    return render_template('main/FifthPage.html')
| 28.921053 | 80 | 0.743403 | 0 | 0 | 0 | 0 | 941 | 0.856233 | 0 | 0 | 178 | 0.161965 |
4f8f2fcef7a9b8ab49ddc25206ab33968b97d1cd | 938 | py | Python | src/python/rodney/misc.py | mesnardo/petibm-rollingpitching | 39f7ed9b88973727bed6955e31d99754d7627c9f | [
"BSD-3-Clause"
] | 2 | 2021-09-06T03:37:06.000Z | 2021-12-01T02:39:13.000Z | src/python/rodney/misc.py | mesnardo/petibm-rollingpitching | 39f7ed9b88973727bed6955e31d99754d7627c9f | [
"BSD-3-Clause"
] | 3 | 2020-03-30T21:52:01.000Z | 2021-07-11T13:11:35.000Z | src/python/rodney/misc.py | mesnardo/petibm-rollingpitching | 39f7ed9b88973727bed6955e31d99754d7627c9f | [
"BSD-3-Clause"
] | 4 | 2021-02-22T21:54:16.000Z | 2022-01-18T18:39:34.000Z | """Miscellaneous helper functions."""
import argparse
def parse_command_line():
    """Parse the command-line options."""
    parser = argparse.ArgumentParser(
        description='Generic command-line parser for the rolling-piching application.',
        formatter_class=argparse.ArgumentDefaultsHelpFormatter,
    )
    # All three flags are negative switches: present -> store False.
    for flag, dest, help_text in (
        ('--no-show', 'show_figures', 'Do not display Matplotlib figures'),
        ('--no-save', 'save_figures', 'Do not save Matplotlib figures'),
        ('--no-data', 'extra_data', 'Add extra data for comparison (if available)'),
    ):
        parser.add_argument(flag, dest=dest, action='store_false', help=help_text)
    return parser.parse_args()
| 42.636364 | 78 | 0.60661 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 365 | 0.389126 |
4f900b835f7f2809af5aeab94956f59503ab79fe | 2,534 | py | Python | tests/test_lime.py | dianna-ai/dianna | 88bcaec001e640c35e5e1e08517ef1624fd661cb | [
"Apache-2.0"
] | 9 | 2021-11-16T09:53:47.000Z | 2022-03-02T13:28:53.000Z | tests/test_lime.py | dianna-ai/dianna | 88bcaec001e640c35e5e1e08517ef1624fd661cb | [
"Apache-2.0"
] | 340 | 2021-03-03T12:55:37.000Z | 2022-03-31T13:53:44.000Z | tests/test_lime.py | dianna-ai/dianna | 88bcaec001e640c35e5e1e08517ef1624fd661cb | [
"Apache-2.0"
] | 5 | 2021-08-19T08:14:35.000Z | 2022-03-17T21:12:46.000Z | from unittest import TestCase
import numpy as np
import dianna
import dianna.visualization
from dianna.methods import LIME
from tests.test_onnx_runner import generate_data
from tests.utils import ModelRunner
from tests.utils import run_model
class LimeOnImages(TestCase):
def test_lime_function(self):
np.random.seed(42)
input_data = np.random.random((1, 224, 224, 3))
labels = ('batch', 'y', 'x', 'channels')
explainer = LIME(random_state=42, axes_labels=labels)
heatmap = explainer.explain_image(run_model, input_data, num_samples=100)
heatmap_expected = np.load('tests/test_data/heatmap_lime_function.npy')
assert heatmap.shape == input_data[0].shape[:2]
assert np.allclose(heatmap, heatmap_expected, atol=.01)
def test_lime_filename(self):
np.random.seed(42)
model_filename = 'tests/test_data/mnist_model.onnx'
black_and_white = generate_data(batch_size=1)
# Make data 3-channel instead of 1-channel
input_data = np.zeros([1, 3] + list(black_and_white.shape[2:])) + black_and_white
input_data = input_data.astype(np.float32)
labels = ('batch', 'channels', 'y', 'x')
def preprocess(data):
# select single channel out of 3, but keep the channel axis
return data[:, [0], ...]
heatmap = dianna.explain_image(model_filename, input_data, method="LIME", preprocess_function=preprocess, random_state=42,
axes_labels=labels)
heatmap_expected = np.load('tests/test_data/heatmap_lime_filename.npy')
assert heatmap.shape == input_data[0, 0].shape
assert np.allclose(heatmap, heatmap_expected, atol=.01)
def test_lime_text():
model_path = 'tests/test_data/movie_review_model.onnx'
word_vector_file = 'tests/test_data/word_vectors.txt'
runner = ModelRunner(model_path, word_vector_file, max_filter_size=5)
review = 'such a bad movie'
explanation = dianna.explain_text(runner, review, labels=[0], method='LIME', random_state=42)[0]
words = [element[0] for element in explanation]
word_indices = [element[1] for element in explanation]
scores = [element[2] for element in explanation]
expected_words = ['bad', 'such', 'movie', 'a']
expected_word_indices = [7, 0, 11, 5]
expected_scores = [.492, -.046, .036, -.008]
assert words == expected_words
assert word_indices == expected_word_indices
assert np.allclose(scores, expected_scores, atol=.01)
| 39.59375 | 130 | 0.685478 | 1,490 | 0.588003 | 0 | 0 | 0 | 0 | 0 | 0 | 393 | 0.155091 |
4f90480ab176667d12cd4cc6901cb752aa1d6d1f | 2,600 | py | Python | pyUnicodeSteganography/__init__.py | bunnylab/pyUnicodeSteganography | 4a0e0cdedfa5c96b8a5fea8921881123a7548924 | [
"WTFPL"
] | 2 | 2021-12-18T02:41:40.000Z | 2022-03-14T22:48:49.000Z | pyUnicodeSteganography/__init__.py | bunnylab/pyUnicodeSteganography | 4a0e0cdedfa5c96b8a5fea8921881123a7548924 | [
"WTFPL"
] | null | null | null | pyUnicodeSteganography/__init__.py | bunnylab/pyUnicodeSteganography | 4a0e0cdedfa5c96b8a5fea8921881123a7548924 | [
"WTFPL"
] | null | null | null | import re
import pyUnicodeSteganography.zerowidth as zerowidth
import pyUnicodeSteganography.lookalikes as lookalikes
import pyUnicodeSteganography.snow as snow
import pyUnicodeSteganography.emoji as emoji
from pyUnicodeSteganography.zerowidth import zwc_4
def encode(unencoded_string, msg, method="zw", binary=False, replacements=None, delimiter=None):
    '''
    Main encoding method.

    Dispatches to the encoder for *method* ("zw", "snow", "lookalike" or
    "emoji") and handles insertion/appending of the hidden *msg* into
    *unencoded_string*.  Raises ValueError when the cover string is too
    short (zw) or the method is unknown.
    '''
    if method == "zw":
        code = zerowidth.encode(msg, character_set=replacements, binary=binary)
        chars = list(unencoded_string)
        # interleave the zero-width payload: 4 code chars after each visible char
        split_code = [code[i:i+4] for i in range(0, len(code), 4)]
        if len(split_code) >= len(chars):
            raise ValueError("String too short to encode message")
        out = ''
        for i in range(len(chars)):
            out = out + chars[i]
            if i < len(split_code):
                out = out + split_code[i]
        return out
    elif method == "snow":
        if not delimiter:
            delimiter = '\t\t\t'
        code = snow.encode(msg, character_set=replacements, binary=binary)
        return unencoded_string + delimiter + code
    elif method == "lookalike":
        return lookalikes.encode(unencoded_string, msg, substitution_table=replacements, binary=binary)
    elif method == "emoji":
        return emoji.encode(msg, binary=binary)
    else:
        # BUG FIX: raise the specific ValueError (a subclass of Exception, so
        # callers catching Exception still match) instead of bare Exception.
        raise ValueError("Method: {}, is not supported".format(method))
def decode(encoded_string, method="zw", binary=False, replacements=None, delimiter=None):
    '''
    Main decoding method
    Dispatches to corresponding decoder based on specified method and handles
    extraction of encoded message from the string.
    '''
    if method == "zw":
        if not replacements:
            replacements = zwc_4
        # keep only the zero-width characters; they carry the payload
        code = ''
        for c in encoded_string:
            if c in replacements:
                code = code + c
        return zerowidth.decode(code, character_set=replacements, binary=binary)
    elif method == "snow":
        if not delimiter:
            delimiter = '\t\t\t'
        regex = "{}(.+)$".format(delimiter)
        # NOTE(review): re.search can return None when no delimiter/payload is
        # present, making the next line raise AttributeError -- confirm intended.
        m = re.search(regex, encoded_string)
        code = m.groups()[0]
        return snow.decode(code, character_set=replacements, binary=binary)
    elif method == "lookalike":
        return lookalikes.decode(encoded_string, substitution_table=replacements, binary=binary)
    # NOTE(review): an unknown method falls through and returns None here,
    # whereas encode() raises -- confirm whether decode should raise too.
    elif method == "emoji":
        return emoji.decode(encoded_string, binary=binary)
| 33.333333 | 103 | 0.651154 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 495 | 0.190385 |
4f904ef1dc73b23f3b9aba76a41b6b2cd535196b | 6,500 | py | Python | landlab/components/transport_length_diffusion/tests/test_tl_hill_diff.py | saraahsimon/landlab | 1cf809b685efbccaaa149b5899a600c3ccedf30f | [
"MIT"
] | null | null | null | landlab/components/transport_length_diffusion/tests/test_tl_hill_diff.py | saraahsimon/landlab | 1cf809b685efbccaaa149b5899a600c3ccedf30f | [
"MIT"
] | null | null | null | landlab/components/transport_length_diffusion/tests/test_tl_hill_diff.py | saraahsimon/landlab | 1cf809b685efbccaaa149b5899a600c3ccedf30f | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Thu Sep 21 16:52:10 2017
@author: margauxmouchene
"""
import numpy as np
import pytest
from numpy.testing import assert_almost_equal
from landlab import RasterModelGrid
from landlab.components import (
FlowAccumulator,
FlowDirectorSteepest,
TransportLengthHillslopeDiffuser,
)
def test_route_to_multiple_error_raised():
    """The diffuser must reject grids routed with a multiple-flow director."""
    mg = RasterModelGrid((10, 10))
    z = mg.add_zeros("node", "topographic__elevation")
    z += mg.x_of_node + mg.y_of_node
    fa = FlowAccumulator(mg, flow_director="MFD")
    fa.run_one_step()
    with pytest.raises(NotImplementedError):
        TransportLengthHillslopeDiffuser(mg, erodibility=1.0, slope_crit=0.5)
def test_tl_hill_diff():
"""Test cases where S>Sc, S=Sc and S<Sc"""
# Test cases where S>Sc, S=Sc and S<Sc
# Set up a 3x16 grid with closed boundaries and initial elevations.
mg = RasterModelGrid((3, 12))
z = np.array(
[
0.,
0.,
0.,
0.,
0.,
0.,
0.,
0.,
0.,
0.,
0.,
0.,
0.,
5.,
1.9,
1.9,
1.9,
1.9,
1.3,
1.3,
1.3,
1.3,
1.,
0.,
0.,
0.,
0.,
0.,
0.,
0.,
0.,
0.,
0.,
0.,
0.,
0.,
]
)
mg.add_field("node", "topographic__elevation", z)
mg.set_closed_boundaries_at_grid_edges(True, True, True, True)
# Parameter values for test
k = 0.001
Sc = 0.6
# Instantiate flow director and tl hillslope diffuser
fdir = FlowDirectorSteepest(mg)
tl_diff = TransportLengthHillslopeDiffuser(mg, erodibility=k, slope_crit=Sc)
# Run flow director
fdir.run_one_step()
# test slopes
s_out = mg.at_node["topographic__steepest_slope"]
s_test = np.array(
[
0.,
0.,
0.,
0.,
0.,
0.,
0.,
0.,
0.,
0.,
0.,
0.,
0.,
3.1,
0.,
0.,
0.,
0.6,
0.,
0.,
0.,
0.3,
0.,
0.,
0.,
0.,
0.,
0.,
0.,
0.,
0.,
0.,
0.,
0.,
0.,
0.,
]
)
assert_almost_equal(s_out, s_test, decimal=10)
# Run tl hillslope diffusion component
tl_diff.run_one_step(1.)
# Test results
# flux_out
fo_test = np.array(
[
0.,
0.,
0.,
0.,
0.,
0.,
0.,
0.,
0.,
0.,
0.,
0.,
0.,
0.025,
0.,
0.,
0.,
0.0006,
0.,
0.,
0.,
0.0003,
0.,
0.,
0.,
0.,
0.,
0.,
0.,
0.,
0.,
0.,
0.,
0.,
0.,
0.,
]
)
fo_out = mg.at_node["sediment__flux_out"]
assert_almost_equal(fo_out, fo_test, decimal=10)
# updated elevation
elev_test = np.array(
[
0.,
0.,
0.,
0.,
0.,
0.,
0.,
0.,
0.,
0.,
0.,
0.,
0.,
4.975,
1.9,
1.9,
1.9,
1.8994,
1.3,
1.3,
1.3,
1.2997,
1.,
0.,
0.,
0.,
0.,
0.,
0.,
0.,
0.,
0.,
0.,
0.,
0.,
0.,
]
)
elev_out = mg.at_node["topographic__elevation"]
assert_almost_equal(elev_out, elev_test, decimal=10)
# Run another time step because deposition and transfer were null
# the first time
fdir.run_one_step()
tl_diff.run_one_step(1.)
# Test results
# flux_out
fo_test = np.array(
[
0.00000000e+00,
0.00000000e+00,
0.00000000e+00,
0.00000000e+00,
0.00000000e+00,
0.00000000e+00,
0.00000000e+00,
0.00000000e+00,
0.00000000e+00,
0.00000000e+00,
0.00000000e+00,
0.00000000e+00,
0.00000000e+00,
2.47500000e-02,
0.00000000e+00,
0.00000000e+00,
6.00000000e-07,
5.99400000e-04,
0.00000000e+00,
0.00000000e+00,
3.00000000e-07,
2.99700000e-04,
0.00000000e+00,
0.00000000e+00,
0.00000000e+00,
0.00000000e+00,
0.00000000e+00,
0.00000000e+00,
0.00000000e+00,
0.00000000e+00,
0.00000000e+00,
0.00000000e+00,
0.00000000e+00,
0.00000000e+00,
0.00000000e+00,
0.00000000e+00,
]
)
fo_out = mg.at_node["sediment__flux_out"]
assert_almost_equal(fo_out, fo_test, decimal=10)
# updated elevation
elev_test = np.array(
[
0.,
0.,
0.,
0.,
0.,
0.,
0.,
0.,
0.,
0.,
0.,
0.,
0.,
4.95025,
1.925,
1.9,
1.8999994,
1.8988006,
1.3006,
1.3,
1.2999997,
1.2994003,
1.0003,
0.,
0.,
0.,
0.,
0.,
0.,
0.,
0.,
0.,
0.,
0.,
0.,
0.,
]
)
elev_out = mg.at_node["topographic__elevation"]
assert_almost_equal(elev_out, elev_test, decimal=10)
| 20.249221 | 80 | 0.365231 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 760 | 0.116923 |
4f90780c2a2a1ba5b699d89bbb0798c1c6453c81 | 1,361 | py | Python | backend/app.py | HalmonLui/square-hackathon | 62d5be7a229f9e39e27a546c164facd779d28aa4 | [
"MIT"
] | 3 | 2020-06-13T02:47:29.000Z | 2020-06-20T17:34:15.000Z | backend/app.py | HalmonLui/square-hackathon | 62d5be7a229f9e39e27a546c164facd779d28aa4 | [
"MIT"
] | 2 | 2020-06-14T20:29:26.000Z | 2020-06-14T20:29:34.000Z | backend/app.py | HalmonLui/square-hackathon | 62d5be7a229f9e39e27a546c164facd779d28aa4 | [
"MIT"
] | 1 | 2020-09-04T01:45:39.000Z | 2020-09-04T01:45:39.000Z | # import app
from flask import Flask, render_template, make_response, send_file
from flask_cors import CORS
# import custom helpers
from maplib import generate_embed
import loyaltylib as ll
app = Flask(__name__)
CORS(app)
# import declared routes
import frontenddata
@app.route('/ll')
def llfn():
    """Exercise the loyalty-account helpers and answer with an empty 204."""
    ll.create_loyalty_account()
    ll.retrieve_loyalty_account()
    # BUG FIX: a bare ``return`` yields None, which Flask rejects with
    # "view function did not return a valid response"; return 204 No Content.
    return ('', 204)
@app.route('/map')
def map():
    """Render the map page with an embedded map for a hard-coded address."""
    # NOTE(review): this view shadows the builtin ``map`` at module level.
    location = '850 FOLSOM ST, San Francisco, CA 94107'
    addresslist = {'a' : generate_embed(location)}
    return render_template('map.html', addresslist=addresslist)
@app.route('/cal')
def cal():
    """Render the calendar page for a hard-coded demo appointment."""
    appoint = {
        'stylist': 'Bob Nguyen',
        'salon': 'Salon Bobby',
        'event': 'Men\'s Haircut',
        'location':'850 FOLSOM ST, San Francisco, CA 94107',
        'starttime':'2020-06-23 08:00:00',
        'endtime':'2020-06-23 08:45:00',
    }
    return render_template('cal.html', appoint=appoint)
# def loop_matcher(delay):
# while(True):
# print('Matcher Automatically Run')
# handle_matcher()
# #do expired status update here
# time.sleep(delay)
# Run Server
if __name__ == "__main__":
    #matcher_delay = 3600 # 1 hour in seconds
    #p = Process(target=loop_matcher, args=(matcher_delay,))
    #p.start()
    # development server: bind all interfaces, debug on, no auto-reloader
    app.run(host = '0.0.0.0', debug=True, use_reloader=False)
#p.join() | 24.303571 | 66 | 0.653196 | 0 | 0 | 0 | 0 | 658 | 0.483468 | 0 | 0 | 645 | 0.473916 |
4f90a023a910c225c34fb968b47953ace924c78b | 2,652 | py | Python | pytorch_mask_rcnn/visualize.py | zimonitrome/PyTorch-Simple-MaskRCNN | 8e8ff3072331c1df5149964a9787817ed0cf6477 | [
"MIT"
] | null | null | null | pytorch_mask_rcnn/visualize.py | zimonitrome/PyTorch-Simple-MaskRCNN | 8e8ff3072331c1df5149964a9787817ed0cf6477 | [
"MIT"
] | null | null | null | pytorch_mask_rcnn/visualize.py | zimonitrome/PyTorch-Simple-MaskRCNN | 8e8ff3072331c1df5149964a9787817ed0cf6477 | [
"MIT"
] | null | null | null | import torch
import torch.nn.functional as F
try:
import matplotlib.pyplot as plt
except ImportError:
pass
def factor_getter(n, base):
    """Return an RGB factor triple for overlay color #n.

    Intensity decays by 0.8 for every completed group of six colors. Within a
    group, slots 0-2 are pure red/green/blue at full strength; slots 3-5 are
    half-strength grays with one channel zeroed out.
    """
    strength = base * 0.8 ** (n // 6)
    slot = n % 6
    if slot < 3:
        rgb = [0, 0, 0]
        rgb[slot] = strength
    else:
        half = strength / 2
        rgb = [half, half, half]
        rgb[slot - 3] = 0
    return rgb
def resize(image, target, scale_factor):
    """Bilinearly rescale a (C, H, W) image and rescale matching annotations.

    When `target` is a dict, its 'boxes' entry is scaled in place to the new
    resolution and its 'masks' entry is nearest-neighbour resized. Returns the
    (image, target) pair.
    """
    orig_h, orig_w = image.shape[-2:]
    image = F.interpolate(
        image[None], scale_factor=scale_factor, mode='bilinear', align_corners=False
    )[0]

    if target is None:
        return image, target

    new_h, new_w = image.shape[-2:]
    if 'boxes' in target:
        boxes = target['boxes']
        # Scale x coordinates by the width ratio, y coordinates by the height ratio.
        boxes[:, [0, 2]] = boxes[:, [0, 2]] * new_w / orig_w
        boxes[:, [1, 3]] = boxes[:, [1, 3]] * new_h / orig_h
        target['boxes'] = boxes

    if 'masks' in target:
        masks = target['masks']
        # Interpolate in float, then quantize back to byte masks.
        masks = F.interpolate(masks[None].float(), scale_factor=scale_factor)[0].byte()
        target['masks'] = masks

    return image, target
def show(image, target=None, classes=None, scale_factor=None, base=0.4):
    """Display an image with optional mask overlays, boxes and labels via matplotlib.

    Args:
        image: (C, H, W) tensor; cloned, so the caller's tensor is not mutated.
        target: optional dict with any of 'masks', 'boxes', 'labels', 'scores'.
        classes: label-index -> name mapping; required when target has 'labels'.
        scale_factor: if given, image and target are resized first via `resize`.
        base: base intensity handed to `factor_getter` for mask coloring.

    Raises:
        ValueError: when target has 'labels' but `classes` is None.
    """
    image = image.clone()
    if scale_factor is not None:
        image, target = resize(image, target, scale_factor)
    if target is not None and 'masks' in target:
        mask = target['masks']
        # One (3, H, W) color layer per instance mask.
        mask = mask.reshape(-1, 1, mask.shape[1], mask.shape[2])
        mask = mask.repeat(1, 3, 1, 1).to(image)
        for i, m in enumerate(mask):
            # Each instance gets its own RGB tint from factor_getter.
            factor = torch.tensor(factor_getter(i, base)).reshape(3, 1, 1).to(image)
            value = factor * m
            image += value
        image = image.clamp(0, 1)
    im = image.cpu().numpy()
    # matplotlib expects channels last: (H, W, C).
    plt.imshow(im.transpose(1, 2, 0))
    if target is not None:
        if 'boxes' in target:
            box = target['boxes']
            box = box.cpu()
            for i, b in enumerate(box):
                # Trace the box outline as a closed polyline.
                plt.plot(b[[0, 2, 2, 0, 0]], b[[1, 1, 3, 3, 1]])
                if 'labels' in target:
                    l = target['labels'][i].item()
                    if classes is None:
                        raise ValueError("'classes' should not be None when 'target' has 'labels'!")
                    txt = classes[l]
                    if 'scores' in target:
                        s = target['scores'][i]
                        s = round(s.item() * 100)
                        txt = '{} {}%'.format(txt, s)
                    plt.text(
                        b[0], b[1], txt, fontsize=14,
                        bbox=dict(boxstyle='round', fc='white', lw=1, alpha=0.7))
    plt.title(im.shape)
    plt.axis('off')
    plt.show()
4f9120612f328e8baf028521cfe58288d762c155 | 134 | py | Python | hec_gnn/single_model/__init__.py | zlinaf/PowerGear | 51ab67a7e2a2f4833de5196bb8aac57eaf77db69 | [
"MIT"
] | 8 | 2022-03-11T03:29:15.000Z | 2022-03-27T07:39:48.000Z | hec_gnn/single_model/__init__.py | zlinaf/PowerGear | 51ab67a7e2a2f4833de5196bb8aac57eaf77db69 | [
"MIT"
] | null | null | null | hec_gnn/single_model/__init__.py | zlinaf/PowerGear | 51ab67a7e2a2f4833de5196bb8aac57eaf77db69 | [
"MIT"
] | 3 | 2022-03-11T02:30:24.000Z | 2022-03-11T02:35:26.000Z | import sys
import os
ROOT_DIR = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
sys.path.append(ROOT_DIR) | 33.5 | 87 | 0.791045 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
4f912920b3cf2a5552ceadf14640e9d27357ffce | 65 | py | Python | test/Task/ImageSequence/__init__.py | paulondc/chilopoda | 046dbb0c1b4ff20ea5f2e1679f8d89f3089b6aa4 | [
"MIT"
] | 2 | 2019-09-24T18:56:27.000Z | 2021-02-07T04:58:49.000Z | test/Task/ImageSequence/__init__.py | paulondc/kombi | 046dbb0c1b4ff20ea5f2e1679f8d89f3089b6aa4 | [
"MIT"
] | 20 | 2019-02-16T04:21:13.000Z | 2019-03-09T21:21:21.000Z | test/Task/ImageSequence/__init__.py | paulondc/kombi | 046dbb0c1b4ff20ea5f2e1679f8d89f3089b6aa4 | [
"MIT"
] | 3 | 2019-11-15T05:16:32.000Z | 2021-09-28T21:28:29.000Z | from .SequenceThumbnailTaskTest import SequenceThumbnailTaskTest
| 32.5 | 64 | 0.923077 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
4f926892b5144ce9012c8e3193223c3bccd78877 | 2,898 | py | Python | flax/tests/geometry_test.py | eevee/flax | 5b4f310c0f980cbc2c1de3a7e74edf1c8f292702 | [
"MIT"
] | 47 | 2015-01-08T09:16:47.000Z | 2021-11-08T11:53:09.000Z | flax/tests/geometry_test.py | eevee/flax | 5b4f310c0f980cbc2c1de3a7e74edf1c8f292702 | [
"MIT"
] | 3 | 2015-03-01T19:44:53.000Z | 2016-03-04T04:10:28.000Z | flax/tests/geometry_test.py | eevee/flax | 5b4f310c0f980cbc2c1de3a7e74edf1c8f292702 | [
"MIT"
] | 9 | 2015-03-02T06:55:42.000Z | 2021-11-03T16:48:47.000Z | from flax.geometry import Blob, Point, Rectangle, Size, Span
def test_blob_create():
    """A blob built from a rectangle inherits the rectangle's area and height."""
    rect = Rectangle(origin=Point(0, 0), size=Size(5, 5))
    blob = Blob.from_rectangle(rect)
    assert blob.area == rect.area
    assert blob.height == rect.height
def test_blob_math_disjoint():
    """Union and difference of two non-overlapping blobs behave like set ops."""
    # These rectangles look like this:
    # xxx
    # xxx
    # xxx   xxx
    #       xxx
    #       xxx
    rect1 = Rectangle(origin=Point(0, 0), size=Size(3, 3))
    rect2 = Rectangle(origin=Point(6, 2), size=Size(3, 3))
    blob1 = Blob.from_rectangle(rect1)
    blob2 = Blob.from_rectangle(rect2)

    # Disjoint union: areas simply add; combined height spans both blobs.
    union_blob = blob1 + blob2
    assert union_blob.area == blob1.area + blob2.area
    assert union_blob.area == rect1.area + rect2.area
    assert union_blob.height == 5

    # Subtracting a disjoint blob is a no-op in either direction.
    left_blob = blob1 - blob2
    assert left_blob.area == blob1.area
    assert left_blob == blob1

    right_blob = blob2 - blob1
    assert right_blob.area == blob2.area
    assert right_blob == blob2
def test_blob_math_overlap():
    """Union/difference of two partially overlapping blobs produce L-shaped remainders."""
    # These rectangles look like this:
    # xxx
    # x##x
    # x##x
    #  xxx
    rect1 = Rectangle(origin=Point(0, 0), size=Size(3, 3))
    rect2 = Rectangle(origin=Point(1, 1), size=Size(3, 3))
    blob1 = Blob.from_rectangle(rect1)
    blob2 = Blob.from_rectangle(rect2)
    # 9 + 9 minus the 2x2 overlap = 14.
    union_blob = blob1 + blob2
    assert union_blob.area == 14
    # blob1 minus the overlap leaves an L of 5 cells along the top/left.
    left_blob = blob1 - blob2
    assert left_blob.area == 5
    assert left_blob.height == 3
    assert left_blob.spans == {
        0: (Span(0, 2),),
        1: (Span(0, 0),),
        2: (Span(0, 0),),
    }
    # blob2 minus the overlap leaves the mirrored L along the bottom/right.
    right_blob = blob2 - blob1
    assert right_blob.area == 5
    assert right_blob.height == 3
    assert right_blob.spans == {
        1: (Span(3, 3),),
        2: (Span(3, 3),),
        3: (Span(1, 3),),
    }
def test_blob_math_contain():
    """Union/difference when one blob fully contains the other."""
    # These rectangles look like this:
    # xxxxx
    # x###x
    # x###x
    # x###x
    # xxxxx
    rect1 = Rectangle(origin=Point(0, 0), size=Size(5, 5))
    rect2 = Rectangle(origin=Point(1, 1), size=Size(3, 3))
    blob1 = Blob.from_rectangle(rect1)
    blob2 = Blob.from_rectangle(rect2)
    # Union with a contained blob is the outer blob unchanged.
    union_blob = blob1 + blob2
    assert union_blob.area == blob1.area
    assert union_blob.height == blob1.height
    # Outer minus inner leaves a one-cell-thick ring: 25 - 9 = 16 cells.
    left_blob = blob1 - blob2
    assert left_blob.area == 16
    assert left_blob.height == 5
    assert left_blob.spans == {
        0: (Span(0, 4),),
        1: (Span(0, 0), Span(4, 4)),
        2: (Span(0, 0), Span(4, 4)),
        3: (Span(0, 0), Span(4, 4)),
        4: (Span(0, 4),),
    }
    # Inner minus outer is empty.
    right_blob = blob2 - blob1
    assert right_blob.area == 0
    assert right_blob.height == 0
    assert right_blob.spans == {}
def test_blob_math_fuzzer():
    # TODO: placeholder — implement randomized property-based blob-math checks.
    pass
| 25.421053 | 60 | 0.6049 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 203 | 0.070048 |
4f926b4a8eb11004ae116e00ec4924740a1b7257 | 224 | py | Python | generators/UGATIT/dataset/day2rain/txt_gen.py | JW9MsjwjnpdRLFw/RMT | a877fd78639a8d4c534d0373b9d0ad023e0fa2dd | [
"MIT"
] | null | null | null | generators/UGATIT/dataset/day2rain/txt_gen.py | JW9MsjwjnpdRLFw/RMT | a877fd78639a8d4c534d0373b9d0ad023e0fa2dd | [
"MIT"
] | null | null | null | generators/UGATIT/dataset/day2rain/txt_gen.py | JW9MsjwjnpdRLFw/RMT | a877fd78639a8d4c534d0373b9d0ad023e0fa2dd | [
"MIT"
] | 3 | 2021-01-25T02:44:23.000Z | 2021-04-09T13:25:57.000Z | import os
dir = ['trainA', 'trainB', 'testA', 'testB']
for d in dir:
img_names = os.listdir(d)
f = open('list_' + d + '.txt', "w")
for img in img_names:
f.write('./' + img + '\n')
# print(img_names) | 22.4 | 44 | 0.522321 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 72 | 0.321429 |
4f934a58ff2ca49d71df3afac1a02ffb913fa343 | 260 | py | Python | resources/libExamples/python_class/reader1.py | andy897221/Proof-of-Play-Flow-Demo | 018ec382801f1363711b7680e728535a2ac94d26 | [
"MIT"
] | null | null | null | resources/libExamples/python_class/reader1.py | andy897221/Proof-of-Play-Flow-Demo | 018ec382801f1363711b7680e728535a2ac94d26 | [
"MIT"
] | null | null | null | resources/libExamples/python_class/reader1.py | andy897221/Proof-of-Play-Flow-Demo | 018ec382801f1363711b7680e728535a2ac94d26 | [
"MIT"
] | null | null | null | import time
from concurrent.futures import ThreadPoolExecutor
import read2
import data_class
executor = ThreadPoolExecutor(1)
executor.submit(read2.test)
print(data_class.data.a)
data_class.data.a = "reader 1 received!"
time.sleep(3)
print(data_class.data.a) | 21.666667 | 49 | 0.815385 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 20 | 0.076923 |
4f94469aa70e6bd1234bc19b4b98cb6d67b1b6d5 | 2,990 | py | Python | slowfast/datasets/utils.py | serre-lab/pred_gn | 437034687a561e72bf013dc295454da239748044 | [
"Apache-2.0"
] | null | null | null | slowfast/datasets/utils.py | serre-lab/pred_gn | 437034687a561e72bf013dc295454da239748044 | [
"Apache-2.0"
] | null | null | null | slowfast/datasets/utils.py | serre-lab/pred_gn | 437034687a561e72bf013dc295454da239748044 | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/env python3
import logging
import numpy as np
import time
import torch
import cv2
logger = logging.getLogger(__name__)
def retry_load_images(image_paths, retry=10, backend="pytorch"):
    """
    This function is to load images with support of retrying for failed load.
    Args:
        image_paths (list): paths of images needed to be loaded.
        retry (int, optional): maximum time of loading retrying. Defaults to 10.
        backend (str): `pytorch` or `cv2`.
    Returns:
        imgs: list of BGR arrays, or a stacked uint8 tensor when backend is
            `pytorch`.
    Raises:
        Exception: when any image still fails to load after `retry` attempts.
    """
    for i in range(retry):
        imgs = [cv2.imread(image_path) for image_path in image_paths]
        if all(img is not None for img in imgs):
            if backend == "pytorch":
                imgs = torch.as_tensor(np.stack(imgs))
            return imgs
        # logger.warn is deprecated; logger.warning is the supported spelling.
        logger.warning("Reading failed. Will retry.")
        if i < retry - 1:
            # Back off before the next attempt — but not after the final one,
            # where the original slept pointlessly before raising.
            time.sleep(1.0)
    raise Exception("Failed to load images {}".format(image_paths))
def get_sequence(center_idx, half_len, sample_rate, num_frames):
    """
    Sample frame indexes for the clip centred on `center_idx`.

    Args:
        center_idx (int): center frame idx for current clip
        half_len (int): half of the clip length
        sample_rate (int): stride between sampled frames inside the clip
        num_frames (int): total frame count; indexes are clamped to
            [0, num_frames - 1]
    Returns:
        list: indexes of sampled frames in this clip, clamped to valid range.
    """
    last = num_frames - 1
    return [
        0 if idx < 0 else (last if idx > last else idx)
        for idx in range(center_idx - half_len, center_idx + half_len, sample_rate)
    ]
def pack_pathway_output(cfg, frames):
    """
    Prepare output as a list of tensors, one per pathway.

    Args:
        cfg: config object; reads MODEL.ARCH, MODEL.MULTI_PATHWAY_ARCH and
            SLOWFAST.ALPHA.
        frames (tensor): sampled video frames, `channel` x `num frames` x
            `height` x `width`.
    Returns:
        list: [slow_pathway, fast_pathway] for multi-pathway architectures,
        otherwise [frames] unchanged.
    """
    if cfg.MODEL.ARCH in cfg.MODEL.MULTI_PATHWAY_ARCH:
        num_frames = frames.shape[1]
        # The slow pathway keeps every ALPHA-th frame (evenly spaced indexes).
        slow_indices = torch.linspace(
            0, num_frames - 1, num_frames // cfg.SLOWFAST.ALPHA
        ).long()
        slow_pathway = torch.index_select(frames, 1, slow_indices)
        return [slow_pathway, frames]
    return [frames]
| 31.473684 | 80 | 0.607692 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1,540 | 0.51505 |
4f96c33c8a6968237aa27de63e2f0f42021a110e | 1,437 | py | Python | malleus/client/service/call_service.py | joelgerard/malleus | 763850ef270a449829b89a998cdce8febf5020ef | [
"Apache-2.0"
] | null | null | null | malleus/client/service/call_service.py | joelgerard/malleus | 763850ef270a449829b89a998cdce8febf5020ef | [
"Apache-2.0"
] | 2 | 2021-02-08T20:22:50.000Z | 2021-06-01T22:07:40.000Z | malleus/client/service/call_service.py | joelgerard/malleus | 763850ef270a449829b89a998cdce8febf5020ef | [
"Apache-2.0"
] | null | null | null | import config
from malleus.api.service.protos.bench_service_pb2 import BenchRequest
#from malleus.api.service.protos.bench_service_pb2 import BenchRequest.Datasource
from malleus.api.domain.timer import Timer
import grpc
from malleus.api.service.protos.bench_service_pb2 import BenchRequest
import malleus.api.service.protos.bench_service_pb2_grpc as bench_service_pb2_grpc
class CallService:
    """gRPC client wrapper for the bench service of a given region.

    The channel host is looked up in `config.host` by region name.
    """
    def __init__(self, region):
        channel = grpc.insecure_channel(config.host[region])
        self.stub = bench_service_pb2_grpc.BenchServiceStub(channel)
    def write(self, num, datasource = None):
        """Issue a write benchmark of `num` operations.

        NOTE(review): the `datasource` parameter is accepted but never used —
        confirm whether the request should set it.
        """
        bench_request = BenchRequest()
        bench_request.num = num
        return self.stub.write(bench_request)
    def read(self, num, datasource = None):
        """Run a read benchmark of `num` operations against each backend.

        NOTE(review): the `datasource` argument is ignored — the loop variable
        rebinds it and both GDATASTORE and MONGODB are always benchmarked.
        Prints per-backend stats; returns None.
        """
        datasources = [BenchRequest.GDATASTORE, BenchRequest.MONGODB]
        for datasource in datasources:
            bench_request = BenchRequest()
            bench_request.datasource = datasource
            bench_request.num = num
            timings = self.stub.read(bench_request)
            timer = Timer(timings)
            self.print_stats(datasource, timer)
        #return timings
    def print_stats(self, datasource, timer):
        """Print duration, mean and tail latencies for one backend's run."""
        print(datasource)
        print("Duration: " + str(timer.get_duration()))
        print("Average: " + str(timer.get_avg()))
        print("95pct:" + str(timer.get_95p()))
        print("99pct:" + str(timer.get_99p()))
| 37.815789 | 82 | 0.695894 | 1,060 | 0.737648 | 0 | 0 | 0 | 0 | 0 | 0 | 135 | 0.093946 |
4f9759e43dd9c4a722e982e34e894b89c7ce75b3 | 1,999 | py | Python | resolwe_bio/tests/workflows/test_chemut.py | gregorjerse/resolwe-bio | 80f1e354cf0014a1eeff00acc112c622a2a044a9 | [
"Apache-2.0"
] | 12 | 2015-12-07T18:29:27.000Z | 2022-03-16T08:00:18.000Z | resolwe_bio/tests/workflows/test_chemut.py | gregorjerse/resolwe-bio | 80f1e354cf0014a1eeff00acc112c622a2a044a9 | [
"Apache-2.0"
] | 480 | 2015-11-20T21:46:43.000Z | 2022-03-28T12:40:57.000Z | resolwe_bio/tests/workflows/test_chemut.py | gregorjerse/resolwe-bio | 80f1e354cf0014a1eeff00acc112c622a2a044a9 | [
"Apache-2.0"
] | 45 | 2015-11-19T14:54:07.000Z | 2022-02-13T21:36:50.000Z | from resolwe.flow.models import Data
from resolwe.test import tag_process
from resolwe_bio.utils.filter import filter_vcf_variable
from resolwe_bio.utils.test import BioProcessTestCase
class CheMutWorkflowTestCase(BioProcessTestCase):
    """End-to-end test of the CheMut SNV-calling workflow."""
    @tag_process("workflow-chemut")
    def test_chemut_workflow(self):
        """Upload genome + paired reads, align both strains, run the workflow,
        then check every Data object finished and the VCF matches the fixture."""
        with self.preparation_stage():
            # Reference genome and its BWA index.
            inputs = {
                "src": "chemut_genome.fasta.gz",
                "species": "Dictyostelium discoideum",
                "build": "dd-05-2009",
            }
            ref_seq = self.run_process("upload-fasta-nucl", inputs)
            bwa_index = self.run_process("bwa-index", {"ref_seq": ref_seq.id})
            # Paired-end reads for the parental (AX4) and mutant (CM) strains.
            inputs = {"src1": ["AX4_mate1.fq.gz"], "src2": ["AX4_mate2.fq.gz"]}
            parental_reads = self.run_process("upload-fastq-paired", inputs)
            inputs = {"src1": ["CM_mate1.fq.gz"], "src2": ["CM_mate2.fq.gz"]}
            mut_reads = self.run_process("upload-fastq-paired", inputs)
            inputs = {"genome": bwa_index.id, "reads": parental_reads.id}
            align_parental = self.run_process("alignment-bwa-mem", inputs)
            inputs = {"genome": bwa_index.id, "reads": mut_reads.id}
            align_mut = self.run_process("alignment-bwa-mem", inputs)
        self.run_process(
            "workflow-chemut",
            {
                "analysis_type": "snv",
                "parental_strains": [align_parental.id],
                "mutant_strains": [align_mut.id],
                "genome": ref_seq.id,
                "Vc": {"stand_emit_conf": 15, "stand_call_conf": 35, "rf": True},
                "Vf": {"read_depth": 7},
            },
        )
        for data in Data.objects.all():
            self.assertStatus(data, Data.STATUS_DONE)
        # The last Data object is the variant-calling output.
        variants = Data.objects.last()
        self.assertFile(
            variants,
            "vcf",
            "chemut.vcf.gz",
            file_filter=filter_vcf_variable,
            compression="gzip",
        )
| 35.070175 | 81 | 0.568284 | 1,810 | 0.905453 | 0 | 0 | 1,756 | 0.878439 | 0 | 0 | 502 | 0.251126 |
4f98c653568190ba3d9757f9946ea9b828f77543 | 186 | py | Python | code/src/scripts/pause.py | nuvla/job-engine | 58d42bd24d8dd2c6e28541c08df1455c9ac909f6 | [
"Apache-2.0"
] | 3 | 2019-04-27T10:36:21.000Z | 2019-04-29T12:41:57.000Z | code/src/scripts/pause.py | nuvla/job-engine | 58d42bd24d8dd2c6e28541c08df1455c9ac909f6 | [
"Apache-2.0"
] | 131 | 2019-02-13T06:00:49.000Z | 2022-03-29T15:06:03.000Z | code/src/scripts/pause.py | nuvla/job-engine | 58d42bd24d8dd2c6e28541c08df1455c9ac909f6 | [
"Apache-2.0"
] | 1 | 2020-12-03T11:35:21.000Z | 2020-12-03T11:35:21.000Z | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import docker
import socket
# Pause (freeze) the container this script runs inside: the local hostname is
# used as the container identifier (presumably the container ID — confirm the
# deployment sets hostnames that way).
docker_client = docker.from_env()
myself = docker_client.containers.get(socket.gethostname())
myself.pause()
4f98fc6e974de77141b6fbb73e66badbc6f28920 | 3,282 | py | Python | scripts/parchg.py | Vevn/ENVISIoN | d0e48a5ec38ed95375f632eafdc5814415f0f570 | [
"BSD-2-Clause"
] | null | null | null | scripts/parchg.py | Vevn/ENVISIoN | d0e48a5ec38ed95375f632eafdc5814415f0f570 | [
"BSD-2-Clause"
] | null | null | null | scripts/parchg.py | Vevn/ENVISIoN | d0e48a5ec38ed95375f632eafdc5814415f0f570 | [
"BSD-2-Clause"
] | 1 | 2020-05-15T14:52:19.000Z | 2020-05-15T14:52:19.000Z | # ENVISIoN
#
# Copyright (c) 2018 Jesper Ericsson
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
# ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
##############################################################################################
# CONFIGURE VISUALISATION HERE
# Path to your envision installation
PATH_TO_ENVISION = "C:/Kandidatprojekt/ENVISIoN-sommar"
# Path to the vasp output directory you wish to visualise
PATH_TO_VASP_CALC = "C:/Kandidatprojekt/VASP/partial_charges"
# Path to where you want to save the resulting hdf5 file
PATH_TO_HDF5 = "C:/Kandidatprojekt/HDF5-demo/parchg_demo.hdf5"
# Set band selections and modes
# band_list : list of the bands you want to visualize, by number, e.g. [34,55,190] to select band 34, 55 and 190
# mode_list : Specifies how to visualize a specific band. In the order you enumerated your bands in parchg_list, choose mode where
# 0 for 'total'
# 1 for 'magnetic'
# 2 for 'up'
# 3 for 'down'
# Example: If band_list is [31, 212] and mode_list is [1,3], band 31 will be visualized as 'magnetic' and 212 as 'down'
band_list = [1, 2, 3, 4]
mode_list = [0, 1, 2, 3]
import os, sys, inspect, inviwopy
sys.path.append(PATH_TO_ENVISION)
import envisionpy
import envisionpy.hdf5parser
from envisionpy.processor_network.ParchgNetworkHandler import ParchgNetworkHandler
# Parse the VASP output: extract partial-charge (PARCHG) data and the unit
# cell into the HDF5 file configured above.
envisionpy.hdf5parser.parchg(PATH_TO_HDF5, PATH_TO_VASP_CALC)
envisionpy.hdf5parser.unitcell(PATH_TO_HDF5, PATH_TO_VASP_CALC)
# Initialize the Inviwo network from a clean slate and select the bands/modes
# configured above.
inviwopy.app.network.clear()
networkHandler = ParchgNetworkHandler(PATH_TO_HDF5, inviwopy.app)
networkHandler.select_bands(band_list, mode_list)
# Configure slice visualisation: show the slice canvas with a cutting plane
# whose normal is (0, 1, 0), placed halfway up the volume.
networkHandler.toggle_slice_canvas(True)
networkHandler.toggle_slice_plane(True)
networkHandler.set_plane_normal(0, 1, 0)
networkHandler.set_plane_height(0.5)
# Configure unit-cell visualisation (only when unit-cell data was parsed).
if networkHandler.unitcellAvailable:
    #networkHandler.hide_atoms()
    networkHandler.toggle_unitcell_canvas(True)
    networkHandler.set_atom_radius(0.2)
| 42.623377 | 130 | 0.763254 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2,364 | 0.720293 |
4f99110a6e3f66f4ccceeec3bdd358ee7d2b0212 | 75 | py | Python | test/hello64_patch_search.py | Nicholas1126/patchkit | 290f61440abdcb8a334c0823326c0bc4e110059e | [
"MIT"
] | null | null | null | test/hello64_patch_search.py | Nicholas1126/patchkit | 290f61440abdcb8a334c0823326c0bc4e110059e | [
"MIT"
] | null | null | null | test/hello64_patch_search.py | Nicholas1126/patchkit | 290f61440abdcb8a334c0823326c0bc4e110059e | [
"MIT"
] | null | null | null | def search_add(pt):
add_addr = pt.search('add')
print hex(add_addr) | 25 | 31 | 0.666667 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 5 | 0.066667 |
4f997b4422654e8ded613b50854c6295cfbecb26 | 2,437 | py | Python | functions/sentim-preprocess/main.py | gipfelen/Sentiment-Analysis | f852f9887310cdff90e115cc140a2f8ae3618087 | [
"MIT"
] | null | null | null | functions/sentim-preprocess/main.py | gipfelen/Sentiment-Analysis | f852f9887310cdff90e115cc140a2f8ae3618087 | [
"MIT"
] | 1 | 2022-01-04T22:47:06.000Z | 2022-01-04T22:47:06.000Z | functions/sentim-preprocess/main.py | gipfelen/Sentiment-Analysis | f852f9887310cdff90e115cc140a2f8ae3618087 | [
"MIT"
] | 3 | 2021-04-15T09:47:28.000Z | 2021-12-06T21:05:28.000Z | import json
import re
##################################################
########## Boilerplate wrapping code #############
##################################################
# IBM wrapper
def main(args):
    """IBM Cloud Functions entry point: `args` arrives as an already-parsed dict."""
    res = sentim_preprocess(args)
    return res
# AWS wrapper
def lambda_handler(event, context):
    """AWS Lambda entry point: parse the POST body JSON, tokenize, and wrap
    the result in an HTTP-style response dict."""
    # read in the args from the POST object
    json_input = json.loads(event["body"])
    res = sentim_preprocess(json_input)
    return {"statusCode": 200, "body": json.dumps(res)}
##################################################
##################################################
# { 'text': string, 'id': number, 'location': 'string' }
# { 'text': string, 'id': number, 'location': 'string' }
def preprocess(tweet):
    """Tokenize and normalize one tweet dict.

    Splits 'text' into sentences on . ! ?, then each sentence into lowercase
    alphanumeric words. Returns a copy of the tweet with 'text' replaced by
    'sentences' (a list of word lists), or None when 'text' is missing.
    """
    if "text" not in tweet:
        return None
    text = tweet["text"]
    # Quick-and-dirty sentence split: treat ! and ? like full stops, then
    # drop empty fragments.
    sentences = [s for s in text.replace("!", ".").replace("?", ".").split(".") if s]
    # (The original also built `sencences` [sic] with '*' substitutions and
    # discarded it — dead code removed; the per-word substitution below is
    # what actually strips non-alphanumerics.)

    processed_sentences = []
    for sentence in sentences:
        # Split on spaces, strip non-alphanumerics, drop empties, lowercase.
        words = [re.sub("[^0-9a-zA-Z]+", "", word) for word in sentence.split(" ")]
        words = [word.lower() for word in words if word]
        processed_sentences.append(words)

    # Return sentences in place of text.
    res = {
        **tweet,
        "sentences": processed_sentences,
    }
    res.pop("text", None)
    return res
# Tokenizes and normalizes (TODO) tweets
def sentim_preprocess(j):
# read in the args
tweets = j["tweets"]["tweets"] # TODO
# do the preprocessing
tokenized_tweets = [preprocess(tweet) for tweet in tweets]
# filter out invalids
tokenized_tweets = [t for t in tokenized_tweets if t]
# return the result
res = {"tokenized_tweets": tokenized_tweets}
return res
# Docker wrapper
if __name__ == "__main__":
    # Read the payload from the fixture file; the context manager closes the
    # handle (the original leaked it via a bare open().read()).
    with open("jsonInput.json") as fh:
        json_input = json.load(fh)
    result = sentim_preprocess(json_input)
    # write the tokenized result to std out
    print(json.dumps(result))
4f99c3f7f27703ec474adca4fd3ebb493a659631 | 3,981 | py | Python | pixette/application.py | EXLER/pixette | 0d31238fcb99ab8936f8b37c8c3987b70a326df9 | [
"MIT"
] | null | null | null | pixette/application.py | EXLER/pixette | 0d31238fcb99ab8936f8b37c8c3987b70a326df9 | [
"MIT"
] | null | null | null | pixette/application.py | EXLER/pixette | 0d31238fcb99ab8936f8b37c8c3987b70a326df9 | [
"MIT"
] | null | null | null | import logging
import os
import pygame
from pixette.scenes.admin import AdminScene
from pixette.scenes.clock import ClockScene
from pixette.scenes.currencies import CurrenciesScene
from pixette.scenes.weather import WeatherScene
class Application:
    """Pygame application shell for the Pixette device.

    Owns the display surface, the scene list, and the main loop; the device's
    hardware buttons page backward/forward through the scenes.
    """
    def __init__(self, device, title, resolution, update_rate, debug=False):
        self.device = device
        if not debug:
            # Non-debug runs render to the framebuffer device (fb1) instead of
            # a desktop window.
            os.environ["SDL_FBDEV"] = "/dev/fb1"
            os.environ["SDL_VIDEODRIVER"] = "fbcon"
            os.environ["SDL_VIDEO_CENTERED"] = "1"
        pygame.init()
        pygame.mouse.set_visible(False)
        self._scene = None
        self._screen = None
        self.title = title
        self.resolution = resolution
        self.update_rate = update_rate
        logging.info("Display initialized")
        self.scenes = [ClockScene(), CurrenciesScene(), WeatherScene(lat=51.11, lon=17.04), AdminScene()]
        logging.info("Scenes loaded")
        # Hardware buttons cycle through the scene list.
        self.device.left_btn.when_pressed = self.previous_scene
        self.device.right_btn.when_pressed = self.next_scene
    @property
    def title(self):
        # NOTE(review): get_caption() returns pygame's caption value (a tuple
        # of title and icon title), not the plain string passed to the setter.
        return pygame.display.get_caption()
    @title.setter
    def title(self, value):
        pygame.display.set_caption(value)
    @property
    def resolution(self):
        """Current display size as a (width, height) tuple."""
        return self._screen.get_size()
    @resolution.setter
    def resolution(self, value):
        # Re-creating the display surface is how pygame applies a new size.
        self._screen = pygame.display.set_mode(value)
    @property
    def width(self):
        return self.resolution[0]
    @property
    def height(self):
        return self.resolution[1]
    @property
    def active_scene(self):
        """Scene currently receiving events/updates; None once shut down."""
        return self._scene
    def next_scene(self):
        """Advance to the next scene, wrapping to the first after the last."""
        index = self.scenes.index(self.active_scene)
        if index == len(self.scenes) - 1:
            self.change_scene(self.scenes[0])
        else:
            self.change_scene(self.scenes[index + 1])
    def previous_scene(self):
        """Step back to the previous scene, wrapping to the last from the first."""
        index = self.scenes.index(self.active_scene)
        if index == 0:
            self.change_scene(self.scenes[-1])
        else:
            self.change_scene(self.scenes[index - 1])
    def change_scene(self, scene):
        """
        Change the currently active scene.
        This will invoke `scene.on_exit` and `scene.on_enter` methods on the switching scenes.
        If `None` is provided, the application's execution will end.
        """
        if self.active_scene is not None:
            self.active_scene.on_exit(next_scene=scene)
            self.active_scene._application = None
        self._scene, old_scene = scene, self.active_scene
        if self.active_scene is not None:
            self.active_scene._application = self
            self.active_scene.on_enter(previous_scene=old_scene)
    def run(self, scene=None):
        """
        Start the application at given scene.

        `scene` is a scene *class*; the first instance of that class in
        self.scenes becomes active. Blocks in the main loop until the active
        scene becomes None (QUIT event), Ctrl-C, or an unhandled exception.
        """
        if scene is not None:
            for s in self.scenes:
                if type(s) == scene:
                    self.change_scene(s)
                    break
        elif self.active_scene is not None:
            # NOTE(review): this passes scene=None, which change_scene treats
            # as shutdown — confirm this branch is intended.
            self.change_scene(scene)
        else:
            raise ValueError("No scene provided")
        clock = pygame.time.Clock()
        while self.active_scene is not None:
            try:
                for event in pygame.event.get():
                    self.active_scene.handle_event(event)
                    if event.type == pygame.QUIT:
                        self.change_scene(None)  # Trigger scene.on_exit()
                        return
                    self.device.keys(event)
                # clock.tick caps the frame rate and returns elapsed ms.
                dt = clock.tick(self.update_rate)
                self.active_scene.update(dt)
                self.active_scene.draw(self._screen)
                pygame.display.update()
            except KeyboardInterrupt:
                logging.info("Shutting down")
                break
            except Exception as e:
                logging.exception("Caught exception: %s" % e, exc_info=True)
                break
| 30.860465 | 105 | 0.596333 | 3,747 | 0.941221 | 0 | 0 | 529 | 0.132881 | 0 | 0 | 469 | 0.11781 |
4f9a379d3ba99f8e8d48d7249f66b2636ad03135 | 880 | py | Python | backend/app/security.py | Kwsswart/writter | 851b887d0c8b0a9489f530065f0efe744bb149f3 | [
"MIT"
] | null | null | null | backend/app/security.py | Kwsswart/writter | 851b887d0c8b0a9489f530065f0efe744bb149f3 | [
"MIT"
] | null | null | null | backend/app/security.py | Kwsswart/writter | 851b887d0c8b0a9489f530065f0efe744bb149f3 | [
"MIT"
] | null | null | null | import os
from bcrypt import hashpw, gensalt, checkpw
from base64 import b64encode
from hashlib import sha256
from cryptography.fernet import Fernet
e = Fernet(os.getenv("KEY"))
def encpwd(pwd):
    """
    Hash a password: SHA-256 digest, base64-encoded, then bcrypt with a
    fresh salt. Returns the bcrypt hash as a str.
    """
    return hashpw(b64encode(sha256(pwd.encode()).digest()), gensalt()).decode()
def checkpwd(x, y):
    """
    Return True when plaintext ``x`` matches the stored bcrypt hash ``y``.
    ``x`` is pre-hashed (SHA-256 + base64) exactly as in encpwd before the
    bcrypt comparison.
    """
    return checkpw(b64encode(sha256(x.encode()).digest()), y.encode())
def gen_fernet_key():
    """
    Generate a new Fernet encryption key and return it as a str.
    """
    return Fernet.generate_key().decode()
def enc(txt: str) -> str:
    """
    Encrypt ``txt`` with the module-level Fernet instance; returns the token as str.
    """
    return e.encrypt(txt.encode()).decode()
def dec(txt: str) -> str:
    """
    Decrypt a Fernet token produced by enc back to the plaintext str.
    """
    return e.decrypt(txt.encode()).decode()
4f9ac2bfc1a1e513e9625d77e9d955a817e50301 | 427 | py | Python | zfused_maya/zfused_maya/tool/animation/studiolibrary/examples/example3.py | zhoulh0322/zfused_outsource_old | 3d20c1493c3cffd742b10a7db6e0b1f3bb374af8 | [
"Apache-2.0"
] | 1 | 2022-01-14T11:40:35.000Z | 2022-01-14T11:40:35.000Z | zfused_maya/zfused_maya/tool/animation/studiolibrary/examples/example3.py | zhoulh0322/zfused_outsource_old | 3d20c1493c3cffd742b10a7db6e0b1f3bb374af8 | [
"Apache-2.0"
] | null | null | null | zfused_maya/zfused_maya/tool/animation/studiolibrary/examples/example3.py | zhoulh0322/zfused_outsource_old | 3d20c1493c3cffd742b10a7db6e0b1f3bb374af8 | [
"Apache-2.0"
] | null | null | null | # An example for how to lock specific folders using the lockRegExp param
import studiolibrary
if __name__ == "__main__":
    # Use the studiolibrary.app context for creating a QApplication instance
    with studiolibrary.app():
        # Lock all folders whose path contains "icon" OR "Pixar" (regex alternation)
        lockRegExp = "icon|Pixar"
        studiolibrary.main(name="Example3", path="data", lockRegExp=lockRegExp)
| 28.466667 | 79 | 0.711944 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 252 | 0.590164 |
4f9ca017d8993f316f0c7fd3c1640c6b4667a2cc | 1,002 | py | Python | djangae/contrib/googleauth/backends/base.py | sleepyjames/djangae | 79a9e42c09c3be1189464870f008f8af7060bd9c | [
"BSD-3-Clause"
] | 467 | 2015-01-02T22:35:37.000Z | 2022-02-22T23:13:36.000Z | djangae/contrib/googleauth/backends/base.py | sleepyjames/djangae | 79a9e42c09c3be1189464870f008f8af7060bd9c | [
"BSD-3-Clause"
] | 743 | 2015-01-02T15:55:34.000Z | 2021-01-29T09:43:19.000Z | djangae/contrib/googleauth/backends/base.py | sleepyjames/djangae | 79a9e42c09c3be1189464870f008f8af7060bd9c | [
"BSD-3-Clause"
] | 154 | 2015-01-01T17:05:59.000Z | 2021-12-09T06:40:07.000Z | """
This is duplicated from Django 3.0 to avoid
starting an import chain that ends up with
ContentTypes which may not be installed in a
Djangae project.
"""
class BaseBackend:
def authenticate(self, request, **kwargs):
return None
@classmethod
def can_authenticate(cls, request):
"""
This is a pre-check to see if the credentials are
available to try to authenticate.
"""
return True
def get_user(self, user_id):
return None
def get_user_permissions(self, user_obj, obj=None):
return set()
def get_group_permissions(self, user_obj, obj=None):
return set()
def get_all_permissions(self, user_obj, obj=None):
return {
*self.get_user_permissions(user_obj, obj=obj),
*self.get_group_permissions(user_obj, obj=obj),
}
def has_perm(self, user_obj, perm, obj=None):
return perm in self.get_all_permissions(user_obj, obj=obj)
| 26.368421 | 66 | 0.636727 | 826 | 0.824351 | 0 | 0 | 204 | 0.203593 | 0 | 0 | 295 | 0.294411 |
4f9d958bf40e31e749c287f77979cfb2be9e5cee | 396 | py | Python | tests/cell_fabric/test_rect.py | mabrains/ALIGN-public | 9a6c14310de13df369a8340f465911b629f15a3f | [
"BSD-3-Clause"
] | 119 | 2019-05-14T18:44:34.000Z | 2022-03-17T01:01:02.000Z | tests/cell_fabric/test_rect.py | mabrains/ALIGN-public | 9a6c14310de13df369a8340f465911b629f15a3f | [
"BSD-3-Clause"
] | 717 | 2019-04-03T15:36:35.000Z | 2022-03-31T21:56:47.000Z | tests/cell_fabric/test_rect.py | mabrains/ALIGN-public | 9a6c14310de13df369a8340f465911b629f15a3f | [
"BSD-3-Clause"
] | 34 | 2019-04-01T21:21:27.000Z | 2022-03-21T09:46:57.000Z | from align.cell_fabric.transformation import Rect
def test_toList():
    """A freshly built Rect serializes to [llx, lly, urx, ury]."""
    rect = Rect(0, 0, 1, 1)
    assert [0, 0, 1, 1] == rect.toList()
def test_canonical():
    """canonical() reorders swapped corners into lower-left / upper-right."""
    rect = Rect(1, 1, 0, 0)
    assert [0, 0, 1, 1] == rect.canonical().toList()
def test_repr():
    """__repr__, repr() and str() all render the list form."""
    rect = Rect(0, 0, 1, 1)
    expected = "[0, 0, 1, 1]"
    assert rect.__repr__() == expected
    assert repr(rect) == expected
    assert str(rect) == expected
| 24.75 | 49 | 0.517677 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 42 | 0.106061 |
4f9ee549c0660d61b100be39690bb3e9a1b75160 | 2,800 | py | Python | CALCI PROGRAM TKINTER.py | Madmaxcoder2612/Programming-Codes | f70c06ed4a7892ed55673f66c1585370d3f1169f | [
"MIT"
] | null | null | null | CALCI PROGRAM TKINTER.py | Madmaxcoder2612/Programming-Codes | f70c06ed4a7892ed55673f66c1585370d3f1169f | [
"MIT"
] | null | null | null | CALCI PROGRAM TKINTER.py | Madmaxcoder2612/Programming-Codes | f70c06ed4a7892ed55673f66c1585370d3f1169f | [
"MIT"
] | null | null | null | # GUI Development using Tkinter
import tkinter as tk
app = tk.Tk()
app.geometry('340x310')
app.title("Calculator")

entry = tk.Entry(app, text='0', font=('arial', 20, 'normal'))
entry.place(x=20, y=15)


def number(n):
    """Handle one key press: digits/operators append, 'C' clears,
    'del' erases the last character, 'ans' evaluates the expression."""
    if n == "C":
        entry.delete(0, 'end')
    elif n == "ans":
        expression = entry.get()
        entry.delete(0, 'end')
        # NOTE: eval on user-typed text — acceptable for a local toy
        # calculator, never for untrusted input.
        entry.insert('end', eval(expression))
    elif n == 'del':
        entry.delete(len(entry.get()) - 1)
    else:
        entry.insert('end', n)


# One row per physical keypad row: (label, value passed to number(), x, y).
_KEYS = [
    ("7", 7, 30, 70), ("8", 8, 100, 70), ("9", 9, 170, 70), ("+", '+', 250, 70),
    ("4", 4, 30, 120), ("5", 5, 100, 120), ("6", 6, 170, 120), ("-", '-', 250, 120),
    ("1", 1, 30, 170), ("2", 2, 100, 170), ("3", 3, 170, 170), ("*", '*', 250, 170),
    ("C", "C", 30, 220), ("0", 0, 100, 220), ("=", 'ans', 170, 220), ("/", '/', 250, 220),
    (".", '.', 30, 270), ("00", "00", 100, 270), ("del", 'del', 170, 270),
]
for text, value, x, y in _KEYS:
    # v=value binds the per-button value at definition time (late-binding fix).
    tk.Button(app, text=text, font=('arial', 10, 'bold'),
              command=lambda v=value: number(v), width=6).place(x=x, y=y)

app.mainloop()
| 51.851852 | 126 | 0.639286 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 443 | 0.158214 |
4fa0554d8bffe632c5f4057529ceb3127ce8caaf | 12,538 | py | Python | models.py | andreweskeclarke/rl_gyms | e4193180ae73b3410f006ba6f20300267d479054 | [
"Apache-2.0"
] | null | null | null | models.py | andreweskeclarke/rl_gyms | e4193180ae73b3410f006ba6f20300267d479054 | [
"Apache-2.0"
] | null | null | null | models.py | andreweskeclarke/rl_gyms | e4193180ae73b3410f006ba6f20300267d479054 | [
"Apache-2.0"
] | null | null | null | import ipdb
import math
import numpy as np
import tensorflow as tf
# Earlier, smaller dimensions left commented out by the author:
# N_DIM_STATE = 4
# N_DIM_ACTIONS = 2
N_DIM_STATE = 210*160  # flattened 210x160 frame — presumably single-channel; TODO confirm
N_DIM_ACTIONS = 9  # size of the discrete action space
def batch_norm_init(inits, size, name):
    """Create a trainable batch-norm parameter vector of length `size`,
    with every element initialized to `inits`."""
    initial_value = inits * tf.ones([size])
    return tf.Variable(initial_value, name=name)
def weight_init(shape, name):
    """Create a trainable weight matrix of `shape`, normal-initialized.

    NOTE(review): stddev=sqrt(shape[0]) GROWS with fan-in; conventional
    scaled initializers use 1/sqrt(fan_in).  Possibly an inversion bug —
    confirm before reusing this elsewhere.
    """
    return tf.Variable(tf.random_normal(shape, stddev=math.sqrt(shape[0])), name=name)
def batch_normalization(batch, mean=None, var=None):
    """Normalize `batch` to zero mean / unit variance.

    When `mean`/`var` are not supplied, the moments of the batch itself
    (over axis 0) are used.  A small epsilon keeps the division stable.
    """
    if mean is None or var is None:
        mean, var = tf.nn.moments(batch, axes=[0])
    eps = tf.constant(1e-9)
    return (batch - mean) / tf.sqrt(var + eps)
def update_batch_normalization(batch, l, bn_assigns, running_mean, running_var, ewma):
    """Batch-normalize `batch` and refresh the running statistics of layer `l`.

    Side effects: assigns the fresh batch mean/var into running_mean[l-1] /
    running_var[l-1] and appends their EWMA-update op to `bn_assigns`
    (the caller groups those ops into the train step).
    """
    mean, var = tf.nn.moments(batch, axes=[0])
    assign_mean = running_mean[l - 1].assign(mean)
    assign_var = running_var[l - 1].assign(var)
    bn_assigns.append(ewma.apply([running_mean[l - 1], running_var[l - 1]]))
    # control_dependencies guarantees both assigns run before the
    # normalized value is produced.
    with tf.control_dependencies([assign_mean, assign_var]):
        # NOTE(review): epsilon here is 1e-10 while batch_normalization()
        # uses 1e-9 — probably unintentional; confirm before unifying.
        return (batch - mean) / tf.sqrt(var + 1e-10)
def ddqn(s1, a1, r1, s2, discount, learning_rate, layers, q_values_fun_builder):
    """Build a Double-DQN training graph (TensorFlow 1.x static graph).

    :param s1: batch of states.
    :param a1: batch of action indicators — presumably one-hot over
        N_DIM_ACTIONS; TODO confirm against callers.
    :param r1: batch of rewards; a reward of -1 is treated as end-of-episode.
    :param s2: batch of successor states.
    :param discount: discount applied to the target network's scores.
    :param learning_rate: Adam learning rate.
    :param layers: NOTE(review): unused inside this function.
    :param q_values_fun_builder: callable(state, training[, weights]) returning
        (q_values, unsupervised_loss, bn_assigns, weights, debug_var).
    :returns: (loss, train_op, best_action_picker, updater, training, None).
    """
    training = tf.placeholder(tf.bool)
    n_data = tf.shape(s1)[0]

    # DDQN - Find best value using the up to date Q function, but estimate it's value from our target Q function.
    targets, _, bn_assigns, target_weights, _ = q_values_fun_builder(s2, training)
    best_action = tf.argmax(targets, axis=1)

    # Cases when the second action is picked
    # NOTE(review): the bool cast plus the [:, 0]/[:, 1] selection below only
    # covers a 2-action space, while N_DIM_ACTIONS is 9 — this looks like
    # leftover CartPole logic; confirm before use on larger action spaces.
    second_action_is_best = tf.cast(best_action, dtype=bool)

    # DDQN Pick action with Q_1, score with Q_target
    ddqn_target_scores, _, _, ddqn_target_weights, _ = q_values_fun_builder(s2, training)
    target_scores = tf.where(
        second_action_is_best,
        discount*ddqn_target_scores[:, 1],
        discount*ddqn_target_scores[:, 0])

    # Remove future score prediction if end of episode
    future_score = tf.where(
        tf.equal(r1, -1*tf.ones(tf.shape(r1))),
        tf.zeros(tf.shape(r1)),
        tf.reshape(target_scores, [-1, 1]))
    target_q_valuez = tf.concat([r1 + future_score for _ in range(N_DIM_ACTIONS)], 1)
    all_ones = tf.concat([tf.ones([n_data, 1]) for _ in range(N_DIM_ACTIONS)], 1)
    predicted_q_values, _, _, online_weights, _ = q_values_fun_builder(s1, training)
    # Use the bootstrapped target only for the action actually taken;
    # keep the network's own prediction everywhere else (zero gradient there).
    target_q_values = tf.where(
        tf.equal(a1, all_ones),
        target_q_valuez,
        predicted_q_values)

    # NOTE(review): this call rebinds bn_assigns, discarding the list built
    # by the first builder call above — confirm this is intentional.
    best_action_picker, u_loss, bn_assigns, _, tf_debug_var = q_values_fun_builder(s1, training, online_weights)
    u_loss = (u_loss * tf.constant(1/100))  # down-weight the unsupervised term
    supervised_loss = tf.reduce_mean(tf.square(tf.stop_gradient(target_q_values) - predicted_q_values))
    loss = supervised_loss + u_loss

    # Only the online network's weights are trained.
    training_vars = []
    for w_key, weights in online_weights.items():
        training_vars = training_vars + weights
    opt = tf.train.AdamOptimizer(learning_rate=learning_rate)
    train_op = opt.minimize(loss, var_list=training_vars)

    # Ops that copy online weights into the first target network (run with
    # every train step via the group below).
    target_updaters = []
    for w_key, weights in online_weights.items():
        for w_index in range(len(weights)):
            target_updaters.append(
                tf.assign(target_weights[w_key][w_index],
                          online_weights[w_key][w_index]))

    # Ops that copy online weights into the DDQN scoring network (run
    # only when the caller invokes `updater`).
    updaters = []
    for w_key, weights in online_weights.items():
        for w_index in range(len(weights)):
            updaters.append(
                tf.assign(ddqn_target_weights[w_key][w_index],
                          online_weights[w_key][w_index]))

    def updater(sess):
        # Sync the DDQN target network from the online network.
        for u in updaters:
            sess.run(u)

    # add the updates of batch normalization statistics to train_step
    network_updates = tf.group(*(bn_assigns + target_updaters))
    with tf.control_dependencies([train_op]):
        train_op = tf.group(network_updates)

    return loss, \
        train_op, \
        best_action_picker, \
        updater, \
        training, \
        None
def ddqn_mlp(s1, a1, r1, s2, discount, learning_rate, layer_sizes):
    """Double-DQN whose Q-function is a plain batch-normalized MLP.

    :param layer_sizes: e.g. [input_dim, hidden..., N_DIM_ACTIONS].
    Other parameters are forwarded to ddqn() unchanged.
    """
    n_data = tf.shape(s1)[0]

    # Q-values from a batch-normalized MLP (no ladder/denoising path here).
    def q_values(state1, training, weights=None):
        L = len(layer_sizes) - 1  # number of layers
        shapes = [s for s in zip(layer_sizes[:-1], layer_sizes[1:])]  # shapes of linear layers
        if weights is None:
            weights = {
                'Encoder_w': [weight_init(s, 'Encoder_w') for s in shapes],  # Encoder weights
                'beta': [batch_norm_init(0.0, layer_sizes[l+1], 'beta') for l in range(L)],
                'gamma': [batch_norm_init(1.0, layer_sizes[l+1], 'gamma') for l in range(L)]
            }

        # Running batch-norm statistics per layer (not trained directly).
        running_mean = [tf.Variable(tf.constant(0.0, shape=[l]), name='running_mean', trainable=False)
                        for l in layer_sizes[1:]]
        running_var = [tf.Variable(tf.constant(1.0, shape=[l]), name='running_var', trainable=False)
                       for l in layer_sizes[1:]]
        ewma = tf.train.ExponentialMovingAverage(decay=0.99)  # to calculate the moving averages of mean and variance
        bn_assigns = []  # this list stores the updates to be made to average mean and variance

        # to store the pre-activation, activation, mean and variance for each layer
        d = {'z': {}, 'm': {}, 'v': {}, 'h': {}}
        h = state1
        d['z'][0] = h
        for l in range(1, L + 1):
            print("Layer ", l, ": ", layer_sizes[l - 1], " -> ", layer_sizes[l])
            d['h'][l - 1] = h
            z_pre = tf.matmul(h, weights['Encoder_w'][l - 1])  # pre-activation
            m, v = tf.nn.moments(z_pre, axes=[0])

            # Training branch: normalize with batch stats and refresh the
            # running averages.
            def training_batch_norm():
                return update_batch_normalization(z_pre, l, bn_assigns, running_mean, running_var, ewma)

            # Eval branch: normalize with the stored moving averages.
            def eval_batch_norm():
                mean = ewma.average(running_mean[l - 1])
                var = ewma.average(running_var[l - 1])
                z = batch_normalization(z_pre, mean, var)
                return z

            z = tf.cond(training, training_batch_norm, eval_batch_norm)

            if l == L:
                # Output layer: learned scale/shift then softmax.
                h = tf.nn.softmax(weights['gamma'][l - 1] * (z + weights["beta"][l - 1]))
            else:
                h = tf.nn.relu(z + weights["beta"][l - 1])
            d['z'][l] = z
            d['m'][l], d['v'][l] = m, v
            d['h'][l] = h
        # Zero unsupervised loss keeps the return shape compatible with the
        # ladder variant consumed by ddqn().
        return h, tf.Variable(tf.constant(0.0)), bn_assigns, weights, None

    return ddqn(s1, a1, r1, s2, discount, learning_rate, layer_sizes, q_values)
# https://github.com/rinuboney/ladder/blob/master/ladder.py
def ladder_mlp(s1, a1, r1, s2, discount, learning_rate, layer_sizes, denoising_cost):
    """Double-DQN whose Q-function is a ladder network (clean + corrupted
    encoders plus a denoising decoder; see Rasmus et al., 2015).

    :param denoising_cost: per-layer weights for the decoder's denoising
        losses, indexed 0..len(layer_sizes)-1.
    Other parameters are forwarded to ddqn() unchanged.
    """
    # Q-Values from a ladder network
    def q_values(state1, training, weights=None):
        L = len(layer_sizes) - 1  # number of layers
        shapes = [s for s in zip(layer_sizes[:-1], layer_sizes[1:])]  # shapes of linear layers
        if weights is None:
            weights = {
                'Encoder_w': [weight_init(s, 'Encoder_w') for s in shapes],  # Encoder weights
                'Decoder_w': [weight_init(s[::-1], 'Decoder_w') for s in shapes],  # Decoder weights
                'beta': [batch_norm_init(0.0, layer_sizes[l+1], 'beta') for l in range(L)],
                'gamma': [batch_norm_init(1.0, layer_sizes[l+1], 'gamma') for l in range(L)]
            }

        # Running batch-norm statistics per layer (not trained directly).
        running_mean = [tf.Variable(tf.constant(0.0, shape=[l]), name='running_mean', trainable=False)
                        for l in layer_sizes[1:]]
        running_var = [tf.Variable(tf.constant(1.0, shape=[l]), name='running_var', trainable=False)
                       for l in layer_sizes[1:]]
        ewma = tf.train.ExponentialMovingAverage(decay=0.99)  # to calculate the moving averages of mean and variance
        bn_assigns = []  # this list stores the updates to be made to average mean and variance

        def encoder(inputs, noise_std):
            # add noise to input
            h = inputs + tf.random_normal(tf.shape(inputs)) * noise_std
            # to store the pre-activation, activation, mean and variance for each layer
            d = {'z': {}, 'm': {}, 'v': {}, 'h': {}}
            d['z'][0] = h
            for l in range(1, L + 1):
                print("Layer ", l, ": ", layer_sizes[l - 1], " -> ", layer_sizes[l])
                d['h'][l - 1] = h
                z_pre = tf.matmul(h, weights['Encoder_w'][l - 1])  # pre-activation
                m, v = tf.nn.moments(z_pre, axes=[0])

                def training_batch_norm():
                    # Training batch normalization
                    # batch normalization for labeled and unlabeled examples is performed separately
                    if noise_std > 0:
                        # Corrupted encoder
                        # batch normalization + noise
                        z = batch_normalization(z_pre, m, v)
                        z += tf.random_normal(tf.shape(z_pre)) * noise_std
                    else:
                        # Clean encoder
                        # batch normalization + update the average mean and variance using batch
                        # mean and variance of labeled examples
                        z = update_batch_normalization(z_pre, l, bn_assigns, running_mean, running_var, ewma)
                    return z

                def eval_batch_norm():
                    # Evaluation batch normalization
                    # obtain average mean and variance and use it to normalize the batch
                    mean = ewma.average(running_mean[l - 1])
                    var = ewma.average(running_var[l - 1])
                    z = batch_normalization(z_pre, mean, var)
                    return z

                # perform batch normalization according to value of boolean "training" placeholder:
                z = tf.cond(training, training_batch_norm, eval_batch_norm)

                if l == L:
                    # use softmax activation in output layer
                    h = tf.nn.softmax(weights['gamma'][l - 1] * (z + weights["beta"][l - 1]))
                else:
                    # use ReLU activation in hidden layers
                    h = tf.nn.relu(z + weights["beta"][l - 1])
                d['z'][l] = z
                d['m'][l], d['v'][l] = m, v  # save mean and variance of unlabeled examples for decoding
                d['h'][l] = h
            return h, d

        print("=== Corrupted Encoder ===")
        y_c, corr = encoder(state1, 0.1)

        print("=== Clean Encoder ===")
        y, clean = encoder(state1, 0.0)  # 0.0 -> do not add noise

        print("=== Decoder ===")

        def g_gauss(z_c, u, size):
            # Gaussian denoising function: combine the corrupted activation
            # z_c with the top-down signal u via learned modulators.
            # NOTE(review): locals a1..a10 shadow ladder_mlp's `a1` parameter.
            wi = lambda inits, name: tf.Variable(inits * tf.ones([size]), name=name)
            a1 = wi(0., 'a1')
            a2 = wi(1., 'a2')
            a3 = wi(0., 'a3')
            a4 = wi(0., 'a4')
            a5 = wi(0., 'a5')
            a6 = wi(0., 'a6')
            a7 = wi(1., 'a7')
            a8 = wi(0., 'a8')
            a9 = wi(0., 'a9')
            a10 = wi(0., 'a10')
            mu = a1 * tf.sigmoid(a2 * (u + tf.constant(1e-9)) + a3) + a4 * u + a5
            v = a6 * tf.sigmoid(a7 * (u + tf.constant(1e-9)) + a8) + a9 * u + a10
            z_est = (z_c - mu) * v + mu
            return z_est

        # Decoder
        z_est = {}
        d_cost = []  # to store the denoising cost of all layers
        for l in range(L, -1, -1):
            print("Layer ", l, ": ", layer_sizes[l+1] if l+1 < len(layer_sizes) else None,
                  " -> ", layer_sizes[l], ", denoising cost: ", denoising_cost[l])
            z, z_c = clean['z'][l], corr['z'][l]
            m = clean['m'].get(l, 0)
            v = clean['v'].get(l, 1-1e-10) + tf.constant(1e-9)
            if l == L:
                u = y_c
            else:
                u = tf.matmul(z_est[l+1], weights['Decoder_w'][l])
            u = batch_normalization(u)
            z_est[l] = g_gauss(z_c, u, layer_sizes[l])
            z_est_bn = (z_est[l] - m) / v
            # append the cost of this layer to d_cost
            d_cost.append((tf.reduce_mean(tf.reduce_sum(tf.square(z_est_bn - z), 1)) / layer_sizes[l]) * denoising_cost[l])

        # calculate total unsupervised cost by adding the denoising cost of all layers
        unsupervised_cost = tf.add_n(d_cost)

        return y, unsupervised_cost, bn_assigns, weights, None

    return ddqn(s1, a1, r1, s2, discount, learning_rate, layer_sizes, q_values)
| 44.460993 | 123 | 0.56564 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2,469 | 0.196921 |
4fa0f0b6da9530431d0a28bc79144ef0fdfe6ec0 | 12,808 | py | Python | nuclino/api.py | Vanderhoof/PyNuclino | 20aefaaa145f0afa15778b049d83be157e5a553b | [
"MIT"
] | 1 | 2022-01-28T06:27:57.000Z | 2022-01-28T06:27:57.000Z | nuclino/api.py | Vanderhoof/PyNuclino | 20aefaaa145f0afa15778b049d83be157e5a553b | [
"MIT"
] | null | null | null | nuclino/api.py | Vanderhoof/PyNuclino | 20aefaaa145f0afa15778b049d83be157e5a553b | [
"MIT"
] | null | null | null | import requests
from typing import List
from typing import Optional
from typing import Union
from ratelimit import limits
from .utils import sleep_and_retry
from .objects import NuclinoObject
from .objects import get_loader
# Default root for all Nuclino REST endpoints.
BASE_URL = 'https://api.nuclino.com/v0'


class NuclinoError(Exception):
    """Raised when the Nuclino API returns a non-200 response."""
    pass
def join_url(base_url, path):
    """Join two URL fragments with a single '/', trimming stray slashes."""
    trimmed = [piece.strip('/') for piece in (base_url, path)]
    return '/'.join(trimmed)
class Client:
    '''
    Base class for Nuclino API client. May be used as a context processor.

    Wraps an API-key-authorized `requests.Session` with client-side rate
    limiting and parsing of responses into `NuclinoObject` instances.
    '''

    def __init__(
        self,
        api_key: str,
        base_url: Optional[str] = BASE_URL,
        requests_per_minute: int = 140
    ):
        '''
        :param api_key: your Nuclino API key.
        :param base_url: base url to send API requests.
        :param requests_per_minute: max requests per minute. If limit
            exceeded, client will wait for some time before processing the
            next request.
        '''
        # A rate-limited no-op: calling it blocks (sleeps) once
        # `requests_per_minute` calls have been made within a 60 s window.
        self.check_limit = sleep_and_retry()(
            limits(requests_per_minute, period=60)(lambda: None)
        )
        self.session = requests.Session()
        self.session.headers['Authorization'] = api_key
        self.timer = None  # kept for backward compatibility; unused here
        self.base_url = base_url

    def __enter__(self):
        return self

    def __exit__(self, *_):
        self.close()

    def close(self):
        '''Close the underlying HTTP session.'''
        self.session.close()

    def _request(
        self,
        method: str,
        path: str,
        **kwargs
    ) -> Union[List, NuclinoObject, dict]:
        '''
        Rate-limit, send a single HTTP request and parse the response.

        :param method: HTTP verb ('GET', 'POST', 'PUT', 'DELETE').
        :param path: path relative to `self.base_url`.
        :param kwargs: forwarded to `requests.Session.request`.
        '''
        self.check_limit()
        response = self.session.request(
            method,
            join_url(self.base_url, path),
            **kwargs
        )
        return self._process_response(response)

    def _process_response(
        self,
        response: requests.models.Response
    ) -> Union[List, NuclinoObject, dict]:
        '''
        General method that processes API responses. Raises NuclinoError on
        HTTP errors, sends results to parser on 200 ok.

        :param response: response object, received after calling API.
        '''
        content = response.json()
        if response.status_code != 200:
            message = content.get('message', '')
            raise NuclinoError(f'{response.status_code}: {message}')
        else:
            data = content['data']
            return self.parse(data)

    def parse(self, source: dict) -> Union[List, NuclinoObject, dict]:
        '''
        Parse successful response dictionary. This method will determine the
        type of object that was returned and construct the corresponding
        NuclinoObject as the return result.

        :param source: the "data" dictionary from Nuclino API response.

        :returns: corresponding NuclinoObject constructed from `source`.
        '''
        if 'object' not in source:
            return source
        func = get_loader(source['object'])
        result = func(source, self)
        if isinstance(result, NuclinoObject):
            return result
        elif isinstance(result, list):
            # Lists may contain nested API objects; parse each element.
            return [self.parse(li) for li in result]
        else:
            return source

    def get(self, path: str, params: Optional[dict] = None) -> Union[List, NuclinoObject, dict]:
        '''Send a GET request (default for `params` is no query arguments;
        the previous mutable-dict default was a Python anti-pattern).'''
        return self._request('GET', path, params=params if params is not None else {})

    def delete(self, path: str) -> Union[List, NuclinoObject, dict]:
        '''Send a DELETE request.'''
        return self._request('DELETE', path)

    def post(self, path: str, data: dict) -> Union[List, NuclinoObject, dict]:
        '''Send a POST request with a JSON body.'''
        return self._request(
            'POST', path, json=data,
            headers={'Content-Type': 'application/json'}
        )

    def put(self, path: str, data: dict) -> Union[List, NuclinoObject, dict]:
        '''Send a PUT request with a JSON body.'''
        return self._request(
            'PUT', path, json=data,
            headers={'Content-Type': 'application/json'}
        )
class Nuclino(Client):
    '''
    High-level Nuclino API client: teams, workspaces, items/clusters and
    files, built on the rate-limited HTTP methods of `Client`.
    '''

    def get_teams(
        self,
        limit: Optional[int] = None,
        after: Optional[str] = None
    ) -> Union[List, NuclinoObject, dict]:
        '''
        Get list of teams available for user.

        :param limit: number between 1 and 100 to limit the results.
        :param after: only return teams that come after the given team ID.

        :returns: list of Team objects.
        '''
        path = '/teams'
        params = {}
        if limit is not None:
            params['limit'] = str(limit)
        if after is not None:
            params['after'] = after
        return self.get(path, params)

    def get_team(self, team_id: str) -> Union[List, NuclinoObject, dict]:
        '''
        Get specific team by ID.

        :param team_id: ID of the team to get.

        :returns: Team object.
        '''
        path = f'/teams/{team_id}'
        return self.get(path)

    def get_workspaces(
        self,
        team_id: Optional[str] = None,
        limit: Optional[int] = None,
        after: Optional[str] = None
    ) -> Union[List, NuclinoObject, dict]:
        '''
        Get list of workspaces available for user.

        :param team_id: ID of the team the returned workspaces should belong to.
        :param limit: number between 1 and 100 to limit the results.
        :param after: only return workspaces that come after the given workspace
            ID.

        :returns: list of Workspace objects.
        '''
        path = '/workspaces'
        params = {}
        if team_id is not None:
            params['teamId'] = team_id
        if limit is not None:
            params['limit'] = str(limit)
        if after is not None:
            params['after'] = after
        return self.get(path, params)

    def get_workspace(self, workspace_id: str) -> Union[List, NuclinoObject, dict]:
        '''
        Get specific workspace by ID.

        :param workspace_id: ID of the workspace to get.

        :returns: Workspace object.
        '''
        path = f'/workspaces/{workspace_id}'
        return self.get(path)

    def get_items(
        self,
        team_id: Optional[str] = None,
        workspace_id: Optional[str] = None,
        limit: Optional[int] = None,
        after: Optional[str] = None,
        search: Optional[str] = None
    ) -> Union[List, NuclinoObject, dict]:
        '''
        Get list of items and cluster from the team or the workspace. Either
        `team_id` or `workspace_id` parameter is required. This method is also
        used for item search, use `search` parameter.

        :param team_id: ID of the team the returned items should belong to.
        :param workspace_id: ID of the workspace the returned items should
            belong to.
        :param limit: number between 1 and 100 to limit the results.
        :param after: only return workspaces that come after the given workspace
            ID.
        :param search: search query.

        :returns: list of Item and Cluster objects.
        '''
        path = '/items'
        params = {}
        if team_id is not None:
            params['teamId'] = team_id
        if workspace_id is not None:
            params['workspaceId'] = workspace_id
        if limit is not None:
            params['limit'] = str(limit)
        if after is not None:
            params['after'] = after
        if search is not None:
            params['search'] = search
        return self.get(path, params)

    def get_item(self, item_id: str) -> Union[List, NuclinoObject, dict]:
        '''
        Get specific item or cluster by ID.

        :param item_id: ID of the item to get.

        :returns: Item or Cluster object.
        '''
        path = f'/items/{item_id}'
        return self.get(path)

    def get_cluster(self, cluster_id: str) -> Union[List, NuclinoObject, dict]:
        '''
        Alias for get_item. Get specific item or cluster by ID.

        :param cluster_id: ID of the item to get.

        :returns: Item or Cluster object.
        '''
        return self.get_item(cluster_id)

    def create_item(
        self,
        workspace_id: Optional[str] = None,
        parent_id: Optional[str] = None,
        object: Optional[str] = 'item',
        title: Optional[str] = None,
        content: Optional[str] = None,
        index: Optional[int] = None
    ) -> Union[List, NuclinoObject, dict]:
        '''
        Create a new item or cluster in the workspace or as a child of a
        cluster. Either `workspace_id` or `parent_id` parameter is required.

        :param workspace_id: ID of the workspace the item should be put in (will
            be placed at the root of the workspace).
        :param parent_id: ID of the cluster the item should be put in.
        :param object: 'item' or 'cluster'.
        :param title: item or cluster title.
        :param content: item content (only for items).
        :param index: where to put this item in the tree. If not
            specified — will be put at the end.

        :returns: the created Item or Cluster object.
        '''
        path = f'/items'
        data = {'object': object}
        if workspace_id is not None:
            data['workspaceId'] = workspace_id
        if parent_id is not None:
            data['parentId'] = parent_id
        if title is not None:
            data['title'] = title
        if content is not None:
            data['content'] = content
        if index is not None:
            data['index'] = str(index)
        return self.post(path, data)

    def create_cluster(
        self,
        workspace_id: Optional[str] = None,
        parent_id: Optional[str] = None,
        title: Optional[str] = None,
        index: Optional[int] = None
    ) -> Union[List, NuclinoObject, dict]:
        '''
        Create a cluster in the workspace or as a child of another cluster.
        Either `workspace_id` or `parent_id` parameter is required.

        :param workspace_id: ID of the workspace the cluster should be put in
            (will be placed at the root of the workspace).
        :param parent_id: ID of the cluster this cluster should be put in.
        :param title: cluster title.
        :param index: where to put this cluster in the tree. If not
            specified — will be put at the end.

        :returns: the created Cluster object.
        '''
        return self.create_item(
            workspace_id=workspace_id,
            parent_id=parent_id,
            object='cluster',
            title=title,
            content=None,
            index=index
        )

    def update_item(
        self,
        item_id: str,
        title: Optional[str] = None,
        content: Optional[str] = None
    ) -> Union[List, NuclinoObject, dict]:
        '''
        Update item or cluster.

        :param item_id: ID of the item to update.
        :param title: new item title. If not specified — won't be changed.
        :param content: new item content (only for items). If not specified —
            won't be changed.

        :returns: updated Item or Cluster object.
        '''
        path = f'/items/{item_id}'
        data = {}
        if title is not None:
            data['title'] = title
        if content is not None:
            data['content'] = content
        return self.put(path, data=data)

    def update_cluster(
        self,
        cluster_id: str,
        title: Optional[str] = None
    ) -> Union[List, NuclinoObject, dict]:
        '''
        Update cluster title.

        :param cluster_id: ID of the cluster to update.
        :param title: new cluster title. If not specified — won't be
            changed.

        :returns: updated Cluster object.
        '''
        return self.update_item(cluster_id, title=title, content=None)

    def delete_item(self, item_id: str) -> Union[List, NuclinoObject, dict]:
        '''
        Move item or cluster to trash.

        :param item_id: ID of the item to delete.

        :returns: a dictionary with ID of deleted item.
        '''
        path = f'/items/{item_id}'
        return self.delete(path)

    def delete_cluster(self, cluster_id: str) -> Union[List, NuclinoObject, dict]:
        '''
        Alias for delete_item. Move item or cluster to trash.

        :param cluster_id: ID of the item to delete.

        :returns: a dictionary with ID of deleted item.
        '''
        return self.delete_item(cluster_id)

    def get_file(self, file_id: str) -> Union[List, NuclinoObject, dict]:
        '''
        Get a file object by ID.

        :param file_id: ID of the file to get.

        :returns: a File object.
        '''
        path = f'/files/{file_id}'
        return self.get(path)
| 31.469287 | 90 | 0.579169 | 12,442 | 0.970666 | 0 | 0 | 0 | 0 | 0 | 0 | 5,787 | 0.451474 |
4fa4042c8d50c736872f18bc6de82a9eeac26254 | 591 | py | Python | apps/base/migrations/0056_auto_20190821_1628.py | gvizquel/pyerp | c859f7293cabd1003f79112463cee93ac89fccba | [
"MIT"
] | null | null | null | apps/base/migrations/0056_auto_20190821_1628.py | gvizquel/pyerp | c859f7293cabd1003f79112463cee93ac89fccba | [
"MIT"
] | 11 | 2020-06-05T22:50:37.000Z | 2022-02-10T09:05:56.000Z | apps/base/migrations/0056_auto_20190821_1628.py | gvizquel/pyerp | c859f7293cabd1003f79112463cee93ac89fccba | [
"MIT"
] | null | null | null | # Generated by Django 2.2.4 on 2019-08-21 20:28
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('base', '0055_pycron'),
]
operations = [
migrations.AlterField(
model_name='pycron',
name='model_name',
field=models.CharField(max_length=40, verbose_name='Modelo'),
),
migrations.AlterField(
model_name='pycron',
name='number_call',
field=models.IntegerField(default=-1, verbose_name='Número de llamadas'),
),
]
| 24.625 | 85 | 0.588832 | 499 | 0.842905 | 0 | 0 | 0 | 0 | 0 | 0 | 136 | 0.22973 |
4fa4d1534b1e2c6e2ca72fcf0d2aa45f72c2bfdf | 1,516 | py | Python | tests/test_settings.py | abahnihi/kn-defaults | 02517a76f14da8b519124af38a773a621b9a4041 | [
"MIT"
] | 2 | 2020-10-04T09:22:52.000Z | 2020-11-18T15:37:22.000Z | tests/test_settings.py | abahnihi/kn-defaults | 02517a76f14da8b519124af38a773a621b9a4041 | [
"MIT"
] | 2 | 2020-10-12T06:13:49.000Z | 2020-11-18T15:37:05.000Z | tests/test_settings.py | abahnihi/kn-defaults | 02517a76f14da8b519124af38a773a621b9a4041 | [
"MIT"
] | 1 | 2021-09-16T10:23:56.000Z | 2021-09-16T10:23:56.000Z | import os
# Minimal Django settings used only by the kn-defaults test suite.
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))

# Dummy secret for tests only — never use in production.
SECRET_KEY = 'fake-key'

INSTALLED_APPS = [
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',
    'django.contrib.admin',
    'raven.contrib.django.raven_compat',
    'kn_defaults.logging',
    "tests",
]

TEMPLATES = [
    {
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'APP_DIRS': True,
        'OPTIONS': {
            'context_processors': [
                'django.contrib.auth.context_processors.auth',
                'django.contrib.messages.context_processors.messages'
            ]
        }
    },
]

ROOT_URLCONF = 'tests.urls'

# KnLogging last so it wraps the fully-authenticated request.
MIDDLEWARE = [
    'django.middleware.security.SecurityMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'kn_defaults.logging.middlewares.KnLogging']

# Throwaway on-disk sqlite database for test runs.
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': os.path.join(BASE_DIR, 'test.sqlite3'),
    }
}

# URL names whose requests the Kn logging middleware should record.
KN_LOGGING_URL_PATTERNS = [
    'success_func_view',
    'error_func_view',
]

# Mid-file import kept as-is: BASE_LOGGING needs the settings above defined.
from kn_defaults.logging.defaults import BASE_LOGGING

BASE_LOGGING.update({})
LOGGING = BASE_LOGGING

RAVEN_CONFIG = {'dsn': ''}
| 25.694915 | 70 | 0.674142 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 879 | 0.579815 |
4fa7c8aae2cd758336fcea63483f62b6c22958c0 | 135 | py | Python | 0/2/2752/2752.py | chr0m3/boj-codes | d71d0a22d0a3ae62c225f382442461275f56fe8f | [
"MIT"
] | 3 | 2017-07-08T16:29:06.000Z | 2020-07-20T00:17:45.000Z | 0/2/2752/2752.py | chr0m3/boj-codes | d71d0a22d0a3ae62c225f382442461275f56fe8f | [
"MIT"
] | null | null | null | 0/2/2752/2752.py | chr0m3/boj-codes | d71d0a22d0a3ae62c225f382442461275f56fe8f | [
"MIT"
] | 2 | 2017-11-20T14:06:06.000Z | 2020-07-20T00:17:47.000Z | numbers = list(map(int, input().split())).sort()
# BOJ 2752: print the three input numbers in ascending order.
# NOTE(review): the preceding line (fused into a garbled row in this dump)
# appears to end in `list(...).sort()`, which evaluates to None — if so,
# `numbers` is None here and this crashes. The trailing `.sort()` on that
# line should be dropped; verify against the original file.
numbers.sort()
print(str(numbers[0]) + ' ' + str(numbers[1]) + ' ' + str(numbers[2]))
| 33.75 | 70 | 0.592593 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 6 | 0.044444 |
4fa960640ac080eae40140838421c34218db4f57 | 1,431 | py | Python | examples/worker.py | denissmirnov/kiel | fa80aa1ccd790c0fbbd8cc46a72162195e1aed69 | [
"Apache-2.0"
] | 47 | 2016-02-23T18:32:55.000Z | 2021-12-03T00:50:52.000Z | examples/worker.py | denissmirnov/kiel | fa80aa1ccd790c0fbbd8cc46a72162195e1aed69 | [
"Apache-2.0"
] | 21 | 2016-02-23T01:41:18.000Z | 2020-10-27T21:09:32.000Z | examples/worker.py | denissmirnov/kiel | fa80aa1ccd790c0fbbd8cc46a72162195e1aed69 | [
"Apache-2.0"
] | 13 | 2016-05-18T06:16:48.000Z | 2019-10-31T19:35:25.000Z | #!/usr/bin/env python
import argparse
import logging
from tornado import gen, ioloop
from kiel.clients import GroupedConsumer
# Root logger; level is raised to DEBUG via --debug in __main__ below.
log = logging.getLogger()

# Command-line interface for the example grouped consumer.
parser = argparse.ArgumentParser(
    description="Example grouped consumer that prints out messages it gets."
)
parser.add_argument(
    "brokers", type=lambda v: v.split(","),
    help="Comma-separated list of bootstrap broker servers"
)
parser.add_argument(
    "zk_hosts", type=lambda v: v.split(","),
    help="Comma-separated list of zookeeper servers."
)
parser.add_argument(
    "topic", type=str,
    help="Topic to publish to"
)
# NOTE(review): argparse `type=bool` is a known pitfall — bool("False") is
# True, so any non-empty value enables debug. `action="store_true"` is the
# conventional flag form (would change the CLI, so only flagged here).
parser.add_argument(
    "--debug", type=bool, default=False,
    help="Sets the logging level to DEBUG"
)
def process_message(msg):
    """Handle one consumed message by echoing it to stdout."""
    print(msg)
@gen.coroutine
def run(c, args):
    """Connect the consumer, then loop forever: consume, process, commit.

    Offsets are committed manually (only after processing, and only when a
    batch was non-empty), pairing with the consumer's autocommit=False.
    """
    yield c.connect()

    while True:
        msgs = yield c.consume(args.topic)
        for msg in msgs:
            process_message(msg)
        if msgs:
            c.commit_offsets()
if __name__ == "__main__":
args = parser.parse_args()
loop = ioloop.IOLoop.instance()
if args.debug:
log.setLevel(logging.DEBUG)
c = GroupedConsumer(
brokers=args.brokers,
group="worker-group",
zk_hosts=args.zk_hosts,
autocommit=False
)
loop.add_callback(run, c, args)
try:
loop.start()
except KeyboardInterrupt:
c.close().add_done_callback(lambda f: loop.stop())
| 19.60274 | 76 | 0.65269 | 0 | 0 | 207 | 0.144654 | 222 | 0.155136 | 0 | 0 | 294 | 0.205451 |
4faa95c6d247e311aedbaf9ff405c3309caa0c17 | 146 | py | Python | glucose_app/glicemy/apps.py | luciano-s/glucose_app | f8b8d97f96bfbc5106fdce0f3de9694486b97f16 | [
"MIT"
] | null | null | null | glucose_app/glicemy/apps.py | luciano-s/glucose_app | f8b8d97f96bfbc5106fdce0f3de9694486b97f16 | [
"MIT"
] | null | null | null | glucose_app/glicemy/apps.py | luciano-s/glucose_app | f8b8d97f96bfbc5106fdce0f3de9694486b97f16 | [
"MIT"
] | null | null | null | from django.apps import AppConfig
class GlicemyConfig(AppConfig):
    """App configuration for the `glicemy` Django app."""
    # 64-bit auto primary keys for models in this app.
    default_auto_field = 'django.db.models.BigAutoField'
    name = 'glicemy'
| 20.857143 | 56 | 0.760274 | 109 | 0.746575 | 0 | 0 | 0 | 0 | 0 | 0 | 40 | 0.273973 |
4fae4d9fb269656577ad00e357001a2996f802a4 | 7,480 | py | Python | pycancer.py | WORD559/pycancer | 355d09e5f8c347e467b72279c51182aee1a9564f | [
"MIT"
] | null | null | null | pycancer.py | WORD559/pycancer | 355d09e5f8c347e467b72279c51182aee1a9564f | [
"MIT"
] | 3 | 2021-09-08T01:04:59.000Z | 2022-03-11T23:51:01.000Z | pycancer.py | WORD559/pycancer | 355d09e5f8c347e467b72279c51182aee1a9564f | [
"MIT"
] | null | null | null | #!/usr/bin/env python3.6
import math
import time
import os
import pygame
from pygame.locals import *
from constants import *
from sprites import Spinner
class App():
    def __init__(self, width, height, title="pygame window", icon=None):
        """Store window parameters and initialize pygame (display is not
        created until init() is called)."""
        self.running = False
        self.size = (width, height)
        self.title = title
        self.icon = icon  # path to an icon image, or None
        pygame.init()
def init(self):
"""Commands to be processed before the application starts"""
self.clock = pygame.time.Clock()
pygame.display.set_caption(self.title)
if self.icon != None:
self.icon = pygame.image.load(self.icon)
pygame.display.set_icon(self.icon)
self.display = pygame.display.set_mode(self.size, pygame.HWSURFACE | DOUBLEBUF)
# Load fonts
self.small_font = pygame.font.SysFont("sans-serif",15)
self.medium_font = pygame.font.SysFont("sans-serif",22)
# Open the fidget spinner sprite
self.sprite_num = 0
self.sprite_files = sorted([os.path.join(SPRITE_PATH,file) for file in os.listdir(SPRITE_PATH) if file[-4:].lower() == ".tif"])
self.spinner = Spinner(self.sprite_files[0], (self.size[0]//2,self.size[1]//2))
self.spinner.set_centre_pos((self.size[0]/2,self.size[1]/2))
# Attempt to load music
self.music = None
music_files = [file for file in os.listdir(MUSIC_PATH) if file[-4:].lower() == ".ogg"]
if len(music_files) >= 1:
pygame.mixer.init()
self.music = music_files[0]
pygame.mixer.music.load(os.path.join(MUSIC_PATH,self.music))
print("Loaded music: "+self.music)
else:
print("No music available to load :(")
self.reset()
self.difficulty = 1
self.legendary = False ## will be interpreted as int
return True
def reset(self):
# displayed as acceleration in-game as in the original
self.speed = 0
self.highest_speed = 0
# time in the 90s tracking
self.time_start = None
self.time_spent_in_90s = 0
self.highest_time_spent_in_90s = 0
# reset spinner position
self.spinner.angle = 0
self.spinner.rotate(0)
self.background = 0
self.pressed = False
if self.music is not None:
pygame.mixer.music.stop()
def __loop__(self):
"""Commands processed every frame"""
## speed decreases by 12.5 every second
#self.speed -= 12.5*(self.clock.get_time()/1000)
## exponential speed decrease makes it slightly easier the faster you go
## this is to recreate the slight lag in scratch as the spinner got faster
self.speed -= (10 + 2.5*math.exp(-self.speed/200))*(self.clock.get_time()/1000)
if self.speed < 0:
self.speed = 0
if self.speed > self.highest_speed:
self.highest_speed = self.speed
if self.speed >= 90:
self.background = (self.background+1)%len(BACKGROUND_COLOURS)
if self.time_start is None:
self.time_start = time.time()
self.time_spent_in_90s = time.time() - self.time_start
if self.time_spent_in_90s > self.highest_time_spent_in_90s:
self.highest_time_spent_in_90s = self.time_spent_in_90s
if self.music is not None:
pygame.mixer.music.set_volume(1)
if not pygame.mixer.music.get_busy():
pygame.mixer.music.play(0)
else:
if self.music is not None:
if pygame.mixer.music.get_busy():
pygame.mixer.music.set_volume(0)
self.background = 0
self.time_spent_in_90s = 0
self.time_start = None
## Rotate the spinner according to the angular speed
self.spinner.rotate(-self.speed)
## set the hue shift as it would in scratch
self.spinner.set_hueshift(self.speed/360)
def __events__(self, event):
"""Event Handling"""
if event.type == pygame.QUIT:
self.running = False
## Handle key press
if event.type == pygame.KEYDOWN and event.key == pygame.K_s:
#if not self.pressed:
## self.pressed = True
self.speed += (5- self.speed/(39 - self.legendary*10 - self.difficulty))
## if event.type == pygame.KEYUP and event.key == pygame.K_s:
## self.pressed = False
## Enter to reset
if event.type == pygame.KEYDOWN and event.key == pygame.K_RETURN:
self.reset()
## Space to use next sprite
if event.type == pygame.KEYDOWN and event.key == pygame.K_SPACE:
self.sprite_num = (self.sprite_num+1)%len(self.sprite_files)
self.spinner.load_image(self.sprite_files[self.sprite_num])
self.spinner.set_centre_pos((self.size[0]/2,self.size[1]/2))
def __render__(self):
"""Rendering"""
# Clear background
self.display.fill(BACKGROUND_COLOURS[self.background])
self.spinner.draw(self.display)
# draw texts
fps_text = self.small_font.render("FPS: "+str(round(self.clock.get_fps(),1)), 1, COLOUR_BLACK)
self.display.blit(fps_text, (0,0))
speed_text = self.medium_font.render("Acceleration: "+str(round(self.speed,1)), 1, COLOUR_BLACK)
self.display.blit(speed_text, (0,20))
highest_text = self.medium_font.render("Highest: "+str(round(self.highest_speed,1)), 1, COLOUR_BLACK)
highest_text_rect = highest_text.get_rect()
highest_text_rect.topright = (self.size[0]-3, 20)
self.display.blit(highest_text, highest_text_rect)
nineties_text = self.medium_font.render("Time Spent Running in the 90s: "+str(round(self.time_spent_in_90s,1))+"s", 1, COLOUR_BLACK)
nineties_text_rect = nineties_text.get_rect()
nineties_text_rect.bottomleft = (3, self.size[1])
self.display.blit(nineties_text, nineties_text_rect)
highest_nineties_text = self.medium_font.render("Longest Time Spent Running in the 90s: "+str(round(self.highest_time_spent_in_90s,1))+"s", 1, COLOUR_BLACK)
highest_nineties_text_rect = highest_nineties_text.get_rect()
highest_nineties_text_rect.bottomright = (self.size[0]-3, self.size[1])
self.display.blit(highest_nineties_text, highest_nineties_text_rect)
pygame.display.flip()
def __cleanup__(self,e=None):
"""Commands to be processed when quiiting"""
pygame.quit()
if e != None:
raise e
def start(self,fps_limit=0):
"""Start the application"""
self.fps_limit = fps_limit #This way fps can be dynamically adjusted
ex = None
try:
self.running = self.init()
except Exception as e:
self.running = False
ex = e
while self.running == True:
try:
self.clock.tick(self.fps_limit)
for event in pygame.event.get():
self.__events__(event)
self.__loop__()
self.__render__()
except Exception as e:
self.running = False
ex = e
self.__cleanup__(ex)
if __name__ == "__main__":
    # Launch the app using window/FPS constants imported from constants.py.
    app = App(WINDOW_SIZE[0], WINDOW_SIZE[1],
              WINDOW_NAME)
    app.start(FPS_LIMIT)
| 36.487805 | 164 | 0.60361 | 7,196 | 0.962032 | 0 | 0 | 0 | 0 | 0 | 0 | 1,266 | 0.169251 |
4faff66bcc8450a4ff6ceab6fbf57edf2800ad21 | 2,007 | py | Python | shapes_and_text.py | UPstartDeveloper/open-cv-learning | fea0e6756ff50ef65601a3de36d503c9690e816e | [
"MIT"
] | null | null | null | shapes_and_text.py | UPstartDeveloper/open-cv-learning | fea0e6756ff50ef65601a3de36d503c9690e816e | [
"MIT"
] | null | null | null | shapes_and_text.py | UPstartDeveloper/open-cv-learning | fea0e6756ff50ef65601a3de36d503c9690e816e | [
"MIT"
] | null | null | null | import cv2
import numpy as np
# Chapter 4 - How to Draw Shapes and Text on Images
if __name__ == '__main__':
    # Start from a black 512x512 BGR canvas (all zeros).
    # define an "image" only a matrix
    img = np.zeros((512, 512, 3), np.uint8)
    # print(img) # print the numbers making up the "image" in CLI
    # "Color" the whole image, by manipulating the matrix values
    # img[:] = 255, 0, 0 # completely blue, darkly shaded
    # img[20:30, 10:30] = 255, 0, 0 # only a portion becomes dark blue
    # print(img)
    '''
    Drawing a Line on the image
    Args:
        - the image being drawn on
        - starting point of the line
        - the ending point
        - the color of the line
        - the thickness of the line
    '''
    # cv2.line(img, (0, 0), (300, 300), (0, 255, 0), 3)
    # Example where the line goes all the way across (so dimensions are used)
    # cv2.line(img, (0, 0), (img.shape[1], img.shape[0]), (0, 255, 0), 3)
    '''
    Drawing a Rectangle on the image
    Args (roughly the same convention):
        - image
        - the endpoints (for top-left to bottom-right diagonal)
        - color
        - thickness (or, you can put cv2.FILLED to color in the rectangle)
    '''
    # cv2.rectangle(img, (0, 0), (250, 350), (0, 0, 255), 2)
    '''
    Drawing a Circle on the image
    Args (roughly the same convention):
        - image
        - coordinates of the center of the circle
        - radius length
        - color of the outline
        - thickness (or can be filled in)
    '''
    # cv2.circle(img, (400, 50), 30, (255, 255, 0), 5)
    '''
    Adding text to images
    Args
        - image
        - text that will be shown
        - starting coordinates for where the text will be shown
        - the font of the text
        - scale (think of it like font size)
        - color
        - thickness (like the "boldness" of the text)
    '''
    cv2.putText(img,
                "AI is so cool!",
                (112, 200),
                cv2.FONT_HERSHEY_COMPLEX,
                1, (0, 130, 0), 1)
    # show the images
    cv2.imshow("Image", img)
    # NOTE(review): waits up to 100 s for a key press and never calls
    # cv2.destroyAllWindows() — confirm the window is meant to be left to
    # the OS to close.
    cv2.waitKey(100000)
| 28.671429 | 77 | 0.584953 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1,637 | 0.815645 |
4fb045ff192d656ea573c38eeb8da1f72e35f93e | 416 | py | Python | mysite/myapp/migrations/0004_auto_20191219_1946.py | Niyy/monsterpedia | fa43286a49c5c6098c79c33f55af00a867a43da2 | [
"MIT"
] | null | null | null | mysite/myapp/migrations/0004_auto_20191219_1946.py | Niyy/monsterpedia | fa43286a49c5c6098c79c33f55af00a867a43da2 | [
"MIT"
] | null | null | null | mysite/myapp/migrations/0004_auto_20191219_1946.py | Niyy/monsterpedia | fa43286a49c5c6098c79c33f55af00a867a43da2 | [
"MIT"
] | null | null | null | # Generated by Django 2.2.8 on 2019-12-19 19:46
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated: change Monster.monster_picture upload path bucketing."""

    dependencies = [
        ('myapp', '0003_auto_20191218_0740'),
    ]

    operations = [
        migrations.AlterField(
            model_name='monster',
            name='monster_picture',
            # NOTE(review): '%h' and the trailing '%m' look like they were
            # meant to be '%H'/'%M' (hour/minute) — '%m' is month and '%h'
            # is not a standard Python strftime directive. Confirm against
            # the model definition; don't fix here alone or the migration
            # would drift from the model state.
            field=models.ImageField(upload_to='uploads/%Y/%m/%d/%h/%m'),
        ),
    ]
| 21.894737 | 72 | 0.598558 | 323 | 0.776442 | 0 | 0 | 0 | 0 | 0 | 0 | 129 | 0.310096 |
4fb0b3f6ba847c924951e836cc3ba121616356df | 2,089 | py | Python | src/data/caf/make_data.py | tiphaine/o2-base | 9c70b0b6e1f63bf0f12411df64fcb7fa02ca754f | [
"MIT"
] | null | null | null | src/data/caf/make_data.py | tiphaine/o2-base | 9c70b0b6e1f63bf0f12411df64fcb7fa02ca754f | [
"MIT"
] | 2 | 2020-02-19T18:42:35.000Z | 2020-02-19T18:44:16.000Z | src/data/caf/make_data.py | tiphaine/o2-base | 9c70b0b6e1f63bf0f12411df64fcb7fa02ca754f | [
"MIT"
] | null | null | null | import pandas as pd
from caf import source_config
from utils import write_csv_file
def _get_year(p):
"""Get year from string having the DD/MM/YEAR format."""
date_tokens = p['dtref'].split('/')
if len(date_tokens) == 3:
date_year = date_tokens[-1]
else:
date_year = None
return date_year
def _format_column_name(raw_column_name, year):
"""Formats a raw column name to remove: spaces, year and character with
accents.
Args:
raw_column_name (str): a column name
year: the year tu remove
Returns:
(str) The formatted column name.
"""
raw_column_name = ('_'.join(raw_column_name.lower().split()))
raw_column_name = raw_column_name.replace('_en_{}_'.format(year), '_')
formatted_column_name = raw_column_name.replace('é', 'e')
return formatted_column_name
def make_caf_foyers_bas_revenus(decoupage_geo=None):
    """
    Collects and formats 'allocations foyers bas revenus' data for France by
    commune (source INSEE). Reads the information location and outputs in the
    `source_config.py` file.

    Args:
        decoupage_geo (str): geographic granularity key into
            source_config.foyers_alloc_bas_revenus_files; defaults to
            'commune'.
    """
    if decoupage_geo is None:
        decoupage_geo = 'commune'
    for year, file_paths in source_config.foyers_alloc_bas_revenus_files[
            decoupage_geo].items():
        raw_file = file_paths['raw']
        # Read the file once (the previous version parsed it twice: once just
        # for the header, once for the data) and normalise the column names.
        raw_data = pd.read_csv(raw_file, sep=';', encoding='ISO-8859-1')
        raw_data.columns = [_format_column_name(col, year)
                            for col in raw_data.columns]
        output_file = source_config.foyers_alloc_bas_revenus_files[
            decoupage_geo][year]['processed']
        if int(year) <= 2015:
            # Early files cover a single year; stamp it directly.
            raw_data['year'] = year
        else:
            # Later files carry a 'dtref' date column instead: derive the
            # year per row, keep only the rows for this year, drop 'dtref'.
            raw_data['year'] = raw_data.apply(_get_year, axis=1)
            raw_data = raw_data[raw_data.year == year]
            raw_data.drop(['dtref'], axis=1, inplace=True)
        write_csv_file(raw_data, output_file=output_file)
| 34.245902 | 77 | 0.637626 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 593 | 0.283732 |
96c4a51ecf35e22dc9c831e47236365c612bb6e2 | 3,635 | py | Python | draco/run.py | ngehlenborg/draco | e7e958d9494413e0578053359368b7918a0135de | [
"BSD-3-Clause"
] | null | null | null | draco/run.py | ngehlenborg/draco | e7e958d9494413e0578053359368b7918a0135de | [
"BSD-3-Clause"
] | null | null | null | draco/run.py | ngehlenborg/draco | e7e958d9494413e0578053359368b7918a0135de | [
"BSD-3-Clause"
] | null | null | null | '''
Run constraint solver to complete spec.
'''
import json
import logging
import os
import subprocess
import tempfile
from typing import Dict, List, Tuple, Optional
import clyngor
from draco.spec import Query, Task
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)

# ASP program files composing the Draco constraint program; concatenated in
# this order and fed to clingo by run_draco().
DRACO_LP = ['define.lp', 'generate.lp', 'hard.lp', 'soft.lp', 'weights.lp', 'assign_weights.lp', 'optimize.lp', 'output.lp']
# Absolute path to the asp/ directory, resolved relative to this module.
DRACO_LP_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__), '../asp'))
# Process-wide memo of file path -> UTF-8 encoded contents.
file_cache: Dict = {}


def load_file(path):
    """Return the contents of *path* as UTF-8 bytes, memoised in file_cache.

    The file is read as text (platform default encoding) and re-encoded to
    UTF-8 exactly once per path for the lifetime of the process.
    """
    cached = file_cache.get(path)
    if cached is None:
        with open(path) as fh:
            cached = fh.read().encode('utf8')
        file_cache[path] = cached
    return cached
def run_draco(task: Task, constants: Dict[str, str] = None, files: List[str] = None, silence_warnings=False, debug=False) -> Tuple[str, str]:
    '''
    Run draco and return stderr and stdout.

    Args:
        task: the (partial) spec to solve; serialized to ASP via to_asp().
        constants: clingo constant definitions, passed as `-c name=value`.
        files: ASP program file names (relative to DRACO_LP_DIR); defaults
            to DRACO_LP.
        silence_warnings: suppress clingo's undefined-atom warnings.
        debug: also dump the task's ASP program to a temp file and log a
            clingo command line that reproduces the call.

    Returns:
        (stderr, stdout) bytes from the clingo subprocess; stdout is JSON
        because of `--outf=2`.
    '''
    # default args
    files = files or DRACO_LP
    constants = constants or {}
    # --outf=2: JSON output; --quiet=1,2,2: keep only the last (best) model.
    options = ['--outf=2', '--quiet=1,2,2']
    if silence_warnings:
        options.append('--warn=no-atom-undefined')
    for name, value in constants.items():
        # NOTE(review): '-c name=value' is appended as a single argv token
        # with an embedded space — confirm clingo accepts the fused form.
        options.append(f'-c {name}={value}')
    cmd = ['clingo'] + options
    logger.debug('Command: %s', ' '.join(cmd))
    proc = subprocess.Popen(
        args=cmd,
        stdin=subprocess.PIPE,
        stdout=subprocess.PIPE,
        stderr=subprocess.PIPE)
    task_program = task.to_asp()
    file_names = [os.path.join(DRACO_LP_DIR, f) for f in files]
    # Concatenate the (memoised) ASP program files with the task's own facts;
    # the whole program is fed to clingo on stdin.
    asp_program = b'\n'.join(map(load_file, file_names)) + task_program.encode('utf8')
    if debug:
        with tempfile.NamedTemporaryFile(mode='w', delete=False) as fd:
            fd.write(task_program)
        logger.info('Debug ASP with "clingo %s %s"', ' '.join(file_names), fd.name)
    stdout, stderr = proc.communicate(asp_program)
    return (stderr, stdout)
def run(task: Task, constants: Dict[str, str] = None, files: List[str] = None, silence_warnings=False, debug=False, clear_cache=False) -> Optional[Task]:
    ''' Run clingo to compute a completion of a partial spec or violations.

    Parses clingo's JSON output (see run_draco). Returns a Task built from
    the last witness of the best answer set, or None when the constraints
    are unsatisfiable or the result kind is unsupported.
    '''
    # Clear file cache. useful during development in notebooks.
    if clear_cache and file_cache:
        logger.warning('Cleared file cache')
        file_cache.clear()
    stderr, stdout = run_draco(task, constants, files, silence_warnings, debug)
    try:
        json_result = json.loads(stdout)
    except json.JSONDecodeError:
        # Dump the raw solver output before propagating the parse failure.
        logger.error('stdout: %s', stdout)
        logger.error('stderr: %s', stderr)
        raise
    if stderr:
        logger.error(stderr)
    result = json_result['Result']
    if result == 'UNSATISFIABLE':
        logger.info('Constraints are unsatisfiable.')
        return None
    elif result == 'OPTIMUM FOUND':
        # get the last witness, which is the best result
        answers = json_result['Call'][0]['Witnesses'][-1]
        logger.debug(answers['Value'])
        return Task.parse_from_answer(
            clyngor.Answers(answers['Value']).sorted,
            data=task.data,
            cost=json_result['Models']['Costs'][0])
    elif result == 'SATISFIABLE':
        answers = json_result['Call'][0]['Witnesses'][-1]
        # Without an optimization statement clingo should report exactly one
        # model (enforced here rather than silently taking the last).
        assert json_result['Models']['Number'] == 1, 'Should not have more than one model if we don\'t optimize'
        logger.debug(answers['Value'])
        return Task.parse_from_answer(
            clyngor.Answers(answers['Value']).sorted,
            data=task.data)
    else:
        logger.error('Unsupported result: %s', result)
        return None
| 30.291667 | 153 | 0.640165 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 854 | 0.234938 |
96c5fae37373ce25e688f3df204cf57572673e1e | 53,086 | py | Python | diptera_track_ui.py | jmmelis/DipteraTrack | 1d267ccd4248635233147f2035b900a433dc4536 | [
"MIT"
] | 1 | 2019-06-14T10:19:19.000Z | 2019-06-14T10:19:19.000Z | diptera_track_ui.py | jmmelis/DipteraTrack | 1d267ccd4248635233147f2035b900a433dc4536 | [
"MIT"
] | null | null | null | diptera_track_ui.py | jmmelis/DipteraTrack | 1d267ccd4248635233147f2035b900a433dc4536 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'diptera_track.ui'
#
# Created by: PyQt5 UI code generator 5.9.2
#
# WARNING! All changes made in this file will be lost!
from PyQt5 import QtCore, QtGui, QtWidgets
class Ui_MainWindow(object):
def setupUi(self, MainWindow):
MainWindow.setObjectName("MainWindow")
MainWindow.resize(1140, 683)
self.centralwidget = QtWidgets.QWidget(MainWindow)
self.centralwidget.setMinimumSize(QtCore.QSize(1124, 674))
self.centralwidget.setObjectName("centralwidget")
self.verticalLayout_5 = QtWidgets.QVBoxLayout(self.centralwidget)
self.verticalLayout_5.setObjectName("verticalLayout_5")
self.tabs = QtWidgets.QTabWidget(self.centralwidget)
self.tabs.setObjectName("tabs")
self.ses_par_tab = QtWidgets.QWidget()
self.ses_par_tab.setObjectName("ses_par_tab")
self.verticalLayout_4 = QtWidgets.QVBoxLayout(self.ses_par_tab)
self.verticalLayout_4.setObjectName("verticalLayout_4")
self.widget = QtWidgets.QWidget(self.ses_par_tab)
self.widget.setMinimumSize(QtCore.QSize(0, 551))
self.widget.setObjectName("widget")
self.folder_select_tree = QtWidgets.QTreeView(self.widget)
self.folder_select_tree.setGeometry(QtCore.QRect(9, 30, 571, 321))
self.folder_select_tree.setMinimumSize(QtCore.QSize(451, 0))
self.folder_select_tree.setObjectName("folder_select_tree")
self.label = QtWidgets.QLabel(self.widget)
self.label.setGeometry(QtCore.QRect(9, 9, 128, 16))
self.label.setObjectName("label")
self.label_3 = QtWidgets.QLabel(self.widget)
self.label_3.setGeometry(QtCore.QRect(600, 0, 124, 16))
self.label_3.setObjectName("label_3")
self.line = QtWidgets.QFrame(self.widget)
self.line.setGeometry(QtCore.QRect(590, 20, 511, 20))
self.line.setFrameShape(QtWidgets.QFrame.HLine)
self.line.setFrameShadow(QtWidgets.QFrame.Sunken)
self.line.setObjectName("line")
self.label_2 = QtWidgets.QLabel(self.widget)
self.label_2.setGeometry(QtCore.QRect(600, 30, 91, 16))
self.label_2.setObjectName("label_2")
self.ses_folder_label = QtWidgets.QLabel(self.widget)
self.ses_folder_label.setGeometry(QtCore.QRect(620, 50, 471, 20))
self.ses_folder_label.setObjectName("ses_folder_label")
self.label_5 = QtWidgets.QLabel(self.widget)
self.label_5.setGeometry(QtCore.QRect(600, 90, 141, 16))
self.label_5.setObjectName("label_5")
self.bckg_folder_label = QtWidgets.QLabel(self.widget)
self.bckg_folder_label.setGeometry(QtCore.QRect(620, 110, 281, 20))
self.bckg_folder_label.setObjectName("bckg_folder_label")
self.line_2 = QtWidgets.QFrame(self.widget)
self.line_2.setGeometry(QtCore.QRect(580, 30, 20, 561))
self.line_2.setFrameShape(QtWidgets.QFrame.VLine)
self.line_2.setFrameShadow(QtWidgets.QFrame.Sunken)
self.line_2.setObjectName("line_2")
self.label_7 = QtWidgets.QLabel(self.widget)
self.label_7.setGeometry(QtCore.QRect(600, 130, 291, 16))
self.label_7.setObjectName("label_7")
self.cal_folder_label = QtWidgets.QLabel(self.widget)
self.cal_folder_label.setGeometry(QtCore.QRect(620, 150, 381, 20))
self.cal_folder_label.setObjectName("cal_folder_label")
self.label_9 = QtWidgets.QLabel(self.widget)
self.label_9.setGeometry(QtCore.QRect(600, 200, 291, 16))
self.label_9.setObjectName("label_9")
self.mov_folder1_label = QtWidgets.QLabel(self.widget)
self.mov_folder1_label.setGeometry(QtCore.QRect(620, 220, 371, 20))
self.mov_folder1_label.setObjectName("mov_folder1_label")
self.mov_folder2_label = QtWidgets.QLabel(self.widget)
self.mov_folder2_label.setGeometry(QtCore.QRect(620, 240, 371, 20))
self.mov_folder2_label.setObjectName("mov_folder2_label")
self.mov_folder3_label = QtWidgets.QLabel(self.widget)
self.mov_folder3_label.setGeometry(QtCore.QRect(620, 260, 371, 20))
self.mov_folder3_label.setObjectName("mov_folder3_label")
self.mov_folder4_label = QtWidgets.QLabel(self.widget)
self.mov_folder4_label.setGeometry(QtCore.QRect(620, 280, 371, 20))
self.mov_folder4_label.setObjectName("mov_folder4_label")
self.mov_folder5_label = QtWidgets.QLabel(self.widget)
self.mov_folder5_label.setGeometry(QtCore.QRect(620, 300, 371, 20))
self.mov_folder5_label.setObjectName("mov_folder5_label")
self.mov_folder6_label = QtWidgets.QLabel(self.widget)
self.mov_folder6_label.setGeometry(QtCore.QRect(620, 320, 371, 20))
self.mov_folder6_label.setObjectName("mov_folder6_label")
self.mov_folder7_label = QtWidgets.QLabel(self.widget)
self.mov_folder7_label.setGeometry(QtCore.QRect(620, 340, 371, 20))
self.mov_folder7_label.setObjectName("mov_folder7_label")
self.mov_folder8_label = QtWidgets.QLabel(self.widget)
self.mov_folder8_label.setGeometry(QtCore.QRect(620, 360, 371, 20))
self.mov_folder8_label.setObjectName("mov_folder8_label")
self.label_18 = QtWidgets.QLabel(self.widget)
self.label_18.setGeometry(QtCore.QRect(600, 390, 301, 20))
self.label_18.setObjectName("label_18")
self.cam_folder1_label = QtWidgets.QLabel(self.widget)
self.cam_folder1_label.setGeometry(QtCore.QRect(620, 410, 371, 20))
self.cam_folder1_label.setObjectName("cam_folder1_label")
self.cam_folder2_label = QtWidgets.QLabel(self.widget)
self.cam_folder2_label.setGeometry(QtCore.QRect(620, 430, 371, 20))
self.cam_folder2_label.setObjectName("cam_folder2_label")
self.cam_folder3_label = QtWidgets.QLabel(self.widget)
self.cam_folder3_label.setGeometry(QtCore.QRect(620, 450, 371, 20))
self.cam_folder3_label.setObjectName("cam_folder3_label")
self.cam_folder4_label = QtWidgets.QLabel(self.widget)
self.cam_folder4_label.setGeometry(QtCore.QRect(620, 470, 371, 20))
self.cam_folder4_label.setObjectName("cam_folder4_label")
self.ses_folder_rbtn = QtWidgets.QRadioButton(self.widget)
self.ses_folder_rbtn.setGeometry(QtCore.QRect(600, 50, 21, 21))
self.ses_folder_rbtn.setObjectName("ses_folder_rbtn")
self.bckg_folder_rbtn = QtWidgets.QRadioButton(self.widget)
self.bckg_folder_rbtn.setGeometry(QtCore.QRect(600, 110, 21, 21))
self.bckg_folder_rbtn.setObjectName("bckg_folder_rbtn")
self.cal_folder_rbtn = QtWidgets.QRadioButton(self.widget)
self.cal_folder_rbtn.setGeometry(QtCore.QRect(600, 150, 21, 21))
self.cal_folder_rbtn.setObjectName("cal_folder_rbtn")
self.mov_folder1_rbtn = QtWidgets.QRadioButton(self.widget)
self.mov_folder1_rbtn.setGeometry(QtCore.QRect(600, 220, 21, 21))
self.mov_folder1_rbtn.setObjectName("mov_folder1_rbtn")
self.mov_folder2_rbtn = QtWidgets.QRadioButton(self.widget)
self.mov_folder2_rbtn.setGeometry(QtCore.QRect(600, 240, 21, 21))
self.mov_folder2_rbtn.setObjectName("mov_folder2_rbtn")
self.mov_folder3_rbtn = QtWidgets.QRadioButton(self.widget)
self.mov_folder3_rbtn.setGeometry(QtCore.QRect(600, 260, 21, 21))
self.mov_folder3_rbtn.setObjectName("mov_folder3_rbtn")
self.mov_folder4_rbtn = QtWidgets.QRadioButton(self.widget)
self.mov_folder4_rbtn.setGeometry(QtCore.QRect(600, 280, 21, 21))
self.mov_folder4_rbtn.setObjectName("mov_folder4_rbtn")
self.mov_folder5_rbtn = QtWidgets.QRadioButton(self.widget)
self.mov_folder5_rbtn.setGeometry(QtCore.QRect(600, 300, 21, 21))
self.mov_folder5_rbtn.setObjectName("mov_folder5_rbtn")
self.mov_folder6_rbtn = QtWidgets.QRadioButton(self.widget)
self.mov_folder6_rbtn.setGeometry(QtCore.QRect(600, 320, 21, 21))
self.mov_folder6_rbtn.setObjectName("mov_folder6_rbtn")
self.mov_folder7_rbtn = QtWidgets.QRadioButton(self.widget)
self.mov_folder7_rbtn.setGeometry(QtCore.QRect(600, 340, 21, 21))
self.mov_folder7_rbtn.setObjectName("mov_folder7_rbtn")
self.mov_folder8_rbtn = QtWidgets.QRadioButton(self.widget)
self.mov_folder8_rbtn.setGeometry(QtCore.QRect(600, 360, 21, 21))
self.mov_folder8_rbtn.setObjectName("mov_folder8_rbtn")
self.cam_folder1_rbtn = QtWidgets.QRadioButton(self.widget)
self.cam_folder1_rbtn.setGeometry(QtCore.QRect(600, 410, 21, 21))
self.cam_folder1_rbtn.setObjectName("cam_folder1_rbtn")
self.cam_folder2_rbtn = QtWidgets.QRadioButton(self.widget)
self.cam_folder2_rbtn.setGeometry(QtCore.QRect(600, 430, 21, 21))
self.cam_folder2_rbtn.setObjectName("cam_folder2_rbtn")
self.cam_folder3_rbtn = QtWidgets.QRadioButton(self.widget)
self.cam_folder3_rbtn.setGeometry(QtCore.QRect(600, 450, 21, 21))
self.cam_folder3_rbtn.setObjectName("cam_folder3_rbtn")
self.cam_folder4_rbtn = QtWidgets.QRadioButton(self.widget)
self.cam_folder4_rbtn.setGeometry(QtCore.QRect(600, 470, 21, 21))
self.cam_folder4_rbtn.setObjectName("cam_folder4_rbtn")
self.cam_folder5_rbtn = QtWidgets.QRadioButton(self.widget)
self.cam_folder5_rbtn.setGeometry(QtCore.QRect(600, 490, 21, 21))
self.cam_folder5_rbtn.setObjectName("cam_folder5_rbtn")
self.cam_folder6_rbtn = QtWidgets.QRadioButton(self.widget)
self.cam_folder6_rbtn.setGeometry(QtCore.QRect(600, 510, 21, 21))
self.cam_folder6_rbtn.setObjectName("cam_folder6_rbtn")
self.cam_folder5_label = QtWidgets.QLabel(self.widget)
self.cam_folder5_label.setGeometry(QtCore.QRect(620, 490, 371, 20))
self.cam_folder5_label.setObjectName("cam_folder5_label")
self.cam_folder6_label = QtWidgets.QLabel(self.widget)
self.cam_folder6_label.setGeometry(QtCore.QRect(620, 510, 371, 20))
self.cam_folder6_label.setObjectName("cam_folder6_label")
self.label_25 = QtWidgets.QLabel(self.widget)
self.label_25.setGeometry(QtCore.QRect(600, 540, 201, 16))
self.label_25.setObjectName("label_25")
self.frame_name_rbtn = QtWidgets.QRadioButton(self.widget)
self.frame_name_rbtn.setGeometry(QtCore.QRect(600, 560, 21, 21))
self.frame_name_rbtn.setObjectName("frame_name_rbtn")
self.frame_name_label = QtWidgets.QLabel(self.widget)
self.frame_name_label.setGeometry(QtCore.QRect(620, 560, 391, 20))
self.frame_name_label.setObjectName("frame_name_label")
self.label_27 = QtWidgets.QLabel(self.widget)
self.label_27.setGeometry(QtCore.QRect(930, 80, 161, 20))
self.label_27.setObjectName("label_27")
self.bck_img_fmt_box = QtWidgets.QComboBox(self.widget)
self.bck_img_fmt_box.setGeometry(QtCore.QRect(1020, 100, 79, 23))
self.bck_img_fmt_box.setObjectName("bck_img_fmt_box")
self.label_28 = QtWidgets.QLabel(self.widget)
self.label_28.setGeometry(QtCore.QRect(930, 130, 161, 20))
self.label_28.setObjectName("label_28")
self.cal_img_fmt_box = QtWidgets.QComboBox(self.widget)
self.cal_img_fmt_box.setGeometry(QtCore.QRect(1020, 150, 79, 23))
self.cal_img_fmt_box.setObjectName("cal_img_fmt_box")
self.label_29 = QtWidgets.QLabel(self.widget)
self.label_29.setGeometry(QtCore.QRect(970, 540, 131, 20))
self.label_29.setObjectName("label_29")
self.frame_img_fmt_box = QtWidgets.QComboBox(self.widget)
self.frame_img_fmt_box.setGeometry(QtCore.QRect(1020, 560, 79, 23))
self.frame_img_fmt_box.setObjectName("frame_img_fmt_box")
self.line_4 = QtWidgets.QFrame(self.widget)
self.line_4.setGeometry(QtCore.QRect(10, 460, 571, 16))
self.line_4.setFrameShape(QtWidgets.QFrame.HLine)
self.line_4.setFrameShadow(QtWidgets.QFrame.Sunken)
self.line_4.setObjectName("line_4")
self.line_5 = QtWidgets.QFrame(self.widget)
self.line_5.setGeometry(QtCore.QRect(10, 580, 1091, 20))
self.line_5.setFrameShape(QtWidgets.QFrame.HLine)
self.line_5.setFrameShadow(QtWidgets.QFrame.Sunken)
self.line_5.setObjectName("line_5")
self.label_30 = QtWidgets.QLabel(self.widget)
self.label_30.setGeometry(QtCore.QRect(250, 470, 151, 16))
self.label_30.setObjectName("label_30")
self.start_frame_spin = QtWidgets.QSpinBox(self.widget)
self.start_frame_spin.setGeometry(QtCore.QRect(120, 490, 91, 24))
self.start_frame_spin.setObjectName("start_frame_spin")
self.label_31 = QtWidgets.QLabel(self.widget)
self.label_31.setGeometry(QtCore.QRect(10, 490, 101, 16))
self.label_31.setObjectName("label_31")
self.label_32 = QtWidgets.QLabel(self.widget)
self.label_32.setGeometry(QtCore.QRect(10, 520, 101, 16))
self.label_32.setObjectName("label_32")
self.trig_frame_spin = QtWidgets.QSpinBox(self.widget)
self.trig_frame_spin.setGeometry(QtCore.QRect(120, 520, 91, 24))
self.trig_frame_spin.setObjectName("trig_frame_spin")
self.label_33 = QtWidgets.QLabel(self.widget)
self.label_33.setGeometry(QtCore.QRect(10, 550, 101, 16))
self.label_33.setObjectName("label_33")
self.end_frame_spin = QtWidgets.QSpinBox(self.widget)
self.end_frame_spin.setGeometry(QtCore.QRect(120, 550, 91, 24))
self.end_frame_spin.setObjectName("end_frame_spin")
self.label_34 = QtWidgets.QLabel(self.widget)
self.label_34.setGeometry(QtCore.QRect(250, 490, 91, 16))
self.label_34.setObjectName("label_34")
self.trig_mode_box = QtWidgets.QComboBox(self.widget)
self.trig_mode_box.setGeometry(QtCore.QRect(350, 490, 111, 23))
self.trig_mode_box.setObjectName("trig_mode_box")
self.line_3 = QtWidgets.QFrame(self.widget)
self.line_3.setGeometry(QtCore.QRect(10, 350, 571, 16))
self.line_3.setFrameShape(QtWidgets.QFrame.HLine)
self.line_3.setFrameShadow(QtWidgets.QFrame.Sunken)
self.line_3.setObjectName("line_3")
self.label_4 = QtWidgets.QLabel(self.widget)
self.label_4.setGeometry(QtCore.QRect(250, 360, 101, 16))
self.label_4.setObjectName("label_4")
self.mdl_loc_rbtn = QtWidgets.QRadioButton(self.widget)
self.mdl_loc_rbtn.setGeometry(QtCore.QRect(10, 400, 21, 21))
self.mdl_loc_rbtn.setObjectName("mdl_loc_rbtn")
self.mdl_loc_label = QtWidgets.QLabel(self.widget)
self.mdl_loc_label.setGeometry(QtCore.QRect(40, 400, 541, 21))
self.mdl_loc_label.setObjectName("mdl_loc_label")
self.label_10 = QtWidgets.QLabel(self.widget)
self.label_10.setGeometry(QtCore.QRect(10, 380, 171, 16))
self.label_10.setObjectName("label_10")
self.label_11 = QtWidgets.QLabel(self.widget)
self.label_11.setGeometry(QtCore.QRect(10, 420, 141, 16))
self.label_11.setObjectName("label_11")
self.mdl_name_rbtn = QtWidgets.QRadioButton(self.widget)
self.mdl_name_rbtn.setGeometry(QtCore.QRect(10, 440, 21, 21))
self.mdl_name_rbtn.setObjectName("mdl_name_rbtn")
self.mdl_name_label = QtWidgets.QLabel(self.widget)
self.mdl_name_label.setGeometry(QtCore.QRect(40, 440, 541, 21))
self.mdl_name_label.setObjectName("mdl_name_label")
self.label_6 = QtWidgets.QLabel(self.widget)
self.label_6.setGeometry(QtCore.QRect(600, 170, 101, 16))
self.label_6.setObjectName("label_6")
self.cal_file_label = QtWidgets.QLabel(self.widget)
self.cal_file_label.setGeometry(QtCore.QRect(710, 170, 291, 20))
self.cal_file_label.setObjectName("cal_file_label")
self.label_8 = QtWidgets.QLabel(self.widget)
self.label_8.setGeometry(QtCore.QRect(600, 70, 101, 16))
self.label_8.setObjectName("label_8")
self.ses_name_label = QtWidgets.QLabel(self.widget)
self.ses_name_label.setGeometry(QtCore.QRect(700, 70, 381, 20))
self.ses_name_label.setObjectName("ses_name_label")
self.reset_selection_push_btn = QtWidgets.QPushButton(self.widget)
self.reset_selection_push_btn.setGeometry(QtCore.QRect(470, 0, 101, 23))
self.reset_selection_push_btn.setObjectName("reset_selection_push_btn")
self.start_session_push_btn = QtWidgets.QPushButton(self.widget)
self.start_session_push_btn.setGeometry(QtCore.QRect(1010, 600, 85, 23))
self.start_session_push_btn.setObjectName("start_session_push_btn")
self.save_settings_push_btn = QtWidgets.QPushButton(self.widget)
self.save_settings_push_btn.setGeometry(QtCore.QRect(870, 600, 131, 23))
self.save_settings_push_btn.setObjectName("save_settings_push_btn")
self.load_settings_file_label = QtWidgets.QLabel(self.widget)
self.load_settings_file_label.setGeometry(QtCore.QRect(40, 600, 671, 21))
self.load_settings_file_label.setObjectName("load_settings_file_label")
self.load_settings_push_btn = QtWidgets.QPushButton(self.widget)
self.load_settings_push_btn.setGeometry(QtCore.QRect(720, 600, 141, 23))
self.load_settings_push_btn.setObjectName("load_settings_push_btn")
self.load_settings_rbtn = QtWidgets.QRadioButton(self.widget)
self.load_settings_rbtn.setGeometry(QtCore.QRect(10, 600, 21, 21))
self.load_settings_rbtn.setObjectName("load_settings_rbtn")
self.verticalLayout_4.addWidget(self.widget)
self.tabs.addTab(self.ses_par_tab, "")
self.focal_grid_tab = QtWidgets.QWidget()
self.focal_grid_tab.setObjectName("focal_grid_tab")
self.gridLayout_2 = QtWidgets.QGridLayout(self.focal_grid_tab)
self.gridLayout_2.setObjectName("gridLayout_2")
self.widget_2 = QtWidgets.QWidget(self.focal_grid_tab)
self.widget_2.setObjectName("widget_2")
self.gridLayout_7 = QtWidgets.QGridLayout(self.widget_2)
self.gridLayout_7.setObjectName("gridLayout_7")
self.line_9 = QtWidgets.QFrame(self.widget_2)
self.line_9.setFrameShape(QtWidgets.QFrame.HLine)
self.line_9.setFrameShadow(QtWidgets.QFrame.Sunken)
self.line_9.setObjectName("line_9")
self.gridLayout_7.addWidget(self.line_9, 0, 0, 2, 8)
self.label_16 = QtWidgets.QLabel(self.widget_2)
self.label_16.setObjectName("label_16")
self.gridLayout_7.addWidget(self.label_16, 1, 2, 2, 3)
self.line_6 = QtWidgets.QFrame(self.widget_2)
self.line_6.setFrameShape(QtWidgets.QFrame.HLine)
self.line_6.setFrameShadow(QtWidgets.QFrame.Sunken)
self.line_6.setObjectName("line_6")
self.gridLayout_7.addWidget(self.line_6, 2, 0, 1, 2)
self.label_12 = QtWidgets.QLabel(self.widget_2)
self.label_12.setObjectName("label_12")
self.gridLayout_7.addWidget(self.label_12, 3, 2, 1, 1)
self.nx_spin = QtWidgets.QSpinBox(self.widget_2)
self.nx_spin.setObjectName("nx_spin")
self.gridLayout_7.addWidget(self.nx_spin, 3, 3, 1, 1)
spacerItem = QtWidgets.QSpacerItem(928, 213, QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Expanding)
self.gridLayout_7.addItem(spacerItem, 3, 4, 7, 4)
self.label_13 = QtWidgets.QLabel(self.widget_2)
self.label_13.setObjectName("label_13")
self.gridLayout_7.addWidget(self.label_13, 4, 2, 1, 1)
self.ny_spin = QtWidgets.QSpinBox(self.widget_2)
self.ny_spin.setObjectName("ny_spin")
self.gridLayout_7.addWidget(self.ny_spin, 4, 3, 1, 1)
self.label_14 = QtWidgets.QLabel(self.widget_2)
self.label_14.setObjectName("label_14")
self.gridLayout_7.addWidget(self.label_14, 5, 2, 1, 1)
self.nz_spin = QtWidgets.QSpinBox(self.widget_2)
self.nz_spin.setObjectName("nz_spin")
self.gridLayout_7.addWidget(self.nz_spin, 5, 3, 1, 1)
self.label_15 = QtWidgets.QLabel(self.widget_2)
self.label_15.setObjectName("label_15")
self.gridLayout_7.addWidget(self.label_15, 6, 2, 1, 1)
self.ds_spin = QtWidgets.QDoubleSpinBox(self.widget_2)
self.ds_spin.setObjectName("ds_spin")
self.gridLayout_7.addWidget(self.ds_spin, 6, 3, 1, 1)
self.label_17 = QtWidgets.QLabel(self.widget_2)
self.label_17.setObjectName("label_17")
self.gridLayout_7.addWidget(self.label_17, 7, 2, 1, 1)
self.x0_spin = QtWidgets.QDoubleSpinBox(self.widget_2)
self.x0_spin.setObjectName("x0_spin")
self.gridLayout_7.addWidget(self.x0_spin, 7, 3, 1, 1)
self.label_19 = QtWidgets.QLabel(self.widget_2)
self.label_19.setObjectName("label_19")
self.gridLayout_7.addWidget(self.label_19, 8, 2, 1, 1)
self.y0_spin = QtWidgets.QDoubleSpinBox(self.widget_2)
self.y0_spin.setObjectName("y0_spin")
self.gridLayout_7.addWidget(self.y0_spin, 8, 3, 1, 1)
self.label_20 = QtWidgets.QLabel(self.widget_2)
self.label_20.setObjectName("label_20")
self.gridLayout_7.addWidget(self.label_20, 9, 2, 1, 1)
self.z0_spin = QtWidgets.QDoubleSpinBox(self.widget_2)
self.z0_spin.setObjectName("z0_spin")
self.gridLayout_7.addWidget(self.z0_spin, 9, 3, 1, 1)
self.line_7 = QtWidgets.QFrame(self.widget_2)
self.line_7.setFrameShape(QtWidgets.QFrame.HLine)
self.line_7.setFrameShadow(QtWidgets.QFrame.Sunken)
self.line_7.setObjectName("line_7")
self.gridLayout_7.addWidget(self.line_7, 10, 0, 1, 7)
spacerItem1 = QtWidgets.QSpacerItem(696, 48, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
self.gridLayout_7.addItem(spacerItem1, 10, 7, 2, 1)
self.calc_vox_btn = QtWidgets.QPushButton(self.widget_2)
self.calc_vox_btn.setObjectName("calc_vox_btn")
self.gridLayout_7.addWidget(self.calc_vox_btn, 11, 0, 1, 4)
self.vox_progress_bar = QtWidgets.QProgressBar(self.widget_2)
self.vox_progress_bar.setMinimumSize(QtCore.QSize(211, 0))
self.vox_progress_bar.setProperty("value", 24)
self.vox_progress_bar.setObjectName("vox_progress_bar")
self.gridLayout_7.addWidget(self.vox_progress_bar, 11, 5, 1, 2)
self.line_8 = QtWidgets.QFrame(self.widget_2)
self.line_8.setFrameShape(QtWidgets.QFrame.HLine)
self.line_8.setFrameShadow(QtWidgets.QFrame.Sunken)
self.line_8.setObjectName("line_8")
self.gridLayout_7.addWidget(self.line_8, 12, 0, 2, 8)
self.label_49 = QtWidgets.QLabel(self.widget_2)
self.label_49.setObjectName("label_49")
self.gridLayout_7.addWidget(self.label_49, 13, 1, 2, 6)
spacerItem2 = QtWidgets.QSpacerItem(804, 48, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
self.gridLayout_7.addItem(spacerItem2, 14, 6, 2, 2)
self.label_50 = QtWidgets.QLabel(self.widget_2)
self.label_50.setObjectName("label_50")
self.gridLayout_7.addWidget(self.label_50, 15, 1, 1, 3)
self.pixel_size_spin = QtWidgets.QDoubleSpinBox(self.widget_2)
self.pixel_size_spin.setObjectName("pixel_size_spin")
self.gridLayout_7.addWidget(self.pixel_size_spin, 15, 4, 1, 2)
spacerItem3 = QtWidgets.QSpacerItem(1079, 267, QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Expanding)
self.gridLayout_7.addItem(spacerItem3, 16, 0, 1, 8)
self.gridLayout_2.addWidget(self.widget_2, 0, 0, 1, 1)
self.tabs.addTab(self.focal_grid_tab, "")
self.model_scale_tab = QtWidgets.QWidget()
self.model_scale_tab.setObjectName("model_scale_tab")
self.verticalLayout_2 = QtWidgets.QVBoxLayout(self.model_scale_tab)
self.verticalLayout_2.setObjectName("verticalLayout_2")
self.widget_3 = QtWidgets.QWidget(self.model_scale_tab)
self.widget_3.setObjectName("widget_3")
self.gridLayout_3 = QtWidgets.QGridLayout(self.widget_3)
self.gridLayout_3.setObjectName("gridLayout_3")
self.rawFrameView = ScaleModelWidget(self.widget_3)
self.rawFrameView.setMinimumSize(QtCore.QSize(1091, 511))
self.rawFrameView.setObjectName("rawFrameView")
self.gridLayout_3.addWidget(self.rawFrameView, 0, 0, 1, 1)
self.widget_4 = QtWidgets.QWidget(self.widget_3)
self.widget_4.setMinimumSize(QtCore.QSize(1091, 0))
self.widget_4.setMaximumSize(QtCore.QSize(16777215, 101))
self.widget_4.setObjectName("widget_4")
self.gridLayout = QtWidgets.QGridLayout(self.widget_4)
self.gridLayout.setObjectName("gridLayout")
self.label_22 = QtWidgets.QLabel(self.widget_4)
self.label_22.setObjectName("label_22")
self.gridLayout.addWidget(self.label_22, 0, 0, 1, 1)
self.scaleTable = QtWidgets.QTableWidget(self.widget_4)
self.scaleTable.setMinimumSize(QtCore.QSize(411, 81))
self.scaleTable.setObjectName("scaleTable")
self.scaleTable.setColumnCount(0)
self.scaleTable.setRowCount(0)
self.gridLayout.addWidget(self.scaleTable, 0, 1, 4, 1)
spacerItem4 = QtWidgets.QSpacerItem(248, 78, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
self.gridLayout.addItem(spacerItem4, 0, 2, 4, 1)
self.raw_mov_spin = QtWidgets.QSpinBox(self.widget_4)
self.raw_mov_spin.setObjectName("raw_mov_spin")
self.gridLayout.addWidget(self.raw_mov_spin, 1, 0, 1, 1)
self.load_scale_btn = QtWidgets.QPushButton(self.widget_4)
self.load_scale_btn.setObjectName("load_scale_btn")
self.gridLayout.addWidget(self.load_scale_btn, 1, 3, 2, 1)
self.save_scale_btn = QtWidgets.QPushButton(self.widget_4)
self.save_scale_btn.setObjectName("save_scale_btn")
self.gridLayout.addWidget(self.save_scale_btn, 1, 4, 2, 1)
self.raw_frame_spin = QtWidgets.QSpinBox(self.widget_4)
self.raw_frame_spin.setObjectName("raw_frame_spin")
self.gridLayout.addWidget(self.raw_frame_spin, 3, 0, 1, 1)
self.set_model_btn = QtWidgets.QPushButton(self.widget_4)
self.set_model_btn.setObjectName("set_model_btn")
self.gridLayout.addWidget(self.set_model_btn, 1, 5, 2, 1)
self.label_21 = QtWidgets.QLabel(self.widget_4)
self.label_21.setObjectName("label_21")
self.gridLayout.addWidget(self.label_21, 2, 0, 1, 1)
self.gridLayout_3.addWidget(self.widget_4, 1, 0, 1, 1)
self.verticalLayout_2.addWidget(self.widget_3)
self.tabs.addTab(self.model_scale_tab, "")
self.model_view_tab = QtWidgets.QWidget()
self.model_view_tab.setObjectName("model_view_tab")
self.horizontalLayout = QtWidgets.QHBoxLayout(self.model_view_tab)
self.horizontalLayout.setObjectName("horizontalLayout")
self.model_param_disp = QtWidgets.QWidget(self.model_view_tab)
self.model_param_disp.setObjectName("model_param_disp")
self.gridLayout_4 = QtWidgets.QGridLayout(self.model_param_disp)
self.gridLayout_4.setObjectName("gridLayout_4")
self.label_23 = QtWidgets.QLabel(self.model_param_disp)
self.label_23.setMinimumSize(QtCore.QSize(114, 621))
self.label_23.setObjectName("label_23")
self.gridLayout_4.addWidget(self.label_23, 0, 0, 1, 1)
self.horizontalLayout.addWidget(self.model_param_disp)
self.model_view_window = ModelViewWidget(self.model_view_tab)
self.model_view_window.setMinimumSize(QtCore.QSize(971, 631))
self.model_view_window.setFrameShape(QtWidgets.QFrame.StyledPanel)
self.model_view_window.setFrameShadow(QtWidgets.QFrame.Raised)
self.model_view_window.setObjectName("model_view_window")
self.horizontalLayout.addWidget(self.model_view_window)
self.tabs.addTab(self.model_view_tab, "")
self.segment_tab = QtWidgets.QWidget()
self.segment_tab.setObjectName("segment_tab")
self.verticalLayout = QtWidgets.QVBoxLayout(self.segment_tab)
self.verticalLayout.setObjectName("verticalLayout")
self.seg_view = ImageSegmentWidget(self.segment_tab)
self.seg_view.setMinimumSize(QtCore.QSize(1101, 481))
self.seg_view.setObjectName("seg_view")
self.verticalLayout.addWidget(self.seg_view)
self.seg_widget = QtWidgets.QWidget(self.segment_tab)
self.seg_widget.setMinimumSize(QtCore.QSize(1122, 90))
self.seg_widget.setMaximumSize(QtCore.QSize(16777215, 141))
self.seg_widget.setObjectName("seg_widget")
self.gridLayout_5 = QtWidgets.QGridLayout(self.seg_widget)
self.gridLayout_5.setObjectName("gridLayout_5")
self.label_40 = QtWidgets.QLabel(self.seg_widget)
self.label_40.setObjectName("label_40")
self.gridLayout_5.addWidget(self.label_40, 0, 0, 1, 1)
self.label_24 = QtWidgets.QLabel(self.seg_widget)
self.label_24.setObjectName("label_24")
self.gridLayout_5.addWidget(self.label_24, 0, 1, 1, 1)
self.label_26 = QtWidgets.QLabel(self.seg_widget)
self.label_26.setObjectName("label_26")
self.gridLayout_5.addWidget(self.label_26, 0, 2, 1, 1)
self.label_35 = QtWidgets.QLabel(self.seg_widget)
self.label_35.setObjectName("label_35")
self.gridLayout_5.addWidget(self.label_35, 0, 3, 1, 1)
self.label_36 = QtWidgets.QLabel(self.seg_widget)
self.label_36.setObjectName("label_36")
self.gridLayout_5.addWidget(self.label_36, 0, 4, 1, 1)
self.label_37 = QtWidgets.QLabel(self.seg_widget)
self.label_37.setObjectName("label_37")
self.gridLayout_5.addWidget(self.label_37, 0, 5, 1, 1)
self.label_38 = QtWidgets.QLabel(self.seg_widget)
self.label_38.setObjectName("label_38")
self.gridLayout_5.addWidget(self.label_38, 0, 6, 1, 1)
self.label_39 = QtWidgets.QLabel(self.seg_widget)
self.label_39.setObjectName("label_39")
self.gridLayout_5.addWidget(self.label_39, 0, 7, 1, 1)
self.line_10 = QtWidgets.QFrame(self.seg_widget)
self.line_10.setFrameShape(QtWidgets.QFrame.VLine)
self.line_10.setFrameShadow(QtWidgets.QFrame.Sunken)
self.line_10.setObjectName("line_10")
self.gridLayout_5.addWidget(self.line_10, 0, 8, 4, 1)
self.label_43 = QtWidgets.QLabel(self.seg_widget)
self.label_43.setObjectName("label_43")
self.gridLayout_5.addWidget(self.label_43, 0, 9, 1, 2)
self.line_11 = QtWidgets.QFrame(self.seg_widget)
self.line_11.setFrameShape(QtWidgets.QFrame.VLine)
self.line_11.setFrameShadow(QtWidgets.QFrame.Sunken)
self.line_11.setObjectName("line_11")
self.gridLayout_5.addWidget(self.line_11, 0, 12, 4, 1)
spacerItem5 = QtWidgets.QSpacerItem(176, 110, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
self.gridLayout_5.addItem(spacerItem5, 0, 13, 4, 1)
spacerItem6 = QtWidgets.QSpacerItem(88, 81, QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Expanding)
self.gridLayout_5.addItem(spacerItem6, 0, 14, 3, 1)
self.seg_mov_spin = QtWidgets.QSpinBox(self.seg_widget)
self.seg_mov_spin.setObjectName("seg_mov_spin")
self.gridLayout_5.addWidget(self.seg_mov_spin, 1, 0, 1, 1)
self.seg_frame_spin = QtWidgets.QSpinBox(self.seg_widget)
self.seg_frame_spin.setObjectName("seg_frame_spin")
self.gridLayout_5.addWidget(self.seg_frame_spin, 1, 1, 1, 1)
self.body_thresh_spin = QtWidgets.QSpinBox(self.seg_widget)
self.body_thresh_spin.setObjectName("body_thresh_spin")
self.gridLayout_5.addWidget(self.body_thresh_spin, 1, 2, 1, 1)
self.wing_thresh_spin = QtWidgets.QSpinBox(self.seg_widget)
self.wing_thresh_spin.setObjectName("wing_thresh_spin")
self.gridLayout_5.addWidget(self.wing_thresh_spin, 1, 3, 1, 1)
self.sigma_spin = QtWidgets.QDoubleSpinBox(self.seg_widget)
self.sigma_spin.setObjectName("sigma_spin")
self.gridLayout_5.addWidget(self.sigma_spin, 1, 4, 1, 1)
self.K_spin = QtWidgets.QSpinBox(self.seg_widget)
self.K_spin.setObjectName("K_spin")
self.gridLayout_5.addWidget(self.K_spin, 1, 5, 1, 1)
self.min_body_spin = QtWidgets.QSpinBox(self.seg_widget)
self.min_body_spin.setObjectName("min_body_spin")
self.gridLayout_5.addWidget(self.min_body_spin, 1, 6, 1, 1)
self.min_wing_spin = QtWidgets.QSpinBox(self.seg_widget)
self.min_wing_spin.setObjectName("min_wing_spin")
self.gridLayout_5.addWidget(self.min_wing_spin, 1, 7, 1, 1)
self.label_44 = QtWidgets.QLabel(self.seg_widget)
self.label_44.setObjectName("label_44")
self.gridLayout_5.addWidget(self.label_44, 1, 9, 1, 1)
self.mask_cam_nr_spin = QtWidgets.QSpinBox(self.seg_widget)
self.mask_cam_nr_spin.setObjectName("mask_cam_nr_spin")
self.gridLayout_5.addWidget(self.mask_cam_nr_spin, 1, 10, 1, 2)
self.label_45 = QtWidgets.QLabel(self.seg_widget)
self.label_45.setObjectName("label_45")
self.gridLayout_5.addWidget(self.label_45, 2, 9, 1, 1)
self.mask_seg_nr_spin = QtWidgets.QSpinBox(self.seg_widget)
self.mask_seg_nr_spin.setObjectName("mask_seg_nr_spin")
self.gridLayout_5.addWidget(self.mask_seg_nr_spin, 2, 10, 1, 2)
self.seg_update_btn = QtWidgets.QPushButton(self.seg_widget)
self.seg_update_btn.setObjectName("seg_update_btn")
self.gridLayout_5.addWidget(self.seg_update_btn, 3, 0, 1, 2)
self.add_mask_btn = QtWidgets.QPushButton(self.seg_widget)
self.add_mask_btn.setObjectName("add_mask_btn")
self.gridLayout_5.addWidget(self.add_mask_btn, 3, 9, 1, 1)
self.reset_mask_btn = QtWidgets.QPushButton(self.seg_widget)
self.reset_mask_btn.setObjectName("reset_mask_btn")
self.gridLayout_5.addWidget(self.reset_mask_btn, 3, 11, 1, 1)
self.continue_btn = QtWidgets.QPushButton(self.seg_widget)
self.continue_btn.setObjectName("continue_btn")
self.gridLayout_5.addWidget(self.continue_btn, 3, 14, 1, 1)
self.verticalLayout.addWidget(self.seg_widget)
self.tabs.addTab(self.segment_tab, "")
self.pcl_view_tab = QtWidgets.QWidget()
self.pcl_view_tab.setObjectName("pcl_view_tab")
self.verticalLayout_6 = QtWidgets.QVBoxLayout(self.pcl_view_tab)
self.verticalLayout_6.setObjectName("verticalLayout_6")
self.pcl_view = BBoxWidget(self.pcl_view_tab)
self.pcl_view.setMinimumSize(QtCore.QSize(1121, 521))
self.pcl_view.setFrameShape(QtWidgets.QFrame.StyledPanel)
self.pcl_view.setFrameShadow(QtWidgets.QFrame.Raised)
self.pcl_view.setObjectName("pcl_view")
self.verticalLayout_6.addWidget(self.pcl_view)
self.widget_5 = QtWidgets.QWidget(self.pcl_view_tab)
self.widget_5.setMinimumSize(QtCore.QSize(1101, 111))
self.widget_5.setObjectName("widget_5")
self.gridLayout_8 = QtWidgets.QGridLayout(self.widget_5)
self.gridLayout_8.setObjectName("gridLayout_8")
self.label_41 = QtWidgets.QLabel(self.widget_5)
self.label_41.setObjectName("label_41")
self.gridLayout_8.addWidget(self.label_41, 0, 0, 1, 1)
self.flight_select_btn_group = QtWidgets.QGroupBox(self.widget_5)
self.flight_select_btn_group.setObjectName("flight_select_btn_group")
self.gridLayout_6 = QtWidgets.QGridLayout(self.flight_select_btn_group)
self.gridLayout_6.setObjectName("gridLayout_6")
self.tethered_radio_btn = QtWidgets.QRadioButton(self.flight_select_btn_group)
self.tethered_radio_btn.setObjectName("tethered_radio_btn")
self.gridLayout_6.addWidget(self.tethered_radio_btn, 0, 0, 1, 1)
self.free_radio_btn = QtWidgets.QRadioButton(self.flight_select_btn_group)
self.free_radio_btn.setObjectName("free_radio_btn")
self.gridLayout_6.addWidget(self.free_radio_btn, 1, 0, 1, 1)
self.gridLayout_8.addWidget(self.flight_select_btn_group, 0, 1, 4, 1)
self.label_47 = QtWidgets.QLabel(self.widget_5)
self.label_47.setObjectName("label_47")
self.gridLayout_8.addWidget(self.label_47, 0, 2, 1, 1)
self.label_51 = QtWidgets.QLabel(self.widget_5)
self.label_51.setObjectName("label_51")
self.gridLayout_8.addWidget(self.label_51, 0, 3, 1, 1)
spacerItem7 = QtWidgets.QSpacerItem(456, 90, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
self.gridLayout_8.addItem(spacerItem7, 0, 4, 4, 1)
self.view_select_group = QtWidgets.QGroupBox(self.widget_5)
self.view_select_group.setObjectName("view_select_group")
self.verticalLayout_3 = QtWidgets.QVBoxLayout(self.view_select_group)
self.verticalLayout_3.setObjectName("verticalLayout_3")
self.pcl_view_btn = QtWidgets.QRadioButton(self.view_select_group)
self.pcl_view_btn.setObjectName("pcl_view_btn")
self.verticalLayout_3.addWidget(self.pcl_view_btn)
self.bbox_view_btn = QtWidgets.QRadioButton(self.view_select_group)
self.bbox_view_btn.setObjectName("bbox_view_btn")
self.verticalLayout_3.addWidget(self.bbox_view_btn)
self.model_view_btn = QtWidgets.QRadioButton(self.view_select_group)
self.model_view_btn.setObjectName("model_view_btn")
self.verticalLayout_3.addWidget(self.model_view_btn)
self.gridLayout_8.addWidget(self.view_select_group, 0, 5, 4, 1)
self.pcl_mov_spin = QtWidgets.QSpinBox(self.widget_5)
self.pcl_mov_spin.setObjectName("pcl_mov_spin")
self.gridLayout_8.addWidget(self.pcl_mov_spin, 1, 0, 1, 1)
self.stroke_bound_spin = QtWidgets.QSpinBox(self.widget_5)
self.stroke_bound_spin.setObjectName("stroke_bound_spin")
self.gridLayout_8.addWidget(self.stroke_bound_spin, 1, 2, 1, 1)
self.wing_pitch_bound_spin = QtWidgets.QSpinBox(self.widget_5)
self.wing_pitch_bound_spin.setObjectName("wing_pitch_bound_spin")
self.gridLayout_8.addWidget(self.wing_pitch_bound_spin, 1, 3, 1, 1)
self.label_42 = QtWidgets.QLabel(self.widget_5)
self.label_42.setObjectName("label_42")
self.gridLayout_8.addWidget(self.label_42, 2, 0, 1, 1)
self.label_48 = QtWidgets.QLabel(self.widget_5)
self.label_48.setObjectName("label_48")
self.gridLayout_8.addWidget(self.label_48, 2, 2, 1, 1)
self.label_46 = QtWidgets.QLabel(self.widget_5)
self.label_46.setObjectName("label_46")
self.gridLayout_8.addWidget(self.label_46, 2, 3, 1, 1)
self.pcl_frame_spin = QtWidgets.QSpinBox(self.widget_5)
self.pcl_frame_spin.setObjectName("pcl_frame_spin")
self.gridLayout_8.addWidget(self.pcl_frame_spin, 3, 0, 1, 1)
self.dev_bound_spin = QtWidgets.QSpinBox(self.widget_5)
self.dev_bound_spin.setObjectName("dev_bound_spin")
self.gridLayout_8.addWidget(self.dev_bound_spin, 3, 2, 1, 1)
self.sphere_radius_spin = QtWidgets.QDoubleSpinBox(self.widget_5)
self.sphere_radius_spin.setObjectName("sphere_radius_spin")
self.gridLayout_8.addWidget(self.sphere_radius_spin, 3, 3, 1, 1)
self.verticalLayout_6.addWidget(self.widget_5)
self.tabs.addTab(self.pcl_view_tab, "")
self.opt_tab = QtWidgets.QWidget()
self.opt_tab.setObjectName("opt_tab")
self.verticalLayout_7 = QtWidgets.QVBoxLayout(self.opt_tab)
self.verticalLayout_7.setObjectName("verticalLayout_7")
self.opt_widget = QtWidgets.QWidget(self.opt_tab)
self.opt_widget.setObjectName("opt_widget")
self.verticalLayout_8 = QtWidgets.QVBoxLayout(self.opt_widget)
self.verticalLayout_8.setObjectName("verticalLayout_8")
self.contour_view = ContourViewWidget(self.opt_widget)
self.contour_view.setObjectName("contour_view")
self.verticalLayout_8.addWidget(self.contour_view)
self.opt_settings_widget = QtWidgets.QWidget(self.opt_widget)
self.opt_settings_widget.setMinimumSize(QtCore.QSize(0, 120))
self.opt_settings_widget.setObjectName("opt_settings_widget")
self.gridLayout_9 = QtWidgets.QGridLayout(self.opt_settings_widget)
self.gridLayout_9.setObjectName("gridLayout_9")
self.label_52 = QtWidgets.QLabel(self.opt_settings_widget)
self.label_52.setObjectName("label_52")
self.gridLayout_9.addWidget(self.label_52, 0, 0, 1, 1)
self.label_54 = QtWidgets.QLabel(self.opt_settings_widget)
self.label_54.setObjectName("label_54")
self.gridLayout_9.addWidget(self.label_54, 0, 1, 1, 1)
spacerItem8 = QtWidgets.QSpacerItem(849, 78, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
self.gridLayout_9.addItem(spacerItem8, 0, 2, 3, 1)
self.init_view_check = QtWidgets.QCheckBox(self.opt_settings_widget)
self.init_view_check.setObjectName("init_view_check")
self.gridLayout_9.addWidget(self.init_view_check, 0, 3, 1, 1)
self.opt_mov_spin = QtWidgets.QSpinBox(self.opt_settings_widget)
self.opt_mov_spin.setObjectName("opt_mov_spin")
self.gridLayout_9.addWidget(self.opt_mov_spin, 1, 0, 1, 1)
self.alpha_spin = QtWidgets.QDoubleSpinBox(self.opt_settings_widget)
self.alpha_spin.setObjectName("alpha_spin")
self.gridLayout_9.addWidget(self.alpha_spin, 1, 1, 1, 1)
self.dest_view_check = QtWidgets.QCheckBox(self.opt_settings_widget)
self.dest_view_check.setObjectName("dest_view_check")
self.gridLayout_9.addWidget(self.dest_view_check, 1, 3, 2, 1)
self.label_53 = QtWidgets.QLabel(self.opt_settings_widget)
self.label_53.setObjectName("label_53")
self.gridLayout_9.addWidget(self.label_53, 2, 0, 1, 1)
self.opt_frame_spin = QtWidgets.QSpinBox(self.opt_settings_widget)
self.opt_frame_spin.setObjectName("opt_frame_spin")
self.gridLayout_9.addWidget(self.opt_frame_spin, 3, 0, 1, 1)
self.src_view_check = QtWidgets.QCheckBox(self.opt_settings_widget)
self.src_view_check.setObjectName("src_view_check")
self.gridLayout_9.addWidget(self.src_view_check, 3, 3, 1, 1)
self.verticalLayout_8.addWidget(self.opt_settings_widget)
self.verticalLayout_7.addWidget(self.opt_widget)
self.tabs.addTab(self.opt_tab, "")
self.verticalLayout_5.addWidget(self.tabs)
MainWindow.setCentralWidget(self.centralwidget)
self.retranslateUi(MainWindow)
self.tabs.setCurrentIndex(6)
QtCore.QMetaObject.connectSlotsByName(MainWindow)
def retranslateUi(self, MainWindow):
    """Install all user-visible strings on the widgets created by setupUi().

    Every string is routed through QCoreApplication.translate with the
    "MainWindow" context so Qt's translation machinery (QTranslator / .qm
    files) can substitute localized text at runtime.

    NOTE(review): this module follows the pyuic-generated "Ui_MainWindow"
    pattern; hand edits are normally lost when the .ui file is regenerated —
    confirm before modifying by hand.
    """
    _translate = QtCore.QCoreApplication.translate
    MainWindow.setWindowTitle(_translate("MainWindow", "DipteraTrack"))
    # --- "Movie selection" tab: session / folder selection labels ---
    self.label.setText(_translate("MainWindow", "Select session folder:"))
    self.label_3.setText(_translate("MainWindow", "Session parameters"))
    self.label_2.setText(_translate("MainWindow", "Session folder:"))
    self.ses_folder_label.setText(_translate("MainWindow", "..."))
    self.label_5.setText(_translate("MainWindow", "Background folder:"))
    self.bckg_folder_label.setText(_translate("MainWindow", "..."))
    self.label_7.setText(_translate("MainWindow", "Calibration folder:"))
    self.cal_folder_label.setText(_translate("MainWindow", "..."))
    self.label_9.setText(_translate("MainWindow", "Movie folders:"))
    # Placeholder "..." labels are overwritten once the user picks a folder.
    self.mov_folder1_label.setText(_translate("MainWindow", "..."))
    self.mov_folder2_label.setText(_translate("MainWindow", "..."))
    self.mov_folder3_label.setText(_translate("MainWindow", "..."))
    self.mov_folder4_label.setText(_translate("MainWindow", "..."))
    self.mov_folder5_label.setText(_translate("MainWindow", "..."))
    self.mov_folder6_label.setText(_translate("MainWindow", "..."))
    self.mov_folder7_label.setText(_translate("MainWindow", "..."))
    self.mov_folder8_label.setText(_translate("MainWindow", "..."))
    self.label_18.setText(_translate("MainWindow", "Camera folders:"))
    self.cam_folder1_label.setText(_translate("MainWindow", "..."))
    self.cam_folder2_label.setText(_translate("MainWindow", "..."))
    self.cam_folder3_label.setText(_translate("MainWindow", "..."))
    self.cam_folder4_label.setText(_translate("MainWindow", "..."))
    # "RadioButton" is Qt Designer's default text; the buttons act as
    # selection toggles next to each folder row.
    self.ses_folder_rbtn.setText(_translate("MainWindow", "RadioButton"))
    self.bckg_folder_rbtn.setText(_translate("MainWindow", "RadioButton"))
    self.cal_folder_rbtn.setText(_translate("MainWindow", "RadioButton"))
    self.mov_folder1_rbtn.setText(_translate("MainWindow", "RadioButton"))
    self.mov_folder2_rbtn.setText(_translate("MainWindow", "RadioButton"))
    self.mov_folder3_rbtn.setText(_translate("MainWindow", "RadioButton"))
    self.mov_folder4_rbtn.setText(_translate("MainWindow", "RadioButton"))
    self.mov_folder5_rbtn.setText(_translate("MainWindow", "RadioButton"))
    self.mov_folder6_rbtn.setText(_translate("MainWindow", "RadioButton"))
    self.mov_folder7_rbtn.setText(_translate("MainWindow", "RadioButton"))
    self.mov_folder8_rbtn.setText(_translate("MainWindow", "RadioButton"))
    self.cam_folder1_rbtn.setText(_translate("MainWindow", "RadioButton"))
    self.cam_folder2_rbtn.setText(_translate("MainWindow", "RadioButton"))
    self.cam_folder3_rbtn.setText(_translate("MainWindow", "RadioButton"))
    self.cam_folder4_rbtn.setText(_translate("MainWindow", "RadioButton"))
    self.cam_folder5_rbtn.setText(_translate("MainWindow", "RadioButton"))
    self.cam_folder6_rbtn.setText(_translate("MainWindow", "RadioButton"))
    self.cam_folder5_label.setText(_translate("MainWindow", "..."))
    self.cam_folder6_label.setText(_translate("MainWindow", "..."))
    self.label_25.setText(_translate("MainWindow", "Frame name:"))
    self.frame_name_rbtn.setText(_translate("MainWindow", "RadioButton"))
    self.frame_name_label.setText(_translate("MainWindow", "..."))
    self.label_27.setText(_translate("MainWindow", "Background image format:"))
    self.label_28.setText(_translate("MainWindow", "Calibration image format:"))
    self.label_29.setText(_translate("MainWindow", "Frame image format:"))
    # --- Trigger / model / session settings group ---
    self.label_30.setText(_translate("MainWindow", "Trigger settings"))
    self.label_31.setText(_translate("MainWindow", "start frame nr:"))
    self.label_32.setText(_translate("MainWindow", "trigger frame nr:"))
    self.label_33.setText(_translate("MainWindow", "end frame nr:"))
    self.label_34.setText(_translate("MainWindow", "Trigger mode:"))
    self.label_4.setText(_translate("MainWindow", "Model settings"))
    self.mdl_loc_rbtn.setText(_translate("MainWindow", "RadioButton"))
    self.mdl_loc_label.setText(_translate("MainWindow", "..."))
    self.label_10.setText(_translate("MainWindow", "Model location:"))
    self.label_11.setText(_translate("MainWindow", "Model name:"))
    self.mdl_name_rbtn.setText(_translate("MainWindow", "RadioButton"))
    self.mdl_name_label.setText(_translate("MainWindow", "..."))
    self.label_6.setText(_translate("MainWindow", "Calibration file:"))
    self.cal_file_label.setText(_translate("MainWindow", "..."))
    self.label_8.setText(_translate("MainWindow", "Session name:"))
    self.ses_name_label.setText(_translate("MainWindow", "..."))
    self.reset_selection_push_btn.setText(_translate("MainWindow", "reset selection"))
    self.start_session_push_btn.setText(_translate("MainWindow", "start session"))
    self.save_settings_push_btn.setText(_translate("MainWindow", "save parameter file"))
    self.load_settings_file_label.setText(_translate("MainWindow", "..."))
    self.load_settings_push_btn.setText(_translate("MainWindow", "load parameter file"))
    self.load_settings_rbtn.setText(_translate("MainWindow", "RadioButton"))
    self.tabs.setTabText(self.tabs.indexOf(self.ses_par_tab), _translate("MainWindow", "Movie selection"))
    # --- "Voxel grid" tab ---
    self.label_16.setText(_translate("MainWindow", "Voxel grid parameters:"))
    self.label_12.setText(_translate("MainWindow", "Nx:"))
    self.label_13.setText(_translate("MainWindow", "Ny:"))
    self.label_14.setText(_translate("MainWindow", "Nz:"))
    self.label_15.setText(_translate("MainWindow", "ds:"))
    self.label_17.setText(_translate("MainWindow", "x0:"))
    self.label_19.setText(_translate("MainWindow", "y0:"))
    self.label_20.setText(_translate("MainWindow", "z0:"))
    self.calc_vox_btn.setText(_translate("MainWindow", "calculate voxel grid"))
    self.label_49.setText(_translate("MainWindow", "Camera parameters:"))
    self.label_50.setText(_translate("MainWindow", "pixel size (mm):"))
    self.tabs.setTabText(self.tabs.indexOf(self.focal_grid_tab), _translate("MainWindow", "Voxel grid"))
    # --- "Scale model" tab ---
    self.label_22.setText(_translate("MainWindow", "Movie nr:"))
    self.load_scale_btn.setText(_translate("MainWindow", "load model scale"))
    self.save_scale_btn.setText(_translate("MainWindow", "save model scale"))
    self.set_model_btn.setText(_translate("MainWindow", "set model scale"))
    self.label_21.setText(_translate("MainWindow", "Frame:"))
    self.tabs.setTabText(self.tabs.indexOf(self.model_scale_tab), _translate("MainWindow", "Scale model"))
    # --- "Model view" tab ---
    self.label_23.setText(_translate("MainWindow", "Model parameters:"))
    self.tabs.setTabText(self.tabs.indexOf(self.model_view_tab), _translate("MainWindow", "Model view"))
    # --- "Segmentation" tab ---
    self.label_40.setText(_translate("MainWindow", "movie nr:"))
    self.label_24.setText(_translate("MainWindow", "frame:"))
    self.label_26.setText(_translate("MainWindow", "body threshold"))
    self.label_35.setText(_translate("MainWindow", "wing threshold"))
    self.label_36.setText(_translate("MainWindow", "sigma"))
    self.label_37.setText(_translate("MainWindow", "K"))
    self.label_38.setText(_translate("MainWindow", "min body area"))
    self.label_39.setText(_translate("MainWindow", "min wing area"))
    self.label_43.setText(_translate("MainWindow", "Set image mask:"))
    self.label_44.setText(_translate("MainWindow", "cam nr:"))
    self.label_45.setText(_translate("MainWindow", "segment nr:"))
    self.seg_update_btn.setText(_translate("MainWindow", "update"))
    self.add_mask_btn.setText(_translate("MainWindow", "add to mask"))
    self.reset_mask_btn.setText(_translate("MainWindow", "reset"))
    self.continue_btn.setText(_translate("MainWindow", "continue"))
    self.tabs.setTabText(self.tabs.indexOf(self.segment_tab), _translate("MainWindow", "Segmentation"))
    # --- "Pointcloud view" tab ---
    self.label_41.setText(_translate("MainWindow", "movie nr:"))
    self.tethered_radio_btn.setText(_translate("MainWindow", "tethered flight"))
    self.free_radio_btn.setText(_translate("MainWindow", "free flight"))
    self.label_47.setText(_translate("MainWindow", "stroke angle bound:"))
    self.label_51.setText(_translate("MainWindow", "wing pitch angle bound:"))
    self.pcl_view_btn.setText(_translate("MainWindow", "pcl view"))
    self.bbox_view_btn.setText(_translate("MainWindow", "bbox view"))
    self.model_view_btn.setText(_translate("MainWindow", "model view"))
    self.label_42.setText(_translate("MainWindow", "frame nr:"))
    self.label_48.setText(_translate("MainWindow", "deviation angle bound:"))
    self.label_46.setText(_translate("MainWindow", "sphere radius:"))
    self.tabs.setTabText(self.tabs.indexOf(self.pcl_view_tab), _translate("MainWindow", "Pointcloud view"))
    # --- "Contour optimization" tab ---
    self.label_52.setText(_translate("MainWindow", "movie nr:"))
    self.label_54.setText(_translate("MainWindow", "alpha:"))
    self.init_view_check.setText(_translate("MainWindow", "initial state"))
    self.dest_view_check.setText(_translate("MainWindow", "destination contour"))
    self.label_53.setText(_translate("MainWindow", "frame nr:"))
    self.src_view_check.setText(_translate("MainWindow", "source contour"))
    self.tabs.setTabText(self.tabs.indexOf(self.opt_tab), _translate("MainWindow", "Contour optimization"))
from BoundingBoxWidget import BBoxWidget
from ContourViewWidget import ContourViewWidget
from ImageSegmentWidget import ImageSegmentWidget
from ModelViewWidget import ModelViewWidget
from ScaleModelWidget import ScaleModelWidget
if __name__ == "__main__":
    import sys

    # Stand-alone preview entry point: build the Qt application, attach the
    # generated UI to a fresh main window, and show it.
    application = QtWidgets.QApplication(sys.argv)
    main_window = QtWidgets.QMainWindow()
    user_interface = Ui_MainWindow()
    user_interface.setupUi(main_window)
    main_window.show()
    # Run the Qt event loop and propagate its exit status to the shell.
    sys.exit(application.exec_())
| 63.047506 | 118 | 0.714878 | 52,382 | 0.986738 | 0 | 0 | 0 | 0 | 0 | 0 | 6,319 | 0.119033 |
96c869993e67e90b938d23f96a94a64def202fe5 | 6,337 | py | Python | orcid_service/tests/stubdata/work_single_409.py | nemanjamart/orcid-service | 596973819b904ad28a9d84a773db09d5039bee26 | [
"MIT"
] | 1 | 2017-11-06T22:45:48.000Z | 2017-11-06T22:45:48.000Z | orcid_service/tests/stubdata/work_single_409.py | nemanjamart/orcid-service | 596973819b904ad28a9d84a773db09d5039bee26 | [
"MIT"
] | 31 | 2015-02-23T14:29:31.000Z | 2021-03-10T14:59:52.000Z | orcid_service/tests/stubdata/work_single_409.py | nemanjamart/orcid-service | 596973819b904ad28a9d84a773db09d5039bee26 | [
"MIT"
] | 10 | 2015-02-12T23:07:24.000Z | 2022-01-04T19:00:20.000Z | data = {
"publication-date": {
"year": {
"value": "2020"},
"month": {"value": "01"}},
"short-description": "With a central surface brightness of 29.3 mag arcsec, and half-light radius of r_half=3.1^{+0.9}_{-1.1} kpc, Andromeda XIX (And XIX) is an extremely diffuse satellite of Andromeda.",
"external-ids": {
"external-id": [
{
"external-id-type": "bibcode",
"external-id-value": "2020MNRAS.491.3496C",
"external-id-relationship": "SELF"
},
{
"external-id-type": "doi",
"external-id-value": "10.1093/mnras/stz3252",
"external-id-relationship": "SELF"
},
{
"external-id-type": "arxiv",
"external-id-value": "1910.12879",
"external-id-relationship": "SELF"
}
]
},
"journal-title": {
"value": "Monthly Notices of the Royal Astronomical Society"
},
"type": "JOURNAL_ARTICLE",
"contributors": {
"contributor": [
{
"credit-name": {
"value": "Collins, Michelle L. M."
},
"contributor-attributes": {
"contributor-role": "AUTHOR"
}
},
{
"credit-name": {
"value": "Tollerud, Erik J."
},
"contributor-attributes": {
"contributor-role": "AUTHOR"
}
},
{
"credit-name": {
"value": "Rich, R. Michael"
},
"contributor-attributes": {
"contributor-role": "AUTHOR"
}
},
{
"credit-name": {
"value": "Ibata, Rodrigo A."
},
"contributor-attributes": {
"contributor-role": "AUTHOR"
}
},
{
"credit-name": {
"value": "Martin, Nicolas F."
},
"contributor-attributes": {
"contributor-role": "AUTHOR"
}
},
{
"credit-name": {
"value": "Chapman, Scott C."
},
"contributor-attributes": {
"contributor-role": "AUTHOR"
}
},
{
"credit-name": {
"value": "Gilbert, Karoline M."
},
"contributor-attributes": {
"contributor-role": "AUTHOR"
}
},
{
"credit-name": {
"value": "Preston, Janet"
},
"contributor-attributes": {
"contributor-role": "AUTHOR"
}
}
]
},
"title": {
"title": {
"value": "A detailed study of Andromeda XIX, an extreme local analogue of ultradiffuse galaxies"
}
},
"put-code": 63945135
}
data_noarxiv = {
"publication-date": {
"year": {
"value": "2020"},
"month": {"value": "01"}},
"short-description": "With a central surface brightness of 29.3 mag arcsec, and half-light radius of r_half=3.1^{+0.9}_{-1.1} kpc, Andromeda XIX (And XIX) is an extremely diffuse satellite of Andromeda.",
"external-ids": {
"external-id": [
{
"external-id-type": "bibcode",
"external-id-value": "2020MNRAS.491.3496C",
"external-id-relationship": "SELF"
},
{
"external-id-type": "doi",
"external-id-value": "10.1093/mnras/stz3252",
"external-id-relationship": "SELF"
}
]
},
"journal-title": {
"value": "Monthly Notices of the Royal Astronomical Society"
},
"type": "JOURNAL_ARTICLE",
"contributors": {
"contributor": [
{
"credit-name": {
"value": "Collins, Michelle L. M."
},
"contributor-attributes": {
"contributor-role": "AUTHOR"
}
},
{
"credit-name": {
"value": "Tollerud, Erik J."
},
"contributor-attributes": {
"contributor-role": "AUTHOR"
}
},
{
"credit-name": {
"value": "Rich, R. Michael"
},
"contributor-attributes": {
"contributor-role": "AUTHOR"
}
},
{
"credit-name": {
"value": "Ibata, Rodrigo A."
},
"contributor-attributes": {
"contributor-role": "AUTHOR"
}
},
{
"credit-name": {
"value": "Martin, Nicolas F."
},
"contributor-attributes": {
"contributor-role": "AUTHOR"
}
},
{
"credit-name": {
"value": "Chapman, Scott C."
},
"contributor-attributes": {
"contributor-role": "AUTHOR"
}
},
{
"credit-name": {
"value": "Gilbert, Karoline M."
},
"contributor-attributes": {
"contributor-role": "AUTHOR"
}
},
{
"credit-name": {
"value": "Preston, Janet"
},
"contributor-attributes": {
"contributor-role": "AUTHOR"
}
}
]
},
"title": {
"title": {
"value": "A detailed study of Andromeda XIX, an extreme local analogue of ultradiffuse galaxies"
}
},
"put-code": 63945135
}
| 30.912195 | 208 | 0.372889 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2,964 | 0.467729 |
96c88c858215a0c25dd7b0d7a56c86c6fc360ef4 | 263 | py | Python | tests/test_day5.py | n1ckdm/advent-of-code-2020 | 913ea4cff29fa76df15c0c22616cc1eebb903490 | [
"MIT"
] | 1 | 2020-12-05T09:25:03.000Z | 2020-12-05T09:25:03.000Z | tests/test_day5.py | n1ckdm/advent-of-code-2020 | 913ea4cff29fa76df15c0c22616cc1eebb903490 | [
"MIT"
] | null | null | null | tests/test_day5.py | n1ckdm/advent-of-code-2020 | 913ea4cff29fa76df15c0c22616cc1eebb903490 | [
"MIT"
] | null | null | null | from aoc_2020.day5 import part1, get_pos, part2
data = """BFFFBBFRRR
FFFBBBFRRR
BBFFBBFRLL
"""
def test_get_pos():
assert get_pos("FBFBBFFRLR") == (44, 5)
def test_part1():
assert part1(data) == 820
def test_part2():
assert part2(data) is None
| 13.842105 | 47 | 0.680608 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 51 | 0.193916 |
96c8adb800760315ee27599c677403371d5d3acb | 22,434 | py | Python | python/util.py | spatialaudio/aes148-shelving-filter | a11de97d6be79c23ffc55084ca95d9da15f3e3eb | [
"MIT"
] | 5 | 2020-06-04T12:28:10.000Z | 2022-03-07T12:35:54.000Z | python/util.py | spatialaudio/aes148-shelving-filter | a11de97d6be79c23ffc55084ca95d9da15f3e3eb | [
"MIT"
] | null | null | null | python/util.py | spatialaudio/aes148-shelving-filter | a11de97d6be79c23ffc55084ca95d9da15f3e3eb | [
"MIT"
] | 1 | 2020-11-18T06:19:37.000Z | 2020-11-18T06:19:37.000Z | """Shelving Filter Cascade with Adjustable Transition Slope and Bandwidth
Frank Schultz, Nara Hahn, Sascha Spors
In: Proc. of 148th AES Convention, Virtual Vienna, May 2020, Paper 10339
http://www.aes.org/e-lib/browse.cfm?elib=20756
"""
import numpy as np
from scipy.signal import tf2sos, freqs
from matplotlib import rcParams
def halfpadloss_shelving_filter_num_den_coeff(G):
"""Half-pad-loss polynomial coefficients for 1st/2nd order shelving filter.
- see type III in
long-url: https://github.com/spatialaudio/digital-signal-processing-lecture/blob/master/filter_desig/audiofilter.ipynb # noqa
- see Sec. 3.2 in https://doi.org/10.3390/app6050129
"""
sign = np.sign(G) # amplify/boost (1) or attenuate/cut (-1)
g = 10**(np.abs(G) / 20) # linear gain
n1, n2 = g**(sign / 4), g**(sign / 2) # numerator coeff
d1, d2 = 1 / n1, 1 / n2 # denominator coeff
return n1, n2, d1, d2
def normalized_low_shelving_1st_coeff(G=-10*np.log10(2)):
"""See low_shelving_1st_coeff() for omega=1."""
n1, n2, d1, d2 = halfpadloss_shelving_filter_num_den_coeff(G)
b, a = np.array([0, 1, n2]), np.array([0, 1, d2])
return b, a
def low_shelving_1st_coeff(omega=1, G=-10*np.log10(2)):
"""Half-pad-loss/mid-level low shelving filter 1st order.
Parameters
----------
omega : angular frequency in rad/s at half-pad-loss/mid-level
G : level in dB (G/2 at omega)
Returns
-------
b[0] s^2 + b[1] s^1 + b[2] s^0
b,a : coefficients for Laplace H(s) = ------------------------------
a[0] s^2 + a[1] s^1 + a[2] s^0
with s = j omega, note: b[0]=a[0]=0 here for 1st order filter
see halfpadloss_shelving_filter_num_den_coeff() for references
"""
b, a = normalized_low_shelving_1st_coeff(G=G)
scale = omega**np.arange(-2., 1.) # powers in the Laplace domain
return b * scale, a * scale
def normalized_high_shelving_1st_coeff(G=-10*np.log10(2)):
"""See high_shelving_1st_coeff() for omega=1."""
n1, n2, d1, d2 = halfpadloss_shelving_filter_num_den_coeff(G)
b, a = np.array([0, n2, 1]), np.array([0, d2, 1])
return b, a
def high_shelving_1st_coeff(omega=1, G=-10*np.log10(2)):
"""Half-pad-loss/mid-level high shelving filter 1st order.
Parameters
----------
omega : angular frequency in rad/s at half-pad-loss/mid-level
G : level in dB (G/2 at omega)
Returns
-------
b[0] s^2 + b[1] s^1 + b[2] s^0
b,a : coefficients for Laplace H(s) = ------------------------------
a[0] s^2 + a[1] s^1 + a[2] s^0
with s = j omega, note: b[0]=a[0]=0 here for 1st order filter
see halfpadloss_shelving_filter_num_den_coeff() for references
"""
b, a = normalized_high_shelving_1st_coeff(G=G)
scale = omega**np.arange(-2., 1.) # powers in the Laplace domain
return b * scale, a * scale
def normalized_low_shelving_2nd_coeff(G=-10*np.log10(2), Q=1/np.sqrt(2)):
"""See low_shelving_2nd_coeff() for omega=1."""
n1, n2, d1, d2 = halfpadloss_shelving_filter_num_den_coeff(G)
b, a = np.array([1, n1 / Q, n2]), np.array([1, d1 / Q, d2])
return b, a
def low_shelving_2nd_coeff(omega=1, G=-10*np.log10(2), Q=1/np.sqrt(2)):
"""Half-pad-loss/mid-level low shelving filter 2nd order.
Parameters
----------
omega : angular frequency in rad/s at half-pad-loss/mid-level
G : level in dB (G/2 at omega)
Q : pole/zero quality, Q>0.5
Returns
-------
b[0] s^2 + b[1] s^1 + b[2] s^0
b,a : coefficients for Laplace H(s) = ------------------------------
a[0] s^2 + a[1] s^1 + a[2] s^0
with s = j omega
see halfpadloss_shelving_filter_num_den_coeff() for references
"""
b, a = normalized_low_shelving_2nd_coeff(G=G, Q=Q)
scale = omega**np.arange(-2., 1.) # powers in the Laplace domain
return b * scale, a * scale
def normalized_high_shelving_2nd_coeff(G=-10*np.log10(2), Q=1/np.sqrt(2)):
"""See high_shelving_2nd_coeff() for omega=1."""
n1, n2, d1, d2 = halfpadloss_shelving_filter_num_den_coeff(G)
b, a = np.array([n2, n1 / Q, 1]), np.array([d2, d1 / Q, 1])
return b, a
def high_shelving_2nd_coeff(omega=1, G=-10*np.log10(2), Q=1/np.sqrt(2)):
"""Half-pad-loss/mid-level high shelving filter 2nd order.
Parameters
----------
omega : angular frequency in rad/s at half-pad-loss/mid-level
G : level in dB (G/2 at omega)
Q : pole/zero quality, Q>0.5
Returns
-------
b[0] s^2 + b[1] s^1 + b[2] s^0
b,a : coefficients for Laplace H(s) = ------------------------------
a[0] s^2 + a[1] s^1 + a[2] s^0
with s = j omega
see halfpadloss_shelving_filter_num_den_coeff() for references
"""
b, a = normalized_high_shelving_2nd_coeff(G=G, Q=Q)
scale = omega**np.arange(-2., 1.) # powers in the Laplace domain
return b * scale, a * scale
def db(x, *, power=False):
"""Convert *x* to decibel.
Parameters
----------
x : array_like
Input data. Values of 0 lead to negative infinity.
power : bool, optional
If ``power=False`` (the default), *x* is squared before
conversion.
"""
with np.errstate(divide='ignore'):
return (10 if power else 20) * np.log10(np.abs(x))
def db2lin(x):
return 10**(x / 20)
def shelving_slope_parameters(slope=None, BWd=None, Gd=None):
"""Compute the third parameter from the given two.
Parameters
----------
slope : float, optional
Desired shelving slope in decibel per octave.
BW : float, optional
Desired bandwidth of the slope in octave.
G : float, optional
Desired gain of the stop band in decibel.
"""
if slope == 0:
raise ValueError("`slope` should be nonzero.")
if slope and BWd is not None:
Gd = -BWd * slope
elif BWd and Gd is not None:
slope = -Gd / BWd
elif Gd and slope is not None:
if Gd * slope > 1:
raise ValueError("`Gd` and `slope` cannot have the same sign.")
else:
BWd = np.abs(Gd / slope)
else:
print('At lest two parameters need to be specified.')
return slope, BWd, Gd
def shelving_filter_parameters(biquad_per_octave, **kwargs):
"""Parameters for shelving filter design.
Parameters
----------
biquad_per_octave : float
Number of biquad filters per octave.
Returns
-------
num_biquad : int
Number of biquad filters.
Gb : float
Gain of each biquad filter in decibel.
G : float
Gain of overall (concatenated) filters in decibel. This might differ
from what is returned by `shelving_parameters`.
"""
slope, BWd, Gd = shelving_slope_parameters(**kwargs)
num_biquad = int(np.ceil(BWd * biquad_per_octave))
Gb = -slope / biquad_per_octave
G = Gb * num_biquad
return num_biquad, Gb, G
def check_shelving_filter_validity(biquad_per_octave, **kwargs):
"""Level, slope, bandwidth validity for shelving filter cascade.
Parameters
----------
biquad_per_octave : float
Number of biquad filters per octave.
see shelving_slope_parameters(), shelving_filter_parameters()
Returns
-------
flag = [Boolean, Boolean, Boolean]
if all True then intended parameter triplet holds, if not all True
deviations from desired response occur
"""
flag = [True, True, True]
slope, BWd, Gd = shelving_slope_parameters(**kwargs)
num_biquad, Gb, G = shelving_filter_parameters(biquad_per_octave, **kwargs)
# BWd < 1 octave generally fails
if BWd <= 1:
flag[0] = False
# BWd * biquad_per_octave needs to be integer
flag[1] = float(BWd * biquad_per_octave).is_integer()
# biquad_per_octave must be large enough
# for slope < 12.04 dB at least one biquad per ocatve is required
tmp = slope / (20*np.log10(4))
if tmp > 1.:
if biquad_per_octave < tmp:
flag[2] = False
else:
if biquad_per_octave < 1:
flag[2] = False
return flag
def low_shelving_1st_cascade(w0, Gb, num_biquad, biquad_per_octave):
"""Low shelving filter design using cascaded biquad filters.
- see low_shelving_2nd_cascade()
- under construction for code improvement
"""
sos = np.zeros((num_biquad, 6))
for m in range(num_biquad):
wm = w0 * 2**(-(m + 0.5) / biquad_per_octave)
b, a = low_shelving_1st_coeff(omega=wm, G=Gb)
sos[m] = tf2sos(b, a)
return sos
def high_shelving_1st_cascade(w0, Gb, num_biquad, biquad_per_octave):
"""High shelving filter design using cascaded biquad filters.
- see low_shelving_2nd_cascade()
- under construction for code improvement
"""
sos = np.zeros((num_biquad, 6))
for m in range(num_biquad):
wm = w0 * 2**(-(m + 0.5) / biquad_per_octave)
b, a = high_shelving_1st_coeff(omega=wm, G=Gb)
sos[m] = tf2sos(b, a)
return sos
def low_shelving_2nd_cascade(w0, Gb, num_biquad, biquad_per_octave,
Q=1/np.sqrt(2)):
"""Low shelving filter design using cascaded biquad filters.
Parameters
----------
w0 : float
Cut-off frequency in radian per second.
Gb : float
Gain of each biquad filter in decibel.
num_biquad : int
Number of biquad filters.
Q : float, optional
Quality factor of each biquad filter.
Returns
-------
sos : array_like
Array of second-order filter coefficients, must have shape
``(n_sections, 6)``. Each row corresponds to a second-order
section, with the first three columns providing the numerator
coefficients and the last three providing the denominator
coefficients.
"""
sos = np.zeros((num_biquad, 6))
for m in range(num_biquad):
wm = w0 * 2**(-(m + 0.5) / biquad_per_octave)
b, a = low_shelving_2nd_coeff(omega=wm, G=Gb, Q=Q)
sos[m] = tf2sos(b, a)
return sos
def high_shelving_2nd_cascade(w0, Gb, num_biquad, biquad_per_octave,
Q=1/np.sqrt(2)):
"""High shelving filter design using cascaded biquad filters.
- see low_shelving_2nd_cascade()
- under construction for code improvement
"""
sos = np.zeros((num_biquad, 6))
for m in range(num_biquad):
wm = w0 * 2**(-(m + 0.5) / biquad_per_octave)
b, a = high_shelving_2nd_coeff(omega=wm, G=Gb, Q=Q)
sos[m] = tf2sos(b, a)
return sos
def sosfreqs(sos, worN=200, plot=None):
"""Compute the frequency response of an analog filter in SOS format.
Parameters
----------
sos : array_like
Array of second-order filter coefficients, must have shape
``(n_sections, 6)``. Each row corresponds to a second-order
section, with the first three columns providing the numerator
coefficients and the last three providing the denominator
coefficients.
worN : {None, int, array_like}, optional
If None, then compute at 200 frequencies around the interesting parts
of the response curve (determined by pole-zero locations). If a single
integer, then compute at that many frequencies. Otherwise, compute the
response at the angular frequencies (e.g. rad/s) given in `worN`.
plot : callable, optional
A callable that takes two arguments. If given, the return parameters
`w` and `h` are passed to plot. Useful for plotting the frequency
response inside `freqs`.
Returns
-------
w : ndarray
The angular frequencies at which `h` was computed.
h : ndarray
The frequency response.
"""
h = 1.
for row in sos:
w, rowh = freqs(row[:3], row[3:], worN=worN, plot=plot)
h *= rowh
return w, h
def matchedz_zpk(s_zeros, s_poles, s_gain, fs):
"""Matched-z transform of poles and zeros.
Parameters
----------
s_zeros : array_like
Zeros in the Laplace domain.
s_poles : array_like
Poles in the Laplace domain.
s_gain : float
System gain in the Laplace domain.
fs : int
Sampling frequency in Hertz.
Returns
-------
z_zeros : numpy.ndarray
Zeros in the z-domain.
z_poles : numpy.ndarray
Poles in the z-domain.
z_gain : float
System gain in the z-domain.
See Also
--------
:func:`scipy.signal.bilinear_zpk`
"""
z_zeros = np.exp(s_zeros / fs)
z_poles = np.exp(s_poles / fs)
omega = 1j * np.pi * fs
s_gain *= np.prod((omega - s_zeros) / (omega - s_poles)
* (-1 - z_poles) / (-1 - z_zeros))
return z_zeros, z_poles, np.abs(s_gain)
def nearest_value(x0, x, f):
"""Plot helping."""
return f[np.abs(x - x0).argmin()]
def set_rcparams():
"""Plot helping."""
rcParams['axes.linewidth'] = 0.5
rcParams['axes.edgecolor'] = 'black'
rcParams['axes.facecolor'] = 'None'
rcParams['axes.labelcolor'] = 'black'
rcParams['xtick.color'] = 'black'
rcParams['ytick.color'] = 'black'
rcParams['font.family'] = 'serif'
rcParams['font.size'] = 13
rcParams['text.usetex'] = True
rcParams['text.latex.preamble'] = r'\usepackage{amsmath}'
rcParams['text.latex.preamble'] = r'\usepackage{gensymb}'
rcParams['legend.title_fontsize'] = 10
def set_outdir():
"""Plot helping."""
return '../graphics/'
def interaction_matrix_sge(G_proto, gain_factor, w_command, w_control,
bandwidth):
"""
Parameters
----------
G_proto: array_like
Prototype gain in decibel.
gain_factor: float
Gain factor.
w_command: array_like
Normalized command frequencies.
w_control: array_like
Normalized control frequencies.
bandwidth: array_like
Bandwidth.
"""
num_command = len(w_command)
num_control = len(w_control)
leak = np.zeros((num_command, num_control))
G_bandwidth = gain_factor * G_proto
g_proto = db2lin(G_proto)
g_bandwidth = db2lin(G_bandwidth)
z1 = np.exp(-1j * w_control)
z2 = z1**2
poly = np.zeros((num_command, 3))
poly[6] = 0.000321, 0.00474, 0.00544
poly[7] = 0.00108, 0.0221, 0.0169
poly[8] = 0.00184, 0.125, 0.0212
poly[9] = -0.00751, 0.730, -0.0672
for m, (Gp, gp, p, gb, wc, bw) in enumerate(
zip(G_proto, g_proto, poly, g_bandwidth, w_command, bandwidth)):
G_nyquist = np.sign(Gp) * np.polyval(p, np.abs(Gp))
gn = db2lin(G_nyquist)
gp2 = gp**2
gb2 = gb**2
gn2 = gn**2
F = np.abs(gp2 - gb2)
G00 = np.abs(gp2 - 1)
F00 = np.abs(gb2 - 1)
G01 = np.abs(gp2 - gn)
G11 = np.abs(gp2 - gn2)
F01 = np.abs(gb2 - gn)
F11 = np.abs(gb2 - gn2)
W2 = np.sqrt(G11 / G00) * np.tan(wc / 2)**2
DW = (1 + np.sqrt(F00 / F11) * W2) * np.tan(bw / 2)
C = F11 * DW**2 - 2 * W2 * (F01 - np.sqrt(F00 * F11))
D = 2 * W2 * (G01 - np.sqrt(G00 * G11))
A = np.sqrt((C + D) / F)
B = np.sqrt((gp2 * C + gb2 * D) / F)
num = np.array([gn+W2+B, -2*(gn-W2), (gn-B+W2)]) / (1+W2+A)
den = np.array([1, -2*(1-W2)/(1+W2+A), (1+W2-A)/(1+W2+A)])
H = (num[0] + num[1]*z1 + num[2]*z2)\
/ (den[0] + den[1]*z1 + den[2]*z2)
G = db(H) / Gp
leak[m] = np.abs(G)
return leak
def peq_seg(g_ref, g_nyquist, g, g_bandwidth, w_command, bandwidth):
"""
Parameters
----------
g_ref: float
Reference linear gain.
g_nyquist: float
Nyquist linear gain.
g_bandwidth: float
(Optimized) linear gain.
w_command: float
Normalized command frequencies.
bandwidth: float
Bandwidth.
"""
g2 = g**2
gb2 = g_bandwidth**2
gr2 = g_ref**2
gn2 = g_nyquist**2
grn = g_ref * g_nyquist
F = np.abs(g2 - gb2)
G00 = np.abs(g2 - gr2)
F00 = np.abs(gb2 - gr2)
G01 = np.abs(g2 - grn)
G11 = np.abs(g2 - gn2)
F01 = np.abs(gb2 - grn)
F11 = np.abs(gb2 - gn2)
W2 = np.sqrt(G11 / G00) * np.tan(w_command / 2)**2
DW = (1 + np.sqrt(F00 / F11) * W2) * np.tan(bandwidth / 2)
C = F11 * DW**2 - 2 * W2 * (F01 - np.sqrt(F00 * F11))
D = 2 * W2 * (G01 - np.sqrt(G00 * G11))
A = np.sqrt((C + D) / F)
B = np.sqrt((g**2 * C + g_bandwidth**2 * D) / F)
b = np.array([(g_nyquist + g_ref * W2 + B),
-2*(g_nyquist - g_ref * W2),
(g_nyquist - B + g_ref * W2)]) / (1 + W2 + A)
a = np.array([1, -2*(1 - W2) / (1 + W2 + A), (1 + W2 - A) / (1 + W2 + A)])
return b, a
def optimized_peq_seg(gain_command, gain_proto, gain_factor, w_command,
w_control, bandwidth):
"""
Parameters
----------
gain_command: array_like
Command gain in decibel.
gain_proto: array_like
Prototype gain in decibel.
gain_factor: float
Gain factor.
w_command: array_like
Normalized command frequencies.
w_control: array_like
Normalized control frequencies.
bandwidth: array_like
Bandwidths.
Returns
-------
b_opt: array_like (N, 3)
Moving average coefficients.
a_opt: array_like (N, 3)
Autoregressive (recursive) coefficients.
"""
num_command = len(gain_command)
# symmetric GEG design
gain_control = np.zeros(2 * num_command - 1)
gain_control[::2] = gain_command
gain_control[1::2] = 0.5 * (gain_command[:-1] + gain_command[1:])
# interaction matrix "B"
B = interaction_matrix_sge(gain_proto, gain_factor,
w_command, w_control, bandwidth)
gain2 = np.zeros((2 * num_command - 1, 1))
gain2[::2, 0] = gain_command
gain2[1::2, 0] = 0.5 * (gain_command[:-1] + gain_command[1:])
# band weights
weights = np.ones(2 * num_command - 1)
weights[1::2] *= 0.5
W = np.diag(weights)
gain_opt =\
np.matmul(np.linalg.inv(np.linalg.multi_dot([B, W, np.transpose(B)])),
np.linalg.multi_dot([B, W, gain2]))
gain_opt_bandwidth = gain_factor * gain_opt
gain_opt = np.squeeze(gain_opt)
gain_opt_bandwidth = np.squeeze(gain_opt_bandwidth)
g_opt = db2lin(gain_opt)
g_opt_bandwidth = db2lin(gain_opt_bandwidth)
poly = np.zeros((num_command, 3))
poly[6] = 0.000321, 0.00474, 0.00544
poly[7] = 0.00108, 0.0221, 0.0169
poly[8] = 0.00184, 0.125, 0.0212
poly[9] = -0.00751, 0.730, -0.0672
b_opt = np.zeros((3, num_command))
a_opt = np.zeros((3, num_command))
for m, (Go, go, gob, wc, bw, p) in enumerate(
zip(gain_opt, g_opt, g_opt_bandwidth, w_command, bandwidth, poly)):
gain_nyquist = np.sign(Go) * np.polyval(p, np.abs(Go))
b, a = peq_seg(1, db2lin(gain_nyquist), go, gob, wc, bw)
b_opt[:, m] = b
a_opt[:, m] = a
return b_opt, a_opt
def fracorder_lowshelving_eastty(w1, w2, G1, G2, rB=None):
"""
Parameters
----------
w1: float
Lower corner frequency.
w2: float
Upper corner frequency.
G1: float
Target level at lower corner frequency in dB.
G2: float
Target level at upper corner frequency in dB.
rB: float
Gain per octave.
Returns
-------
z: array_like
Complex zeros in the Laplace domain.
p: array_like
Complex poles in the Laplace domain.
k: float
Gain.
"""
Gd = G1 - G2
n_eff = effective_order(w1, w2, Gd, rB)
n_int, n_frac = np.divmod(n_eff, 1)
n_int = int(n_int)
z = np.array([])
p = np.array([])
# Second-order sections (complex conjugate pole/zero pairs)
if n_int > 0:
alpha = complex_zp_angles(n_int, n_frac)
alpha = np.concatenate((alpha, -alpha))
z = w1 * np.exp(1j * alpha)
p = w2 * np.exp(1j * alpha)
# First-order section (real pole/zero)
if n_eff % 2 != 0:
s_lower, s_upper = real_zp(n_int, n_frac, w1, w2)
if n_int % 2 == 0:
z_real = s_lower
p_real = s_upper
elif n_int % 2 == 1:
z_real = s_upper
p_real = s_lower
z = np.append(z, z_real)
p = np.append(p, p_real)
return z, p, 1
def effective_order(w1, w2, Gd, rB=None):
"""Effective order of shelving filter.
Parameters
----------
w1: float
Lower corner frequency.
w2: float
Upper corner frequency.
Gd: float
Target level difference in dB.
rB: float
Gain per octave.
"""
if rB is None:
rB = db(2) * np.sign(Gd) # Butterworth
return Gd / rB / np.log2(w2/w1)
def complex_zp_angles(n_int, n_frac):
"""Polar angles of the complex conjugate zeros/poles.
These correspond to the second-order section filters.
Parameters
----------
n_int: int
Interger order.
n_frac: float
Fractional order [0, 1).
"""
# linear interpolation of angles
num_zp_pair = int(n_int+1) // 2
return np.pi/2 * np.stack([
(1-n_frac) * (1 + (2*m+1)/n_int)
+ n_frac * (1 + (2*m+1)/(n_int+1))
for m in range(num_zp_pair)])
def real_zp(n_int, n_frac, w_lower, w_upper):
"""Real-valued zero and pole.
These correspond to the first-order section filters.
Parameters
----------
n_int: int
Integer order
n_frac: float
Fractional order [0, 1).
w_lower: float
Lower corner frequency.
w_upper: float
Upper corner frequency.
Returns
-------
s_lower: float
Smaller real-valued zero or pole.
s_upper: float
Larger real-valued zero or pole.
"""
w_mean = np.sqrt(w_lower * w_upper)
ratio = (w_upper / w_lower)
# logarithmic interpolation of zero/pole radius
if n_int % 2 == 0: # even
s_lower = -w_mean * ratio**(-n_frac/2)
s_upper = -w_mean * ratio**(n_frac/2)
elif n_int % 2 == 1: # odd
s_lower = -w_lower * ratio**(n_frac/2)
s_upper = -w_upper * ratio**(-n_frac/2)
return s_lower, s_upper
| 30.072386 | 130 | 0.585362 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 11,072 | 0.493537 |
96c928af19979ff24c55a99b28243be2add708ab | 1,529 | py | Python | Segment Tree Query II.py | RijuDasgupta9116/LintCode | 4629a3857b2c57418b86a3b3a7180ecb15e763e3 | [
"Apache-2.0"
] | 321 | 2015-01-04T04:01:44.000Z | 2022-03-20T13:21:55.000Z | Segment Tree Query II.py | leifoo/LintCode | 2520762a1cfbd486081583136396a2b2cac6e4fb | [
"Apache-2.0"
] | 1 | 2016-01-11T04:29:37.000Z | 2016-01-11T04:29:37.000Z | Segment Tree Query II.py | leifoo/LintCode | 2520762a1cfbd486081583136396a2b2cac6e4fb | [
"Apache-2.0"
] | 114 | 2015-01-27T06:08:17.000Z | 2022-03-23T03:58:11.000Z | """
For an array, we can build a SegmentTree for it, each node stores an extra attribute count to denote the number of
elements in the the array which value is between interval start and end. (The array may not fully filled by elements)
Design a query method with three parameters root, start and end, find the number of elements in the in array's interval
[start, end] by the given root of value SegmentTree.
Have you met this question in a real interview? Yes
Example
For array [0, empty, 2, 3], the corresponding value Segment Tree is:
[0, 3, count=3]
/ \
[0,1,count=1] [2,3,count=2]
/ \ / \
[0,0,count=1] [1,1,count=0] [2,2,count=1], [3,3,count=1]
query(1, 1), return 0
query(1, 2), return 1
query(2, 3), return 2
query(0, 2), return 2
"""
__author__ = 'Daniel'
DEFAULT = 0
f = lambda x, y: x+y
class Solution:
def query(self, root, s, e):
"""
Segment: [s, e]
:param root: The root of segment tree
:param start: start of segment/interval
:param end: end of segment/interval
:return: The count number in the interval [start, end]
"""
if not root:
return DEFAULT
if s <= root.start and e >= root.end:
return root.count
if s > root.end or e < root.start:
return DEFAULT
l = self.query(root.left, s, e)
r = self.query(root.right, s, e)
return f(l, r)
| 29.403846 | 119 | 0.576194 | 599 | 0.391759 | 0 | 0 | 0 | 0 | 0 | 0 | 1,120 | 0.732505 |
96ca626a4b0b422d3111b3bc01bab71f87bc23d8 | 944 | py | Python | inferencia/util/reader/reader_factory.py | yuya-mochimaru-np/inferencia | e09f298d0a80672fc5bb9383e23c941290eff334 | [
"Apache-2.0"
] | null | null | null | inferencia/util/reader/reader_factory.py | yuya-mochimaru-np/inferencia | e09f298d0a80672fc5bb9383e23c941290eff334 | [
"Apache-2.0"
] | 5 | 2021-07-25T23:19:29.000Z | 2021-07-26T23:35:13.000Z | inferencia/util/reader/reader_factory.py | yuya-mochimaru-np/inferencia | e09f298d0a80672fc5bb9383e23c941290eff334 | [
"Apache-2.0"
] | 1 | 2021-09-18T12:06:13.000Z | 2021-09-18T12:06:13.000Z | import os.path as osp
from .reader.video_reader import VideoReader
class ReaderFactory():
video_exts = [".mp4", ".avi", ".mov", ".MOV", ".mkv"]
def create(target_input, target_fps):
if osp.isfile(target_input):
ext = osp.splitext(target_input)[1]
if ext in ReaderFactory.video_exts:
return VideoReader(target_input, target_fps)
else:
msg = "{} is not supported. {} are supported.".format(
ext, ReaderFactory.video_exts)
raise TypeError(msg)
# elif osp.isdir(target_input):
# return ImageReader(target_input)
# # USB camera
# elif isinstance(target_input, int):
# return VideoReader(target_input)
# # network camera
# elif isinstance(target_input, str):
# return NetworkCameraReader(target_input)
else:
raise ValueError()
| 30.451613 | 70 | 0.581568 | 874 | 0.925847 | 0 | 0 | 0 | 0 | 0 | 0 | 329 | 0.348517 |
96cee147c0593a95de8069b59d56dcb5797d1ec4 | 15,755 | py | Python | src/utils.py | lubianat/complex_bot | e0ddabcc0487c52b14fb94950c5a812f0bdb2283 | [
"MIT"
] | 1 | 2021-10-06T00:21:10.000Z | 2021-10-06T00:21:10.000Z | src/utils.py | lubianat/complex_bot | e0ddabcc0487c52b14fb94950c5a812f0bdb2283 | [
"MIT"
] | 14 | 2021-01-15T21:51:38.000Z | 2021-11-10T10:08:22.000Z | src/utils.py | lubianat/complex_bot | e0ddabcc0487c52b14fb94950c5a812f0bdb2283 | [
"MIT"
] | 1 | 2021-01-18T10:32:56.000Z | 2021-01-18T10:32:56.000Z | # Code modified from original by @jvfe (BSD2)
# Copyright (c) 2020, jvfe
# https://github.com/jvfe/wdt_contribs/tree/master/complex_portal/src
import math
import re
from collections import defaultdict
from ftplib import FTP
from functools import lru_cache, reduce
from time import gmtime, strftime
import pandas as pd
from wikidata2df import wikidata2df
from wikidataintegrator import wdi_core
from wikidataintegrator.wdi_core import WDItemEngine
import json
with open("mappings.json", "r") as fp:
MAPPINGS = json.load(fp)
def get_list_of_complexes(
datasets, species_id, test_on_wikidata=True, max_complexes=999999
):
"""
Clean and process table of complexes
Parses table of complexes into Complex classes
Args:
datasets (DataFrame): one of the species datasets
species_id (str): The NCBI species ID
test_on_wikidata (bool): A boolean indicating whether to return only complexes that are or aren't on Wikidata. Defaults to True.
max_complexes (str): The maximum number of complexes to be modified on Wikidata
Returns:
list_of_complexes (list): Objects of the Complex class
"""
raw_table = pd.read_table(datasets[species_id], na_values=["-"])
if test_on_wikidata:
raw_table = remove_rows_on_wikidata(raw_table)
columns_to_keep = get_columns_to_keep()
raw_table = raw_table[columns_to_keep]
list_of_complexes = []
print("====== Parsing list to extract into class Complex ======")
# Counter for bot test
counter = 0
for complex_id in raw_table["#Complex ac"]:
counter = counter + 1
list_of_complexes.append(Complex(raw_table, complex_id))
if counter == max_complexes:
break
return list_of_complexes
def update_complex(login_instance, protein_complex, references):
    """
    Updates the information for an existing complex on Wikidata.

    Builds the full statement list (class membership, taxon, Complex Portal
    ID, components with stoichiometry, GO annotations) and writes it with
    Wikidata Integrator.

    Args:
        login_instance: A Wikidata Integrator login instance
        protein_complex: An object of the class Complex containing the information for a protein complex
        references: The set of references for WDI
    """
    # Local names follow the property semantics: P279 is "subclass of"
    # (macromolecular complex, Q22325163) and P31 is "instance of"
    # (Q107509287). The original names had the two swapped.
    subclass_of_macromolecular_complex = wdi_core.WDItemID(
        value="Q22325163", prop_nr="P279", references=references
    )
    instance_of_statement = wdi_core.WDItemID(
        value="Q107509287", prop_nr="P31", references=references
    )
    found_in_taxon = wdi_core.WDItemID(
        value=protein_complex.taxon_qid, prop_nr="P703", references=references
    )
    complex_portal_id = wdi_core.WDString(
        value=protein_complex.complex_id, prop_nr="P7718", references=references
    )
    data = [
        subclass_of_macromolecular_complex,
        instance_of_statement,
        found_in_taxon,
        complex_portal_id,
    ]

    def is_nan(value):
        # NaN is the only value not equal to itself; works for the NaN
        # sentinel returned when a component QID could not be resolved.
        # Hoisted out of the loop (the original re-defined it per iteration).
        return value != value

    has_parts = []
    for component in protein_complex.list_of_components:
        quantity = component.quantity
        component_qid = component.qid
        print(f"Component QID: {component_qid}")
        if is_nan(component_qid):
            # NOTE(review): this stops adding *all* remaining components as
            # soon as one QID is missing; "continue" may have been intended.
            # Kept as-is to preserve behavior — TODO confirm.
            break
        if quantity != "0" and not math.isnan(int(quantity)):
            print(f"Quantity of this component: {str(quantity)}")
            # Quantity is valid. 0 represents unknown in Complex Portal.
            quantity_qualifier = wdi_core.WDQuantity(
                value=int(quantity), prop_nr="P1114", is_qualifier=True
            )
            statement = wdi_core.WDItemID(
                value=component_qid,
                prop_nr="P527",
                qualifiers=[quantity_qualifier],
                references=references,
            )
        else:
            statement = wdi_core.WDItemID(
                value=component_qid, prop_nr="P527", references=references
            )
        has_parts.append(statement)
    data.extend(has_parts)

    # Reference table via https://w.wiki/3dTC
    go_statements = []
    go_reference = pd.read_csv("./reference_go_terms.csv")
    for go_term in protein_complex.go_ids:
        # Considers that each term has only one GO type
        try:
            row = go_reference[go_reference["id"] == go_term]
            obj = row["go_term_qid"].values[0]
            label = row["go_termLabel"].values[0]
            prop = row["go_props_qid"].values[0]
            # Heuristic: Cell components containing the word "complex" in the label
            # are actually superclasses.
            if "complex" in label and prop == "P681":
                prop = "P279"
            statement = wdi_core.WDItemID(
                value=obj, prop_nr=prop, references=references
            )
            go_statements.append(statement)
        except Exception as e:
            # Narrowed from BaseException so KeyboardInterrupt/SystemExit are
            # not swallowed; unknown GO terms are logged and skipped.
            print(e)
            print("Problem with " + go_term)
            with open("errors/log.csv", "a") as f:
                f.write(f"{go_term},'problem with GO term'\n")
    data.extend(go_statements)

    label = protein_complex.name
    aliases = protein_complex.aliases
    taxon_name = get_wikidata_label(protein_complex.taxon_qid)
    descriptions = {
        "en": "macromolecular complex found in " + taxon_name,
        "pt": "complexo macromolecular encontrado em " + taxon_name,
        "pt-br": "complexo macromolecular encontrado em " + taxon_name,
        "nl": "macromoleculair complex gevonden in " + taxon_name,
        "de": "makromolekularer Komplex auffindbar in " + taxon_name,
    }
    # For the list below, the bot will not remove values added on Wikidata
    properties_to_append_value = ["P703", "P680", "P681", "P682", "P527"]
    wd_item = wdi_core.WDItemEngine(
        data=data,
        append_value=properties_to_append_value,
        debug=True,
    )
    wd_item.set_label(label=label, lang="en")
    wd_item.set_aliases(aliases, lang="en")
    # As fast-run is set, I will not update descriptions.
    for lang, description in descriptions.items():
        wd_item.set_description(description, lang=lang)
    wd_item.write(login_instance)
class ComplexComponent:
    """A single member molecule of a protein complex.

    Resolves the molecule's external identifier (ChEBI, Complex Portal,
    RNAcentral or UniProt) to a Wikidata QID on construction.
    """

    def __init__(self, external_id, quantity):
        self.external_id = external_id
        self.quantity = quantity
        self.get_qid_for_component()

    def get_qid_for_component(self):
        """Look up the Wikidata QID matching this component's external id."""
        identifier = self.external_id
        print(identifier)
        # Marker substrings mapped to the Wikidata identifier property:
        # ChEBI ID (P683), Complex Portal ID (P7718), RNAcentral ID (P8697).
        for marker, prop in (("CHEBI", "P683"), ("CPX", "P7718"), ("URS", "P8697")):
            if marker in identifier:
                # ChEBI values are stored on Wikidata without the prefix.
                lookup_value = identifier.replace("CHEBI:", "") if marker == "CHEBI" else identifier
                self.qid = get_wikidata_item_by_propertyvalue(prop, lookup_value)
                return
        # Fallback: UniProt protein ID (P352)
        self.qid = get_wikidata_item_by_propertyvalue("P352", identifier)
class Complex:
    """Parsed representation of a single Complex Portal entry.

    Wraps the one-row slice of the species table matching ``complex_id`` and
    extracts the fields the bot needs: name, aliases, member molecules with
    stoichiometry, GO annotations and the taxon's Wikidata QID.
    """

    def __init__(self, dataset, complex_id):
        self.complex_id = complex_id
        # ``info`` is a one-row DataFrame. Relevant columns include:
        # "#Complex ac", "Recommended name", "Aliases for complex",
        # "Taxonomy identifier",
        # "Identifiers (and stoichiometry) of molecules in complex",
        # "Go Annotations" and "Description".
        self.info = dataset[dataset["#Complex ac"] == complex_id]
        self.list_of_components = []
        self.go_ids = []
        self.extract_fields()
        print(f"Parsing {self.name}")

    def extract_fields(self):
        """Populate all derived attributes from the raw table row."""
        self.get_name()
        self.get_aliases()
        self.get_components()
        self.get_go_ids()
        self.get_wikidata_ids()

    def get_name(self):
        """Set ``self.name`` from the "Recommended name" column."""
        self.name = self.info["Recommended name"].values[0]

    def get_aliases(self):
        """Set ``self.aliases``; "-" and true NAs both mean "no aliases"."""
        raw_aliases = self.info["Aliases for complex"].values[0]
        if isinstance(raw_aliases, str) and raw_aliases != "-":
            self.aliases = raw_aliases.split("|")
        else:
            self.aliases = []

    def get_components(self):
        """Parse "id(stoichiometry)" entries into ComplexComponent objects."""
        molecules_column = "Identifiers (and stoichiometry) of molecules in complex"
        entries = self.info[molecules_column].values[0].split("|")
        quantities = [re.search(r"\((.*)\)", entry).group(1) for entry in entries]
        external_ids = [re.search(r"(.*)\(.*\)", entry).group(1) for entry in entries]
        # dict(zip(...)) deduplicates repeated identifiers (last quantity
        # wins), matching the original behavior.
        component_and_quantities = dict(zip(external_ids, quantities))
        for external_id, quantity in component_and_quantities.items():
            self.list_of_components.append(
                ComplexComponent(external_id, quantity)
            )

    def get_go_ids(self):
        """Collect all GO term ids from the "Go Annotations" column."""
        try:
            go_string = self.info["Go Annotations"].values[0]
            self.go_ids = re.findall(pattern="GO:[0-9]*", string=go_string)
        except Exception:
            # Best-effort: entries without GO annotations are simply skipped.
            print(f"No GOs for {self.complex_id}")

    def get_wikidata_ids(self):
        """Resolve the taxon QID via NCBI taxonomy ID (P685)."""
        tax_id = self.info["Taxonomy identifier"].values[0]
        self.taxon_qid = get_wikidata_item_by_propertyvalue("P685", int(tax_id))
def get_wikidata_complexes():
    """Gets all Wikidata items with a Complex Portal ID property"""
    print("====== Getting complexes on Wikidata ======")
    get_macromolecular = """
    SELECT ?item ?ComplexPortalID
    WHERE
    {
    ?item wdt:P7718 ?ComplexPortalID .
    }"""
    raw_result = WDItemEngine.execute_sparql_query(
        get_macromolecular, as_dataframe=True
    )
    # Strip the entity-URL prefix so the item column holds bare QIDs.
    return raw_result.replace({"http://www.wikidata.org/entity/": ""}, regex=True)
def get_wikidata_label(qid, langcode="en"):
    """Return the label of a Wikidata item in the given language.

    Args:
        qid (str): The qid to get the label
        langcode (str): The language code of the label

    Returns:
        str: The label of the item in that language.

    Raises:
        ValueError: If the item has no label in the requested language.
    """
    query_result = WDItemEngine.execute_sparql_query(
        f'SELECT ?label WHERE {{ wd:{qid} rdfs:label ?label. FILTER(LANG(?label)="{langcode}") }}'
    )
    try:
        match = query_result["results"]["bindings"][0]
    except IndexError:
        print(f"Couldn't find label for {qid}")
        # Bug fix: the original raised a plain string ("raise (...)"), which
        # itself raises TypeError in Python 3. Raise a real exception.
        raise ValueError("label not found for " + qid)
    label = match["label"]["value"]
    return label
@lru_cache(maxsize=None)
def get_wikidata_item_by_propertyvalue(property, value, mappings=MAPPINGS):
    """Gets a Wikidata QID for a determined property-value pair.

    Results are cached twice: in-process via ``lru_cache`` and across runs
    via ``mappings`` (persisted to ``mappings.json``).

    Args:
        property (str): The property to search
        value (str or int): The value of said property

    Returns:
        str: The matching QID, or NaN (float) when no item was found.
    """
    # The persisted mapping is keyed by str(value) (see the store below),
    # so normalise the lookup key too. The original looked up with the raw
    # value and therefore always missed entries cached for int values.
    try:
        return str(mappings[property][str(value)])
    except KeyError:
        pass
    query_result = WDItemEngine.execute_sparql_query(
        f'SELECT distinct ?item WHERE {{ ?item wdt:{property} "{value}" }}'
    )
    try:
        match = query_result["results"]["bindings"][0]
    except IndexError:
        print(f"Couldn't find item for {value}")
        # str(value): the original did `"URS" in value`, which raises
        # TypeError when value is an int (e.g. a taxonomy id).
        if "URS" in str(value):
            with open("errors/rna_central_log.csv", "a") as f:
                f.write(f"{value},'not found'\n")
        with open("errors/log.csv", "a") as f:
            f.write(f"{value},'not found'\n")
        # pd.np was removed in pandas 1.0; NaN is a plain float anyway.
        return float("nan")
    qid = match["item"]["value"]
    # Entity URLs look like http://www.wikidata.org/entity/Q42
    qid = qid.split("/")[4]
    # setdefault replaces the original bare-except dance around a missing
    # per-property sub-dict.
    mappings.setdefault(property, {})[str(value)] = str(qid)
    with open("mappings.json", "w") as fp:
        json.dump(MAPPINGS, fp, sort_keys=True, indent=4)
    return qid
def get_complex_portal_species_ids():
    """Map Complex Portal species datasets to Wikidata taxon labels.

    Lists the ``.tsv`` dataset files on the Complex Portal FTP site (named
    by NCBI taxonomy ID) and queries Wikidata for the matching taxa.

    Returns:
        DataFrame: columns ``itemLabel`` and ``id`` (NCBI taxonomy ID).
        Note: the original docstring claimed a dictionary; the function
        actually returns a DataFrame from ``wikidata2df``.
    """
    domain = "ftp.ebi.ac.uk"
    complex_data = "pub/databases/intact/complex/current/complextab/"
    print("====== Getting Complex Portal Species IDs ======")
    ftp = FTP(domain)
    ftp.login()
    ftp.cwd(complex_data)
    files = ftp.nlst()
    # Dataset files are named "<taxonomy id>.tsv".
    species_list = [
        name.replace(".tsv", "").strip() for name in files if "tsv" in name
    ]
    query = (
        """
    SELECT ?itemLabel ?id WHERE {
        VALUES ?id { """
        + '"'
        + '" "'.join(species_list)
        + '"'
        + """ }
        ?item wdt:P685 ?id.
        SERVICE wikibase:label { bd:serviceParam wikibase:language "[AUTO_LANGUAGE],en". }
    }
    """
    )
    df = wikidata2df(query)
    return df
def get_complex_portal_dataset_urls():
    """Gets a dictionary of Complex portal datasets.

    Returns:
        dict: species name (e.g. "Homo sapiens") -> dataset FTP url.
    """
    domain = "ftp.ebi.ac.uk"
    complex_data = "pub/databases/intact/complex/current/complextab/"
    print("====== Getting Complex Portal datasets via FTP ======")
    ftp = FTP(domain)
    ftp.login()
    ftp.cwd(complex_data)
    files = ftp.nlst()
    # A defaultdict() with no factory behaves exactly like a plain dict,
    # so use a plain dict for clarity.
    cp_datasets = {}
    for species in files:
        if "README" not in species:
            # "Homo_sapiens.tsv" -> "Homo sapiens" (replaces the original
            # reduce()+lambda over a replacements tuple).
            current_key = species.replace(".tsv", "").replace("_", " ")
            cp_datasets[current_key] = f"ftp://{domain}/{complex_data}{species}"
    return cp_datasets
def remove_rows_on_wikidata(complex_dataframe):
    """
    Return complex portal entities that don't have Wikidata links.
    """
    print("====== Checking which complexes are not on Wikidata ======")
    on_wikidata = get_wikidata_complexes()
    # Outer merge with indicator: rows tagged "right_only" exist in the
    # Complex Portal table but have no Wikidata counterpart.
    merged = pd.merge(
        on_wikidata,
        complex_dataframe,
        how="outer",
        left_on=["ComplexPortalID"],
        right_on=["#Complex ac"],
        indicator=True,
    )
    missing_mask = merged["_merge"] == "right_only"
    missing_from_wikidata = merged.loc[missing_mask, complex_dataframe.columns]
    return missing_from_wikidata[get_columns_to_keep()]
def split_complexes(species_dataframe):
    """Split the species table into one DataFrame per unique complex.

    Complex order follows first appearance in the table; the original row
    index is kept as an ``index`` column via ``reset_index``.
    """
    per_complex = []
    for complex_ac in species_dataframe["#Complex ac"].unique():
        subset = species_dataframe[species_dataframe["#Complex ac"] == complex_ac]
        per_complex.append(subset.reset_index())
    return per_complex
def prepare_refs(species_id):
    """Build the WDI reference block for statements sourced from Complex Portal.

    Args:
        species_id (str): NCBI taxonomy ID naming the source ``.tsv`` file.

    Returns:
        list: A single reference group (list of lists) for Wikidata Integrator.
    """
    # stated in (P248): Complex Portal (Q47196990)
    stated_in = wdi_core.WDItemID(value="Q47196990", prop_nr="P248", is_reference=True)
    # retrieved (P813): today's date in Wikidata time format
    retrieved = wdi_core.WDTime(
        strftime("+%Y-%m-%dT00:00:00Z", gmtime()), prop_nr="P813", is_reference=True
    )
    # reference URL (P854): the Complex Portal FTP directory
    ref_url = wdi_core.WDString(
        "https://ftp.ebi.ac.uk/pub/databases/intact/complex/current/complextab",
        prop_nr="P854",
        is_reference=True,
    )
    # filename in archive (P7793)
    ref_filename = wdi_core.WDString(
        f"{species_id}.tsv", prop_nr="P7793", is_reference=True
    )
    return [[stated_in, retrieved, ref_url, ref_filename]]
def get_columns_to_keep():
    """Return the Complex Portal table columns the bot actually consumes."""
    return [
        "#Complex ac",
        "Recommended name",
        "Aliases for complex",
        "Taxonomy identifier",
        "Go Annotations",
        "Identifiers (and stoichiometry) of molecules in complex",
        "Description",
    ]
| 32.551653 | 136 | 0.635227 | 3,761 | 0.238718 | 0 | 0 | 1,244 | 0.078959 | 0 | 0 | 5,276 | 0.334878 |
96d020593d682ed45dc1c5d9066ff6a5caf2f8fb | 1,264 | py | Python | tests/t_11_serial_test.py | llbxg/NIST-SP-800-22 | 7e82243643b62fdc07cbe5f40d540b0a16a4372a | [
"MIT"
] | null | null | null | tests/t_11_serial_test.py | llbxg/NIST-SP-800-22 | 7e82243643b62fdc07cbe5f40d540b0a16a4372a | [
"MIT"
] | null | null | null | tests/t_11_serial_test.py | llbxg/NIST-SP-800-22 | 7e82243643b62fdc07cbe5f40d540b0a16a4372a | [
"MIT"
] | null | null | null | import scipy.special as sc
from tests.src.utils import split_list, __print
# .11 Serial Test
def serial_test(key, n, m=3, b_print=True):
    """NIST SP 800-22 test 2.11 (Serial Test).

    Checks whether every overlapping m-bit pattern occurs about equally
    often in the bit sequence, as expected for a truly random sequence.

    Args:
        key: iterable of bits (0/1) of length ``n``.
        n: length of the bit sequence.
        m: block length (default 3).
        b_print: print the p-values when True (via the project ``__print``).

    Returns:
        tuple: ``([p1, p2], passed)`` — both p-values and whether both
        are >= 0.01 (the NIST pass threshold).
    """
    def compute(s,m):
        # psi-squared statistic for overlapping m-bit blocks. The sequence
        # is extended with its first m-1 bits so blocks wrap around.
        if m == 0:
            return 0
        if m == 1: head = ''
        else : head = s[0:(m-1)]
        s = s + head
        # v[i] counts occurrences of the m-bit pattern with value i.
        v = [0]*2**m
        # NOTE(review): the inner loop reuses the name ``i`` — harmless in
        # Python (the outer iterator advances regardless) but confusing.
        for i in range(m):
            ss=s[i:]
            # split_list is a project helper; presumably yields consecutive
            # m-character chunks of ss — TODO confirm.
            split_key_m=list(split_list(ss,m))
            if len(split_key_m[-1]) != len(split_key_m[0]):
                split_key_m=split_key_m[0:-1]
            split_key_m=list(map(lambda x : int(x,2),split_key_m))
            for i in range(2**m):
                v[i] = v[i]+split_key_m.count(i)
        psi2_m = 2**m/n*(sum(list(map(lambda x : x**2,v)))) - n
        return psi2_m
    # Join the bit list into a single "0101..." string.
    key = ''.join(list(map(str, key)))
    # First and second order differences of psi^2 over block sizes m, m-1, m-2.
    psi2_m0=compute(key,m)
    psi2_m1=compute(key,(m-1))
    psi2_m2=compute(key,(m-2))
    d_psi2 = psi2_m0 - psi2_m1
    d2_psi2 = psi2_m0 - 2*psi2_m1 + psi2_m2
    # p-values via the regularized upper incomplete gamma function.
    p1=sc.gammaincc(2**(m-2),d_psi2 / 2)
    p2=sc.gammaincc(2**(m-3),d2_psi2 / 2)
    b1 = (p1 >= 0.01)
    b2 = (p2 >= 0.01)
    __print(b_print, '{:40} : {:.3f} -> {} '.format('serial test',p1,b1))
    __print(b_print, '{:40} : {:.3f} -> {} '.format('',p2,b2))
    return [p1, p2], all([b1, b2])
96d04e179f9f7c4579746fedd4c15e79b9631946 | 1,337 | py | Python | views/routes.py | macwille/python-chat-app | 95a5bc28c3daeb977e1a1c9e5801941389baeb21 | [
"CC0-1.0"
] | 1 | 2021-06-01T12:27:58.000Z | 2021-06-01T12:27:58.000Z | views/routes.py | macwille/python-chat-app | 95a5bc28c3daeb977e1a1c9e5801941389baeb21 | [
"CC0-1.0"
] | null | null | null | views/routes.py | macwille/python-chat-app | 95a5bc28c3daeb977e1a1c9e5801941389baeb21 | [
"CC0-1.0"
] | null | null | null | from app import app
from views import subject_routes, room_routes, user_routes, message_routes
from models import user_service
from flask import Flask, flash, render_template, request, session, abort
from db import db
# Rest of the routes are imported from /views
@app.route("/")
def index():
    """Render the landing page."""
    return render_template("index.html")
@app.route("/create/id=<int:id>")
def create(id):
    """Render the creation form for the entity with the given id."""
    return render_template("create.html", id=id)
@ app.route("/search")
def search():
    """Render the search page."""
    return render_template("search.html")
# Utilties
def check_token():
    """Validate the CSRF token of the current request.

    Compares the token stored in the session against the one submitted in
    the request form and aborts with 403 on mismatch.

    Raises:
        KeyError: If the session or form has no ``csrf_token``
            (same as the original behavior).
        werkzeug.exceptions.Forbidden: via ``abort(403)`` on mismatch.
    """
    import hmac  # local import keeps this fix self-contained

    token = session["csrf_token"]
    form_token = request.form["csrf_token"]
    # Constant-time comparison so the check does not leak token contents
    # through timing differences (the original used a plain != comparison).
    if not hmac.compare_digest(token, form_token):
        print("Failed token")
        abort(403)
    else:
        print("Token checked")
def word_too_long(string):
    """Return True if any whitespace-separated word exceeds 30 characters.

    Args:
        string (str): Text to check.

    Returns:
        bool: True when at least one word is longer than 30 characters.
    """
    # any() with a generator short-circuits and replaces the original
    # list-build plus "if ...: return True else: return False" pattern.
    return any(len(word) > 30 for word in string.split())
@app.errorhandler(403)
def resource_not_found(e):
    """Handle 403 errors: flash a message and show the index page.

    Bug fix: returns the page with status 403; the original returned the
    default 200, hiding the error from clients.
    """
    flash("Forbidden 403", "error")
    return render_template("index.html"), 403
@app.errorhandler(500)
def server_error(e):
    """Handle 500 errors: flash a message and show the index page.

    Bug fix: returns the page with status 500; the original returned the
    default 200, hiding the error from clients.
    """
    flash("Server encountered an internal error", "error")
    return render_template("index.html"), 500
@app.errorhandler(Exception)
def error_dump(error):
    """Catch-all handler for unexpected exceptions.

    Logs the error, flashes a generic message and renders the index page.
    Bug fix: returns status 500 for server faults; the original returned 200.
    """
    print(error)
    flash("Unexpected Error", "error")
    return render_template("index.html"), 500
| 21.918033 | 74 | 0.689604 | 0 | 0 | 0 | 0 | 662 | 0.495138 | 0 | 0 | 307 | 0.229619 |
96d1b8d26dc9756efe3d492e86bffbd382fb0866 | 2,082 | py | Python | ievv_opensource/ievv_i18n_url/i18n_url_utils/i18n_urlpatterns.py | appressoas/ievv_opensource | 63e87827952ddc8f6f86145b79478ef21d6a0990 | [
"BSD-3-Clause"
] | null | null | null | ievv_opensource/ievv_i18n_url/i18n_url_utils/i18n_urlpatterns.py | appressoas/ievv_opensource | 63e87827952ddc8f6f86145b79478ef21d6a0990 | [
"BSD-3-Clause"
] | 37 | 2015-10-26T09:14:12.000Z | 2022-02-10T10:35:33.000Z | ievv_opensource/ievv_i18n_url/i18n_url_utils/i18n_urlpatterns.py | appressoas/ievv_opensource | 63e87827952ddc8f6f86145b79478ef21d6a0990 | [
"BSD-3-Clause"
] | 1 | 2015-11-06T07:56:34.000Z | 2015-11-06T07:56:34.000Z | import re
from django.urls import path, URLResolver
from django.conf import settings
from ievv_opensource.ievv_i18n_url import active_i18n_url_translation
from ievv_opensource.ievv_i18n_url.views import RedirectToLanguagecodeView
#
# Note: Basically copy and tuning from https://github.com/django/django/blob/1.11.29/django/conf/urls/i18n.py
#
# When updating for Django 2+, see https://github.com/django/django/blob/master/django/conf/urls/i18n.py
#
def i18n_patterns(*urls, include_redirect_view=True):
    """
    Wrap the given URL patterns so they are served behind the active
    language's URL path prefix. May only be used in the root URLconf, not
    in an included URLconf.
    """
    # Without i18n there is nothing to prefix — hand the patterns back.
    if not settings.USE_I18N:
        return list(urls)
    patterns = []
    if include_redirect_view:
        redirect_pattern = path(
            '_ievv-i18n-redirect-to/<str:languagecode>/<path:path>',
            RedirectToLanguagecodeView.as_view(),
            name='ievv_i18n_url_redirect_to_languagecode',
        )
        patterns.append(redirect_pattern)
    # All user patterns live behind the locale prefix resolver.
    patterns.append(URLResolver(I18nLocalePrefixPattern(), list(urls)))
    return patterns
class I18nLocalePrefixPattern:
    """URL pattern that strips/prepends the active language's path prefix.

    Analogue of Django's ``LocalePrefixPattern``, except the prefix comes
    from ``ievv_i18n_url.active_i18n_url_translation`` instead of Django's
    translation machinery.
    """

    def __init__(self, prefix_default_language=True):
        self.prefix_default_language = prefix_default_language
        self.converters = {}

    @property
    def regex(self):
        # Only used by reverse() and cached in _reverse_dict.
        return re.compile(self.language_prefix)

    @property
    def language_prefix(self):
        # Empty string when the active language has no URL path prefix.
        prefix = active_i18n_url_translation.get_active_language_urlpath_prefix()
        return f'{prefix}/' if prefix else ''

    def match(self, path):
        # Strip the language prefix; return None when the path does not
        # carry it (same contract as Django's URL pattern classes).
        prefix = self.language_prefix
        if not path.startswith(prefix):
            return None
        return path[len(prefix):], (), {}

    def check(self):
        return []

    def describe(self):
        return f"'{self}'"

    def __str__(self):
        return self.language_prefix
| 28.916667 | 109 | 0.670509 | 924 | 0.443804 | 0 | 0 | 381 | 0.182997 | 0 | 0 | 567 | 0.272334 |
96d2afcdf9b53f37c987f04d16fd91b999a57d2d | 2,818 | py | Python | test_autocorrelation.py | jacob975/TATIRP | 2d81fa280e039aa931c6f8456632a23ef123282a | [
"MIT"
] | null | null | null | test_autocorrelation.py | jacob975/TATIRP | 2d81fa280e039aa931c6f8456632a23ef123282a | [
"MIT"
] | 4 | 2017-08-22T03:15:22.000Z | 2017-12-19T17:55:31.000Z | test_autocorrelation.py | jacob975/TATIRP | 2d81fa280e039aa931c6f8456632a23ef123282a | [
"MIT"
] | null | null | null | #!/usr/bin/python
'''
Program:
This is a test program for autocorrelation
Usage:
test_autocorrelation.py
Editor:
Jacob975
20181127
#################################
update log
'''
import numpy as np
import time
import matplotlib.pyplot as plt
from uncertainties import unumpy, ufloat
# Convert flux to magnitude
def flux2mag(flux, err_flux):
uflux = unumpy.uarray(flux, err_flux)
umag = -2.5 * unumpy.log(uflux, 10)
mag = unumpy.nominal_values(umag)
err_mag = unumpy.std_devs(umag)
return mag, err_mag
# Convert magnitude to flux
def mag2flux(mag, err_mag):
umag = unumpy.uarray(mag, err_mag)
uflux = 10 ** (-0.4*umag)
flux = unumpy.nominal_values(uflux)
err_flux = unumpy.std_devs(uflux)
return flux, err_flux
def autocorr(x):
# Compute the autocorrelation of the signal, based on the properties of the
# power spectral density of the signal.
xp = x-np.mean(x)
f = np.fft.fft(xp)
p = np.array([np.real(v)**2+np.imag(v)**2 for v in f])
pi = np.fft.ifft(p)
return np.real(pi)[:x.size/2]/np.sum(xp**2)
#--------------------------------------------
# Main code
if __name__ == "__main__":
# Measure time
start_time = time.time()
#----------------------------------------
# Period
T = 50
term = 500
# standard deviation
stdev = 2
# Model a Target Star
# Sine wave (radian)
t = np.arange(term)
intrinsic = np.sin(2 * np.pi * t / T) + 10
intrinsic_mag, _ = flux2mag(intrinsic, stdev)
uncertainties = np.random.normal(0, stdev, term)
flux = intrinsic + uncertainties
mag, err_mag = flux2mag(flux, stdev)
target = np.transpose([ t, mag, err_mag])
correlation = autocorr(mag)
#----------------------------------------
# Plot the answers
# target
target_plt = plt.figure("target", figsize=(9, 6), dpi=100)
plt.subplot(3, 1, 1)
plt.title('intrinsic_target')
plt.xlabel('time')
plt.ylabel('mag')
frame1 = plt.gca()
frame1.axes.get_xaxis().set_visible(False)
plt.plot(t, intrinsic_mag, label = 'intrinsic_target')
plt.legend()
plt.subplot(3, 1, 2)
plt.title('observed_target')
plt.xlabel("time")
plt.ylabel('mag')
frame1 = plt.gca()
frame1.axes.get_xaxis().set_visible(False)
plt.errorbar(target[:,0], target[:,1], yerr = target[:,2], fmt = 'ro', alpha = 0.5, label = 'observed_target')
plt.legend()
plt.subplot(3, 1, 3)
plt.title('autocorrelation')
plt.ylabel('R')
plt.xlabel('time')
plt.bar(t[1:t.size/2], correlation[1:], label = 'correlation')
plt.legend()
plt.show()
#---------------------------------------
# Measure time
elapsed_time = time.time() - start_time
print "Exiting Main Program, spending ", elapsed_time, "seconds."
| 28.755102 | 114 | 0.590845 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 858 | 0.304471 |
96d37afffd4e6e28f939d7f7417d5487bda9c877 | 8,617 | py | Python | lambda/ecs-lifecycle-hook-launch.py | asilvas/ecs-cluster-manager | 9c6cc28b0da047e66e4cb04c3e653fdd84614367 | [
"Apache-2.0"
] | 47 | 2019-02-14T01:52:15.000Z | 2022-02-22T20:37:04.000Z | lambda/ecs-lifecycle-hook-launch.py | eshack94/ecs-cluster-manager | 9c6cc28b0da047e66e4cb04c3e653fdd84614367 | [
"Apache-2.0"
] | 3 | 2019-04-30T18:35:18.000Z | 2019-10-29T01:21:03.000Z | lambda/ecs-lifecycle-hook-launch.py | eshack94/ecs-cluster-manager | 9c6cc28b0da047e66e4cb04c3e653fdd84614367 | [
"Apache-2.0"
] | 35 | 2019-01-30T14:54:18.000Z | 2022-01-17T09:34:16.000Z | # Copyright 2018 Amazon.com, Inc. or its affiliates.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License").
# You may not use this file except in compliance with the License.
# A copy of the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file.
# This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
# CONDITIONS OF ANY KIND, either express or implied. See the License for the
# specific language governing permissions and limitations under the License.
import boto3
import json
import time
import base64
import re
from datetime import datetime
def find_cluster_name(ec2_c, instance_id):
"""
Provided an instance that is currently, or should be part of an ECS cluster
determines the ECS cluster name. This is derived from the user-data
which contains a command to inject the cluster name into ECS agent config
files.
On failure we raise an exception which means this instance isn't a ECS
cluster member so we can proceed with termination.
"""
response = ec2_c.describe_instance_attribute(
InstanceId=instance_id,
Attribute='userData'
)
userdata = base64.b64decode(response['UserData']['Value'])
clustername = re.search("ECS_CLUSTER\s?=\s?(.*?)\s", str(userdata))
if clustername:
return(clustername.group(1))
raise(ValueError(
"Unable to determine the ECS cluster name from instance metadata"
))
def container_instance_healthy(ecs_c, cluster_name, instance_id, context):
"""
Lists all the instances in the cluster to see if we have one joined
that matches the instance ID of the one we've just started.
If we find a cluster member that matches our recently launched instance
ID, checks whether it's in a status of ACTIVE and shows it's ECS
agent is connected to the cluster.
There could be additional checks put in as desired to verify the
instance is healthy!
If we're getting short of time waiting for stability return false
so we can get a continuation.
"""
while True:
paginator = ecs_c.get_paginator('list_container_instances')
instances = paginator.paginate(
cluster=cluster_name,
PaginationConfig={
"PageSize": 10
}
)
for instance in instances:
response = ecs_c.describe_container_instances(
cluster=cluster_name,
containerInstances=instance["containerInstanceArns"]
)
for container_instance in response["containerInstances"]:
if container_instance["ec2InstanceId"] == instance_id:
if container_instance["status"] == "ACTIVE":
if container_instance["agentConnected"] is True:
return(True)
if context.get_remaining_time_in_millis() <= 40000:
return(False)
time.sleep(30)
def find_hook_duration(asg_c, asg_name, instance_id):
"""
Our Lambda function operates in five-minute time samples, however
we eventually give up our actions if they take more than 60 minutes.
This function finds out how long we've been working on our present
operation by listing current Autoscaling activities, and checking
for our instance ID to get a datestamp.
We can then compare that datestamp with present to determine our
overall duration.
"""
paginator = asg_c.get_paginator('describe_scaling_activities')
response_iterator = paginator.paginate(
AutoScalingGroupName=asg_name,
PaginationConfig={
'PageSize': 10,
}
)
hook_start_time = datetime.utcnow()
for response in response_iterator:
for activity in response["Activities"]:
if re.match(
"Terminating.*{}".format(instance_id),
activity["Description"]
):
hook_start_time = activity["StartTime"]
continue
hook_start_time = hook_start_time.replace(tzinfo=None)
hook_duration = (datetime.utcnow() - hook_start_time).total_seconds()
return(int(hook_duration))
def lambda_handler(event, context):
print("Received event {}".format(json.dumps(event)))
# Our hook message can look different depending on how we're called.
# The initial call from AutoScaling has one format, and the call when
# we send a HeartBeat message has another. We need to massage them into
# a consistent format. We'll follow the format used by AutoScaling
# versus the HeartBeat message.
hook_message = {}
# Identify if this is the AutoScaling call
if "LifecycleHookName" in event["detail"]:
hook_message = event["detail"]
# Otherwise this is a HeartBeat call
else:
hook_message = event["detail"]["requestParameters"]
# Heartbeat comes with instanceId instead of EC2InstanceId
hook_message["EC2InstanceId"] = hook_message["instanceId"]
# Our other three elements need to be capitlized
hook_message["LifecycleHookName"] = hook_message["lifecycleHookName"]
hook_message["AutoScalingGroupName"] = \
hook_message["autoScalingGroupName"]
hook_message["LifecycleActionToken"] = \
hook_message["lifecycleActionToken"]
print("Received Lifecycle Hook message {}".format(
json.dumps(hook_message)
))
try:
ec2_c = boto3.client('ec2')
ecs_c = boto3.client('ecs')
asg_c = boto3.client('autoscaling')
print("Determining our ECS Cluster name . . .")
cluster_name = find_cluster_name(
ec2_c,
hook_message["EC2InstanceId"]
)
print(". . . found ECS Cluster name '{}'".format(
cluster_name
))
print("Checking status of new instance in the ECS Cluster . . .")
if container_instance_healthy(
ecs_c, cluster_name, hook_message["EC2InstanceId"], context
):
print(". . . Instance {} connected and active".format(
hook_message["EC2InstanceId"]
))
print("Proceeding with instance {} Launch".format(
hook_message["EC2InstanceId"]
))
asg_c.complete_lifecycle_action(
LifecycleHookName=hook_message["LifecycleHookName"],
AutoScalingGroupName=hook_message["AutoScalingGroupName"],
LifecycleActionToken=hook_message["LifecycleActionToken"],
LifecycleActionResult="CONTINUE",
InstanceId=hook_message["EC2InstanceId"]
)
else:
# Figure out how long we've be at this.
hook_duration = find_hook_duration(
asg_c,
hook_message["AutoScalingGroupName"],
hook_message["EC2InstanceId"]
)
print("Determined we cannot proceed with launch.")
hook_duration = find_hook_duration(
asg_c,
hook_message["AutoScalingGroupName"],
hook_message["EC2InstanceId"]
)
print("We've been waiting {} seconds for instance join.".format(
hook_duration
))
if hook_duration > 3600:
print("Exceeded 3600 seconds waiting to stabilize. Aborting")
asg_c.complete_lifecycle_action(
LifecycleHookName=hook_message["LifecycleHookName"],
AutoScalingGroupName=hook_message["AutoScalingGroupName"],
LifecycleActionToken=hook_message["LifecycleActionToken"],
LifecycleActionResult="ABANDON",
InstanceId=hook_message["EC2InstanceId"]
)
else:
print("Sending a Heartbeat to continue waiting")
asg_c.record_lifecycle_action_heartbeat(
LifecycleHookName=hook_message["LifecycleHookName"],
AutoScalingGroupName=hook_message["AutoScalingGroupName"],
LifecycleActionToken=hook_message["LifecycleActionToken"],
InstanceId=hook_message["EC2InstanceId"]
)
except Exception as e:
print("Exception: {}".format(e))
# Exception handling simply involves a raise so we can be retried.
# CWE should re-try us at least 3 times. Hopefully the issue resolves
# next invocation.
raise
| 36.512712 | 79 | 0.635952 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 4,015 | 0.465939 |
96d3857b0085dd30303fde36c9f088ee7b337a65 | 681 | py | Python | leetcode/ag_107.py | baobei813214232/common-alglib | 303e4edc5f7c1b5e9a2a6ebc4742d7bae61c5d31 | [
"MIT"
] | 4 | 2017-05-02T09:47:48.000Z | 2019-05-01T06:26:26.000Z | leetcode/ag_107.py | xiaolongnk/Alglib | 303e4edc5f7c1b5e9a2a6ebc4742d7bae61c5d31 | [
"MIT"
] | null | null | null | leetcode/ag_107.py | xiaolongnk/Alglib | 303e4edc5f7c1b5e9a2a6ebc4742d7bae61c5d31 | [
"MIT"
] | 1 | 2021-01-31T07:21:53.000Z | 2021-01-31T07:21:53.000Z | from ojlib.TreeLib import *
class Solution(object):
def levelOrderBottom(self, root):
"""
:type root: TreeNode
:rtype: List[List[int]]
"""
if not root:
return []
def slu(root, level, ans):
if not root: return []
if level >= len(ans):
ans.insert(0,[])
slu(root.left, level+1, ans)
slu(root.right, level+1, ans)
ans[len(ans) - level - 1].append(root.val)
ans = []
slu(root, 0 , ans)
return ans
def run():
    # Demo driver: build the sample tree from ojlib.TreeLib and print the
    # bottom-up level order, one level per line.
    # NOTE: the print statement below is Python 2 syntax; this script
    # targets Python 2.
    s = Solution()
    root = make_tree()
    ans = s.levelOrderBottom(root)
    for i in ans:
        print i
| 23.482759 | 54 | 0.484581 | 527 | 0.773862 | 0 | 0 | 0 | 0 | 0 | 0 | 76 | 0.111601 |
96d45139aca244273384a51c530c63b4656ed83e | 72 | py | Python | tests/test_main.py | joaogcs/python-project-template | 079f1606a474e155449ccd29da5970f571bf97a8 | [
"MIT"
] | null | null | null | tests/test_main.py | joaogcs/python-project-template | 079f1606a474e155449ccd29da5970f571bf97a8 | [
"MIT"
] | null | null | null | tests/test_main.py | joaogcs/python-project-template | 079f1606a474e155449ccd29da5970f571bf97a8 | [
"MIT"
] | null | null | null | # This is a sample Python script.
def test_print_hi():
    """Placeholder test so the suite always has one passing case."""
    assert True
| 14.4 | 33 | 0.708333 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 33 | 0.458333 |
96d4fa95c81cfe8edb8a039e4f400fd39027e05f | 1,974 | py | Python | products/api/api.py | Arvind-4/E-commerce-REST-API | ee6bf15f08590db99c423639db382f8c3fb3ab05 | [
"MIT"
] | null | null | null | products/api/api.py | Arvind-4/E-commerce-REST-API | ee6bf15f08590db99c423639db382f8c3fb3ab05 | [
"MIT"
] | null | null | null | products/api/api.py | Arvind-4/E-commerce-REST-API | ee6bf15f08590db99c423639db382f8c3fb3ab05 | [
"MIT"
] | null | null | null | from django.shortcuts import render
from rest_framework.generics import (
ListAPIView,
RetrieveAPIView,
)
from rest_framework.views import APIView
from rest_framework.response import Response
from django.core import serializers
from django.http import Http404
from rest_framework.permissions import AllowAny
from rest_framework.decorators import permission_classes
from products.models import Product
from .serializers import ProductSerializer
# Create your views here.
# class ProductCreate(CreateAPIView):
# queryset = Product.objects.all()
# serializer_class = ProductSerializer
@permission_classes((AllowAny, ))
class ProductListView(ListAPIView):
    """Public read-only endpoint listing every product."""
    queryset = Product.objects.all()
    serializer_class = ProductSerializer
# class ProductUpdate(RetrieveUpdateAPIView):
# queryset = Product.objects.all()
# serializer_class = ProductSerializer
# lookup_field = 'slug'
# class ProductDelete(DestroyAPIView):
# queryset = Product.objects.all()
# serializer_class = ProductSerializer
# lookup_field = 'slug'
# @permission_classes((AllowAny, ))
# class ProductDetail(APIView):
# def get_object(self, slug):
# try:
# return Product.objects.filter(slug=slug).first()
# except:
# raise Http404
# def get(self, request, slug, format=None, *args, **kwargs):
# instance = self.get_object(slug)
# if instance:
# obj = ProductSerializer(instance)
# return Response(obj.data)
# else:
# return Http404
@permission_classes((AllowAny, ))
class ProductDetail(RetrieveAPIView):
    """Public read-only detail endpoint keyed by slug *and* id."""
    serializer_class = ProductSerializer
    # NOTE(review): DRF expects ``lookup_field`` to be a single string; this
    # list is effectively ignored because ``get_object`` is overridden below.
    lookup_field = ['slug', 'id']

    def get_object(self):
        """Return the product matching both URL kwargs, or raise Http404.

        Uses a single ``first()`` query instead of the original
        ``exists()`` + ``first()`` pair (two database round-trips).
        """
        slug = self.kwargs.get('slug')
        id_ = self.kwargs.get('id')
        product = Product.objects.filter(slug=slug, id=id_).first()
        if product is None:
            raise Http404
        return product
96d5b41db8b7fcb7f3b93add103d8acc7fa5230e | 1,067 | py | Python | main.py | adelhult/please-plot | dd47b4c5ce9b9a0d69ed0b36f11e949ef08cb428 | [
"MIT"
] | null | null | null | main.py | adelhult/please-plot | dd47b4c5ce9b9a0d69ed0b36f11e949ef08cb428 | [
"MIT"
] | null | null | null | main.py | adelhult/please-plot | dd47b4c5ce9b9a0d69ed0b36f11e949ef08cb428 | [
"MIT"
] | null | null | null | from traceback import print_exc
from uuid import uuid4
from flask import Flask, request, send_from_directory
from function_plot import *
app = Flask(__name__)
max_simultaneous_requests = 8
simultaneous_requests = 0
@app.route('/generate/')
def generate():
    """Render a function-plot video for the query-string parameters.

    Query args:
        fn: expression to plot (passed through to ``plot``)
        x_min/x_max/y_min/y_max: axis bounds (floats, with defaults)

    Returns:
        The result of ``plot`` (the generated video), or "" on any failure.
    """
    global simultaneous_requests
    # NOTE(review): this counter is not lock-protected, so under a threaded
    # server the limit check is only best-effort — TODO confirm intent.
    simultaneous_requests += 1
    try:
        if simultaneous_requests >= max_simultaneous_requests:
            # RuntimeError instead of the generic Exception; it is caught
            # by the handler below either way.
            raise RuntimeError("Too many simultaneous requests")
        fn = request.args.get("fn")
        x_min = float(request.args.get("x_min", -8))
        x_max = float(request.args.get("x_max", 8))
        y_min = float(request.args.get("y_min", -5))
        y_max = float(request.args.get("y_max", 5))
        filename = str(uuid4()) + ".mp4"
        return plot(fn, filename, x_min, x_max, y_min, y_max)
    except Exception:
        # Narrowed from a bare except so SystemExit/KeyboardInterrupt still
        # propagate; everything else is logged and hidden from the client.
        print_exc()
        return ""
    finally:
        simultaneous_requests -= 1
@app.route('/videos/<path:filename>')
def download_file(filename):
    """Serve a rendered video file as an mp4 attachment."""
    # NOTE(review): `path` is not defined in this file -- presumably exported
    # by the `from function_plot import *` above; confirm it is the directory
    # that plot() writes videos into.
    return send_from_directory(path, filename, mimetype="video/mp4", as_attachment=True)
96d6269877deb16ea1414529d7a4c8eeec745fe2 | 758 | py | Python | structmanager/optimization/genesis/utils.py | saullocastro/structmanager | 01e9677f201c9ef577fdf8a15833be7e364441ab | [
"BSD-3-Clause"
] | 1 | 2015-09-17T20:48:08.000Z | 2015-09-17T20:48:08.000Z | structmanager/optimization/genesis/utils.py | saullocastro/structmanager | 01e9677f201c9ef577fdf8a15833be7e364441ab | [
"BSD-3-Clause"
] | 1 | 2019-01-09T20:31:17.000Z | 2019-01-10T11:10:07.000Z | structmanager/optimization/genesis/utils.py | saullocastro/structmanager | 01e9677f201c9ef577fdf8a15833be7e364441ab | [
"BSD-3-Clause"
] | 1 | 2020-12-29T00:22:23.000Z | 2020-12-29T00:22:23.000Z | def format_float(x, size=8, lr):
"""Format a float number
Parameters
----------
x : float
The float number.
size : int, optional
Desired size of the output string.
lr : str ('<' or '>')
Indicates if it should be left or right aligned.
Returns
-------
out : str
The formatted string.
"""
y = str(x)
if '.' in y:
has_floating_point = True
else:
has_floating_point = False
if lr == '<':
y = y.ljust(size)
elif lr == '>':
y = y.rjust(size)
else:
raise ValueError("`lr` must be '<' or '>'")
if not '.' in y and has_floating_point:
raise ValueError('Float %f does not fit in size = %d' % (x, size))
return y
| 22.294118 | 74 | 0.514512 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 397 | 0.523747 |
96d74d2ff0dc3fdfbc59821572994b8df28d1d63 | 5,412 | py | Python | mtwaffle/mtsite.py | leomiquelutti/mtwaffle | 99f769a5796c27adf0dd7e8c30fecadec4fa9581 | [
"MIT"
] | null | null | null | mtwaffle/mtsite.py | leomiquelutti/mtwaffle | 99f769a5796c27adf0dd7e8c30fecadec4fa9581 | [
"MIT"
] | null | null | null | mtwaffle/mtsite.py | leomiquelutti/mtwaffle | 99f769a5796c27adf0dd7e8c30fecadec4fa9581 | [
"MIT"
] | null | null | null | import inspect
import sys
import numpy as np
import attrdict
from mtwaffle import graphs
from mtwaffle import mt
class Site(attrdict.AttrDict):
    """A magnetotelluric site: frequencies plus per-frequency impedance tensors.

    Attribute access is dynamic: component shortcuts such as ``res_xy`` or
    ``phase_yx`` and any function from :mod:`mtwaffle.mt` whose required
    arguments are themselves attributes of this Site are resolved on the fly
    in :meth:`get_property` -- see :meth:`help` for the available names.
    """

    # Maps a 2-letter component suffix to [row, col] indices into the
    # 2x2 tensors (impedance, phase, apparent resistivity).
    index_map = {
        'xx': [0, 0],
        'xy': [0, 1],
        'yx': [1, 0],
        'yy': [1, 1]
    }

    # Names in mtwaffle.mt that must never be auto-resolved as attributes.
    EXCLUDED_CALLABLES = ('between_freqs', )

    def __init__(self, freqs, zs, name='', phase_func=None, **kwargs):
        """
        Args:
            freqs: sequence of frequencies (converted to an ndarray).
            zs: impedance tensors (converted to an ndarray).
            name: optional site label.
            phase_func: callable mapping zs -> phases; defaults to mt.phase.
            **kwargs: extra attributes stored directly on the Site.
        """
        super(attrdict.AttrDict, self).__init__()
        self.freqs = np.asarray(freqs)
        self.zs = np.asarray(zs)
        self.name = name
        if phase_func is None:
            phase_func = mt.phase
        self.phase_func = phase_func
        for key, value in kwargs.items():
            setattr(self, key, value)

    @property
    def periods(self):
        # Periods are the reciprocal of the frequencies.
        return 1. / self.freqs

    @property
    def phases(self):
        # Phases computed on demand from the impedance tensors.
        return self.phase_func(self.zs)

    def inspect_mt_callable(self, name):
        """Return (function, required-positional-arg-names) for mt.callables[name]."""
        f = mt.callables[name]
        argnames = [  # Find arguments of callable from mtwaffle.mt
            p.name for p in inspect.signature(f).parameters.values()
            if p.kind == p.POSITIONAL_OR_KEYWORD and p.default is p.empty
        ]
        return f, argnames

    def help(self, output=sys.stdout):
        '''Print a list of the attributes which are available.'''
        output.write('''
Attributes of mtwaffle.mtsite.Site are calculated using functions from the mtwaffle.mt module:
mtsite.Site    mtwaffle.mt function
attribute      (args are Site attributes)     Function description
-------------- ------------------------------ ----------------------------------------------
''')
        # First line of a callable's docstring, used as its table description.
        label = lambda f: f.__doc__.splitlines()[0] if f.__doc__ else 'MISSING DOC'
        fnames = []
        # Keep only the mt callables whose arguments this Site can actually supply.
        for fname, f in mt.callables.items():
            try:
                getattr(self, fname)
            except:
                pass
            else:
                fnames.append(fname)
        for fname in fnames:
            f, argnames = self.inspect_mt_callable(fname)
            cname = self.__class__.__name__
            argsig = ', '.join(['{}'.format(arg) for arg in argnames])
            source = '{}({})'.format(fname, argsig)
            label_attr = '{}'.format(fname.ljust(14))
            label_source = source.ljust(30)
            label_help = label(f)
            output.write('{} {} {}\n'.format(label_attr, label_source, label_help))

    def get_property(self, key):
        """Resolve dynamic attribute *key*; return False if it is not dynamic."""
        # Is the key ending with xx, xy, yx, or yy?
        if key[-2:] in self.index_map:
            indices = self.index_map[key[-2:]]
            # NOTE(review): list-based fancy indexing like arr[[Ellipsis, 0, 1]]
            # is deprecated in newer numpy (a tuple is expected) -- confirm the
            # supported numpy version.
            if key.startswith('res_'):
                return self.appres[[Ellipsis] + indices]
            elif key.startswith('phase_'):
                return self.phases[[Ellipsis] + indices]
            elif key.startswith('zr_'):
                return self.zs.real[[Ellipsis] + indices]
            elif key.startswith('zi_'):
                return self.zs.imag[[Ellipsis] + indices]
        # See if we can complete a function from mtwaffle.mt using the
        # existing attributes in this Site:
        elif key in mt.callables and not key in self.EXCLUDED_CALLABLES:
            f, argnames = self.inspect_mt_callable(key)
            return f(*[getattr(self, arg) for arg in argnames])
        # NOTE(review): False doubles as the "not dynamic" sentinel, so a
        # dynamic attribute that legitimately evaluates to False would fall
        # through to the normal lookup in __getattr__/__getitem__.
        return False

    def __getattr__(self, key):
        # Try the dynamic properties first; fall back to normal attrdict lookup.
        value = self.get_property(key)
        if value is False:
            return super(attrdict.AttrDict, self).__getattr__(key)
        else:
            return value

    def __getitem__(self, key):
        # Same resolution order as __getattr__, for mapping-style access.
        value = self.get_property(key)
        if value is False:
            return super(attrdict.AttrDict, self).__getitem__(key)
        else:
            return value

    def plot_res_phase(self, **kwargs):
        """Plot apparent resistivity and phase for the xy and yx components."""
        args = (
            (self.freqs, self.freqs),
            (self.res_xy, self.res_yx),
            (self.phase_xy, self.phase_yx),
        )
        if not 'res_indiv_kws' in kwargs:
            kwargs['res_indiv_kws'] = (
                {'label': 'xy', 'color': 'b'},
                {'label': 'yx', 'color': 'g'},
            )
        return graphs.plot_res_phase(*args, **kwargs)

    def plot_impedance_tensors(self, *args, **kwargs):
        """Delegate to graphs.plot_impedance_tensors for this site's zs/freqs."""
        return graphs.plot_impedance_tensors(
            self.zs, self.freqs, **kwargs)

    def plot_ptensell(self, *args, **kwargs):
        """Delegate to graphs.plot_ptensell for this site's phase tensors."""
        return graphs.plot_ptensell(
            self.ptensors, self.freqs, *args, **kwargs
        )

    def plot_ptensell_filled(self, *args, **kwargs):
        """Delegate to graphs.plot_ptensell_filled for this site's phase tensors."""
        return graphs.plot_ptensell_filled(
            self.ptensors, self.freqs, *args, **kwargs
        )

    def plot_mohr_imp(self, *args, **kwargs):
        """Mohr-circle plot of the impedance tensors (titled with the site name)."""
        kwargs['title'] = kwargs.get('title', self.name)
        return graphs.plot_mohr_imp(
            self.zs, self.freqs, *args, **kwargs
        )

    def plot_mohr_ptensor(self, *args, **kwargs):
        """Mohr-circle plot of the phase tensors."""
        return graphs.plot_mohr_ptensor(
            self.ptensors, self.freqs, *args, **kwargs
        )
96d7d97e9464432987e79cd5bc37343800b4edcd | 2,186 | py | Python | Testing/evaluate.py | codedecde/WordEmbeddings | 84e6cd5424b74da6e889dd1b6ab7a6c5df9432d9 | [
"MIT"
] | 2 | 2018-09-19T01:37:31.000Z | 2019-09-22T02:45:09.000Z | Testing/evaluate.py | codedecde/WordEmbeddings | 84e6cd5424b74da6e889dd1b6ab7a6c5df9432d9 | [
"MIT"
] | null | null | null | Testing/evaluate.py | codedecde/WordEmbeddings | 84e6cd5424b74da6e889dd1b6ab7a6c5df9432d9 | [
"MIT"
] | null | null | null | import numpy as np
import heapq
def cosine(x, y):
    """Cosine similarity of vectors *x* and *y*.

    A tiny eps inside the square root keeps the denominator non-zero for
    zero-norm vectors.
    """
    eps = 1e-10
    numerator = np.dot(x, y)
    denominator = np.sqrt(np.dot(x, x) * np.dot(y, y) + eps)
    return numerator / denominator
def get_nearest_k(word, vocab, vocab_matrix, k=4, return_score=False):
    """Return the k words most cosine-similar to *word*.

    Fix: the original returned the neighbours in arbitrary heap order;
    ``heapq.nlargest`` makes the result deterministic, sorted most-similar
    first, while still keeping only k candidates in memory.

    Args:
        word: query word; must be a key of *vocab*.
        vocab: mapping word -> row index into *vocab_matrix*.
        vocab_matrix: 2-D array of word vectors, one row per word.
        k: number of neighbours to return.
        return_score: if True, return (similarity, word) tuples instead of
            bare words.
    """
    vector_word = vocab_matrix[vocab[word]]
    scored = (
        (cosine(vector_word, vocab_matrix[vocab[w]]), w)
        for w in vocab
        if w != word  # never return the query word itself
    )
    k_nearest_neighbors = heapq.nlargest(k, scored)
    if return_score:
        return k_nearest_neighbors
    return [w for _, w in k_nearest_neighbors]
def get_nearest_k_with_matrix(vector, word2vec, k, return_score):
    """Return the k words in *word2vec* most cosine-similar to *vector*.

    Fix: the original returned results in arbitrary heap order;
    ``heapq.nlargest`` makes the output deterministic, most-similar first.

    Args:
        vector: query embedding.
        word2vec: mapping word -> embedding vector.
        k: number of neighbours to return.
        return_score: if True, return (similarity, word) tuples instead of
            bare words.
    """
    scored = ((cosine(vector, word2vec[w]), w) for w in word2vec)
    k_nearest_neighbors = heapq.nlargest(k, scored)
    if return_score:
        return k_nearest_neighbors
    return [w for _, w in k_nearest_neighbors]
def get_furthest_k(word, vocab, vocab_matrix, k=4, return_score=False):
    """Return the k words LEAST cosine-similar to *word*.

    Fix: the original returned results in arbitrary heap order;
    ``heapq.nlargest`` makes the output deterministic, least-similar first.
    As in the original, scores are NEGATED cosine similarities (the max-heap
    trick), so with return_score=True the tuples carry -cosine(...) values.
    """
    vector_word = vocab_matrix[vocab[word]]
    scored = (
        (-cosine(vector_word, vocab_matrix[vocab[w]]), w)
        for w in vocab
        if w != word  # never return the query word itself
    )
    k_furthest = heapq.nlargest(k, scored)
    if return_score:
        return k_furthest
    return [w for _, w in k_furthest]
96d86338e0559532f2558c6914fd242a57096663 | 239 | py | Python | src/chapter3/exercise1.py | group13bse1/BSE-2021 | edd715d06696e5993c2a5b458e31bd8b87f67ded | [
"MIT"
] | null | null | null | src/chapter3/exercise1.py | group13bse1/BSE-2021 | edd715d06696e5993c2a5b458e31bd8b87f67ded | [
"MIT"
] | null | null | null | src/chapter3/exercise1.py | group13bse1/BSE-2021 | edd715d06696e5993c2a5b458e31bd8b87f67ded | [
"MIT"
] | null | null | null | hours = input('Enter Hours \n')
rate = input('Enter Rate\n')
hours = int(hours)
rate = float(rate)
if (hours <= 40):
pay = rate*hours
else:
extra_time = hours - 40
pay = (rate*hours) + ((rate*extra_time)/2)
print('Pay: ', pay)
| 21.727273 | 46 | 0.610879 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 37 | 0.154812 |
96d87afa9be08112d9a71a5802531bd5f19ae363 | 415 | py | Python | hello.py | nickstenning/hello-pyramid | d01e8f71b2b9f691b05d1c1f50c0a5aa3d416b6e | [
"MIT"
] | 1 | 2016-12-15T19:01:05.000Z | 2016-12-15T19:01:05.000Z | hello.py | nickstenning/hello-pyramid | d01e8f71b2b9f691b05d1c1f50c0a5aa3d416b6e | [
"MIT"
] | null | null | null | hello.py | nickstenning/hello-pyramid | d01e8f71b2b9f691b05d1c1f50c0a5aa3d416b6e | [
"MIT"
] | null | null | null | from pyramid.config import Configurator
from pyramid.view import view_config
@view_config(route_name='index', renderer='templates/index.html.jinja2')
def index(request):
return {}
def create_app():
config = Configurator()
config.include('pyramid_jinja2')
config.add_route('index', '/')
config.add_static_view(name='static', path='static')
config.scan()
return config.make_wsgi_app()
| 24.411765 | 72 | 0.722892 | 0 | 0 | 0 | 0 | 106 | 0.255422 | 0 | 0 | 78 | 0.187952 |
96d961e7c4273593f91da5aa4b4a9f9e0753b755 | 2,836 | py | Python | src/pytheas/tasks/schema.py | dcronkite/pytheas | 3cdd6a21bda488e762931cbf5975964d5e574abd | [
"MIT"
] | null | null | null | src/pytheas/tasks/schema.py | dcronkite/pytheas | 3cdd6a21bda488e762931cbf5975964d5e574abd | [
"MIT"
] | null | null | null | src/pytheas/tasks/schema.py | dcronkite/pytheas | 3cdd6a21bda488e762931cbf5975964d5e574abd | [
"MIT"
] | null | null | null | def get_schema():
return {
'type': 'object',
'properties': {
'connections': {
'type': 'array',
'items': {
'type': 'object',
'properties': {
'name': {'type': 'string'},
'path': {'type': 'string'},
'driver': {'type': 'string'},
'server': {'type': 'string'},
'database': {'type': 'string'},
'name_col': {'type': 'string'},
'text_col': {'type': 'string'},
}
}
},
'documents': {'type': 'array', 'items': {'$ref': '#/definitions/document'}},
'irr_documents': {'type': 'array', 'items': {'$ref': '#/definitions/document'}},
'labels': {'type': 'array', 'items': {'type': 'string'}},
'highlights': {'type': 'array', 'items': {'type': 'string'}},
'project': {'type': 'string'},
'subproject': {'type': 'string'},
'start_date': {'type': 'string'},
'end_date': {'type': 'string'},
'annotation': {
'type': 'object',
'properties': {
'irr_percent': {'type': 'number'},
'irr_count': {'type': 'integer'},
'annotators': {
'type': 'array',
'items': {
'type': 'object',
'properties': {
'name': {'type': 'string'},
'number': {'type': 'integer'},
'percent': {'type': 'number', 'maximum': 1.0, 'minimum': 0.0},
'documents': {'type': 'array', 'items': {'$ref': '#/definitions/document'}},
},
},
},
},
}
},
'definitions': {
'document': {
'type': 'object',
'properties': {
'name': {'type': 'string'},
'metadata': {'type': 'object'},
'text': {'type': 'string'},
'offsets': {'type': 'array', 'items': {'$ref': '#/definitions/offset'}},
'highlights': {'type': 'array', 'items': {'type': 'string'}},
'expiration_date': {'type': 'string'},
}
},
'offset': {
'type': 'object',
'properties': {
'start': {'type': 'integer', 'minimum': 0},
'end': {'type': 'integer', 'minimum': 0}
}
},
}
} | 41.705882 | 108 | 0.322285 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1,189 | 0.419252 |
96db45847d6f57ad072a49bd80261093667430ad | 6,722 | py | Python | src/autobridge/Floorplan/LegalizeFloorplan.py | mfkiwl/AutoBridge | 1f428c271d2e338f39ea7478d81c66636c419670 | [
"MIT"
] | null | null | null | src/autobridge/Floorplan/LegalizeFloorplan.py | mfkiwl/AutoBridge | 1f428c271d2e338f39ea7478d81c66636c419670 | [
"MIT"
] | null | null | null | src/autobridge/Floorplan/LegalizeFloorplan.py | mfkiwl/AutoBridge | 1f428c271d2e338f39ea7478d81c66636c419670 | [
"MIT"
] | null | null | null | import logging
from collections import defaultdict
from typing import Dict, List, Tuple, Optional
from mip import Model, minimize, BINARY, xsum, OptimizationStatus, Var
from autobridge.Floorplan.Utilities import *
from autobridge.Opt.DataflowGraph import Vertex
from autobridge.Opt.Slot import Slot
from autobridge.Opt.SlotManager import SlotManager, Dir
from autobridge.util import get_mip_model_silent
_logger = logging.getLogger('autobridge')
def _create_ilp_vars(
    m: Model, v_list: List[Vertex], s_list: List[Slot]
) -> Tuple[Dict[Vertex, Dict[Slot, Var]], Dict[Slot, Dict[Vertex, Var]]]:
    """Create one binary indicator per (vertex, slot) pair.

    The same Var object is registered in both lookup directions:
    vertex -> slot -> var and slot -> vertex -> var.
    """
    v_to_s_to_var = defaultdict(dict)
    s_to_v_to_var = defaultdict(dict)
    for v in v_list:
        for s in s_list:
            var = m.add_var(var_type=BINARY)
            v_to_s_to_var[v][s] = var
            s_to_v_to_var[s][v] = var
    return v_to_s_to_var, s_to_v_to_var
def _add_area_constraints(
    m: Model,
    s_to_v_to_var: Dict[Slot, Dict[Vertex, Var]],
    resource_usage_limit: int
) -> None:
    """Cap each slot's usage of every resource type at its scaled capacity."""
    for r in RESOURCE_TYPES:
        for s, v_to_var in s_to_v_to_var.items():
            # Total r-type area of all vertices (plus their inbound FIFOs)
            # assigned to slot s must fit within the scaled slot capacity.
            usage = xsum(
                v.getVertexAndInboundFIFOArea()[r] * var
                for v, var in v_to_var.items()
            )
            m += usage <= s.area[r] * resource_usage_limit
def _add_unique_assign_constraints(m: Model, v_to_s_to_var: Dict[Vertex, Dict[Slot, Var]]) -> None:
  """
  Each vertex is assigned to exactly one slot: per vertex, exactly one of
  its binary indicator variables may be 1.
  """
  for v, s_to_var in v_to_s_to_var.items():
    m += xsum(var for var in s_to_var.values()) == 1
def _add_pre_assignments(
  m: Model,
  v_to_s_to_var: Dict[Vertex, Dict[Slot, Var]],
  pre_assignments: Dict[Vertex, Slot],
  all_slot_list: List[Slot],
) -> None:
  """Pin each pre-assigned vertex: forbid every candidate slot that does not
  contain the vertex's target slot."""
  for v, target_slot in pre_assignments.items():
    for candidate in all_slot_list:
      # Zeroing the indicator rules the candidate out of the assignment.
      if not candidate.containsChildSlot(target_slot):
        m += v_to_s_to_var[v][candidate] == 0
def _get_v_to_s_to_cost(
    v_list: List[Vertex], s_list: List[Slot], orig_v2s: Dict[Vertex, Slot]
) -> Dict[Vertex, Dict[Slot, int]]:
    """Cost for assigning a vertex to a slot.

    The cost is the distance from the vertex's original slot times its total
    wire width (+1 so a module with zero wire length still pays for moving).
    """
    v_to_s_to_cost = defaultdict(dict)
    for v in v_list:
        # Per-vertex invariants hoisted out of the inner loop.
        orig_s = orig_v2s[v]
        weight = v.getTotalWireWidth() + 1
        for s in s_list:
            v_to_s_to_cost[v][s] = orig_s.getDistance(s) * weight
    return v_to_s_to_cost
def _add_opt_goal(
    m: Model,
    v_to_s_to_cost: Dict[Vertex, Dict[Slot, int]],
    v_to_s_to_var: Dict[Vertex, Dict[Slot, Var]]
) -> None:
    """Set the objective: minimize the total weighted relocation cost."""
    terms = [
        v_to_s_to_cost[v][s] * var
        for v, s_to_var in v_to_s_to_var.items()
        for s, var in s_to_var.items()
    ]
    m.objective = minimize(xsum(terms))
def _add_grouping_constraints(
    m: Model,
    grouping_list: List[List[Vertex]],
    v_to_s_to_var: Dict[Vertex, Dict[Slot, Var]],
    s_list: List[Slot]
) -> None:
    """Force every vertex in a group into the same slot as the group's first
    vertex, by equating their indicator variables slot by slot."""
    for grouping in grouping_list:
        anchor = grouping[0]
        for other in grouping[1:]:
            _logger.debug(f'Grouping {anchor.name} and {other.name}')
            for s in s_list:
                m += v_to_s_to_var[anchor][s] == v_to_s_to_var[other][s]
def _get_ilp_results(
  v_to_s_to_var: Dict[Vertex, Dict[Slot, int]]
) -> Tuple[ Dict[Vertex, Slot], Dict[Slot, List[Vertex]] ]:
  """
  Extract which modules are assigned to which slots, in both directions
  (vertex -> slot and slot -> list of vertices).
  """
  # get v2s
  new_v2s = {}
  for v, s_to_var in v_to_s_to_var.items():
    for s, var in s_to_var.items():
      # The unique-assignment constraint guarantees exactly one indicator
      # per vertex is 1 in an optimal solution.
      if round(var.x) == 1:
        new_v2s[v] = s
        break
    # Sanity check: the solver must have picked a slot for every vertex.
    assert v in new_v2s
  # get s2v
  new_s2v = defaultdict(list)
  for v, s in new_v2s.items():
    new_s2v[s].append(v)
  return new_v2s, new_s2v
def _log_results(
    new_v2s: Dict[Vertex, Slot],
    orig_v2s: Dict[Vertex, Slot]
) -> None:
    """Log every vertex whose slot changed during legalization, with the
    relocation distance."""
    for v, new_slot in new_v2s.items():
        old_slot = orig_v2s[v]
        if new_slot != old_slot:
            _logger.debug(f'Vertex {v.name} is moved {old_slot.getDistance(new_slot)} units from {old_slot.getRTLModuleName()} to {new_slot.getRTLModuleName()}')
def get_legalized_v2s(
  orig_v2s: Dict[Vertex, Slot],
  grouping_list: List[ List[Vertex] ],
  all_slot_list: List[Slot],
  pre_assignments: Dict[Vertex, Slot],
  resource_usage_limit: int
) -> Dict[Vertex, Slot]:
  """
  Adjust the floorplanning to satisfy the area requirement.

  Solves an ILP over binary (vertex, slot) indicators: area, uniqueness,
  pre-assignment and grouping constraints, minimizing relocation cost.
  Returns the new vertex-to-slot mapping, or {} if the ILP is infeasible
  under this resource_usage_limit.
  """
  _logger.debug(f'Begin legalizing the floorplan results, target resource usage limit: {resource_usage_limit}')
  m = get_mip_model_silent()
  v_list = list(orig_v2s.keys())
  s_list = all_slot_list
  # Build variables and costs, then layer on all constraints and the goal.
  v_to_s_to_var, s_to_v_to_var = _create_ilp_vars(m, v_list, s_list)
  v_to_s_to_cost = _get_v_to_s_to_cost(v_list, s_list, orig_v2s)
  _add_area_constraints(m, s_to_v_to_var, resource_usage_limit)
  _add_pre_assignments(m, v_to_s_to_var, pre_assignments, all_slot_list)
  _add_unique_assign_constraints(m, v_to_s_to_var)
  _add_grouping_constraints(m, grouping_list, v_to_s_to_var, s_list)
  _add_opt_goal(m, v_to_s_to_cost, v_to_s_to_var)
  status = m.optimize()
  if status != OptimizationStatus.OPTIMAL:
    _logger.debug(f'Fail to legalize the floorplan under target ratio {resource_usage_limit}')
    return {}
  new_v2s, new_s2v = _get_ilp_results(v_to_s_to_var)
  _log_results(new_v2s, orig_v2s)
  _logger.info('Finish legalizing the floorplan results.')
  return new_v2s
def legalize_floorplan(
    curr_v2s: Dict[Vertex, Slot],
    slot_manager: SlotManager,
    grouping_constraints: List[List[Vertex]],
    pre_assignments: Dict[Vertex, Slot],
    partition_order: List[Dir],
    init_usage_ratio: float = 0.7,
    limit_increase_step: float = 0.01,
    max_usage_ratio: float = 0.85,
) -> Optional[Dict[Vertex, Slot]]:
    """Relax the usage limit until a legal floorplan is found.

    The iterative partitioning process may leave some slots over-used; this
    re-assigns vertices from over-used to under-used slots, retrying with a
    progressively looser resource-usage limit.

    Fixes: the unbounded ``while 1`` with an internal break-out check is
    replaced by a bounded loop condition, and the "threhold" typo in the
    failure log message is corrected.

    Returns the legalized vertex-to-slot mapping, or {} if no legal
    assignment exists within max_usage_ratio.
    """
    # If a slot is empty, it does not appear in curr_v2s -- enumerate all
    # leaf slots of the chosen partition order explicitly.
    all_leaf_slots = slot_manager.getLeafSlotsAfterPartition(partition_order)

    curr_limit = init_usage_ratio
    while curr_limit <= max_usage_ratio:
        new_v2s = get_legalized_v2s(
            curr_v2s, grouping_constraints, all_leaf_slots, pre_assignments, curr_limit)
        if new_v2s:
            _logger.info(f'Legalization succeeded with target usage limit {curr_limit}')
            return new_v2s
        curr_limit += limit_increase_step

    _logger.info(f'Fail to legalize under the cut threshold {max_usage_ratio}')
    return {}
96dcb20e01404e4765e91a644c54a3f39200919c | 4,823 | py | Python | tests/unit/spanner_dbapi/test_connect.py | larkee/python-spanner | cbb4ee3eca9ac878b4f3cd78cfcfe8fc1acb86f9 | [
"Apache-2.0"
] | null | null | null | tests/unit/spanner_dbapi/test_connect.py | larkee/python-spanner | cbb4ee3eca9ac878b4f3cd78cfcfe8fc1acb86f9 | [
"Apache-2.0"
] | null | null | null | tests/unit/spanner_dbapi/test_connect.py | larkee/python-spanner | cbb4ee3eca9ac878b4f3cd78cfcfe8fc1acb86f9 | [
"Apache-2.0"
] | null | null | null | # Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""connect() module function unit tests."""
import unittest
from unittest import mock
import google.auth.credentials
# Fixture identifiers shared by every test case below.
INSTANCE = "test-instance"
DATABASE = "test-database"
PROJECT = "test-project"
USER_AGENT = "user-agent"
def _make_credentials():
    """Return a mock whose spec is a scoped Google credentials class."""
    class _CredentialsWithScopes(
        google.auth.credentials.Credentials, google.auth.credentials.Scoped
    ):
        pass

    return mock.Mock(spec=_CredentialsWithScopes)
@mock.patch("google.cloud.spanner_v1.Client")
class Test_connect(unittest.TestCase):
    """Unit tests for spanner_dbapi.connect(); the Spanner client is mocked
    at the class level, so each test receives the patched class as
    ``mock_client``."""

    def test_w_implicit(self, mock_client):
        """Defaults only: connect() builds its own client and session pool."""
        from google.cloud.spanner_dbapi import connect
        from google.cloud.spanner_dbapi import Connection

        client = mock_client.return_value
        instance = client.instance.return_value
        database = instance.database.return_value

        connection = connect(INSTANCE, DATABASE)

        self.assertIsInstance(connection, Connection)
        self.assertIs(connection.instance, instance)
        client.instance.assert_called_once_with(INSTANCE)
        self.assertIs(connection.database, database)
        instance.database.assert_called_once_with(DATABASE, pool=None)
        # Database constructs its own pool when none is passed.
        self.assertIsNotNone(connection.database._pool)

    def test_w_explicit(self, mock_client):
        """Explicit project, credentials, pool and user agent are forwarded
        to the client and database."""
        from google.cloud.spanner_v1.pool import AbstractSessionPool
        from google.cloud.spanner_dbapi import connect
        from google.cloud.spanner_dbapi import Connection
        from google.cloud.spanner_dbapi.version import PY_VERSION

        credentials = _make_credentials()
        pool = mock.create_autospec(AbstractSessionPool)
        client = mock_client.return_value
        instance = client.instance.return_value
        database = instance.database.return_value

        connection = connect(
            INSTANCE, DATABASE, PROJECT, credentials, pool=pool, user_agent=USER_AGENT,
        )

        self.assertIsInstance(connection, Connection)
        mock_client.assert_called_once_with(
            project=PROJECT, credentials=credentials, client_info=mock.ANY
        )
        # The client_info passed to the client must carry our user agent.
        client_info = mock_client.call_args_list[0][1]["client_info"]
        self.assertEqual(client_info.user_agent, USER_AGENT)
        self.assertEqual(client_info.python_version, PY_VERSION)
        self.assertIs(connection.instance, instance)
        client.instance.assert_called_once_with(INSTANCE)
        self.assertIs(connection.database, database)
        instance.database.assert_called_once_with(DATABASE, pool=pool)

    def test_w_instance_not_found(self, mock_client):
        """connect() raises ValueError when the instance does not exist."""
        from google.cloud.spanner_dbapi import connect

        client = mock_client.return_value
        instance = client.instance.return_value
        instance.exists.return_value = False

        with self.assertRaises(ValueError):
            connect(INSTANCE, DATABASE)

        instance.exists.assert_called_once_with()

    def test_w_database_not_found(self, mock_client):
        """connect() raises ValueError when the database does not exist."""
        from google.cloud.spanner_dbapi import connect

        client = mock_client.return_value
        instance = client.instance.return_value
        database = instance.database.return_value
        database.exists.return_value = False

        with self.assertRaises(ValueError):
            connect(INSTANCE, DATABASE)

        database.exists.assert_called_once_with()

    def test_w_credential_file_path(self, mock_client):
        """A string credentials path routes through from_service_account_json."""
        from google.cloud.spanner_dbapi import connect
        from google.cloud.spanner_dbapi import Connection
        from google.cloud.spanner_dbapi.version import PY_VERSION

        credentials_path = "dummy/file/path.json"

        connection = connect(
            INSTANCE,
            DATABASE,
            PROJECT,
            credentials=credentials_path,
            user_agent=USER_AGENT,
        )

        self.assertIsInstance(connection, Connection)
        factory = mock_client.from_service_account_json
        factory.assert_called_once_with(
            credentials_path, project=PROJECT, client_info=mock.ANY,
        )
        client_info = factory.call_args_list[0][1]["client_info"]
        self.assertEqual(client_info.user_agent, USER_AGENT)
        self.assertEqual(client_info.python_version, PY_VERSION)
| 34.45 | 87 | 0.716774 | 3,890 | 0.806552 | 0 | 0 | 3,811 | 0.790172 | 0 | 0 | 773 | 0.160274 |
96dcf01c244ee7438297fd322846f89558653bfd | 943 | py | Python | section_3/5_magic_methods/main.py | hgohel/Python-for-Everyday-Life | 957963e67dca8c2d20a86fc7e66e818c80d013aa | [
"MIT"
] | 43 | 2018-04-09T11:59:11.000Z | 2022-01-29T14:27:37.000Z | section_3/5_magic_methods/main.py | hgohel/Python-for-Everyday-Life | 957963e67dca8c2d20a86fc7e66e818c80d013aa | [
"MIT"
] | 12 | 2019-11-03T16:50:39.000Z | 2021-09-07T23:52:37.000Z | section_3/5_magic_methods/main.py | hgohel/Python-for-Everyday-Life | 957963e67dca8c2d20a86fc7e66e818c80d013aa | [
"MIT"
] | 45 | 2018-05-10T21:40:46.000Z | 2022-03-01T05:50:07.000Z | # -*- coding: utf-8 -*-
# !/usr/bin/env python3
if __name__ == '__main__':
    import money

    # Crafting money: individual Money amounts in two currencies.
    euro = money.Money(1.0, 'EUR')
    five_euros = money.Money(5.0, 'EUR')
    ten_euros = money.Money(10.0, 'EUR')
    dollar = money.Money(1.0, 'USD')
    # Money algebra: '+' and '-' exercise Money's arithmetic operator overloads.
    eleven_euros = euro + ten_euros
    sixteen_euros = euro + five_euros + ten_euros
    six_euros = sixteen_euros - ten_euros
    # Money comparisons: '>' and '==' exercise the rich-comparison overloads.
    print('11 EUR > 6 EUR ? {}'.format(eleven_euros > six_euros))
    print('11 EUR == (10 EUR + 1 EUR) ? {}'.format(eleven_euros == ten_euros + euro))
    print('11 EUR > 50 EUR ? {}'.format(eleven_euros > money.Money(50.0, 'EUR')))
    # Playing with a wallet: str()/len()/iteration exercise the container
    # protocol on Wallet.
    wallet = money.Wallet('My Wallet')
    wallet.add(euro)
    wallet.add(ten_euros)
    wallet.add(dollar)
    print('\n{} has {} items:'.format(str(wallet), len(wallet)))
    for item in wallet:
        print('{}'.format(str(item)))