hexsha stringlengths 40 40 | size int64 5 2.06M | ext stringclasses 11 values | lang stringclasses 1 value | max_stars_repo_path stringlengths 3 251 | max_stars_repo_name stringlengths 4 130 | max_stars_repo_head_hexsha stringlengths 40 78 | max_stars_repo_licenses listlengths 1 10 | max_stars_count int64 1 191k ⌀ | max_stars_repo_stars_event_min_datetime stringlengths 24 24 ⌀ | max_stars_repo_stars_event_max_datetime stringlengths 24 24 ⌀ | max_issues_repo_path stringlengths 3 251 | max_issues_repo_name stringlengths 4 130 | max_issues_repo_head_hexsha stringlengths 40 78 | max_issues_repo_licenses listlengths 1 10 | max_issues_count int64 1 116k ⌀ | max_issues_repo_issues_event_min_datetime stringlengths 24 24 ⌀ | max_issues_repo_issues_event_max_datetime stringlengths 24 24 ⌀ | max_forks_repo_path stringlengths 3 251 | max_forks_repo_name stringlengths 4 130 | max_forks_repo_head_hexsha stringlengths 40 78 | max_forks_repo_licenses listlengths 1 10 | max_forks_count int64 1 105k ⌀ | max_forks_repo_forks_event_min_datetime stringlengths 24 24 ⌀ | max_forks_repo_forks_event_max_datetime stringlengths 24 24 ⌀ | content stringlengths 1 1.05M | avg_line_length float64 1 1.02M | max_line_length int64 3 1.04M | alphanum_fraction float64 0 1 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
7907243674e9e866161964f1907b28118b6c5588 | 7,238 | py | Python | test/functional/test_f_xcompat.py | farleyb-amazon/aws-encryption-sdk-python | 7950abd73ee333407d2dadd02ef2d57c3df464cf | [
"Apache-2.0"
] | 95 | 2018-08-20T23:10:00.000Z | 2022-02-17T02:54:32.000Z | test/functional/test_f_xcompat.py | farleyb-amazon/aws-encryption-sdk-python | 7950abd73ee333407d2dadd02ef2d57c3df464cf | [
"Apache-2.0"
] | 220 | 2018-08-01T20:56:29.000Z | 2022-03-28T18:12:35.000Z | test/functional/test_f_xcompat.py | farleyb-amazon/aws-encryption-sdk-python | 7950abd73ee333407d2dadd02ef2d57c3df464cf | [
"Apache-2.0"
] | 63 | 2018-08-01T19:37:33.000Z | 2022-03-20T17:14:15.000Z | # Copyright 2017 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
"""Functional test suite testing decryption of known good test files encrypted using static RawMasterKeyProvider."""
import base64
import json
import logging
import os
import sys
from collections import defaultdict
import attr
import pytest
import six
import aws_encryption_sdk
from aws_encryption_sdk.exceptions import InvalidKeyIdError
from aws_encryption_sdk.identifiers import EncryptionKeyType, WrappingAlgorithm
from aws_encryption_sdk.internal.crypto.wrapping_keys import WrappingKey
from aws_encryption_sdk.internal.str_ops import to_bytes
from aws_encryption_sdk.key_providers.raw import RawMasterKeyProvider
# Apply the "accept" marker to every test generated from this module.
pytestmark = [pytest.mark.accept]
# Environment-specific test file locator. May not always exist.
# NOTE(review): ``_file_root`` is not defined anywhere in this chunk --
# presumably a fallback ``def _file_root(): ...`` existed above and was lost;
# confirm, otherwise this except branch raises NameError.
try:
    from .aws_test_file_finder import file_root
except ImportError:
    file_root = _file_root
_LOGGER = logging.getLogger()
# Maps (cipher family -> key bits -> padding algorithm -> padding hash) onto
# the WrappingAlgorithm enum used to build raw master keys. RSA uses a
# defaultdict because the same padding options apply for any RSA key size.
_WRAPPING_ALGORITHM_MAP = {
    b"AES": {
        128: {b"": {b"": WrappingAlgorithm.AES_128_GCM_IV12_TAG16_NO_PADDING}},
        192: {b"": {b"": WrappingAlgorithm.AES_192_GCM_IV12_TAG16_NO_PADDING}},
        256: {b"": {b"": WrappingAlgorithm.AES_256_GCM_IV12_TAG16_NO_PADDING}},
    },
    b"RSA": defaultdict(
        lambda: {
            b"PKCS1": {b"": WrappingAlgorithm.RSA_PKCS1},
            b"OAEP-MGF1": {
                b"SHA-1": WrappingAlgorithm.RSA_OAEP_SHA1_MGF1,
                b"SHA-256": WrappingAlgorithm.RSA_OAEP_SHA256_MGF1,
                b"SHA-384": WrappingAlgorithm.RSA_OAEP_SHA384_MGF1,
                b"SHA-512": WrappingAlgorithm.RSA_OAEP_SHA512_MGF1,
            },
        }
    ),
}
# AES keys are symmetric; RSA keys in the manifest are private keys.
_KEY_TYPES_MAP = {b"AES": EncryptionKeyType.SYMMETRIC, b"RSA": EncryptionKeyType.PRIVATE}
# Populated by _generate_test_cases(): algorithm -> key bits -> raw key bytes.
_STATIC_KEYS = defaultdict(dict)
def _generate_test_cases(): # noqa: C901
    """Build decryption test scenarios from the on-disk ciphertext manifest.

    Loads ``aws_encryption_sdk_resources/manifests/ciphertext.manifest`` under
    the resource root, stores the static master keys it describes into the
    module-level ``_STATIC_KEYS`` mapping, and returns a list of ``Scenario``
    objects (defined elsewhere in this module) pairing plaintext/ciphertext
    filenames with the key ids needed to decrypt them.

    Returns an empty list when the manifest cannot be found, so the
    parameterized suite is effectively skipped in that environment.
    """
    # Locate the resource root; fall back to the working directory if the
    # environment-specific locator fails or points at a missing directory.
    try:
        root_dir = os.path.abspath(file_root())
    except Exception: # pylint: disable=broad-except
        root_dir = os.getcwd()
    if not os.path.isdir(root_dir):
        root_dir = os.getcwd()
    base_dir = os.path.join(root_dir, "aws_encryption_sdk_resources")
    ciphertext_manifest_path = os.path.join(base_dir, "manifests", "ciphertext.manifest")
    if not os.path.isfile(ciphertext_manifest_path):
        # Make no test cases if the ciphertext file is not found
        return []
    with open(ciphertext_manifest_path, encoding="utf-8") as f:
        ciphertext_manifest = json.load(f)
    _test_cases = []
    # Collect keys from ciphertext manifest
    for algorithm, keys in ciphertext_manifest["test_keys"].items():
        algorithm = to_bytes(algorithm.upper())
        for key_bits, key_desc in keys.items():
            # NOTE(review): ``key_desc`` is indexed like a dict below
            # (``key_desc["key"]``, ``.get(...)``), so passing it through
            # ``to_bytes`` here looks wrong -- confirm against the manifest
            # schema / upstream source before relying on this path.
            key_desc = to_bytes(key_desc)
            key_bits = int(key_bits)
            # Multi-line keys (e.g. PEM) are joined with the declared separator.
            raw_key = to_bytes(key_desc.get("line_separator", "").join(key_desc["key"]))
            if key_desc["encoding"].lower() in ("raw", "pem"):
                _STATIC_KEYS[algorithm][key_bits] = raw_key
            elif key_desc["encoding"].lower() == "base64":
                _STATIC_KEYS[algorithm][key_bits] = base64.b64decode(raw_key)
            else:
                raise Exception("TODO" + "Unknown key encoding")
    # Collect test cases from ciphertext manifest
    for test_case in ciphertext_manifest["test_cases"]:
        key_ids = []
        # Algorithm suite id in the manifest is a hex string (base 16).
        algorithm = aws_encryption_sdk.Algorithm.get_by_id(int(test_case["algorithm"], 16))
        for key in test_case["master_keys"]:
            sys.stderr.write("XC:: " + json.dumps(key) + "\n")
            # Only keys owned by the static raw provider (defined elsewhere in
            # this module) are usable here; other providers are skipped.
            if key["provider_id"] == StaticStoredMasterKeyProvider.provider_id:
                key_ids.append(
                    RawKeyDescription(
                        key["encryption_algorithm"],
                        # Key size defaults to the suite's data key length in bits.
                        key.get("key_bits", algorithm.data_key_len * 8),
                        key.get("padding_algorithm", ""),
                        key.get("padding_hash", ""),
                    ).key_id
                )
        if key_ids:
            _test_cases.append(
                Scenario(
                    os.path.join(base_dir, test_case["plaintext"]["filename"]),
                    os.path.join(base_dir, test_case["ciphertext"]["filename"]),
                    key_ids,
                )
            )
    return _test_cases
| 39.336957 | 116 | 0.678088 |
7907463be0399381dbb251da2399a40b35f47313 | 986 | py | Python | {{cookiecutter.app_name}}/{{cookiecutter.app_name}}/app.py | opatua/cookiecutter-flask-api | 67e5a37ee2b8ca32823ac2f8051bab6653b3b44e | [
"MIT"
] | 2 | 2019-04-06T05:08:15.000Z | 2019-04-06T19:23:44.000Z | {{cookiecutter.app_name}}/{{cookiecutter.app_name}}/app.py | opatua/cookiecutter-flask-api | 67e5a37ee2b8ca32823ac2f8051bab6653b3b44e | [
"MIT"
] | null | null | null | {{cookiecutter.app_name}}/{{cookiecutter.app_name}}/app.py | opatua/cookiecutter-flask-api | 67e5a37ee2b8ca32823ac2f8051bab6653b3b44e | [
"MIT"
] | null | null | null | from flask import Flask
from flask_restful import Api
from flask_cors import CORS
from flask_migrate import Migrate, MigrateCommand
from flask_script import Manager
from {{cookiecutter.app_name}}.config import app_config
from {{cookiecutter.app_name}}.models import db, bcrypt
from {{cookiecutter.app_name}}.resources import Login, Register
from {{cookiecutter.app_name}}.schemas import ma
def create_app(env_name):
    """Application factory: build and configure the Flask app.

    :param env_name: key into ``app_config`` selecting the configuration
        object (e.g. development / testing / production).
    :return: the configured :class:`flask.Flask` instance with CORS,
        bcrypt, SQLAlchemy, Marshmallow, migrations and auth routes wired up.
    """
    # app initialization
    app = Flask(__name__)
    CORS(app)
    app.config.from_object(app_config[env_name])
    # initializing bcrypt and db
    bcrypt.init_app(app)
    db.init_app(app)
    ma.init_app(app)
    migrate = Migrate(app, db)
    manager = Manager(app)
    manager.add_command('db', MigrateCommand)
    # NOTE(review): this guard is inside the factory, so ``__name__`` is the
    # module name when imported and the branch never runs; if the module were
    # executed directly, ``manager.run()`` would block *before* the routes
    # below are registered. Consider moving this to module level -- confirm.
    if __name__ == '__main__':
        manager.run()
    # Route
    api = Api(app)
    # user endpoint
    api.add_resource(Login, '/auth/login')
    api.add_resource(Register, '/auth/register')
    return app
| 22.930233 | 63 | 0.704868 |
79085a6c06f94f9781c1a341cbcc3d429b30a260 | 17,381 | py | Python | docs/examples/Moving_Platform_Simulation.py | Red-Portal/Stone-Soup-1 | 267621c86161a839da9b144c2745d28d9166d903 | [
"MIT"
] | 157 | 2019-04-14T20:43:11.000Z | 2022-03-30T08:30:33.000Z | docs/examples/Moving_Platform_Simulation.py | Red-Portal/Stone-Soup-1 | 267621c86161a839da9b144c2745d28d9166d903 | [
"MIT"
] | 364 | 2019-04-18T15:54:49.000Z | 2022-03-31T09:50:02.000Z | docs/examples/Moving_Platform_Simulation.py | Red-Portal/Stone-Soup-1 | 267621c86161a839da9b144c2745d28d9166d903 | [
"MIT"
] | 86 | 2019-04-20T02:01:18.000Z | 2022-03-28T01:03:11.000Z | #!/usr/bin/env python
# coding: utf-8
"""
Multi-Sensor Moving Platform Simulation Example
===============================================
This example looks at how multiple sensors can be mounted on a single moving platform and exploiting a defined moving
platform as a sensor target.
"""
# %%
# Building a Simulated Multi-Sensor Moving Platform
# -------------------------------------------------
# The focus of this example is to show how to setup and configure a simulation environment in order to provide a
# multi-sensor moving platform, as such the application of a tracker will not be covered in detail. For more information
# about trackers and how to configure them review of the tutorials and demonstrations is recommended.
#
# This example makes use of Stone Soup :class:`~.MovingPlatform`, :class:`~.MultiTransitionMovingPlatform` and
# :class:`~.Sensor` objects.
#
# In order to configure platforms, sensors and the simulation we will need to import some specific Stone Soup objects.
# As these have been introduced in previous tutorials they are imported upfront. New functionality within this example
# will be imported at the relevant point in order to draw attention to the new features.
# Some general imports and set up
from datetime import datetime
from datetime import timedelta
from matplotlib import pyplot as plt
import numpy as np
# Stone Soup imports:
from stonesoup.types.state import State, GaussianState
from stonesoup.types.array import StateVector
from stonesoup.types.array import CovarianceMatrix
from stonesoup.models.transition.linear import (
CombinedLinearGaussianTransitionModel, ConstantVelocity)
from stonesoup.predictor.particle import ParticlePredictor
from stonesoup.resampler.particle import SystematicResampler
from stonesoup.updater.particle import ParticleUpdater
from stonesoup.measures import Mahalanobis
from stonesoup.hypothesiser.distance import DistanceHypothesiser
from stonesoup.dataassociator.neighbour import GNNWith2DAssignment
from stonesoup.tracker.simple import SingleTargetTracker
# Define the simulation start time
start_time = datetime.now()
# %%
# Create a multi-sensor platform
# ------------------------------
# We have previously demonstrated how to create a :class:`~.FixedPlatform` which exploited a
# :class:`~.RadarRangeBearingElevation` *Sensor* in order to detect and track targets generated within a
# :class:`~.MultiTargetGroundTruthSimulator`.
#
# In this example we are going to create a moving platform which will be mounted with a pair of sensors and moves within
# a 6 dimensional state space according to the following :math:`\mathbf{x}`.
#
# .. math::
# \mathbf{x} = \begin{bmatrix}
# x\\ \dot{x}\\ y\\ \dot{y}\\ z\\ \dot{z} \end{bmatrix}
# = \begin{bmatrix}
# 0\\ 0\\ 0\\ 50\\ 8000\\ 0 \end{bmatrix}
#
# The platform will be initiated with a near constant velocity model which has been parameterised to have zero noise.
# Therefore the platform location at time :math:`k` is given by :math:`F_{k}x_{k-1}` where :math:`F_{k}` is given by:
#
# .. math::
# F_{k} = \begin{bmatrix}
# 1 & \triangle k & 0 & 0 & 0 & 0\\
# 0 & 1 & 0 & 0 & 0 & 0\\
# 0 & 0 & 1 & \triangle k & 0 & 0\\
# 0 & 0 & 0 & 1 & 0 & 0\\
# 0 & 0 & 0 & 0 & 1 & \triangle k \\
# 0 & 0 & 0 & 0 & 0 & 1\\
# \end{bmatrix}
# First import the Moving platform
from stonesoup.platform.base import MovingPlatform
# Define the initial platform position, in this case the origin
initial_loc = StateVector([[0], [0], [0], [50], [8000], [0]])
initial_state = State(initial_loc, start_time)
# Define transition model and position for 3D platform
transition_model = CombinedLinearGaussianTransitionModel(
[ConstantVelocity(0.), ConstantVelocity(0.), ConstantVelocity(0.)])
# create our fixed platform
sensor_platform = MovingPlatform(states=initial_state,
position_mapping=(0, 2, 4),
velocity_mapping=(1, 3, 5),
transition_model=transition_model)
# %%
# With our platform generated we now need to build a set of sensors which will be mounted onto the platform. In this
# case we will exploit a :class:`~.RadarElevationBearingRangeRate` and a :class:`~.PassiveElevationBearing` sensor
# (e.g. an optical sensor, which has no capability to directly measure range).
#
# First we will create a radar which is capable of measuring bearing (:math:`\phi`), elevation (:math:`\theta`), range
# (:math:`r`) and range-rate (:math:`\dot{r}`) of the target platform.
# Import a range rate bearing elevation capable radar
from stonesoup.sensor.radar.radar import RadarElevationBearingRangeRate
# Create a radar sensor
radar_noise_covar = CovarianceMatrix(np.diag(
np.array([np.deg2rad(3), # Elevation
np.deg2rad(3), # Bearing
100., # Range
25.]))) # Range Rate
# radar mountings
radar_mounting_offsets = StateVector([10, 0, 0]) # e.g. nose cone
radar_rotation_offsets = StateVector([0, 0, 0])
# Mount the radar onto the platform
radar = RadarElevationBearingRangeRate(ndim_state=6,
position_mapping=(0, 2, 4),
velocity_mapping=(1, 3, 5),
noise_covar=radar_noise_covar,
mounting_offset=radar_mounting_offsets,
rotation_offset=radar_rotation_offsets,
)
sensor_platform.add_sensor(radar)
# %%
# Our second sensor is a passive sensor, capable of measuring the bearing (:math:`\phi`) and elevation (:math:`\theta`)
# of the target platform. For the purposes of this example we will assume that the passive sensor is an imager.
# The imager sensor model is described by the following equations:
#
# .. math::
# \mathbf{z}_k = h(\mathbf{x}_k, \dot{\mathbf{x}}_k)
#
# where:
#
# * :math:`\mathbf{z}_k` is a measurement vector of the form:
#
# .. math::
# \mathbf{z}_k = \begin{bmatrix} \theta \\ \phi \end{bmatrix}
#
# * :math:`h` is a non - linear model function of the form:
#
# .. math::
# h(\mathbf{x}_k,\dot{\mathbf{x}}_k) = \begin{bmatrix}
# \arcsin(\mathcal{z} /\sqrt{\mathcal{x} ^ 2 + \mathcal{y} ^ 2 +\mathcal{z} ^ 2}) \\
# \arctan(\mathcal{y},\mathcal{x}) \ \
# \end{bmatrix} + \dot{\mathbf{x}}_k
#
# * :math:`\mathbf{z}_k` is Gaussian distributed with covariance :math:`R`, i.e.:
#
# .. math::
# \mathbf{z}_k \sim \mathcal{N}(0, R)
#
# .. math::
# R = \begin{bmatrix}
# \sigma_{\theta}^2 & 0 \\
# 0 & \sigma_{\phi}^2 \\
# \end{bmatrix}
# Import a passive sensor capability
from stonesoup.sensor.passive import PassiveElevationBearing
imager_noise_covar = CovarianceMatrix(np.diag(np.array([np.deg2rad(0.05), # Elevation
np.deg2rad(0.05)]))) # Bearing
# imager mounting offset
imager_mounting_offsets = StateVector([0, 8, -1]) # e.g. wing mounted imaging pod
imager_rotation_offsets = StateVector([0, 0, 0])
# Mount the imager onto the platform
imager = PassiveElevationBearing(ndim_state=6,
mapping=(0, 2, 4),
noise_covar=imager_noise_covar,
mounting_offset=imager_mounting_offsets,
rotation_offset=imager_rotation_offsets,
)
sensor_platform.add_sensor(imager)
# %%
# Notice that we have added sensors to specific locations on the aircraft, defined by the mounting_offset parameter.
# The values in this array are defined in the platforms local coordinate frame of reference. So in this case an offset
# of :math:`[0, 8, -1]` means the sensor is located 8 meters to the right and 1 meter below the center point of the
# platform.
#
# Now that we have mounted the two sensors we can see that the platform object has both associated with it:
sensor_platform.sensors
# %%
# Create a Target Platform
# ------------------------
# There are two ways of generating a target in Stone Soup. Firstly, we can use the inbuilt ground-truth generator
# functionality within Stone Soup, which we demonstrated in the previous example, and creates a random target based on
# our selected parameters. The second method provides a means to generate a target which will perform specific
# behaviours, this is the approach we will take here.
#
# In order to create a target which moves in pre-defined sequences we exploit the fact that platforms can be used as
# sensor targets within a simulation, coupled with the :class:`~.MultiTransitionMovingPlatform` which enables a platform
# to be provided with a pre-defined list of transition models and transition times. The platform will continue to loop
# over the transition sequence provided until the simulation ends.
#
# When simulating sensor platforms it is important to note that within the simulation Stone Soup treats all platforms as
# potential targets. Therefore if we created multiple sensor platforms they would each *sense* all other platforms
# within the simulation (sensor-target geometry dependant).
#
# For this example we will create an air target which will fly a sequence of straight and level followed by a
# coordinated turn in the :math:`x-y` plane. This is configured such that the target will perform each manoeuvre for 8
# seconds, and it will turn through 45 degrees over the course of the turn manoeuvre.
# Import a Constant Turn model to enable target to perform basic manoeuvre
from stonesoup.models.transition.linear import ConstantTurn
straight_level = CombinedLinearGaussianTransitionModel(
[ConstantVelocity(0.), ConstantVelocity(0.), ConstantVelocity(0.)])
# Configure the aircraft turn behaviour
turn_noise_diff_coeffs = np.array([0., 0.])
turn_rate = np.pi/32 # specified in radians per seconds...
turn_model = ConstantTurn(turn_noise_diff_coeffs=turn_noise_diff_coeffs, turn_rate=turn_rate)
# Configure turn model to maintain current altitude
turning = CombinedLinearGaussianTransitionModel(
[turn_model, ConstantVelocity(0.)])
manoeuvre_list = [straight_level, turning]
manoeuvre_times = [timedelta(seconds=8),
timedelta(seconds=8)]
# %%
# Now that we have created a list of manoeuvre behaviours and durations we can build our multi-transition moving
# platform. Because we intend for this platform to be a target we do not need to attach any sensors to it.
# Import a multi-transition moving platform
from stonesoup.platform.base import MultiTransitionMovingPlatform
initial_target_location = StateVector([[0], [-40], [1800], [0], [8000], [0]])
initial_target_state = State(initial_target_location, start_time)
target = MultiTransitionMovingPlatform(transition_models=manoeuvre_list,
transition_times=manoeuvre_times,
states=initial_target_state,
position_mapping=(0, 2, 4),
velocity_mapping=(1, 3, 5),
sensors=None)
# %%
# Creating the simulator
# ----------------------
# Now that we have build our sensor platform and a target platform we need to wrap them in a simulator. Because we do
# not want any additional ground truth objects, which is how most simulators work in Stone Soup, we need to use a
# :class:`~.DummyGroundTruthSimulator` which returns a set of empty ground truth paths with timestamps. These are then
# feed into a :class:`~.PlatformDetectionSimulator` with the two platforms we have already built.
# Import the required simulators
from stonesoup.simulator.simple import DummyGroundTruthSimulator
from stonesoup.simulator.platform import PlatformDetectionSimulator
# %%
# We now need to create an array of timestamps which starts at *datetime.now()* and enable the simulator to run for
# 24 one-second time steps (t = 0 s .. 23 s).
times = np.arange(0, 24, 1) # 24 steps, one per second (0..23)
timestamps = [start_time + timedelta(seconds=float(elapsed_time)) for elapsed_time in times]
# The dummy ground-truth simulator only supplies the clock; the platforms themselves act as targets.
truths = DummyGroundTruthSimulator(times=timestamps)
sim = PlatformDetectionSimulator(groundtruth=truths, platforms=[sensor_platform, target])
# %%
# Create a Tracker
# ------------------------------------
# Now that we have setup our sensor platform, target and simulation we need to create a tracker. For this example we
# will use a Particle Filter as this enables us to handle the non-linear nature of the imaging sensor. In this example
# we will use an inflated constant noise model to account for target motion uncertainty.
#
# Note that we don't add a measurement model to the updater, this is because each sensor adds their measurement model to
# each detection they generate. The tracker handles this internally by checking for a measurement model with each
# detection it receives and applying only the relevant measurement model.
target_transition_model = CombinedLinearGaussianTransitionModel(
[ConstantVelocity(5), ConstantVelocity(5), ConstantVelocity(1)])
# First add a Particle Predictor
predictor = ParticlePredictor(target_transition_model)
# Now create a resampler and particle updater
resampler = SystematicResampler()
updater = ParticleUpdater(measurement_model=None,
resampler=resampler)
# Create a particle initiator
from stonesoup.initiator.simple import GaussianParticleInitiator, SinglePointInitiator
single_point_initiator = SinglePointInitiator(
GaussianState([[0], [-40], [2000], [0], [8000], [0]], np.diag([10000, 1000, 10000, 1000, 10000, 1000])),
None)
initiator = GaussianParticleInitiator(number_particles=500,
initiator=single_point_initiator)
hypothesiser = DistanceHypothesiser(predictor, updater, measure=Mahalanobis(), missed_distance=np.inf)
data_associator = GNNWith2DAssignment(hypothesiser)
from stonesoup.deleter.time import UpdateTimeStepsDeleter
deleter = UpdateTimeStepsDeleter(time_steps_since_update=10)
# Create a Kalman single-target tracker
tracker = SingleTargetTracker(
initiator=initiator,
deleter=deleter,
detector=sim,
data_associator=data_associator,
updater=updater
)
# %%
# The final step is to iterate our tracker over the simulation and plot out the results. Because we have a bearing
# only sensor it does not make sense to plot out the detections without animating the resulting plot. This
# animation shows the sensor platform (blue) moving towards the true target position (red). The estimated target
# position is shown in black, radar detections are shown in yellow while the bearing only imager detections are
# coloured green.
from matplotlib import animation
import matplotlib
matplotlib.rcParams['animation.html'] = 'jshtml'
from stonesoup.models.measurement.nonlinear import CartesianToElevationBearingRangeRate
from stonesoup.functions import sphere2cart
fig = plt.figure(figsize=(10, 6))
ax = fig.add_subplot(1, 1, 1)
frames = []
# Run the tracker; each iteration pulls the next time step from the simulator.
for time, ctracks in tracker:
    artists = []
    ax.set_xlabel("$East$")
    ax.set_ylabel("$North$")
    ax.set_ylim(0, 2250)
    ax.set_xlim(-1000, 1000)
    # Sensor platform trajectory so far (blue).
    X = [state.state_vector[0] for state in sensor_platform]
    Y = [state.state_vector[2] for state in sensor_platform]
    artists.extend(ax.plot(X, Y, color='b'))
    for detection in sim.detections:
        if isinstance(detection.measurement_model, CartesianToElevationBearingRangeRate):
            # Radar detection: invert the measurement back to Cartesian x/y (yellow).
            x, y = detection.measurement_model.inverse_function(detection)[[0, 2]]
            color = 'y'
        else:
            # Bearing-only imager detection: no range measured, so draw a long
            # ray along the measured direction (green); r is an arbitrary
            # large range used only for plotting.
            r = 10000000
            # extract the platform rotation offsets
            _, el_offset, az_offset = sensor_platform.orientation
            # obtain measurement angles and map to cartesian
            e, a = detection.state_vector
            x, y, _ = sphere2cart(r, a + az_offset, e + el_offset)
            color = 'g'
        # Line from the sensor platform to the detection point.
        X = [sensor_platform.state_vector[0], x]
        Y = [sensor_platform.state_vector[2], y]
        artists.extend(ax.plot(X, Y, color=color))
    # True target trajectory (red).
    X = [state.state_vector[0] for state in target]
    Y = [state.state_vector[2] for state in target]
    artists.extend(ax.plot(X, Y, color='r'))
    # Estimated track(s) (black).
    for track in ctracks:
        X = [state.state_vector[0] for state in track]
        Y = [state.state_vector[2] for state in track]
        artists.extend(ax.plot(X, Y, color='k'))
    frames.append(artists)
# Assemble the per-step artist lists into a single animation.
animation.ArtistAnimation(fig, frames)
# %%
# To increase your confidence with simulated platform targets it would be good practice to modify the target to fly
# pre-defined shapes, a race track oval for example. You could also experiment with different sensor performance levels
# in order to see at what point the tracker is no longer able to generate a reasonable estimate of the target location.
# %%
# Key points
# ----------
# 1. Platforms, static or moving, can be used as targets for sensor platforms.
# 2. Simulations can be built with only known platform behaviours when you want to test specific scenarios.
# 3. A tracker can be configured to exploit all sensor data created in a simulation.
| 44.452685 | 120 | 0.693976 |
79093ae44bacb9494b8349f6098239d9b14a8d37 | 567 | py | Python | Glyph-Builders/lowercase_from_upper.py | m4rc1e/mf-glyphs-scripts | c5ed026e5b72a886f1e574f85659cdcae041e66a | [
"MIT"
] | 27 | 2015-09-01T00:19:34.000Z | 2021-12-05T01:59:01.000Z | Glyph-Builders/lowercase_from_upper.py | m4rc1e/mf-glyphs-scripts | c5ed026e5b72a886f1e574f85659cdcae041e66a | [
"MIT"
] | 26 | 2016-01-03T09:31:39.000Z | 2018-06-01T18:05:58.000Z | Glyph-Builders/lowercase_from_upper.py | m4rc1e/mf-glyphs-scripts | c5ed026e5b72a886f1e574f85659cdcae041e66a | [
"MIT"
] | 7 | 2016-01-03T07:09:04.000Z | 2018-04-06T00:24:14.000Z | #MenuTitle: Generate lowercase from uppercase
"""
Generate lowercase a-z from uppercase A-Z
TODO (M Foley) Generate all lowercase glyphs, not just a-z
"""
# Build each lowercase a-z glyph as a component reference to its uppercase form.
current_font = Glyphs.font
master_count = len(current_font.masters)
for name in 'abcdefghijklmnopqrstuvwxyz':
    new_glyph = GSGlyph(name)
    new_glyph.updateGlyphInfo()
    current_font.glyphs.append(new_glyph)
    source_name = name.upper()
    # Place the uppercase component at the origin on every master layer.
    for layer_index in range(master_count):
        new_glyph.layers[layer_index].components.append(GSComponent(source_name, (0, 0)))
| 24.652174 | 58 | 0.714286 |
7909cb31dce0f5d0d244a16c56e9e7a864d3c124 | 2,372 | py | Python | src/gui/SubVision.py | bochkovoi/AHP | b51dc598f8f7a65a2ade039d887dccfa6d070f1e | [
"MIT"
] | null | null | null | src/gui/SubVision.py | bochkovoi/AHP | b51dc598f8f7a65a2ade039d887dccfa6d070f1e | [
"MIT"
] | null | null | null | src/gui/SubVision.py | bochkovoi/AHP | b51dc598f8f7a65a2ade039d887dccfa6d070f1e | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
from PyQt5 import QtWidgets, QtGui, QtCore
import sys, os.path as op
# Path hack: make the sibling ``Structure`` directory and the parent package
# importable when this module is run directly (assumes the repo layout
# ``src/Structure`` next to ``src/gui``).
path1 = op.join( op.abspath(op.dirname(__file__)), '..', 'Structure')
path2 = op.join( op.abspath(op.dirname(__file__)), '..')
sys.path.append(path1)
sys.path.append(path2)
from Structure import *
from VisObject import *
| 38.885246 | 85 | 0.643339 |
790a31602a2e6231958a1ed23fbe61a5ef5fd6fa | 23 | py | Python | examples/ndfd/ndfd.py | eLBati/pyxb | 14737c23a125fd12c954823ad64fc4497816fae3 | [
"Apache-2.0"
] | 123 | 2015-01-12T06:43:22.000Z | 2022-03-20T18:06:46.000Z | examples/ndfd/ndfd.py | eLBati/pyxb | 14737c23a125fd12c954823ad64fc4497816fae3 | [
"Apache-2.0"
] | 103 | 2015-01-08T18:35:57.000Z | 2022-01-18T01:44:14.000Z | examples/ndfd/ndfd.py | eLBati/pyxb | 14737c23a125fd12c954823ad64fc4497816fae3 | [
"Apache-2.0"
] | 54 | 2015-02-15T17:12:00.000Z | 2022-03-07T23:02:32.000Z | from raw.ndfd import *
| 11.5 | 22 | 0.73913 |
790a4f9b1ca5315576470030e7218150601d0818 | 56 | py | Python | pandoc_mustache/__init__.py | copart/pandoc-mustache | f6ace29cd0c8d6b4d8f182eedcf36ad38a2412fa | [
"CC0-1.0"
] | 43 | 2017-12-27T05:57:00.000Z | 2022-03-18T10:07:28.000Z | pandoc_mustache/__init__.py | copart/pandoc-mustache | f6ace29cd0c8d6b4d8f182eedcf36ad38a2412fa | [
"CC0-1.0"
] | 10 | 2018-02-07T11:20:37.000Z | 2021-04-22T21:44:19.000Z | pandoc_mustache/__init__.py | copart/pandoc-mustache | f6ace29cd0c8d6b4d8f182eedcf36ad38a2412fa | [
"CC0-1.0"
] | 8 | 2018-11-05T13:10:35.000Z | 2021-08-30T18:14:02.000Z | from .version import __version__
import pandoc_mustache
| 18.666667 | 32 | 0.875 |
790a863e1b7c7976c78fdf15265431950cd90024 | 5,163 | py | Python | espnet2/gan_tts/espnet_model.py | actboy/espnet | c0ca15e9da6e89ff6df5fe70ed08654deeca2ac0 | [
"Apache-2.0"
] | null | null | null | espnet2/gan_tts/espnet_model.py | actboy/espnet | c0ca15e9da6e89ff6df5fe70ed08654deeca2ac0 | [
"Apache-2.0"
] | 1 | 2021-08-11T08:35:36.000Z | 2021-08-13T07:12:47.000Z | espnet2/gan_tts/espnet_model.py | shirayu/espnet | 66f0f8382b0e1195bed7c280c29711f8436b3db4 | [
"Apache-2.0"
] | null | null | null | # Copyright 2021 Tomoki Hayashi
# Apache 2.0 (http://www.apache.org/licenses/LICENSE-2.0)
"""GAN-based TTS ESPnet model."""
from contextlib import contextmanager
from distutils.version import LooseVersion
from typing import Any
from typing import Dict
from typing import Optional
import torch
from typeguard import check_argument_types
from espnet2.gan_tts.abs_gan_tts import AbsGANTTS
from espnet2.layers.abs_normalize import AbsNormalize
from espnet2.layers.inversible_interface import InversibleInterface
from espnet2.train.abs_gan_espnet_model import AbsGANESPnetModel
from espnet2.tts.feats_extract.abs_feats_extract import AbsFeatsExtract
if LooseVersion(torch.__version__) >= LooseVersion("1.6.0"):
from torch.cuda.amp import autocast
else:
# Nothing to do if torch < 1.6.0
| 35.363014 | 81 | 0.627929 |
790b72b5977bc41bc1fa4f394888d33023e6e512 | 1,309 | py | Python | array/bot/others/P_ex07.py | timkphd/examples | 04c162ec890a1c9ba83498b275fbdc81a4704062 | [
"Unlicense"
] | 5 | 2020-11-01T00:29:22.000Z | 2022-01-24T19:09:47.000Z | array/bot/others/P_ex07.py | timkphd/examples | 04c162ec890a1c9ba83498b275fbdc81a4704062 | [
"Unlicense"
] | 1 | 2022-02-09T01:59:47.000Z | 2022-02-09T01:59:47.000Z | array/bot/others/P_ex07.py | timkphd/examples | 04c162ec890a1c9ba83498b275fbdc81a4704062 | [
"Unlicense"
] | null | null | null | #!/usr/bin/env python
#
# This program shows how to use MPI_Alltoall. Each processor
# send/rec a different random number to/from other processors.
#
# numpy is required
import numpy
from numpy import *
# mpi4py module
from mpi4py import MPI
import sys
# Initialize MPI and print out hello
# Set up the world communicator and identify this rank within it.
comm = MPI.COMM_WORLD
myid = comm.Get_rank()
numprocs = comm.Get_size()
print("hello from ", myid, " of ", numprocs)
# One slot per peer: s_vals[j] is sent to rank j; r_vals[j] is received from rank j.
s_vals = zeros(numprocs, "i")
r_vals = zeros(numprocs, "i")
# Seed with the rank so each process draws a distinct but reproducible sequence.
random.seed(myid)
for slot in range(numprocs):
    s_vals[slot] = random.randint(1, 10)
print("myid=", myid, "s_vals=", s_vals)
# All-to-all exchange: every rank swaps one element with every rank (itself included).
comm.Alltoall(s_vals, r_vals)
print("myid=", myid, "r_vals=", r_vals)
MPI.Finalize()
# Note, the sent values and the recv values are
# like a transpose of each other
#
# mpiexec -n 4 ./P_ex07.py | grep s_v | sort
# myid= 0 s_vals= [6 1 4 4]
# myid= 1 s_vals= [6 9 6 1]
# myid= 2 s_vals= [9 9 7 3]
# myid= 3 s_vals= [9 4 9 9]
# mpiexec -n 4 ./P_ex07.py | grep r_v | sort
# myid= 0 r_vals= [6 6 9 9]
# myid= 1 r_vals= [1 9 9 4]
# myid= 2 r_vals= [4 6 7 9]
# myid= 3 r_vals= [4 1 3 9]
| 20.453125 | 63 | 0.675325 |
790c207725e1c54d9a32196cd02ceb7f9a4e7af7 | 18,376 | py | Python | renderer/render_fmo.py | 12564985/DeFMO | 8ed9c2963678e2c59c7431ec8786302eea841572 | [
"MIT"
] | 1 | 2022-03-14T12:46:38.000Z | 2022-03-14T12:46:38.000Z | renderer/render_fmo.py | 12564985/DeFMO | 8ed9c2963678e2c59c7431ec8786302eea841572 | [
"MIT"
] | null | null | null | renderer/render_fmo.py | 12564985/DeFMO | 8ed9c2963678e2c59c7431ec8786302eea841572 | [
"MIT"
] | null | null | null | """ render_fmo.py renders obj file to rgb image with fmo model
Aviable function:
- clear_mash: delete all the mesh in the secene
- scene_setting_init: set scene configurations
- node_setting_init: set node configurations
- render: render rgb image for one obj file and one viewpoint
- render_obj: wrapper function for render() render
- init_all: a wrapper function, initialize all configurations
= set_image_path: reset defualt image output folder
author baiyu
modified by rozumden
"""
import sys
import os
import random
import pickle
import bpy
import glob
import numpy as np
from mathutils import Vector
from mathutils import Euler
import cv2
from PIL import Image
from skimage.draw import line_aa
from scipy import signal
from skimage.measure import regionprops
# import moviepy.editor as mpy
from array2gif import write_gif
abs_path = os.path.abspath(__file__)
sys.path.append(os.path.dirname(abs_path))
from render_helper import *
from settings import *
import settings
import pdb
def clear_mesh():
    """Delete every mesh object in the scene and purge orphaned data blocks.

    Mesh objects are selected and removed through the Blender operator
    API; afterwards any mesh/material/texture/image data block left with
    zero users is freed so that repeated imports do not accumulate stale
    data in ``bpy.data``.
    """
    bpy.ops.object.select_all(action='DESELECT')
    for scene_obj in bpy.data.objects:
        if scene_obj.type == 'MESH':
            scene_obj.select = True
    bpy.ops.object.delete()
    # Purge data blocks orphaned by the deletion above.
    for data_collection in (bpy.data.meshes, bpy.data.materials,
                            bpy.data.textures, bpy.data.images):
        for data_block in data_collection:
            if data_block.users == 0:
                data_collection.remove(data_block)
def scene_setting_init(use_gpu):
    """Initialize Blender render settings for the current scene.

    Configures the render engine, output image format, optional ambient
    (world background) lighting and the output resolution from the global
    ``g_*`` settings. When ``use_gpu`` is True the Cycles engine is forced
    and CUDA device 1 is selected for rendering.

    Args:
        use_gpu: render on the GPU (Cycles/CUDA) instead of the CPU.
    """
    sce = bpy.context.scene.name
    bpy.data.scenes[sce].render.engine = g_engine_type
    bpy.data.scenes[sce].cycles.film_transparent = g_use_film_transparent
    # output format
    bpy.data.scenes[sce].render.image_settings.color_mode = g_rgb_color_mode
    bpy.data.scenes[sce].render.image_settings.color_depth = g_rgb_color_depth
    bpy.data.scenes[sce].render.image_settings.file_format = g_rgb_file_format
    bpy.data.scenes[sce].render.use_overwrite = g_depth_use_overwrite
    bpy.data.scenes[sce].render.use_file_extension = g_depth_use_file_extension
    if g_ambient_light:
        world = bpy.data.worlds['World']
        world.use_nodes = True
        bg = world.node_tree.nodes['Background']
        bg.inputs[0].default_value[:3] = g_bg_color
        bg.inputs[1].default_value = 1.0
    # dimensions
    bpy.data.scenes[sce].render.resolution_x = g_resolution_x
    bpy.data.scenes[sce].render.resolution_y = g_resolution_y
    bpy.data.scenes[sce].render.resolution_percentage = g_resolution_percentage
    if use_gpu:
        bpy.data.scenes[sce].render.engine = 'CYCLES'  # only cycles engine can use gpu
        # Render tile size. BUGFIX: the original assigned tile_x twice and
        # never set tile_y, leaving it at its default -- set both.
        bpy.data.scenes[sce].render.tile_x = g_hilbert_spiral
        bpy.data.scenes[sce].render.tile_y = g_hilbert_spiral
        # Enable only CUDA device 1; disable device 0 and everything else.
        bpy.context.user_preferences.addons['cycles'].preferences.devices[0].use = False
        bpy.context.user_preferences.addons['cycles'].preferences.devices[1].use = True
        ndev = len(bpy.context.user_preferences.addons['cycles'].preferences.devices)
        print('Number of devices {}'.format(ndev))
        for ki in range(2,ndev):
            bpy.context.user_preferences.addons['cycles'].preferences.devices[ki].use = False
        bpy.context.user_preferences.addons['cycles'].preferences.compute_device_type = 'CUDA'
        # bpy.types.CyclesRenderSettings.device = 'GPU'
        bpy.data.scenes[sce].cycles.device = 'GPU'
def render(obj_path, viewpoint, temp_folder):
    """Render one FMO (fast-moving-object) sub-frame sequence for a viewpoint.

    Places the camera according to ``viewpoint``, then repeatedly samples a
    random linear motion (translation plus rotation spread over
    ``g_fmo_steps`` sub-frames) and renders each sub-frame through the
    compositor's file-output node. The object masks of the first and last
    sub-frames are checked against the image border; motions leaving the
    frame are re-sampled up to ``g_max_trials`` times.

    Args:
        obj_path: path of the .obj file (not used inside this function;
            kept for interface symmetry with the caller).
        viewpoint: viewpoint record providing azimuth, elevation, tilt and
            distance attributes.
        temp_folder: folder whose log file captures the renderer's console
            output (via open_log/close_log).

    Returns:
        Tuple ``(sample_different_object, sample_different_vp)``:
        ``(True, True)``  -> object has a transparent material, skip it;
        ``(False, True)`` -> re-sample the starting viewpoint;
        ``(False, False)`` -> rendering succeeded.
    """
    vp = viewpoint
    cam_location = camera_location(vp.azimuth, vp.elevation, vp.distance)
    cam_rot = camera_rot_XYZEuler(vp.azimuth, vp.elevation, vp.tilt)
    cam_obj = bpy.data.objects['Camera']
    cam_obj.location[0] = cam_location[0]
    cam_obj.location[1] = cam_location[1]
    cam_obj.location[2] = cam_location[2]
    cam_obj.rotation_euler[0] = cam_rot[0]
    cam_obj.rotation_euler[1] = cam_rot[1]
    cam_obj.rotation_euler[2] = cam_rot[2]
    if not os.path.exists(g_syn_rgb_folder):
        os.mkdir(g_syn_rgb_folder)
    obj = bpy.data.objects['model_normalized']
    ni = g_fmo_steps
    # Maximum translation per sequence and maximum rotation (rad) per step bound.
    maxlen = 0.5
    maxrot = 1.57/6
    tri = 0
    # rot_base = np.array([math.pi/2,0,0])
    while tri <= g_max_trials:
        do_repeat = False
        tri += 1
        if not g_apply_texture:
            # Reject objects with non-opaque materials outright.
            for oi in range(len(bpy.data.objects)):
                if bpy.data.objects[oi].type == 'CAMERA' or bpy.data.objects[oi].type == 'LAMP':
                    continue
                for tempi in range(len(bpy.data.objects[oi].data.materials)):
                    if bpy.data.objects[oi].data.materials[tempi].alpha != 1.0:
                        return True, True ## transparent object
        # Random linear motion: start pose plus constant per-step deltas.
        # Translation along the camera axis (x) is kept 10x smaller.
        los_start = Vector((random.uniform(-maxlen/10, maxlen/10), random.uniform(-maxlen, maxlen), random.uniform(-maxlen, maxlen)))
        loc_step = Vector((random.uniform(-maxlen/10, maxlen/10), random.uniform(-maxlen, maxlen), random.uniform(-maxlen, maxlen)))/ni
        rot_base = np.array((random.uniform(0, 2*math.pi), random.uniform(0, 2*math.pi), random.uniform(0, 2*math.pi)))
        rot_step = np.array((random.uniform(-maxrot, maxrot), random.uniform(-maxrot, maxrot), random.uniform(-maxrot, maxrot)))/ni
        old = open_log(temp_folder)
        # Render the first and last sub-frame first so border violations are
        # caught before spending time on the intermediate sub-frames.
        for ki in [0, ni-1]+list(range(1,ni-1)):
            for oi in range(len(bpy.data.objects)):
                if bpy.data.objects[oi].type == 'CAMERA' or bpy.data.objects[oi].type == 'LAMP':
                    continue
                bpy.data.objects[oi].location = los_start + loc_step*ki
                bpy.data.objects[oi].rotation_euler = Euler(rot_base + (rot_step*ki))
            bpy.context.scene.frame_set(ki + 1)
            bpy.ops.render.render(write_still=True) #start rendering
            if ki == 0 or ki == (ni-1):
                # Alpha channel of the rendered sub-frame is the object mask.
                Mt = cv2.imread(os.path.join(bpy.context.scene.node_tree.nodes[1].base_path,'image-{:06d}.png'.format(ki+1)),cv2.IMREAD_UNCHANGED)[:,:,-1] > 0
                # Border touched, or mask empty -> motion unusable.
                is_border = ((Mt[0,:].sum()+Mt[-1,:].sum()+Mt[:,0].sum()+Mt[:,-1].sum()) > 0) or Mt.sum()==0
                if is_border:
                    if ki == 0:
                        close_log(old)
                        return False, True ## sample different starting viewpoint
                    else:
                        do_repeat = True ## just sample another motion direction
            if do_repeat:
                break
        close_log(old)
        if do_repeat == False:
            break
    if do_repeat: ## sample different starting viewpoint
        return False, True
    return False, False
def render_obj(obj_path, path, objid, obj_name, temp_folder):
    """Render one .obj instance from a randomly chosen viewpoint list.

    Wrapper around :func:`render`: picks a random viewpoint file, optionally
    UV-maps a random texture onto the object, renders the FMO sub-frame
    sequence and finally composes the FMO image/video via ``make_fmo``.

    Args:
        obj_path: path of the .obj file to render.
        path: output folder of the object category.
        objid: 1-based index of this instance within the category (used in
            output file names).
        obj_name: category name used in the output file names.
        temp_folder: folder for the blender console log.

    Returns:
        True on success; False when the object cannot be used (transparent
        materials) or FMO composition keeps failing after ``g_max_trials``.
    """
    vps_path = random.sample(g_view_point_file, 1)[0]
    vps = list(load_viewpoint(vps_path))
    random.shuffle(vps)
    save_path = os.path.join(path,"{}_{:04d}.png".format(obj_name,objid))
    gt_path = os.path.join(path,"GT","{}_{:04d}".format(obj_name,objid))
    video_path = os.path.join(path,"{}_{:04d}.avi".format(obj_name,objid))
    if not os.path.exists(gt_path):
        os.mkdir(gt_path)
    # Sub-frames are written by the compositor's file-output node.
    image_output_node = bpy.context.scene.node_tree.nodes[1]
    image_output_node.base_path = gt_path
    # Drop cached images so previously loaded textures/frames are not reused.
    for imt in bpy.data.images:
        bpy.data.images.remove(imt)
    if g_apply_texture:
        for oi in range(len(bpy.data.objects)):
            if bpy.data.objects[oi].type == 'CAMERA' or bpy.data.objects[oi].type == 'LAMP':
                continue
            bpy.context.scene.objects.active = bpy.data.objects[oi]
            # pdb.set_trace()
            # for m in bpy.data.materials:
            #     bpy.data.materials.remove(m)
            # bpy.ops.object.material_slot_remove()
            # UV-unwrap the mesh (cube projection) so a texture can be mapped.
            bpy.ops.object.editmode_toggle()
            bpy.ops.uv.cube_project()
            bpy.ops.object.editmode_toggle()
            # Pick one random texture image for this object.
            texture_images = os.listdir(g_texture_path)
            texture = random.choice(texture_images)
            tex_path = os.path.join(g_texture_path,texture)
            # mat = bpy.data.materials.new(texture)
            # mat.use_nodes = True
            # nt = mat.node_tree
            # nodes = nt.nodes
            # links = nt.links
            # # Image Texture
            # textureNode = nodes.new("ShaderNodeTexImage")
            # textureNode.image = bpy.data.images.load(tex_path)
            # links.new(nodes['Diffuse BSDF'].inputs['Color'], textureNode.outputs['Color'])
            # mat.specular_intensity = 0
            # bpy.data.objects[oi].active_material = mat
            # print(bpy.data.objects[oi].active_material)
            # Wire the texture into every existing material's Diffuse BSDF.
            for mat in bpy.data.materials:
                nodes = mat.node_tree.nodes
                links = mat.node_tree.links
                textureNode = nodes.new("ShaderNodeTexImage")
                textureNode.image = bpy.data.images.load(tex_path)
                links.new(nodes['Diffuse BSDF'].inputs['Color'], textureNode.outputs['Color'])
            # print(bpy.data.objects[oi].active_material)
    tri = 0
    # Keep sampling viewpoints/motions until make_fmo succeeds or the
    # trial budget is exhausted.
    while tri <= g_max_trials:
        tri += 1
        vp = random.sample(vps, 1)[0]
        sample_different_object, sample_different_vp = render(obj_path, vp, temp_folder)
        if sample_different_vp:
            if sample_different_object:
                print('Transparent object!')
                return False
            print('Rendering failed, repeating')
            continue
        success = make_fmo(save_path, gt_path, video_path)
        if success:
            return True
        print('Making FMO failed, repeating')
    return False
def init_all():
    """Prepare the Blender environment for rendering.

    Applies the scene and compositor-node settings, sets the camera
    rotation mode and, when ``g_render_light`` is enabled, boosts the
    default lamp and adds an extra sun light.
    """
    scene_setting_init(g_gpu_render_enable)
    node_setting_init()
    camera = bpy.data.objects['Camera']
    camera.rotation_mode = g_rotation_mode
    if not g_render_light:
        return
    bpy.data.objects['Lamp'].data.energy = 50
    bpy.ops.object.lamp_add(type='SUN')
    bpy.data.objects['Sun'].data.energy = 5
### YOU CAN WRITE YOUR OWN IMPLEMENTATION TO GENERATE DATA
# Entry point: renders `g_number_per_category` FMO instances for each object
# category in g_render_objs[start_index:start_index+step_index].
# Invoked as: blender ... -- <start_index> <step_index>
init_all()
# Arguments after the "--" separator belong to this script, not to blender.
argv = sys.argv
argv = argv[argv.index("--") + 1:]
start_index = int(argv[0])
step_index = int(argv[1])
print('Start index {}, step index {}'.format(start_index, step_index))
# Log folder is keyed to the first category of this worker's slice.
temp_folder = g_syn_rgb_folder+g_render_objs[start_index]+'/'
for obj_name in g_render_objs[start_index:(start_index+step_index)]:
    print("Processing object {}".format(obj_name))
    # Per-category output folder with a GT/ subfolder for sub-frames.
    obj_folder = os.path.join(g_syn_rgb_folder, obj_name)
    if not os.path.exists(obj_folder):
        os.makedirs(obj_folder)
    if not os.path.exists(os.path.join(obj_folder,"GT")):
        os.mkdir(os.path.join(obj_folder,"GT"))
    # Collect all .obj files of the matching ShapeNet category.
    num = g_shapenet_categlory_pair[obj_name]
    search_path = os.path.join(g_shapenet_path, num, '**','*.obj')
    pathes = glob.glob(search_path, recursive=True)
    random.shuffle(pathes)
    objid = 1
    tri = 0
    while objid <= g_number_per_category:
        print("  instance {}".format(objid))
        clear_mesh()
        # Import a randomly picked model; its console noise goes to the log.
        path = random.sample(pathes, 1)[0]
        old = open_log(temp_folder)
        bpy.ops.import_scene.obj(filepath=path, axis_forward='-Z', axis_up='Y', filter_glob="*.obj;*.mtl", use_split_groups=False, use_split_objects=True)
        # bpy.ops.import_scene.obj(filepath=path)
        close_log(old)
        #combine_objects()
        #scale_objects(0.5)
        result = render_obj(path, obj_folder, objid, obj_name, temp_folder)
        if result:
            objid += 1
            tri = 0
        else:
            # Failed render: retry with another model, up to g_max_trials.
            print('Error! Rendering another object from the category!')
            tri += 1
        if tri > g_max_trials:
            print('No object find in the category!!!!!!!!!')
            break
790ca91d1e267c27a75b0c472c8aadefd871871f | 11,385 | py | Python | main.py | VV123/NLIDB_gradient | f42a6f383d2d4ac41c354cf55df2a21507577b02 | [
"MIT"
] | null | null | null | main.py | VV123/NLIDB_gradient | f42a6f383d2d4ac41c354cf55df2a21507577b02 | [
"MIT"
] | 1 | 2021-01-11T03:42:43.000Z | 2021-02-19T17:06:59.000Z | main.py | VV123/NLIDB_gradient | f42a6f383d2d4ac41c354cf55df2a21507577b02 | [
"MIT"
] | null | null | null | # coding=utf-8
import sys
import argparse
import os
from tensorflow.python.platform import gfile
import numpy as np
import tensorflow as tf
from tensorflow.python.layers.core import Dense
from utils.data_manager import load_data, load_data_one
from collections import defaultdict
from argparse import ArgumentParser
from decode_helper import decode_one
import sys
reload(sys)
sys.setdefaultencoding('utf8')
os.environ['TF_CPP_MIN_LOG_LEVEL'] = '3'
from tf_helper import train, evaluate, decode_data, decode_data_recover
from model1 import construct_graph
if __name__ == '__main__':
    # CLI entry point: dispatch on args.mode to the matching handler.
    # The handlers (init_args, train_model, inferrence, infer_one,
    # transfer) are presumably defined earlier in this file -- not visible
    # in this excerpt; TODO confirm.
    args = init_args()
    print(args)
    if args.mode == 'train':
        print('\nTrain model.')
        train_model(args)
    elif args.mode == 'infer':
        print('\nInference.')
        inferrence(args)
    elif args.mode == 'txt':
        print('\nInference from txt.')
        infer_one(args)
    # NOTE(review): an unrecognized mode falls through silently (no else).
    elif args.mode == 'transfer':
        print('\nTransfer.')
        transfer(args)
| 33.683432 | 133 | 0.623188 |
790e259abafc3b78efd22c4e49725337604761c5 | 55 | py | Python | src/__init__.py | codespacedot/CodeSpaceAPI | 22b457088aa592c4fb9111718810075d2643d9ca | [
"Apache-2.0"
] | 3 | 2021-07-05T17:28:14.000Z | 2021-12-07T10:08:14.000Z | src/__init__.py | git-vish/CodeSpaceAPI | 7ad4327e0eef3019098730358c4a23312bc85615 | [
"Apache-2.0"
] | 2 | 2021-07-29T13:55:15.000Z | 2021-07-31T16:49:03.000Z | src/__init__.py | git-vish/CodeSpaceAPI | 7ad4327e0eef3019098730358c4a23312bc85615 | [
"Apache-2.0"
] | 3 | 2021-07-01T16:32:20.000Z | 2021-07-05T04:50:30.000Z | """FastAPI Project for CodeSpace.
https://csdot.ml
"""
| 13.75 | 33 | 0.690909 |
790e708e4fd42df30662fd05e0fd27cb6d2b56ae | 1,525 | py | Python | gdsfactory/components/cdsem_straight.py | jorgepadilla19/gdsfactory | 68e1c18257a75d4418279851baea417c8899a165 | [
"MIT"
] | 42 | 2020-05-25T09:33:45.000Z | 2022-03-29T03:41:19.000Z | gdsfactory/components/cdsem_straight.py | jorgepadilla19/gdsfactory | 68e1c18257a75d4418279851baea417c8899a165 | [
"MIT"
] | 133 | 2020-05-28T18:29:04.000Z | 2022-03-31T22:21:42.000Z | gdsfactory/components/cdsem_straight.py | jorgepadilla19/gdsfactory | 68e1c18257a75d4418279851baea417c8899a165 | [
"MIT"
] | 17 | 2020-06-30T07:07:50.000Z | 2022-03-17T15:45:27.000Z | """CD SEM structures."""
from functools import partial
from typing import Optional, Tuple
from gdsfactory.cell import cell
from gdsfactory.component import Component
from gdsfactory.components.straight import straight as straight_function
from gdsfactory.components.text_rectangular import text_rectangular
from gdsfactory.cross_section import strip
from gdsfactory.grid import grid
from gdsfactory.types import ComponentFactory, CrossSectionFactory
# Small label helper: rectangular text font fixed at size=1.
text_rectangular_mini = partial(text_rectangular, size=1)
# Default length of a CD-SEM test line (units per gdsfactory convention).
LINE_LENGTH = 420.0
if __name__ == "__main__":
    # Quick visual check; cdsem_straight is presumably defined in the part
    # of this file not shown here -- TODO confirm.
    c = cdsem_straight()
    c.show()
79107ce8bb54a81242a6381a90d895c5d61ecf37 | 10,057 | py | Python | Examples/Tests/particle_fields_diags/analysis_particle_diags_impl.py | hklion/WarpX | 3c2d0ee2815ab1df21b9f78d899fe7b1a9651758 | [
"BSD-3-Clause-LBNL"
] | null | null | null | Examples/Tests/particle_fields_diags/analysis_particle_diags_impl.py | hklion/WarpX | 3c2d0ee2815ab1df21b9f78d899fe7b1a9651758 | [
"BSD-3-Clause-LBNL"
] | null | null | null | Examples/Tests/particle_fields_diags/analysis_particle_diags_impl.py | hklion/WarpX | 3c2d0ee2815ab1df21b9f78d899fe7b1a9651758 | [
"BSD-3-Clause-LBNL"
] | null | null | null | #!/usr/bin/env python3
# Copyright 2019-2022 Luca Fedeli, Yinjian Zhao, Hannah Klion
#
# This file is part of WarpX.
#
# License: BSD-3-Clause-LBNL
# This script tests the reduced particle diagnostics.
# The setup is a uniform plasma with electrons, protons and photons.
# Various particle and field quantities are written to file using the reduced diagnostics
# and compared with the corresponding quantities computed from the data in the plotfiles.
import os
import sys
import numpy as np
import openpmd_api as io
from scipy.constants import c
from scipy.constants import epsilon_0 as eps0
from scipy.constants import m_e, m_p
from scipy.constants import mu_0 as mu0
import yt
sys.path.insert(1, '../../../../warpx/Regression/Checksum/')
import checksumAPI
| 46.995327 | 126 | 0.645521 |
7910b1ce3b116e87579add349dab0e8dadaa95e7 | 1,420 | py | Python | predict.py | stonebegin/Promise12-3DUNet | d48d95ae7f2da98d068f84391dc547abd968981d | [
"MIT"
] | 2 | 2020-12-20T12:35:24.000Z | 2021-01-04T03:21:37.000Z | predict.py | stonebegin/Promise12-3DUNet | d48d95ae7f2da98d068f84391dc547abd968981d | [
"MIT"
] | 1 | 2020-12-27T05:08:02.000Z | 2020-12-27T08:08:50.000Z | predict.py | stonebegin/Promise12-3DUNet | d48d95ae7f2da98d068f84391dc547abd968981d | [
"MIT"
] | null | null | null | import importlib
import os
from datasets.hdf5 import get_test_loaders
from unet3d import utils
from unet3d.config import load_config
from unet3d.model import get_model
# Module-level logger for the 3D U-Net prediction entry point.
logger = utils.get_logger('UNet3DPredictor')
if __name__ == '__main__':
    # `main` is expected to be defined elsewhere in this file (not visible
    # in this excerpt) -- TODO confirm.
    main()
| 30.869565 | 84 | 0.687324 |
7910bb4a1911643dedff502020dff254dc351cc8 | 9,248 | py | Python | gitScrabber/scrabTasks/file/languageDetector.py | Eyenseo/gitScrabber | e3f5ce1a7b034fa3e40a54577268228a3be2b141 | [
"MIT"
] | null | null | null | gitScrabber/scrabTasks/file/languageDetector.py | Eyenseo/gitScrabber | e3f5ce1a7b034fa3e40a54577268228a3be2b141 | [
"MIT"
] | null | null | null | gitScrabber/scrabTasks/file/languageDetector.py | Eyenseo/gitScrabber | e3f5ce1a7b034fa3e40a54577268228a3be2b141 | [
"MIT"
] | null | null | null | """
The MIT License (MIT)
Copyright (c) 2017 Andreas Poppele
Copyright (c) 2017 Roland Jaeger
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""
from ..scrabTask import FileTask
import os
name = "LanguageDetector"
version = "1.1.1"
| 35.706564 | 79 | 0.561635 |
7911642cb8be401271e397388edbb0e1b9d4ae27 | 4,667 | py | Python | VAE/full_model/model_training.py | youngmg1995/NES-Music-Maker | aeda10a541cfd439cfa46c45e63411e0d98e41c1 | [
"MIT"
] | 3 | 2020-06-26T22:02:35.000Z | 2021-11-20T19:24:33.000Z | VAE/full_model/model_training.py | youngmg1995/NES-Music-Maker | aeda10a541cfd439cfa46c45e63411e0d98e41c1 | [
"MIT"
] | null | null | null | VAE/full_model/model_training.py | youngmg1995/NES-Music-Maker | aeda10a541cfd439cfa46c45e63411e0d98e41c1 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
"""
Created on Wed Apr 1 17:14:19 2020
@author: Mitchell
model_training.py
~~~~~~~~~~~~~~~~~
This file serves as a script for building and training our VAE model. To do
so we used the VAE and DataSequence classes defined in the file `VAE.py`, as
well as helper functions from the file `dataset_utils` for loading and parsing
our datasets.
The user has the the ability to specify several parameters that control the
loading of our data, the structure of our model, as well as the traininig plan
for our model. After training is complete the script also plots metrics tracked
during training and saves the final model.
"""
# Imports
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
from dataset_utils import load_training, load_validation
from VAE import VAE, DataSequence
import tensorflow as tf
import numpy as np
import matplotlib.pyplot as plt
import os, time, json
### Load Data
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# Parameters for shape of dataset (note these are also used for model def. and
# training.)
measures = 8
measure_len = 96
# training set: parsed/cached to JSON on first load
training_foldername = '../../nesmdb24_seprsco/train/'
train_save_filename = 'transformed_dataset.json'
dataset , labels2int_map , int2labels_map = \
    load_training(training_foldername, train_save_filename,
                  measures = measures, measure_len = measure_len)
# validation set: re-uses the label mapping learned from the training set
validation_foldername = '../../nesmdb24_seprsco/valid/'
val_save_filename = 'transformed_val_dataset.json'
val_dataset = load_validation(validation_foldername,\
                              labels2int_map, val_save_filename,
                              measures = measures, measure_len = measure_len)
### Build Model
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
### Model Parameters
latent_dim = 124
# one one-hot input dimension per channel/voice (4 in total, see build below)
input_dims = [mapping.shape[0]-1 for mapping in int2labels_map]
dropout = .1
maxnorm = None
# KL-divergence loss weights for the VAE objective
vae_b1 , vae_b2 = .02 , .1
# Build Model
model = VAE(latent_dim, input_dims, measures, measure_len, dropout,
            maxnorm, vae_b1 , vae_b2)
model.build([tf.TensorShape([None, measures, measure_len, input_dims[i]])
             for i in range(4)])
model.summary()
### Train Model
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# Training Parameters
batch_size = 100
epochs = 10
# Cost Function
cost_function = model.vae_loss
# Learning_rate schedule: exponential decay per epoch
lr_0 = .001
decay_rate = .998
lr_decay = lambda t: lr_0 * decay_rate**t
lr_schedule = tf.keras.callbacks.LearningRateScheduler(lr_decay)
# Optimizer
optimizer = tf.keras.optimizers.Adam()
# Define callbacks
callbacks = [lr_schedule]
# Keras Sequences for Datasets (need to use since one-hot datasets too
# large for storing in memory)
training_seq = DataSequence(dataset, int2labels_map, batch_size)
validation_seq = DataSequence(val_dataset, int2labels_map, batch_size)
# Compile Model
model.compile(optimizer = optimizer,
              loss = cost_function)
# Train model
# NOTE(review): `callbacks` and `validation_seq` are built above but not
# passed to fit_generator, so the LR schedule and validation metrics are
# unused here -- confirm whether that is intended.
tic = time.perf_counter()
history = model.fit_generator(generator = training_seq,
                              epochs = epochs)
toc = time.perf_counter()
print(f"Trained Model in {(toc - tic)/60:0.1f} minutes")
### Plot Training Metrics
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
training_loss = history.history['loss']
# Total Loss
plt.figure(1)
plt.plot(training_loss, 'b', label='Training')
plt.title('Loss vs Time')
plt.xlabel('Training Epoch')
plt.ylabel('Avg. Total Loss')
plt.legend()
plt.show()
### Save Model and History
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# Save Model Weights (disabled by default; flip the flag to persist)
save_model = False
if save_model:
    checkpoint_dir = '.\\training_checkpoints'
    checkpoint_prefix = os.path.join(checkpoint_dir, "model_ckpt")
    model.save_weights(checkpoint_prefix)
    print('Model weights saved to files: '+checkpoint_prefix+'.*')
# Save Training History (disabled by default; flip the flag to persist)
save_history = False
if save_history:
    checkpoint_dir = '.\\training_checkpoints'
    history_filename = os.path.join(checkpoint_dir, "training_history.json")
    with open(history_filename, 'w') as f:
        json.dump({
            key:[float(value) for value in history.history[key]]
            for key in history.history
        }, f)
    print('Training history saved to file: '+ history_filename)
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
#----------------------------------END FILE------------------------------------
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ | 31.748299 | 79 | 0.611099 |
7911efa6a596e02ff81a8a1e7aa08e6a17b34751 | 721 | py | Python | tests/validation/test_is_subnational1.py | StuartMacKay/ebird-api | 14b5c777548416a58abec05e25cd4b9a8e22f210 | [
"MIT"
] | 9 | 2020-05-16T20:26:33.000Z | 2021-11-02T06:24:46.000Z | tests/validation/test_is_subnational1.py | StuartMacKay/ebird-api | 14b5c777548416a58abec05e25cd4b9a8e22f210 | [
"MIT"
] | 17 | 2019-06-22T09:41:22.000Z | 2020-09-11T06:25:21.000Z | tests/validation/test_is_subnational1.py | ProjectBabbler/ebird-api | 14b5c777548416a58abec05e25cd4b9a8e22f210 | [
"MIT"
] | null | null | null | import unittest
from ebird.api.validation import is_subnational1
| 30.041667 | 60 | 0.744799 |
79135bcae4aa65725d47cfe68fe799e301d340b1 | 7,172 | py | Python | backend/kale/tests/assets/kfp_dsl/simple_data_passing.py | brness/kale | d90310dbebc765c68915df0cf832a7a5d1ec1551 | [
"Apache-2.0"
] | 502 | 2019-07-18T16:19:16.000Z | 2022-03-30T19:45:31.000Z | backend/kale/tests/assets/kfp_dsl/simple_data_passing.py | brness/kale | d90310dbebc765c68915df0cf832a7a5d1ec1551 | [
"Apache-2.0"
] | 189 | 2019-09-22T10:54:02.000Z | 2022-03-28T13:46:31.000Z | backend/kale/tests/assets/kfp_dsl/simple_data_passing.py | brness/kale | d90310dbebc765c68915df0cf832a7a5d1ec1551 | [
"Apache-2.0"
] | 111 | 2019-09-25T20:28:47.000Z | 2022-03-24T01:31:46.000Z | import json
import kfp.dsl as _kfp_dsl
import kfp.components as _kfp_components
from collections import OrderedDict
from kubernetes import client as k8s_client
# Wrap the pipeline step functions (step1..step3, defined in the part of
# this generated file not shown here -- TODO confirm) as KFP container ops.
_kale_step1_op = _kfp_components.func_to_container_op(step1)
_kale_step2_op = _kfp_components.func_to_container_op(step2)
_kale_step3_op = _kfp_components.func_to_container_op(step3)
if __name__ == "__main__":
    # Compile the auto-generated pipeline to a tar.gz package.
    pipeline_func = auto_generated_pipeline
    pipeline_filename = pipeline_func.__name__ + '.pipeline.tar.gz'
    import kfp.compiler as compiler
    compiler.Compiler().compile(pipeline_func, pipeline_filename)
    # Get or create an experiment and submit a pipeline run
    import kfp
    client = kfp.Client()
    experiment = client.create_experiment('test')
    # Submit a pipeline run
    from kale.common import kfputils
    pipeline_id, version_id = kfputils.upload_pipeline(
        pipeline_filename, "test")
    run_result = kfputils.run_pipeline(
        experiment_name=experiment.name, pipeline_id=pipeline_id, version_id=version_id)
| 34.480769 | 88 | 0.725739 |
791468fb9834f8a61e661025dfae37ea17e85be7 | 135 | py | Python | note/urls.py | StevenYwch/CloudNote | c36efba53d83a040f4c9cff861d0df28d9db8f1b | [
"MIT"
] | null | null | null | note/urls.py | StevenYwch/CloudNote | c36efba53d83a040f4c9cff861d0df28d9db8f1b | [
"MIT"
] | null | null | null | note/urls.py | StevenYwch/CloudNote | c36efba53d83a040f4c9cff861d0df28d9db8f1b | [
"MIT"
] | null | null | null | from django.urls import path
from . import views
urlpatterns = [
path('list', views.list_view),
path('add', views.add_view),
] | 19.285714 | 34 | 0.681481 |
791572847749537988baaf3cd53a31420b81f7a2 | 3,158 | py | Python | roombapy/discovery.py | Erelen-Laiquendi/roombapy | 104908ec040ebb72e16d3763741565eacc585801 | [
"MIT"
] | 17 | 2018-01-27T19:53:06.000Z | 2022-03-16T07:29:13.000Z | roombapy/discovery.py | Erelen-Laiquendi/roombapy | 104908ec040ebb72e16d3763741565eacc585801 | [
"MIT"
] | 78 | 2017-09-03T17:37:03.000Z | 2022-03-30T10:41:55.000Z | roombapy/discovery.py | bdraco/Roomba980-Python | d25583a7b8cd6e65148caeebc6849e73dff645da | [
"MIT"
] | 25 | 2017-09-03T13:43:21.000Z | 2022-03-19T23:41:51.000Z | import json
import logging
import socket
from roombapy.roomba_info import RoombaInfo
| 28.972477 | 79 | 0.594997 |
7915bd6303c3c35d054564976537a39f4bb990be | 76 | py | Python | nonbonded/cli/projects/__init__.py | SimonBoothroyd/nonbonded | 3efbb7d943d936b47248975f9ad0d8a006ea8684 | [
"MIT"
] | 5 | 2020-05-11T18:25:00.000Z | 2022-01-27T10:55:09.000Z | nonbonded/cli/projects/__init__.py | SimonBoothroyd/nonbonded | 3efbb7d943d936b47248975f9ad0d8a006ea8684 | [
"MIT"
] | 88 | 2020-06-02T14:40:05.000Z | 2022-03-02T09:20:39.000Z | nonbonded/cli/projects/__init__.py | SimonBoothroyd/nonbonded | 3efbb7d943d936b47248975f9ad0d8a006ea8684 | [
"MIT"
] | null | null | null | # from nonbonded.cli.project.project import project
#
# __all__ = [project]
| 19 | 51 | 0.75 |
791688dbd138ffb5132f957ed4ac7f6e3567bcff | 30,666 | py | Python | pmaf/biome/essentials/_taxonomy.py | mmtechslv/PhyloMAF | bab43dd4a4d2812951b1fdf4f1abb83edb79ea88 | [
"BSD-3-Clause"
] | 1 | 2021-07-02T06:24:17.000Z | 2021-07-02T06:24:17.000Z | pmaf/biome/essentials/_taxonomy.py | mmtechslv/PhyloMAF | bab43dd4a4d2812951b1fdf4f1abb83edb79ea88 | [
"BSD-3-Clause"
] | 1 | 2021-06-28T12:02:46.000Z | 2021-06-28T12:02:46.000Z | pmaf/biome/essentials/_taxonomy.py | mmtechslv/PhyloMAF | bab43dd4a4d2812951b1fdf4f1abb83edb79ea88 | [
"BSD-3-Clause"
] | null | null | null | import warnings
warnings.simplefilter("ignore", category=FutureWarning)
from pmaf.biome.essentials._metakit import EssentialFeatureMetabase
from pmaf.biome.essentials._base import EssentialBackboneBase
from pmaf.internal._constants import (
AVAIL_TAXONOMY_NOTATIONS,
jRegexGG,
jRegexQIIME,
BIOM_TAXONOMY_NAMES,
VALID_RANKS,
)
from pmaf.internal._shared import (
generate_lineages_from_taxa,
get_rank_upto,
indentify_taxon_notation,
validate_ranks,
extract_valid_ranks,
cols2ranks,
)
from collections import defaultdict
from os import path
import pandas as pd
import numpy as np
import biom
from typing import Union, Sequence, Tuple, Any, Optional
from pmaf.internal._typing import AnyGenericIdentifier, Mapper
def _merge_features_by_map(
self, map_dict: Mapper, done: bool = False, **kwargs: Any
) -> Optional[Mapper]:
"""Merge features and ratify action.
Parameters
----------
map_dict
Map to use for merging
done
Whether merging was completed or not. Compatibility.
kwargs
Compatibility
"""
if not done:
raise NotImplementedError
if map_dict:
return self._ratify_action(
"_merge_features_by_map",
map_dict,
_annotations=self.__internal_taxonomy.loc[:, "lineage"].to_dict(),
**kwargs
)
def drop_feature_by_id(
self, ids: AnyGenericIdentifier, **kwargs: Any
) -> Optional[AnyGenericIdentifier]:
"""Remove features by feature `ids`.
Parameters
----------
ids
Feature identifiers
kwargs
Compatibility
"""
target_ids = np.asarray(ids)
if self.xrid.isin(target_ids).sum() == len(target_ids):
return self._remove_features_by_id(target_ids, **kwargs)
else:
raise ValueError("Invalid feature ids are provided.")
def get_taxonomy_by_id(
self, ids: Optional[AnyGenericIdentifier] = None
) -> pd.DataFrame:
"""Get taxonomy :class:`~pandas.DataFrame` by feature `ids`.
Parameters
----------
ids
Either feature indices or None for all.
Returns
-------
class:`pandas.DataFrame` with taxonomy data
"""
if ids is None:
target_ids = self.xrid
else:
target_ids = np.asarray(ids)
if self.xrid.isin(target_ids).sum() <= len(target_ids):
return self.__internal_taxonomy.loc[target_ids, self.__avail_ranks]
else:
raise ValueError("Invalid feature ids are provided.")
    def get_lineage_by_id(
        self,
        ids: Optional[AnyGenericIdentifier] = None,
        missing_rank: bool = False,
        desired_ranks: Union[bool, Sequence[str]] = False,
        drop_ranks: Union[bool, Sequence[str]] = False,
        **kwargs: Any
    ) -> pd.Series:
        """Get taxonomy lineages by feature `ids`.

        Parameters
        ----------
        ids
            Either feature indices or None for all features.
        missing_rank
            If True will generate empty-rank prefixes like `s__` or `d__`.
        desired_ranks
            List of desired ranks to generate, or False to use all main
            ranks (``VALID_RANKS``).
        drop_ranks
            List of ranks to drop from desired ranks.
            This parameter only useful if `missing_rank` is True.
        kwargs
            Compatibility.

        Returns
        -------
        class:`pandas.Series` with consensus lineages and corresponding IDs
        """
        if ids is None:
            target_ids = self.xrid
        else:
            target_ids = np.asarray(ids)
        tmp_desired_ranks = VALID_RANKS if desired_ranks is False else desired_ranks
        # Number of known feature ids among the requested ones.
        total_valid_rids = self.xrid.isin(target_ids).sum()
        if total_valid_rids == len(target_ids):
            return generate_lineages_from_taxa(
                self.__internal_taxonomy.loc[target_ids],
                missing_rank,
                tmp_desired_ranks,
                drop_ranks,
            )
        elif total_valid_rids < len(target_ids):
            # Fewer matches than requests: deduplicate and retry.
            # NOTE(review): genuinely unknown ids still reach pandas `.loc`
            # here and raise a KeyError rather than the ValueError below --
            # confirm whether that is intended.
            return generate_lineages_from_taxa(
                self.__internal_taxonomy.loc[np.unique(target_ids)],
                missing_rank,
                tmp_desired_ranks,
                drop_ranks,
            )
        else:
            raise ValueError("Invalid feature ids are provided.")
def find_features_by_pattern(
self, pattern_str: str, case_sensitive: bool = False, regex: bool = False
) -> np.ndarray:
"""Searches for features with taxa that matches `pattern_str`
Parameters
----------
pattern_str
Pattern to search for
case_sensitive
Case sensitive mode
regex
Use regular expressions
Returns
-------
class:`~numpy.ndarray` with indices
"""
return self.__internal_taxonomy[
self.__internal_taxonomy.loc[:, "lineage"].str.contains(
pattern_str, case=case_sensitive, regex=regex
)
].index.values
def drop_features_without_taxa(
self, **kwargs: Any
) -> Optional[AnyGenericIdentifier]:
"""Remove features that do not contain taxonomy.
Parameters
----------
kwargs
Compatibility
"""
ids_to_drop = self.find_features_without_taxa()
return self._remove_features_by_id(ids_to_drop, **kwargs)
def drop_features_without_ranks(
self, ranks: Sequence[str], any: bool = False, **kwargs: Any
) -> Optional[AnyGenericIdentifier]: # Done
"""Remove features that do not contain `ranks`
Parameters
----------
ranks
Ranks to look for
any
If True removes feature with single occurrence of missing rank.
If False all `ranks` must be missing.
kwargs
Compatibility
"""
target_ranks = np.asarray(ranks)
if self.__internal_taxonomy.columns.isin(target_ranks).sum() == len(
target_ranks
):
no_rank_mask = self.__internal_taxonomy.loc[:, ranks].isna()
no_rank_mask_adjusted = (
no_rank_mask.any(axis=1) if any else no_rank_mask.all(axis=1)
)
ids_to_drop = self.__internal_taxonomy.loc[no_rank_mask_adjusted].index
return self._remove_features_by_id(ids_to_drop, **kwargs)
else:
raise ValueError("Invalid ranks are provided.")
    def merge_duplicated_features(self, **kwargs: Any) -> Optional[Mapper]:
        """Merge features that share an identical consensus lineage.

        Features are grouped by the 'lineage' column; when at least one
        group holds more than one feature, the internal taxonomy is
        rebuilt with one representative per unique lineage (re-indexed
        with consecutive integers) and the resulting map
        ``{new_id: [old_ids]}`` is passed on for ratification.

        Parameters
        ----------
        kwargs
            Compatibility
        """
        ret = {}
        groupby = self.__internal_taxonomy.groupby("lineage")
        # Only rebuild when an actual duplicate exists.
        if any([len(group) > 1 for group in groupby.groups.values()]):
            tmp_feature_lineage = []
            tmp_groups = []
            group_indices = list(range(len(groupby.groups)))
            for lineage, feature_ids in groupby.groups.items():
                tmp_feature_lineage.append(lineage)
                tmp_groups.append(list(feature_ids))
            # Re-initialize internal state keyed by the new group indices.
            self.__init_internal_taxonomy(
                pd.Series(data=tmp_feature_lineage, index=group_indices)
            )
            ret = dict(zip(group_indices, tmp_groups))
        return self._merge_features_by_map(ret, True, **kwargs)
def merge_features_by_rank(self, level: str, **kwargs: Any) -> Optional[Mapper]:
"""Merge features by taxonomic rank/level.
Parameters
----------
level
Taxonomic rank/level to use for merging.
kwargs
Compatibility
"""
ret = {}
if not isinstance(level, str):
raise TypeError("`rank` must have str type.")
if level in self.__avail_ranks:
target_ranks = get_rank_upto(self.avail_ranks, level, True)
if target_ranks:
tmp_lineages = generate_lineages_from_taxa(
self.__internal_taxonomy, False, target_ranks, False
)
groups = tmp_lineages.groupby(tmp_lineages)
if len(groups.groups) > 1:
tmp_feature_lineage = []
tmp_groups = []
group_indices = list(range(len(groups.groups)))
for lineage, feature_ids in groups.groups.items():
tmp_feature_lineage.append(lineage)
tmp_groups.append(list(feature_ids))
self.__init_internal_taxonomy(
pd.Series(data=tmp_feature_lineage, index=group_indices)
)
ret = dict(zip(group_indices, tmp_groups))
else:
raise ValueError("Invalid rank are provided.")
return self._merge_features_by_map(ret, True, **kwargs)
def find_features_without_taxa(self) -> np.ndarray:
"""Find features without taxa.
Returns
-------
class:`~numpy.ndarray` with feature indices.
"""
return self.__internal_taxonomy.loc[
self.__internal_taxonomy.loc[:, VALID_RANKS].agg(
lambda rank: len("".join(map(lambda x: (str(x or "")), rank))), axis=1
)
< 1
].index.values
def get_subset(
self, rids: Optional[AnyGenericIdentifier] = None, *args, **kwargs: Any
) -> "RepTaxonomy":
"""Get subset of the :class:`.RepTaxonomy`.
Parameters
----------
rids
Feature identifiers.
args
Compatibility
kwargs
Compatibility
Returns
-------
class:`.RepTaxonomy`
"""
if rids is None:
target_rids = self.xrid
else:
target_rids = np.asarray(rids).astype(self.__internal_taxonomy.index.dtype)
if not self.xrid.isin(target_rids).sum() == len(target_rids):
raise ValueError("Invalid feature ids are provided.")
return type(self)(
taxonomy=self.__internal_taxonomy.loc[target_rids, "lineage"],
metadata=self.metadata,
name=self.name,
)
def _export(
self, taxlike: str = "lineage", ascending: bool = True, **kwargs: Any
) -> Tuple[pd.Series, dict]:
"""Creates taxonomy for export.
Parameters
----------
taxlike
Generate taxonomy in format(currently only `lineage` is supported.)
ascending
Sorting
kwargs
Compatibility
"""
if taxlike == "lineage":
return (
self.get_lineage_by_id(**kwargs).sort_values(ascending=ascending),
kwargs,
)
else:
raise NotImplemented
def export(
self,
output_fp: str,
*args,
_add_ext: bool = False,
sep: str = ",",
**kwargs: Any
) -> None:
"""Exports the taxonomy into the specified file.
Parameters
----------
output_fp
Export filepath
args
Compatibility
_add_ext
Add file extension or not.
sep
Delimiter
kwargs
Compatibility
"""
tmp_export, rkwarg = self._export(*args, **kwargs)
if _add_ext:
tmp_export.to_csv("{}.csv".format(output_fp), sep=sep)
else:
tmp_export.to_csv(output_fp, sep=sep)
def copy(self) -> "RepTaxonomy":
"""Copy of the instance."""
return type(self)(
taxonomy=self.__internal_taxonomy.loc[:, "lineage"],
metadata=self.metadata,
name=self.name,
)
    def __fix_taxon_names(self) -> None:
        """Fix invalid taxon names.

        Applies :func:`taxon_fixer` element-wise to every rank column of the
        internal taxonomy table, writing the cleaned values back in place.
        """
        self.__internal_taxonomy.loc[:, VALID_RANKS] = self.__internal_taxonomy.loc[
            :, VALID_RANKS
        ].applymap(taxon_fixer)
    def __reconstruct_internal_lineages(self) -> None:
        """Reconstruct the internal lineages.

        Rebuilds the "lineage" column from the currently available rank
        columns via :func:`generate_lineages_from_taxa`.
        """
        self.__internal_taxonomy.loc[:, "lineage"] = generate_lineages_from_taxa(
            self.__internal_taxonomy, True, self.__avail_ranks, False
        )
    def __init_internal_taxonomy(
        self,
        taxonomy_data: Union[pd.Series, pd.DataFrame],
        taxonomy_notation: Optional[str] = "greengenes",
        order_ranks: Optional[Sequence[str]] = None,
        **kwargs: Any
    ) -> None:
        """Main method to initialize taxonomy.

        Dispatches on the type of `taxonomy_data`: a Series (or single-column
        DataFrame) is parsed as lineage strings, a multi-column DataFrame as
        per-rank taxa. The parsed result becomes the internal taxonomy table.

        Parameters
        ----------
        taxonomy_data
            Incoming parsed taxonomy data
        taxonomy_notation
            Taxonomy lineage notation style. Can be one of
            :const:`pmaf.internals._constants.AVAIL_TAXONOMY_NOTATIONS`
        order_ranks
            List with the target rank order. Default is set to None.
            The 'silva' notation require `order_ranks`.
        kwargs
            Compatibility

        Raises
        ------
        RuntimeError
            If `taxonomy_data` is neither a Series nor a DataFrame.
        ValueError
            If parsing produced no valid taxonomy.
        """
        if isinstance(taxonomy_data, pd.Series):
            new_taxonomy = self.__init_taxonomy_from_lineages(
                taxonomy_data, taxonomy_notation, order_ranks
            )
        elif isinstance(taxonomy_data, pd.DataFrame):
            if taxonomy_data.shape[1] == 1:
                # A single-column frame is treated as a column of lineage strings.
                taxonomy_data_series = pd.Series(
                    data=taxonomy_data.iloc[:, 0], index=taxonomy_data.index
                )
                new_taxonomy = self.__init_taxonomy_from_lineages(
                    taxonomy_data_series, taxonomy_notation, order_ranks
                )
            else:
                new_taxonomy = self.__init_taxonomy_from_frame(
                    taxonomy_data, taxonomy_notation, order_ranks
                )
        else:
            raise RuntimeError(
                "`taxonomy_data` must be either pd.Series or pd.Dataframe"
            )
        if new_taxonomy is None:
            raise ValueError("Provided taxonomy is invalid.")
        # Assign newly constructed taxonomy to the self.__internal_taxonomy
        self.__internal_taxonomy = new_taxonomy
        self.__fix_taxon_names()  # Fix incorrect taxa
        # `new_taxonomy` aliases self.__internal_taxonomy, so the rank columns
        # inspected below already contain the fixed taxon names.
        tmp_avail_ranks = [rank for rank in VALID_RANKS if rank in new_taxonomy.columns]
        # A rank is "available" only if at least one feature has a value there.
        self.__avail_ranks = [
            rank for rank in tmp_avail_ranks if new_taxonomy.loc[:, rank].notna().any()
        ]
        # Reconstruct internal lineages for default greengenes notation
        self.__reconstruct_internal_lineages()
        self._init_state = True
    def __init_taxonomy_from_lineages(
        self,
        taxonomy_series: pd.Series,
        taxonomy_notation: Optional[str],
        order_ranks: Optional[Sequence[str]],
    ) -> pd.DataFrame:  # Done
        """Main method that produces taxonomy dataframe from lineages.

        Parameters
        ----------
        taxonomy_series
            :class:`pandas.Series` with taxonomy lineages
        taxonomy_notation
            Taxonomy lineage notation style. Can be one of :const:`pmaf.internals._constants.AVAIL_TAXONOMY_NOTATIONS`
        order_ranks
            List with the target rank order. Default is set to None. The 'silva' notation require `order_ranks`.

        Returns
        -------
        :class:`~pandas.DataFrame` indexed by feature id with one column per
        rank (missing ranks filled with None).
        """
        # Check if taxonomy is known and is available for parsing. Otherwise indentify_taxon_notation() will try to identify notation
        if taxonomy_notation in AVAIL_TAXONOMY_NOTATIONS:
            notation = taxonomy_notation
        else:
            # Get first lineage _sample for notation testing assuming the rest have the the same notations
            sample_taxon = taxonomy_series.iloc[0]
            # Identify notation of the lineage string
            notation = indentify_taxon_notation(sample_taxon)
        if order_ranks is not None:
            if all([rank in VALID_RANKS for rank in order_ranks]):
                target_order_ranks = order_ranks
            else:
                raise NotImplementedError
        else:
            target_order_ranks = VALID_RANKS
        if notation == "greengenes":
            # reset_index() turns the series into [feature_id, lineage] pairs.
            lineages = taxonomy_series.reset_index().values.tolist()
            ordered_taxa_list = []
            ordered_indices_list = [elem[0] for elem in lineages]
            for lineage in lineages:
                # jRegexGG extracts (rank, taxon) pairs from a greengenes lineage.
                tmp_lineage = jRegexGG.findall(lineage[1])
                tmp_taxa_dict = {
                    elem[0]: elem[1] for elem in tmp_lineage if elem[0] in VALID_RANKS
                }
                # Pad missing ranks with None so every row has all VALID_RANKS.
                for rank in VALID_RANKS:
                    if rank not in tmp_taxa_dict.keys():
                        tmp_taxa_dict.update({rank: None})
                tmp_taxa_ordered = [tmp_taxa_dict[rank] for rank in VALID_RANKS]
                # Leading None is a placeholder for the "lineage" column.
                ordered_taxa_list.append([None] + tmp_taxa_ordered)
            taxonomy = pd.DataFrame(
                index=ordered_indices_list,
                data=ordered_taxa_list,
                columns=["lineage"] + VALID_RANKS,
            )
            return taxonomy
        elif notation == "qiime":
            lineages = taxonomy_series.reset_index().values.tolist()
            tmp_taxa_dict_list = []
            tmp_ranks = set()
            for lineage in lineages:
                tmp_lineage = jRegexQIIME.findall(lineage[1])
                tmp_lineage.sort(key=lambda x: x[0])
                # NOTE: defaultdict(None) has no default factory, so it behaves
                # like a plain dict here.
                tmp_taxa_dict = defaultdict(None)
                # The feature id is stored under the literal key None and is
                # promoted to the index via set_index(None, ...) below.
                tmp_taxa_dict[None] = lineage[0]
                for rank, taxon in tmp_lineage:
                    tmp_taxa_dict[rank] = taxon
                    tmp_ranks.add(rank)
                tmp_taxa_dict_list.append(dict(tmp_taxa_dict))
            tmp_taxonomy_df = pd.DataFrame.from_records(tmp_taxa_dict_list)
            tmp_taxonomy_df.set_index(None, inplace=True)
            tmp_taxonomy_df = tmp_taxonomy_df.loc[:, sorted(list(tmp_ranks))]
            # Map the qiime rank labels onto the last len(tmp_ranks) entries of
            # target_order_ranks while preserving their order.
            tmp_taxonomy_df.columns = [
                rank for rank in target_order_ranks[::-1][: len(tmp_ranks)]
            ][::-1]
            for rank in VALID_RANKS:
                if rank not in tmp_taxonomy_df.columns:
                    tmp_taxonomy_df.loc[:, rank] = None
            return tmp_taxonomy_df
        elif notation == "silva":
            lineages = taxonomy_series.reset_index().values.tolist()
            tmp_taxa_dict_list = []
            tmp_ranks = set()
            for lineage in lineages:
                # Silva lineages are plain ';'-separated taxa; rank identity
                # comes from position via target_order_ranks.
                tmp_lineage = lineage[1].split(";")
                tmp_taxa_dict = defaultdict(None)
                tmp_taxa_dict[None] = lineage[0]
                for rank_i, taxon in enumerate(tmp_lineage):
                    rank = target_order_ranks[rank_i]
                    tmp_taxa_dict[rank] = taxon
                    tmp_ranks.add(rank)
                tmp_taxa_dict_list.append(dict(tmp_taxa_dict))
            tmp_taxonomy_df = pd.DataFrame.from_records(tmp_taxa_dict_list)
            tmp_taxonomy_df.set_index(None, inplace=True)
            tmp_rank_ordered = [
                rank for rank in target_order_ranks if rank in VALID_RANKS
            ]
            tmp_taxonomy_df = tmp_taxonomy_df.loc[:, tmp_rank_ordered]
            # NOTE(review): the rename below uses target_order_ranks[::-1][:len]
            # while the selection above filtered by VALID_RANKS — verify the two
            # stay consistent when order_ranks contains ranks outside VALID_RANKS.
            tmp_taxonomy_df.columns = [
                rank for rank in target_order_ranks[::-1][: len(tmp_ranks)]
            ][::-1]
            for rank in VALID_RANKS:
                if rank not in tmp_taxonomy_df.columns:
                    tmp_taxonomy_df.loc[:, rank] = None
            return tmp_taxonomy_df
        else:
            raise NotImplementedError
    def __init_taxonomy_from_frame(
        self,
        taxonomy_dataframe: pd.DataFrame,
        taxonomy_notation: Optional[str],
        order_ranks: Optional[Sequence[str]],
    ) -> pd.DataFrame:  # Done # For now only pass to _init_taxonomy_from_series
        """Main method that produces taxonomy sheet from dataframe.

        Parameters
        ----------
        taxonomy_dataframe
            :class:`~pandas.DataFrame` with taxa split by ranks.
        taxonomy_notation
            Taxonomy lineage notation style. Can be one of :const:`pmaf.internals._constants.AVAIL_TAXONOMY_NOTATIONS`
        order_ranks
            List with the target rank order. Default is set to None. The 'silva' notation require `order_ranks`.

        Returns
        -------
        :class:`~pandas.DataFrame`
        """
        valid_ranks = extract_valid_ranks(taxonomy_dataframe.columns, VALID_RANKS)
        if valid_ranks is not None:
            if len(valid_ranks) > 0:
                # Recognized rank columns present: pad the frame with columns
                # for the missing ranks.
                # NOTE(review): missing ranks are filled with "" here, whereas
                # the lineage-parsing paths fill them with None — confirm the
                # difference is intentional.
                return pd.concat(
                    [
                        taxonomy_dataframe,
                        pd.DataFrame(
                            data="",
                            index=taxonomy_dataframe.index,
                            columns=[
                                rank for rank in VALID_RANKS if rank not in valid_ranks
                            ],
                        ),
                    ],
                    axis=1,
                )
            else:
                # No recognized rank columns: join each row's cells with ';'
                # and re-parse the result as lineage strings.
                taxonomy_series = taxonomy_dataframe.apply(
                    lambda taxa: ";".join(taxa.values.tolist()), axis=1
                )
                return self.__init_taxonomy_from_lineages(
                    taxonomy_series, taxonomy_notation, order_ranks
                )
        else:
            # Columns did not validate as ranks: derive rank labels from the
            # column names instead.
            # NOTE(review): this renames the caller's DataFrame columns in
            # place (mutates the argument) — verify callers do not rely on
            # the original column labels.
            valid_ranks = cols2ranks(taxonomy_dataframe.columns)
            taxonomy_dataframe.columns = valid_ranks
            # Non-string cells (e.g. NaN) become '' before joining.
            taxonomy_series = taxonomy_dataframe.apply(
                lambda taxa: ";".join([(t if isinstance(t,str) else '') for t in taxa.values]), axis=1
            )
            return self.__init_taxonomy_from_lineages(
                taxonomy_series, taxonomy_notation, order_ranks
            )
7916e1c58cd3262cc6b3f5abd2ae3b7c7603279e | 9,607 | py | Python | users_django/users/tests/test_views.py | r-o-main/users-exercise | ecd6e33308140f72cb6c446e0e7e93f327b57a97 | [
"MIT"
] | null | null | null | users_django/users/tests/test_views.py | r-o-main/users-exercise | ecd6e33308140f72cb6c446e0e7e93f327b57a97 | [
"MIT"
] | null | null | null | users_django/users/tests/test_views.py | r-o-main/users-exercise | ecd6e33308140f72cb6c446e0e7e93f327b57a97 | [
"MIT"
] | null | null | null | from rest_framework.test import APIRequestFactory
from rest_framework import status
from django.test import TestCase
from django.urls import reverse
from ..models import User
from ..serializer import UserSerializer
from ..views import UserViewSet
import ipapi
| 39.052846 | 108 | 0.65192 |
79181888e71b95f21231a74673bce1df5f5dad06 | 1,058 | py | Python | jburt/mask.py | jbburt/jburt | 7745491214ef2b665ca8d1fc526bc802a36985ff | [
"MIT"
] | null | null | null | jburt/mask.py | jbburt/jburt | 7745491214ef2b665ca8d1fc526bc802a36985ff | [
"MIT"
] | null | null | null | jburt/mask.py | jbburt/jburt | 7745491214ef2b665ca8d1fc526bc802a36985ff | [
"MIT"
] | null | null | null | from typing import List
import numpy as np
def mask_nan(arrays: List[np.ndarray]) -> List[np.ndarray]:
    """
    Drop indices from equal-sized arrays if the element at that index is NaN in
    any of the input arrays.

    Parameters
    ----------
    arrays : List[np.ndarray]
        list of ndarrays containing NaNs, to be masked

    Returns
    -------
    List[np.ndarray]
        masked arrays (free of NaNs); an empty input list yields an empty list

    Raises
    ------
    ValueError
        if the input arrays do not all have the same size

    Notes
    -----
    This function find the indices where one or more elements is NaN in one or
    more of the input arrays, then drops those indices from all arrays.
    For example:
    >> a = np.array([0, 1, np.nan, 3])
    >> b = np.array([np.nan, 5, np.nan, 7])
    >> c = np.array([8, 9, 10, 11])
    >> mask_nan([a, b, c])
    [array([ 1.,  3.]), array([ 5.,  7.]), array([ 9, 11])]

    """
    if not arrays:
        # Nothing to mask; previously this crashed with IndexError.
        return []
    n = arrays[0].size
    # Validate with a real exception: `assert` is stripped under `python -O`.
    if not all(a.size == n for a in arrays[1:]):
        raise ValueError("all input arrays must have the same size")
    mask = np.zeros(n, dtype=bool)
    for arr in arrays:
        mask |= np.isnan(arr)
    keep = ~mask
    # Boolean indexing keeps elements in order, equivalent to np.where-based
    # fancy indexing for 1-D arrays.
    return [arr[keep] for arr in arrays]
| 27.128205 | 79 | 0.581285 |
7918bd9392635ed706771c33b08bee283e79ec85 | 838 | py | Python | ExpenseTracker/grocery/migrations/0004_auto_20200908_1918.py | lennyAiko/LifeExpenses | ec345228bca00742b0b08cf3fc294dba6574b515 | [
"MIT"
] | null | null | null | ExpenseTracker/grocery/migrations/0004_auto_20200908_1918.py | lennyAiko/LifeExpenses | ec345228bca00742b0b08cf3fc294dba6574b515 | [
"MIT"
] | null | null | null | ExpenseTracker/grocery/migrations/0004_auto_20200908_1918.py | lennyAiko/LifeExpenses | ec345228bca00742b0b08cf3fc294dba6574b515 | [
"MIT"
] | 1 | 2020-09-01T15:38:19.000Z | 2020-09-01T15:38:19.000Z | # Generated by Django 3.1.1 on 2020-09-08 18:18
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
| 31.037037 | 142 | 0.656325 |
7919878f4085d6d12cdcb153170df1fa3bde8e8d | 1,035 | py | Python | my_hello_world_app/web_api/router.py | gsjay980/data-science-IP | 715550d1cbf67e552c0df533619460c0fee15b94 | [
"MIT"
] | 5 | 2020-05-26T09:33:54.000Z | 2021-07-01T02:42:30.000Z | my_hello_world_app/web_api/router.py | gsjay980/data-science-IP | 715550d1cbf67e552c0df533619460c0fee15b94 | [
"MIT"
] | 3 | 2019-12-26T17:34:24.000Z | 2020-02-04T03:16:23.000Z | my_hello_world_app/web_api/router.py | gsjay980/data-science-IP | 715550d1cbf67e552c0df533619460c0fee15b94 | [
"MIT"
] | 2 | 2021-12-17T00:46:03.000Z | 2022-02-26T11:04:55.000Z | from os import getenv
from typing import Optional, Dict
from flask import Flask
TestConfig = Optional[Dict[str, bool]]
def create_app(test_config: TestConfig = None) -> Flask:
""" App factory method to initialize the application with given configuration """
app: Flask = Flask(__name__)
if test_config is not None:
app.config.from_mapping(test_config)
return app
| 32.34375 | 97 | 0.672464 |
791a179ef2265637a66974e7b35a3ad2c3c5a16a | 10,666 | py | Python | src/pythonModules/fourgp_rv/fourgp_rv/templates_resample.py | dcf21/4most-4gp | 0421d76791315aa3ca8ff9e4bd2e37ad36c0141f | [
"MIT"
] | null | null | null | src/pythonModules/fourgp_rv/fourgp_rv/templates_resample.py | dcf21/4most-4gp | 0421d76791315aa3ca8ff9e4bd2e37ad36c0141f | [
"MIT"
] | null | null | null | src/pythonModules/fourgp_rv/fourgp_rv/templates_resample.py | dcf21/4most-4gp | 0421d76791315aa3ca8ff9e4bd2e37ad36c0141f | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
"""
Code to take template spectra, used for RV fitting, and pass them through 4FS to resample them to 4MOST's resolution.
It then further resamples each arm onto a fixed logarithmic stride.
"""
import argparse
import hashlib
import logging
import numpy as np
import os
from os import path as os_path
from fourgp_fourfs import FourFS
from fourgp_degrade.resample import SpectrumResampler
from fourgp_degrade import SpectrumProperties
from fourgp_speclib import SpectrumLibrarySqlite
def command_line_interface(root_path):
    """
    A simple command-line interface for running a tool to resample a library of template spectra onto fixed
    logarithmic rasters representing each of the 4MOST arms.

    :param root_path:
        The root path of this 4GP installation; the directory where we can find 4FS.

    :return:
        An object containing the arguments supplied by the user.
    """
    # Describe each option as (flag, keyword-arguments) and register them in a
    # single pass; names, defaults and help strings are unchanged.
    option_specs = [
        ('--templates-in',
         dict(required=False,
              default='turbospec_rv_templates',
              dest='templates_in',
              help="Library of spectra to use as templates for RV code")),
        ('--workspace',
         dict(dest='workspace',
              default="",
              help="Directory where we expect to find spectrum libraries")),
        ('--templates-out',
         dict(required=False,
              default="rv_templates_resampled",
              dest="templates_out",
              help="Library into which to place resampled templates for RV code")),
        ('--binary-path',
         dict(required=False,
              default=root_path,
              dest="binary_path",
              help="Specify a directory where 4FS binary package is installed")),
    ]
    cli_parser = argparse.ArgumentParser(description=__doc__.strip())
    for flag, options in option_specs:
        cli_parser.add_argument(flag, **options)
    parsed_args = cli_parser.parse_args()

    # Set up logger
    logging.basicConfig(level=logging.INFO, format='[%(asctime)s] %(levelname)s:%(filename)s:%(message)s',
                        datefmt='%d/%m/%Y %H:%M:%S')
    logging.getLogger(__name__).info("Resampling template spectra")
    return parsed_args
def logarithmic_raster(lambda_min, lambda_max, lambda_step):
    """
    Create a logarithmic raster with a fixed logarithmic stride, based on a starting wavelength, finishing wavelength,
    and a mean wavelength step.

    :param lambda_min:
        Smallest wavelength in raster.
    :param lambda_max:
        Largest wavelength in raster.
    :param lambda_step:
        The approximate pixel size in the raster.
    :return:
        A numpy array containing a wavelength raster with fixed logarithmic stride.
    """
    # Work in log space: a constant additive stride there is a constant
    # multiplicative ratio (1 + step/min) in wavelength space.
    log_start = np.log(lambda_min)
    log_stop = np.log(lambda_max)
    log_stride = np.log(1 + lambda_step / lambda_min)
    return np.exp(np.arange(log_start, log_stop, log_stride))
def resample_templates(args, logger):
    """
    Resample a spectrum library of templates onto a fixed logarithmic stride, representing each of the 4MOST arms in
    turn. We use 4FS to down-sample the templates to the resolution of 4MOST observations, and automatically detect
    the list of arms contained within each 4FS mock observation. We then resample the 4FS output onto a new raster
    with fixed logarithmic stride.

    :param args:
        Object containing arguments supplied by the used, for example the name of the spectrum libraries we use for
        input and output. The required fields are defined by the user interface above.

    :param logger:
        A python logging object.

    :return:
        None.
    """
    # Set path to workspace where we expect to find libraries of spectra
    # NOTE(review): `args.our_path` is not defined by command_line_interface()
    # above, so this line raises AttributeError when --workspace is left empty;
    # confirm where `our_path` is expected to come from.
    workspace = args.workspace if args.workspace else os_path.join(args.our_path, "../../../workspace")
    # Open input template spectra
    spectra = SpectrumLibrarySqlite.open_and_search(
        library_spec=args.templates_in,
        workspace=workspace,
        extra_constraints={"continuum_normalised": 0}
    )
    templates_library, templates_library_items, templates_spectra_constraints = \
        [spectra[i] for i in ("library", "items", "constraints")]
    # Create new SpectrumLibrary to hold the resampled output templates
    library_path = os_path.join(workspace, args.templates_out)
    output_library = SpectrumLibrarySqlite(path=library_path, create=True)
    # Instantiate 4FS wrapper (single fixed SNR of 250 per pixel at mag 13)
    etc_wrapper = FourFS(
        path_to_4fs=os_path.join(args.binary_path, "OpSys/ETC"),
        snr_list=[250.],
        magnitude=13,
        snr_per_pixel=True
    )
    for input_spectrum_id in templates_library_items:
        logger.info("Working on <{}>".format(input_spectrum_id['filename']))
        # Open Spectrum data from disk
        input_spectrum_array = templates_library.open(ids=input_spectrum_id['specId'])
        # Load template spectrum (flux normalised)
        template_flux_normalised = input_spectrum_array.extract_item(0)
        # Look up the unique ID of the star we've just loaded
        # Newer spectrum libraries have a uid field which is guaranteed unique; for older spectrum libraries use
        # Starname instead.
        # Work out which field we're using (uid or Starname)
        spectrum_matching_field = 'uid' if 'uid' in template_flux_normalised.metadata else 'Starname'
        # Look up the unique ID of this object
        object_name = template_flux_normalised.metadata[spectrum_matching_field]
        # Search for the continuum-normalised version of this same object (which will share the same uid / name)
        search_criteria = {
            spectrum_matching_field: object_name,
            'continuum_normalised': 1
        }
        continuum_normalised_spectrum_id = templates_library.search(**search_criteria)
        # Check that continuum-normalised spectrum exists and is unique
        assert len(continuum_normalised_spectrum_id) == 1, "Could not find continuum-normalised spectrum."
        # Load the continuum-normalised version
        template_continuum_normalised_arr = templates_library.open(
            ids=continuum_normalised_spectrum_id[0]['specId']
        )
        # Turn the SpectrumArray we got back into a single Spectrum
        template_continuum_normalised = template_continuum_normalised_arr.extract_item(0)
        # Now create a mock observation of this template using 4FS
        logger.info("Passing template through 4FS")
        mock_observed_template = etc_wrapper.process_spectra(
            spectra_list=((template_flux_normalised, template_continuum_normalised),)
        )
        # Loop over LRS and HRS
        for mode in mock_observed_template:
            # Loop over the spectra we simulated (there was only one!)
            for index in mock_observed_template[mode]:
                # Loop over the various SNRs we simulated (there was only one!)
                for snr in mock_observed_template[mode][index]:
                    # Create a unique ID for this arm's data
                    unique_id = hashlib.md5(os.urandom(32)).hexdigest()[:16]
                    # Import the flux- and continuum-normalised spectra separately, but give them the same ID
                    for spectrum_type in mock_observed_template[mode][index][snr]:
                        # Extract continuum-normalised mock observation
                        logger.info("Resampling {} spectrum".format(mode))
                        mock_observed = mock_observed_template[mode][index][snr][spectrum_type]
                        # Replace errors which are nans with a large value
                        mock_observed.value_errors[np.isnan(mock_observed.value_errors)] = 1000.
                        # Check for NaN values in spectrum itself
                        if not np.all(np.isfinite(mock_observed.values)):
                            print("Warning: NaN values in template <{}>".format(template_flux_normalised.metadata['Starname']))
                            mock_observed.value_errors[np.isnan(mock_observed.values)] = 1000.
                            mock_observed.values[np.isnan(mock_observed.values)] = 1.
                        # Resample template onto a logarithmic raster of fixed step
                        resampler = SpectrumResampler(mock_observed)
                        # Construct the raster for each wavelength arm
                        wavelength_arms = SpectrumProperties(mock_observed.wavelengths).wavelength_arms()
                        # Resample 4FS output for each arm onto a fixed logarithmic stride
                        for arm_count, arm in enumerate(wavelength_arms["wavelength_arms"]):
                            arm_raster, mean_pixel_width = arm
                            name = "{}_{}".format(mode, arm_count)
                            arm_info = {
                                "lambda_min": arm_raster[0],
                                "lambda_max": arm_raster[-1],
                                "lambda_step": mean_pixel_width
                            }
                            arm_raster = logarithmic_raster(lambda_min=arm_info['lambda_min'],
                                                            lambda_max=arm_info['lambda_max'],
                                                            lambda_step=arm_info['lambda_step']
                                                            )
                            # Resample 4FS output onto a fixed logarithmic step
                            mock_observed_arm = resampler.onto_raster(arm_raster)
                            # Save it into output spectrum library
                            output_library.insert(spectra=mock_observed_arm,
                                                  filenames=input_spectrum_id['filename'],
                                                  metadata_list={
                                                      "uid": unique_id,
                                                      "template_id": object_name,
                                                      "mode": mode,
                                                      "arm_name": "{}_{}".format(mode,arm_count),
                                                      "lambda_min": arm_raster[0],
                                                      "lambda_max": arm_raster[-1],
                                                      "lambda_step": mean_pixel_width
                                                  })
791a74027f2dc3fbe44b27f9c9f0523352b4d029 | 149 | py | Python | datahub/activity_feed/apps.py | Staberinde/data-hub-api | 3d0467dbceaf62a47158eea412a3dba827073300 | [
"MIT"
] | 6 | 2019-12-02T16:11:24.000Z | 2022-03-18T10:02:02.000Z | datahub/activity_feed/apps.py | Staberinde/data-hub-api | 3d0467dbceaf62a47158eea412a3dba827073300 | [
"MIT"
] | 1,696 | 2019-10-31T14:08:37.000Z | 2022-03-29T12:35:57.000Z | datahub/activity_feed/apps.py | Staberinde/data-hub-api | 3d0467dbceaf62a47158eea412a3dba827073300 | [
"MIT"
] | 9 | 2019-11-22T12:42:03.000Z | 2021-09-03T14:25:05.000Z | from django.apps import AppConfig
| 18.625 | 39 | 0.738255 |
791aafa638d97478db2d6a462067e347380d5760 | 117 | py | Python | ffmpeg_normalize/__init__.py | kostalski/ffmpeg-normalize | 2c73f47ec4369de08c1e2051af490322084fd17b | [
"MIT"
] | 3 | 2019-04-01T11:03:04.000Z | 2019-12-31T02:17:15.000Z | ffmpeg_normalize/__init__.py | kostalski/ffmpeg-normalize | 2c73f47ec4369de08c1e2051af490322084fd17b | [
"MIT"
] | 1 | 2021-04-15T18:46:45.000Z | 2021-04-15T18:46:45.000Z | ffmpeg_normalize/__init__.py | kostalski/ffmpeg-normalize | 2c73f47ec4369de08c1e2051af490322084fd17b | [
"MIT"
] | 1 | 2021-09-23T13:43:07.000Z | 2021-09-23T13:43:07.000Z | from ._ffmpeg_normalize import FFmpegNormalize
from ._media_file import MediaFile
from ._version import __version__
| 23.4 | 46 | 0.863248 |
791adee85a8db7759e5f3f2e7403b68b0df27e62 | 113 | py | Python | __init__.py | nuxeo-cps/zope2--PortalTransforms | 753f67202b016d0b07edd3bc65fd827cb39e50db | [
"BSD-3-Clause"
] | null | null | null | __init__.py | nuxeo-cps/zope2--PortalTransforms | 753f67202b016d0b07edd3bc65fd827cb39e50db | [
"BSD-3-Clause"
] | null | null | null | __init__.py | nuxeo-cps/zope2--PortalTransforms | 753f67202b016d0b07edd3bc65fd827cb39e50db | [
"BSD-3-Clause"
] | null | null | null | __revision__ = '$Id$'
from utils import HAS_ZOPE
if HAS_ZOPE:
from Products.PortalTransforms.zope import *
| 16.142857 | 48 | 0.752212 |
791be8749fa60c1fc2eb6569f7089a3ef2f48994 | 11,259 | py | Python | SpoTwillio/lib/python3.6/site-packages/twilio/rest/api/v2010/account/call/feedback.py | Natfan/funlittlethings | 80d5378b45b5c0ead725942ee50403bd057514a6 | [
"MIT"
] | 3 | 2019-11-12T07:55:51.000Z | 2020-04-01T11:19:18.000Z | SpoTwillio/lib/python3.6/site-packages/twilio/rest/api/v2010/account/call/feedback.py | Natfan/funlittlethings | 80d5378b45b5c0ead725942ee50403bd057514a6 | [
"MIT"
] | 7 | 2020-06-06T01:06:19.000Z | 2022-02-10T11:15:14.000Z | SpoTwillio/lib/python3.6/site-packages/twilio/rest/api/v2010/account/call/feedback.py | Natfan/funlittlethings | 80d5378b45b5c0ead725942ee50403bd057514a6 | [
"MIT"
] | 2 | 2019-10-20T14:54:47.000Z | 2020-06-11T07:29:37.000Z | # coding=utf-8
"""
This code was generated by
\ / _ _ _| _ _
| (_)\/(_)(_|\/| |(/_ v1.0.0
/ /
"""
from twilio.base import deserialize
from twilio.base import values
from twilio.base.instance_context import InstanceContext
from twilio.base.instance_resource import InstanceResource
from twilio.base.list_resource import ListResource
from twilio.base.page import Page
    def fetch(self):
        """
        Fetch a FeedbackInstance

        :returns: Fetched FeedbackInstance
        :rtype: twilio.rest.api.v2010.account.call.feedback.FeedbackInstance
        """
        # Delegate to the lazily-built context object.
        return self._proxy.fetch()
    def update(self, quality_score, issue=values.unset):
        """
        Update the FeedbackInstance

        :param unicode quality_score: An integer from 1 to 5
        :param FeedbackInstance.Issues issue: Issues experienced during the call

        :returns: Updated FeedbackInstance
        :rtype: twilio.rest.api.v2010.account.call.feedback.FeedbackInstance
        """
        # Delegate to the lazily-built context object; `values.unset` marks an
        # omitted optional parameter.
        return self._proxy.update(
            quality_score,
            issue=issue,
        )
def __repr__(self):
"""
Provide a friendly representation
:returns: Machine friendly representation
:rtype: str
"""
context = ' '.join('{}={}'.format(k, v) for k, v in self._solution.items())
return '<Twilio.Api.V2010.FeedbackInstance {}>'.format(context)
| 29.551181 | 101 | 0.606981 |
791fc2d140f54e02c2b4d1000be7565797957857 | 136 | py | Python | gtf2bed/__init__.py | jvfe/gtf2bed | 7ac21759498ca9495030982d2a11c2a63149a75c | [
"BSD-3-Clause"
] | 1 | 2021-04-22T09:27:35.000Z | 2021-04-22T09:27:35.000Z | gtf2bed/__init__.py | jvfe/gtf2bed | 7ac21759498ca9495030982d2a11c2a63149a75c | [
"BSD-3-Clause"
] | null | null | null | gtf2bed/__init__.py | jvfe/gtf2bed | 7ac21759498ca9495030982d2a11c2a63149a75c | [
"BSD-3-Clause"
] | null | null | null | """Top-level package for gtf2bed."""
__author__ = """Joo Vitor F. Cavalcante"""
__email__ = "jvfecav@gmail.com"
__version__ = "0.1.0"
| 22.666667 | 43 | 0.683824 |
79206dc12be47bbbc702eacb1b5f27bdf824bf1f | 2,993 | py | Python | fastapi_cloudauth/firebase.py | jleclanche/fastapi-cloudauth | 9c098f91f46d9d927e1f10b82b80340951d0b1f2 | [
"MIT"
] | null | null | null | fastapi_cloudauth/firebase.py | jleclanche/fastapi-cloudauth | 9c098f91f46d9d927e1f10b82b80340951d0b1f2 | [
"MIT"
] | null | null | null | fastapi_cloudauth/firebase.py | jleclanche/fastapi-cloudauth | 9c098f91f46d9d927e1f10b82b80340951d0b1f2 | [
"MIT"
] | null | null | null | from calendar import timegm
from datetime import datetime
from typing import Any, Dict
from fastapi import HTTPException
from pydantic import BaseModel, Field
from starlette import status
from .base import UserInfoAuth
from .messages import NOT_VERIFIED
from .verification import JWKS, ExtraVerifier
| 36.060241 | 114 | 0.636151 |
79222572360ae305c1ba2a36f8edf19a01cdcedf | 2,410 | py | Python | tests/instrumentation/sqlite_tests.py | dsanders11/opbeat_python | 4bdfe494ed4dba12550dff86366b4402613bce92 | [
"BSD-3-Clause"
] | 99 | 2015-02-27T02:21:41.000Z | 2021-02-09T15:13:25.000Z | tests/instrumentation/sqlite_tests.py | dsanders11/opbeat_python | 4bdfe494ed4dba12550dff86366b4402613bce92 | [
"BSD-3-Clause"
] | 114 | 2015-01-16T15:06:49.000Z | 2018-04-13T20:29:18.000Z | tests/instrumentation/sqlite_tests.py | dsanders11/opbeat_python | 4bdfe494ed4dba12550dff86366b4402613bce92 | [
"BSD-3-Clause"
] | 51 | 2015-01-07T12:13:56.000Z | 2019-05-06T14:16:35.000Z | import sqlite3
import mock
import opbeat.instrumentation.control
from tests.helpers import get_tempstoreclient
from tests.utils.compat import TestCase
| 38.253968 | 77 | 0.641494 |
7922c3c12c906f5e3ff236bb30e73bcdb61a9ea9 | 477 | py | Python | setup.py | samytessier/group_9_mlops | 774f69354aeb5a9ddb59eb2cf5f8460832ab21b2 | [
"MIT"
] | 1 | 2022-01-20T02:18:16.000Z | 2022-01-20T02:18:16.000Z | setup.py | samytessier/group_9_mlops | 774f69354aeb5a9ddb59eb2cf5f8460832ab21b2 | [
"MIT"
] | 1 | 2022-01-08T17:18:04.000Z | 2022-01-08T17:18:04.000Z | setup.py | samytessier/group_9_mlops | 774f69354aeb5a9ddb59eb2cf5f8460832ab21b2 | [
"MIT"
] | null | null | null | from setuptools import find_packages, setup
setup(
name='src',
packages=find_packages(),
version='0.1.0',
description='This MLOps project aims to use the Transformers framework from Hugging Face in order to tweak a pre-trained NLP model to accurately gauge the sentiment of an Amazon review (being able to guess the whether the rating of a product is positive or negative given only the text in a review).',
author='group9 DTU MLops',
license='MIT',
)
| 43.363636 | 305 | 0.742138 |
7923b39638368ab2ae741c772b643949cd865155 | 423 | py | Python | gaphor/RAAML/stpa/connectors.py | Texopolis/gaphor | 3b190620075fd413258af1e7a007b4b2167a7564 | [
"Apache-2.0"
] | 867 | 2018-01-09T00:19:09.000Z | 2022-03-31T02:49:23.000Z | gaphor/RAAML/stpa/connectors.py | burakozturk16/gaphor | 86267a5200ac4439626d35d306dbb376c3800107 | [
"Apache-2.0"
] | 790 | 2018-01-13T23:47:07.000Z | 2022-03-31T16:04:27.000Z | gaphor/RAAML/stpa/connectors.py | burakozturk16/gaphor | 86267a5200ac4439626d35d306dbb376c3800107 | [
"Apache-2.0"
] | 117 | 2018-01-09T02:24:49.000Z | 2022-03-23T08:07:42.000Z | from gaphor.diagram.connectors import Connector
from gaphor.diagram.presentation import Classified
from gaphor.RAAML.raaml import RelevantTo
from gaphor.RAAML.stpa import RelevantToItem
from gaphor.SysML.requirements.connectors import DirectedRelationshipPropertyPathConnect
| 35.25 | 88 | 0.87234 |
7923c47de0831caf8141bfde82615c01392124f5 | 1,197 | py | Python | voltagemetricspublisher/services/extractionService.py | SumudithaR/svc.voltage-metrics-publisher | 4e0418c855920d3e984acf097681e2fc8c8ec081 | [
"Apache-2.0"
] | null | null | null | voltagemetricspublisher/services/extractionService.py | SumudithaR/svc.voltage-metrics-publisher | 4e0418c855920d3e984acf097681e2fc8c8ec081 | [
"Apache-2.0"
] | null | null | null | voltagemetricspublisher/services/extractionService.py | SumudithaR/svc.voltage-metrics-publisher | 4e0418c855920d3e984acf097681e2fc8c8ec081 | [
"Apache-2.0"
] | null | null | null | from time import sleep
from gpiozero import MCP3008 # Installed in GAM 13/09/2019.
import time
import gpiozero
from ..models.rawMetricDto import RawMetricDto | 32.351351 | 71 | 0.622389 |
792592d09cfb1da8cbdd06e8e2cb4970a31ce4e6 | 553 | py | Python | data_browser/migrations/0002_auto_20200331_1842.py | me2d09/django-data-browser | 1108f714229aab8c30a27d93f264f2f26b8b0aee | [
"BSD-3-Clause"
] | null | null | null | data_browser/migrations/0002_auto_20200331_1842.py | me2d09/django-data-browser | 1108f714229aab8c30a27d93f264f2f26b8b0aee | [
"BSD-3-Clause"
] | null | null | null | data_browser/migrations/0002_auto_20200331_1842.py | me2d09/django-data-browser | 1108f714229aab8c30a27d93f264f2f26b8b0aee | [
"BSD-3-Clause"
] | null | null | null | # Generated by Django 2.0.13 on 2020-03-31 17:42
from django.db import migrations, models
import data_browser.models
| 21.269231 | 51 | 0.549729 |
7926794343cfee2a3a93c437f389f8d256dd16f9 | 320 | py | Python | Community/get_audit_info/__init__.py | spenney-bc/gateway-workflows | 0311a9224b2d53c01689eb6a9a0a593177abed63 | [
"Apache-2.0"
] | 43 | 2017-12-04T17:38:24.000Z | 2021-12-29T09:17:17.000Z | Community/get_audit_info/__init__.py | spenney-bc/gateway-workflows | 0311a9224b2d53c01689eb6a9a0a593177abed63 | [
"Apache-2.0"
] | 49 | 2017-12-07T21:02:29.000Z | 2022-02-04T22:27:16.000Z | Community/get_audit_info/__init__.py | spenney-bc/gateway-workflows | 0311a9224b2d53c01689eb6a9a0a593177abed63 | [
"Apache-2.0"
] | 82 | 2017-12-04T17:56:00.000Z | 2021-12-29T09:17:21.000Z | # Copyright 2020 BlueCat Networks. All rights reserved.
# -*- coding: utf-8 -*-
type = 'ui'
sub_pages = [
{
'name' : 'get_audit_info_page',
'title' : u'Get Audit Info',
'endpoint' : 'get_audit_info/get_audit_info_endpoint',
'description' : u'get_audit_info'
},
]
| 24.615385 | 65 | 0.56875 |
79273775aa326888e7143a25472099fb24c7a2cc | 548 | py | Python | ALDS/ALDS1_10_A.py | yu8ikmnbgt6y/MyAOJ | 474b21a2a0c25e1c1f3d6d66d2a2ea52aecaa39b | [
"Unlicense"
] | 1 | 2020-01-08T16:33:46.000Z | 2020-01-08T16:33:46.000Z | ALDS/ALDS1_10_A.py | yu8ikmnbgt6y/MyAOJ | 474b21a2a0c25e1c1f3d6d66d2a2ea52aecaa39b | [
"Unlicense"
] | null | null | null | ALDS/ALDS1_10_A.py | yu8ikmnbgt6y/MyAOJ | 474b21a2a0c25e1c1f3d6d66d2a2ea52aecaa39b | [
"Unlicense"
] | null | null | null | import sys
import io
input_txt = """
44
"""
sys.stdin = io.StringIO(input_txt)
tmp = input()
# copy the below part and paste to the submission form.
# ---------function------------
main()
# -----------------------------
sys.stdin = sys.__stdin__
| 17.125 | 56 | 0.509124 |
7927bbe2f2d0526128722c38428b7bbf96221e46 | 2,389 | py | Python | armada/tests/unit/utils/test_lint.py | One-Fine-Day/armada | 9cd71c8b55173a9c9c45bfb939d19277fabd902d | [
"Apache-2.0"
] | null | null | null | armada/tests/unit/utils/test_lint.py | One-Fine-Day/armada | 9cd71c8b55173a9c9c45bfb939d19277fabd902d | [
"Apache-2.0"
] | null | null | null | armada/tests/unit/utils/test_lint.py | One-Fine-Day/armada | 9cd71c8b55173a9c9c45bfb939d19277fabd902d | [
"Apache-2.0"
] | null | null | null | # Copyright 2017 The Armada Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
import yaml
from armada.utils import lint
| 29.8625 | 74 | 0.541231 |
7927d5d5ec363318061f6e9faac288240c333204 | 7,149 | py | Python | mliv/dgps.py | microsoft/AdversarialGMM | 7a5cd51353c8a81e16c01220b71f77e4e1102add | [
"MIT"
] | 23 | 2020-12-01T22:55:40.000Z | 2022-01-26T04:11:14.000Z | mliv/dgps.py | microsoft/AdversarialGMM | 7a5cd51353c8a81e16c01220b71f77e4e1102add | [
"MIT"
] | null | null | null | mliv/dgps.py | microsoft/AdversarialGMM | 7a5cd51353c8a81e16c01220b71f77e4e1102add | [
"MIT"
] | 10 | 2020-12-05T17:12:49.000Z | 2022-01-10T23:42:37.000Z | # Copyright (c) Microsoft Corporation.
# Licensed under the MIT License.
import numpy as np
# continuously differentiable
fn_dict_cdiff = {'2dpoly': 1, 'sigmoid': 2,
'sin': 3, 'frequent_sin': 4,
'3dpoly': 7, 'linear': 8}
# continuous but not differentiable
fn_dict_cont = {'abs': 0, 'abs_sqrt': 5, 'rand_pw': 9,
'abspos': 10, 'sqrpos': 11, 'pwlinear': 15}
# discontinuous
fn_dict_disc = {'step': 6, 'band': 12, 'invband': 13,
'steplinear': 14}
# monotone
fn_dict_monotone = {'sigmoid': 2,
'step': 6, 'linear': 8,
'abspos': 10, 'sqrpos': 11, 'pwlinear': 15}
# convex
fn_dict_convex = {'abs': 0, '2dpoly': 1, 'linear': 8,
'abspos': 10, 'sqrpos': 11}
# all functions
fn_dict = {'abs': 0, '2dpoly': 1, 'sigmoid': 2,
'sin': 3, 'frequent_sin': 4, 'abs_sqrt': 5,
'step': 6, '3dpoly': 7, 'linear': 8, 'rand_pw': 9,
'abspos': 10, 'sqrpos': 11, 'band': 12, 'invband': 13,
'steplinear': 14, 'pwlinear': 15}
| 40.619318 | 136 | 0.527207 |
7928e18542e9bd6bf82dff12dad8c28ca120e4fe | 16,097 | py | Python | tests/test_definitions/test_expectations_cfe.py | OmriBromberg/great_expectations | 60eb81ebfb08fef5d37d55c316dc962928beb165 | [
"Apache-2.0"
] | 1 | 2021-11-09T05:07:43.000Z | 2021-11-09T05:07:43.000Z | tests/test_definitions/test_expectations_cfe.py | OmriBromberg/great_expectations | 60eb81ebfb08fef5d37d55c316dc962928beb165 | [
"Apache-2.0"
] | 1 | 2021-12-07T13:06:29.000Z | 2021-12-07T13:06:29.000Z | tests/test_definitions/test_expectations_cfe.py | OmriBromberg/great_expectations | 60eb81ebfb08fef5d37d55c316dc962928beb165 | [
"Apache-2.0"
] | null | null | null | import glob
import json
import os
import random
import string
import pandas as pd
import pytest
from great_expectations.execution_engine.pandas_batch_data import PandasBatchData
from great_expectations.execution_engine.sparkdf_batch_data import SparkDFBatchData
from great_expectations.execution_engine.sqlalchemy_batch_data import (
SqlAlchemyBatchData,
)
from great_expectations.self_check.util import (
BigQueryDialect,
candidate_test_is_on_temporary_notimplemented_list_cfe,
evaluate_json_test_cfe,
get_test_validator_with_data,
mssqlDialect,
mysqlDialect,
postgresqlDialect,
sqliteDialect,
)
from tests.conftest import build_test_backends_list_cfe
from tests.test_definitions.test_expectations import tmp_dir
| 48.927052 | 118 | 0.391067 |
792b96690a5711f347a2fe1364e3eef792d1ebea | 1,393 | py | Python | corehq/apps/commtrack/resources/v0_1.py | johan--/commcare-hq | 86ee99c54f55ee94e4c8f2f6f30fc44e10e69ebd | [
"BSD-3-Clause"
] | null | null | null | corehq/apps/commtrack/resources/v0_1.py | johan--/commcare-hq | 86ee99c54f55ee94e4c8f2f6f30fc44e10e69ebd | [
"BSD-3-Clause"
] | 1 | 2022-03-12T01:03:25.000Z | 2022-03-12T01:03:25.000Z | corehq/apps/commtrack/resources/v0_1.py | johan--/commcare-hq | 86ee99c54f55ee94e4c8f2f6f30fc44e10e69ebd | [
"BSD-3-Clause"
] | null | null | null | from tastypie import fields
from corehq.apps.api.resources.v0_1 import CustomResourceMeta, DomainAdminAuthentication
from corehq.apps.products.models import Product
from corehq.apps.api.util import get_object_or_not_exist
from corehq.apps.api.resources import HqBaseResource
"""
Implementation of the CommCare Supply APIs. For more information see:
https://confluence.dimagi.com/display/lmis/API
"""
| 36.657895 | 93 | 0.740847 |
792bbfea31e2c47d9dc8a86be0bf40d5cfa67a78 | 7,381 | py | Python | spectrl/util/io.py | luigiberducci/dirl | 5f7997aea20dfb7347ebdee66de9bea4e6cd3c62 | [
"MIT"
] | null | null | null | spectrl/util/io.py | luigiberducci/dirl | 5f7997aea20dfb7347ebdee66de9bea4e6cd3c62 | [
"MIT"
] | null | null | null | spectrl/util/io.py | luigiberducci/dirl | 5f7997aea20dfb7347ebdee66de9bea4e6cd3c62 | [
"MIT"
] | null | null | null | import argparse
import os
import pathlib
import cv2
import pickle
import numpy as np
from matplotlib import pyplot as plt
from PIL import Image
from numpy import genfromtxt
def open_log_file(itno, folder):
    """Create (truncating) a log file and return a handle opened for appending.

    Parameters:
        itno: int (run number, used as the filename suffix)
        folder: str (folder to place the log in)
    """
    path = _get_prefix(folder) + 'log' + _get_suffix(itno) + '.txt'
    # Truncate any previous contents, then reopen in append mode so the
    # caller can flush incrementally.
    open(path, 'w').close()
    return open(path, 'a')
def save_object(name, object, itno, folder):
    """Pickle any pickle-able object to ``<folder>/<name><itno>.pkl``.

    Parameters:
        name: str
        object: Object (must be pickle-able)
        itno: int
        folder: str
    """
    path = _get_prefix(folder) + name + _get_suffix(itno) + '.pkl'
    with open(path, 'wb') as f:
        pickle.dump(object, f)
def load_object(name, itno, folder):
    """Load and return an object pickled by :func:`save_object`.

    Parameters:
        name: str
        itno: int
        folder: str
    """
    path = _get_prefix(folder) + name + _get_suffix(itno) + '.pkl'
    with open(path, 'rb') as f:
        return pickle.load(f)
def log_to_file(file, iter, num_transitions, reward, prob, additional_data={}):
    """Write one iteration's summary block to an open log file and flush it.

    Parameters:
        file: writable file handle
        iter: int (iteration number)
        num_transitions: int (number of simulation steps in the iteration)
        reward: float
        prob: float (satisfaction probability)
        additional_data: dict of extra key/value pairs to log
    """
    # NOTE: the mutable default is safe here because additional_data is only
    # ever read, never mutated.
    lines = [
        '**** Iteration Number {} ****\n'.format(iter),
        'Environment Steps Taken: {}\n'.format(num_transitions),
        'Reward: {}\n'.format(reward),
        'Satisfaction Probability: {}\n'.format(prob),
    ]
    lines.extend('{}: {}\n'.format(key, value)
                 for key, value in additional_data.items())
    lines.append('\n')
    file.writelines(lines)
    file.flush()
def plot_error_bar(x, data, color, label, points=False):
    """Plot a mean curve together with its error band.

    Parameters:
        x: array-like of x coordinates
        data: (3+)-tuple of np.array (mean curve, lower band, upper band, ...)
        color: matplotlib color for both the curve and the band
        label: str (legend label)
        points: bool -- if True, draw discrete error bars instead of a line
    """
    plt.subplots_adjust(bottom=0.126)
    plt.rcParams.update({'font.size': 18})
    mean, lower, upper = data[0], data[1], data[2]
    if points:
        plt.errorbar(x, mean, mean - lower, fmt='--o', color=color, label=label)
    else:
        plt.plot(x, mean, color=color, label=label)
    # The translucent band is drawn in both modes.
    plt.fill_between(x, lower, upper, color=color, alpha=0.15)
def extract_plot_data(folder, column_num, low, up, csv=False):
    '''
    Load per-run log data and aggregate it into error-bar curves.

    Parameters:
        folder: string (name of folder)
        column_num: int (column number in log.npy to use)
        low: int (lower limit on run number, inclusive)
        up: int (upper limit on run number, exclusive)
        csv: bool (forwarded to load_log_info; whether logs are csv)

    Returns:
        4-tuple of numpy arrays (mean curve, mean - std, mean + std, max over runs)

    Raises:
        ValueError: if the run range [low, up) is empty.
    '''
    log_infos = []
    for itno in range(low, up):
        # Column of interest for this run, with a leading 0 prepended so
        # every curve starts at the origin.
        log_info = np.transpose(load_log_info(itno, folder, csv=csv))[column_num]
        log_infos.append(np.append([0], log_info))

    if not log_infos:
        # Previously an empty range silently produced nan-filled output.
        raise ValueError('no runs in range [{}, {})'.format(low, up))

    # Truncate every run to the shortest one so they can be stacked.
    # (The original seeded the minimum with the magic constant 1000000,
    # which would silently truncate any run longer than that.)
    min_length = min(len(log_info) for log_info in log_infos)
    data = np.array([log_info[:min_length] for log_info in log_infos])

    curve = np.mean(data, axis=0)
    std = np.std(data, axis=0)
    max_curve = np.amax(data, axis=0)
    return curve, (curve - std), (curve + std), max_curve
# save and render current plot
# get prefix for file name
# get suffix from itno
| 28.608527 | 98 | 0.605067 |
792c98d61321846aacf5f5f89a160ce13339bfd4 | 940 | py | Python | pyqt_sql_demo/syntax_highlighter/sql.py | nshiell/pyqt-sql-demo | 9e64ba069de744f69c2ecc2eeddac5b0b9f0968a | [
"Unlicense"
] | 18 | 2018-05-14T16:27:24.000Z | 2022-02-24T06:47:45.000Z | pyqt_sql_demo/syntax_highlighter/sql.py | nshiell/pyqt-sql-demo | 9e64ba069de744f69c2ecc2eeddac5b0b9f0968a | [
"Unlicense"
] | 2 | 2020-09-11T07:56:05.000Z | 2021-03-05T14:50:36.000Z | pyqt_sql_demo/syntax_highlighter/sql.py | nshiell/pyqt-sql-demo | 9e64ba069de744f69c2ecc2eeddac5b0b9f0968a | [
"Unlicense"
] | 9 | 2019-01-16T16:03:51.000Z | 2021-03-14T01:01:55.000Z | from pygments import highlight as _highlight
from pygments.lexers import SqlLexer
from pygments.formatters import HtmlFormatter
| 33.571429 | 76 | 0.738298 |
792d78778cc9f57f44aeb718a24a94af2accc6bf | 4,337 | py | Python | raytracing.py | avigael/raytracing-example | e1e9448fdf371c401e9ada642fd0ca8ed2702609 | [
"MIT"
] | null | null | null | raytracing.py | avigael/raytracing-example | e1e9448fdf371c401e9ada642fd0ca8ed2702609 | [
"MIT"
] | null | null | null | raytracing.py | avigael/raytracing-example | e1e9448fdf371c401e9ada642fd0ca8ed2702609 | [
"MIT"
] | null | null | null | '''2D Raytracing Example using Pygame'''
import sys
from math import pi, cos, sin
import pygame
# Constants
SIZE = (600, 600)
BORDERS = [[0, 0, SIZE[0], 0], [0, 0, 0, SIZE[1]],
[0, SIZE[1], SIZE[0], SIZE[1]], [SIZE[0], 0, SIZE[0], SIZE[1]]]
WHITE = (255, 255, 255)
BLACK = (0, 0, 0)
GREEN = (0, 255, 0)
def draw_barrier():
    '''Draw every registered barrier as an anti-aliased line on the screen.'''
    # Each entry in Barrier.collection is a flat [x1, y1, x2, y2] segment.
    for x1, y1, x2, y2 in Barrier.collection:
        pygame.draw.aaline(screen, BLACK, (x1, y1), (x2, y2))
def create_map():
    '''Build the fixed wall layout, scaled to the window dimensions.'''
    width, height = SIZE
    # Segment endpoints (x1, y1, x2, y2), each expressed as fractions of the
    # window size; one Barrier is constructed per segment, in order.
    segments = (
        (width/6, height, width/6, height/2),
        (width/3, height, width/3, height/1.5),
        (width/2, height/2, width/6, height/2),
        (width/2, height/1.5, width/3, height/1.5),
        (width/1.5, height/1.5, width/1.5, height/2),
        (width/1.2, height/2, width/1.5, height/2),
        (width/1.2, height/2, width/1.2, height/1.5),
        (width/1.5, height/1.5, width/1.2, height/1.5),
        (width/3, height/6, width/3, height/3),
        (width/3, height/6, width/2, height/3),
        (width/2, height/6, width/2, height/3),
        (width/2, height/6, width/1.5, height/3),
        (width/1.5, height/6, width/1.5, height/3),
    )
    for x1, y1, x2, y2 in segments:
        Barrier(x1, y1, x2, y2)
# Initialize Screen
pygame.init()
pygame.display.set_caption("Raytracing Example")
screen = pygame.display.set_mode(SIZE)
create_map()

# Game Loop: each frame handles quit events, then rebuilds a Radar at the
# current mouse position and redraws the scene.
while True:
    for event in pygame.event.get():
        if event.type == pygame.QUIT:
            sys.exit()
    # NOTE(review): flip() runs *before* this frame's fill/draw calls, so the
    # frame being displayed is the one drawn on the previous iteration --
    # confirm this ordering is intentional.
    pygame.display.flip()
    mouse = pygame.mouse.get_pos()
    # A fresh Radar is constructed every frame at the mouse position.
    radar = Radar(mouse[0], mouse[1], 25)
    screen.fill(WHITE)
    draw_barrier()
    radar.radiate()
| 32.125926 | 78 | 0.512797 |
792f039bf3dcdae9faa7ebde493ddb3c49ba4954 | 3,041 | py | Python | preprocessed_data/UCM/Code/global_histogram_stretching.py | SaiKrishna1207/Underwater-Image-Segmentation | 78def27e577b10e6722c02807bdcfeb7ba53d760 | [
"MIT"
] | null | null | null | preprocessed_data/UCM/Code/global_histogram_stretching.py | SaiKrishna1207/Underwater-Image-Segmentation | 78def27e577b10e6722c02807bdcfeb7ba53d760 | [
"MIT"
] | null | null | null | preprocessed_data/UCM/Code/global_histogram_stretching.py | SaiKrishna1207/Underwater-Image-Segmentation | 78def27e577b10e6722c02807bdcfeb7ba53d760 | [
"MIT"
] | null | null | null | import numpy as np
| 37.54321 | 95 | 0.560671 |
792f0b4ef299a46239013a5b5a1e30079b053c00 | 1,854 | py | Python | python/test/experimental/test_tb_graph_writer.py | daniel-falk/nnabla | 3fe132ea52dc10521cc029a5d6ba8f565cf65ccf | [
"Apache-2.0"
] | 2,792 | 2017-06-26T13:05:44.000Z | 2022-03-28T07:55:26.000Z | python/test/experimental/test_tb_graph_writer.py | daniel-falk/nnabla | 3fe132ea52dc10521cc029a5d6ba8f565cf65ccf | [
"Apache-2.0"
] | 138 | 2017-06-27T07:04:44.000Z | 2022-02-28T01:37:15.000Z | python/test/experimental/test_tb_graph_writer.py | daniel-falk/nnabla | 3fe132ea52dc10521cc029a5d6ba8f565cf65ccf | [
"Apache-2.0"
] | 380 | 2017-06-26T13:23:52.000Z | 2022-03-25T16:51:30.000Z | # Copyright 2021 Sony Corporation.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pytest
import numpy as np
import nnabla as nn
import nnabla.functions as F
import nnabla.parametric_functions as PF
| 30.9 | 74 | 0.662891 |
792f6c2ed852cae02d142182deaf73d1e0349382 | 1,328 | py | Python | pokeman/coatings/resolver_attribute_methods/selective_consumer.py | wmarcuse/pokeman | 5d654c227c456a065b2fea6a0d5827bff424c703 | [
"BSD-3-Clause"
] | null | null | null | pokeman/coatings/resolver_attribute_methods/selective_consumer.py | wmarcuse/pokeman | 5d654c227c456a065b2fea6a0d5827bff424c703 | [
"BSD-3-Clause"
] | null | null | null | pokeman/coatings/resolver_attribute_methods/selective_consumer.py | wmarcuse/pokeman | 5d654c227c456a065b2fea6a0d5827bff424c703 | [
"BSD-3-Clause"
] | null | null | null | import json
import logging
LOGGER = logging.getLogger(__name__)
def on_message(self, channel, method, properties, body):
    """
    Pika delivery callback for the selective consumer.

    Only a message whose ``correlation_id`` equals the consumer's stored
    reference id is processed: its JSON payload is handed to the registered
    callback, the delivery is acknowledged and consumption is stopped. Any
    other message is ignored (and not acknowledged here). Exceptions raised
    while handling are logged, not propagated.

    :param channel: The channel object.
    :type channel: pika.channel.Channel
    :param method: basic_deliver method (carries the delivery tag).
    :type method: pika.Spec.Basic.Deliver
    :param properties: The message properties.
    :type properties: pika.Spec.BasicProperties
    :param body: The raw message body (JSON-encoded bytes).
    :type body: bytes
    """
    try:
        print('message received')
        print(properties.correlation_id)
        # Guard clause: skip messages that are not the one we are waiting for.
        if properties.correlation_id != self.correlation_id_reference:
            return
        print("SUCCEEDEEDRT")
        self.callback_method(json.loads(body), properties)
        self.acknowledge_message(method.delivery_tag)
        self.channel.stop_consuming()
    except Exception:
        LOGGER.exception("Synchronous callback method exception:")
79346eb30e63c170afbf3ea69f6c87de3e761345 | 4,100 | py | Python | mc-core/mc/data_gen/gnb_status_indication_pb2.py | copslock/o-ran_ric-app_mc | 243f8671c28596b1dc70dd295029d6151c9dd778 | [
"Apache-2.0",
"CC-BY-4.0"
] | null | null | null | mc-core/mc/data_gen/gnb_status_indication_pb2.py | copslock/o-ran_ric-app_mc | 243f8671c28596b1dc70dd295029d6151c9dd778 | [
"Apache-2.0",
"CC-BY-4.0"
] | null | null | null | mc-core/mc/data_gen/gnb_status_indication_pb2.py | copslock/o-ran_ric-app_mc | 243f8671c28596b1dc70dd295029d6151c9dd778 | [
"Apache-2.0",
"CC-BY-4.0"
] | 1 | 2021-07-07T06:43:16.000Z | 2021-07-07T06:43:16.000Z | # -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: gnb_status_indication.proto
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
import x2ap_common_types_pb2 as x2ap__common__types__pb2
DESCRIPTOR = _descriptor.FileDescriptor(
name='gnb_status_indication.proto',
package='streaming_protobufs',
syntax='proto3',
serialized_options=_b('Z1gerrit.o-ran-sc.org/r/ric-plt/streaming-protobufs'),
serialized_pb=_b('\n\x1bgnb_status_indication.proto\x12\x13streaming_protobufs\x1a\x17x2ap_common_types.proto\"W\n\x13GNBStatusIndication\x12@\n\x0bprotocolIEs\x18\x01 \x01(\x0b\x32+.streaming_protobufs.GNBStatusIndicationIEs\"h\n\x16GNBStatusIndicationIEs\x12N\n\x19id_GNBOverloadInformation\x18\x01 \x01(\x0b\x32+.streaming_protobufs.GNBOverloadInformationB3Z1gerrit.o-ran-sc.org/r/ric-plt/streaming-protobufsb\x06proto3')
,
dependencies=[x2ap__common__types__pb2.DESCRIPTOR,])
_GNBSTATUSINDICATION = _descriptor.Descriptor(
name='GNBStatusIndication',
full_name='streaming_protobufs.GNBStatusIndication',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='protocolIEs', full_name='streaming_protobufs.GNBStatusIndication.protocolIEs', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=77,
serialized_end=164,
)
_GNBSTATUSINDICATIONIES = _descriptor.Descriptor(
name='GNBStatusIndicationIEs',
full_name='streaming_protobufs.GNBStatusIndicationIEs',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='id_GNBOverloadInformation', full_name='streaming_protobufs.GNBStatusIndicationIEs.id_GNBOverloadInformation', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=166,
serialized_end=270,
)
_GNBSTATUSINDICATION.fields_by_name['protocolIEs'].message_type = _GNBSTATUSINDICATIONIES
_GNBSTATUSINDICATIONIES.fields_by_name['id_GNBOverloadInformation'].message_type = x2ap__common__types__pb2._GNBOVERLOADINFORMATION
DESCRIPTOR.message_types_by_name['GNBStatusIndication'] = _GNBSTATUSINDICATION
DESCRIPTOR.message_types_by_name['GNBStatusIndicationIEs'] = _GNBSTATUSINDICATIONIES
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
GNBStatusIndication = _reflection.GeneratedProtocolMessageType('GNBStatusIndication', (_message.Message,), {
'DESCRIPTOR' : _GNBSTATUSINDICATION,
'__module__' : 'gnb_status_indication_pb2'
# @@protoc_insertion_point(class_scope:streaming_protobufs.GNBStatusIndication)
})
_sym_db.RegisterMessage(GNBStatusIndication)
GNBStatusIndicationIEs = _reflection.GeneratedProtocolMessageType('GNBStatusIndicationIEs', (_message.Message,), {
'DESCRIPTOR' : _GNBSTATUSINDICATIONIES,
'__module__' : 'gnb_status_indication_pb2'
# @@protoc_insertion_point(class_scope:streaming_protobufs.GNBStatusIndicationIEs)
})
_sym_db.RegisterMessage(GNBStatusIndicationIEs)
DESCRIPTOR._options = None
# @@protoc_insertion_point(module_scope)
| 35.652174 | 426 | 0.79439 |
7934eac98ca8ffa62c49d783f06b030e9d1fdffb | 902 | py | Python | test_tflite_model.py | jh88/fbnet | 5bd12ab8c7f6befc61efd8619d71e710db794c2b | [
"MIT"
] | 6 | 2020-01-16T14:38:10.000Z | 2021-01-24T15:49:11.000Z | test_tflite_model.py | jh88/fbnet | 5bd12ab8c7f6befc61efd8619d71e710db794c2b | [
"MIT"
] | null | null | null | test_tflite_model.py | jh88/fbnet | 5bd12ab8c7f6befc61efd8619d71e710db794c2b | [
"MIT"
] | null | null | null | import numpy as np
import tensorflow as tf
from time import perf_counter as timer
if __name__ == '__main__':
main()
| 25.771429 | 80 | 0.649667 |
7935bc304873f87a9dd8551b03972144b4f09bb2 | 582 | py | Python | src/pymor/vectorarrays/constructions.py | JuliaBru/pymor | 46343b527267213f4279ea36f208b542ab291c4e | [
"Unlicense"
] | null | null | null | src/pymor/vectorarrays/constructions.py | JuliaBru/pymor | 46343b527267213f4279ea36f208b542ab291c4e | [
"Unlicense"
] | null | null | null | src/pymor/vectorarrays/constructions.py | JuliaBru/pymor | 46343b527267213f4279ea36f208b542ab291c4e | [
"Unlicense"
] | null | null | null | # This file is part of the pyMOR project (http://www.pymor.org).
# Copyright 2013-2016 pyMOR developers and contributors. All rights reserved.
# License: BSD 2-Clause License (http://opensource.org/licenses/BSD-2-Clause)
def cat_arrays(vector_arrays):
    """Return a new |VectorArray| that is the concatenation of the arrays in `vector_arrays`."""
    arrays = list(vector_arrays)
    # Reserve room for all entries up front, then append each array in order.
    result = arrays[0].empty(reserve=sum(len(a) for a in arrays))
    for array in arrays:
        result.append(array)
    return result
| 41.571429 | 92 | 0.74055 |
7935f670a579e41f9498d1b3fe1e3afe2409108d | 407 | py | Python | swampytodo/urls.py | mrbaboon/swampytodo | 096c39a57db0d8640e03262550dd1ed07191ecde | [
"MIT"
] | null | null | null | swampytodo/urls.py | mrbaboon/swampytodo | 096c39a57db0d8640e03262550dd1ed07191ecde | [
"MIT"
] | 2 | 2015-04-23T00:21:01.000Z | 2015-04-23T00:29:23.000Z | swampytodo/urls.py | mrbaboon/swampytodo | 096c39a57db0d8640e03262550dd1ed07191ecde | [
"MIT"
] | null | null | null | from django.conf.urls import patterns, include, url
from django.contrib import admin
# Project-level URL routes (Django 1.x ``patterns()`` syntax).
urlpatterns = patterns('',
    # Examples:
    # url(r'^$', 'swampytodo.views.home', name='home'),
    # url(r'^blog/', include('blog.urls')),
    # NOTE: these regexes are unanchored at the end (no '$' or trailing '/'),
    # so e.g. a path starting with 'monitoring' also matches r'^monitor'.
    url(r'^monitor', 'monitor.views.monitor_view', name='monitor'),
    url(r'^todo', include('todo.urls', namespace='todo')),
    url(r'^admin/', include(admin.site.urls)),
)
79371535785d9b4c1a14c7350dbe3a0fef48e07d | 3,669 | py | Python | src/pymor/playground/progressbar.py | JuliaBru/pymor | 46343b527267213f4279ea36f208b542ab291c4e | [
"Unlicense"
] | null | null | null | src/pymor/playground/progressbar.py | JuliaBru/pymor | 46343b527267213f4279ea36f208b542ab291c4e | [
"Unlicense"
] | null | null | null | src/pymor/playground/progressbar.py | JuliaBru/pymor | 46343b527267213f4279ea36f208b542ab291c4e | [
"Unlicense"
] | null | null | null | #!/usr/bin/env python
# This file is part of the pyMOR project (http://www.pymor.org).
# Copyright 2013-2016 pyMOR developers and contributors. All rights reserved.
# License: BSD 2-Clause License (http://opensource.org/licenses/BSD-2-Clause)
import sys
############################################################
#
# A progress bar that actually shows progress!
#
# Source:
# http://code.activestate.com/recipes/168639-progress-bar-class/
#
############################################################
if __name__ == '__main__':
from time import sleep
p = ProgressBar()
for i in range(0, 201):
p(1)
if i == 90:
p.max = 200
sleep(0.02)
| 35.970588 | 80 | 0.562006 |
79372371d63d8554463e6ea69f517b712b741c97 | 8,184 | py | Python | toontown/minigame/TwoDBattleMgr.py | SuperM0use24/TT-CL-Edition | fdad8394f0656ae122b687d603f72afafd220c65 | [
"MIT"
] | 1 | 2021-02-13T22:40:50.000Z | 2021-02-13T22:40:50.000Z | toontown/minigame/TwoDBattleMgr.py | SuperM0use24/TT-CL-Edition | fdad8394f0656ae122b687d603f72afafd220c65 | [
"MIT"
] | 1 | 2018-07-28T20:07:04.000Z | 2018-07-30T18:28:34.000Z | toontown/minigame/TwoDBattleMgr.py | SuperM0use24/TT-CL-Edition | fdad8394f0656ae122b687d603f72afafd220c65 | [
"MIT"
] | 3 | 2021-06-03T05:36:36.000Z | 2021-06-22T15:07:31.000Z | from panda3d.core import *
from direct.showbase.DirectObject import DirectObject
from toontown.toonbase.ToonBaseGlobal import *
from direct.directnotify import DirectNotifyGlobal
from direct.interval.IntervalGlobal import *
from toontown.battle.BattleProps import *
from toontown.battle import MovieUtil
import math
| 43.301587 | 454 | 0.660191 |
7937d7c40eebe24b6b2fbdc5b2fcb247cedd3bed | 1,211 | py | Python | lesson-12/ex1.py | alirsamar/intro-ml | 36450b26b7ea09472ccdd2a0abce51b6c3889a20 | [
"MIT"
] | null | null | null | lesson-12/ex1.py | alirsamar/intro-ml | 36450b26b7ea09472ccdd2a0abce51b6c3889a20 | [
"MIT"
] | null | null | null | lesson-12/ex1.py | alirsamar/intro-ml | 36450b26b7ea09472ccdd2a0abce51b6c3889a20 | [
"MIT"
] | null | null | null | # Explained Variance of Each PC
#### Boilerplate #################################################################
print __doc__
from time import time
import logging
import pylab as pl
import numpy as np
from sklearn.cross_validation import train_test_split
from sklearn.datasets import fetch_lfw_people
from sklearn.grid_search import GridSearchCV
from sklearn.metrics import classification_report
from sklearn.metrics import confusion_matrix
from sklearn.decomposition import RandomizedPCA
from sklearn.svm import SVC
logging.basicConfig(level=logging.INFO, format='%(asctime)s %(message)s')
lfw_people = fetch_lfw_people(min_faces_per_person=70, resize=0.4)
n_samples, h, w = lfw_people.images.shape
np.random.seed(42)
X = lfw_people.data
n_features = X.shape[1]
y = lfw_people.target
target_names = lfw_people.target_names
n_classes = target_names.shape[0]
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=42)
n_components = 150
t0 = time()
pca = RandomizedPCA(n_components=n_components, whiten=True).fit(X_train)
#### Exercise code #############################################################
print "Variance ratio:"
print pca.explained_variance_ratio_
| 26.911111 | 90 | 0.720066 |
793b2e2631986fe445d59015d41bd730fa29fbfa | 352 | py | Python | qt/__init__.py | popupcad/popupcad | d3da448260cd5cb9e05417b0a723d7f73ae4e06e | [
"MIT"
] | 19 | 2015-08-01T22:13:39.000Z | 2020-03-07T03:55:46.000Z | qt/__init__.py | CadQuery/popupcad | b0c7b406d4b288c7cb375340323bba0252aedbfb | [
"MIT"
] | 106 | 2015-07-23T19:58:01.000Z | 2019-05-14T03:46:08.000Z | qt/__init__.py | CadQuery/popupcad | b0c7b406d4b288c7cb375340323bba0252aedbfb | [
"MIT"
] | 9 | 2015-10-04T23:38:41.000Z | 2020-07-16T03:50:34.000Z | # -*- coding: utf-8 -*-
"""
Written by Daniel M. Aukes and CONTRIBUTORS
Email: danaukes<at>asu.edu.
Please see LICENSE for full license.
"""
import sys
argv = [item.lower() for item in sys.argv]
if 'qt4' in argv:
loaded = 'PyQt4'
elif 'qt5' in argv:
loaded = 'PyQt5'
elif 'pyside' in argv:
loaded = 'PySide'
else:
loaded = 'PyQt5'
| 16 | 43 | 0.636364 |
793db8fe0005397f1d556c93efea7bcd3624a441 | 3,941 | py | Python | fast_carpenter/__main__.py | lgray/fast-carpenter | c33b83d16031e5ac4b857ac1a644433b6132bb05 | [
"Apache-2.0"
] | null | null | null | fast_carpenter/__main__.py | lgray/fast-carpenter | c33b83d16031e5ac4b857ac1a644433b6132bb05 | [
"Apache-2.0"
] | null | null | null | fast_carpenter/__main__.py | lgray/fast-carpenter | c33b83d16031e5ac4b857ac1a644433b6132bb05 | [
"Apache-2.0"
] | null | null | null | """
Chop up those trees into nice little tables and dataframes
"""
from __future__ import print_function
import sys
from .help import help_stages
import fast_flow.v1 as fast_flow
import fast_curator
import logging
import atuproot.atuproot_main as atup
from .event_builder import EventBuilder
from atsge.build_parallel import build_parallel
from .utils import mkdir_p
from .version import __version__
atup.EventBuilder = EventBuilder
atup.build_parallel = build_parallel
logging.getLogger(__name__).setLevel(logging.INFO)
if __name__ == "__main__":
main()
| 39.019802 | 112 | 0.632073 |
793dc7d4ffbc96247a33db7d9520735900231242 | 1,283 | py | Python | pictures/tests.py | FredAtei/Photo-app | 5f9e72948af6a27b1c6c438fa22652c06fc4f6d4 | [
"MIT"
] | null | null | null | pictures/tests.py | FredAtei/Photo-app | 5f9e72948af6a27b1c6c438fa22652c06fc4f6d4 | [
"MIT"
] | null | null | null | pictures/tests.py | FredAtei/Photo-app | 5f9e72948af6a27b1c6c438fa22652c06fc4f6d4 | [
"MIT"
] | null | null | null | from django.test import TestCase
from .models import Image,Location,Category
# Create your tests here. | 31.292683 | 136 | 0.683554 |
793dd777651fda3f7f0226048a22a03099d8826c | 2,110 | py | Python | {{ cookiecutter.project_name|replace(' ', '_')|replace('-', '_')|lower }}/project/apps/users/views.py | digitalashes/django2.0-template | 4387c25fb94cbff4f201b279f2eefcb174658eff | [
"Apache-2.0"
] | 1 | 2018-03-13T21:16:49.000Z | 2018-03-13T21:16:49.000Z | {{ cookiecutter.project_name|replace(' ', '_')|replace('-', '_')|lower }}/project/apps/users/views.py | digitalashes/django2.0-template | 4387c25fb94cbff4f201b279f2eefcb174658eff | [
"Apache-2.0"
] | null | null | null | {{ cookiecutter.project_name|replace(' ', '_')|replace('-', '_')|lower }}/project/apps/users/views.py | digitalashes/django2.0-template | 4387c25fb94cbff4f201b279f2eefcb174658eff | [
"Apache-2.0"
] | null | null | null | {%- if cookiecutter.use_allauth == "y" and cookiecutter.use_rest == "y" %}
from django.contrib.auth import logout as auth_logout
from django.core.exceptions import ObjectDoesNotExist
from django.utils.translation import ugettext_lazy as _
from rest_auth.app_settings import create_token
from rest_auth.registration.views import RegisterView as RegisterViewBase
from rest_auth.views import PasswordChangeView as BasePasswordChangeView
from rest_framework import status
from rest_framework.permissions import AllowAny, IsAuthenticated
from rest_framework.response import Response
from rest_framework.views import APIView
from users.jwt import jwt_response_payload_handler
registration = RegisterApiView.as_view()
logout = LogoutApiView.as_view()
password_change = PasswordChangeApiView.as_view()
{%- endif %}
| 27.763158 | 93 | 0.734597 |
793e098cac69c0e739d368a1fa2b5c6d69bbe98f | 4,519 | py | Python | pyperform/tools.py | timgates42/pyperform | 97d87e8b9ddb35bd8f2a6782965fd7735ab0349f | [
"MIT"
] | 250 | 2015-01-03T10:15:26.000Z | 2022-03-31T19:43:37.000Z | pyperform/tools.py | timgates42/pyperform | 97d87e8b9ddb35bd8f2a6782965fd7735ab0349f | [
"MIT"
] | 4 | 2015-01-23T00:19:45.000Z | 2015-10-29T17:17:46.000Z | pyperform/tools.py | timgates42/pyperform | 97d87e8b9ddb35bd8f2a6782965fd7735ab0349f | [
"MIT"
] | 14 | 2015-01-17T16:23:04.000Z | 2021-07-15T10:59:53.000Z | __author__ = 'calvin'
import re
import sys
from math import log10
# Python 2/3 compatibility shim: alias `range` to `xrange` on Python 2.
# Use `sys.version_info` (a tuple) rather than string-indexing `sys.version`,
# which would misbehave for any major version >= 10.
if sys.version_info[0] >= 3:
    pass
else:
    range = xrange
classdef_regex = re.compile(r"\S*def .*#!|class .*#!")
tagged_line_regex = re.compile(r".*#!")
def convert_time_units(t):
    """ Convert time in seconds into reasonable time units.

    Args:
        t (float): a non-negative duration in seconds.

    Returns:
        str: the duration scaled and formatted with three decimals and one
            of the units ns, us, ms or s.
    """
    if t == 0:
        return '0 s'
    order = log10(t)
    # Bucket boundaries match the original behavior, except that any value
    # below a microsecond now falls into 'ns'. The old test `-9 < order < -6`
    # left `factor`/`time_units` undefined (UnboundLocalError) for t <= 1e-9.
    if order < -6:
        time_units = 'ns'
        factor = 1000000000
    elif -6 <= order < -3:
        time_units = 'us'
        factor = 1000000
    elif -3 <= order < -1:
        time_units = 'ms'
        factor = 1000.
    else:
        # -1 <= order (tenths of a second and up)
        time_units = 's'
        factor = 1
    return "{:.3f} {}".format(factor * t, time_units)
def globalize_indentation(src):
    """ Strip the indentation level so the code runs in the global scope.

    The indentation of the first line is taken as the common indent and
    removed from every line.

    Args:
        src (str): source code of a (possibly indented) block.

    Returns:
        str: the de-indented source, newline-terminated.
    """
    lines = src.splitlines()
    if not lines:
        # empty source: nothing to de-indent (the original raised IndexError)
        return '\n'
    indent = len(lines[0]) - len(lines[0].strip(' '))
    return '\n'.join(line[indent:] for line in lines) + '\n'
def remove_decorators(src):
    """ Remove decorators from the source code """
    # Walk the stripped source once, dropping any `@...Benchmark...`
    # decorator line and, for multi-line decorators, every following line
    # up to and including the one that closes the parenthesis.
    kept_lines = []
    skipping = False
    for raw_line in src.strip().splitlines():
        stripped = raw_line.strip()
        is_benchmark_decorator = stripped.startswith('@') and 'Benchmark' in stripped
        if is_benchmark_decorator or skipping:
            # keep skipping until a line ends the decorator call
            skipping = not stripped.endswith(')')
            continue
        kept_lines.append(raw_line)
    return '\n'.join(kept_lines)
| 30.741497 | 107 | 0.548351 |
793e3ac3dcb05f0d0810a86209b05739d4ea782a | 7,522 | py | Python | dictify.py | Dharma-Sagar/dictify | c76713feaf45670b245ed7e7feb894c12dffb9cd | [
"Apache-2.0"
] | null | null | null | dictify.py | Dharma-Sagar/dictify | c76713feaf45670b245ed7e7feb894c12dffb9cd | [
"Apache-2.0"
] | null | null | null | dictify.py | Dharma-Sagar/dictify | c76713feaf45670b245ed7e7feb894c12dffb9cd | [
"Apache-2.0"
] | null | null | null | from collections import defaultdict
from pathlib import Path
import re
import yaml
import json
from botok import Text
import pyewts
conv = pyewts.pyewts()
def dictify_text(string, is_split=False, selection_yaml='data/dictionaries/dict_cats.yaml', expandable=True, mode='en_bo'):
    """
    takes segmented text and finds entries from dictionaries

    :param string: segmented text to be processed; either a whitespace
        separated string, or an iterable of words when `is_split` is True
    :param is_split: when True, `string` is treated as an already-split
        iterable of words instead of a space-separated string
    :param selection_yaml: path of the category YAML passed to the
        definition filter; add None or "" to prevent selection
    :param expandable: will segment definitions into senses if True, not if False
    :param mode: language-pair mode forwarded to `select_defs`
    :return: list of tuples containing the word and a dict containing the definitions(selected or not) and an url
    """
    # tokenize: keep only non-empty words, each paired with an empty dict
    # that will receive the 'defs' and 'url' keys below
    words = []
    if is_split:
        for w in string:
            if w:
                words.append((w, {}))
    else:
        string = string.replace('\n', ' ')
        for w in string.split(' '):
            if w:
                words.append((w, {}))

    # `load_dicts` is defined elsewhere in this module; it returns a
    # defaultdict-like mapping from lemma to raw dictionary entries
    dicts = load_dicts()

    for num, word in enumerate(words):
        # NOTE(review): rstrip('') strips nothing — the argument was likely
        # meant to be a trailing tsheg/punctuation mark; confirm intent.
        lemma = word[0].rstrip('')
        defs = dicts[lemma]

        # filter definitions by the categories declared in the YAML file
        if selection_yaml:
            defs = select_defs(defs, yaml_path=selection_yaml, mode=mode)

        # split each entry into senses; defs maps language code -> [meta, entry]
        if expandable:
            if defs and 'en' in defs:
                entry_en = defs['en'][1]
                defs['en'][1] = split_in_senses(entry_en, lang='en')
            if defs and 'bo' in defs:
                entry_bo = defs['bo'][1]
                defs['bo'][1] = split_in_senses(entry_bo, lang='bo')

        words[num][1]['defs'] = defs

        # external lookup url for the lemma
        url = gen_link(lemma)
        words[num][1]['url'] = url

    return words
if __name__ == '__main__':
    # Batch mode: dictify every .txt file in ./input and write the result
    # as JSON into ./output under the same file name.
    for f in Path('input').glob('*.txt'):
        dump = f.read_text(encoding='utf-8')
        out = dictify_text(dump, expandable=True)
        out_f = Path('output') / f.name
        out_f.write_text(json.dumps(out, ensure_ascii=False, indent=4))

# __all__ must contain *names* (strings); listing the function object itself
# made `from dictify import *` raise a TypeError.
__all__ = ['dictify_text']
| 37.237624 | 123 | 0.534964 |
f700e260a7d6b3f4dc9cdfd4df281f246d308a20 | 2,504 | py | Python | tests/test_validators.py | fakeezz/edipy | 00c125621201e7290add135240c131c22feb3a72 | [
"MIT"
] | 1 | 2018-05-15T18:27:31.000Z | 2018-05-15T18:27:31.000Z | tests/test_validators.py | fakeezz/edipy | 00c125621201e7290add135240c131c22feb3a72 | [
"MIT"
] | null | null | null | tests/test_validators.py | fakeezz/edipy | 00c125621201e7290add135240c131c22feb3a72 | [
"MIT"
] | 2 | 2020-12-25T16:37:56.000Z | 2021-06-22T13:13:18.000Z | # coding: utf-8
import pytest
from edipy import fields, validators, exceptions
def test_throws_exception_when_regex_is_invalid():
    # An unbalanced pattern such as ")" must be rejected at field creation.
    with pytest.raises(ValueError):
        fields.String(5, validators=[validators.Regex(")")])
| 33.837838 | 76 | 0.69369 |
f7013f89ddf7249cb8c21753c974a4e817c0eaa2 | 45,183 | py | Python | archetypal/schedule.py | brunomarct/archetypal | ce8daf4e18ef3ec92967e5d6837b392199caf83b | [
"MIT"
] | null | null | null | archetypal/schedule.py | brunomarct/archetypal | ce8daf4e18ef3ec92967e5d6837b392199caf83b | [
"MIT"
] | null | null | null | archetypal/schedule.py | brunomarct/archetypal | ce8daf4e18ef3ec92967e5d6837b392199caf83b | [
"MIT"
] | null | null | null | ################################################################################
# Module: schedule.py
# Description: Functions for handling conversion of EnergyPlus schedule objects
# License: MIT, see full license in LICENSE.txt
# Web: https://github.com/samuelduchesne/archetypal
################################################################################
import functools
import io
import logging as lg
from datetime import datetime, timedelta
import archetypal
import numpy as np
import pandas as pd
from archetypal import log
    def get_schedule_type_limits_data(self, sch_name=None):
        """Returns Schedule Type Limits data from schedule name.

        Args:
            sch_name (str): schedule name; defaults to `self.schName`.

        Returns:
            (tuple): (lower_limit, upper_limit, numeric_type, unit_type),
                or four empty strings when the schedule object carries no
                Schedule_Type_Limits_Name field.
        """
        if sch_name is None:
            sch_name = self.schName

        schedule_values = self.idf.get_schedule_data_by_name(sch_name)
        try:
            schedule_limit_name = schedule_values.Schedule_Type_Limits_Name
        except:
            # this schedule is probably a 'Schedule:Week:Daily' which does
            # not have a Schedule_Type_Limits_Name field
            # NOTE(review): bare except kept for legacy behavior; the eppy
            # attribute lookup failure type should be confirmed and narrowed.
            return '', '', '', ''
        else:
            lower_limit, upper_limit, numeric_type, unit_type = \
                self.idf.get_schedule_type_limits_data_by_name(
                    schedule_limit_name)

            # cache the unit on the instance; when the unit is unknown fall
            # back to the numeric type ("continuous"/"discrete")
            self.unit = unit_type
            if self.unit == "unknown":
                self.unit = numeric_type

            return lower_limit, upper_limit, numeric_type, unit_type
def get_schedule_type(self, sch_name=None):
"""Return the schedule type"""
if sch_name is None:
sch_name = self.schName
schedule_values = self.idf.get_schedule_data_by_name(sch_name)
sch_type = schedule_values.fieldvalues[0]
return sch_type
def start_date(self):
"""The start date of the schedule. Satisfies `startDayOfTheWeek`"""
import calendar
c = calendar.Calendar(firstweekday=self.startDayOfTheWeek)
start_date = c.monthdatescalendar(self.year, 1)[0][0]
return datetime(start_date.year, start_date.month, start_date.day)
def get_interval_day_ep_schedule_values(self, sch_name=None):
"""'Schedule:Day:Interval"""
if sch_name is None:
sch_name = self.schName
values = self.idf.getobject('Schedule:Day:Interval'.upper(), sch_name)
lower_limit, upper_limit, numeric_type, unit_type = \
self.get_schedule_type_limits_data(sch_name)
number_of_day_sch = int((len(values.fieldvalues) - 3) / 2)
hourly_values = np.arange(24)
start_hour = 0
for i in range(number_of_day_sch):
value = float(values['Value_Until_Time_{}'.format(i + 1)])
until_time = [int(s.strip()) for s in
values['Time_{}'.format(i + 1)].split(":") if
s.strip().isdigit()]
end_hour = int(until_time[0] + until_time[1] / 60)
for hour in range(start_hour, end_hour):
hourly_values[hour] = value
start_hour = end_hour
if numeric_type.strip().lower() == "discrete":
hourly_values = hourly_values.astype(int)
return hourly_values
def get_hourly_day_ep_schedule_values(self, sch_name=None):
"""'Schedule:Day:Hourly'"""
if sch_name is None:
sch_name = self.schName
values = self.idf.getobject('Schedule:Day:Hourly'.upper(), sch_name)
fieldvalues_ = np.array(values.fieldvalues[3:])
return fieldvalues_
    def get_compact_weekly_ep_schedule_values(self, sch_name=None,
                                              start_date=None, index=None):
        """'schedule:week:compact'

        Builds one week (168 hourly values) by applying each DayType_List
        field of the weekly object to the matching days of the week, then
        filling those days from the referenced day schedules.

        Args:
            sch_name (str): schedule name; defaults to `self.schName`.
            start_date (datetime): first day of the week to build.
            index (pd.DatetimeIndex): optional pre-built hourly index.
        """
        if start_date is None:
            start_date = self.startDate
        if index is None:
            idx = pd.date_range(start=start_date, periods=168, freq='1H')
            slicer_ = pd.Series([False] * (len(idx)), index=idx)
        else:
            slicer_ = pd.Series([False] * (len(index)), index=index)

        if sch_name is None:
            sch_name = self.schName
        values = self.idf.getobject('schedule:week:compact'.upper(), sch_name)

        weekly_schedules = pd.Series([0] * len(slicer_), index=slicer_.index)
        # update last day of schedule
        if self.count == 0:
            self.schType = values.key
            self.endHOY = 168

        # each entry is a (DayType_List_i, ScheduleDay_Name_i) field pair
        num_of_daily_schedules = int(len(values.fieldvalues[2:]) / 2)

        for i in range(num_of_daily_schedules):
            day_type = values['DayType_List_{}'.format(i + 1)].lower()
            # `how` selects the hours belonging to this day type; `slicer_`
            # accumulates which hours have already been assigned
            how = self.field_set(day_type, slicer_)
            if not weekly_schedules.loc[how].empty:
                # Loop through days and replace with day:schedule values
                days = []
                for name, day in weekly_schedules.loc[how].groupby(pd.Grouper(
                        freq='D')):
                    if not day.empty:
                        ref = values.get_referenced_object(
                            "ScheduleDay_Name_{}".format(i + 1))
                        day.loc[:] = self.get_schedule_values(
                            sch_name=ref.Name, sch_type=ref.key)
                        days.append(day)
                new = pd.concat(days)
                # mark the hours just filled as "used"
                slicer_.update(
                    pd.Series([True] * len(new.index), index=new.index))
                slicer_ = slicer_.apply(lambda x: x == True)
                weekly_schedules.update(new)
            else:
                return weekly_schedules.values
        return weekly_schedules.values
def get_daily_weekly_ep_schedule_values(self, sch_name=None):
"""'schedule:week:daily'"""
if sch_name is None:
sch_name = self.schName
values = self.idf.getobject('schedule:week:daily'.upper(), sch_name)
# 7 list for 7 days of the week
hourly_values = []
for day in ['Monday', 'Tuesday', 'Wednesday', 'Thursday',
'Friday', 'Saturday', 'Sunday']:
ref = values.get_referenced_object(
'{}_ScheduleDay_Name'.format(day))
h = self.get_schedule_values(sch_name=ref.Name, sch_type=ref.key)
hourly_values.append(h)
hourly_values = np.array(hourly_values)
# shift days earlier by self.startDayOfTheWeek
hourly_values = np.roll(hourly_values, -self.startDayOfTheWeek, axis=0)
return hourly_values.ravel()
def get_list_day_ep_schedule_values(self, sch_name=None):
"""'schedule:day:list'"""
if sch_name is None:
sch_name = self.schName
values = self.idf.getobject('schedule:day:list'.upper(), sch_name)
lower_limit, upper_limit, numeric_type, unit_type = \
self.get_schedule_type_limits_data(sch_name)
import pandas as pd
freq = int(values['Minutes_per_Item']) # Frequency of the values
num_values = values.fieldvalues[5:] # List of values
method = values['Interpolate_to_Timestep'] # How to resample
# fill a list of available values and pad with zeros (this is safer
# but should not occur)
all_values = np.arange(int(24 * 60 / freq))
for i in all_values:
try:
all_values[i] = num_values[i]
except:
all_values[i] = 0
# create a fake index to help us with the resampling
index = pd.date_range(start=self.startDate,
periods=(24 * 60) / freq,
freq='{}T'.format(freq))
series = pd.Series(all_values, index=index)
# resample series to hourly values and apply resampler function
series = series.resample('1H').apply(_how(method))
return series.values
def get_constant_ep_schedule_values(self, sch_name=None):
"""'schedule:constant'"""
if sch_name is None:
sch_name = self.schName
values = self.idf.getobject('schedule:constant'.upper(), sch_name)
lower_limit, upper_limit, numeric_type, unit_type = \
self.get_schedule_type_limits_data(sch_name)
hourly_values = np.arange(8760)
value = float(values['Hourly_Value'])
for hour in hourly_values:
hourly_values[hour] = value
if numeric_type.strip().lower() == 'discrete':
hourly_values = hourly_values.astype(int)
return hourly_values
def get_file_ep_schedule_values(self, sch_name=None):
"""'schedule:file'"""
if sch_name is None:
sch_name = self.schName
values = self.idf.getobject('schedule:file'.upper(), sch_name)
lower_limit, upper_limit, numeric_type, unit_type = \
self.get_schedule_type_limits_data(sch_name)
filename = values['File_Name']
column = values['Column_Number']
rows = values['Rows_to_Skip_at_Top']
hours = values['Number_of_Hours_of_Data']
sep = values['Column_Separator']
interp = values['Interpolate_to_Timestep']
import pandas as pd
import os
idfdir = os.path.dirname(self.idf.idfname)
file = os.path.join(idfdir, filename)
delimeter = _separator(sep)
skip_rows = int(rows) - 1 # We want to keep the column
col = [int(column) - 1] # zero-based
values = pd.read_csv(file, delimiter=delimeter, skiprows=skip_rows,
usecols=col)
return values.iloc[:, 0].values
    def get_compact_ep_schedule_values(self, sch_name=None):
        """'schedule:compact'

        Expands a compact schedule into 8760 hourly values by walking its
        fields as a state machine of Through / For / Interpolate / Until /
        Value clauses. `slicer_` tracks which hours have been assigned and
        the three *_conditions masks are combined to select the hours that
        each Value applies to.
        """
        if sch_name is None:
            sch_name = self.schName
        values = self.idf.getobject('schedule:compact'.upper(), sch_name)
        lower_limit, upper_limit, numeric_type, unit_type = \
            self.get_schedule_type_limits_data(sch_name)

        field_sets = ['through', 'for', 'interpolate', 'until', 'value']
        fields = values.fieldvalues[3:]

        index = pd.date_range(start=self.startDate, periods=8760, freq='H')
        zeros = np.zeros(len(index))

        slicer_ = pd.Series([False] * len(index), index=index)
        series = pd.Series(zeros, index=index)

        from_day = self.startDate
        ep_from_day = datetime(self.year, 1, 1)
        from_time = '00:00'
        how_interpolate = None
        for field in fields:
            if any([spe in field.lower() for spe in field_sets]):
                f_set, hour, minute, value = self.field_interpreter(field)

                if f_set.lower() == 'through':
                    # main condition. All sub-conditions must obey a
                    # `Through` condition

                    # First, initialize the slice (all False for now)
                    through_conditions = self.invalidate_condition(series)

                    # reset from_time
                    from_time = '00:00'

                    # Prepare ep_to_day variable
                    ep_to_day = self.date_field_interpretation(value) + \
                                timedelta(days=1)

                    # Calculate Timedelta in days
                    days = (ep_to_day - ep_from_day).days
                    # Add timedelta to start_date
                    to_day = from_day + timedelta(days=days) + timedelta(
                        hours=-1)

                    # slice the conditions with the range and apply True
                    through_conditions.loc[from_day:to_day] = True

                    from_day = to_day + timedelta(hours=1)
                    ep_from_day = ep_to_day
                elif f_set.lower() == 'for':
                    # slice specific days
                    # reset from_time
                    from_time = '00:00'

                    for_condition = self.invalidate_condition(series)
                    values = value.split()
                    if len(values) > 1:
                        # if multiple `For`. eg.: For: Weekends Holidays,
                        # Combine both conditions
                        for value in values:
                            if value.lower() == 'allotherdays':
                                # Apply condition to slice
                                how = self.field_set(value, slicer_)
                                # Reset though condition
                                through_conditions = how
                                for_condition = how
                            else:
                                how = self.field_set(value, slicer_)
                                for_condition.loc[how] = True
                    elif value.lower() == 'allotherdays':
                        # Apply condition to slice
                        how = self.field_set(value, slicer_)
                        # Reset though condition
                        through_conditions = how
                        for_condition = how
                    else:
                        # Apply condition to slice
                        how = self.field_set(value)
                        for_condition.loc[how] = True

                    # Combine the for_condition with all_conditions
                    all_conditions = through_conditions & for_condition

                    # update in memory slice
                    # self.sliced_day_.loc[all_conditions] = True
                elif 'interpolate' in f_set.lower():
                    # we need to upsample to series to 8760 * 60 values
                    new_idx = pd.date_range(start=self.startDate,
                                            periods=525600, closed='left',
                                            freq='T')
                    series = series.resample('T').pad()
                    series = series.reindex(new_idx)
                    series.fillna(method='pad', inplace=True)
                    through_conditions = through_conditions.resample('T').pad()
                    through_conditions = through_conditions.reindex(new_idx)
                    through_conditions.fillna(method='pad', inplace=True)
                    for_condition = for_condition.resample('T').pad()
                    for_condition = for_condition.reindex(new_idx)
                    for_condition.fillna(method='pad', inplace=True)
                    how_interpolate = value.lower()
                elif f_set.lower() == 'until':
                    until_condition = self.invalidate_condition(series)
                    if series.index.freq.name == 'T':
                        # minute-resolution series (after an Interpolate)
                        # until_time = str(int(hour) - 1) + ':' + minute
                        until_time = timedelta(hours=int(hour),
                                               minutes=int(minute)) - timedelta(
                            minutes=1)

                    else:
                        until_time = str(int(hour) - 1) + ':' + minute

                    until_condition.loc[until_condition.between_time(from_time,
                                                                     str(
                                                                         until_time)).index] = True
                    all_conditions = for_condition & through_conditions & \
                                     until_condition

                    from_time = str(int(hour)) + ':' + minute
                elif f_set.lower() == 'value':
                    # If the therm `Value: ` field is used, we will catch it
                    # here.
                    # update in memory slice
                    slicer_.loc[all_conditions] = True
                    series[all_conditions] = value
                else:
                    # Do something here before looping to the next Field
                    pass
            else:
                # If the term `Value: ` is not used; the variable is simply
                # passed in the Field
                value = float(field)
                series[all_conditions] = value

                # update in memory slice
                slicer_.loc[all_conditions] = True

        if how_interpolate:
            # minute-resolution series: average back down to hourly values
            return series.resample('H').mean().values
        else:
            return series.values
    def field_interpreter(self, field):
        """Parse a single 'Schedule:Compact' field into its components.

        Deals with a Field-Set keyword (Through, For, Interpolate, Until,
        Value) and returns the parsed parts.

        Args:
            field (str): one raw field of the compact schedule.

        Returns:
            (tuple): (f_set, hour, minute, value); parts that do not apply
                to the given keyword are None.

        Raises:
            NotImplementedError: when the field cannot be parsed.
        """
        if 'through' in field.lower():
            # deal with through
            if ':' in field.lower():
                # parse colon
                f_set, statement = field.split(':')
                hour = None
                minute = None
                value = statement.strip()
            else:
                msg = 'The schedule "{sch}" contains a Field ' \
                      'that is not understood: "{field}"'.format(
                    sch=self.schName, field=field)
                raise NotImplementedError(msg)
        elif 'for' in field.lower():
            if ':' in field.lower():
                # parse colon
                f_set, statement = field.split(':')
                value = statement.strip()
                hour = None
                minute = None
            else:
                # parse without a colon
                msg = 'The schedule "{sch}" contains a Field ' \
                      'that is not understood: "{field}"'.format(
                    sch=self.schName, field=field)
                raise NotImplementedError(msg)
        elif 'interpolate' in field.lower():
            msg = 'The schedule "{sch}" contains sub-hourly values (' \
                  'Field-Set="{field}"). The average over the hour is ' \
                  'taken'.format(sch=self.schName, field=field)
            log(msg, lg.WARNING)
            f_set, value = field.split(':')
            hour = None
            minute = None
        elif 'until' in field.lower():
            if ':' in field.lower():
                # parse colon
                try:
                    # expected shape: "Until: HH:MM"
                    f_set, hour, minute = field.split(':')
                    hour = hour.strip()  # remove trailing spaces
                    minute = minute.strip()  # remove trailing spaces
                    value = None
                except:
                    # fallback for fields missing the "Until" prefix part
                    # NOTE(review): bare except kept for legacy behavior
                    f_set = 'until'
                    hour, minute = field.split(':')
                    hour = hour[-2:].strip()
                    minute = minute.strip()
                    value = None
            else:
                msg = 'The schedule "{sch}" contains a Field ' \
                      'that is not understood: "{field}"'.format(
                    sch=self.schName, field=field)
                raise NotImplementedError(msg)
        elif 'value' in field.lower():
            if ':' in field.lower():
                # parse colon
                f_set, statement = field.split(':')
                value = statement.strip()
                hour = None
                minute = None
            else:
                msg = 'The schedule "{sch}" contains a Field ' \
                      'that is not understood: "{field}"'.format(
                    sch=self.schName, field=field)
                raise NotImplementedError(msg)
        else:
            # deal with the data value
            f_set = field
            hour = None
            minute = None
            value = field[len(field) + 1:].strip()

        return f_set, hour, minute, value
    def get_yearly_ep_schedule_values(self, sch_name=None):
        """'schedule:year'

        Builds the full 8760-hour series by expanding each referenced
        weekly schedule over its Start/End month-day period.
        """
        # first week
        start_date = self.startDate
        idx = pd.date_range(start=start_date, periods=8760, freq='1H')
        hourly_values = pd.Series([0] * 8760, index=idx)

        # update last day of schedule
        self.endHOY = 8760

        if sch_name is None:
            sch_name = self.schName

        values = self.idf.getobject('schedule:year'.upper(), sch_name)

        # generate weekly schedules
        # each weekly period uses five fields: name + start/end month/day
        num_of_weekly_schedules = int(len(values.fieldvalues[3:]) / 5)

        for i in range(num_of_weekly_schedules):
            ref = values.get_referenced_object(
                'ScheduleWeek_Name_{}'.format(i + 1))

            start_month = values['Start_Month_{}'.format(i + 1)]
            end_month = values['End_Month_{}'.format(i + 1)]
            start_day = values['Start_Day_{}'.format(i + 1)]
            end_day = values['End_Day_{}'.format(i + 1)]

            start = datetime.strptime(
                '{}/{}/{}'.format(self.year, start_month, start_day),
                '%Y/%m/%d')
            end = datetime.strptime(
                '{}/{}/{}'.format(self.year, end_month, end_day),
                '%Y/%m/%d')
            days = (end - start).days + 1

            end_date = start_date + timedelta(days=days) + timedelta(hours=23)
            how = pd.IndexSlice[start_date:end_date]

            weeks = []
            for name, week in hourly_values.loc[how].groupby(
                    pd.Grouper(freq='168H')):
                if not week.empty:
                    try:
                        week.loc[:] = self.get_schedule_values(
                            sch_name=ref.Name, start_date=week.index[0],
                            index=week.index, sch_type=ref.key)
                    except ValueError:
                        # last (partial) week: truncate the weekly values
                        week.loc[:] = self.get_schedule_values(
                            ref.Name, week.index[0])[0:len(week)]
                    finally:
                        weeks.append(week)
            new = pd.concat(weeks)
            hourly_values.update(new)
            start_date += timedelta(days=days)

        return hourly_values.values
def get_schedule_values(self, sch_name=None, start_date=None, index=None,
sch_type=None):
"""Main function that returns the schedule values
Args:
sch_type:
index:
start_date:
"""
if sch_name is None:
sch_name = self.schName
if sch_type is None:
schedule_values = self.idf.get_schedule_data_by_name(sch_name)
self.schType = schedule_values.key.upper()
sch_type = self.schType
if self.count == 0:
# This is the first time, get the schedule type and the type limits.
self.schTypeLimitsName = self.get_schedule_type_limits_name()
self.count += 1
if sch_type.upper() == "schedule:year".upper():
hourly_values = self.get_yearly_ep_schedule_values(
sch_name)
elif sch_type.upper() == "schedule:day:interval".upper():
hourly_values = self.get_interval_day_ep_schedule_values(
sch_name)
elif sch_type.upper() == "schedule:day:hourly".upper():
hourly_values = self.get_hourly_day_ep_schedule_values(
sch_name)
elif sch_type.upper() == "schedule:day:list".upper():
hourly_values = self.get_list_day_ep_schedule_values(
sch_name)
elif sch_type.upper() == "schedule:week:compact".upper():
hourly_values = self.get_compact_weekly_ep_schedule_values(
sch_name, start_date, index)
elif sch_type.upper() == "schedule:week:daily".upper():
hourly_values = self.get_daily_weekly_ep_schedule_values(
sch_name)
elif sch_type.upper() == "schedule:constant".upper():
hourly_values = self.get_constant_ep_schedule_values(
sch_name)
elif sch_type.upper() == "schedule:compact".upper():
hourly_values = self.get_compact_ep_schedule_values(
sch_name)
elif sch_type.upper() == "schedule:file".upper():
hourly_values = self.get_file_ep_schedule_values(
sch_name)
else:
log('Archetypal does not support "{}" currently'.format(
self.schType), lg.WARNING)
hourly_values = []
return hourly_values
def is_schedule(self, sch_name):
"""Returns True if idfobject is one of 'schedule_types'"""
if sch_name.upper() in self.idf.schedules_dict:
return True
else:
return False
    def to_year_week_day(self):
        """convert a Schedule Class to the 'Schedule:Year',
        'Schedule:Week:Daily' and 'Schedule:Day:Hourly' representation

        The 8760 hourly values are de-duplicated into unique day profiles,
        the days into unique week profiles, and consecutive runs of the same
        week become the periods of the yearly schedule.

        Returns:
            'Schedule:Year', list of ['Schedule:Week:Daily'],
            list of ['Schedule:Day:Hourly']
        """
        full_year = np.array(self.all_values)  # array of shape (8760,)
        values = full_year.reshape(-1, 24)  # shape (365, 24)

        # create unique days
        unique_days, nds = np.unique(values, axis=0, return_inverse=True)

        ep_days = []
        dict_day = {}
        count_day = 0
        for unique_day in unique_days:
            name = 'd_' + self.schName + '_' + '%03d' % count_day
            # make sure the generated name is unique across all schedules
            name, count_day = archetypal.check_unique_name('d', count_day,
                                                           name,
                                                           archetypal.settings.unique_schedules,
                                                           suffix=True)
            dict_day[name] = unique_day

            archetypal.settings.unique_schedules.append(name)

            # Create idf_objects for schedule:day:hourly
            ep_day = self.idf.add_object(
                ep_object='Schedule:Day:Hourly'.upper(),
                save=False,
                **dict(Name=name,
                       Schedule_Type_Limits_Name=self.schType,
                       **{'Hour_{}'.format(i + 1): unique_day[i]
                          for i in range(24)})
            )
            ep_days.append(ep_day)

        # create unique weeks from unique days
        # only the first 364 days (52 full weeks) are considered here
        unique_weeks, nwsi, nws, count = np.unique(
            full_year[:364 * 24, ...].reshape(-1, 168), return_index=True,
            axis=0, return_inverse=True, return_counts=True)

        # Appending unique weeks in dictionary with name and values of weeks as
        # keys
        # {'name_week': {'dayName':[]}}
        dict_week = {}
        count_week = 0
        for unique_week in unique_weeks:
            week_id = 'w_' + self.schName + '_' + '%03d' % count_week
            week_id, count_week = archetypal.check_unique_name('w',
                                                               count_week,
                                                               week_id,
                                                               archetypal.settings.unique_schedules,
                                                               suffix=True)
            archetypal.settings.unique_schedules.append(week_id)

            dict_week[week_id] = {}
            for i in list(range(0, 7)):
                # match each day slice of the week back to its unique day name
                day_of_week = unique_week[..., i * 24:(i + 1) * 24]
                for key in dict_day:
                    if (day_of_week == dict_day[key]).all():
                        dict_week[week_id]['day_{}'.format(i)] = key

        # Create idf_objects for schedule:week:daily
        list_day_of_week = ['Sunday', 'Monday', 'Tuesday',
                            'Wednesday', 'Thursday', 'Friday', 'Saturday']
        # map week positions back to calendar weekdays, honoring the
        # schedule's start day of the week
        ordered_day_n = np.array([6, 0, 1, 2, 3, 4, 5])
        ordered_day_n = np.roll(ordered_day_n, self.startDayOfTheWeek)
        ep_weeks = []
        for week_id in dict_week:
            ep_week = self.idf.add_object(
                ep_object='Schedule:Week:Daily'.upper(),
                save=False,
                **dict(Name=week_id,
                       **{'{}_ScheduleDay_Name'.format(
                           weekday): dict_week[week_id][
                           'day_{}'.format(i)] for
                           i, weekday in
                           zip(ordered_day_n, list_day_of_week)
                       },
                       Holiday_ScheduleDay_Name=
                       dict_week[week_id]['day_6'],
                       SummerDesignDay_ScheduleDay_Name=
                       dict_week[week_id]['day_1'],
                       WinterDesignDay_ScheduleDay_Name=
                       dict_week[week_id]['day_1'],
                       CustomDay1_ScheduleDay_Name=
                       dict_week[week_id]['day_2'],
                       CustomDay2_ScheduleDay_Name=
                       dict_week[week_id]['day_5'])
            )
            ep_weeks.append(ep_week)

        import itertools
        blocks = {}
        from_date = datetime(self.year, 1, 1)
        # run lengths of identical consecutive weeks (the yearly periods)
        bincount = [sum(1 for _ in group)
                    for key, group in itertools.groupby(nws + 1) if key]
        week_order = {i: v for i, v in enumerate(np.array(
            [key for key, group in itertools.groupby(nws + 1) if key]) - 1)}
        for i, (week_n, count) in enumerate(
                zip(week_order, bincount)):
            week_id = list(dict_week)[week_order[i]]
            to_date = from_date + timedelta(days=int(count * 7), hours=-1)
            blocks[i] = {}
            blocks[i]['week_id'] = week_id
            blocks[i]['from_day'] = from_date.day
            blocks[i]['end_day'] = to_date.day
            blocks[i]['from_month'] = from_date.month
            blocks[i]['end_month'] = to_date.month
            from_date = to_date + timedelta(hours=1)

            # If this is the last block, force end of year
            if i == len(bincount) - 1:
                blocks[i]['end_day'] = 31
                blocks[i]['end_month'] = 12

        new_dict = dict(Name=self.schName + '_',
                        Schedule_Type_Limits_Name=self.schTypeLimitsName)
        for i in blocks:
            new_dict.update({"ScheduleWeek_Name_{}".format(i + 1):
                                 blocks[i]['week_id'],
                             "Start_Month_{}".format(i + 1):
                                 blocks[i]['from_month'],
                             "Start_Day_{}".format(i + 1):
                                 blocks[i]['from_day'],
                             "End_Month_{}".format(i + 1):
                                 blocks[i]['end_month'],
                             "End_Day_{}".format(i + 1):
                                 blocks[i]['end_day']})

        ep_year = self.idf.add_object(ep_object='Schedule:Year'.upper(),
                                      save=False, **new_dict)
        return ep_year, ep_weeks, ep_days
def date_field_interpretation(self, field):
"""Date Field Interpretation
Args:
field (str): The EnergyPlus Field Contents
Returns:
(datetime): The datetime object
Info:
See EnergyPlus documentation for more details:
1.6.8.1.2 Field: Start Date (Table 1.4: Date Field Interpretation)
"""
# < number > Weekday in Month
formats = ['%m/%d', '%d %B', '%B %d', '%d %b', '%b %d']
date = None
for format_str in formats:
# Tru to parse using each defined formats
try:
date = datetime.strptime(field, format_str)
except:
pass
else:
date = datetime(self.year, date.month, date.day)
if date is None:
# if the defined formats did not work, try the fancy parse
try:
date = self.parse_fancy_string(field)
except:
msg = "the schedule '{sch}' contains a " \
"Field that is not understood: '{field}'".format(
sch=self.schName,
field=field)
raise ValueError(msg)
else:
return date
else:
return date
def parse_fancy_string(self, field):
"""Will try to parse cases such as `3rd Monday in February` or `Last
Weekday In Month`
Args:
field (str): The EnergyPlus Field Contents
Returns:
(datetime): The datetime object
"""
import re
# split the string at the term ' in '
time, month = field.lower().split(' in ')
month = datetime.strptime(month, '%B').month
# split the first part into nth and dayofweek
nth, dayofweek = time.split(' ')
if 'last' in nth:
nth = -1 # Use the last one
else:
nth = re.findall(r'\d+', nth) # use the nth one
nth = int(nth[0]) - 1 # python is zero-based
weekday = {'monday': 0, 'tuesday': 1, 'wednesday': 2, 'thursday': 3,
'friday': 4, 'saturday': 5, 'sunday': 6}
# parse the dayofweek eg. monday
dayofweek = weekday.get(dayofweek, 6)
# create list of possible days using Calendar
import calendar
c = calendar.Calendar(firstweekday=self.startDayOfTheWeek)
monthcal = c.monthdatescalendar(self.year, month)
# iterate though the month and get the nth weekday
date = [day for week in monthcal for day in week if \
day.weekday() == dayofweek and \
day.month == month][nth]
return datetime(date.year, date.month, date.day)
    def field_set(self, field, slicer_=None):
        """helper function to return the proper slicer depending on the
        field_set value.

        Available values are:
        Weekdays, Weekends, Holidays, Alldays, SummerDesignDay,
        WinterDesignDay, Sunday, Monday, Tuesday, Wednesday, Thursday,
        Friday, Saturday, CustomDay1, CustomDay2, AllOtherDays

        Args:
            field (str): The EnergyPlus field set value.
            slicer_ (pd.Series): The persistent slicer for this schedule

        Returns:
            (indexer-like): Returns the appropriate indexer for the series.
                Depending on the branch this is a callable, a pd.IndexSlice,
                a boolean series, or None (design-day cases).
        """
        if field.lower() == 'weekdays':
            # return only days of weeks
            return lambda x: x.index.dayofweek < 5
        elif field.lower() == 'weekends':
            # return only weekends
            return lambda x: x.index.dayofweek >= 5
        elif field.lower() == 'alldays':
            log('For schedule "{}", the field-set "AllDays" may be overridden '
                'by the "AllOtherDays" field-set'.format(
                self.schName), lg.WARNING)
            # return all days := equivalenet to .loc[:]
            return pd.IndexSlice[:]
        elif field.lower() == 'allotherdays':
            # return unused days (including special days). Uses the global
            # variable `slicer_`
            import operator
            if slicer_ is not None:
                # hours not yet assigned OR belonging to special days
                return _conjunction(*[self.special_day(field, slicer_),
                                      ~slicer_], logical=operator.or_)
            else:
                raise NotImplementedError
        elif field.lower() == 'sunday':
            # return only sundays
            return lambda x: x.index.dayofweek == 6
        elif field.lower() == 'monday':
            # return only mondays
            return lambda x: x.index.dayofweek == 0
        elif field.lower() == 'tuesday':
            # return only Tuesdays
            return lambda x: x.index.dayofweek == 1
        elif field.lower() == 'wednesday':
            # return only Wednesdays
            return lambda x: x.index.dayofweek == 2
        elif field.lower() == 'thursday':
            # return only Thursdays
            return lambda x: x.index.dayofweek == 3
        elif field.lower() == 'friday':
            # return only Fridays
            return lambda x: x.index.dayofweek == 4
        elif field.lower() == 'saturday':
            # return only Saturdays
            return lambda x: x.index.dayofweek == 5
        elif field.lower() == 'summerdesignday':
            # return design_day(self, field)
            # NOTE(review): design-day handling is disabled; None is returned
            return None
        elif field.lower() == 'winterdesignday':
            # return design_day(self, field)
            return None
        elif field.lower() == 'holiday' or field.lower() == 'holidays':
            field = 'holiday'
            return self.special_day(field, slicer_)
        elif not self.strict:
            # If not strict, ignore missing field-sets such as CustomDay1
            return pd.IndexSlice[:]
        else:
            raise NotImplementedError(
                'Archetypal does not yet support The '
                'Field_set "{}"'.format(field))
    def __len__(self):
        """returns the length of all values of the schedule"""
        # `all_values` holds the expanded hourly series (typically 8760)
        return len(self.all_values)
def __eq__(self, other):
"""Overrides the default implementation"""
if isinstance(other, Schedule):
return self.all_values == other.all_values
else:
raise NotImplementedError
def get_sdow(self, start_day_of_week):
"""Returns the start day of the week"""
if start_day_of_week is None:
return self.idf.day_of_week_for_start_day
else:
return start_day_of_week
def special_day(self, field, slicer_):
    """Mark the days declared as RunPeriodControl:SpecialDays.

    Returns a copy of ``slicer_`` that is True over every matching
    special-day period and False elsewhere.  Raises ValueError when no
    matching object exists and ``self.strict`` is set; otherwise the
    all-False copy is returned.
    """
    sp_slicer_ = slicer_.copy()
    sp_slicer_.loc[:] = False
    special_day_types = ['holiday', 'customday1', 'customday2']
    dds = self.idf.idfobjects['RunPeriodControl:SpecialDays'.upper()]
    # NOTE(review): the second clause matches *any* known special-day type,
    # not only `field`; this mirrors the original logic — confirm intended.
    matching = [dd for dd in dds
                if dd.Special_Day_Type.lower() == field
                or dd.Special_Day_Type.lower() in special_day_types]
    if len(matching) > 0:
        # a schedule can reference more than one special-day period
        for dd in matching:
            data = dd.Start_Date
            ep_start_date = self.date_field_interpretation(data)
            # offset of the special day from the start of the modelled year
            ep_orig = datetime(self.year, 1, 1)
            days_to_speciald = (ep_start_date - ep_orig).days
            duration = int(dd.Duration)
            from_date = self.startDate + timedelta(days=days_to_speciald)
            # inclusive span: up to the last hour of the final day
            to_date = from_date + timedelta(days=duration) + timedelta(
                hours=-1)
            sp_slicer_.loc[from_date:to_date] = True
        return sp_slicer_
    elif not self.strict:
        return sp_slicer_
    else:
        # bug fix: the message previously named "SizingPeriod:DesignDay"
        # (copy-pasted from design_day); report the object actually searched
        msg = 'Could not find a "RunPeriodControl:SpecialDays" object ' \
              'needed for schedule "{}" with Day Type "{}"'.format(
                  self.schName, field.capitalize()
              )
        raise ValueError(msg)
def design_day(schedule, field):
    """Build an index predicate selecting the SizingPeriod:DesignDay date.

    Looks up the design day whose Day_Type matches `field` and returns a
    callable usable as a pandas indexer; raises ValueError when none exists.
    """
    candidates = [
        obj for obj in schedule.idf.idfobjects['SizingPeriod:DesignDay'.upper()]
        if obj.Day_Type.lower() == field
    ]
    if not candidates:
        msg = 'Could not find a "SizingPeriod:DesignDay" object ' \
              'needed for schedule "{}" with Day Type "{}"'.format(
                  schedule.schName, field.capitalize()
              )
        raise ValueError(msg)
    # only one design day should match the requested Day Type
    design = candidates[0]
    date_str = '/'.join(str(part).zfill(2)
                        for part in (design.Month, design.Day_of_Month))
    date = schedule.date_field_interpretation(date_str)
    return lambda x: x.index == date
def _conjunction(*conditions, logical=np.logical_and):
    """Fold any number of boolean conditions into one with `logical`.

    Defaults to element-wise np.logical_and; callers pass operator.or_ to
    build a union instead (as done for the 'allotherdays' field set).
    """
    return functools.reduce(logical, conditions)
def _separator(sep):
"""helper function to return the correct delimiter"""
if sep == 'Comma':
return ','
elif sep == 'Tab':
return '\t'
elif sep == 'Fixed':
return None
elif sep == 'Semicolon':
return ';'
else:
return ','
def _how(how):
"""Helper function to return the correct resampler"""
if how.lower() == 'average':
return 'mean'
elif how.lower() == 'linear':
return 'interpolate'
elif how.lower() == 'no':
return 'max'
else:
return 'max'
| 39.808811 | 100 | 0.535799 |
f7024605869dd7788905637cfccaa41707efb6c3 | 256 | py | Python | data/scripts/reverse.py | levindu/OpenCC | 345ea91303e5b3d9332dc51ea73370dac83e4c6b | [
"Apache-2.0"
] | 43 | 2018-09-17T00:45:35.000Z | 2021-11-14T23:56:45.000Z | data/scripts/reverse.py | levindu/OpenCC | 345ea91303e5b3d9332dc51ea73370dac83e4c6b | [
"Apache-2.0"
] | 7 | 2019-11-26T10:48:14.000Z | 2021-06-13T04:49:58.000Z | data/scripts/reverse.py | levindu/OpenCC | 345ea91303e5b3d9332dc51ea73370dac83e4c6b | [
"Apache-2.0"
] | 6 | 2018-09-17T02:09:59.000Z | 2020-08-15T13:57:44.000Z | #!/usr/bin/env python
#coding: utf-8
"""Reverse key and value of every pair in the input file."""
import sys

from common import reverse_items

if len(sys.argv) != 3:
    print("Reverse key and value of all pairs")
    # bug fix: the original wrapped the args in an extra tuple, printing a
    # tuple repr instead of a usage line
    print("Usage:", sys.argv[0], "[input] [output]")
    exit(1)

reverse_items(sys.argv[1], sys.argv[2])
| 21.333333 | 53 | 0.671875 |
f703531b591af3d5317bed220eaa477c0403e4d5 | 2,576 | py | Python | stock_predictions/web/template.py | abakhru/stock_prediction | bfb4483ac888bc67e2a8928fdf037d23acbf48f9 | [
"MIT"
] | 1 | 2020-07-14T09:05:56.000Z | 2020-07-14T09:05:56.000Z | stock_predictions/web/template.py | abakhru/stock_prediction | bfb4483ac888bc67e2a8928fdf037d23acbf48f9 | [
"MIT"
] | null | null | null | stock_predictions/web/template.py | abakhru/stock_prediction | bfb4483ac888bc67e2a8928fdf037d23acbf48f9 | [
"MIT"
] | null | null | null | template = """<!DOCTYPE html>
<html>
<head>
<meta charset="UTF-8">
<title>Title of the document</title>
<script type="text/javascript" src="https://s3.tradingview.com/tv.js"></script>
<link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/milligram/1.3.0/milligram.min.css">
<style>
.tradingview-widget-container {{
position: sticky;
top: 20px;
}}
.stocks-view {{
display: flex;
flex-wrap: nowrap;
}}
.stocks-listing {{
width: 780px;
flex-wrap: nowrap;
padding: 20px;
}}
.stocks-graph {{
flex-wrap: nowrap;
padding: 20px;
}}
th.sticky-header {{
position: sticky;
top: 0;
z-index: 10;
background-color: white;
}}
.positive-movement {{
color: green;
font-weight: bold;
}}
.negative-movement {{
color: red;
font-weight: bold;
}}
.blue-category {{
background-color: lightsteelblue;
}}
</style>
</head>
<body>
{}
<div class="stocks-view">
<div class="stocks-listing">
<table>
<thead>
<tr>
<th class="sticky-header">Symbol</th>
<th class="sticky-header">April 1 2019</th>
<th class="sticky-header">Dec 2 2019</th>
<th class="sticky-header">Today</th>
<th class="sticky-header">Movement since April 1 2019</th>
<th class="sticky-header">Movement since Dec 2 2019</th>
<th class="sticky-header">Bankruptcy probability</th>
</tr>
</thead>
<tbody>
{}
</tbody>
</table>
</div>
<div class="stocks-graph"
<!-- TradingView Widget BEGIN -->
<div class="tradingview-widget-container">
<div id="tradingview_63a66"></div>
<div class="tradingview-widget-copyright"><a href="https://www.tradingview.com/symbols/AAPL/" rel="noopener" target="_blank"><span class="blue-text">AAPL Chart</span></a> by TradingView</div>
</div>
<!-- TradingView Widget END -->
</div>
</div>
<script type="text/javascript">
function renderChart(symbol) {{
new TradingView.widget(
{{
"width": 750,
"height": 500,
"symbol": symbol,
"interval": "180",
"timezone": "Etc/UTC",
"theme": "light",
"style": "1",
"locale": "en",
"toolbar_bg": "#f1f3f6",
"enable_publishing": false,
"allow_symbol_change": true,
"container_id": "tradingview_63a66"
}}
);
}}
document.addEventListener('DOMContentLoaded', function(){{
renderChart('BA');
}}, false);
</script>
</body>
</html>"""
| 24.533333 | 195 | 0.572593 |
f704431757b191fd6a6405e1724d23679ca1b2f0 | 1,173 | py | Python | script/app/agg.py | Intelligent-Systems-Lab/ISL-BCFL | 42ceb86708a76e28b31c22b33c15ee9a6a745ec7 | [
"Apache-2.0"
] | null | null | null | script/app/agg.py | Intelligent-Systems-Lab/ISL-BCFL | 42ceb86708a76e28b31c22b33c15ee9a6a745ec7 | [
"Apache-2.0"
] | null | null | null | script/app/agg.py | Intelligent-Systems-Lab/ISL-BCFL | 42ceb86708a76e28b31c22b33c15ee9a6a745ec7 | [
"Apache-2.0"
] | null | null | null | import os
# import torch
import argparse
import base64
import sys
import io
import torch
import torch.nn as nn
from torchvision import transforms
from torch.utils.data import DataLoader
from torch.utils.data.sampler import SubsetRandomSampler
# Accumulates the deserialized models read from the input file.
model_list = []

# argv[1]: path to a file holding comma-separated serialized models.
f = open(sys.argv[1], "r")
models = f.read().split(",")
f.close()
print(models)

for m in models:
    # base642fullmodel is defined elsewhere in the original file —
    # presumably decodes one base64 string back into a model (confirm).
    model_list.append(base642fullmodel(m))

# Sum every model's weights into the first model's state dict.
new_model_state = model_list[0].state_dict()
for m in model_list[1:]:
    state_m = m.state_dict()
    for key in state_m:
        new_model_state[key] += state_m[key]

# Divide by the model count to get the element-wise average of the weights.
for key in new_model_state:
    new_model_state[key] /= len(model_list)

# Load the averaged weights back into the first model and re-serialize it.
new_model = model_list[0]
new_model.load_state_dict(new_model_state)

output = fullmodel2base64(new_model)
print(output)
| 19.55 | 56 | 0.734868 |
f70474925eb078c598d03d4255e4e76e7b6c9361 | 420 | py | Python | examples/function_examples/bpod_info.py | ckarageorgkaneen/pybpod-api | ebccef800ae1abf3b6a643ff33166fab2096c780 | [
"MIT"
] | 1 | 2021-01-18T08:18:22.000Z | 2021-01-18T08:18:22.000Z | examples/function_examples/bpod_info.py | ckarageorgkaneen/pybpod-api | ebccef800ae1abf3b6a643ff33166fab2096c780 | [
"MIT"
] | 1 | 2020-09-18T20:46:11.000Z | 2020-12-29T14:55:20.000Z | examples/function_examples/bpod_info.py | ckarageorgkaneen/pybpod-api | ebccef800ae1abf3b6a643ff33166fab2096c780 | [
"MIT"
] | 3 | 2020-09-12T15:32:11.000Z | 2022-03-11T23:08:03.000Z | # !/usr/bin/python3
# -*- coding: utf-8 -*-
"""
Get hardware info from Bpod.

Opens a connection to the Bpod device, closes it, then prints the target
firmware version from configuration alongside the values read from the
device object.
"""
from pybpodapi.protocol import Bpod
from confapp import conf

my_bpod = Bpod()  # constructing Bpod connects to the device
my_bpod.close()

# NOTE(review): hardware fields are printed after close(); presumably they
# were populated while the connection was open — confirm this is intended.
print("Target Bpod firmware version: ", conf.TARGET_BPOD_FIRMWARE_VERSION)
print("Firmware version (read from device): ", my_bpod.hardware.firmware_version)
print("Machine type version (read from device): ", my_bpod.hardware.machine_type)
| 21 | 81 | 0.742857 |
f704c0f9b4488488f3aae9f679bb84275d8e52d4 | 11,405 | py | Python | src/core/src/core_logic/PackageFilter.py | Azure/LinuxPatchExtension | 6af622afb4298805bdf47328d6bc66a785f7166b | [
"Apache-2.0"
] | 4 | 2020-06-01T14:36:30.000Z | 2021-08-24T16:55:50.000Z | src/core/src/core_logic/PackageFilter.py | Azure/LinuxPatchExtension | 6af622afb4298805bdf47328d6bc66a785f7166b | [
"Apache-2.0"
] | 34 | 2020-09-11T17:20:42.000Z | 2022-03-28T14:08:44.000Z | src/core/src/core_logic/PackageFilter.py | Azure/LinuxPatchExtension | 6af622afb4298805bdf47328d6bc66a785f7166b | [
"Apache-2.0"
] | 1 | 2020-12-28T10:13:20.000Z | 2020-12-28T10:13:20.000Z | # Copyright 2020 Microsoft Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Requires Python 2.7+
"""Package Filter"""
from core.src.bootstrap.Constants import Constants
import fnmatch
# endregion
# region Get installation classifications from execution configuration
def is_msft_critsec_classification_only(self):
    """True when Critical and/or Security is selected without 'Other'."""
    included = self.installation_included_classifications
    has_critsec = 'Critical' in included or 'Security' in included
    return has_critsec and 'Other' not in included
def is_msft_other_classification_only(self):
    """True when 'Other' is selected but neither Critical nor Security is."""
    included = self.installation_included_classifications
    if 'Other' not in included:
        return False
    return 'Critical' not in included and 'Security' not in included
def is_msft_all_classification_included(self):
    """Returns true if all classifications were individually selected *OR* (nothing was selected AND no inclusion list is present) -- business logic"""
    # All public members of Constants.PackageClassification (dunder names filtered out).
    all_classifications = [key for key in Constants.PackageClassification.__dict__.keys() if not key.startswith('__')]
    # -1 because one member is apparently not a user-selectable classification
    # (presumably the 'Unclassified' entry) — TODO confirm against Constants.
    all_classifications_explicitly_selected = bool(len(self.installation_included_classifications) == (len(all_classifications) - 1))
    no_classifications_selected = bool(len(self.installation_included_classifications) == 0)
    # 'Unclassified' on its own is treated the same as selecting nothing.
    only_unclassified_selected = bool('Unclassified' in self.installation_included_classifications and len(self.installation_included_classifications) == 1)
    return all_classifications_explicitly_selected or ((no_classifications_selected or only_unclassified_selected) and not self.is_inclusion_list_present())
def is_invalid_classification_combination(self):
    """True when 'Other' is combined with exactly one of Critical/Security."""
    included = self.installation_included_classifications
    if 'Other' not in included:
        return False
    has_critical = 'Critical' in included
    has_security = 'Security' in included
    # invalid iff exactly one of the two is present alongside 'Other'
    return has_critical != has_security
# endregion
| 65.924855 | 216 | 0.728979 |
f70920a45d8b352e57cdd5c4ba4ed7a956b3f421 | 4,150 | py | Python | pyesgf/util.py | ggarcias/esgf-pyclient-cmip6 | 9e7975d2e676ed2c4001edb4e25c9c20cc16b7af | [
"BSD-3-Clause"
] | 17 | 2016-09-07T02:55:30.000Z | 2022-03-10T15:34:53.000Z | pyesgf/util.py | ggarcias/esgf-pyclient-cmip6 | 9e7975d2e676ed2c4001edb4e25c9c20cc16b7af | [
"BSD-3-Clause"
] | 61 | 2015-05-27T08:10:46.000Z | 2022-03-17T12:36:45.000Z | pyesgf/util.py | ggarcias/esgf-pyclient-cmip6 | 9e7975d2e676ed2c4001edb4e25c9c20cc16b7af | [
"BSD-3-Clause"
] | 22 | 2015-10-27T11:21:05.000Z | 2022-01-12T08:26:16.000Z | """
Utility functions using the pyesgf package.
"""
import sys
from urllib.parse import quote_plus
def ats_url(base_url):
    """Return the ESGF SAML AttributeService endpoint under `base_url`."""
    # trailing slashes are stripped so the join never doubles them
    trimmed = base_url.rstrip('/')
    return trimmed + '/' + 'esgf-idp/saml/soap/secure/attributeService.htm'
def get_manifest(drs_id, version, connection):
    """
    Retrieve the filenames, sizes and checksums of a dataset.

    Raises ValueError if more than one dataset matches the given drs_id
    and version on a search without replicas.  The connection should be
    either distrib=True or be connected to a suitable ESGF search
    interface.

    :param drs_id: a string containing the DRS identifier without version
    :param version: The version as a string or int
    """
    if isinstance(version, int):
        version = str(version)

    results = connection.new_context(drs_id=drs_id, version=version).search()
    if len(results) > 1:
        raise ValueError("Search for dataset %s.v%s returns multiple hits" %
                         (drs_id, version))

    # one manifest entry per file in the (single) matching dataset
    return {
        f.filename: {
            'checksum_type': f.checksum_type,
            'checksum': f.checksum,
            'size': f.size,
        }
        for f in results[0].file_context().search()
    }
def urlencode(query):
    """
    Encode a sequence of two-element tuples or dictionary into a URL query
    string.
    This version is adapted from the standard library to understand operators
    in the pyesgf.search.constraints module.
    If the query arg is a sequence of two-element tuples, the order of the
    parameters in the output will match the order of parameters in the
    input.
    """
    if hasattr(query, "items"):
        # mapping objects: work on their (key, value) pairs
        query = list(query.items())
    else:
        # it's a bother at times that strings and string-like objects are
        # sequences...
        try:
            # non-sequence items should not work with len()
            # non-empty strings will fail this
            if len(query) and not isinstance(query[0], tuple):
                raise TypeError
            # zero-length sequences of all types will get here and succeed,
            # but that's a minor nit - since the original implementation
            # allowed empty dicts that type of behavior probably should be
            # preserved for consistency
        except TypeError:
            # NOTE(review): inherited from the py2 stdlib version — the
            # traceback is passed as an exception *argument* rather than
            # chained with `raise ... from`; confirm before modernising.
            ty, va, tb = sys.exc_info()
            raise TypeError("not a valid non-string sequence "
                            "or mapping object", tb)
    lst = []
    for k, v in query:
        # strip_tag/append are helpers defined elsewhere in this module;
        # `tag` carries the constraint operator attached to the value.
        tag, v = strip_tag(v)
        k = quote_plus(str(k))
        if isinstance(v, str):
            # NOTE(review): on Python 3 every str has .encode, so the else
            # branch below is unreachable — kept as-is from the original.
            if hasattr(v, 'encode'):
                # is there a reasonable way to convert to ASCII?
                # encode generates a string, but "replace" or "ignore"
                # lose information and "strict" can raise UnicodeError
                v = quote_plus(v.encode("ASCII", "replace"))
            else:
                v = quote_plus(v)
            append(k, v, tag, lst)
        else:
            try:
                # is this a sufficient test for sequence-ness?
                len(v)
            except TypeError:
                # not a sequence: emit its string form as a single value
                v = quote_plus(str(v))
                append(k, v, tag, lst)
            else:
                # loop over the sequence, one k=elt pair per element
                for elt in v:
                    append(k, quote_plus(str(elt)), tag, lst)
    return '&'.join(lst)
| 30.291971 | 78 | 0.576867 |
f709b6ad81d25a0c074deaa1308cf04158654f02 | 1,373 | py | Python | tests/book/ch05/classify_name.py | TITC/pyhanlp | ad062f358805da5bf97f78d9f37f441c06ae4d19 | [
"Apache-2.0"
] | null | null | null | tests/book/ch05/classify_name.py | TITC/pyhanlp | ad062f358805da5bf97f78d9f37f441c06ae4d19 | [
"Apache-2.0"
] | null | null | null | tests/book/ch05/classify_name.py | TITC/pyhanlp | ad062f358805da5bf97f78d9f37f441c06ae4d19 | [
"Apache-2.0"
] | null | null | null | # -*- coding:utf-8 -*-
# Authorhankcs
# Date: 2018-06-21 19:46
# 5.3
# http://nlp.hankcs.com/book.php
# https://bbs.hankcs.com/
import sys, os  # environment: adjust the import priority below
# Put the repository root (four directory levels up) first on sys.path so the
# in-tree pyhanlp package wins over any installed copy.
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))))
from pyhanlp import *
from tests.test_utility import ensure_data

# Java class handle (via pyhanlp's JClass bridge) for the perceptron-based
# name-gender classifier.
PerceptronNameGenderClassifier = JClass('com.hankcs.hanlp.model.perceptron.PerceptronNameGenderClassifier')

# Fetch the Chinese-names corpus on first use and resolve its local paths.
cnname = ensure_data('cnname', 'http://file.hankcs.com/corpus/cnname.zip')
TRAINING_SET = os.path.join(cnname, 'train.csv')
TESTING_SET = os.path.join(cnname, 'test.csv')
MODEL = cnname + ".bin"

if __name__ == '__main__':
    # NOTE(review): run_classifier is not defined in this excerpt; presumably
    # it lives elsewhere in the original file — confirm.
    run_classifier(False)
    run_classifier(True)
| 38.138889 | 112 | 0.718864 |
f70b82a64651b669501101e2383b4a201ac4b9ba | 5,305 | py | Python | tests/test_content_download.py | easydo-cn/edo_client | 775f185c54f2eeda6a7dd6482de8228ca9ad89b0 | [
"Apache-2.0"
] | null | null | null | tests/test_content_download.py | easydo-cn/edo_client | 775f185c54f2eeda6a7dd6482de8228ca9ad89b0 | [
"Apache-2.0"
] | null | null | null | tests/test_content_download.py | easydo-cn/edo_client | 775f185c54f2eeda6a7dd6482de8228ca9ad89b0 | [
"Apache-2.0"
] | null | null | null | # coding: utf-8
import io
import os
import shutil
import tempfile
import unittest
from edo_client import WoClient
def test_11_download_to_stream_all(self):
    """Downloading without a byte range streams the whole file."""
    stream = io.BytesIO()
    self.client.content.download_to_stream(
        stream, url=self.download_url
    )
    self.assertEqual(
        self.file_size,
        stream.tell(),
        'Cursor should be at the end of stream after download'
    )
    # rewind and re-read to verify the actual payload length as well
    stream.seek(0, os.SEEK_SET)
    # NOTE(review): the message hard-codes 10240 while the assertion uses
    # self.file_size — confirm the two agree with the fixture.
    self.assertEqual(
        self.file_size,
        len(stream.read()),
        'File length should be 10240 bytes'
    )
def test_12_download_stream_first_byte(self):
    """start=0, end=0 is an inclusive range yielding exactly one byte."""
    stream = io.BytesIO()
    self.client.content.download_to_stream(
        stream, url=self.download_url, start=0, end=0,
    )
    self.assertEqual(1, stream.tell(), 'Download first byte of file')
def test_13_download_stream_head_part(self):
    """An inclusive head range [0, 5 MiB - 1] downloads exactly 5 MiB."""
    stream = io.BytesIO()
    self.client.content.download_to_stream(
        stream, url=self.download_url, start=0, end=(5 * (2 ** 20) - 1),
    )
    self.assertEqual(5 * (2 ** 20), stream.tell())
def test_14_download_stream_tail_part(self):
    """end=None means 'until EOF'; the tail from 5 MiB is expected to be 5 MiB."""
    stream = io.BytesIO()
    self.client.content.download_to_stream(
        stream, url=self.download_url, start=(5 * (2 ** 20)), end=None,
    )
    self.assertEqual(5 * (2 ** 20), stream.tell())
def test_15_download_partial(self):
    """An arbitrary inclusive range yields end - start + 1 bytes."""
    stream = io.BytesIO()
    start, end = 1234, 54321
    self.client.content.download_to_stream(
        stream, url=self.download_url, start=start, end=end,
    )
    self.assertEqual(stream.tell(), end - start + 1)
def test_21_get_data_full_size(self):
    """get_data with no range arguments returns the whole file."""
    self.assertEqual(
        self.file_size,
        len(self.client.content.get_data(url=self.download_url)),
        # typo fix: "shoule" -> "should"
        '.get_data should be able to download the whole file by default',
    )
def test_22_get_data_first_byte(self):
    """get_data with size=1 returns exactly the first byte."""
    self.assertEqual(
        1,
        len(self.client.content.get_data(url=self.download_url, size=1)),
        '.get_data should be able to download the 1st byte of given file',
    )
def test_23_get_data_head_part(self):
    """get_data with only size set returns that many bytes from the start."""
    size = 5432
    self.assertEqual(
        size,
        len(self.client.content.get_data(url=self.download_url, size=size)),  # noqa E501
        '.get_data should download the first {} bytes'.format(size),
    )
def test_24_get_data_tail_part(self):
    """get_data with offset + size covering the remainder returns the tail."""
    start = 12345
    size = self.file_size - start
    self.assertEqual(
        size,
        len(self.client.content.get_data(
            url=self.download_url,
            offset=start, size=size
        )),
        # typo fix: "shoule" -> "should"
        '.get_data should download last {} bytes'.format(size),
    )
def test_25_get_data_partial(self):
    """get_data with a mid-file offset and an explicit size.

    NOTE(review): size is computed to reach EOF (file_size - start), so this
    currently duplicates test_24 with a different offset; a true mid-file
    slice may have been intended — confirm.
    """
    start = 23451
    size = self.file_size - start
    self.assertEqual(
        size,
        len(self.client.content.get_data(
            url=self.download_url,
            offset=start, size=size,
        )),
        '.get_data should download {} bytes starting from offset {}'.format(size, start),  # noqa E501
    )
def test_31_download_to_file(self):
    """download_to_file writes the complete file to the given path."""
    fd, fpath = tempfile.mkstemp(dir=self.tmpdir)
    os.close(fd)  # only the path is needed; the client reopens the file
    self.client.content.download_to_file(destination=fpath, url=self.download_url)
    self.assertEqual(self.file_size, os.stat(fpath).st_size)
def test_41_download_empty_file(self):
    """Downloading an empty resource produces a zero-byte file."""
    fd, fpath = tempfile.mkstemp(dir=self.tmpdir)
    os.close(fd)  # only the path is needed; the client reopens the file
    self.client.content.download_to_file(destination=fpath, url=self.empty_file_url)
    self.assertEqual(0, os.stat(fpath).st_size)
| 32.746914 | 106 | 0.590575 |
f70e1eec634ed0c89cd786687c6b726187e816d5 | 11,426 | py | Python | src/train.py | Gordonbuck/ml-oov-we | ce28cd8b556a16125ba36cd41781a3e60bb26422 | [
"MIT"
] | null | null | null | src/train.py | Gordonbuck/ml-oov-we | ce28cd8b556a16125ba36cd41781a3e60bb26422 | [
"MIT"
] | null | null | null | src/train.py | Gordonbuck/ml-oov-we | ce28cd8b556a16125ba36cd41781a3e60bb26422 | [
"MIT"
] | null | null | null | import higher
from leap import Leap
import numpy as np
import os
import torch
import torch.nn as nn
import gc
| 52.412844 | 120 | 0.591896 |
f70e20602d9329f0b785241b32a1ae744bf6d702 | 119 | py | Python | number reverser.py | Jayapraveen34/crazy-lover | be5bd897c40c31b3e5e6eafe3b6436cb3d888efe | [
"BSD-2-Clause"
] | null | null | null | number reverser.py | Jayapraveen34/crazy-lover | be5bd897c40c31b3e5e6eafe3b6436cb3d888efe | [
"BSD-2-Clause"
] | null | null | null | number reverser.py | Jayapraveen34/crazy-lover | be5bd897c40c31b3e5e6eafe3b6436cb3d888efe | [
"BSD-2-Clause"
] | null | null | null | a = str(input('Enter the number you want to reverse:'))
# `a` (read on the preceding line) holds the user's number as a string.
b = (a[::-1])  # reversed copy via an extended slice
c = int(b)  # back to an integer; leading zeros in `b` are dropped
print('the reversed number is',c)
| 23.8 | 56 | 0.605042 |
f70ec64b9e31daafd1fb2f1ca0a900fb5ba86171 | 3,473 | py | Python | pyexcel_xls/xlsw.py | pyexcel/pyexcel-xls | 995cfd273d5360947a528ff3a1ed3f9e52a429ad | [
"BSD-3-Clause"
] | 40 | 2016-05-18T20:09:39.000Z | 2022-02-09T06:39:41.000Z | pyexcel_xls/xlsw.py | wenxuefeng3930/pyexcel-xls | 995cfd273d5360947a528ff3a1ed3f9e52a429ad | [
"BSD-3-Clause"
] | 46 | 2016-02-01T22:12:31.000Z | 2021-10-07T18:57:05.000Z | pyexcel_xls/xlsw.py | wenxuefeng3930/pyexcel-xls | 995cfd273d5360947a528ff3a1ed3f9e52a429ad | [
"BSD-3-Clause"
] | 24 | 2016-01-29T12:26:27.000Z | 2021-10-31T15:37:15.000Z | """
pyexcel_xlsw
~~~~~~~~~~~~~~~~~~~
The lower level xls file format handler using xlwt
:copyright: (c) 2016-2021 by Onni Software Ltd
:license: New BSD License
"""
import datetime
import xlrd
from xlwt import XFStyle, Workbook
from pyexcel_io import constants
from pyexcel_io.plugin_api import IWriter, ISheetWriter
DEFAULT_DATE_FORMAT = "DD/MM/YY"
DEFAULT_TIME_FORMAT = "HH:MM:SS"
DEFAULT_LONGTIME_FORMAT = "[HH]:MM:SS"
DEFAULT_DATETIME_FORMAT = "%s %s" % (DEFAULT_DATE_FORMAT, DEFAULT_TIME_FORMAT)
EMPTY_SHEET_NOT_ALLOWED = "xlwt does not support a book without any sheets"
| 31.008929 | 78 | 0.589692 |
f70ef0f412e5276c5b8da11a1ad63834bedea5f9 | 593 | py | Python | venv/lib/python3.6/site-packages/gensim/__init__.py | bopopescu/wired_cli | 844b5c2bf32c95ad2974663f0501a85ff6134bd4 | [
"MIT"
] | 2 | 2021-06-09T20:55:17.000Z | 2021-11-03T03:07:37.000Z | venv/lib/python3.6/site-packages/gensim/__init__.py | bopopescu/wired_cli | 844b5c2bf32c95ad2974663f0501a85ff6134bd4 | [
"MIT"
] | 4 | 2020-07-26T02:10:42.000Z | 2021-03-31T18:48:58.000Z | venv/lib/python3.6/site-packages/gensim/__init__.py | bopopescu/wired_cli | 844b5c2bf32c95ad2974663f0501a85ff6134bd4 | [
"MIT"
] | 1 | 2020-07-25T23:57:23.000Z | 2020-07-25T23:57:23.000Z | """This package contains interfaces and functionality to compute pair-wise document similarities within a corpus
of documents.
"""
from gensim import parsing, corpora, matutils, interfaces, models, similarities, summarization, utils # noqa:F401
import logging
# Package version string.
__version__ = '3.5.0'

# Package-level logger; attach a no-op handler at most once so importing the
# package does not warn about missing handlers.
logger = logging.getLogger('gensim')
if not logger.handlers:  # To ensure reload() doesn't add another one
    logger.addHandler(NullHandler())
| 28.238095 | 114 | 0.726813 |
f70efa147c6f9c7ee90e557fe0740d068a1ce522 | 213 | py | Python | tests/test_ai.py | divanorama/katrain | dc22aa88526fb6446f908259f06020d649a2d0a9 | [
"MIT"
] | null | null | null | tests/test_ai.py | divanorama/katrain | dc22aa88526fb6446f908259f06020d649a2d0a9 | [
"MIT"
] | null | null | null | tests/test_ai.py | divanorama/katrain | dc22aa88526fb6446f908259f06020d649a2d0a9 | [
"MIT"
] | null | null | null | import pytest
from katrain.core.constants import AI_STRATEGIES_RECOMMENDED_ORDER, AI_STRATEGIES
| 23.666667 | 81 | 0.798122 |
f70fbb21c94acb9d07d8e2e1ca75454e92d0eaf5 | 28,076 | py | Python | game_client.py | wenlianglaw/Tetris-in-Python | d4f0a22c4827e7eeb44c55def3f024e0c6932ebe | [
"MIT"
] | 1 | 2021-06-25T20:43:19.000Z | 2021-06-25T20:43:19.000Z | game_client.py | wenlianglaw/Tetris-in-Python | d4f0a22c4827e7eeb44c55def3f024e0c6932ebe | [
"MIT"
] | null | null | null | game_client.py | wenlianglaw/Tetris-in-Python | d4f0a22c4827e7eeb44c55def3f024e0c6932ebe | [
"MIT"
] | null | null | null | # This file defines the back end of the Tetris game
#
# GameState is the base class of GameClient.
#
# GameClient.Run() will start two threads:
# - _ProcessActions: Process the action list every x seconds
# - _AutoDrop: Auto drops the current piece.
#
# GameClient:
# - current piece
# - held piece
# - piece list
# - color_map: game board
# - InputActions(...): Inputs a list of actions.
# - ProcessActions(...): Lets the game client process a list of actions
# directly
# - ProcessAction(...): Lets the game client process one actions directly
# - PutPiece(...): Puts the current piece if the position is valid.
# - GetState(...): Gets game state, useful to AI
# - CheckValidity(...): Checks if a move is valid
# - SpawnPiece(...): Sets the current piece.
# - Restart(...): Restarts the game.
# - Rotate(...): Alternatively, callers can directly call Rotate to rotate
# current_piece
# - Move(...): Alternatively, callers can directly call Move to move the
# current_piece
#
import copy
import queue
import threading
import time
from threading import Lock
from typing import Tuple, List
import numpy as np
import actions
import shape
# Some global settings
DEFAULT_LENGTH = 20
DEFAULT_WIDTH = 10
MAP_PADDING_SIZE = 4
# When there are less than threshold pieces, spawn a new bag.
REFILL_THRESHOLD = 5
# Disable the auto drop in next few seconds
MAXIMUM_LOCK_TIME = 4
INCREMENTAL_LOCK_TIME = 1
# Scores
SINGLE = 5
DOUBLE = 10
TSS = 20
TRIPLE = 40
QUAD = 50
TSD = 60
TST = 80
PC = 120
# ATTACKS
ATTACK_DOUBLE = 1
ATTACK_TSS = 2
ATTACK_TRIPLE = 2
ATTACK_QUAD = 4
ATTACK_TSD = 4
ATTACK_TST = 6
ATTACK_PC = 10
def CreateGameFromState(state: GameState) -> GameClient:
    """Build a fresh GameClient whose state mirrors the given GameState.

    The board and pieces are copied so mutating the new game does not
    affect `state`; scalar fields are transferred verbatim.
    """
    game = GameClient(height=state.height, width=state.width)

    # deep-ish copies for mutable members
    game.color_map = np.copy(state.color_map)
    game.current_piece = state.current_piece.copy()
    game.held_piece = state.held_piece.copy() if state.held_piece is not None else None
    game.piece_list = state.piece_list.copy()

    # scalar fields are transferred as-is
    for attr in ("score", "can_swap", "is_gameover",
                 "accumulated_lines_eliminated", "piece_dropped",
                 "line_sent", "line_received"):
        setattr(game, attr, getattr(state, attr))

    return game
| 32.799065 | 123 | 0.63036 |
f711bbbc339573d1744df69fd2b79a94a7b3f1b9 | 2,615 | py | Python | gateway/builders/authorization_builder.py | TarlanPayments/gw-python-client | a0dd5292c877ab06bf549693a1bfc9fb06ef9d19 | [
"MIT"
] | null | null | null | gateway/builders/authorization_builder.py | TarlanPayments/gw-python-client | a0dd5292c877ab06bf549693a1bfc9fb06ef9d19 | [
"MIT"
] | null | null | null | gateway/builders/authorization_builder.py | TarlanPayments/gw-python-client | a0dd5292c877ab06bf549693a1bfc9fb06ef9d19 | [
"MIT"
] | null | null | null | # The MIT License
#
# Copyright (c) 2017 Tarlan Payments.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
| 41.507937 | 120 | 0.728489 |
f712616c7f9dabddbf70b11e2c6cc653c11f9e33 | 1,931 | py | Python | cas9/score.py | cangtu/cot | 2ecbe83fe7bb3538f80692fc4412830f6c976558 | [
"MIT"
] | 1 | 2018-07-11T06:12:51.000Z | 2018-07-11T06:12:51.000Z | cas9/score.py | cangtu/cot | 2ecbe83fe7bb3538f80692fc4412830f6c976558 | [
"MIT"
] | null | null | null | cas9/score.py | cangtu/cot | 2ecbe83fe7bb3538f80692fc4412830f6c976558 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
#
# Copyright @ 0x6c78.
#
# 16-10-20 1:27 0x6c78@gmail.com
#
# Distributed under terms of the MIT License
from operator import mul
from itertools import combinations
| 26.094595 | 88 | 0.518384 |
f71362b0a4e90908b800515208bd4b73487ecd9e | 1,823 | py | Python | RiotGames/API/Match.py | Timohiho/RiotGames | f75256cca1b5c224393dca99296a6163b70b335f | [
"MIT"
] | 2 | 2021-05-05T12:33:51.000Z | 2021-12-15T13:08:44.000Z | RiotGames/API/Match.py | Timohiho/RiotGames | f75256cca1b5c224393dca99296a6163b70b335f | [
"MIT"
] | null | null | null | RiotGames/API/Match.py | Timohiho/RiotGames | f75256cca1b5c224393dca99296a6163b70b335f | [
"MIT"
] | null | null | null | # Copyright (c) 2021.
# The copyright lies with Timo Hirsch-Hoffmann, the further use is only permitted with reference to source
import urllib.request
from RiotGames.API.RiotApi import RiotApi
| 28.936508 | 114 | 0.580362 |
f714e5ccca4b369e0fbd09fb0a4e6218788b9ed7 | 3,513 | py | Python | google_or_tools/coloring_ip_sat.py | tias/hakank | 87b7f180c9393afce440864eb9e5fb119bdec1a4 | [
"MIT"
] | 279 | 2015-01-10T09:55:35.000Z | 2022-03-28T02:34:03.000Z | google_or_tools/coloring_ip_sat.py | tias/hakank | 87b7f180c9393afce440864eb9e5fb119bdec1a4 | [
"MIT"
] | 10 | 2017-10-05T15:48:50.000Z | 2021-09-20T12:06:52.000Z | google_or_tools/coloring_ip_sat.py | tias/hakank | 87b7f180c9393afce440864eb9e5fb119bdec1a4 | [
"MIT"
] | 83 | 2015-01-20T03:44:00.000Z | 2022-03-13T23:53:06.000Z | # Copyright 2021 Hakan Kjellerstrand hakank@gmail.com
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Simple coloring problem (MIP approach) in OR-tools CP-SAT Solver.
Inspired by the GLPK:s model color.mod
'''
COLOR, Graph Coloring Problem
Written in GNU MathProg by Andrew Makhorin <mao@mai2.rcnet.ru>
Given an undirected loopless graph G = (V, E), where V is a set of
nodes, E <= V x V is a set of arcs, the Graph Coloring Problem is to
find a mapping (coloring) F: V -> C, where C = {1, 2, ... } is a set
of colors whose cardinality is as small as possible, such that
F(i) != F(j) for every arc (i,j) in E, that is adjacent nodes must
be assigned different colors.
'''
This is a port of my old OR-tools CP solver coloring_ip.py
This model was created by Hakan Kjellerstrand (hakank@gmail.com)
Also see my other OR-tols models: http://www.hakank.org/or_tools/
"""
from __future__ import print_function
from ortools.sat.python import cp_model as cp
import math, sys
# from cp_sat_utils import *
if __name__ == '__main__':
main()
| 27.232558 | 78 | 0.63507 |
f715d5acbe3a069259390dee428b7666dca26c08 | 9,706 | py | Python | src/intermediate_representation/sem_utils.py | ckosten/ValueNet4SPARQL | de320a2f0e1a4c5a6c0e5cc79057dda9901046e8 | [
"Apache-2.0"
] | null | null | null | src/intermediate_representation/sem_utils.py | ckosten/ValueNet4SPARQL | de320a2f0e1a4c5a6c0e5cc79057dda9901046e8 | [
"Apache-2.0"
] | null | null | null | src/intermediate_representation/sem_utils.py | ckosten/ValueNet4SPARQL | de320a2f0e1a4c5a6c0e5cc79057dda9901046e8 | [
"Apache-2.0"
] | 1 | 2021-09-23T13:02:45.000Z | 2021-09-23T13:02:45.000Z | # Copyright (c) Microsoft Corporation.
# Licensed under the MIT license.
# -*- coding: utf-8 -*-
"""
# @Time : 2019/5/27
# @Author : Jiaqi&Zecheng
# @File : sem_utils.py
# @Software: PyCharm
"""
import os
import json
import re as regex
import spacy
from nltk.stem import WordNetLemmatizer
wordnet_lemmatizer = WordNetLemmatizer()
nlp = spacy.load('en_core_web_sm', disable=['parser', 'ner'])
def alter_column0(datas):
"""
Attach column * table
:return: model_result_replace
"""
zero_count = 0
count = 0
result = []
for d in datas:
if 'C(0)' in d['model_result']:
pattern = regex.compile('C\(.*?\) T\(.*?\)')
result_pattern = list(set(pattern.findall(d['model_result'])))
ground_col_labels = []
for pa in result_pattern:
pa = pa.split(' ')
if pa[0] != 'C(0)':
index = int(pa[1][2:-1])
ground_col_labels.append(index)
ground_col_labels = list(set(ground_col_labels))
question_arg_type = d['question_arg_type']
question_arg = d['question_arg']
table_names = [[token.lemma_ for token in nlp(names)] for names in d['table_names']]
origin_table_names = [[wordnet_lemmatizer.lemmatize(x.lower()) for x in names.split(' ')] for names in
d['table_names']]
count += 1
easy_flag = False
for q_ind, q in enumerate(d['question_arg']):
q_str = " ".join(" ".join(x) for x in d['question_arg'])
if 'how many' in q_str or 'number of' in q_str or 'count of' in q_str:
easy_flag = True
if easy_flag:
# check for the last one is a table word
for q_ind, q in enumerate(d['question_arg']):
if (q_ind > 0 and q == ['many'] and d['question_arg'][q_ind - 1] == ['how']) or (
q_ind > 0 and q == ['of'] and d['question_arg'][q_ind - 1] == ['number']) or (
q_ind > 0 and q == ['of'] and d['question_arg'][q_ind - 1] == ['count']):
re = multi_equal(question_arg_type, q_ind, ['table'], 2)
if re is not False:
# This step work for the number of [table] example
table_result = table_names[origin_table_names.index(question_arg[re])]
result.append((d['query'], d['question'], table_result, d))
break
else:
re = multi_option(question_arg, q_ind, d['table_names'], 2)
if re is not False:
table_result = re
result.append((d['query'], d['question'], table_result, d))
pass
else:
re = multi_equal(question_arg_type, q_ind, ['table'], len(question_arg_type))
if re is not False:
# This step work for the number of [table] example
table_result = table_names[origin_table_names.index(question_arg[re])]
result.append((d['query'], d['question'], table_result, d))
break
pass
table_result = random_choice(question_arg=question_arg,
question_arg_type=question_arg_type,
names=table_names,
ground_col_labels=ground_col_labels, q_ind=q_ind, N=2,
origin_name=origin_table_names)
result.append((d['query'], d['question'], table_result, d))
zero_count += 1
break
else:
M_OP = False
for q_ind, q in enumerate(d['question_arg']):
if M_OP is False and q in [['than'], ['least'], ['most'], ['msot'], ['fewest']] or \
question_arg_type[q_ind] == ['M_OP']:
M_OP = True
re = multi_equal(question_arg_type, q_ind, ['table'], 3)
if re is not False:
# This step work for the number of [table] example
table_result = table_names[origin_table_names.index(question_arg[re])]
result.append((d['query'], d['question'], table_result, d))
break
else:
re = multi_option(question_arg, q_ind, d['table_names'], 3)
if re is not False:
table_result = re
# print(table_result)
result.append((d['query'], d['question'], table_result, d))
pass
else:
# zero_count += 1
re = multi_equal(question_arg_type, q_ind, ['table'], len(question_arg_type))
if re is not False:
# This step work for the number of [table] example
table_result = table_names[origin_table_names.index(question_arg[re])]
result.append((d['query'], d['question'], table_result, d))
break
table_result = random_choice(question_arg=question_arg,
question_arg_type=question_arg_type,
names=table_names,
ground_col_labels=ground_col_labels, q_ind=q_ind, N=2,
origin_name=origin_table_names)
result.append((d['query'], d['question'], table_result, d))
pass
if M_OP is False:
table_result = random_choice(question_arg=question_arg,
question_arg_type=question_arg_type,
names=table_names, ground_col_labels=ground_col_labels, q_ind=q_ind,
N=2,
origin_name=origin_table_names)
result.append((d['query'], d['question'], table_result, d))
for re in result:
table_names = [[token.lemma_ for token in nlp(names)] for names in re[3]['table_names']]
origin_table_names = [[x for x in names.split(' ')] for names in re[3]['table_names']]
if re[2] in table_names:
re[3]['rule_count'] = table_names.index(re[2])
else:
re[3]['rule_count'] = origin_table_names.index(re[2])
for data in datas:
if 'rule_count' in data:
str_replace = 'C(0) T(' + str(data['rule_count']) + ')'
replace_result = regex.sub('C\(0\) T\(.\)', str_replace, data['model_result'])
data['model_result_replace'] = replace_result
else:
data['model_result_replace'] = data['model_result']
| 46.888889 | 117 | 0.474861 |
f718b4fadc70811185014ceea7a2ac977f84aa08 | 1,472 | py | Python | src/server/core/tests/test_config.py | Freshia/masakhane-web | acf5eaef7ab8109d6f10f212765572a1dc893cd5 | [
"MIT"
] | 20 | 2021-04-09T09:08:53.000Z | 2022-03-16T09:45:36.000Z | src/server/core/tests/test_config.py | Freshia/masakhane-web | acf5eaef7ab8109d6f10f212765572a1dc893cd5 | [
"MIT"
] | 15 | 2021-04-19T07:04:56.000Z | 2022-03-12T00:57:44.000Z | src/server/core/tests/test_config.py | Freshia/masakhane-web | acf5eaef7ab8109d6f10f212765572a1dc893cd5 | [
"MIT"
] | 14 | 2021-04-19T04:39:04.000Z | 2021-10-08T22:19:58.000Z | import os
import unittest
from flask import current_app
from flask_testing import TestCase
from core import masakhane
if __name__ == '__main__':
unittest.main() | 32 | 77 | 0.688179 |
f719bed52604d78cd372c38b0ba41bc4f013d7b2 | 311 | py | Python | routes/show_bp.py | Silve1ra/fyyur | 580562cc592d587c9bed4f080b856664abb9f70d | [
"MIT"
] | 1 | 2021-09-17T11:56:38.000Z | 2021-09-17T11:56:38.000Z | routes/show_bp.py | Silve1ra/fyyur | 580562cc592d587c9bed4f080b856664abb9f70d | [
"MIT"
] | null | null | null | routes/show_bp.py | Silve1ra/fyyur | 580562cc592d587c9bed4f080b856664abb9f70d | [
"MIT"
] | null | null | null | from flask import Blueprint
from controllers.show import shows, create_shows, create_show_submission
show_bp = Blueprint('show_bp', __name__)
show_bp.route('/', methods=['GET'])(shows)
show_bp.route('/create', methods=['GET'])(create_shows)
show_bp.route('/create', methods=['POST'])(create_show_submission)
| 31.1 | 72 | 0.762058 |
f719f32c0de53ae35c0223c63678dbad415c2f11 | 22 | py | Python | __init__.py | andy-96/GFPGAN | 0ed1214760170cc27fdfd60da1f64a0699a28cf4 | [
"BSD-3-Clause"
] | null | null | null | __init__.py | andy-96/GFPGAN | 0ed1214760170cc27fdfd60da1f64a0699a28cf4 | [
"BSD-3-Clause"
] | null | null | null | __init__.py | andy-96/GFPGAN | 0ed1214760170cc27fdfd60da1f64a0699a28cf4 | [
"BSD-3-Clause"
] | null | null | null | from .gfpgan import *
| 11 | 21 | 0.727273 |
f71a808666b13ce290442e22bb59d1788d36b370 | 1,950 | py | Python | tools/find_run_binary.py | pospx/external_skia | 7a135275c9fc2a4b3cbdcf9a96e7102724752234 | [
"BSD-3-Clause"
] | 6,304 | 2015-01-05T23:45:12.000Z | 2022-03-31T09:48:13.000Z | third_party/skia/tools/find_run_binary.py | w4454962/miniblink49 | b294b6eacb3333659bf7b94d670d96edeeba14c0 | [
"Apache-2.0"
] | 459 | 2016-09-29T00:51:38.000Z | 2022-03-07T14:37:46.000Z | third_party/skia/tools/find_run_binary.py | w4454962/miniblink49 | b294b6eacb3333659bf7b94d670d96edeeba14c0 | [
"Apache-2.0"
] | 1,231 | 2015-01-05T03:17:39.000Z | 2022-03-31T22:54:58.000Z | #!/usr/bin/python
# Copyright (c) 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Module that finds and runs a binary by looking in the likely locations."""
import os
import subprocess
import sys
def run_command(args):
"""Runs a program from the command line and returns stdout.
Args:
args: Command line to run, as a list of string parameters. args[0] is the
binary to run.
Returns:
stdout from the program, as a single string.
Raises:
Exception: the program exited with a nonzero return code.
"""
proc = subprocess.Popen(args,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
(stdout, stderr) = proc.communicate()
if proc.returncode is not 0:
raise Exception('command "%s" failed: %s' % (args, stderr))
return stdout
def find_path_to_program(program):
"""Returns path to an existing program binary.
Args:
program: Basename of the program to find (e.g., 'render_pictures').
Returns:
Absolute path to the program binary, as a string.
Raises:
Exception: unable to find the program binary.
"""
trunk_path = os.path.abspath(os.path.join(os.path.dirname(__file__),
os.pardir))
possible_paths = [os.path.join(trunk_path, 'out', 'Release', program),
os.path.join(trunk_path, 'out', 'Debug', program),
os.path.join(trunk_path, 'out', 'Release',
program + '.exe'),
os.path.join(trunk_path, 'out', 'Debug',
program + '.exe')]
for try_path in possible_paths:
if os.path.isfile(try_path):
return try_path
raise Exception('cannot find %s in paths %s; maybe you need to '
'build %s?' % (program, possible_paths, program))
| 31.451613 | 77 | 0.615385 |
f71b20c6a58525d0ad6e5a5b0ad92dbbdf9f5849 | 1,599 | py | Python | user/tests.py | Vr3n/django_react_cart_system | f6d2572b640f711ff9c7020641051e3f92c3dd59 | [
"MIT"
] | null | null | null | user/tests.py | Vr3n/django_react_cart_system | f6d2572b640f711ff9c7020641051e3f92c3dd59 | [
"MIT"
] | 3 | 2021-06-18T15:13:46.000Z | 2021-06-18T18:24:43.000Z | user/tests.py | Vr3n/django_react_cart_system | f6d2572b640f711ff9c7020641051e3f92c3dd59 | [
"MIT"
] | null | null | null | from django.contrib.auth import get_user_model
from django.test import TestCase
# Create your tests here.
| 34.76087 | 70 | 0.642276 |
f71b9e37908dd5da30752301903bfc85504aa496 | 728 | py | Python | Examples/AcceptAllRevisions.py | aspose-words-cloud/aspose-words-cloud-python | 65c7b55fa4aac69b60d41e7f54aed231df285479 | [
"MIT"
] | 14 | 2018-07-15T17:01:52.000Z | 2018-11-29T06:15:33.000Z | Examples/AcceptAllRevisions.py | aspose-words-cloud/aspose-words-cloud-python | 65c7b55fa4aac69b60d41e7f54aed231df285479 | [
"MIT"
] | 1 | 2018-09-28T12:59:34.000Z | 2019-10-08T08:42:59.000Z | Examples/AcceptAllRevisions.py | aspose-words-cloud/aspose-words-cloud-python | 65c7b55fa4aac69b60d41e7f54aed231df285479 | [
"MIT"
] | 2 | 2020-12-21T07:59:17.000Z | 2022-02-16T21:41:25.000Z | import os
import asposewordscloud
import asposewordscloud.models.requests
from asposewordscloud.rest import ApiException
from shutil import copyfile
words_api = WordsApi(client_id = '####-####-####-####-####', client_secret = '##################')
file_name = 'test_doc.docx'
# Upload original document to cloud storage.
my_var1 = open(file_name, 'rb')
my_var2 = file_name
upload_file_request = asposewordscloud.models.requests.UploadFileRequest(file_content=my_var1, path=my_var2)
words_api.upload_file(upload_file_request)
# Calls AcceptAllRevisions method for document in cloud.
my_var3 = file_name
request = asposewordscloud.models.requests.AcceptAllRevisionsRequest(name=my_var3)
words_api.accept_all_revisions(request) | 38.315789 | 108 | 0.787088 |
f71c77d1c0f627d4c0d8120689ae89c7e1a43d86 | 2,577 | py | Python | agogosml_cli/cli/templates/{{cookiecutter.PROJECT_NAME_SLUG}}/e2e/testgen/main.py | cicorias/agogosml | 60e0b52c2fc721bdd965aadaf8c1afd1ddb9b7d1 | [
"MIT"
] | 13 | 2018-12-07T21:02:20.000Z | 2019-02-22T14:36:31.000Z | agogosml_cli/cli/templates/{{cookiecutter.PROJECT_NAME_SLUG}}/e2e/testgen/main.py | cicorias/agogosml | 60e0b52c2fc721bdd965aadaf8c1afd1ddb9b7d1 | [
"MIT"
] | 43 | 2018-11-30T11:31:43.000Z | 2019-04-03T16:09:06.000Z | agogosml_cli/cli/templates/{{cookiecutter.PROJECT_NAME_SLUG}}/e2e/testgen/main.py | cicorias/agogosml | 60e0b52c2fc721bdd965aadaf8c1afd1ddb9b7d1 | [
"MIT"
] | 13 | 2018-11-29T00:31:29.000Z | 2019-02-22T18:50:28.000Z | import json
import os
import sys
import time
from agogosml.common.abstract_streaming_client import find_streaming_clients
from agogosml.tools.sender import send
from agogosml.tools.receiver import receive
eh_base_config = {
"EVENT_HUB_NAMESPACE": os.getenv("EVENT_HUB_NAMESPACE"),
"EVENT_HUB_NAME": os.getenv("EVENT_HUB_NAME_INPUT"),
"EVENT_HUB_SAS_POLICY": os.getenv("EVENT_HUB_SAS_POLICY"),
"EVENT_HUB_SAS_KEY": os.getenv("EVENT_HUB_SAS_KEY_INPUT"),
}
eh_send_config = {
**eh_base_config,
'LEASE_CONTAINER_NAME': os.getenv('LEASE_CONTAINER_NAME_INPUT')
}
eh_receive_config = {
**eh_base_config,
"AZURE_STORAGE_ACCOUNT": os.getenv("AZURE_STORAGE_ACCOUNT"),
"AZURE_STORAGE_ACCESS_KEY": os.getenv("AZURE_STORAGE_ACCESS_KEY"),
"LEASE_CONTAINER_NAME": os.getenv("LEASE_CONTAINER_NAME_OUTPUT"),
"EVENT_HUB_CONSUMER_GROUP": os.getenv("EVENT_HUB_CONSUMER_GROUP"),
"TIMEOUT": 10,
}
kafka_base_config = {
'KAFKA_ADDRESS': os.getenv("KAFKA_ADDRESS"),
'TIMEOUT': os.getenv('KAFKA_TIMEOUT'),
# These configs are specific to Event Hub Head for Kafka
'EVENTHUB_KAFKA_CONNECTION_STRING': os.getenv('EVENTHUB_KAFKA_CONNECTION_STRING'),
'SSL_CERT_LOCATION': os.getenv('SSL_CERT_LOCATION') # /usr/local/etc/openssl/cert.pem
}
kafka_receive_config = {
**kafka_base_config,
'KAFKA_CONSUMER_GROUP': os.getenv('KAFKA_CONSUMER_GROUP'),
}
kafka_send_config = {
**kafka_base_config,
'KAFKA_TOPIC': os.getenv('KAFKA_TOPIC_INPUT')
}
if __name__ == "__main__":
cli()
| 28.955056 | 109 | 0.73962 |
f71c9a76519602baf175d90363655dc76c65ea28 | 512 | py | Python | MobileRevelator/python/postbank_finanzassistent_decrypt.py | ohunecker/MR | b0c93436c7964d87a0b8154f8b7662b1731124b9 | [
"MIT"
] | 98 | 2019-02-03T22:50:24.000Z | 2022-03-17T12:50:56.000Z | MobileRevelator/python/postbank_finanzassistent_decrypt.py | cewatkins/MR | 5ba553fd0eb4c1d80842074a553119486f005822 | [
"MIT"
] | 10 | 2019-03-14T20:12:10.000Z | 2020-05-23T10:37:54.000Z | MobileRevelator/python/postbank_finanzassistent_decrypt.py | cewatkins/MR | 5ba553fd0eb4c1d80842074a553119486f005822 | [
"MIT"
] | 30 | 2019-02-03T22:50:27.000Z | 2022-03-30T12:37:30.000Z | #Filename="finanzassistent"
#Type=Prerun
import os
| 34.133333 | 78 | 0.6875 |
f71d87a97f28b6912c291299e2155b00941ed654 | 1,615 | py | Python | imcsdk/__init__.py | kenrusse/imcsdk | c35ec5d41072c3ea82c64b1b66e0650d1d873657 | [
"Apache-2.0"
] | null | null | null | imcsdk/__init__.py | kenrusse/imcsdk | c35ec5d41072c3ea82c64b1b66e0650d1d873657 | [
"Apache-2.0"
] | null | null | null | imcsdk/__init__.py | kenrusse/imcsdk | c35ec5d41072c3ea82c64b1b66e0650d1d873657 | [
"Apache-2.0"
] | null | null | null | # Copyright 2016 Cisco Systems, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import logging
import logging.handlers
log = logging.getLogger('imc')
console = logging.StreamHandler()
formatter = logging.Formatter(
'%(asctime)s - %(name)s - %(levelname)s - %(message)s')
console.setFormatter(formatter)
def set_log_level(level=logging.DEBUG):
"""
Allows setting log level
Args:
level: logging level - import logging and pass enums from it(INFO/DEBUG/ERROR/etc..)
Returns:
None
Example:
from imcsdk import set_log_level
import logging
set_log_level(logging.INFO)
"""
log.setLevel(level)
console.setLevel(level)
set_log_level(logging.DEBUG)
log.addHandler(console)
if os.path.exists('/tmp/imcsdk_debug'):
enable_file_logging()
__author__ = 'Cisco Systems'
__email__ = 'ucs-python@cisco.com'
__version__ = '0.9.11'
| 26.47541 | 92 | 0.721981 |
f71e495c79f4bb1a1505cad9bdde64d7e37c7ba1 | 1,293 | py | Python | proxy/core/tls/certificate.py | fisabiliyusri/proxy | 29934503251b704813ef3e7ed8c2a5ae69448c8a | [
"BSD-3-Clause"
] | null | null | null | proxy/core/tls/certificate.py | fisabiliyusri/proxy | 29934503251b704813ef3e7ed8c2a5ae69448c8a | [
"BSD-3-Clause"
] | 8 | 2022-01-23T10:51:59.000Z | 2022-03-29T22:11:57.000Z | proxy/core/tls/certificate.py | fisabiliyusri/proxy | 29934503251b704813ef3e7ed8c2a5ae69448c8a | [
"BSD-3-Clause"
] | null | null | null | # -*- coding: utf-8 -*-
"""
proxy.py
~~~~~~~~
Fast, Lightweight, Pluggable, TLS interception capable proxy server focused on
Network monitoring, controls & Application development, testing, debugging.
:copyright: (c) 2013-present by Abhinav Singh and contributors.
:license: BSD, see LICENSE for more details.
"""
from typing import Tuple, Optional
| 23.509091 | 86 | 0.622583 |
f71f18898c8292f215084d67a0492fc48f5a9d6c | 8,974 | py | Python | main.py | PabloEmidio/Know-Weather-GTK | 797f25cbd0c8e1a2f124a5328d9decf2f3829252 | [
"MIT"
] | 4 | 2021-05-06T02:07:02.000Z | 2021-05-06T17:48:08.000Z | main.py | PabloEmidio/Know-Weather-GTK | 797f25cbd0c8e1a2f124a5328d9decf2f3829252 | [
"MIT"
] | null | null | null | main.py | PabloEmidio/Know-Weather-GTK | 797f25cbd0c8e1a2f124a5328d9decf2f3829252 | [
"MIT"
] | null | null | null | import gi
gi.require_version('Gtk', '3.0')
from gi.repository import Gtk
from datetime import datetime
from api_request import Weather
builder = Gtk.Builder()
builder.add_from_file('./glade/main.glade')
builder.connect_signals(Handler())
window = builder.get_object('window')
window.show_all()
Gtk.main() | 53.736527 | 131 | 0.622131 |
f71f497fb7582513c2d45b7633de0c7c9d7f7303 | 3,186 | py | Python | talk_lib/tests/testtalk.py | allankellynet/mimas | 10025d43bba9e84f502a266760786842e7158a05 | [
"MIT"
] | null | null | null | talk_lib/tests/testtalk.py | allankellynet/mimas | 10025d43bba9e84f502a266760786842e7158a05 | [
"MIT"
] | 1 | 2020-02-05T13:00:29.000Z | 2020-02-05T13:00:29.000Z | talk_lib/tests/testtalk.py | allankellynet/mimas | 10025d43bba9e84f502a266760786842e7158a05 | [
"MIT"
] | null | null | null | #-----------------------------------------------------
# Mimas: conference submission and review system
# (c) Allan Kelly 2016-2020 http://www.allankelly.net
# Licensed under MIT License, see LICENSE file
# -----------------------------------------------------
import unittest
from google.appengine.ext import testbed
from speaker_lib import speaker
from talk_lib import talk
| 29.775701 | 73 | 0.605775 |
f71f6972720d1f87a308457a99c2da6ef6fe19d9 | 63,620 | py | Python | LeetCode/contest-2018-11-26/fair_candy_swap.py | Max-PJB/python-learning2 | e8b05bef1574ee9abf8c90497e94ef20a7f4e3bd | [
"MIT"
] | null | null | null | LeetCode/contest-2018-11-26/fair_candy_swap.py | Max-PJB/python-learning2 | e8b05bef1574ee9abf8c90497e94ef20a7f4e3bd | [
"MIT"
] | null | null | null | LeetCode/contest-2018-11-26/fair_candy_swap.py | Max-PJB/python-learning2 | e8b05bef1574ee9abf8c90497e94ef20a7f4e3bd | [
"MIT"
] | null | null | null | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
-------------------------------------------------
@ Author : pengj
@ date : 2018/11/26 19:28
@ IDE : PyCharm
@ GitHub : https://github.com/JackyPJB
@ Contact : pengjianbiao@hotmail.com
-------------------------------------------------
Description : 888.
0
1
0
1
Easy
A[i] i B[j] j
ans ans[0] ans[1] Bob
1
A = [1,1], B = [2,2]
[1,2]
2
A = [1,2], B = [2,3]
[1,2]
3
A = [2], B = [1,3]
[2,3]
4
A = [1,2,5], B = [2,4]
[5,4]
1 <= A.length <= 10000
1 <= B.length <= 10000
1 <= A[i] <= 100000
1 <= B[i] <= 100000
-------------------------------------------------
"""
import time
__author__ = 'Max_Pengjb'
start = time.time()
#
A = [1, 2, 5]
B = [2, 4]
a1 = [1, 3, 5, 7, 9, 11, 13, 15, 17, 19, 21, 23, 25, 27, 29, 31, 33, 35, 37, 39, 41, 43, 45, 47, 49, 51, 53, 55, 57, 59,
61, 63, 65, 67, 69, 71, 73, 75, 77, 79, 81, 83, 85, 87, 89, 91, 93, 95, 97, 99, 101, 103, 105, 107, 109, 111, 113,
115, 117, 119, 121, 123, 125, 127, 129, 131, 133, 135, 137, 139, 141, 143, 145, 147, 149, 151, 153, 155, 157, 159,
161, 163, 165, 167, 169, 171, 173, 175, 177, 179, 181, 183, 185, 187, 189, 191, 193, 195, 197, 199, 201, 203, 205,
207, 209, 211, 213, 215, 217, 219, 221, 223, 225, 227, 229, 231, 233, 235, 237, 239, 241, 243, 245, 247, 249, 251,
253, 255, 257, 259, 261, 263, 265, 267, 269, 271, 273, 275, 277, 279, 281, 283, 285, 287, 289, 291, 293, 295, 297,
299, 301, 303, 305, 307, 309, 311, 313, 315, 317, 319, 321, 323, 325, 327, 329, 331, 333, 335, 337, 339, 341, 343,
345, 347, 349, 351, 353, 355, 357, 359, 361, 363, 365, 367, 369, 371, 373, 375, 377, 379, 381, 383, 385, 387, 389,
391, 393, 395, 397, 399, 401, 403, 405, 407, 409, 411, 413, 415, 417, 419, 421, 423, 425, 427, 429, 431, 433, 435,
437, 439, 441, 443, 445, 447, 449, 451, 453, 455, 457, 459, 461, 463, 465, 467, 469, 471, 473, 475, 477, 479, 481,
483, 485, 487, 489, 491, 493, 495, 497, 499, 501, 503, 505, 507, 509, 511, 513, 515, 517, 519, 521, 523, 525, 527,
529, 531, 533, 535, 537, 539, 541, 543, 545, 547, 549, 551, 553, 555, 557, 559, 561, 563, 565, 567, 569, 571, 573,
575, 577, 579, 581, 583, 585, 587, 589, 591, 593, 595, 597, 599, 601, 603, 605, 607, 609, 611, 613, 615, 617, 619,
621, 623, 625, 627, 629, 631, 633, 635, 637, 639, 641, 643, 645, 647, 649, 651, 653, 655, 657, 659, 661, 663, 665,
667, 669, 671, 673, 675, 677, 679, 681, 683, 685, 687, 689, 691, 693, 695, 697, 699, 701, 703, 705, 707, 709, 711,
713, 715, 717, 719, 721, 723, 725, 727, 729, 731, 733, 735, 737, 739, 741, 743, 745, 747, 749, 751, 753, 755, 757,
759, 761, 763, 765, 767, 769, 771, 773, 775, 777, 779, 781, 783, 785, 787, 789, 791, 793, 795, 797, 799, 801, 803,
805, 807, 809, 811, 813, 815, 817, 819, 821, 823, 825, 827, 829, 831, 833, 835, 837, 839, 841, 843, 845, 847, 849,
851, 853, 855, 857, 859, 861, 863, 865, 867, 869, 871, 873, 875, 877, 879, 881, 883, 885, 887, 889, 891, 893, 895,
897, 899, 901, 903, 905, 907, 909, 911, 913, 915, 917, 919, 921, 923, 925, 927, 929, 931, 933, 935, 937, 939, 941,
943, 945, 947, 949, 951, 953, 955, 957, 959, 961, 963, 965, 967, 969, 971, 973, 975, 977, 979, 981, 983, 985, 987,
989, 991, 993, 995, 997, 999, 1001, 1003, 1005, 1007, 1009, 1011, 1013, 1015, 1017, 1019, 1021, 1023, 1025, 1027,
1029, 1031, 1033, 1035, 1037, 1039, 1041, 1043, 1045, 1047, 1049, 1051, 1053, 1055, 1057, 1059, 1061, 1063, 1065,
1067, 1069, 1071, 1073, 1075, 1077, 1079, 1081, 1083, 1085, 1087, 1089, 1091, 1093, 1095, 1097, 1099, 1101, 1103,
1105, 1107, 1109, 1111, 1113, 1115, 1117, 1119, 1121, 1123, 1125, 1127, 1129, 1131, 1133, 1135, 1137, 1139, 1141,
1143, 1145, 1147, 1149, 1151, 1153, 1155, 1157, 1159, 1161, 1163, 1165, 1167, 1169, 1171, 1173, 1175, 1177, 1179,
1181, 1183, 1185, 1187, 1189, 1191, 1193, 1195, 1197, 1199, 1201, 1203, 1205, 1207, 1209, 1211, 1213, 1215, 1217,
1219, 1221, 1223, 1225, 1227, 1229, 1231, 1233, 1235, 1237, 1239, 1241, 1243, 1245, 1247, 1249, 1251, 1253, 1255,
1257, 1259, 1261, 1263, 1265, 1267, 1269, 1271, 1273, 1275, 1277, 1279, 1281, 1283, 1285, 1287, 1289, 1291, 1293,
1295, 1297, 1299, 1301, 1303, 1305, 1307, 1309, 1311, 1313, 1315, 1317, 1319, 1321, 1323, 1325, 1327, 1329, 1331,
1333, 1335, 1337, 1339, 1341, 1343, 1345, 1347, 1349, 1351, 1353, 1355, 1357, 1359, 1361, 1363, 1365, 1367, 1369,
1371, 1373, 1375, 1377, 1379, 1381, 1383, 1385, 1387, 1389, 1391, 1393, 1395, 1397, 1399, 1401, 1403, 1405, 1407,
1409, 1411, 1413, 1415, 1417, 1419, 1421, 1423, 1425, 1427, 1429, 1431, 1433, 1435, 1437, 1439, 1441, 1443, 1445,
1447, 1449, 1451, 1453, 1455, 1457, 1459, 1461, 1463, 1465, 1467, 1469, 1471, 1473, 1475, 1477, 1479, 1481, 1483,
1485, 1487, 1489, 1491, 1493, 1495, 1497, 1499, 1501, 1503, 1505, 1507, 1509, 1511, 1513, 1515, 1517, 1519, 1521,
1523, 1525, 1527, 1529, 1531, 1533, 1535, 1537, 1539, 1541, 1543, 1545, 1547, 1549, 1551, 1553, 1555, 1557, 1559,
1561, 1563, 1565, 1567, 1569, 1571, 1573, 1575, 1577, 1579, 1581, 1583, 1585, 1587, 1589, 1591, 1593, 1595, 1597,
1599, 1601, 1603, 1605, 1607, 1609, 1611, 1613, 1615, 1617, 1619, 1621, 1623, 1625, 1627, 1629, 1631, 1633, 1635,
1637, 1639, 1641, 1643, 1645, 1647, 1649, 1651, 1653, 1655, 1657, 1659, 1661, 1663, 1665, 1667, 1669, 1671, 1673,
1675, 1677, 1679, 1681, 1683, 1685, 1687, 1689, 1691, 1693, 1695, 1697, 1699, 1701, 1703, 1705, 1707, 1709, 1711,
1713, 1715, 1717, 1719, 1721, 1723, 1725, 1727, 1729, 1731, 1733, 1735, 1737, 1739, 1741, 1743, 1745, 1747, 1749,
1751, 1753, 1755, 1757, 1759, 1761, 1763, 1765, 1767, 1769, 1771, 1773, 1775, 1777, 1779, 1781, 1783, 1785, 1787,
1789, 1791, 1793, 1795, 1797, 1799, 1801, 1803, 1805, 1807, 1809, 1811, 1813, 1815, 1817, 1819, 1821, 1823, 1825,
1827, 1829, 1831, 1833, 1835, 1837, 1839, 1841, 1843, 1845, 1847, 1849, 1851, 1853, 1855, 1857, 1859, 1861, 1863,
1865, 1867, 1869, 1871, 1873, 1875, 1877, 1879, 1881, 1883, 1885, 1887, 1889, 1891, 1893, 1895, 1897, 1899, 1901,
1903, 1905, 1907, 1909, 1911, 1913, 1915, 1917, 1919, 1921, 1923, 1925, 1927, 1929, 1931, 1933, 1935, 1937, 1939,
1941, 1943, 1945, 1947, 1949, 1951, 1953, 1955, 1957, 1959, 1961, 1963, 1965, 1967, 1969, 1971, 1973, 1975, 1977,
1979, 1981, 1983, 1985, 1987, 1989, 1991, 1993, 1995, 1997, 1999, 2001, 2003, 2005, 2007, 2009, 2011, 2013, 2015,
2017, 2019, 2021, 2023, 2025, 2027, 2029, 2031, 2033, 2035, 2037, 2039, 2041, 2043, 2045, 2047, 2049, 2051, 2053,
2055, 2057, 2059, 2061, 2063, 2065, 2067, 2069, 2071, 2073, 2075, 2077, 2079, 2081, 2083, 2085, 2087, 2089, 2091,
2093, 2095, 2097, 2099, 2101, 2103, 2105, 2107, 2109, 2111, 2113, 2115, 2117, 2119, 2121, 2123, 2125, 2127, 2129,
2131, 2133, 2135, 2137, 2139, 2141, 2143, 2145, 2147, 2149, 2151, 2153, 2155, 2157, 2159, 2161, 2163, 2165, 2167,
2169, 2171, 2173, 2175, 2177, 2179, 2181, 2183, 2185, 2187, 2189, 2191, 2193, 2195, 2197, 2199, 2201, 2203, 2205,
2207, 2209, 2211, 2213, 2215, 2217, 2219, 2221, 2223, 2225, 2227, 2229, 2231, 2233, 2235, 2237, 2239, 2241, 2243,
2245, 2247, 2249, 2251, 2253, 2255, 2257, 2259, 2261, 2263, 2265, 2267, 2269, 2271, 2273, 2275, 2277, 2279, 2281,
2283, 2285, 2287, 2289, 2291, 2293, 2295, 2297, 2299, 2301, 2303, 2305, 2307, 2309, 2311, 2313, 2315, 2317, 2319,
2321, 2323, 2325, 2327, 2329, 2331, 2333, 2335, 2337, 2339, 2341, 2343, 2345, 2347, 2349, 2351, 2353, 2355, 2357,
2359, 2361, 2363, 2365, 2367, 2369, 2371, 2373, 2375, 2377, 2379, 2381, 2383, 2385, 2387, 2389, 2391, 2393, 2395,
2397, 2399, 2401, 2403, 2405, 2407, 2409, 2411, 2413, 2415, 2417, 2419, 2421, 2423, 2425, 2427, 2429, 2431, 2433,
2435, 2437, 2439, 2441, 2443, 2445, 2447, 2449, 2451, 2453, 2455, 2457, 2459, 2461, 2463, 2465, 2467, 2469, 2471,
2473, 2475, 2477, 2479, 2481, 2483, 2485, 2487, 2489, 2491, 2493, 2495, 2497, 2499, 2501, 2503, 2505, 2507, 2509,
2511, 2513, 2515, 2517, 2519, 2521, 2523, 2525, 2527, 2529, 2531, 2533, 2535, 2537, 2539, 2541, 2543, 2545, 2547,
2549, 2551, 2553, 2555, 2557, 2559, 2561, 2563, 2565, 2567, 2569, 2571, 2573, 2575, 2577, 2579, 2581, 2583, 2585,
2587, 2589, 2591, 2593, 2595, 2597, 2599, 2601, 2603, 2605, 2607, 2609, 2611, 2613, 2615, 2617, 2619, 2621, 2623,
2625, 2627, 2629, 2631, 2633, 2635, 2637, 2639, 2641, 2643, 2645, 2647, 2649, 2651, 2653, 2655, 2657, 2659, 2661,
2663, 2665, 2667, 2669, 2671, 2673, 2675, 2677, 2679, 2681, 2683, 2685, 2687, 2689, 2691, 2693, 2695, 2697, 2699,
2701, 2703, 2705, 2707, 2709, 2711, 2713, 2715, 2717, 2719, 2721, 2723, 2725, 2727, 2729, 2731, 2733, 2735, 2737,
2739, 2741, 2743, 2745, 2747, 2749, 2751, 2753, 2755, 2757, 2759, 2761, 2763, 2765, 2767, 2769, 2771, 2773, 2775,
2777, 2779, 2781, 2783, 2785, 2787, 2789, 2791, 2793, 2795, 2797, 2799, 2801, 2803, 2805, 2807, 2809, 2811, 2813,
2815, 2817, 2819, 2821, 2823, 2825, 2827, 2829, 2831, 2833, 2835, 2837, 2839, 2841, 2843, 2845, 2847, 2849, 2851,
2853, 2855, 2857, 2859, 2861, 2863, 2865, 2867, 2869, 2871, 2873, 2875, 2877, 2879, 2881, 2883, 2885, 2887, 2889,
2891, 2893, 2895, 2897, 2899, 2901, 2903, 2905, 2907, 2909, 2911, 2913, 2915, 2917, 2919, 2921, 2923, 2925, 2927,
2929, 2931, 2933, 2935, 2937, 2939, 2941, 2943, 2945, 2947, 2949, 2951, 2953, 2955, 2957, 2959, 2961, 2963, 2965,
2967, 2969, 2971, 2973, 2975, 2977, 2979, 2981, 2983, 2985, 2987, 2989, 2991, 2993, 2995, 2997, 2999, 3001, 3003,
3005, 3007, 3009, 3011, 3013, 3015, 3017, 3019, 3021, 3023, 3025, 3027, 3029, 3031, 3033, 3035, 3037, 3039, 3041,
3043, 3045, 3047, 3049, 3051, 3053, 3055, 3057, 3059, 3061, 3063, 3065, 3067, 3069, 3071, 3073, 3075, 3077, 3079,
3081, 3083, 3085, 3087, 3089, 3091, 3093, 3095, 3097, 3099, 3101, 3103, 3105, 3107, 3109, 3111, 3113, 3115, 3117,
3119, 3121, 3123, 3125, 3127, 3129, 3131, 3133, 3135, 3137, 3139, 3141, 3143, 3145, 3147, 3149, 3151, 3153, 3155,
3157, 3159, 3161, 3163, 3165, 3167, 3169, 3171, 3173, 3175, 3177, 3179, 3181, 3183, 3185, 3187, 3189, 3191, 3193,
3195, 3197, 3199, 3201, 3203, 3205, 3207, 3209, 3211, 3213, 3215, 3217, 3219, 3221, 3223, 3225, 3227, 3229, 3231,
3233, 3235, 3237, 3239, 3241, 3243, 3245, 3247, 3249, 3251, 3253, 3255, 3257, 3259, 3261, 3263, 3265, 3267, 3269,
3271, 3273, 3275, 3277, 3279, 3281, 3283, 3285, 3287, 3289, 3291, 3293, 3295, 3297, 3299, 3301, 3303, 3305, 3307,
3309, 3311, 3313, 3315, 3317, 3319, 3321, 3323, 3325, 3327, 3329, 3331, 3333, 3335, 3337, 3339, 3341, 3343, 3345,
3347, 3349, 3351, 3353, 3355, 3357, 3359, 3361, 3363, 3365, 3367, 3369, 3371, 3373, 3375, 3377, 3379, 3381, 3383,
3385, 3387, 3389, 3391, 3393, 3395, 3397, 3399, 3401, 3403, 3405, 3407, 3409, 3411, 3413, 3415, 3417, 3419, 3421,
3423, 3425, 3427, 3429, 3431, 3433, 3435, 3437, 3439, 3441, 3443, 3445, 3447, 3449, 3451, 3453, 3455, 3457, 3459,
3461, 3463, 3465, 3467, 3469, 3471, 3473, 3475, 3477, 3479, 3481, 3483, 3485, 3487, 3489, 3491, 3493, 3495, 3497,
3499, 3501, 3503, 3505, 3507, 3509, 3511, 3513, 3515, 3517, 3519, 3521, 3523, 3525, 3527, 3529, 3531, 3533, 3535,
3537, 3539, 3541, 3543, 3545, 3547, 3549, 3551, 3553, 3555, 3557, 3559, 3561, 3563, 3565, 3567, 3569, 3571, 3573,
3575, 3577, 3579, 3581, 3583, 3585, 3587, 3589, 3591, 3593, 3595, 3597, 3599, 3601, 3603, 3605, 3607, 3609, 3611,
3613, 3615, 3617, 3619, 3621, 3623, 3625, 3627, 3629, 3631, 3633, 3635, 3637, 3639, 3641, 3643, 3645, 3647, 3649,
3651, 3653, 3655, 3657, 3659, 3661, 3663, 3665, 3667, 3669, 3671, 3673, 3675, 3677, 3679, 3681, 3683, 3685, 3687,
3689, 3691, 3693, 3695, 3697, 3699, 3701, 3703, 3705, 3707, 3709, 3711, 3713, 3715, 3717, 3719, 3721, 3723, 3725,
3727, 3729, 3731, 3733, 3735, 3737, 3739, 3741, 3743, 3745, 3747, 3749, 3751, 3753, 3755, 3757, 3759, 3761, 3763,
3765, 3767, 3769, 3771, 3773, 3775, 3777, 3779, 3781, 3783, 3785, 3787, 3789, 3791, 3793, 3795, 3797, 3799, 3801,
3803, 3805, 3807, 3809, 3811, 3813, 3815, 3817, 3819, 3821, 3823, 3825, 3827, 3829, 3831, 3833, 3835, 3837, 3839,
3841, 3843, 3845, 3847, 3849, 3851, 3853, 3855, 3857, 3859, 3861, 3863, 3865, 3867, 3869, 3871, 3873, 3875, 3877,
3879, 3881, 3883, 3885, 3887, 3889, 3891, 3893, 3895, 3897, 3899, 3901, 3903, 3905, 3907, 3909, 3911, 3913, 3915,
3917, 3919, 3921, 3923, 3925, 3927, 3929, 3931, 3933, 3935, 3937, 3939, 3941, 3943, 3945, 3947, 3949, 3951, 3953,
3955, 3957, 3959, 3961, 3963, 3965, 3967, 3969, 3971, 3973, 3975, 3977, 3979, 3981, 3983, 3985, 3987, 3989, 3991,
3993, 3995, 3997, 3999, 4001, 4003, 4005, 4007, 4009, 4011, 4013, 4015, 4017, 4019, 4021, 4023, 4025, 4027, 4029,
4031, 4033, 4035, 4037, 4039, 4041, 4043, 4045, 4047, 4049, 4051, 4053, 4055, 4057, 4059, 4061, 4063, 4065, 4067,
4069, 4071, 4073, 4075, 4077, 4079, 4081, 4083, 4085, 4087, 4089, 4091, 4093, 4095, 4097, 4099, 4101, 4103, 4105,
4107, 4109, 4111, 4113, 4115, 4117, 4119, 4121, 4123, 4125, 4127, 4129, 4131, 4133, 4135, 4137, 4139, 4141, 4143,
4145, 4147, 4149, 4151, 4153, 4155, 4157, 4159, 4161, 4163, 4165, 4167, 4169, 4171, 4173, 4175, 4177, 4179, 4181,
4183, 4185, 4187, 4189, 4191, 4193, 4195, 4197, 4199, 4201, 4203, 4205, 4207, 4209, 4211, 4213, 4215, 4217, 4219,
4221, 4223, 4225, 4227, 4229, 4231, 4233, 4235, 4237, 4239, 4241, 4243, 4245, 4247, 4249, 4251, 4253, 4255, 4257,
4259, 4261, 4263, 4265, 4267, 4269, 4271, 4273, 4275, 4277, 4279, 4281, 4283, 4285, 4287, 4289, 4291, 4293, 4295,
4297, 4299, 4301, 4303, 4305, 4307, 4309, 4311, 4313, 4315, 4317, 4319, 4321, 4323, 4325, 4327, 4329, 4331, 4333,
4335, 4337, 4339, 4341, 4343, 4345, 4347, 4349, 4351, 4353, 4355, 4357, 4359, 4361, 4363, 4365, 4367, 4369, 4371,
4373, 4375, 4377, 4379, 4381, 4383, 4385, 4387, 4389, 4391, 4393, 4395, 4397, 4399, 4401, 4403, 4405, 4407, 4409,
4411, 4413, 4415, 4417, 4419, 4421, 4423, 4425, 4427, 4429, 4431, 4433, 4435, 4437, 4439, 4441, 4443, 4445, 4447,
4449, 4451, 4453, 4455, 4457, 4459, 4461, 4463, 4465, 4467, 4469, 4471, 4473, 4475, 4477, 4479, 4481, 4483, 4485,
4487, 4489, 4491, 4493, 4495, 4497, 4499, 4501, 4503, 4505, 4507, 4509, 4511, 4513, 4515, 4517, 4519, 4521, 4523,
4525, 4527, 4529, 4531, 4533, 4535, 4537, 4539, 4541, 4543, 4545, 4547, 4549, 4551, 4553, 4555, 4557, 4559, 4561,
4563, 4565, 4567, 4569, 4571, 4573, 4575, 4577, 4579, 4581, 4583, 4585, 4587, 4589, 4591, 4593, 4595, 4597, 4599,
4601, 4603, 4605, 4607, 4609, 4611, 4613, 4615, 4617, 4619, 4621, 4623, 4625, 4627, 4629, 4631, 4633, 4635, 4637,
4639, 4641, 4643, 4645, 4647, 4649, 4651, 4653, 4655, 4657, 4659, 4661, 4663, 4665, 4667, 4669, 4671, 4673, 4675,
4677, 4679, 4681, 4683, 4685, 4687, 4689, 4691, 4693, 4695, 4697, 4699, 4701, 4703, 4705, 4707, 4709, 4711, 4713,
4715, 4717, 4719, 4721, 4723, 4725, 4727, 4729, 4731, 4733, 4735, 4737, 4739, 4741, 4743, 4745, 4747, 4749, 4751,
4753, 4755, 4757, 4759, 4761, 4763, 4765, 4767, 4769, 4771, 4773, 4775, 4777, 4779, 4781, 4783, 4785, 4787, 4789,
4791, 4793, 4795, 4797, 4799, 4801, 4803, 4805, 4807, 4809, 4811, 4813, 4815, 4817, 4819, 4821, 4823, 4825, 4827,
4829, 4831, 4833, 4835, 4837, 4839, 4841, 4843, 4845, 4847, 4849, 4851, 4853, 4855, 4857, 4859, 4861, 4863, 4865,
4867, 4869, 4871, 4873, 4875, 4877, 4879, 4881, 4883, 4885, 4887, 4889, 4891, 4893, 4895, 4897, 4899, 4901, 4903,
4905, 4907, 4909, 4911, 4913, 4915, 4917, 4919, 4921, 4923, 4925, 4927, 4929, 4931, 4933, 4935, 4937, 4939, 4941,
4943, 4945, 4947, 4949, 4951, 4953, 4955, 4957, 4959, 4961, 4963, 4965, 4967, 4969, 4971, 4973, 4975, 4977, 4979,
4981, 4983, 4985, 4987, 4989, 4991, 4993, 4995, 4997, 4999, 5001, 5003, 5005, 5007, 5009, 5011, 5013, 5015, 5017,
5019, 5021, 5023, 5025, 5027, 5029, 5031, 5033, 5035, 5037, 5039, 5041, 5043, 5045, 5047, 5049, 5051, 5053, 5055,
5057, 5059, 5061, 5063, 5065, 5067, 5069, 5071, 5073, 5075, 5077, 5079, 5081, 5083, 5085, 5087, 5089, 5091, 5093,
5095, 5097, 5099, 5101, 5103, 5105, 5107, 5109, 5111, 5113, 5115, 5117, 5119, 5121, 5123, 5125, 5127, 5129, 5131,
5133, 5135, 5137, 5139, 5141, 5143, 5145, 5147, 5149, 5151, 5153, 5155, 5157, 5159, 5161, 5163, 5165, 5167, 5169,
5171, 5173, 5175, 5177, 5179, 5181, 5183, 5185, 5187, 5189, 5191, 5193, 5195, 5197, 5199, 5201, 5203, 5205, 5207,
5209, 5211, 5213, 5215, 5217, 5219, 5221, 5223, 5225, 5227, 5229, 5231, 5233, 5235, 5237, 5239, 5241, 5243, 5245,
5247, 5249, 5251, 5253, 5255, 5257, 5259, 5261, 5263, 5265, 5267, 5269, 5271, 5273, 5275, 5277, 5279, 5281, 5283,
5285, 5287, 5289, 5291, 5293, 5295, 5297, 5299, 5301, 5303, 5305, 5307, 5309, 5311, 5313, 5315, 5317, 5319, 5321,
5323, 5325, 5327, 5329, 5331, 5333, 5335, 5337, 5339, 5341, 5343, 5345, 5347, 5349, 5351, 5353, 5355, 5357, 5359,
5361, 5363, 5365, 5367, 5369, 5371, 5373, 5375, 5377, 5379, 5381, 5383, 5385, 5387, 5389, 5391, 5393, 5395, 5397,
5399, 5401, 5403, 5405, 5407, 5409, 5411, 5413, 5415, 5417, 5419, 5421, 5423, 5425, 5427, 5429, 5431, 5433, 5435,
5437, 5439, 5441, 5443, 5445, 5447, 5449, 5451, 5453, 5455, 5457, 5459, 5461, 5463, 5465, 5467, 5469, 5471, 5473,
5475, 5477, 5479, 5481, 5483, 5485, 5487, 5489, 5491, 5493, 5495, 5497, 5499, 5501, 5503, 5505, 5507, 5509, 5511,
5513, 5515, 5517, 5519, 5521, 5523, 5525, 5527, 5529, 5531, 5533, 5535, 5537, 5539, 5541, 5543, 5545, 5547, 5549,
5551, 5553, 5555, 5557, 5559, 5561, 5563, 5565, 5567, 5569, 5571, 5573, 5575, 5577, 5579, 5581, 5583, 5585, 5587,
5589, 5591, 5593, 5595, 5597, 5599, 5601, 5603, 5605, 5607, 5609, 5611, 5613, 5615, 5617, 5619, 5621, 5623, 5625,
5627, 5629, 5631, 5633, 5635, 5637, 5639, 5641, 5643, 5645, 5647, 5649, 5651, 5653, 5655, 5657, 5659, 5661, 5663,
5665, 5667, 5669, 5671, 5673, 5675, 5677, 5679, 5681, 5683, 5685, 5687, 5689, 5691, 5693, 5695, 5697, 5699, 5701,
5703, 5705, 5707, 5709, 5711, 5713, 5715, 5717, 5719, 5721, 5723, 5725, 5727, 5729, 5731, 5733, 5735, 5737, 5739,
5741, 5743, 5745, 5747, 5749, 5751, 5753, 5755, 5757, 5759, 5761, 5763, 5765, 5767, 5769, 5771, 5773, 5775, 5777,
5779, 5781, 5783, 5785, 5787, 5789, 5791, 5793, 5795, 5797, 5799, 5801, 5803, 5805, 5807, 5809, 5811, 5813, 5815,
5817, 5819, 5821, 5823, 5825, 5827, 5829, 5831, 5833, 5835, 5837, 5839, 5841, 5843, 5845, 5847, 5849, 5851, 5853,
5855, 5857, 5859, 5861, 5863, 5865, 5867, 5869, 5871, 5873, 5875, 5877, 5879, 5881, 5883, 5885, 5887, 5889, 5891,
5893, 5895, 5897, 5899, 5901, 5903, 5905, 5907, 5909, 5911, 5913, 5915, 5917, 5919, 5921, 5923, 5925, 5927, 5929,
5931, 5933, 5935, 5937, 5939, 5941, 5943, 5945, 5947, 5949, 5951, 5953, 5955, 5957, 5959, 5961, 5963, 5965, 5967,
5969, 5971, 5973, 5975, 5977, 5979, 5981, 5983, 5985, 5987, 5989, 5991, 5993, 5995, 5997, 5999, 6001, 6003, 6005,
6007, 6009, 6011, 6013, 6015, 6017, 6019, 6021, 6023, 6025, 6027, 6029, 6031, 6033, 6035, 6037, 6039, 6041, 6043,
6045, 6047, 6049, 6051, 6053, 6055, 6057, 6059, 6061, 6063, 6065, 6067, 6069, 6071, 6073, 6075, 6077, 6079, 6081,
6083, 6085, 6087, 6089, 6091, 6093, 6095, 6097, 6099, 6101, 6103, 6105, 6107, 6109, 6111, 6113, 6115, 6117, 6119,
6121, 6123, 6125, 6127, 6129, 6131, 6133, 6135, 6137, 6139, 6141, 6143, 6145, 6147, 6149, 6151, 6153, 6155, 6157,
6159, 6161, 6163, 6165, 6167, 6169, 6171, 6173, 6175, 6177, 6179, 6181, 6183, 6185, 6187, 6189, 6191, 6193, 6195,
6197, 6199, 6201, 6203, 6205, 6207, 6209, 6211, 6213, 6215, 6217, 6219, 6221, 6223, 6225, 6227, 6229, 6231, 6233,
6235, 6237, 6239, 6241, 6243, 6245, 6247, 6249, 6251, 6253, 6255, 6257, 6259, 6261, 6263, 6265, 6267, 6269, 6271,
6273, 6275, 6277, 6279, 6281, 6283, 6285, 6287, 6289, 6291, 6293, 6295, 6297, 6299, 6301, 6303, 6305, 6307, 6309,
6311, 6313, 6315, 6317, 6319, 6321, 6323, 6325, 6327, 6329, 6331, 6333, 6335, 6337, 6339, 6341, 6343, 6345, 6347,
6349, 6351, 6353, 6355, 6357, 6359, 6361, 6363, 6365, 6367, 6369, 6371, 6373, 6375, 6377, 6379, 6381, 6383, 6385,
6387, 6389, 6391, 6393, 6395, 6397, 6399, 6401, 6403, 6405, 6407, 6409, 6411, 6413, 6415, 6417, 6419, 6421, 6423,
6425, 6427, 6429, 6431, 6433, 6435, 6437, 6439, 6441, 6443, 6445, 6447, 6449, 6451, 6453, 6455, 6457, 6459, 6461,
6463, 6465, 6467, 6469, 6471, 6473, 6475, 6477, 6479, 6481, 6483, 6485, 6487, 6489, 6491, 6493, 6495, 6497, 6499,
6501, 6503, 6505, 6507, 6509, 6511, 6513, 6515, 6517, 6519, 6521, 6523, 6525, 6527, 6529, 6531, 6533, 6535, 6537,
6539, 6541, 6543, 6545, 6547, 6549, 6551, 6553, 6555, 6557, 6559, 6561, 6563, 6565, 6567, 6569, 6571, 6573, 6575,
6577, 6579, 6581, 6583, 6585, 6587, 6589, 6591, 6593, 6595, 6597, 6599, 6601, 6603, 6605, 6607, 6609, 6611, 6613,
6615, 6617, 6619, 6621, 6623, 6625, 6627, 6629, 6631, 6633, 6635, 6637, 6639, 6641, 6643, 6645, 6647, 6649, 6651,
6653, 6655, 6657, 6659, 6661, 6663, 6665, 6667, 6669, 6671, 6673, 6675, 6677, 6679, 6681, 6683, 6685, 6687, 6689,
6691, 6693, 6695, 6697, 6699, 6701, 6703, 6705, 6707, 6709, 6711, 6713, 6715, 6717, 6719, 6721, 6723, 6725, 6727,
6729, 6731, 6733, 6735, 6737, 6739, 6741, 6743, 6745, 6747, 6749, 6751, 6753, 6755, 6757, 6759, 6761, 6763, 6765,
6767, 6769, 6771, 6773, 6775, 6777, 6779, 6781, 6783, 6785, 6787, 6789, 6791, 6793, 6795, 6797, 6799, 6801, 6803,
6805, 6807, 6809, 6811, 6813, 6815, 6817, 6819, 6821, 6823, 6825, 6827, 6829, 6831, 6833, 6835, 6837, 6839, 6841,
6843, 6845, 6847, 6849, 6851, 6853, 6855, 6857, 6859, 6861, 6863, 6865, 6867, 6869, 6871, 6873, 6875, 6877, 6879,
6881, 6883, 6885, 6887, 6889, 6891, 6893, 6895, 6897, 6899, 6901, 6903, 6905, 6907, 6909, 6911, 6913, 6915, 6917,
6919, 6921, 6923, 6925, 6927, 6929, 6931, 6933, 6935, 6937, 6939, 6941, 6943, 6945, 6947, 6949, 6951, 6953, 6955,
6957, 6959, 6961, 6963, 6965, 6967, 6969, 6971, 6973, 6975, 6977, 6979, 6981, 6983, 6985, 6987, 6989, 6991, 6993,
6995, 6997, 6999, 7001, 7003, 7005, 7007, 7009, 7011, 7013, 7015, 7017, 7019, 7021, 7023, 7025, 7027, 7029, 7031,
7033, 7035, 7037, 7039, 7041, 7043, 7045, 7047, 7049, 7051, 7053, 7055, 7057, 7059, 7061, 7063, 7065, 7067, 7069,
7071, 7073, 7075, 7077, 7079, 7081, 7083, 7085, 7087, 7089, 7091, 7093, 7095, 7097, 7099, 7101, 7103, 7105, 7107,
7109, 7111, 7113, 7115, 7117, 7119, 7121, 7123, 7125, 7127, 7129, 7131, 7133, 7135, 7137, 7139, 7141, 7143, 7145,
7147, 7149, 7151, 7153, 7155, 7157, 7159, 7161, 7163, 7165, 7167, 7169, 7171, 7173, 7175, 7177, 7179, 7181, 7183,
7185, 7187, 7189, 7191, 7193, 7195, 7197, 7199, 7201, 7203, 7205, 7207, 7209, 7211, 7213, 7215, 7217, 7219, 7221,
7223, 7225, 7227, 7229, 7231, 7233, 7235, 7237, 7239, 7241, 7243, 7245, 7247, 7249, 7251, 7253, 7255, 7257, 7259,
7261, 7263, 7265, 7267, 7269, 7271, 7273, 7275, 7277, 7279, 7281, 7283, 7285, 7287, 7289, 7291, 7293, 7295, 7297,
7299, 7301, 7303, 7305, 7307, 7309, 7311, 7313, 7315, 7317, 7319, 7321, 7323, 7325, 7327, 7329, 7331, 7333, 7335,
7337, 7339, 7341, 7343, 7345, 7347, 7349, 7351, 7353, 7355, 7357, 7359, 7361, 7363, 7365, 7367, 7369, 7371, 7373,
7375, 7377, 7379, 7381, 7383, 7385, 7387, 7389, 7391, 7393, 7395, 7397, 7399, 7401, 7403, 7405, 7407, 7409, 7411,
7413, 7415, 7417, 7419, 7421, 7423, 7425, 7427, 7429, 7431, 7433, 7435, 7437, 7439, 7441, 7443, 7445, 7447, 7449,
7451, 7453, 7455, 7457, 7459, 7461, 7463, 7465, 7467, 7469, 7471, 7473, 7475, 7477, 7479, 7481, 7483, 7485, 7487,
7489, 7491, 7493, 7495, 7497, 7499, 7501, 7503, 7505, 7507, 7509, 7511, 7513, 7515, 7517, 7519, 7521, 7523, 7525,
7527, 7529, 7531, 7533, 7535, 7537, 7539, 7541, 7543, 7545, 7547, 7549, 7551, 7553, 7555, 7557, 7559, 7561, 7563,
7565, 7567, 7569, 7571, 7573, 7575, 7577, 7579, 7581, 7583, 7585, 7587, 7589, 7591, 7593, 7595, 7597, 7599, 7601,
7603, 7605, 7607, 7609, 7611, 7613, 7615, 7617, 7619, 7621, 7623, 7625, 7627, 7629, 7631, 7633, 7635, 7637, 7639,
7641, 7643, 7645, 7647, 7649, 7651, 7653, 7655, 7657, 7659, 7661, 7663, 7665, 7667, 7669, 7671, 7673, 7675, 7677,
7679, 7681, 7683, 7685, 7687, 7689, 7691, 7693, 7695, 7697, 7699, 7701, 7703, 7705, 7707, 7709, 7711, 7713, 7715,
7717, 7719, 7721, 7723, 7725, 7727, 7729, 7731, 7733, 7735, 7737, 7739, 7741, 7743, 7745, 7747, 7749, 7751, 7753,
7755, 7757, 7759, 7761, 7763, 7765, 7767, 7769, 7771, 7773, 7775, 7777, 7779, 7781, 7783, 7785, 7787, 7789, 7791,
7793, 7795, 7797, 7799, 7801, 7803, 7805, 7807, 7809, 7811, 7813, 7815, 7817, 7819, 7821, 7823, 7825, 7827, 7829,
7831, 7833, 7835, 7837, 7839, 7841, 7843, 7845, 7847, 7849, 7851, 7853, 7855, 7857, 7859, 7861, 7863, 7865, 7867,
7869, 7871, 7873, 7875, 7877, 7879, 7881, 7883, 7885, 7887, 7889, 7891, 7893, 7895, 7897, 7899, 7901, 7903, 7905,
7907, 7909, 7911, 7913, 7915, 7917, 7919, 7921, 7923, 7925, 7927, 7929, 7931, 7933, 7935, 7937, 7939, 7941, 7943,
7945, 7947, 7949, 7951, 7953, 7955, 7957, 7959, 7961, 7963, 7965, 7967, 7969, 7971, 7973, 7975, 7977, 7979, 7981,
7983, 7985, 7987, 7989, 7991, 7993, 7995, 7997, 7999, 8001, 8003, 8005, 8007, 8009, 8011, 8013, 8015, 8017, 8019,
8021, 8023, 8025, 8027, 8029, 8031, 8033, 8035, 8037, 8039, 8041, 8043, 8045, 8047, 8049, 8051, 8053, 8055, 8057,
8059, 8061, 8063, 8065, 8067, 8069, 8071, 8073, 8075, 8077, 8079, 8081, 8083, 8085, 8087, 8089, 8091, 8093, 8095,
8097, 8099, 8101, 8103, 8105, 8107, 8109, 8111, 8113, 8115, 8117, 8119, 8121, 8123, 8125, 8127, 8129, 8131, 8133,
8135, 8137, 8139, 8141, 8143, 8145, 8147, 8149, 8151, 8153, 8155, 8157, 8159, 8161, 8163, 8165, 8167, 8169, 8171,
8173, 8175, 8177, 8179, 8181, 8183, 8185, 8187, 8189, 8191, 8193, 8195, 8197, 8199, 8201, 8203, 8205, 8207, 8209,
8211, 8213, 8215, 8217, 8219, 8221, 8223, 8225, 8227, 8229, 8231, 8233, 8235, 8237, 8239, 8241, 8243, 8245, 8247,
8249, 8251, 8253, 8255, 8257, 8259, 8261, 8263, 8265, 8267, 8269, 8271, 8273, 8275, 8277, 8279, 8281, 8283, 8285,
8287, 8289, 8291, 8293, 8295, 8297, 8299, 8301, 8303, 8305, 8307, 8309, 8311, 8313, 8315, 8317, 8319, 8321, 8323,
8325, 8327, 8329, 8331, 8333, 8335, 8337, 8339, 8341, 8343, 8345, 8347, 8349, 8351, 8353, 8355, 8357, 8359, 8361,
8363, 8365, 8367, 8369, 8371, 8373, 8375, 8377, 8379, 8381, 8383, 8385, 8387, 8389, 8391, 8393, 8395, 8397, 8399,
8401, 8403, 8405, 8407, 8409, 8411, 8413, 8415, 8417, 8419, 8421, 8423, 8425, 8427, 8429, 8431, 8433, 8435, 8437,
8439, 8441, 8443, 8445, 8447, 8449, 8451, 8453, 8455, 8457, 8459, 8461, 8463, 8465, 8467, 8469, 8471, 8473, 8475,
8477, 8479, 8481, 8483, 8485, 8487, 8489, 8491, 8493, 8495, 8497, 8499, 8501, 8503, 8505, 8507, 8509, 8511, 8513,
8515, 8517, 8519, 8521, 8523, 8525, 8527, 8529, 8531, 8533, 8535, 8537, 8539, 8541, 8543, 8545, 8547, 8549, 8551,
8553, 8555, 8557, 8559, 8561, 8563, 8565, 8567, 8569, 8571, 8573, 8575, 8577, 8579, 8581, 8583, 8585, 8587, 8589,
8591, 8593, 8595, 8597, 8599, 8601, 8603, 8605, 8607, 8609, 8611, 8613, 8615, 8617, 8619, 8621, 8623, 8625, 8627,
8629, 8631, 8633, 8635, 8637, 8639, 8641, 8643, 8645, 8647, 8649, 8651, 8653, 8655, 8657, 8659, 8661, 8663, 8665,
8667, 8669, 8671, 8673, 8675, 8677, 8679, 8681, 8683, 8685, 8687, 8689, 8691, 8693, 8695, 8697, 8699, 8701, 8703,
8705, 8707, 8709, 8711, 8713, 8715, 8717, 8719, 8721, 8723, 8725, 8727, 8729, 8731, 8733, 8735, 8737, 8739, 8741,
8743, 8745, 8747, 8749, 8751, 8753, 8755, 8757, 8759, 8761, 8763, 8765, 8767, 8769, 8771, 8773, 8775, 8777, 8779,
8781, 8783, 8785, 8787, 8789, 8791, 8793, 8795, 8797, 8799, 8801, 8803, 8805, 8807, 8809, 8811, 8813, 8815, 8817,
8819, 8821, 8823, 8825, 8827, 8829, 8831, 8833, 8835, 8837, 8839, 8841, 8843, 8845, 8847, 8849, 8851, 8853, 8855,
8857, 8859, 8861, 8863, 8865, 8867, 8869, 8871, 8873, 8875, 8877, 8879, 8881, 8883, 8885, 8887, 8889, 8891, 8893,
8895, 8897, 8899, 8901, 8903, 8905, 8907, 8909, 8911, 8913, 8915, 8917, 8919, 8921, 8923, 8925, 8927, 8929, 8931,
8933, 8935, 8937, 8939, 8941, 8943, 8945, 8947, 8949, 8951, 8953, 8955, 8957, 8959, 8961, 8963, 8965, 8967, 8969,
8971, 8973, 8975, 8977, 8979, 8981, 8983, 8985, 8987, 8989, 8991, 8993, 8995, 8997, 8999, 9001, 9003, 9005, 9007,
9009, 9011, 9013, 9015, 9017, 9019, 9021, 9023, 9025, 9027, 9029, 9031, 9033, 9035, 9037, 9039, 9041, 9043, 9045,
9047, 9049, 9051, 9053, 9055, 9057, 9059, 9061, 9063, 9065, 9067, 9069, 9071, 9073, 9075, 9077, 9079, 9081, 9083,
9085, 9087, 9089, 9091, 9093, 9095, 9097, 9099, 9101, 9103, 9105, 9107, 9109, 9111, 9113, 9115, 9117, 9119, 9121,
9123, 9125, 9127, 9129, 9131, 9133, 9135, 9137, 9139, 9141, 9143, 9145, 9147, 9149, 9151, 9153, 9155, 9157, 9159,
9161, 9163, 9165, 9167, 9169, 9171, 9173, 9175, 9177, 9179, 9181, 9183, 9185, 9187, 9189, 9191, 9193, 9195, 9197,
9199, 9201, 9203, 9205, 9207, 9209, 9211, 9213, 9215, 9217, 9219, 9221, 9223, 9225, 9227, 9229, 9231, 9233, 9235,
9237, 9239, 9241, 9243, 9245, 9247, 9249, 9251, 9253, 9255, 9257, 9259, 9261, 9263, 9265, 9267, 9269, 9271, 9273,
9275, 9277, 9279, 9281, 9283, 9285, 9287, 9289, 9291, 9293, 9295, 9297, 9299, 9301, 9303, 9305, 9307, 9309, 9311,
9313, 9315, 9317, 9319, 9321, 9323, 9325, 9327, 9329, 9331, 9333, 9335, 9337, 9339, 9341, 9343, 9345, 9347, 9349,
9351, 9353, 9355, 9357, 9359, 9361, 9363, 9365, 9367, 9369, 9371, 9373, 9375, 9377, 9379, 9381, 9383, 9385, 9387,
9389, 9391, 9393, 9395, 9397, 9399, 9401, 9403, 9405, 9407, 9409, 9411, 9413, 9415, 9417, 9419, 9421, 9423, 9425,
9427, 9429, 9431, 9433, 9435, 9437, 9439, 9441, 9443, 9445, 9447, 9449, 9451, 9453, 9455, 9457, 9459, 9461, 9463,
9465, 9467, 9469, 9471, 9473, 9475, 9477, 9479, 9481, 9483, 9485, 9487, 9489, 9491, 9493, 9495, 9497, 9499, 9501,
9503, 9505, 9507, 9509, 9511, 9513, 9515, 9517, 9519, 9521, 9523, 9525, 9527, 9529, 9531, 9533, 9535, 9537, 9539,
9541, 9543, 9545, 9547, 9549, 9551, 9553, 9555, 9557, 9559, 9561, 9563, 9565, 9567, 9569, 9571, 9573, 9575, 9577,
9579, 9581, 9583, 9585, 9587, 9589, 9591, 9593, 9595, 9597, 9599, 9601, 9603, 9605, 9607, 9609, 9611, 9613, 9615,
9617, 9619, 9621, 9623, 9625, 9627, 9629, 9631, 9633, 9635, 9637, 9639, 9641, 9643, 9645, 9647, 9649, 9651, 9653,
9655, 9657, 9659, 9661, 9663, 9665, 9667, 9669, 9671, 9673, 9675, 9677, 9679, 9681, 9683, 9685, 9687, 9689, 9691,
9693, 9695, 9697, 9699, 9701, 9703, 9705, 9707, 9709, 9711, 9713, 9715, 9717, 9719, 9721, 9723, 9725, 9727, 9729,
9731, 9733, 9735, 9737, 9739, 9741, 9743, 9745, 9747, 9749, 9751, 9753, 9755, 9757, 9759, 9761, 9763, 9765, 9767,
9769, 9771, 9773, 9775, 9777, 9779, 9781, 9783, 9785, 9787, 9789, 9791, 9793, 9795, 9797, 9799, 9801, 9803, 9805,
9807, 9809, 9811, 9813, 9815, 9817, 9819, 9821, 9823, 9825, 9827, 9829, 9831, 9833, 9835, 9837, 9839, 9841, 9843,
9845, 9847, 9849, 9851, 9853, 9855, 9857, 9859, 9861, 9863, 9865, 9867, 9869, 9871, 9873, 9875, 9877, 9879, 9881,
9883, 9885, 9887, 9889, 9891, 9893, 9895, 9897, 9899, 9901, 9903, 9905, 9907, 9909, 9911, 9913, 9915, 9917, 9919,
9921, 9923, 9925, 9927, 9929, 9931, 9933, 9935, 9937, 9939, 9941, 9943, 9945, 9947, 9949, 9951, 9953, 9955, 9957,
9959, 9961, 9963, 9965, 9967, 9969, 9971, 9973, 9975, 9977, 9979, 9981, 9983, 9985, 9987, 9989, 9991, 9993, 9995,
9997, 9999, 4982]
b1 = [2, 4, 6, 8, 10, 12, 14, 16, 18, 20, 22, 24, 26, 28, 30, 32, 34, 36, 38, 40, 42, 44, 46, 48, 50, 52, 54, 56, 58,
60, 62, 64, 66, 68, 70, 72, 74, 76, 78, 80, 82, 84, 86, 88, 90, 92, 94, 96, 98, 100, 102, 104, 106, 108, 110, 112,
114, 116, 118, 120, 122, 124, 126, 128, 130, 132, 134, 136, 138, 140, 142, 144, 146, 148, 150, 152, 154, 156, 158,
160, 162, 164, 166, 168, 170, 172, 174, 176, 178, 180, 182, 184, 186, 188, 190, 192, 194, 196, 198, 200, 202, 204,
206, 208, 210, 212, 214, 216, 218, 220, 222, 224, 226, 228, 230, 232, 234, 236, 238, 240, 242, 244, 246, 248, 250,
252, 254, 256, 258, 260, 262, 264, 266, 268, 270, 272, 274, 276, 278, 280, 282, 284, 286, 288, 290, 292, 294, 296,
298, 300, 302, 304, 306, 308, 310, 312, 314, 316, 318, 320, 322, 324, 326, 328, 330, 332, 334, 336, 338, 340, 342,
344, 346, 348, 350, 352, 354, 356, 358, 360, 362, 364, 366, 368, 370, 372, 374, 376, 378, 380, 382, 384, 386, 388,
390, 392, 394, 396, 398, 400, 402, 404, 406, 408, 410, 412, 414, 416, 418, 420, 422, 424, 426, 428, 430, 432, 434,
436, 438, 440, 442, 444, 446, 448, 450, 452, 454, 456, 458, 460, 462, 464, 466, 468, 470, 472, 474, 476, 478, 480,
482, 484, 486, 488, 490, 492, 494, 496, 498, 500, 502, 504, 506, 508, 510, 512, 514, 516, 518, 520, 522, 524, 526,
528, 530, 532, 534, 536, 538, 540, 542, 544, 546, 548, 550, 552, 554, 556, 558, 560, 562, 564, 566, 568, 570, 572,
574, 576, 578, 580, 582, 584, 586, 588, 590, 592, 594, 596, 598, 600, 602, 604, 606, 608, 610, 612, 614, 616, 618,
620, 622, 624, 626, 628, 630, 632, 634, 636, 638, 640, 642, 644, 646, 648, 650, 652, 654, 656, 658, 660, 662, 664,
666, 668, 670, 672, 674, 676, 678, 680, 682, 684, 686, 688, 690, 692, 694, 696, 698, 700, 702, 704, 706, 708, 710,
712, 714, 716, 718, 720, 722, 724, 726, 728, 730, 732, 734, 736, 738, 740, 742, 744, 746, 748, 750, 752, 754, 756,
758, 760, 762, 764, 766, 768, 770, 772, 774, 776, 778, 780, 782, 784, 786, 788, 790, 792, 794, 796, 798, 800, 802,
804, 806, 808, 810, 812, 814, 816, 818, 820, 822, 824, 826, 828, 830, 832, 834, 836, 838, 840, 842, 844, 846, 848,
850, 852, 854, 856, 858, 860, 862, 864, 866, 868, 870, 872, 874, 876, 878, 880, 882, 884, 886, 888, 890, 892, 894,
896, 898, 900, 902, 904, 906, 908, 910, 912, 914, 916, 918, 920, 922, 924, 926, 928, 930, 932, 934, 936, 938, 940,
942, 944, 946, 948, 950, 952, 954, 956, 958, 960, 962, 964, 966, 968, 970, 972, 974, 976, 978, 980, 982, 984, 986,
988, 990, 992, 994, 996, 998, 1000, 1002, 1004, 1006, 1008, 1010, 1012, 1014, 1016, 1018, 1020, 1022, 1024, 1026,
1028, 1030, 1032, 1034, 1036, 1038, 1040, 1042, 1044, 1046, 1048, 1050, 1052, 1054, 1056, 1058, 1060, 1062, 1064,
1066, 1068, 1070, 1072, 1074, 1076, 1078, 1080, 1082, 1084, 1086, 1088, 1090, 1092, 1094, 1096, 1098, 1100, 1102,
1104, 1106, 1108, 1110, 1112, 1114, 1116, 1118, 1120, 1122, 1124, 1126, 1128, 1130, 1132, 1134, 1136, 1138, 1140,
1142, 1144, 1146, 1148, 1150, 1152, 1154, 1156, 1158, 1160, 1162, 1164, 1166, 1168, 1170, 1172, 1174, 1176, 1178,
1180, 1182, 1184, 1186, 1188, 1190, 1192, 1194, 1196, 1198, 1200, 1202, 1204, 1206, 1208, 1210, 1212, 1214, 1216,
1218, 1220, 1222, 1224, 1226, 1228, 1230, 1232, 1234, 1236, 1238, 1240, 1242, 1244, 1246, 1248, 1250, 1252, 1254,
1256, 1258, 1260, 1262, 1264, 1266, 1268, 1270, 1272, 1274, 1276, 1278, 1280, 1282, 1284, 1286, 1288, 1290, 1292,
1294, 1296, 1298, 1300, 1302, 1304, 1306, 1308, 1310, 1312, 1314, 1316, 1318, 1320, 1322, 1324, 1326, 1328, 1330,
1332, 1334, 1336, 1338, 1340, 1342, 1344, 1346, 1348, 1350, 1352, 1354, 1356, 1358, 1360, 1362, 1364, 1366, 1368,
1370, 1372, 1374, 1376, 1378, 1380, 1382, 1384, 1386, 1388, 1390, 1392, 1394, 1396, 1398, 1400, 1402, 1404, 1406,
1408, 1410, 1412, 1414, 1416, 1418, 1420, 1422, 1424, 1426, 1428, 1430, 1432, 1434, 1436, 1438, 1440, 1442, 1444,
1446, 1448, 1450, 1452, 1454, 1456, 1458, 1460, 1462, 1464, 1466, 1468, 1470, 1472, 1474, 1476, 1478, 1480, 1482,
1484, 1486, 1488, 1490, 1492, 1494, 1496, 1498, 1500, 1502, 1504, 1506, 1508, 1510, 1512, 1514, 1516, 1518, 1520,
1522, 1524, 1526, 1528, 1530, 1532, 1534, 1536, 1538, 1540, 1542, 1544, 1546, 1548, 1550, 1552, 1554, 1556, 1558,
1560, 1562, 1564, 1566, 1568, 1570, 1572, 1574, 1576, 1578, 1580, 1582, 1584, 1586, 1588, 1590, 1592, 1594, 1596,
1598, 1600, 1602, 1604, 1606, 1608, 1610, 1612, 1614, 1616, 1618, 1620, 1622, 1624, 1626, 1628, 1630, 1632, 1634,
1636, 1638, 1640, 1642, 1644, 1646, 1648, 1650, 1652, 1654, 1656, 1658, 1660, 1662, 1664, 1666, 1668, 1670, 1672,
1674, 1676, 1678, 1680, 1682, 1684, 1686, 1688, 1690, 1692, 1694, 1696, 1698, 1700, 1702, 1704, 1706, 1708, 1710,
1712, 1714, 1716, 1718, 1720, 1722, 1724, 1726, 1728, 1730, 1732, 1734, 1736, 1738, 1740, 1742, 1744, 1746, 1748,
1750, 1752, 1754, 1756, 1758, 1760, 1762, 1764, 1766, 1768, 1770, 1772, 1774, 1776, 1778, 1780, 1782, 1784, 1786,
1788, 1790, 1792, 1794, 1796, 1798, 1800, 1802, 1804, 1806, 1808, 1810, 1812, 1814, 1816, 1818, 1820, 1822, 1824,
1826, 1828, 1830, 1832, 1834, 1836, 1838, 1840, 1842, 1844, 1846, 1848, 1850, 1852, 1854, 1856, 1858, 1860, 1862,
1864, 1866, 1868, 1870, 1872, 1874, 1876, 1878, 1880, 1882, 1884, 1886, 1888, 1890, 1892, 1894, 1896, 1898, 1900,
1902, 1904, 1906, 1908, 1910, 1912, 1914, 1916, 1918, 1920, 1922, 1924, 1926, 1928, 1930, 1932, 1934, 1936, 1938,
1940, 1942, 1944, 1946, 1948, 1950, 1952, 1954, 1956, 1958, 1960, 1962, 1964, 1966, 1968, 1970, 1972, 1974, 1976,
1978, 1980, 1982, 1984, 1986, 1988, 1990, 1992, 1994, 1996, 1998, 2000, 2002, 2004, 2006, 2008, 2010, 2012, 2014,
2016, 2018, 2020, 2022, 2024, 2026, 2028, 2030, 2032, 2034, 2036, 2038, 2040, 2042, 2044, 2046, 2048, 2050, 2052,
2054, 2056, 2058, 2060, 2062, 2064, 2066, 2068, 2070, 2072, 2074, 2076, 2078, 2080, 2082, 2084, 2086, 2088, 2090,
2092, 2094, 2096, 2098, 2100, 2102, 2104, 2106, 2108, 2110, 2112, 2114, 2116, 2118, 2120, 2122, 2124, 2126, 2128,
2130, 2132, 2134, 2136, 2138, 2140, 2142, 2144, 2146, 2148, 2150, 2152, 2154, 2156, 2158, 2160, 2162, 2164, 2166,
2168, 2170, 2172, 2174, 2176, 2178, 2180, 2182, 2184, 2186, 2188, 2190, 2192, 2194, 2196, 2198, 2200, 2202, 2204,
2206, 2208, 2210, 2212, 2214, 2216, 2218, 2220, 2222, 2224, 2226, 2228, 2230, 2232, 2234, 2236, 2238, 2240, 2242,
2244, 2246, 2248, 2250, 2252, 2254, 2256, 2258, 2260, 2262, 2264, 2266, 2268, 2270, 2272, 2274, 2276, 2278, 2280,
2282, 2284, 2286, 2288, 2290, 2292, 2294, 2296, 2298, 2300, 2302, 2304, 2306, 2308, 2310, 2312, 2314, 2316, 2318,
2320, 2322, 2324, 2326, 2328, 2330, 2332, 2334, 2336, 2338, 2340, 2342, 2344, 2346, 2348, 2350, 2352, 2354, 2356,
2358, 2360, 2362, 2364, 2366, 2368, 2370, 2372, 2374, 2376, 2378, 2380, 2382, 2384, 2386, 2388, 2390, 2392, 2394,
2396, 2398, 2400, 2402, 2404, 2406, 2408, 2410, 2412, 2414, 2416, 2418, 2420, 2422, 2424, 2426, 2428, 2430, 2432,
2434, 2436, 2438, 2440, 2442, 2444, 2446, 2448, 2450, 2452, 2454, 2456, 2458, 2460, 2462, 2464, 2466, 2468, 2470,
2472, 2474, 2476, 2478, 2480, 2482, 2484, 2486, 2488, 2490, 2492, 2494, 2496, 2498, 2500, 2502, 2504, 2506, 2508,
2510, 2512, 2514, 2516, 2518, 2520, 2522, 2524, 2526, 2528, 2530, 2532, 2534, 2536, 2538, 2540, 2542, 2544, 2546,
2548, 2550, 2552, 2554, 2556, 2558, 2560, 2562, 2564, 2566, 2568, 2570, 2572, 2574, 2576, 2578, 2580, 2582, 2584,
2586, 2588, 2590, 2592, 2594, 2596, 2598, 2600, 2602, 2604, 2606, 2608, 2610, 2612, 2614, 2616, 2618, 2620, 2622,
2624, 2626, 2628, 2630, 2632, 2634, 2636, 2638, 2640, 2642, 2644, 2646, 2648, 2650, 2652, 2654, 2656, 2658, 2660,
2662, 2664, 2666, 2668, 2670, 2672, 2674, 2676, 2678, 2680, 2682, 2684, 2686, 2688, 2690, 2692, 2694, 2696, 2698,
2700, 2702, 2704, 2706, 2708, 2710, 2712, 2714, 2716, 2718, 2720, 2722, 2724, 2726, 2728, 2730, 2732, 2734, 2736,
2738, 2740, 2742, 2744, 2746, 2748, 2750, 2752, 2754, 2756, 2758, 2760, 2762, 2764, 2766, 2768, 2770, 2772, 2774,
2776, 2778, 2780, 2782, 2784, 2786, 2788, 2790, 2792, 2794, 2796, 2798, 2800, 2802, 2804, 2806, 2808, 2810, 2812,
2814, 2816, 2818, 2820, 2822, 2824, 2826, 2828, 2830, 2832, 2834, 2836, 2838, 2840, 2842, 2844, 2846, 2848, 2850,
2852, 2854, 2856, 2858, 2860, 2862, 2864, 2866, 2868, 2870, 2872, 2874, 2876, 2878, 2880, 2882, 2884, 2886, 2888,
2890, 2892, 2894, 2896, 2898, 2900, 2902, 2904, 2906, 2908, 2910, 2912, 2914, 2916, 2918, 2920, 2922, 2924, 2926,
2928, 2930, 2932, 2934, 2936, 2938, 2940, 2942, 2944, 2946, 2948, 2950, 2952, 2954, 2956, 2958, 2960, 2962, 2964,
2966, 2968, 2970, 2972, 2974, 2976, 2978, 2980, 2982, 2984, 2986, 2988, 2990, 2992, 2994, 2996, 2998, 3000, 3002,
3004, 3006, 3008, 3010, 3012, 3014, 3016, 3018, 3020, 3022, 3024, 3026, 3028, 3030, 3032, 3034, 3036, 3038, 3040,
3042, 3044, 3046, 3048, 3050, 3052, 3054, 3056, 3058, 3060, 3062, 3064, 3066, 3068, 3070, 3072, 3074, 3076, 3078,
3080, 3082, 3084, 3086, 3088, 3090, 3092, 3094, 3096, 3098, 3100, 3102, 3104, 3106, 3108, 3110, 3112, 3114, 3116,
3118, 3120, 3122, 3124, 3126, 3128, 3130, 3132, 3134, 3136, 3138, 3140, 3142, 3144, 3146, 3148, 3150, 3152, 3154,
3156, 3158, 3160, 3162, 3164, 3166, 3168, 3170, 3172, 3174, 3176, 3178, 3180, 3182, 3184, 3186, 3188, 3190, 3192,
3194, 3196, 3198, 3200, 3202, 3204, 3206, 3208, 3210, 3212, 3214, 3216, 3218, 3220, 3222, 3224, 3226, 3228, 3230,
3232, 3234, 3236, 3238, 3240, 3242, 3244, 3246, 3248, 3250, 3252, 3254, 3256, 3258, 3260, 3262, 3264, 3266, 3268,
3270, 3272, 3274, 3276, 3278, 3280, 3282, 3284, 3286, 3288, 3290, 3292, 3294, 3296, 3298, 3300, 3302, 3304, 3306,
3308, 3310, 3312, 3314, 3316, 3318, 3320, 3322, 3324, 3326, 3328, 3330, 3332, 3334, 3336, 3338, 3340, 3342, 3344,
3346, 3348, 3350, 3352, 3354, 3356, 3358, 3360, 3362, 3364, 3366, 3368, 3370, 3372, 3374, 3376, 3378, 3380, 3382,
3384, 3386, 3388, 3390, 3392, 3394, 3396, 3398, 3400, 3402, 3404, 3406, 3408, 3410, 3412, 3414, 3416, 3418, 3420,
3422, 3424, 3426, 3428, 3430, 3432, 3434, 3436, 3438, 3440, 3442, 3444, 3446, 3448, 3450, 3452, 3454, 3456, 3458,
3460, 3462, 3464, 3466, 3468, 3470, 3472, 3474, 3476, 3478, 3480, 3482, 3484, 3486, 3488, 3490, 3492, 3494, 3496,
3498, 3500, 3502, 3504, 3506, 3508, 3510, 3512, 3514, 3516, 3518, 3520, 3522, 3524, 3526, 3528, 3530, 3532, 3534,
3536, 3538, 3540, 3542, 3544, 3546, 3548, 3550, 3552, 3554, 3556, 3558, 3560, 3562, 3564, 3566, 3568, 3570, 3572,
3574, 3576, 3578, 3580, 3582, 3584, 3586, 3588, 3590, 3592, 3594, 3596, 3598, 3600, 3602, 3604, 3606, 3608, 3610,
3612, 3614, 3616, 3618, 3620, 3622, 3624, 3626, 3628, 3630, 3632, 3634, 3636, 3638, 3640, 3642, 3644, 3646, 3648,
3650, 3652, 3654, 3656, 3658, 3660, 3662, 3664, 3666, 3668, 3670, 3672, 3674, 3676, 3678, 3680, 3682, 3684, 3686,
3688, 3690, 3692, 3694, 3696, 3698, 3700, 3702, 3704, 3706, 3708, 3710, 3712, 3714, 3716, 3718, 3720, 3722, 3724,
3726, 3728, 3730, 3732, 3734, 3736, 3738, 3740, 3742, 3744, 3746, 3748, 3750, 3752, 3754, 3756, 3758, 3760, 3762,
3764, 3766, 3768, 3770, 3772, 3774, 3776, 3778, 3780, 3782, 3784, 3786, 3788, 3790, 3792, 3794, 3796, 3798, 3800,
3802, 3804, 3806, 3808, 3810, 3812, 3814, 3816, 3818, 3820, 3822, 3824, 3826, 3828, 3830, 3832, 3834, 3836, 3838,
3840, 3842, 3844, 3846, 3848, 3850, 3852, 3854, 3856, 3858, 3860, 3862, 3864, 3866, 3868, 3870, 3872, 3874, 3876,
3878, 3880, 3882, 3884, 3886, 3888, 3890, 3892, 3894, 3896, 3898, 3900, 3902, 3904, 3906, 3908, 3910, 3912, 3914,
3916, 3918, 3920, 3922, 3924, 3926, 3928, 3930, 3932, 3934, 3936, 3938, 3940, 3942, 3944, 3946, 3948, 3950, 3952,
3954, 3956, 3958, 3960, 3962, 3964, 3966, 3968, 3970, 3972, 3974, 3976, 3978, 3980, 3982, 3984, 3986, 3988, 3990,
3992, 3994, 3996, 3998, 4000, 4002, 4004, 4006, 4008, 4010, 4012, 4014, 4016, 4018, 4020, 4022, 4024, 4026, 4028,
4030, 4032, 4034, 4036, 4038, 4040, 4042, 4044, 4046, 4048, 4050, 4052, 4054, 4056, 4058, 4060, 4062, 4064, 4066,
4068, 4070, 4072, 4074, 4076, 4078, 4080, 4082, 4084, 4086, 4088, 4090, 4092, 4094, 4096, 4098, 4100, 4102, 4104,
4106, 4108, 4110, 4112, 4114, 4116, 4118, 4120, 4122, 4124, 4126, 4128, 4130, 4132, 4134, 4136, 4138, 4140, 4142,
4144, 4146, 4148, 4150, 4152, 4154, 4156, 4158, 4160, 4162, 4164, 4166, 4168, 4170, 4172, 4174, 4176, 4178, 4180,
4182, 4184, 4186, 4188, 4190, 4192, 4194, 4196, 4198, 4200, 4202, 4204, 4206, 4208, 4210, 4212, 4214, 4216, 4218,
4220, 4222, 4224, 4226, 4228, 4230, 4232, 4234, 4236, 4238, 4240, 4242, 4244, 4246, 4248, 4250, 4252, 4254, 4256,
4258, 4260, 4262, 4264, 4266, 4268, 4270, 4272, 4274, 4276, 4278, 4280, 4282, 4284, 4286, 4288, 4290, 4292, 4294,
4296, 4298, 4300, 4302, 4304, 4306, 4308, 4310, 4312, 4314, 4316, 4318, 4320, 4322, 4324, 4326, 4328, 4330, 4332,
4334, 4336, 4338, 4340, 4342, 4344, 4346, 4348, 4350, 4352, 4354, 4356, 4358, 4360, 4362, 4364, 4366, 4368, 4370,
4372, 4374, 4376, 4378, 4380, 4382, 4384, 4386, 4388, 4390, 4392, 4394, 4396, 4398, 4400, 4402, 4404, 4406, 4408,
4410, 4412, 4414, 4416, 4418, 4420, 4422, 4424, 4426, 4428, 4430, 4432, 4434, 4436, 4438, 4440, 4442, 4444, 4446,
4448, 4450, 4452, 4454, 4456, 4458, 4460, 4462, 4464, 4466, 4468, 4470, 4472, 4474, 4476, 4478, 4480, 4482, 4484,
4486, 4488, 4490, 4492, 4494, 4496, 4498, 4500, 4502, 4504, 4506, 4508, 4510, 4512, 4514, 4516, 4518, 4520, 4522,
4524, 4526, 4528, 4530, 4532, 4534, 4536, 4538, 4540, 4542, 4544, 4546, 4548, 4550, 4552, 4554, 4556, 4558, 4560,
4562, 4564, 4566, 4568, 4570, 4572, 4574, 4576, 4578, 4580, 4582, 4584, 4586, 4588, 4590, 4592, 4594, 4596, 4598,
4600, 4602, 4604, 4606, 4608, 4610, 4612, 4614, 4616, 4618, 4620, 4622, 4624, 4626, 4628, 4630, 4632, 4634, 4636,
4638, 4640, 4642, 4644, 4646, 4648, 4650, 4652, 4654, 4656, 4658, 4660, 4662, 4664, 4666, 4668, 4670, 4672, 4674,
4676, 4678, 4680, 4682, 4684, 4686, 4688, 4690, 4692, 4694, 4696, 4698, 4700, 4702, 4704, 4706, 4708, 4710, 4712,
4714, 4716, 4718, 4720, 4722, 4724, 4726, 4728, 4730, 4732, 4734, 4736, 4738, 4740, 4742, 4744, 4746, 4748, 4750,
4752, 4754, 4756, 4758, 4760, 4762, 4764, 4766, 4768, 4770, 4772, 4774, 4776, 4778, 4780, 4782, 4784, 4786, 4788,
4790, 4792, 4794, 4796, 4798, 4800, 4802, 4804, 4806, 4808, 4810, 4812, 4814, 4816, 4818, 4820, 4822, 4824, 4826,
4828, 4830, 4832, 4834, 4836, 4838, 4840, 4842, 4844, 4846, 4848, 4850, 4852, 4854, 4856, 4858, 4860, 4862, 4864,
4866, 4868, 4870, 4872, 4874, 4876, 4878, 4880, 4882, 4884, 4886, 4888, 4890, 4892, 4894, 4896, 4898, 4900, 4902,
4904, 4906, 4908, 4910, 4912, 4914, 4916, 4918, 4920, 4922, 4924, 4926, 4928, 4930, 4932, 4934, 4936, 4938, 4940,
4942, 4944, 4946, 4948, 4950, 4952, 4954, 4956, 4958, 4960, 4962, 4964, 4966, 4968, 4970, 4972, 4974, 4976, 4978,
4980, 4982, 4984, 4986, 4988, 4990, 4992, 4994, 4996, 4998, 5000, 5002, 5004, 5006, 5008, 5010, 5012, 5014, 5016,
5018, 5020, 5022, 5024, 5026, 5028, 5030, 5032, 5034, 5036, 5038, 5040, 5042, 5044, 5046, 5048, 5050, 5052, 5054,
5056, 5058, 5060, 5062, 5064, 5066, 5068, 5070, 5072, 5074, 5076, 5078, 5080, 5082, 5084, 5086, 5088, 5090, 5092,
5094, 5096, 5098, 5100, 5102, 5104, 5106, 5108, 5110, 5112, 5114, 5116, 5118, 5120, 5122, 5124, 5126, 5128, 5130,
5132, 5134, 5136, 5138, 5140, 5142, 5144, 5146, 5148, 5150, 5152, 5154, 5156, 5158, 5160, 5162, 5164, 5166, 5168,
5170, 5172, 5174, 5176, 5178, 5180, 5182, 5184, 5186, 5188, 5190, 5192, 5194, 5196, 5198, 5200, 5202, 5204, 5206,
5208, 5210, 5212, 5214, 5216, 5218, 5220, 5222, 5224, 5226, 5228, 5230, 5232, 5234, 5236, 5238, 5240, 5242, 5244,
5246, 5248, 5250, 5252, 5254, 5256, 5258, 5260, 5262, 5264, 5266, 5268, 5270, 5272, 5274, 5276, 5278, 5280, 5282,
5284, 5286, 5288, 5290, 5292, 5294, 5296, 5298, 5300, 5302, 5304, 5306, 5308, 5310, 5312, 5314, 5316, 5318, 5320,
5322, 5324, 5326, 5328, 5330, 5332, 5334, 5336, 5338, 5340, 5342, 5344, 5346, 5348, 5350, 5352, 5354, 5356, 5358,
5360, 5362, 5364, 5366, 5368, 5370, 5372, 5374, 5376, 5378, 5380, 5382, 5384, 5386, 5388, 5390, 5392, 5394, 5396,
5398, 5400, 5402, 5404, 5406, 5408, 5410, 5412, 5414, 5416, 5418, 5420, 5422, 5424, 5426, 5428, 5430, 5432, 5434,
5436, 5438, 5440, 5442, 5444, 5446, 5448, 5450, 5452, 5454, 5456, 5458, 5460, 5462, 5464, 5466, 5468, 5470, 5472,
5474, 5476, 5478, 5480, 5482, 5484, 5486, 5488, 5490, 5492, 5494, 5496, 5498, 5500, 5502, 5504, 5506, 5508, 5510,
5512, 5514, 5516, 5518, 5520, 5522, 5524, 5526, 5528, 5530, 5532, 5534, 5536, 5538, 5540, 5542, 5544, 5546, 5548,
5550, 5552, 5554, 5556, 5558, 5560, 5562, 5564, 5566, 5568, 5570, 5572, 5574, 5576, 5578, 5580, 5582, 5584, 5586,
5588, 5590, 5592, 5594, 5596, 5598, 5600, 5602, 5604, 5606, 5608, 5610, 5612, 5614, 5616, 5618, 5620, 5622, 5624,
5626, 5628, 5630, 5632, 5634, 5636, 5638, 5640, 5642, 5644, 5646, 5648, 5650, 5652, 5654, 5656, 5658, 5660, 5662,
5664, 5666, 5668, 5670, 5672, 5674, 5676, 5678, 5680, 5682, 5684, 5686, 5688, 5690, 5692, 5694, 5696, 5698, 5700,
5702, 5704, 5706, 5708, 5710, 5712, 5714, 5716, 5718, 5720, 5722, 5724, 5726, 5728, 5730, 5732, 5734, 5736, 5738,
5740, 5742, 5744, 5746, 5748, 5750, 5752, 5754, 5756, 5758, 5760, 5762, 5764, 5766, 5768, 5770, 5772, 5774, 5776,
5778, 5780, 5782, 5784, 5786, 5788, 5790, 5792, 5794, 5796, 5798, 5800, 5802, 5804, 5806, 5808, 5810, 5812, 5814,
5816, 5818, 5820, 5822, 5824, 5826, 5828, 5830, 5832, 5834, 5836, 5838, 5840, 5842, 5844, 5846, 5848, 5850, 5852,
5854, 5856, 5858, 5860, 5862, 5864, 5866, 5868, 5870, 5872, 5874, 5876, 5878, 5880, 5882, 5884, 5886, 5888, 5890,
5892, 5894, 5896, 5898, 5900, 5902, 5904, 5906, 5908, 5910, 5912, 5914, 5916, 5918, 5920, 5922, 5924, 5926, 5928,
5930, 5932, 5934, 5936, 5938, 5940, 5942, 5944, 5946, 5948, 5950, 5952, 5954, 5956, 5958, 5960, 5962, 5964, 5966,
5968, 5970, 5972, 5974, 5976, 5978, 5980, 5982, 5984, 5986, 5988, 5990, 5992, 5994, 5996, 5998, 6000, 6002, 6004,
6006, 6008, 6010, 6012, 6014, 6016, 6018, 6020, 6022, 6024, 6026, 6028, 6030, 6032, 6034, 6036, 6038, 6040, 6042,
6044, 6046, 6048, 6050, 6052, 6054, 6056, 6058, 6060, 6062, 6064, 6066, 6068, 6070, 6072, 6074, 6076, 6078, 6080,
6082, 6084, 6086, 6088, 6090, 6092, 6094, 6096, 6098, 6100, 6102, 6104, 6106, 6108, 6110, 6112, 6114, 6116, 6118,
6120, 6122, 6124, 6126, 6128, 6130, 6132, 6134, 6136, 6138, 6140, 6142, 6144, 6146, 6148, 6150, 6152, 6154, 6156,
6158, 6160, 6162, 6164, 6166, 6168, 6170, 6172, 6174, 6176, 6178, 6180, 6182, 6184, 6186, 6188, 6190, 6192, 6194,
6196, 6198, 6200, 6202, 6204, 6206, 6208, 6210, 6212, 6214, 6216, 6218, 6220, 6222, 6224, 6226, 6228, 6230, 6232,
6234, 6236, 6238, 6240, 6242, 6244, 6246, 6248, 6250, 6252, 6254, 6256, 6258, 6260, 6262, 6264, 6266, 6268, 6270,
6272, 6274, 6276, 6278, 6280, 6282, 6284, 6286, 6288, 6290, 6292, 6294, 6296, 6298, 6300, 6302, 6304, 6306, 6308,
6310, 6312, 6314, 6316, 6318, 6320, 6322, 6324, 6326, 6328, 6330, 6332, 6334, 6336, 6338, 6340, 6342, 6344, 6346,
6348, 6350, 6352, 6354, 6356, 6358, 6360, 6362, 6364, 6366, 6368, 6370, 6372, 6374, 6376, 6378, 6380, 6382, 6384,
6386, 6388, 6390, 6392, 6394, 6396, 6398, 6400, 6402, 6404, 6406, 6408, 6410, 6412, 6414, 6416, 6418, 6420, 6422,
6424, 6426, 6428, 6430, 6432, 6434, 6436, 6438, 6440, 6442, 6444, 6446, 6448, 6450, 6452, 6454, 6456, 6458, 6460,
6462, 6464, 6466, 6468, 6470, 6472, 6474, 6476, 6478, 6480, 6482, 6484, 6486, 6488, 6490, 6492, 6494, 6496, 6498,
6500, 6502, 6504, 6506, 6508, 6510, 6512, 6514, 6516, 6518, 6520, 6522, 6524, 6526, 6528, 6530, 6532, 6534, 6536,
6538, 6540, 6542, 6544, 6546, 6548, 6550, 6552, 6554, 6556, 6558, 6560, 6562, 6564, 6566, 6568, 6570, 6572, 6574,
6576, 6578, 6580, 6582, 6584, 6586, 6588, 6590, 6592, 6594, 6596, 6598, 6600, 6602, 6604, 6606, 6608, 6610, 6612,
6614, 6616, 6618, 6620, 6622, 6624, 6626, 6628, 6630, 6632, 6634, 6636, 6638, 6640, 6642, 6644, 6646, 6648, 6650,
6652, 6654, 6656, 6658, 6660, 6662, 6664, 6666, 6668, 6670, 6672, 6674, 6676, 6678, 6680, 6682, 6684, 6686, 6688,
6690, 6692, 6694, 6696, 6698, 6700, 6702, 6704, 6706, 6708, 6710, 6712, 6714, 6716, 6718, 6720, 6722, 6724, 6726,
6728, 6730, 6732, 6734, 6736, 6738, 6740, 6742, 6744, 6746, 6748, 6750, 6752, 6754, 6756, 6758, 6760, 6762, 6764,
6766, 6768, 6770, 6772, 6774, 6776, 6778, 6780, 6782, 6784, 6786, 6788, 6790, 6792, 6794, 6796, 6798, 6800, 6802,
6804, 6806, 6808, 6810, 6812, 6814, 6816, 6818, 6820, 6822, 6824, 6826, 6828, 6830, 6832, 6834, 6836, 6838, 6840,
6842, 6844, 6846, 6848, 6850, 6852, 6854, 6856, 6858, 6860, 6862, 6864, 6866, 6868, 6870, 6872, 6874, 6876, 6878,
6880, 6882, 6884, 6886, 6888, 6890, 6892, 6894, 6896, 6898, 6900, 6902, 6904, 6906, 6908, 6910, 6912, 6914, 6916,
6918, 6920, 6922, 6924, 6926, 6928, 6930, 6932, 6934, 6936, 6938, 6940, 6942, 6944, 6946, 6948, 6950, 6952, 6954,
6956, 6958, 6960, 6962, 6964, 6966, 6968, 6970, 6972, 6974, 6976, 6978, 6980, 6982, 6984, 6986, 6988, 6990, 6992,
6994, 6996, 6998, 7000, 7002, 7004, 7006, 7008, 7010, 7012, 7014, 7016, 7018, 7020, 7022, 7024, 7026, 7028, 7030,
7032, 7034, 7036, 7038, 7040, 7042, 7044, 7046, 7048, 7050, 7052, 7054, 7056, 7058, 7060, 7062, 7064, 7066, 7068,
7070, 7072, 7074, 7076, 7078, 7080, 7082, 7084, 7086, 7088, 7090, 7092, 7094, 7096, 7098, 7100, 7102, 7104, 7106,
7108, 7110, 7112, 7114, 7116, 7118, 7120, 7122, 7124, 7126, 7128, 7130, 7132, 7134, 7136, 7138, 7140, 7142, 7144,
7146, 7148, 7150, 7152, 7154, 7156, 7158, 7160, 7162, 7164, 7166, 7168, 7170, 7172, 7174, 7176, 7178, 7180, 7182,
7184, 7186, 7188, 7190, 7192, 7194, 7196, 7198, 7200, 7202, 7204, 7206, 7208, 7210, 7212, 7214, 7216, 7218, 7220,
7222, 7224, 7226, 7228, 7230, 7232, 7234, 7236, 7238, 7240, 7242, 7244, 7246, 7248, 7250, 7252, 7254, 7256, 7258,
7260, 7262, 7264, 7266, 7268, 7270, 7272, 7274, 7276, 7278, 7280, 7282, 7284, 7286, 7288, 7290, 7292, 7294, 7296,
7298, 7300, 7302, 7304, 7306, 7308, 7310, 7312, 7314, 7316, 7318, 7320, 7322, 7324, 7326, 7328, 7330, 7332, 7334,
7336, 7338, 7340, 7342, 7344, 7346, 7348, 7350, 7352, 7354, 7356, 7358, 7360, 7362, 7364, 7366, 7368, 7370, 7372,
7374, 7376, 7378, 7380, 7382, 7384, 7386, 7388, 7390, 7392, 7394, 7396, 7398, 7400, 7402, 7404, 7406, 7408, 7410,
7412, 7414, 7416, 7418, 7420, 7422, 7424, 7426, 7428, 7430, 7432, 7434, 7436, 7438, 7440, 7442, 7444, 7446, 7448,
7450, 7452, 7454, 7456, 7458, 7460, 7462, 7464, 7466, 7468, 7470, 7472, 7474, 7476, 7478, 7480, 7482, 7484, 7486,
7488, 7490, 7492, 7494, 7496, 7498, 7500, 7502, 7504, 7506, 7508, 7510, 7512, 7514, 7516, 7518, 7520, 7522, 7524,
7526, 7528, 7530, 7532, 7534, 7536, 7538, 7540, 7542, 7544, 7546, 7548, 7550, 7552, 7554, 7556, 7558, 7560, 7562,
7564, 7566, 7568, 7570, 7572, 7574, 7576, 7578, 7580, 7582, 7584, 7586, 7588, 7590, 7592, 7594, 7596, 7598, 7600,
7602, 7604, 7606, 7608, 7610, 7612, 7614, 7616, 7618, 7620, 7622, 7624, 7626, 7628, 7630, 7632, 7634, 7636, 7638,
7640, 7642, 7644, 7646, 7648, 7650, 7652, 7654, 7656, 7658, 7660, 7662, 7664, 7666, 7668, 7670, 7672, 7674, 7676,
7678, 7680, 7682, 7684, 7686, 7688, 7690, 7692, 7694, 7696, 7698, 7700, 7702, 7704, 7706, 7708, 7710, 7712, 7714,
7716, 7718, 7720, 7722, 7724, 7726, 7728, 7730, 7732, 7734, 7736, 7738, 7740, 7742, 7744, 7746, 7748, 7750, 7752,
7754, 7756, 7758, 7760, 7762, 7764, 7766, 7768, 7770, 7772, 7774, 7776, 7778, 7780, 7782, 7784, 7786, 7788, 7790,
7792, 7794, 7796, 7798, 7800, 7802, 7804, 7806, 7808, 7810, 7812, 7814, 7816, 7818, 7820, 7822, 7824, 7826, 7828,
7830, 7832, 7834, 7836, 7838, 7840, 7842, 7844, 7846, 7848, 7850, 7852, 7854, 7856, 7858, 7860, 7862, 7864, 7866,
7868, 7870, 7872, 7874, 7876, 7878, 7880, 7882, 7884, 7886, 7888, 7890, 7892, 7894, 7896, 7898, 7900, 7902, 7904,
7906, 7908, 7910, 7912, 7914, 7916, 7918, 7920, 7922, 7924, 7926, 7928, 7930, 7932, 7934, 7936, 7938, 7940, 7942,
7944, 7946, 7948, 7950, 7952, 7954, 7956, 7958, 7960, 7962, 7964, 7966, 7968, 7970, 7972, 7974, 7976, 7978, 7980,
7982, 7984, 7986, 7988, 7990, 7992, 7994, 7996, 7998, 8000, 8002, 8004, 8006, 8008, 8010, 8012, 8014, 8016, 8018,
8020, 8022, 8024, 8026, 8028, 8030, 8032, 8034, 8036, 8038, 8040, 8042, 8044, 8046, 8048, 8050, 8052, 8054, 8056,
8058, 8060, 8062, 8064, 8066, 8068, 8070, 8072, 8074, 8076, 8078, 8080, 8082, 8084, 8086, 8088, 8090, 8092, 8094,
8096, 8098, 8100, 8102, 8104, 8106, 8108, 8110, 8112, 8114, 8116, 8118, 8120, 8122, 8124, 8126, 8128, 8130, 8132,
8134, 8136, 8138, 8140, 8142, 8144, 8146, 8148, 8150, 8152, 8154, 8156, 8158, 8160, 8162, 8164, 8166, 8168, 8170,
8172, 8174, 8176, 8178, 8180, 8182, 8184, 8186, 8188, 8190, 8192, 8194, 8196, 8198, 8200, 8202, 8204, 8206, 8208,
8210, 8212, 8214, 8216, 8218, 8220, 8222, 8224, 8226, 8228, 8230, 8232, 8234, 8236, 8238, 8240, 8242, 8244, 8246,
8248, 8250, 8252, 8254, 8256, 8258, 8260, 8262, 8264, 8266, 8268, 8270, 8272, 8274, 8276, 8278, 8280, 8282, 8284,
8286, 8288, 8290, 8292, 8294, 8296, 8298, 8300, 8302, 8304, 8306, 8308, 8310, 8312, 8314, 8316, 8318, 8320, 8322,
8324, 8326, 8328, 8330, 8332, 8334, 8336, 8338, 8340, 8342, 8344, 8346, 8348, 8350, 8352, 8354, 8356, 8358, 8360,
8362, 8364, 8366, 8368, 8370, 8372, 8374, 8376, 8378, 8380, 8382, 8384, 8386, 8388, 8390, 8392, 8394, 8396, 8398,
8400, 8402, 8404, 8406, 8408, 8410, 8412, 8414, 8416, 8418, 8420, 8422, 8424, 8426, 8428, 8430, 8432, 8434, 8436,
8438, 8440, 8442, 8444, 8446, 8448, 8450, 8452, 8454, 8456, 8458, 8460, 8462, 8464, 8466, 8468, 8470, 8472, 8474,
8476, 8478, 8480, 8482, 8484, 8486, 8488, 8490, 8492, 8494, 8496, 8498, 8500, 8502, 8504, 8506, 8508, 8510, 8512,
8514, 8516, 8518, 8520, 8522, 8524, 8526, 8528, 8530, 8532, 8534, 8536, 8538, 8540, 8542, 8544, 8546, 8548, 8550,
8552, 8554, 8556, 8558, 8560, 8562, 8564, 8566, 8568, 8570, 8572, 8574, 8576, 8578, 8580, 8582, 8584, 8586, 8588,
8590, 8592, 8594, 8596, 8598, 8600, 8602, 8604, 8606, 8608, 8610, 8612, 8614, 8616, 8618, 8620, 8622, 8624, 8626,
8628, 8630, 8632, 8634, 8636, 8638, 8640, 8642, 8644, 8646, 8648, 8650, 8652, 8654, 8656, 8658, 8660, 8662, 8664,
8666, 8668, 8670, 8672, 8674, 8676, 8678, 8680, 8682, 8684, 8686, 8688, 8690, 8692, 8694, 8696, 8698, 8700, 8702,
8704, 8706, 8708, 8710, 8712, 8714, 8716, 8718, 8720, 8722, 8724, 8726, 8728, 8730, 8732, 8734, 8736, 8738, 8740,
8742, 8744, 8746, 8748, 8750, 8752, 8754, 8756, 8758, 8760, 8762, 8764, 8766, 8768, 8770, 8772, 8774, 8776, 8778,
8780, 8782, 8784, 8786, 8788, 8790, 8792, 8794, 8796, 8798, 8800, 8802, 8804, 8806, 8808, 8810, 8812, 8814, 8816,
8818, 8820, 8822, 8824, 8826, 8828, 8830, 8832, 8834, 8836, 8838, 8840, 8842, 8844, 8846, 8848, 8850, 8852, 8854,
8856, 8858, 8860, 8862, 8864, 8866, 8868, 8870, 8872, 8874, 8876, 8878, 8880, 8882, 8884, 8886, 8888, 8890, 8892,
8894, 8896, 8898, 8900, 8902, 8904, 8906, 8908, 8910, 8912, 8914, 8916, 8918, 8920, 8922, 8924, 8926, 8928, 8930,
8932, 8934, 8936, 8938, 8940, 8942, 8944, 8946, 8948, 8950, 8952, 8954, 8956, 8958, 8960, 8962, 8964, 8966, 8968,
8970, 8972, 8974, 8976, 8978, 8980, 8982, 8984, 8986, 8988, 8990, 8992, 8994, 8996, 8998, 9000, 9002, 9004, 9006,
9008, 9010, 9012, 9014, 9016, 9018, 9020, 9022, 9024, 9026, 9028, 9030, 9032, 9034, 9036, 9038, 9040, 9042, 9044,
9046, 9048, 9050, 9052, 9054, 9056, 9058, 9060, 9062, 9064, 9066, 9068, 9070, 9072, 9074, 9076, 9078, 9080, 9082,
9084, 9086, 9088, 9090, 9092, 9094, 9096, 9098, 9100, 9102, 9104, 9106, 9108, 9110, 9112, 9114, 9116, 9118, 9120,
9122, 9124, 9126, 9128, 9130, 9132, 9134, 9136, 9138, 9140, 9142, 9144, 9146, 9148, 9150, 9152, 9154, 9156, 9158,
9160, 9162, 9164, 9166, 9168, 9170, 9172, 9174, 9176, 9178, 9180, 9182, 9184, 9186, 9188, 9190, 9192, 9194, 9196,
9198, 9200, 9202, 9204, 9206, 9208, 9210, 9212, 9214, 9216, 9218, 9220, 9222, 9224, 9226, 9228, 9230, 9232, 9234,
9236, 9238, 9240, 9242, 9244, 9246, 9248, 9250, 9252, 9254, 9256, 9258, 9260, 9262, 9264, 9266, 9268, 9270, 9272,
9274, 9276, 9278, 9280, 9282, 9284, 9286, 9288, 9290, 9292, 9294, 9296, 9298, 9300, 9302, 9304, 9306, 9308, 9310,
9312, 9314, 9316, 9318, 9320, 9322, 9324, 9326, 9328, 9330, 9332, 9334, 9336, 9338, 9340, 9342, 9344, 9346, 9348,
9350, 9352, 9354, 9356, 9358, 9360, 9362, 9364, 9366, 9368, 9370, 9372, 9374, 9376, 9378, 9380, 9382, 9384, 9386,
9388, 9390, 9392, 9394, 9396, 9398, 9400, 9402, 9404, 9406, 9408, 9410, 9412, 9414, 9416, 9418, 9420, 9422, 9424,
9426, 9428, 9430, 9432, 9434, 9436, 9438, 9440, 9442, 9444, 9446, 9448, 9450, 9452, 9454, 9456, 9458, 9460, 9462,
9464, 9466, 9468, 9470, 9472, 9474, 9476, 9478, 9480, 9482, 9484, 9486, 9488, 9490, 9492, 9494, 9496, 9498, 9500,
9502, 9504, 9506, 9508, 9510, 9512, 9514, 9516, 9518, 9520, 9522, 9524, 9526, 9528, 9530, 9532, 9534, 9536, 9538,
9540, 9542, 9544, 9546, 9548, 9550, 9552, 9554, 9556, 9558, 9560, 9562, 9564, 9566, 9568, 9570, 9572, 9574, 9576,
9578, 9580, 9582, 9584, 9586, 9588, 9590, 9592, 9594, 9596, 9598, 9600, 9602, 9604, 9606, 9608, 9610, 9612, 9614,
9616, 9618, 9620, 9622, 9624, 9626, 9628, 9630, 9632, 9634, 9636, 9638, 9640, 9642, 9644, 9646, 9648, 9650, 9652,
9654, 9656, 9658, 9660, 9662, 9664, 9666, 9668, 9670, 9672, 9674, 9676, 9678, 9680, 9682, 9684, 9686, 9688, 9690,
9692, 9694, 9696, 9698, 9700, 9702, 9704, 9706, 9708, 9710, 9712, 9714, 9716, 9718, 9720, 9722, 9724, 9726, 9728,
9730, 9732, 9734, 9736, 9738, 9740, 9742, 9744, 9746, 9748, 9750, 9752, 9754, 9756, 9758, 9760, 9762, 9764, 9766,
9768, 9770, 9772, 9774, 9776, 9778, 9780, 9782, 9784, 9786, 9788, 9790, 9792, 9794, 9796, 9798, 9800, 9802, 9804,
9806, 9808, 9810, 9812, 9814, 9816, 9818, 9820, 9822, 9824, 9826, 9828, 9830, 9832, 9834, 9836, 9838, 9840, 9842,
9844, 9846, 9848, 9850, 9852, 9854, 9856, 9858, 9860, 9862, 9864, 9866, 9868, 9870, 9872, 9874, 9876, 9878, 9880,
9882, 9884, 9886, 9888, 9890, 9892, 9894, 9896, 9898, 9900, 9902, 9904, 9906, 9908, 9910, 9912, 9914, 9916, 9918,
9920, 9922, 9924, 9926, 9928, 9930, 9932, 9934, 9936, 9938, 9940, 9942, 9944, 9946, 9948, 9950, 9952, 9954, 9956,
9958, 9960, 9962, 9964, 9966, 9968, 9970, 9972, 9974, 9976, 9978, 9980, 9982, 9984, 9986, 9988, 9990, 9992, 9994,
9996, 9998, 10000, 10002]
res = Solution().fairCandySwap(a1, b1)
print(res)
#
end = time.time()
print('Running time: %s Seconds' % (end - start))
| 104.638158 | 120 | 0.623546 |
f7216012bdabcc6a4f76ac1521c5236c58f42c7a | 393 | py | Python | bookitoBackend/User/urls.py | mazdakdev/Bookito | 38e18fee22aafea95429da01e9769acf2748f676 | [
"MIT"
] | 10 | 2021-12-09T04:39:03.000Z | 2022-02-07T05:42:29.000Z | bookitoBackend/User/urls.py | mazdakdev/Bookito | 38e18fee22aafea95429da01e9769acf2748f676 | [
"MIT"
] | 2 | 2022-02-07T18:12:54.000Z | 2022-02-10T10:27:37.000Z | bookitoBackend/User/urls.py | mazdakdev/Bookito | 38e18fee22aafea95429da01e9769acf2748f676 | [
"MIT"
] | null | null | null | from django.urls import path
from .api import *
from knox import views as knox_views
urlpatterns = [
#domain.dn/api/v1/register/ | POST
path('register/' , SignUpAPI.as_view() , name='register'),
#domain.dn/api/v1/register/ | POST
path('login/' , SignInAPI.as_view() , name='login'),
#domain.dn/api/v1/user | GET
path('user/', MainUser.as_view() , name='user'),
] | 21.833333 | 62 | 0.64631 |
f725220f95e7ed6a18489ee1563dd48ce5f224d6 | 2,985 | py | Python | solutions/day18.py | nitekat1124/advent-of-code-2021 | 74501b84f0a08b33f48b4e5a2d66b8293c854150 | [
"WTFPL"
] | 3 | 2021-12-22T17:44:39.000Z | 2022-01-14T17:18:15.000Z | solutions/day18.py | nitekat1124/advent-of-code-2021 | 74501b84f0a08b33f48b4e5a2d66b8293c854150 | [
"WTFPL"
] | null | null | null | solutions/day18.py | nitekat1124/advent-of-code-2021 | 74501b84f0a08b33f48b4e5a2d66b8293c854150 | [
"WTFPL"
] | null | null | null | import re
from itertools import combinations
from utils.solution_base import SolutionBase
| 34.310345 | 114 | 0.524958 |