max_stars_repo_path stringlengths 3 269 | max_stars_repo_name stringlengths 4 119 | max_stars_count int64 0 191k | id stringlengths 1 7 | content stringlengths 6 1.05M | score float64 0.23 5.13 | int_score int64 0 5 |
|---|---|---|---|---|---|---|
arlpy/dtla.py | kjaxon/arlpy | 81 | 12759051 | ##############################################################################
#
# Copyright (c) 2016, <NAME>
#
# This file is part of arlpy which is released under Simplified BSD License.
# See file LICENSE or go to http://www.opensource.org/licenses/BSD-3-Clause
# for full license details.
#
##############################################################################
"""DTLA support toolbox."""
import os as _os
import numpy as _np
from scipy import signal as _sig
_fs = 1/(1.6e-6*26)  # sampling rate in Hz (1.6 us per word, 26 words per frame)
_framelen = 2*26     # frame length in bytes (26 uint16 words)
_channels = 24       # data channels per frame (frame also carries 2 magic words)
_magic = 0xc0de      # magic word marking the start of a valid frame

def check(filename):
    """Check if a file is likely to be a valid DTLA datafile.

    A valid datafile is large enough to hold at least one frame and starts
    with two magic words.

    :param filename: path of the file to check
    :returns: True if the file looks like a DTLA datafile, False otherwise
    """
    statinfo = _os.stat(filename)
    if statinfo.st_size >= 2*2*_channels:
        with open(filename, 'rb') as f:
            # count must be an integer; the original passed _framelen/2,
            # which is a float on Python 3 and makes fromfile raise
            data = _np.fromfile(f, dtype=_np.uint16, count=_framelen//2)
        # '&' binds tighter than '==', so the original expression compared
        # data[0] against (_magic & data[1]) and accepted any data[1] whose
        # bits are a superset of _magic; use a logical 'and' instead
        if data[0] == _magic and data[1] == _magic:
            return True
    return False
def get_sampling_rate(filename=None):
    """Get the sampling rate in Hz.

    :param filename: unused; accepted for API symmetry with the other
        accessors — the rate is a fixed module constant
    """
    return _fs
def get_channels(filename=None):
    """Get the number of available data channels.

    :param filename: unused; accepted for API symmetry with the other
        accessors — the channel count is a fixed module constant
    """
    return _channels
def get_data_length(filename):
    """Get the length of the datafile in samples.

    Each frame in the file holds one sample per channel, so the sample
    count is simply the file size divided by the frame length.
    """
    return _os.stat(filename).st_size // _framelen
def get_data(filename, channel=None, start=0, length=None, detrend='linear'):
    """Load selected data from DTLA recording.

    :param filename: name of the datafile
    :param channel: list of channels to read (base 0, None to read all channels)
    :param start: sample index to start from
    :param length: number of samples to read (None means read all available samples)
    :param detrend: processing to be applied to each channel to remove offset/bias
                    (supported values: ``'linear'``, ``'constant'``, ``None``)
    :returns: 2D float array (samples x channels), or a 1D array if a single
        channel was requested
    """
    if channel is None:
        channel = range(_channels)
    elif isinstance(channel, int):
        channel = [channel]
    if length is None:
        length = get_data_length(filename) - start
    with open(filename, 'rb') as f:
        f.seek(start*_framelen, _os.SEEK_SET)
        data = _np.fromfile(f, dtype=_np.uint16, count=_framelen//2*length)
    data = _np.reshape(data, [length, _framelen//2])
    data = data[:, 2:]  # drop the two magic/header words of each frame
    # np.float was deprecated in NumPy 1.20 and removed in 1.24; the builtin
    # float is the documented replacement (same float64 dtype)
    data = _np.take(data, channel, axis=1).astype(float)
    if len(channel) == 1:
        data = data.ravel()
    data = 5*data/65536-2.5  # convert 16-bit ADC counts to volts (+-2.5 V range)
    if detrend is not None:
        data = _sig.detrend(data, axis=0, type=detrend)
    return data
| 2.296875 | 2 |
DataScience/python/td_query/test/test_data_manipulate_ex.py | Ernestyj/PyStudy | 1 | 12759052 | # -*- coding: utf-8 -*-
import unittest
import os
import pickle
import pandas as pd
import numpy as np
from td_query import ROOT_PATH
from td_query.data_manipulate import data_manipulate_instance as instance
from teradata import UdaExec
class TestDataManipulate(unittest.TestCase):
    """Manual integration tests for td_query's data_manipulate singleton.

    NOTE(review): every test method is prefixed with '_' so unittest does
    not collect it; they appear to be run on demand (by renaming) against a
    live Teradata warehouse ('pp_scratch_risk' schema) — TODO confirm.
    """

    @classmethod
    def setUpClass(cls):
        # One-time initialization of the shared Teradata connection.
        print("**************************************** setUpClass ****************************************")
        instance.init()
        print(instance.teradata)

    @classmethod
    def tearDownClass(cls):
        print("************************************** tearDownClass ***************************************")

    def setUp(self):
        print("****** setUp *******")

    def tearDown(self):
        print("***** tearDown *****")

    def _example(self):
        # Sample query; the commented block shows how to pickle the result.
        df = instance.query_sample()
        # with open(ROOT_PATH + '/external/df_dispatch_bna.pickle', 'wb') as f: # save
        #     pickle.dump(df, f)
        print(df)

    def _query(self):
        # Ad-hoc SQL passthrough returning a DataFrame.
        query = '''select top 10 * from pp_scratch_risk.ms_auto_trend_us_bad;'''
        df = instance.query(query)
        print(df)

    def _query_table_schema(self):
        # 'show select *' returns the DDL describing the table's columns.
        dest_db = "pp_scratch_risk"
        dest_table = "ms_auto_trend_us2_1_3_100_100_1_1_1"
        result_cursor = instance.teradata.execute("show select * from {}.{};".format(dest_db, dest_table))
        last_row = result_cursor.fetchall()
        print(last_row)

    def _query_table_top_rows(self):
        table = "pp_scratch_risk.ms_auto_trend_us_bad"
        df = instance.query_table_top_rows(table)
        print(df)

    def _drop_table(self):
        # Destructive: drops the table created by the duplicate-rows tests.
        dest_db = "pp_scratch_risk"
        dest_table = "ms_auto_trend_us2_1_3_100_100_1_1_1"
        instance.drop_table(dest_db, dest_table)

    def _transalte_100_63_22_14_1(self):
        # (sic: 'transalte') Converts hyperloop rule strings into SQL predicates.
        rules = [
            "(SELLER_CONSUMER_SEG != 'Y') & (IS_ULP_TRANS_T_F >= 0.5) & (dc_string != '<missing>') & (amt2 == 'a-1k') & (SELLER_CONSUMER_SEG == 'C')",
            "(SELLER_CONSUMER_SEG == 'Y') & (IS_ULP_TRANS_T_F >= 0.5) & (dc_string == '10008') & (amt2 != 'c-1h') & (amt2 != 'e-<50')",
        ]
        result = instance.translate_hyperloop_rules_to_sql(rules)
        print(result)

    def _duplicate_rows_to_new_table(self):
        # Oversamples rows by per-class weights into a new table whose name
        # encodes the weights used.
        src_db = "pp_scratch_risk"
        src_table = 'ms_auto_trend_us2_1_3'
        dest_db = "pp_scratch_risk"
        weight_a = 900
        weight_b = 400
        weight_c = 9
        weight_d = 16
        weight_e = 1
        dest_table = "ms_auto_trend_us2_1_3_{}_{}_{}_{}_{}".format(weight_a, weight_b, weight_c, weight_d, weight_e)
        instance.duplicate_rows_to_new_table(src_db, src_table, dest_db, dest_table, weight_a, weight_b, weight_c, weight_d, weight_e)

    def _duplicate_rows_from_bad_and_sample_from_good_into_new_table(self):
        # As above, but oversamples 'bad' rows and downsamples 'good' rows.
        src_db = "pp_scratch_risk"
        src_table = 'ms_auto_trend_us'
        dest_db = "pp_scratch_risk"
        bad_scale = 1
        good_scale = 3
        weight_a = 52
        weight_b = 16
        weight_c = 23
        weight_d = 5
        weight_e = 4
        dest_table = "ms_auto_trend_us_{}_{}__{}_{}_{}_{}_{}_v2".format(bad_scale, good_scale, weight_a, weight_b, weight_c, weight_d, weight_e)
        instance.duplicate_rows_from_bad_and_sample_from_good_into_new_table(src_db, src_table, dest_db, dest_table,
                                                                             bad_scale, good_scale,
                                                                             weight_a, weight_b, weight_c, weight_d, weight_e)

    def _generate_hl_job_json(self):
        training_table = "ms_auto_trend_us2_1_3"
        testing_table = "ms_auto_trend_us_t"
        instance.generate_hl_job_json(training_table, testing_table, template_name='hl_job_template_na.json')

    def _add_weight_col_to_table(self):
        # Earlier weight choices kept for reference:
        # weight_a = 0.312
        # weight_b = 0.140
        # weight_c = 0.011
        # weight_d = 0.011
        # weight_e = 0.001
        src_db = "pp_scratch_risk"
        src_table = 'ms_auto_trend_us2_1_3'
        weight_a = 10 * 30
        weight_b = 8 * 20
        weight_c = 4.6 * 3
        weight_d = 3.7 * 4
        weight_e = 1 * 1
        instance.add_weight_col_to_table(src_db, src_table, weight_a, weight_b, weight_c, weight_d, weight_e)

    def _update_weight_col_in_table(self):
        src_db = "pp_scratch_risk"
        src_table = 'ms_auto_trend_us2_1_3'
        src_col = 'PMT_USD_AMT'
        instance.update_weight_col_in_table(src_db, src_table, src_col)

    def _update_custom_weight_col_in_table(self):
        src_db = "pp_scratch_risk"
        src_table = 'ms_auto_trend_us2_1_3'
        src_col = 'PMT_USD_AMT'
        instance.update_custom_weight_col_in_table(src_db, src_table, src_col)
movement.py | dluiscosta/pythongame | 0 | 12759053 | import pygame as pg
class Movement:  # control class
    """Tracks the frame-by-frame progress of a character's movement
    between two grid cells.

    A movement takes ``move_steps`` frames. During the first
    ``beginning_mov_parcel`` frames a non-first movement may still be
    cancelled (or redirected by a new :meth:`attempt_movement`).
    """

    def __init__(self, move_steps=16, beginning_mov_parcel=0, moving_sprite_frames=20):
        self.move_steps = move_steps  # frames required for each movement
        # Size of the beginning parcel (in frames) in which a movement can still
        # be cancelled
        self.beginning_mov_parcel = beginning_mov_parcel
        # During movement, the amount of frames that will exhibit the same image
        # before using the next on the sprite
        self.moving_sprite_frames = moving_sprite_frames
        self.moving = False  # characters start idle
        self.direction = None
        self.steps_taken = 0  # in a single movement between two cells
        # First movement between 2 cells caused by a single keystrike
        self.first_mov = None

    def get_direction(self):
        """Current movement direction, or None when idle."""
        return self.direction if self.is_moving() else None

    def is_moving(self):
        return self.moving

    def get_steps_taken(self):
        """Steps taken in the current movement, or None when idle."""
        return self.steps_taken if self.is_moving() else None

    def get_move_steps(self):
        return self.move_steps if self.is_moving() else None

    def get_moving_sprite_frames(self):
        return self.moving_sprite_frames

    # If the movement is still at the beginning cancellable parcel (specified by
    # beginning_mov_parcel), cancel it
    def attempt_cancel(self):
        """Cancel the current movement if still cancellable.

        :returns: True if the movement was cancelled, False otherwise.

        BUG FIX: the original returned None unconditionally, so
        attempt_movement's ``or self.attempt_cancel()`` branch could never
        redirect an in-progress movement (the cancel side effect silently
        dropped it instead); it also raised TypeError (None < int) when
        called while idle.
        """
        if (self.is_moving() and not self.first_mov
                and self.steps_taken < self.beginning_mov_parcel):
            self.moving = False
            return True
        return False

    # Try to start a movement in a given direction
    def attempt_movement(self, dir, first=False):
        if (not self.is_moving() or
                self.attempt_cancel()):  # can interrupt movements that have just started
            self.direction = dir
            self.steps_taken = 0
            self.moving = True
            self.first_mov = first

    # Passes one frame, taking a step or finishing the movement if it's complete
    def continue_movement(self, characters):
        if self.get_steps_taken() == self.move_steps:  # finishes movement
            for character in characters:
                character.move_char(self.direction)
            self.moving = False
            self.first_mov = False
            return False
        else:  # continue movement
            if self.is_moving():
                self.steps_taken += 1
            return True
| 3.34375 | 3 |
jotleaf/test_localsettings.py | reverie/jotleaf.com | 1 | 12759054 | <filename>jotleaf/test_localsettings.py
from root_dir import root_dir
# Local Django settings overrides for the test/dev environment.
DEBUG = True
LOG_DIRECTORY = root_dir('..', 'dev_logs')
# In-memory email backend: messages are captured, not sent.
EMAIL_BACKEND = 'django.core.mail.backends.locmem.EmailBackend'
EMAIL_PORT = 1025
YWOT_HOST = 'localhost:8001'
# Placeholder third-party credentials ('...') — fill in per-developer values.
PUSHER_APP_ID = '...'
PUSHER_KEY = '...'
PUSHER_SECRET = '...'
MIXPANEL_ID = "..."
STATIC_URL = '/static/'
SENTRY_DSN = None  # error reporting disabled in tests
| 1.445313 | 1 |
robot-server/robot_server/service/system/router.py | faliester/opentrons | 0 | 12759055 | <filename>robot-server/robot_server/service/system/router.py
import logging
from datetime import datetime
from fastapi import APIRouter
from robot_server.service.json_api.resource_links import (
ResourceLinkKey, ResourceLink)
from robot_server.system import time
from robot_server.service.system import models as time_models
router = APIRouter()
log = logging.getLogger(__name__)

"""
These routes allows the client to read & update robot system time
"""


def _create_response(dt: datetime) \
        -> time_models.SystemTimeResponse:
    """Create a SystemTimeResponse with system datetime.

    :param dt: the datetime to report as the robot's system time
    :return: a response payload with a self link to /system/time
    """
    return time_models.SystemTimeResponse(
        data=time_models.SystemTimeAttributesResponse(
            systemTime=dt,
            id="time"
        ),
        links={
            ResourceLinkKey.self: ResourceLink(href='/system/time')
        }
    )
@router.get("/system/time",
            description="Fetch system time & date",
            summary="Get robot's time status, which includes- current UTC "
                    "date & time, local timezone, whether robot time is synced"
                    " with an NTP server &/or it has an active RTC.",
            response_model=time_models.SystemTimeResponse)
async def get_time() -> time_models.SystemTimeResponse:
    """Handle GET /system/time by reporting the current system time."""
    return _create_response(await time.get_system_time())
@router.put("/system/time",
            description="Update system time",
            summary="Set robot time",
            response_model=time_models.SystemTimeResponse)
async def set_time(new_time: time_models.SystemTimeRequest) \
        -> time_models.SystemTimeResponse:
    """Handle PUT /system/time by applying and echoing the requested time."""
    updated_time = await time.set_system_time(new_time.data.systemTime)
    return _create_response(updated_time)
| 2.84375 | 3 |
test/unit/test_backend_retrieval.py | renier/qiskit-ibm-runtime | 0 | 12759056 | <gh_stars>0
# This code is part of Qiskit.
#
# (C) Copyright IBM 2021.
#
# This code is licensed under the Apache License, Version 2.0. You may
# obtain a copy of this license in the LICENSE.txt file in the root directory
# of this source tree or at http://www.apache.org/licenses/LICENSE-2.0.
#
# Any modifications or derivative works of this code must retain this
# copyright notice, and modified files need to carry a notice indicating
# that they have been altered from the originals.
"""Backends Filtering Test."""
from qiskit.providers.exceptions import QiskitBackendNotFoundError
from qiskit.test.mock.backends import FakeLima
from .mock.fake_account_client import BaseFakeAccountClient
from .mock.fake_runtime_service import FakeRuntimeService
from ..ibm_test_case import IBMTestCase
from ..decorators import run_legacy_and_cloud_fake
class TestBackendFilters(IBMTestCase):
    """Qiskit Backend Filtering Tests.

    Runs against FakeRuntimeService, which provides one backend common to
    all hub/group/projects plus uniquely named per-hgp backends.
    """

    @run_legacy_and_cloud_fake
    def test_no_filter(self, service):
        """Test no filtering."""
        # FakeRuntimeService by default creates 3 backends.
        backend_name = [back.name for back in service.backends()]
        self.assertEqual(len(backend_name), 3)

    @run_legacy_and_cloud_fake
    def test_filter_by_name(self, service):
        """Test filtering by name."""
        for name in [
            FakeRuntimeService.DEFAULT_COMMON_BACKEND,
            FakeRuntimeService.DEFAULT_UNIQUE_BACKEND_PREFIX + "0",
        ]:
            with self.subTest(name=name):
                backend_name = [back.name for back in service.backends(name=name)]
                self.assertEqual(len(backend_name), 1)

    def test_filter_by_instance_legacy(self):
        """Test filtering by instance."""
        service = FakeRuntimeService(auth="legacy", token="<PASSWORD>")
        for hgp in FakeRuntimeService.DEFAULT_HGPS:
            with self.subTest(hgp=hgp):
                backends = service.backends(instance=hgp)
                backend_name = [back.name for back in backends]
                self.assertEqual(len(backend_name), 2)
                for back in backends:
                    # Each returned backend's API client is bound to the hgp.
                    self.assertEqual(back._api_client.hgp, hgp)

    def test_filter_config_properties(self):
        """Test filtering by configuration properties."""
        n_qubits = 5
        fake_backends = [
            self._get_specs(n_qubits=n_qubits, local=False),
            self._get_specs(n_qubits=n_qubits * 2, local=False),
            self._get_specs(n_qubits=n_qubits, local=True),
        ]
        services = self._get_services(fake_backends)
        for service in services:
            with self.subTest(service=service.auth):
                filtered_backends = service.backends(n_qubits=n_qubits, local=False)
                self.assertTrue(len(filtered_backends), 1)
                self.assertEqual(
                    n_qubits, filtered_backends[0].configuration().n_qubits
                )
                self.assertFalse(filtered_backends[0].configuration().local)

    def test_filter_status_dict(self):
        """Test filtering by dictionary of mixed status/configuration properties."""
        fake_backends = [
            self._get_specs(operational=True, simulator=True),
            self._get_specs(operational=True, simulator=True),
            self._get_specs(operational=True, simulator=False),
            self._get_specs(operational=False, simulator=False),
        ]
        services = self._get_services(fake_backends)
        for service in services:
            with self.subTest(service=service.auth):
                filtered_backends = service.backends(
                    operational=True,  # from status
                    simulator=True,  # from configuration
                )
                self.assertTrue(len(filtered_backends), 2)
                for backend in filtered_backends:
                    self.assertTrue(backend.status().operational)
                    self.assertTrue(backend.configuration().simulator)

    def test_filter_config_callable(self):
        """Test filtering by lambda function on configuration properties."""
        n_qubits = 5
        fake_backends = [
            self._get_specs(n_qubits=n_qubits),
            self._get_specs(n_qubits=n_qubits * 2),
            self._get_specs(n_qubits=n_qubits - 1),
        ]
        services = self._get_services(fake_backends)
        for service in services:
            with self.subTest(service=service.auth):
                filtered_backends = service.backends(
                    filters=lambda x: (x.configuration().n_qubits >= 5)
                )
                self.assertTrue(len(filtered_backends), 2)
                for backend in filtered_backends:
                    self.assertGreaterEqual(backend.configuration().n_qubits, n_qubits)

    def test_filter_least_busy(self):
        """Test filtering by least busy function."""
        default_stat = {"pending_jobs": 1, "operational": True, "status_msg": "active"}
        # "bingo" has the fewest pending jobs among operational+active backends.
        fake_backends = [
            self._get_specs(
                **{**default_stat, "backend_name": "bingo", "pending_jobs": 5}
            ),
            self._get_specs(**{**default_stat, "pending_jobs": 7}),
            self._get_specs(**{**default_stat, "operational": False}),
            self._get_specs(**{**default_stat, "status_msg": "internal"}),
        ]
        services = self._get_services(fake_backends)
        for service in services:
            with self.subTest(service=service.auth):
                backend = service.least_busy()
                self.assertEqual(backend.name, "bingo")

    def test_filter_min_num_qubits(self):
        """Test filtering by minimum number of qubits."""
        n_qubits = 5
        fake_backends = [
            self._get_specs(n_qubits=n_qubits),
            self._get_specs(n_qubits=n_qubits * 2),
            self._get_specs(n_qubits=n_qubits - 1),
        ]
        services = self._get_services(fake_backends)
        for service in services:
            with self.subTest(service=service.auth):
                filtered_backends = service.backends(min_num_qubits=n_qubits)
                self.assertTrue(len(filtered_backends), 2)
                for backend in filtered_backends:
                    self.assertGreaterEqual(backend.configuration().n_qubits, n_qubits)

    def test_filter_by_hgp(self):
        """Test filtering by hub/group/project."""
        num_backends = 3
        test_options = {
            "account_client": BaseFakeAccountClient(num_backends=num_backends),
            "num_hgps": 2,
        }
        legacy_service = FakeRuntimeService(
            auth="legacy",
            token="my_token",
            instance="h/g/p",
            test_options=test_options,
        )
        backends = legacy_service.backends(instance="hub0/group0/project0")
        self.assertEqual(len(backends), num_backends)

    def _get_specs(self, **kwargs):
        """Get the backend specs to pass to the fake account client.

        Splits the kwargs into 'status' vs 'configuration' sections based
        on which keys FakeLima's status payload contains.
        """
        specs = {"configuration": {}, "status": {}}
        status_keys = FakeLima().status().to_dict()
        status_keys.pop("backend_name")  # name is in both config and status
        status_keys = list(status_keys.keys())
        for key, val in kwargs.items():
            if key in status_keys:
                specs["status"][key] = val
            else:
                specs["configuration"][key] = val
        return specs

    def _get_services(self, fake_backends):
        """Get both cloud and legacy services initialized with fake backends."""
        test_options = {"account_client": BaseFakeAccountClient(specs=fake_backends)}
        legacy_service = FakeRuntimeService(
            auth="legacy",
            token="my_token",
            instance="h/g/p",
            test_options=test_options,
        )
        cloud_service = FakeRuntimeService(
            auth="cloud",
            token="my_token",
            instance="my_instance",
            test_options=test_options,
        )
        return [legacy_service, cloud_service]
class TestGetBackend(IBMTestCase):
    """Test getting a backend via legacy api.

    Relies on FakeRuntimeService's naming scheme: one backend shared by all
    hgps plus per-hgp backends named DEFAULT_UNIQUE_BACKEND_PREFIX + index.
    """

    def test_get_common_backend(self):
        """Test getting a backend that is in default and non-default hgp."""
        service = FakeRuntimeService(auth="legacy", token="my_token")
        backend = service.backend(FakeRuntimeService.DEFAULT_COMMON_BACKEND)
        # The first (default) hgp is preferred for a common backend.
        self.assertEqual(backend._api_client.hgp, list(service._hgps.keys())[0])

    def test_get_unique_backend_default_hgp(self):
        """Test getting a backend in the default hgp."""
        service = FakeRuntimeService(auth="legacy", token="<PASSWORD>")
        backend_name = FakeRuntimeService.DEFAULT_UNIQUE_BACKEND_PREFIX + "0"
        backend = service.backend(backend_name)
        self.assertEqual(backend._api_client.hgp, list(service._hgps.keys())[0])

    def test_get_unique_backend_non_default_hgp(self):
        """Test getting a backend in the non default hgp."""
        service = FakeRuntimeService(auth="legacy", token="<PASSWORD>")
        backend_name = FakeRuntimeService.DEFAULT_UNIQUE_BACKEND_PREFIX + "1"
        backend = service.backend(backend_name)
        self.assertEqual(backend._api_client.hgp, list(service._hgps.keys())[1])

    def test_get_phantom_backend(self):
        """Test getting a phantom backend."""
        service = FakeRuntimeService(auth="legacy", token="my_token")
        with self.assertRaises(QiskitBackendNotFoundError):
            service.backend("phantom")

    def test_get_backend_by_hgp(self):
        """Test getting a backend by hgp."""
        hgp = FakeRuntimeService.DEFAULT_HGPS[1]
        backend_name = FakeRuntimeService.DEFAULT_COMMON_BACKEND
        service = FakeRuntimeService(auth="legacy", token="my_<PASSWORD>")
        backend = service.backend(backend_name, instance=hgp)
        self.assertEqual(backend._api_client.hgp, hgp)

    def test_get_backend_by_bad_hgp(self):
        """Test getting a backend not in hgp."""
        hgp = FakeRuntimeService.DEFAULT_HGPS[1]
        # Unique backend "0" lives in hgp 0, so requesting it via hgp 1 fails.
        backend_name = FakeRuntimeService.DEFAULT_UNIQUE_BACKEND_PREFIX + "0"
        service = FakeRuntimeService(auth="legacy", token="my_<PASSWORD>")
        with self.assertRaises(QiskitBackendNotFoundError):
            _ = service.backend(backend_name, instance=hgp)
| 2.171875 | 2 |
scripts/gwrender.py | damibabayemi/gwpv | 8 | 12759057 | #!/usr/bin/env python
# This script needs to control its startup sequence to interface with ParaView's
# `pvpython`.
#
# 1. The user launches `gwrender.py` in a Python environment of their choice.
# They have `gwpv` and its dependencies installed in this environment.
# The `pvpython` executable is available in the `PATH`.
# 2. CLI arguments are parsed.
# a. The `scene` entrypoint is dispatched to `pvpython` in a subprocess,
# passing along the path to the active Python environment.
# b. The `scenes` entrypoint launches subprocesses with the `pvpython`
# executable that each call the `scene` entrypoint.
# 3. Now running in `pvpython`, the Python environment is activated using its
# `activate_this.py` script.
# 4. The `gwpv.render.frames` module is imported in the global namespace so
# ParaView plugins are loaded and work with `multiprocessing`.
#
# FIXME:
# - Installing in editable mode with `pip install -e` is broken
# - Generated state file doesn't `UpdatePipeline()` in between adding the
# reader and the filter, so the timesteps are not loaded from the file yet.
# This generates an error in the GUI and timesteps are unavailable.
# I had no success propagating the time range from the reader to the filter
# in `RequestInformation` so far, neither using information keys nor
# `vtkFieldData`.
from __future__ import division
import logging
import json
def _render_frame_window(job_id_and_frame_window, **kwargs):
    """Worker entrypoint: unpack a (job_id, frame_window) pair and render it."""
    from gwpv.render.frames import render_frames
    job_id, frame_window = job_id_and_frame_window
    render_frames(job_id=job_id, frame_window=frame_window, **kwargs)
def render_parallel(num_jobs, scene, frame_window=None, **kwargs):
    """Render the scene's frames distributed over `num_jobs` worker processes.

    :param num_jobs: number of parallel rendering jobs
    :param scene: parsed scene configuration dictionary
    :param frame_window: (begin, end) frame range to render; inferred from the
        scene's animation settings / waveform data when None
    :param kwargs: forwarded to `gwpv.render.frames.render_frames`
    """
    import functools
    import h5py
    import multiprocessing
    from gwpv.scene_configuration import parse_as, animate
    from tqdm import tqdm
    logger = logging.getLogger(__name__)
    # Infer frame window if needed
    if 'FreezeTime' in scene['Animation']:
        frame_window = (0, 1)
    elif frame_window is None:
        if 'Crop' in scene['Animation']:
            max_animation_length = (scene['Animation']['Crop'][1] -
                                    scene['Animation']['Crop'][0])
        else:
            # Fall back to the full duration of the waveform data
            waveform_file_and_subfile = parse_as.file_and_subfile(
                scene['Datasources']['Waveform'])
            with h5py.File(waveform_file_and_subfile[0], 'r') as waveform_file:
                waveform_times = waveform_file[
                    waveform_file_and_subfile[1]]['Y_l2_m2.dat'][:, 0]
            max_animation_length = waveform_times[-1] - waveform_times[0]
            logger.debug(
                "Inferred max. animation length {}M from waveform data.".
                format(max_animation_length))
        frame_window = (0,
                        animate.num_frames(
                            max_animation_length=max_animation_length,
                            animation_speed=scene['Animation']['Speed'],
                            frame_rate=scene['Animation']['FrameRate']))
        logger.debug("Inferred total frame window: {}".format(frame_window))
    # Distribute the frames as evenly as possible over the jobs; the first
    # `extra_frames` jobs render one additional frame.
    num_frames = frame_window[1] - frame_window[0]
    frames_per_job = int(num_frames / num_jobs)
    extra_frames = num_frames % num_jobs
    logger.debug(
        "Using {} jobs with {} frames per job ({} jobs render an additional frame)."
        .format(num_jobs, frames_per_job, extra_frames))
    frame_windows = []
    distributed_frames = frame_window[0]
    for i in range(num_jobs):
        frames_this_job = frames_per_job + (1 if i < extra_frames else 0)
        frame_windows.append(
            (distributed_frames, distributed_frames + frames_this_job))
        distributed_frames += frames_this_job
    logger.debug("Frame windows: {}".format(frame_windows))
    # Use the pool as a context manager so worker processes are always
    # cleaned up, even if rendering raises (the original never closed or
    # joined the pool).
    with multiprocessing.Pool(num_jobs,
                              initializer=tqdm.set_lock,
                              initargs=(tqdm.get_lock(), )) as pool:
        render_frame_window = functools.partial(_render_frame_window,
                                                scene=scene,
                                                **kwargs)
        pool.map(render_frame_window, enumerate(frame_windows))
def render_scene_entrypoint(scene_files, keypath_overrides, scene_paths,
                            num_jobs, render_movie_to_file,
                            force_offscreen_rendering, **kwargs):
    """Render a single scene: load config, fetch data, render frames and
    optionally encode them into a movie.

    NOTE(review): `force_offscreen_rendering` is unused here — presumably
    accepted so the argparse namespace can be splatted unchanged; confirm
    before removing. `render_frames` (single-job branch) is bound at module
    level by the `__main__` block, which imports it only when running under
    pvpython — TODO confirm this entrypoint is never called without that.
    """
    from gwpv.scene_configuration.load import load_scene
    from gwpv.download_data import download_data
    from gwpv.swsh_cache import precompute_cached_swsh_grid
    # Validate options
    assert (
        kwargs['frames_dir'] is not None or kwargs['no_render']
        or render_movie_to_file is not None
    ), "Provide the `--frames-dir` option, the '--render-movie-to-file' option, or disable rendering with `--no-render`."
    if kwargs['frames_dir'] is None and render_movie_to_file is not None:
        kwargs['frames_dir'] = render_movie_to_file + '_frames'
    # Load scene configuration file
    scene = load_scene(scene_files, keypath_overrides, paths=scene_paths)
    # Download data files
    download_data(scene['Datasources'])
    # Cache SWSH grid
    precompute_cached_swsh_grid(scene)
    if num_jobs == 1:
        render_frames(scene=scene, **kwargs)
    else:
        render_parallel(num_jobs=num_jobs, scene=scene, **kwargs)
    # A frozen-time scene produces a single frame, so there is no movie to cut
    if (render_movie_to_file is not None
            and 'FreezeTime' not in scene['Animation']):
        from gwpv.render.movie import render_movie
        render_movie(output_filename=render_movie_to_file,
                     frame_rate=scene['Animation']['FrameRate'],
                     frames_dir=kwargs['frames_dir'])
def dispatch_to_pvpython(force_offscreen_rendering, cli_args):
    """Re-invoke the given CLI arguments under ParaView's 'pvpython'.

    If the current interpreter runs inside a virtualenv, pass its prefix
    along via '--activate-venv' so the subprocess can activate it too.
    Returns the subprocess's exit code.
    """
    import os
    import sys
    import subprocess
    logger = logging.getLogger(__name__)
    # Check if we're running in a virtual environment and pass that
    # information on
    activate_venv_script = os.path.join(sys.prefix, 'bin', 'activate_this.py')
    pvpython_command = ['pvpython']
    if force_offscreen_rendering:
        pvpython_command.append('--force-offscreen-rendering')
    pvpython_command += cli_args
    if os.path.exists(activate_venv_script):
        pvpython_command += ['--activate-venv', sys.prefix]
    logger.debug("Dispatching to 'pvpython' as: {}".format(pvpython_command))
    return subprocess.call(pvpython_command)
def render_scenes_entrypoint(scenes_file, output_dir, output_prefix,
                             output_suffix, scene_overrides, scene_paths,
                             keypath_overrides, render_missing_frames,
                             num_jobs, force_offscreen_rendering, verbose,
                             logging_config):
    """Render every scene listed in `scenes_file` consecutively.

    Each scene is rendered by re-invoking this script's 'scene' entrypoint
    in a fresh pvpython subprocess so ParaView releases memory between
    scenes.
    """
    import itertools
    import os
    import yaml
    from tqdm import tqdm
    # Reassemble the CLI flags shared by every per-scene invocation
    common_args = (
        list(
            itertools.chain(*[('--override', "=".join(override))
                              for override in keypath_overrides])) +
        (['--render-missing-frames'] if render_missing_frames else []) + list(
            itertools.chain(*[('-p', scene_path)
                              for scene_path in scene_paths])) +
        ['-n', str(num_jobs)] + ['-v'] * verbose +
        (['--logging-config', "'" + json.dumps(logging_config) +
          "'"] if logging_config is not None else []))
    with tqdm(yaml.safe_load(open(scenes_file, 'r'))['Scenes'],
              desc='Scenes',
              unit='scene') as scenes:
        for scene in scenes:
            scenes.set_postfix(current_scene=scene['Name'])
            scene_files = [scenes_file + ':' + scene['Name']] + scene_overrides
            movie_file = os.path.join(
                output_dir, output_prefix + scene['Name'] + output_suffix)
            # Run as a subprocess instead of calling `render_scene_entrypoint`
            # directly to make sure ParaView releases memory after each run
            dispatch_to_pvpython(
                force_offscreen_rendering, [__file__, 'scene'] + scene_files +
                ['--render-movie-to-file', movie_file] + common_args)
def render_waveform_entrypoint(scene_files, keypath_overrides, scene_paths,
                               **kwargs):
    """Load the scene configuration, fetch its data and render the waveform."""
    from gwpv.render.waveform import render_waveform
    from gwpv.scene_configuration.load import load_scene
    from gwpv.download_data import download_data
    loaded_scene = load_scene(scene_files, keypath_overrides, paths=scene_paths)
    download_data(loaded_scene['Datasources'])
    render_waveform(loaded_scene, **kwargs)
if __name__ == '__main__':
    import argparse
    parser = argparse.ArgumentParser(
        'gwrender.py',
        description="Visualize gravitational waves with ParaView")
    subparsers = parser.add_subparsers(dest='entrypoint')
    subparsers.required = True
    # `scene` CLI
    parser_scene = subparsers.add_parser(
        'scene', help="Render frames for a single scene.")
    parser_scene.set_defaults(subcommand=render_scene_entrypoint)
    parser_scene.add_argument(
        'scene_files',
        help=
        "Path to one or more YAML scene configuration files. Entries in later files override those in earlier files.",
        nargs='+')
    parser_scene.add_argument('--frames-dir',
                              '-o',
                              help="Output directory for frames",
                              required=False)
    parser_scene.add_argument(
        '--frame-window',
        help=
        "Subset of frames to render. Includes lower bound and excludes upper bound.",
        type=int,
        nargs=2)
    parser_scene.add_argument(
        '--render-movie-to-file',
        help=
        "Name of a file (excluding extension) to render a movie from all frames to."
    )
    parser_scene.add_argument(
        '--save-state-to-file',
        help=
        "Name of a file (excluding the 'pvsm' extension) to save the ParaView state to. The file can be loaded with ParaView to inspect the scene interactively."
    )
    parser_scene.add_argument(
        '--no-render',
        action='store_true',
        help="Skip rendering any frames, e.g. to produce only a state file.")
    parser_scene_preview_group = parser_scene.add_mutually_exclusive_group()
    parser_scene_preview_group.add_argument(
        '--show-preview',
        action='store_true',
        help="Show a window with a preview of the full movie.")
    parser_scene.add_argument('--hide-progress',
                              dest='show_progress',
                              action='store_false',
                              help="Hide the progress bar")
    # `scenes` CLI
    parser_scenes = subparsers.add_parser(
        'scenes', help="Render a set of scenes consecutively.")
    parser_scenes.set_defaults(subcommand=render_scenes_entrypoint)
    parser_scenes.add_argument(
        'scenes_file',
        help="Path to a YAML file listing the scenes to render.")
    parser_scenes.add_argument('scene_overrides',
                               help="Overrides to apply to all scenes",
                               nargs='*',
                               default=[])
    parser_scenes.add_argument('--output-dir', '-o')
    parser_scenes.add_argument('--output-prefix', default="")
    parser_scenes.add_argument('--output-suffix', default="")
    # Common CLI for `scene` and `scenes`
    for subparser in [parser_scene, parser_scenes]:
        subparser.add_argument(
            '--render-missing-frames',
            help="Only render missing frames without replacing existing files.",
            action='store_true')
        subparser.add_argument('--num-jobs',
                               '-n',
                               help="Render frames in parallel",
                               type=int,
                               default=1)
        subparser.add_argument('--force-offscreen-rendering',
                               '-x',
                               action='store_true')
        # Internal flag used when this script re-dispatches itself to pvpython
        subparser.add_argument('--activate-venv')
    # `waveform` CLI
    parser_waveform = subparsers.add_parser(
        'waveform', help="Render waveform for a scene.")
    parser_waveform.set_defaults(subcommand=render_waveform_entrypoint)
    parser_waveform.add_argument(
        'scene_files',
        help=
        "Path to one or more YAML scene configuration files. Entries in later files override those in earlier files.",
        nargs='+')
    parser_waveform.add_argument('--output-file', '-o', required=True)
    parser_waveform.add_argument('--time-merger', type=float, required=True)
    parser_waveform.add_argument('--mass', type=float, required=True)
    parser_waveform.add_argument('--bounds', type=float, nargs=2)
    # Common CLI for all entrypoints
    for subparser in [parser_scene, parser_scenes, parser_waveform]:
        subparser.add_argument(
            '--scene-path',
            '-p',
            help="Append search paths for scene configuration files",
            action='append',
            dest='scene_paths',
            default=[])
        subparser.add_argument(
            '--override',
            help=
            "A key-value pair that replaces an entry in the scene file, e.g. '--override Animation.FrameRate=30'. The value is parsed as YAML.",
            action='append',
            type=lambda kv: kv.split('='),
            dest='keypath_overrides',
            default=[])
        subparser.add_argument('--verbose',
                               '-v',
                               action='count',
                               default=0,
                               help="Logging verbosity (-v, -vv, ...)")
        subparser.add_argument('--logging-config', type=json.loads)
    args = parser.parse_args()
    # Setup logging
    logging.basicConfig(level=logging.WARNING - args.verbose * 10)
    if args.logging_config is not None:
        import logging.config
        if 'version' not in args.logging_config:
            args.logging_config['version'] = 1
        logging.config.dictConfig(args.logging_config)
    if args.entrypoint != 'scenes':
        # 'scenes' forwards these to its per-scene subprocesses; the other
        # entrypoints don't take them as parameters
        del args.verbose
        del args.logging_config
    logger = logging.getLogger(__name__)
    # Re-launch the script with `pvpython` if necessary
    if args.entrypoint == 'scene':
        try:
            logger.debug("Checking if we're running with 'pvpython'...")
            import paraview
        except ImportError:
            import sys
            logger.debug("Not running with 'pvpython', dispatching...")
            sys.exit(
                dispatch_to_pvpython(args.force_offscreen_rendering, sys.argv))
        logger.debug("Running with 'pvpython'.")
    # Activate the virtual environment if requested before trying to import
    # from `gwpv` below
    if args.entrypoint in ['scene', 'scenes']:
        if args.activate_venv:
            activate_venv = args.activate_venv
            logger.debug('Activating venv: {}'.format(activate_venv))
            import os
            activate_venv_script = os.path.join(activate_venv, 'bin',
                                                'activate_this.py')
            assert os.path.exists(activate_venv_script), (
                "No 'bin/activate_this.py' script found in '{}'.".format(
                    activate_venv))
            with open(activate_venv_script, 'r') as f:
                exec(f.read(), {'__file__': activate_venv_script})
        del args.activate_venv
    # Import render_frames here to make loading the ParaView plugins work with
    # `multiprocessing`
    if args.entrypoint == 'scene':
        from gwpv.render.frames import render_frames
    # Forward to the user-selected entrypoint
    subcommand = args.subcommand
    del args.subcommand
    del args.entrypoint
    subcommand(**vars(args))
| 2.21875 | 2 |
bsuite/bsuite/utils/wrappers_test.py | hbutsuak95/iv_rl | 1,337 | 12759058 | <reponame>hbutsuak95/iv_rl
# python3
# pylint: disable=g-bad-file-header
# Copyright 2019 DeepMind Technologies Limited. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Tests for bsuite.utils.wrapper."""
from absl.testing import absltest
from absl.testing import parameterized
from bsuite import environments
from bsuite.environments import catch
from bsuite.utils import wrappers
import dm_env
from dm_env import specs
from dm_env import test_utils
import mock
import numpy as np
class FakeEnvironment(environments.Environment):
  """Environment stub that replays a fixed sequence of time steps.

  The supplied sequence may represent one episode or several; the class
  just cycles through it repeatedly and never inspects the contents.
  """

  def __init__(self, time_steps):
    """Creates the fake environment.

    Args:
      time_steps: A sequence of time step namedtuples to replay in order.
    """
    super().__init__()
    self.bsuite_num_episodes = 1000
    self._time_steps = time_steps
    # Derive the observation spec from the first observation's array form.
    first_obs = np.asarray(time_steps[0].observation)
    self._observation_spec = specs.Array(
        shape=first_obs.shape, dtype=first_obs.dtype)
    self._step_index = 0
    self._reset_next_step = True

  def reset(self):
    """Rewinds to the first time step of the sequence."""
    self._reset_next_step = False
    self._step_index = 0
    return self._time_steps[0]

  def step(self, action):
    """Returns the next pre-determined time step, wrapping around."""
    del action  # Unused: transitions are pre-determined.
    if self._reset_next_step:
      return self.reset()
    self._step_index = (self._step_index + 1) % len(self._time_steps)
    return self._time_steps[self._step_index]

  def _reset(self):
    raise NotImplementedError

  def _step(self, action: int):
    raise NotImplementedError

  def observation_spec(self):
    return self._observation_spec

  def action_spec(self):
    return specs.Array(shape=(), dtype=np.int32)

  def bsuite_info(self):
    return {}
class WrapperTest(absltest.TestCase):
  """Tests for `wrappers.Logging` and wrapper unwrapping."""

  def test_wrapper(self):
    """Tests that the wrapper computes and logs the correct data."""
    mock_logger = mock.MagicMock()
    mock_logger.write = mock.MagicMock()
    # Make a fake environment that cycles through these time steps.
    timesteps = [
        dm_env.restart([]),
        dm_env.transition(1, []),
        dm_env.transition(2, []),
        dm_env.termination(3, []),
    ]
    # Sum of the rewards in the transitions above: 1 + 2 + 3.
    expected_episode_return = 6
    fake_env = FakeEnvironment(timesteps)
    env = wrappers.Logging(env=fake_env, logger=mock_logger, log_every=True) # pytype: disable=wrong-arg-types
    num_episodes = 5
    # Run full episodes so the wrapper accumulates and logs statistics.
    for _ in range(num_episodes):
      timestep = env.reset()
      while not timestep.last():
        timestep = env.step(action=0)
    # We count the number of transitions, hence the -1.
    expected_episode_length = len(timesteps) - 1
    # One logged dict per episode, with cumulative `steps`/`total_return`.
    expected_calls = []
    for i in range(1, num_episodes + 1):
      expected_calls.append(
          mock.call(dict(
              steps=expected_episode_length * i,
              episode=i,
              total_return=expected_episode_return * i,
              episode_len=expected_episode_length,
              episode_return=expected_episode_return,
          ))
      )
    mock_logger.write.assert_has_calls(expected_calls)

  def test_unwrap(self):
    """Checks that `raw_env` digs through a stack of wrappers to the base."""
    raw_env = FakeEnvironment([dm_env.restart([])])
    scale_env = wrappers.RewardScale(raw_env, reward_scale=1.)
    noise_env = wrappers.RewardNoise(scale_env, noise_scale=1.)
    logging_env = wrappers.Logging(noise_env, logger=None) # pytype: disable=wrong-arg-types
    unwrapped = logging_env.raw_env
    # Identity (not just equality) with the original environment.
    self.assertEqual(id(raw_env), id(unwrapped))
class ImageObservationTest(parameterized.TestCase):
  """Tests for `wrappers.to_image` over different target shapes."""

  @parameterized.parameters(
      ((84, 84, 4), np.array([1, 2])),
      ((70, 90), np.array([[1, 0, 2, 3]])),
  )
  def test_to_image(self, shape, observation):
    """Converted image has the requested shape and the same value set."""
    image = wrappers.to_image(shape, observation)
    self.assertEqual(image.shape, shape)
    # Resizing must not introduce or drop distinct observation values.
    self.assertCountEqual(np.unique(image), np.unique(observation))
class ImageWrapperCatchTest(test_utils.EnvironmentTestMixin, absltest.TestCase):
  """Runs the dm_env conformance suite on an image-wrapped Catch env."""

  def make_object_under_test(self):
    # Wrap Catch so observations are rendered as 84x84x4 images.
    env = catch.Catch()
    return wrappers.ImageObservation(env, (84, 84, 4))

  def make_action_sequence(self):
    # Deterministic pseudo-random actions (seeded) for reproducible runs.
    actions = [0, 1, 2]
    rng = np.random.RandomState(42)
    for _ in range(100):
      yield rng.choice(actions)
if __name__ == '__main__':
  # Run all test cases defined in this module.
  absltest.main()
| 2.109375 | 2 |
updatevariables.py | TemboInc/gitlabapi | 0 | 12759059 | <gh_stars>0
from gitlab import Gitlab
import click
@click.command()
@click.argument('env_file', type=click.File('r'))
@click.argument('private_token')
@click.argument('project_name')
@click.argument('username')
def update_variables(env_file, private_token, project_name, username):
    """
    A command line app to create and update Gitlab environment variables. Will read an env_file in the following form.
    Variables will only be created or updated, NOT deleted.
    # Comments are ignored
    foo=bar
    spam=ham
    :param env_file: env file to read
    :param private_token: Gitlab private token. This can be created in Account Settings > Access Tokens (api scope)
    :param project_name: Gitlab Project to update
    :param username: Gitlab username
    :return:
    """
    # Keep only non-empty, non-comment lines (a blank line previously
    # crashed the parser with an IndexError).
    valid_lines = [line.strip() for line in env_file.readlines()
                   if line.strip() and not line.startswith('#')]
    new_env_vars = []
    for line in valid_lines:
        if '=' not in line:
            raise click.BadParameter(
                f"Malformed line in env file (expected key=value): {line!r}")
        # Split on the first '=' only, so values may themselves contain '='
        # (e.g. base64-encoded secrets with '=' padding).
        key, value = line.split('=', 1)
        new_env_vars.append({'key': key.strip(), 'value': value})
    url = "https://gitlab.com"
    g = Gitlab(url=url, private_token=private_token)
    # Guard the [0] lookups with explicit errors instead of bare IndexError.
    users = g.users.list(username=username)
    if not users:
        raise click.ClickException(f"No Gitlab user found for username {username!r}")
    user = users[0]
    projects = user.projects.list(search=project_name)
    if not projects:
        raise click.ClickException(
            f"No project matching {project_name!r} found for user {username!r}")
    user_project = projects[0]
    project_id = user_project.get_id()
    project = g.projects.get(project_id)
    existing_vars = {var.attributes['key']: var.attributes['value'] for var in project.variables.list()}
    existing_keys = existing_vars.keys()
    for env_var in new_env_vars:
        if env_var['key'] not in existing_keys:
            print(f"creating {env_var['key']} set to {env_var['value']}")
            project.variables.create(env_var)
        elif env_var['value'] != existing_vars[env_var['key']]:
            print(f"updating {env_var['key']} from {existing_vars[env_var['key']]} to {env_var['value']}")
            project.variables.update(env_var['key'], env_var)
    print("Done")
if __name__ == '__main__':
    # click populates the arguments from the command line.
    update_variables()
| 2.84375 | 3 |
tests/megawing/papilioone/_timer.py | splhack/loam | 14 | 12759060 | import sys
from magma import *
from mantle import *
from loam.boards.papilio import Papilio

# Board setup: enable the system clock and the free-running system timer,
# and expose pin C[8] as an output renamed 'LED'.
papilio = Papilio()
papilio.Clock.on()
papilio.Timer.on()
papilio.C[8].rename('LED').output().on()

main = papilio.main()

# Drive the LED from bit 24 of the system timer output — a high timer bit,
# so the LED toggles slowly enough to be visible.
wire( main.systimer.O[24], main.LED )

# NOTE(review): `compile` here is presumably magma's compile (brought in by
# the wildcard import), not the builtin; output target taken from argv[1].
compile(sys.argv[1], main)
| 1.765625 | 2 |
7day/web/Web23.py | jsjang93/joony | 0 | 12759061 | # Web23.py
# Scrapes song titles and artists from the Naver Music weekly chart
# (TOTAL chart, first week of December 2017).
import requests
from bs4 import BeautifulSoup

url = "http://music.naver.com/listen/history/index.nhn?type=TOTAL&year=2017&month=12&week=1"
html_music = requests.get(url).text
soup_music = BeautifulSoup(html_music, "lxml")

# Find <a> elements whose class attribute is "_title", then within them
# extract the <span> elements whose class attribute is "ellipsis".
titles = soup_music.select('a._title span.ellipsis')
print(titles[0:7])
music_titles = [title.get_text() for title in titles]
print(music_titles[0:7])

# Find <a> elements whose class attribute is "_artist", then within them
# extract the <span> elements whose class attribute is "ellipsis".
artists = soup_music.select('a._artist span.ellipsis')
print(artists[0].get_text())
# strip() removes the surrounding whitespace in the artist text.
print(artists[0].get_text().strip())
music_artists = [artist.get_text().strip() for artist in artists]
print(music_artists[0:7])

# Find <td> elements whose class attribute is "_artist", then extract
# the <a> elements inside them.
artists = soup_music.select('td._artist a')
print(artists[0], artists[4])
print(artists[0].get_text().strip())
music_artists = [artist.get_text().strip() for artist in artists]
print(music_artists[0:7])
| 3.09375 | 3 |
parallel_coordinate_plot/parallel_coordinate_plot.py | eragasa/ragasa_python_tutorials | 0 | 12759062 | <gh_stars>0
import matplotlib.pyplot as plt
import matplotlib.lines as mlines
import pandas as pd
from pandas.plotting import parallel_coordinates

# Load the iris dataset and draw a parallel-coordinates plot grouped by
# the "Name" (species) column.
data = pd.read_csv('data/iris.data')
print(data)
plt.figure()
parallel_coordinates(data,"Name")
plt.show()

# NOTE(review): the script exits here. Everything below is unreachable
# dead code and references undefined names (lowDf, midDf, topDf), so it
# would crash if exit() were removed. Presumably leftover from an earlier
# version of this example -- confirm before deleting.
exit()
from pandas.plotting import andrews_curves
data = pd.read_csv('data/iris.data')
lowColorList=["k","k","y","y"]
midColorList=["c","b","g"]
topColorList=["r"]
plt.close()
plt.gcf().clear()
fig, ax = plt.subplots()
parallel_coordinates(lowDf, "Cat",color=lowColorList, alpha=0.1)
parallel_coordinates(midDf, "Cat", color=midColorList, alpha=0.4)
parallel_coordinates(topDf, "Cat", color=topColorList, alpha=0.9)
# remove the pandas legend
plt.gca().legend_.remove()
plt.xlabel("Each Component of X is Shown on its Own Vertical Axis")
plt.ylabel("Values")
plt.title("Finding the Optimal Value of X")
# add new legend
topHandle = mlines.Line2D([],[], color='red', ls="-", label="Best")
midHandleOne = mlines.Line2D([],[], color='blue', ls="-", label="Next Best")
lowHandle = mlines.Line2D([],[], color='black', ls="-", label="Worst")
plt.legend(handles=[topHandle, midHandleOne,lowHandle],loc=1, prop={'size':10})
plt.show()
| 2.9375 | 3 |
mushroom_rl_benchmark/builders/environment_builder.py | benvoe/mushroom-rl-benchmark | 0 | 12759063 | from copy import deepcopy
import mushroom_rl.environments
class EnvironmentBuilder:
    """
    Factory for spawning instances of a MushroomRL environment.
    """

    def __init__(self, env_name, env_params):
        """
        Constructor.

        Args:
            env_name: name of the environment class to instantiate
            env_params: parameters forwarded to the environment constructor
        """
        self.env_name = env_name
        self.env_params = env_params

    def build(self):
        """
        Instantiate and return the configured environment.
        """
        env_cls = getattr(mushroom_rl.environments, self.env_name)
        return env_cls(*self.env_params.values())

    def copy(self):
        """
        Return a deep copy of this builder.
        """
        return deepcopy(self)
lianjia_fetch/proxy/choose_proxy.py | lxyangfan/lianjia_fetch | 0 | 12759064 | #! -*- encoding:utf-8 -*-
import logging
from lib.file_util import read_csv_to_list
from task_def import VarifyProxyTask
# Configure the root logger: a stream handler with a timestamped
# name/level/message format, emitting at DEBUG level.
logger = logging.getLogger()
handler = logging.StreamHandler()
formatter = logging.Formatter(
    '%(asctime)s %(name)-12s %(levelname)-8s %(message)s')
handler.setFormatter(formatter)
logger.addHandler(handler)
logger.setLevel(logging.DEBUG)
def choose_one_useful_proxy():
    """Scan the proxies listed in useful.csv and return the first one that
    passes verification, or None if none of them work."""
    candidates = []
    read_csv_to_list('useful.csv', candidates)
    for candidate in candidates:
        logger.debug("chose proxy is %s", candidate)
        # Probe the proxy with a short timeout; a truthy result means usable.
        if VarifyProxyTask(url=candidate, timeout=1)():
            return candidate
    return None
if __name__ == "__main__":
    # Manual smoke test: pick one working proxy and log it.
    proxy = choose_one_useful_proxy()
    if proxy:
        logger.debug("Find One useful proxy: %s", proxy)
| 2.6875 | 3 |
stylemuzeapp/migrations/0001_initial.py | Abhisheksoni1/stylemuze | 0 | 12759065 | # -*- coding: utf-8 -*-
# Generated by Django 1.9.1 on 2016-01-16 16:15
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Initial schema for the stylemuzeapp application.

    Auto-generated by Django 1.9.1 (makemigrations); edit with care.
    Creates the core tables first, then wires up the foreign keys via
    AddField operations (needed because the models reference each other).
    """

    initial = True

    dependencies = [
    ]

    operations = [
        migrations.CreateModel(
            name='Bff',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
            ],
        ),
        migrations.CreateModel(
            name='BffState',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('state_name', models.CharField(max_length=10)),
                ('state_num', models.IntegerField()),
            ],
        ),
        migrations.CreateModel(
            name='Brand',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('logo_pic_url', models.CharField(max_length=1000)),
                ('name', models.CharField(max_length=200)),
            ],
        ),
        migrations.CreateModel(
            name='Comment',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('comment', models.TextField()),
            ],
        ),
        migrations.CreateModel(
            name='Follow',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
            ],
        ),
        migrations.CreateModel(
            name='Like',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
            ],
        ),
        migrations.CreateModel(
            name='PhotoItem',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('is_bff_item', models.BooleanField()),
                ('time_created', models.DateTimeField(verbose_name='date published')),
                ('title', models.CharField(max_length=500)),
                ('photo_url', models.CharField(max_length=1000)),
                ('brand_assosiated', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='stylemuzeapp.Brand')),
            ],
        ),
        migrations.CreateModel(
            name='User',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('profile_pic_url', models.CharField(max_length=1000, null=True)),
                ('website', models.CharField(max_length=1000, null=True)),
                ('email', models.CharField(max_length=1000, null=True)),
                ('gender', models.BooleanField()),
                ('first_name', models.CharField(max_length=50)),
                ('last_name', models.CharField(max_length=50)),
                ('facebook_id', models.CharField(max_length=128)),
                ('date_registered', models.DateTimeField(verbose_name='date registered')),
                ('last_login', models.DateTimeField(verbose_name='last login time')),
            ],
        ),
        migrations.CreateModel(
            name='Vote',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('from_user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='stylemuzeapp.User')),
                ('item', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='stylemuzeapp.PhotoItem')),
            ],
        ),
        migrations.CreateModel(
            name='VoteOption',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('vote_name', models.CharField(max_length=10)),
                ('vote_num', models.IntegerField()),
            ],
        ),
        migrations.AddField(
            model_name='vote',
            name='vote',
            field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to='stylemuzeapp.VoteOption'),
        ),
        migrations.AddField(
            model_name='photoitem',
            name='user_created',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='stylemuzeapp.User'),
        ),
        migrations.AddField(
            model_name='like',
            name='from_user',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='stylemuzeapp.User'),
        ),
        migrations.AddField(
            model_name='like',
            name='item',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='stylemuzeapp.PhotoItem'),
        ),
        migrations.AddField(
            model_name='follow',
            name='follower_user',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='follow_user_requested', to='stylemuzeapp.User'),
        ),
        migrations.AddField(
            model_name='follow',
            name='following_user',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='follow_user_being_followed', to='stylemuzeapp.User'),
        ),
        migrations.AddField(
            model_name='comment',
            name='from_user',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='stylemuzeapp.User'),
        ),
        migrations.AddField(
            model_name='comment',
            name='item',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='stylemuzeapp.PhotoItem'),
        ),
        migrations.AddField(
            model_name='bff',
            name='follower_user',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='bff_user_requested', to='stylemuzeapp.User'),
        ),
        migrations.AddField(
            model_name='bff',
            name='following_user',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='bff_user_being_bffed', to='stylemuzeapp.User'),
        ),
        migrations.AddField(
            model_name='bff',
            name='state',
            field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to='stylemuzeapp.BffState'),
        ),
    ]
| 1.632813 | 2 |
pybind/slxos/v16r_1_00b/brocade_vcs_rpc/get_vcs_details/output/vcs_details/__init__.py | shivharis/pybind | 0 | 12759066 |
from operator import attrgetter
import pyangbind.lib.xpathhelper as xpathhelper
from pyangbind.lib.yangtypes import RestrictedPrecisionDecimalType, RestrictedClassType, TypedListType
from pyangbind.lib.yangtypes import YANGBool, YANGListType, YANGDynClass, ReferenceType
from pyangbind.lib.base import PybindBase
from decimal import Decimal
from bitarray import bitarray
import __builtin__
class vcs_details(PybindBase):
"""
This class was auto-generated by the PythonClass plugin for PYANG
from YANG module brocade-vcs - based on the path /brocade_vcs_rpc/get-vcs-details/output/vcs-details. Each member element of
the container is represented as a class variable - with a specific
YANG type.
"""
__slots__ = ('_pybind_generated_by', '_path_helper', '_yang_name', '_rest_name', '_extmethods', '__principal_switch_wwn','__co_ordinator_wwn','__local_switch_wwn','__node_vcs_mode','__node_vcs_type','__node_vcs_id',)
_yang_name = 'vcs-details'
_rest_name = 'vcs-details'
_pybind_generated_by = 'container'
def __init__(self, *args, **kwargs):
path_helper_ = kwargs.pop("path_helper", None)
if path_helper_ is False:
self._path_helper = False
elif path_helper_ is not None and isinstance(path_helper_, xpathhelper.YANGPathHelper):
self._path_helper = path_helper_
elif hasattr(self, "_parent"):
path_helper_ = getattr(self._parent, "_path_helper", False)
self._path_helper = path_helper_
else:
self._path_helper = False
extmethods = kwargs.pop("extmethods", None)
if extmethods is False:
self._extmethods = False
elif extmethods is not None and isinstance(extmethods, dict):
self._extmethods = extmethods
elif hasattr(self, "_parent"):
extmethods = getattr(self._parent, "_extmethods", None)
self._extmethods = extmethods
else:
self._extmethods = False
self.__principal_switch_wwn = YANGDynClass(base=unicode, is_leaf=True, yang_name="principal-switch-wwn", rest_name="principal-switch-wwn", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-vcs', defining_module='brocade-vcs', yang_type='string', is_config=True)
self.__local_switch_wwn = YANGDynClass(base=unicode, is_leaf=True, yang_name="local-switch-wwn", rest_name="local-switch-wwn", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-vcs', defining_module='brocade-vcs', yang_type='string', is_config=True)
self.__node_vcs_id = YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="node-vcs-id", rest_name="node-vcs-id", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-vcs', defining_module='brocade-vcs', yang_type='uint32', is_config=True)
self.__node_vcs_type = YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u'vcs-fabric-cluster': {'value': 3}, u'vcs-unknown-cluster': {'value': 1}, u'vcs-stand-alone': {'value': 2}, u'vcs-management-cluster': {'value': 4}},), is_leaf=True, yang_name="node-vcs-type", rest_name="node-vcs-type", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-vcs', defining_module='brocade-vcs', yang_type='vcs-cluster-type', is_config=True)
self.__node_vcs_mode = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="node-vcs-mode", rest_name="node-vcs-mode", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-vcs', defining_module='brocade-vcs', yang_type='boolean', is_config=True)
self.__co_ordinator_wwn = YANGDynClass(base=unicode, is_leaf=True, yang_name="co-ordinator-wwn", rest_name="co-ordinator-wwn", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-vcs', defining_module='brocade-vcs', yang_type='string', is_config=True)
load = kwargs.pop("load", None)
if args:
if len(args) > 1:
raise TypeError("cannot create a YANG container with >1 argument")
all_attr = True
for e in self._pyangbind_elements:
if not hasattr(args[0], e):
all_attr = False
break
if not all_attr:
raise ValueError("Supplied object did not have the correct attributes")
for e in self._pyangbind_elements:
nobj = getattr(args[0], e)
if nobj._changed() is False:
continue
setmethod = getattr(self, "_set_%s" % e)
if load is None:
setmethod(getattr(args[0], e))
else:
setmethod(getattr(args[0], e), load=load)
def _path(self):
if hasattr(self, "_parent"):
return self._parent._path()+[self._yang_name]
else:
return [u'brocade_vcs_rpc', u'get-vcs-details', u'output', u'vcs-details']
def _rest_path(self):
if hasattr(self, "_parent"):
if self._rest_name:
return self._parent._rest_path()+[self._rest_name]
else:
return self._parent._rest_path()
else:
return [u'get-vcs-details', u'output', u'vcs-details']
def _get_principal_switch_wwn(self):
"""
Getter method for principal_switch_wwn, mapped from YANG variable /brocade_vcs_rpc/get_vcs_details/output/vcs_details/principal_switch_wwn (string)
YANG Description: WWN of principal switch
"""
return self.__principal_switch_wwn
def _set_principal_switch_wwn(self, v, load=False):
"""
Setter method for principal_switch_wwn, mapped from YANG variable /brocade_vcs_rpc/get_vcs_details/output/vcs_details/principal_switch_wwn (string)
If this variable is read-only (config: false) in the
source YANG file, then _set_principal_switch_wwn is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_principal_switch_wwn() directly.
YANG Description: WWN of principal switch
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=unicode, is_leaf=True, yang_name="principal-switch-wwn", rest_name="principal-switch-wwn", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-vcs', defining_module='brocade-vcs', yang_type='string', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """principal_switch_wwn must be of a type compatible with string""",
'defined-type': "string",
'generated-type': """YANGDynClass(base=unicode, is_leaf=True, yang_name="principal-switch-wwn", rest_name="principal-switch-wwn", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-vcs', defining_module='brocade-vcs', yang_type='string', is_config=True)""",
})
self.__principal_switch_wwn = t
if hasattr(self, '_set'):
self._set()
def _unset_principal_switch_wwn(self):
self.__principal_switch_wwn = YANGDynClass(base=unicode, is_leaf=True, yang_name="principal-switch-wwn", rest_name="principal-switch-wwn", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-vcs', defining_module='brocade-vcs', yang_type='string', is_config=True)
def _get_co_ordinator_wwn(self):
"""
Getter method for co_ordinator_wwn, mapped from YANG variable /brocade_vcs_rpc/get_vcs_details/output/vcs_details/co_ordinator_wwn (string)
YANG Description: WWN of Co-ordinator switch
"""
return self.__co_ordinator_wwn
def _set_co_ordinator_wwn(self, v, load=False):
"""
Setter method for co_ordinator_wwn, mapped from YANG variable /brocade_vcs_rpc/get_vcs_details/output/vcs_details/co_ordinator_wwn (string)
If this variable is read-only (config: false) in the
source YANG file, then _set_co_ordinator_wwn is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_co_ordinator_wwn() directly.
YANG Description: WWN of Co-ordinator switch
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=unicode, is_leaf=True, yang_name="co-ordinator-wwn", rest_name="co-ordinator-wwn", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-vcs', defining_module='brocade-vcs', yang_type='string', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """co_ordinator_wwn must be of a type compatible with string""",
'defined-type': "string",
'generated-type': """YANGDynClass(base=unicode, is_leaf=True, yang_name="co-ordinator-wwn", rest_name="co-ordinator-wwn", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-vcs', defining_module='brocade-vcs', yang_type='string', is_config=True)""",
})
self.__co_ordinator_wwn = t
if hasattr(self, '_set'):
self._set()
def _unset_co_ordinator_wwn(self):
self.__co_ordinator_wwn = YANGDynClass(base=unicode, is_leaf=True, yang_name="co-ordinator-wwn", rest_name="co-ordinator-wwn", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-vcs', defining_module='brocade-vcs', yang_type='string', is_config=True)
def _get_local_switch_wwn(self):
"""
Getter method for local_switch_wwn, mapped from YANG variable /brocade_vcs_rpc/get_vcs_details/output/vcs_details/local_switch_wwn (string)
YANG Description: WWN of local switch
"""
return self.__local_switch_wwn
def _set_local_switch_wwn(self, v, load=False):
"""
Setter method for local_switch_wwn, mapped from YANG variable /brocade_vcs_rpc/get_vcs_details/output/vcs_details/local_switch_wwn (string)
If this variable is read-only (config: false) in the
source YANG file, then _set_local_switch_wwn is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_local_switch_wwn() directly.
YANG Description: WWN of local switch
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=unicode, is_leaf=True, yang_name="local-switch-wwn", rest_name="local-switch-wwn", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-vcs', defining_module='brocade-vcs', yang_type='string', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """local_switch_wwn must be of a type compatible with string""",
'defined-type': "string",
'generated-type': """YANGDynClass(base=unicode, is_leaf=True, yang_name="local-switch-wwn", rest_name="local-switch-wwn", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-vcs', defining_module='brocade-vcs', yang_type='string', is_config=True)""",
})
self.__local_switch_wwn = t
if hasattr(self, '_set'):
self._set()
def _unset_local_switch_wwn(self):
self.__local_switch_wwn = YANGDynClass(base=unicode, is_leaf=True, yang_name="local-switch-wwn", rest_name="local-switch-wwn", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-vcs', defining_module='brocade-vcs', yang_type='string', is_config=True)
def _get_node_vcs_mode(self):
"""
Getter method for node_vcs_mode, mapped from YANG variable /brocade_vcs_rpc/get_vcs_details/output/vcs_details/node_vcs_mode (boolean)
YANG Description: Node's VCS mode
"""
return self.__node_vcs_mode
def _set_node_vcs_mode(self, v, load=False):
"""
Setter method for node_vcs_mode, mapped from YANG variable /brocade_vcs_rpc/get_vcs_details/output/vcs_details/node_vcs_mode (boolean)
If this variable is read-only (config: false) in the
source YANG file, then _set_node_vcs_mode is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_node_vcs_mode() directly.
YANG Description: Node's VCS mode
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name="node-vcs-mode", rest_name="node-vcs-mode", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-vcs', defining_module='brocade-vcs', yang_type='boolean', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """node_vcs_mode must be of a type compatible with boolean""",
'defined-type': "boolean",
'generated-type': """YANGDynClass(base=YANGBool, is_leaf=True, yang_name="node-vcs-mode", rest_name="node-vcs-mode", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-vcs', defining_module='brocade-vcs', yang_type='boolean', is_config=True)""",
})
self.__node_vcs_mode = t
if hasattr(self, '_set'):
self._set()
def _unset_node_vcs_mode(self):
self.__node_vcs_mode = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="node-vcs-mode", rest_name="node-vcs-mode", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-vcs', defining_module='brocade-vcs', yang_type='boolean', is_config=True)
def _get_node_vcs_type(self):
"""
Getter method for node_vcs_type, mapped from YANG variable /brocade_vcs_rpc/get_vcs_details/output/vcs_details/node_vcs_type (vcs-cluster-type)
YANG Description: Vcs Type
"""
return self.__node_vcs_type
def _set_node_vcs_type(self, v, load=False):
"""
Setter method for node_vcs_type, mapped from YANG variable /brocade_vcs_rpc/get_vcs_details/output/vcs_details/node_vcs_type (vcs-cluster-type)
If this variable is read-only (config: false) in the
source YANG file, then _set_node_vcs_type is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_node_vcs_type() directly.
YANG Description: Vcs Type
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u'vcs-fabric-cluster': {'value': 3}, u'vcs-unknown-cluster': {'value': 1}, u'vcs-stand-alone': {'value': 2}, u'vcs-management-cluster': {'value': 4}},), is_leaf=True, yang_name="node-vcs-type", rest_name="node-vcs-type", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-vcs', defining_module='brocade-vcs', yang_type='vcs-cluster-type', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """node_vcs_type must be of a type compatible with vcs-cluster-type""",
'defined-type': "brocade-vcs:vcs-cluster-type",
'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u'vcs-fabric-cluster': {'value': 3}, u'vcs-unknown-cluster': {'value': 1}, u'vcs-stand-alone': {'value': 2}, u'vcs-management-cluster': {'value': 4}},), is_leaf=True, yang_name="node-vcs-type", rest_name="node-vcs-type", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-vcs', defining_module='brocade-vcs', yang_type='vcs-cluster-type', is_config=True)""",
})
self.__node_vcs_type = t
if hasattr(self, '_set'):
self._set()
def _unset_node_vcs_type(self):
self.__node_vcs_type = YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u'vcs-fabric-cluster': {'value': 3}, u'vcs-unknown-cluster': {'value': 1}, u'vcs-stand-alone': {'value': 2}, u'vcs-management-cluster': {'value': 4}},), is_leaf=True, yang_name="node-vcs-type", rest_name="node-vcs-type", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-vcs', defining_module='brocade-vcs', yang_type='vcs-cluster-type', is_config=True)
    def _get_node_vcs_id(self):
        """
        Getter method for node_vcs_id, mapped from YANG variable /brocade_vcs_rpc/get_vcs_details/output/vcs_details/node_vcs_id (uint32)

        YANG Description: Vcs Id
        """
        # Returns the name-mangled backing attribute maintained by
        # _set_node_vcs_id / _unset_node_vcs_id.
        return self.__node_vcs_id
    def _set_node_vcs_id(self, v, load=False):
        """
        Setter method for node_vcs_id, mapped from YANG variable /brocade_vcs_rpc/get_vcs_details/output/vcs_details/node_vcs_id (uint32)
        If this variable is read-only (config: false) in the
        source YANG file, then _set_node_vcs_id is considered as a private
        method. Backends looking to populate this variable should
        do so via calling thisObj._set_node_vcs_id() directly.

        YANG Description: Vcs Id
        """
        # Unwrap values that carry a pyangbind user-type wrapper before validation.
        if hasattr(v, "_utype"):
            v = v._utype(v)
        try:
            # Validate/coerce the value through the generated uint32 restricted type.
            t = YANGDynClass(v,base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="node-vcs-id", rest_name="node-vcs-id", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-vcs', defining_module='brocade-vcs', yang_type='uint32', is_config=True)
        except (TypeError, ValueError):
            # Re-raise with a structured error payload (pyangbind convention).
            raise ValueError({
                'error-string': """node_vcs_id must be of a type compatible with uint32""",
                'defined-type': "uint32",
                'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="node-vcs-id", rest_name="node-vcs-id", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-vcs', defining_module='brocade-vcs', yang_type='uint32', is_config=True)""",
            })
        self.__node_vcs_id = t
        # Notify the parent container (if it defines _set) that a child changed.
        if hasattr(self, '_set'):
            self._set()
    def _unset_node_vcs_id(self):
        # Restore node-vcs-id to its default-constructed state.
        self.__node_vcs_id = YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="node-vcs-id", rest_name="node-vcs-id", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-vcs', defining_module='brocade-vcs', yang_type='uint32', is_config=True)

    # Public pyangbind property bindings for the getter/setter pairs defined above.
    principal_switch_wwn = __builtin__.property(_get_principal_switch_wwn, _set_principal_switch_wwn)
    co_ordinator_wwn = __builtin__.property(_get_co_ordinator_wwn, _set_co_ordinator_wwn)
    local_switch_wwn = __builtin__.property(_get_local_switch_wwn, _set_local_switch_wwn)
    node_vcs_mode = __builtin__.property(_get_node_vcs_mode, _set_node_vcs_mode)
    node_vcs_type = __builtin__.property(_get_node_vcs_type, _set_node_vcs_type)
    node_vcs_id = __builtin__.property(_get_node_vcs_id, _set_node_vcs_id)

    # Element registry used by pyangbind for serialization/traversal.
    _pyangbind_elements = {'principal_switch_wwn': principal_switch_wwn, 'co_ordinator_wwn': co_ordinator_wwn, 'local_switch_wwn': local_switch_wwn, 'node_vcs_mode': node_vcs_mode, 'node_vcs_type': node_vcs_type, 'node_vcs_id': node_vcs_id, }
| 1.882813 | 2 |
squint/__init__.py | shawnbrown/squint | 3 | 12759067 | <reponame>shawnbrown/squint
# -*- coding: utf-8 -*-
"""squint: simple query interface for tabular data

PYTEST_DONT_REWRITE
"""
from __future__ import absolute_import
import sys as _sys

# Check that `sqlite3` is available. Some non-standard builds
# of Python do not include the full standard library (e.g.,
# Jython 2.7 and Jython 2.5).
try:
    import sqlite3 as _sqlite3
except ImportError:
    # Fix: the exception was previously bound to an unused `err` name.
    # (`raise ... from err` is not used because this file still targets
    # Python 2 / Jython, where that syntax is unavailable.)
    message = (
        'The standard library "sqlite3" package is missing '
        'from the current Python installation:\n\nPython {0}'
    ).format(_sys.version)
    raise ImportError(message)

# Check that `sqlite3` is not too old. Some very old builds
# of Python were compiled with versions of SQLite that are
# incompatible with Squint (e.g., certain builds of Python
# 3.1.4 and Python 2.6.6).
if _sqlite3.sqlite_version_info < (3, 6, 8):
    message = (
        'Squint requires SQLite 3.6.8 or newer but the current '
        'Python installation was built with an old version:\n\n'
        'Python {0}\n\nBuilt with SQLite {1}'
    ).format(_sys.version, _sqlite3.sqlite_version)
    raise ImportError(message)

############################################
# Import squint objects into main namespace.
############################################
from .query import BaseElement
from .select import Select
from .select import Query
from .result import Result
from ._vendor.predicate import Predicate
from . import _preview

__version__ = '0.2.0.dev0'

# Re-home the public classes so repr()/tracebacks show "squint.X"
# instead of the private submodule paths.
BaseElement.__module__ = 'squint'
Select.__module__ = 'squint'
Query.__module__ = 'squint'
Result.__module__ = 'squint'
Predicate.__module__ = 'squint'

# Set display hook for interactive sessions.
_sys.displayhook = _preview.displayhook
| 2.296875 | 2 |
prettyqt/gui/__init__.py | phil65/PrettyQt | 7 | 12759068 | <reponame>phil65/PrettyQt
"""Gui module.
Contains QtGui-based classes
"""
from __future__ import annotations
from prettyqt.qt.QtGui import (
QKeyEvent as KeyEvent,
QMouseEvent as MouseEvent,
QWheelEvent as WheelEvent,
QActionEvent as ActionEvent,
QShowEvent as ShowEvent,
QDropEvent as DropEvent,
QFocusEvent as FocusEvent,
QDragEnterEvent as DragEnterEvent,
QContextMenuEvent as ContextMenuEvent,
QResizeEvent as ResizeEvent,
QNativeGestureEvent as NativeGestureEvent,
QMoveEvent as MoveEvent,
QInputMethodEvent as InputMethodEvent,
QInputMethodQueryEvent as InputMethodQueryEvent,
QCloseEvent as CloseEvent,
QDragLeaveEvent as DragLeaveEvent,
QHelpEvent as HelpEvent,
QHideEvent as HideEvent,
QHoverEvent as HoverEvent,
QDragMoveEvent as DragMoveEvent,
QEnterEvent as EnterEvent,
QExposeEvent as ExposeEvent,
QFileOpenEvent as FileOpenEvent,
QIconDragEvent as IconDragEvent,
QInputEvent as InputEvent,
QPaintEvent as PaintEvent,
QShortcutEvent as ShortcutEvent,
QStatusTipEvent as StatusTipEvent,
QTouchEvent as TouchEvent,
QTabletEvent as TabletEvent,
QWindowStateChangeEvent as WindowStateChangeEvent,
QWhatsThisClickedEvent as WhatsThisClickedEvent,
QScrollEvent as ScrollEvent,
QScrollPrepareEvent as ScrollPrepareEvent,
)
# not available in PySide2
# from prettyqt.qt.QtGui import QPlatformSurfaceEvent as PlatformSurfaceEvent
from .textobjectinterface import TextObjectInterface
from .drag import Drag
from .screen import Screen
from .keysequence import KeySequence
from .surface import Surface
from .window import Window
from .icon import Icon
from .paintdevice import PaintDevice
from .paintdevicewindow import PaintDeviceWindow
from .pixmap import Pixmap
from .iconengine import IconEngine
from .pixmapcache import PixmapCache
from .bitmap import Bitmap
from .image import Image
from .rasterwindow import RasterWindow
from .clipboard import Clipboard
from .inputmethod import InputMethod
from .sessionmanager import SessionManager
from .validator import Validator
from .regularexpressionvalidator import RegularExpressionValidator
from .textlayout import TextLayout
from .textline import TextLine
from .textoption import TextOption
from .textblock import TextBlock
from .textdocumentwriter import TextDocumentWriter
from .textdocument import TextDocument
from .textdocumentfragment import TextDocumentFragment
from .statictext import StaticText
from .intvalidator import IntValidator
from .doublevalidator import DoubleValidator
from .color import Color
from .brush import Brush
from .textblockuserdata import TextBlockUserData
from .gradient import Gradient
from .lineargradient import LinearGradient
from .radialgradient import RadialGradient
from .conicalgradient import ConicalGradient
from .pagesize import PageSize
from .pagelayout import PageLayout
from .font import Font
from .fontmetrics import FontMetrics
from .fontmetricsf import FontMetricsF
from .fontinfo import FontInfo
from .fontdatabase import FontDatabase
from .region import Region
from .movie import Movie
from .transform import Transform
from .pagedpaintdevice import PagedPaintDevice
from .pen import Pen
from .picture import Picture
from .painter import Painter
from .painterpath import PainterPath
from .painterpathstroker import PainterPathStroker
from .palette import Palette
from .guiapplication import GuiApplication
from .cursor import Cursor
from .polygon import Polygon
from .polygonf import PolygonF
from .standarditem import StandardItem
from .standarditemmodel import StandardItemModel
from .textcursor import TextCursor
from .pdfwriter import PdfWriter
from .desktopservices import DesktopServices
from .matrix4x4 import Matrix4x4
from .vector3d import Vector3D
from .vector4d import Vector4D
from .imageiohandler import ImageIOHandler
from .imagereader import ImageReader
from .imagewriter import ImageWriter
from .textformat import TextFormat
from .textlength import TextLength
from .textframeformat import TextFrameFormat
from .textcharformat import TextCharFormat
from .textimageformat import TextImageFormat
from .textlistformat import TextListFormat
from .texttablecellformat import TextTableCellFormat
from .textobject import TextObject
from .textblockgroup import TextBlockGroup
from .textframe import TextFrame
from .abstracttextdocumentlayout import AbstractTextDocumentLayout
from .syntaxhighlighter import SyntaxHighlighter
from prettyqt import core
if core.VersionNumber.get_qt_version() >= (5, 13, 0):
from .colorspace import ColorSpace
def app(args: list[str] | None = None) -> GuiApplication:
    """Return the running GuiApplication, creating one if none exists yet."""
    running = GuiApplication.instance()
    if running is None:
        return GuiApplication([] if args is None else args)
    return running
# NOTE: "PlatformSurfaceEvent" was removed from this list -- the corresponding
# import is commented out above (not available in PySide2), so listing it made
# `from prettyqt.gui import *` raise AttributeError. "ColorSpace" is appended
# conditionally below for the same reason (only imported on Qt >= 5.13).
__all__ = [
    "app",
    "KeyEvent",
    "MouseEvent",
    "WheelEvent",
    "ActionEvent",
    "ShowEvent",
    "ContextMenuEvent",
    "ResizeEvent",
    "NativeGestureEvent",
    "InputMethodQueryEvent",
    "InputMethodEvent",
    "TextObjectInterface",
    "SessionManager",
    "CloseEvent",
    "DragLeaveEvent",
    "MoveEvent",
    "HelpEvent",
    "HideEvent",
    "HoverEvent",
    "DragMoveEvent",
    "TouchEvent",
    "TabletEvent",
    "WindowStateChangeEvent",
    "FileOpenEvent",
    "IconDragEvent",
    "InputEvent",
    "PaintEvent",
    "ShortcutEvent",
    "StatusTipEvent",
    "EnterEvent",
    "ExposeEvent",
    "WhatsThisClickedEvent",
    "ScrollEvent",
    "ScrollPrepareEvent",
    "DropEvent",
    "DragEnterEvent",
    "FocusEvent",
    "Drag",
    "Screen",
    "Gradient",
    "LinearGradient",
    "RadialGradient",
    "ConicalGradient",
    "PageSize",
    "PageLayout",
    "Clipboard",
    "GuiApplication",
    "Validator",
    "TextLayout",
    "TextLine",
    "TextOption",
    "TextBlock",
    "TextBlockGroup",
    "TextDocumentWriter",
    "TextDocument",
    "TextDocumentFragment",
    "StaticText",
    "RegularExpressionValidator",
    "IntValidator",
    "DoubleValidator",
    "Brush",
    "TextBlockUserData",
    "Color",
    "Font",
    "FontMetrics",
    "FontMetricsF",
    "FontInfo",
    "FontDatabase",
    "Region",
    "Movie",
    "PaintDevice",
    "Transform",
    "PagedPaintDevice",
    "Icon",
    "Image",
    "Pen",
    "Picture",
    "Pixmap",
    "IconEngine",
    "PixmapCache",
    "Bitmap",
    "Painter",
    "PainterPath",
    "PainterPathStroker",
    "Palette",
    "Cursor",
    "Polygon",
    "PolygonF",
    "StandardItem",
    "StandardItemModel",
    "TextCharFormat",
    "TextImageFormat",
    "TextListFormat",
    "TextTableCellFormat",
    "TextCursor",
    "SyntaxHighlighter",
    "PdfWriter",
    "KeySequence",
    "Surface",
    "Window",
    "DesktopServices",
    "Matrix4x4",
    "Vector3D",
    "Vector4D",
    "PaintDeviceWindow",
    "RasterWindow",
    "ImageIOHandler",
    "ImageReader",
    "ImageWriter",
    "TextObject",
    "TextLength",
    "TextFormat",
    "TextFrameFormat",
    "TextFrame",
    "AbstractTextDocumentLayout",
    "InputMethod",
]

# ColorSpace only exists when the conditional import above succeeded.
if core.VersionNumber.get_qt_version() >= (5, 13, 0):
    __all__.append("ColorSpace")
| 1.375 | 1 |
server/athenian/api/controllers/features/github/developer_metrics.py | athenianco/athenian-api | 9 | 12759069 | from typing import Collection, Dict, List, Sequence, Type, TypeVar
import numpy as np
import pandas as pd
from athenian.api.controllers.features.metric import Metric, MetricInt
from athenian.api.controllers.features.metric_calculator import AnyMetricCalculator, \
BinnedMetricCalculator, \
MetricCalculator, MetricCalculatorEnsemble, SumMetricCalculator
from athenian.api.controllers.miners.github.developer import developer_changed_lines_column, \
developer_identity_column, DeveloperTopic
from athenian.api.controllers.miners.github.pull_request import ReviewResolution
from athenian.api.models.metadata.github import PullRequest, PullRequestComment, \
PullRequestReview, PullRequestReviewComment, PushCommit, \
Release
# Registry mapping DeveloperTopic.value -> calculator class;
# populated by the @register_metric decorator below.
metric_calculators: Dict[str, Type[MetricCalculator]] = {}
T = TypeVar("T")  # NOTE(review): appears unused in the visible code -- confirm before removing.
def register_metric(topic: DeveloperTopic):
    """Class decorator registering a `MetricCalculator` under a developer topic."""
    assert isinstance(topic, DeveloperTopic)

    def decorator(calculator_class: Type[MetricCalculator]):
        metric_calculators[topic.value] = calculator_class
        return calculator_class

    return decorator
class DeveloperMetricCalculatorEnsemble(MetricCalculatorEnsemble):
    """MetricCalculatorEnsemble adapted for developers."""

    def __init__(self, *metrics: str, quantiles: Sequence[float], quantile_stride: int):
        """Initialize a new instance of DeveloperMetricCalculatorEnsemble class."""
        # Resolve metric names through the module-level `metric_calculators` registry.
        super().__init__(*metrics,
                         quantiles=quantiles,
                         quantile_stride=quantile_stride,
                         class_mapping=metric_calculators)
class DeveloperBinnedMetricCalculator(BinnedMetricCalculator):
    """BinnedMetricCalculator adapted for developers."""

    # The base class instantiates this ensemble for each time bin.
    ensemble_class = DeveloperMetricCalculatorEnsemble
class DeveloperTopicCounter(SumMetricCalculator[int]):
    """Count all `topic` events in each time interval."""

    may_have_negative_values = False
    metric = MetricInt
    # Name of the facts column holding the event timestamp; set by subclasses.
    timestamp_column: str

    def _analyze(self,
                 facts: pd.DataFrame,
                 min_times: np.ndarray,
                 max_times: np.ndarray,
                 **kwargs) -> np.array:
        """Mark 1 for each (interval, fact) pair whose timestamp lies in the interval."""
        events = facts[self.timestamp_column].astype(min_times.dtype, copy=False).values
        in_interval = (min_times[:, None] <= events) & (events < max_times[:, None])
        out = np.full((len(min_times), len(facts)), self.nan, self.dtype)
        out[in_interval] = 1
        return out
class DeveloperTopicSummator(SumMetricCalculator[int]):
    """Sum all `topic` events in each time interval."""

    may_have_negative_values = False
    metric = MetricInt
    # Name of the facts column holding the value to sum; set by subclasses.
    topic_column: str
    # Name of the facts column holding the event timestamp; set by subclasses.
    timestamp_column: str

    def _analyze(self,
                 facts: pd.DataFrame,
                 min_times: np.ndarray,
                 max_times: np.ndarray,
                 **kwargs) -> np.array:
        """Place each fact's topic value into every interval that contains its timestamp."""
        result = np.full((len(min_times), len(facts)), self.nan, self.dtype)
        topic_column = facts[self.topic_column].values
        ts_column = facts[self.timestamp_column].values
        column_in_range = (min_times[:, None] <= ts_column) & (ts_column < max_times[:, None])
        # Vectorized replacement of the former per-interval Python loop:
        # for every True cell (i, j) assign topic_column[j].
        result[column_in_range] = \
            np.broadcast_to(topic_column, column_in_range.shape)[column_in_range]
        return result
@register_metric(DeveloperTopic.commits_pushed)
class CommitsPushedCounter(DeveloperTopicCounter):
    """Calculate "dev-commits-pushed" metric."""

    # One event per commit, bucketed by the commit time.
    timestamp_column = PushCommit.committed_date.name


@register_metric(DeveloperTopic.lines_changed)
class LinesChangedCounter(DeveloperTopicSummator):
    """Calculate "dev-lines-changed" metric."""

    # Sum the per-commit changed-line counts, bucketed by the commit time.
    topic_column = developer_changed_lines_column
    timestamp_column = PushCommit.committed_date.name
@register_metric(DeveloperTopic.active)
class ActiveCounter(MetricCalculator[int]):
    """Calculate "dev-active" metric."""

    # A developer counts as "active" when they committed on more than this
    # fraction of the days in the interval.
    ACTIVITY_DAYS_THRESHOLD_DENSITY = 0.2
    may_have_negative_values = False
    metric = MetricInt

    def _value(self, samples: np.ndarray) -> Metric[int]:
        # Each sample encodes two numbers in one int (see _analyze):
        #   sample % 1000000  -> interval length in days,
        #   sample // 1000000 -> the day of the commit (scaled).
        # This presumably relies on day-floored datetime64[ns] values being
        # exact multiples of 1,000,000 when viewed as int -- TODO confirm units.
        if len(samples) > 0:
            days = samples[0] % 1000000
            active = len(np.unique(samples // 1000000))
        else:
            days = 1
            active = 0
        assert days > 0
        value = int(active / days > self.ACTIVITY_DAYS_THRESHOLD_DENSITY)
        return self.metric.from_fields(True, value, None, None)

    def _analyze(self,
                 facts: pd.DataFrame,
                 min_times: np.ndarray,
                 max_times: np.ndarray,
                 **kwargs) -> np.array:
        # Floor commit timestamps to whole days so distinct active days can be counted.
        column = facts[PushCommit.committed_date.name].dt.floor(freq="D").values
        column_in_range = (min_times[:, None] <= column) & (column < max_times[:, None])
        timestamps = np.repeat(column[None, :], len(min_times), axis=0)
        # Reinterpret datetime64 values as raw integers and add the interval
        # length (in days) so each sample carries both facts for _value().
        result = timestamps.view(int)
        lengths = (max_times - min_times).astype("timedelta64[D]").view(int)
        result += lengths[:, None]
        result[~column_in_range] = self.nan
        return result
@register_metric(DeveloperTopic.active0)
class Active0Counter(AnyMetricCalculator[int]):
    """Calculate "dev-active0" metric."""

    # Reuses the samples produced by ActiveCounter instead of recomputing them.
    deps = (ActiveCounter,)
    may_have_negative_values = False
    metric = MetricInt

    def _analyze(self,
                 facts: pd.DataFrame,
                 min_times: np.ndarray,
                 max_times: np.ndarray,
                 **kwargs) -> np.array:
        # Forward the dependency's raw per-sample matrix unchanged.
        return self._calcs[0].peek
@register_metric(DeveloperTopic.prs_created)
class PRsCreatedCounter(DeveloperTopicCounter):
    """Calculate "dev-prs-created" metric."""

    timestamp_column = PullRequest.created_at.name


@register_metric(DeveloperTopic.prs_merged)
class PRsMergedCounter(DeveloperTopicCounter):
    """Calculate "dev-prs-merged" metric."""

    timestamp_column = PullRequest.merged_at.name


@register_metric(DeveloperTopic.releases)
class ReleasesCounter(DeveloperTopicCounter):
    """Calculate "dev-releases" metric."""

    timestamp_column = Release.published_at.name


@register_metric(DeveloperTopic.regular_pr_comments)
class RegularPRCommentsCounter(DeveloperTopicCounter):
    """Calculate "dev-regular-pr-comments" metric."""

    timestamp_column = PullRequestComment.created_at.name


@register_metric(DeveloperTopic.review_pr_comments)
class ReviewPRCommentsCounter(DeveloperTopicCounter):
    """Calculate "dev-review-pr-comments" metric."""

    timestamp_column = PullRequestReviewComment.created_at.name


@register_metric(DeveloperTopic.pr_comments)
class PRCommentsCounter(DeveloperTopicCounter):
    """Calculate "dev-pr-comments" metric."""

    # Plain column name: the facts frame for this topic is presumably a merge of
    # regular and review comments sharing a bare "created_at" column -- confirm.
    timestamp_column = "created_at"
@register_metric(DeveloperTopic.prs_reviewed)
class PRReviewedCounter(SumMetricCalculator[int]):
    """Calculate "dev-prs-reviewed" metric."""

    may_have_negative_values = False
    metric = MetricInt

    def _analyze(self,
                 facts: pd.DataFrame,
                 min_times: np.ndarray,
                 max_times: np.ndarray,
                 **kwargs) -> np.array:
        result = np.full((len(min_times), len(facts)), self.nan, self.dtype)
        column = facts[PullRequestReview.submitted_at.name].values
        column_in_range = (min_times[:, None] <= column) & (column < max_times[:, None])
        # Count each (PR, reviewer) pair only once, even if the reviewer
        # submitted several reviews on the same PR.
        duplicated = facts.duplicated([
            PullRequestReview.pull_request_node_id.name, developer_identity_column,
        ]).values
        column_in_range[np.broadcast_to(duplicated[None, :], result.shape)] = False
        result[column_in_range] = 1
        return result
@register_metric(DeveloperTopic.reviews)
class ReviewsCounter(DeveloperTopicCounter):
    """Calculate "dev-reviews" metric."""

    # Unlike PRReviewedCounter, every submitted review counts (no dedup).
    timestamp_column = PullRequestReview.submitted_at.name
class ReviewStatesCounter(SumMetricCalculator[int]):
    """Count reviews with the specified outcome in `state`."""

    may_have_negative_values = False
    metric = MetricInt
    # A ReviewResolution member; set by subclasses.
    state = None

    def _analyze(self,
                 facts: pd.DataFrame,
                 min_times: np.ndarray,
                 max_times: np.ndarray,
                 **kwargs) -> np.array:
        result = np.full((len(min_times), len(facts)), self.nan, self.dtype)
        column = facts[PullRequestReview.submitted_at.name].values
        column_in_range = (min_times[:, None] <= column) & (column < max_times[:, None])
        # Mask out reviews whose state does not match the subclass's target state.
        wrong_state = facts[PullRequestReview.state.name].values != self.state.value
        column_in_range[np.broadcast_to(wrong_state[None, :], result.shape)] = False
        result[column_in_range] = 1
        return result
@register_metric(DeveloperTopic.review_approvals)
class ApprovalsCounter(ReviewStatesCounter):
    """Calculate "dev-review-approved" metric."""

    state = ReviewResolution.APPROVED


@register_metric(DeveloperTopic.review_rejections)
class RejectionsCounter(ReviewStatesCounter):
    """Calculate "dev-review-rejected" metric."""

    state = ReviewResolution.CHANGES_REQUESTED


@register_metric(DeveloperTopic.review_neutrals)
class NeutralReviewsCounter(ReviewStatesCounter):
    """Calculate "dev-review-neutrals" metric."""

    state = ReviewResolution.COMMENTED
@register_metric(DeveloperTopic.worked)
class WorkedCounter(AnyMetricCalculator[int]):
    """Calculate "dev-worked" metric."""

    # A developer "worked" in an interval if ANY of these activity
    # calculators registered at least one event there.
    deps = (
        PRsCreatedCounter,
        PRsMergedCounter,
        ReleasesCounter,
        CommitsPushedCounter,
        ReviewsCounter,
        RegularPRCommentsCounter,
    )
    may_have_negative_values = False
    metric = MetricInt

    def _analyze(self,
                 facts: pd.DataFrame,
                 min_times: np.ndarray,
                 max_times: np.ndarray,
                 **kwargs) -> np.array:
        result = np.full((len(min_times), len(facts)), 0, self.dtype)
        # OR together the activity masks of every dependency.
        for calc in self._calcs:
            result |= calc.peek > 0
        # Cells with no activity at all become NaN (excluded from the sum).
        result[result == 0] = self.nan
        return result
def group_actions_by_developers(devs: Sequence[Collection[str]],
                                df: pd.DataFrame,
                                ) -> List[np.ndarray]:
    """Group developer actions by developer groups.

    :param devs: Developer login groups; each group selects the rows whose \
                 identity column matches any login in the group.
    :param df: Actions DataFrame with a `developer_identity_column` column.
    :return: For every group, the array of row indexes of `df` that belong to it.
    """
    indexes = []
    identities = df[developer_identity_column].values.astype("S")
    for group in devs:
        if len(group) == 1:
            # Fast path: a single developer needs only an equality scan.
            dev = next(iter(group))
            indexes.append(np.nonzero(identities == dev.encode())[0])
            continue
        if isinstance(group, set):
            group = list(group)  # np.array cannot consume a set directly
        # np.isin replaces np.in1d, which is a deprecated legacy alias.
        indexes.append(np.nonzero(np.isin(identities, np.array(group, dtype="S")))[0])
    return indexes
| 2.28125 | 2 |
pogom/search.py | 1623tennyson/pogomaplatestversion | 0 | 12759070 | <reponame>1623tennyson/pogomaplatestversion<filename>pogom/search.py
#!/usr/bin/python
# -*- coding: utf-8 -*-
import logging
import math
import time
from sys import maxint
import collections
import cProfile
from geographiclib.geodesic import Geodesic
from pgoapi import PGoApi
from pgoapi.utilities import f2i, get_cellid, get_pos_by_name
from .models import parse_map, SearchConfig
log = logging.getLogger(__name__)
# 21 NUL bytes: the protobuf "since" timestamp blob meaning "no previous map data".
TIMESTAMP = '\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000'
# Delay (seconds) between retried map requests in the synchronous scanner.
REQ_SLEEP = 1

# Module-level singletons shared by all scan functions.
# NOTE(review): a single PGoApi instance is shared across the whole module --
# presumably the callers are single-threaded; confirm before parallelizing.
api = PGoApi()
queue = collections.deque()
# Consecutive failed get_map_objects calls; drives error_throttle() backoff.
consecutive_map_fails = 0

# Wall-clock time when the current full scan started (0 = never scanned).
scan_start_time = 0
# Minimum duration of one full scan cycle, in seconds.
min_time_per_scan = 3 * 60
def set_cover():
    """Build the hexagonal grid of scan points covering the configured circle.

    Grows rings of points spaced sqrt(3)*100 m apart around the origin until a
    whole ring falls outside SearchConfig.RADIUS, then stores every in-radius
    point in SearchConfig.COVER.

    NOTE(review): this is Python 2 code (`xrange`, `maxint`); the expressions
    `j - j / i - 1 + ...` and `(j+i-1)/i * 60` rely on Python 2 integer
    division -- do not port to Python 3 without adding `//`.
    """
    lat = SearchConfig.ORIGINAL_LATITUDE
    lng = SearchConfig.ORIGINAL_LONGITUDE
    # Distance between neighboring points; 100 m is presumably the scan
    # (pokemon visibility) radius -- confirm.
    d = math.sqrt(3) * 100
    # Ring 0 is the origin itself; each entry keeps 's' = distance to origin.
    points = [[{'lat2': lat, 'lon2': lng, 's': 0}]]
    for i in xrange(1, maxint):
        oor_counter = 0
        points.append([])
        # Ring i has 6*i points, each derived geodesically from a ring i-1 point.
        for j in range(0, 6 * i):
            p = points[i - 1][(j - j / i - 1 + (j % i == 0))]
            p_new = Geodesic.WGS84.Direct(p['lat2'], p['lon2'], (j+i-1)/i * 60, d)
            p_new['s'] = Geodesic.WGS84.Inverse(p_new['lat2'], p_new['lon2'], lat, lng)['s12']
            points[i].append(p_new)
            if p_new['s'] > SearchConfig.RADIUS:
                oor_counter += 1
        # Stop once an entire ring is out of range.
        if oor_counter == 6 * i:
            break
    cover = [{"lat": p['lat2'], "lng": p['lon2']}
             for sublist in points for p in sublist if p['s'] < SearchConfig.RADIUS]
    SearchConfig.COVER = cover
def set_location(location, radius):
    """Geocode `location` and store the scan origin and radius in SearchConfig."""
    lat, lng, alt = get_pos_by_name(location)
    log.info('Parsed location is: {:.4f}/{:.4f}/{:.4f} (lat/lng/alt)'.
             format(lat, lng, alt))
    SearchConfig.ORIGINAL_LATITUDE = lat
    SearchConfig.ORIGINAL_LONGITUDE = lng
    SearchConfig.RADIUS = radius
def send_map_request(api, position, args):
    """Synchronously download map objects around `position`.

    Returns the API response dict, or False on any failure.
    NOTE(review): the `api` parameter shadows the module-level `api` global.
    """
    try:
        login_if_necessary(args, position)
        api.set_position(*position)
        api.get_map_objects(latitude=f2i(position[0]),
                            longitude=f2i(position[1]),
                            since_timestamp_ms=TIMESTAMP,
                            cell_id=get_cellid(position[0], position[1]))
        return api.call()
    except Exception:  # make sure we dont crash in the main loop
        log.exception("Uncaught exception when downloading map")
        return False
def generate_location_steps():
    """Yield a (lat, lng, altitude) tuple for every point of the scan cover."""
    for cell in SearchConfig.COVER:
        yield (cell["lat"], cell["lng"], 0)
def login(args, position):
    """Log in to the API, retrying with exponential backoff until it succeeds."""
    # Mark as logged out while the attempt is in flight.
    SearchConfig.LOGGED_IN = 0
    log.info('Attempting login')
    consecutive_fails = 0
    api.set_position(*position)
    while not api.login(args.auth_service, args.username, args.password):
        # Exponential backoff capped at 5 minutes.
        sleep_t = min(math.exp(consecutive_fails / 1.7), 5 * 60)
        log.info('Login failed, retrying in {:.2f} seconds'.format(sleep_t))
        consecutive_fails += 1
        time.sleep(sleep_t)
    # Record the login time so staleness can be reported elsewhere.
    SearchConfig.LOGGED_IN = time.time()
    log.info('Login successful')
def login_if_necessary(args, position):
    """Log in when there is no auth ticket or the current one expires within a minute.

    Fix: dropped the needless `global api` declaration -- the module-level
    `api` object is only read here, never rebound.
    """
    if api._rpc.auth_provider and api._rpc.auth_provider._ticket_expire:
        # Ticket expiry is in milliseconds since the epoch.
        remaining_time = api._rpc.auth_provider._ticket_expire / 1000 - time.time()
        if remaining_time < 60:
            log.info("Login has or is about to expire")
            login(args, position)
    else:
        login(args, position)
def search(args):
    """Run one full synchronous scan over every point of the cover.

    Aborts early (without finishing the cover) when SearchConfig.CHANGE is set,
    which signals that the scan location was changed by the user.
    """
    num_steps = len(SearchConfig.COVER)
    i = 1
    for step_location in generate_location_steps():
        log.debug('Scanning step {:d} of {:d}.'.format(i, num_steps))
        log.debug('Scan location is {:f}, {:f}'.format(step_location[0], step_location[1]))

        response_dict = send_map_request(api, step_location, args)
        # Retry the same step until the download succeeds.
        while not response_dict:
            log.info('Map Download failed. Trying again.')
            response_dict = send_map_request(api, step_location, args)
            time.sleep(REQ_SLEEP)

        try:
            parse_map(response_dict)
        except KeyError:
            log.exception('Failed to parse response: {}'.format(response_dict))
        except:  # make sure we dont crash in the main loop
            log.exception('Unexpected error')

        SearchConfig.LAST_SUCCESSFUL_REQUEST = time.time()
        log.info('Completed {:5.2f}% of scan.'.format(float(i) / num_steps * 100))

        if SearchConfig.CHANGE:
            SearchConfig.CHANGE = False
            break
        i += 1
def search_async(args):
    """Drain the module-level `queue` of cover points using async API calls.

    Responses are handled by `callback`; failed downloads are throttled by
    `error_throttle`. When SearchConfig.CHANGE is set the queue is refilled
    with the new cover and the scan continues from there.
    """
    num_steps = len(SearchConfig.COVER)
    log.info("Starting scan of {} locations".format(num_steps))

    i = 1
    while len(queue) > 0:
        c = queue.pop()
        step_location = (c["lat"], c["lng"], 0)
        log.debug('Scanning step {:d} of {:d}.'.format(i, num_steps))
        log.debug('Scan location is {:f}, {:f}'.format(step_location[0], step_location[1]))
        login_if_necessary(args, step_location)

        # Back off before issuing the next request if previous ones failed.
        error_throttle()

        api.set_position(*step_location)
        api.get_map_objects(latitude=f2i(step_location[0]),
                            longitude=f2i(step_location[1]),
                            since_timestamp_ms=TIMESTAMP,
                            cell_id=get_cellid(step_location[0], step_location[1]))
        api.call_async(callback)

        if SearchConfig.CHANGE:
            log.info("Changing scan location")
            SearchConfig.CHANGE = False
            queue.clear()
            queue.extend(SearchConfig.COVER)

        # Periodically dump transport statistics.
        if (i%20 == 0):
            log.info(api._rpc._curl.stats())
        i += 1

    # Wait for all outstanding async requests before reporting final stats.
    api.finish_async()
    log.info(api._rpc._curl.stats())
    api._rpc._curl.reset_stats()
def error_throttle():
    """Back off exponentially after consecutive failed map downloads."""
    if consecutive_map_fails == 0:
        return
    # Exponential backoff capped at 2 minutes.
    sleep_t = min(math.exp(1.0 * consecutive_map_fails / 5) - 1, 2*60)
    log.info('Loading map failed, waiting {:.5f} seconds'.format(sleep_t))
    start_sleep = time.time()
    # Let pending async RPCs drain during the wait, then sleep out the remainder.
    api.finish_async(sleep_t)
    time.sleep(max(start_sleep + sleep_t - time.time(), 0))
def callback(response_dict):
    """Async completion handler: parse a map response and track failures.

    Increments `consecutive_map_fails` on any failure so error_throttle()
    can back off; resets it on success.
    """
    global consecutive_map_fails
    # An empty dict or an empty 'responses' payload both count as failure.
    if (not response_dict) or ('responses' in response_dict and not response_dict['responses']):
        log.info('Map Download failed. Trying again.')
        consecutive_map_fails += 1
        return
    try:
        parse_map(response_dict)
        SearchConfig.LAST_SUCCESSFUL_REQUEST = time.time()
        consecutive_map_fails = 0
        log.debug("Parsed & saved.")
    except KeyError:
        log.exception('Failed to parse response: {}'.format(response_dict))
        consecutive_map_fails += 1
    except:  # make sure we dont crash in the main loop
        log.exception('Unexpected error when parsing response: {}'.format(response_dict))
        consecutive_map_fails += 1
def throttle():
    """Pace full scans so consecutive cycles are at least `min_time_per_scan` apart."""
    if scan_start_time == 0:
        # Nothing to pace against before the very first scan.
        return
    elapsed = time.time() - scan_start_time
    remaining = min_time_per_scan - elapsed
    if remaining < 0:
        remaining = 0
    log.info("Scan finished. Sleeping {:.2f} seconds before continuing.".format(remaining))
    SearchConfig.LAST_SUCCESSFUL_REQUEST = -1
    time.sleep(remaining)
def search_loop_async(args):
    """Run async scans forever, throttling between cycles and timing each one."""
    global scan_start_time
    while True:
        throttle()
        scan_start_time = time.time()
        # Reversed so queue.pop() (right end) yields points in cover order.
        queue.extend(SearchConfig.COVER[::-1])
        search_async(args)
        SearchConfig.COMPLETE_SCAN_TIME = time.time() - scan_start_time
def search_loop(args):
    """Run synchronous scans forever, timing each complete cycle.

    NOTE(review): unlike search_loop_async, this loop never calls throttle() --
    presumably intentional for the synchronous path; confirm.
    """
    global scan_start_time
    while True:
        scan_start_time = time.time()
        search(args)
        log.info("Finished scan")
        SearchConfig.COMPLETE_SCAN_TIME = time.time() - scan_start_time
| 1.9375 | 2 |
backend/mqtt_react/python_bugg/paho.mqtt.python/examples/loop_select.py | Jegeva/BruCON_2021 | 0 | 12759071 | #!/usr/bin/env python3
import socket
import uuid
import paho.mqtt.client as mqtt
from select import select
from time import time
# Unique client id so parallel runs of this example do not clash on the broker;
# the same string doubles as the example's publish/subscribe topic.
client_id = 'paho-mqtt-python/issue72/' + str(uuid.uuid4())
topic = client_id
print("Using client_id / topic: " + client_id)
class SelectMqttExample:
    """Drive a paho-mqtt client with select() instead of the built-in network loop.

    Publishes a large payload to `topic` every 5 seconds and waits for it to be
    echoed back via the subscription; after three round-trips it disconnects.
    State machine: even states (0/2/4) = idle, odd states (1/3/5) = published
    and waiting for the echo, 6 = done.
    """

    def __init__(self):
        pass

    def on_connect(self, client, userdata, flags, rc):
        """Subscribe once the broker acknowledges the connection."""
        print("Subscribing")
        client.subscribe(topic)

    def on_message(self, client, userdata, msg):
        """Advance the state machine on each echoed message."""
        if self.state not in {1, 3, 5}:
            # Fix: MQTTMessage has no decode(); the payload bytes do.
            print("Got unexpected message: {}".format(msg.payload.decode()))
            return

        print("Got message with len {}".format(len(msg.payload)))
        self.state += 1
        self.t = time()

    def on_disconnect(self, client, userdata, rc):
        """Record the disconnect so the main loop can exit."""
        self.disconnected = True, rc

    def do_select(self):
        """Wait up to 1s for the client socket to be ready, then service it."""
        sock = self.client.socket()
        if not sock:
            raise Exception("Socket is gone")

        print("Selecting for reading" + (" and writing" if self.client.want_write() else ""))
        r, w, e = select(
            [sock],
            [sock] if self.client.want_write() else [],
            [],
            1
        )

        if sock in r:
            print("Socket is readable, calling loop_read")
            self.client.loop_read()

        if sock in w:
            print("Socket is writable, calling loop_write")
            self.client.loop_write()

        # Housekeeping: keepalive pings and message retries.
        self.client.loop_misc()

    def main(self):
        self.disconnected = (False, None)
        self.t = time()
        self.state = 0

        self.client = mqtt.Client(client_id=client_id)
        self.client.on_connect = self.on_connect
        self.client.on_message = self.on_message
        self.client.on_disconnect = self.on_disconnect

        self.client.connect('mqtt.eclipseprojects.io', 1883, 60)
        print("Socket opened")
        # Shrink the send buffer so want_write() stays True long enough for the
        # select() write path to actually be exercised with the large payload.
        self.client.socket().setsockopt(socket.SOL_SOCKET, socket.SO_SNDBUF, 2048)

        while not self.disconnected[0]:
            self.do_select()

            if self.state in {0, 2, 4}:
                # Idle: publish again once 5 seconds have passed.
                if time() - self.t >= 5:
                    print("Publishing")
                    self.client.publish(topic, b'Hello' * 40000)
                    self.state += 1

            if self.state == 6:
                self.state += 1
                self.client.disconnect()

        print("Disconnected: {}".format(self.disconnected[1]))
print("Starting")
SelectMqttExample().main()
print("Finished")
| 2.78125 | 3 |
hw2-romv-scheduler/transaction.py | eladn/romv-scheduler-python | 1 | 12759072 | <gh_stars>1-10
from timestamps_manager import Timestamp
from operation import Operation
# Transaction is the main API between the scheduler and the user.
# It allows the user to add operations to it, using `add_operation(..)` method.
# The scheduler calls the method `try_perform_next_operation(..)` when it decides to.
# When the scheduler calls this method, the transaction tells the next operation to
# try perform itself, using the method `next_operation.try_perform(..)`.
# If the next operation successfully performed itself, the transaction would remove
# this operation from the `_waiting_operations_queue`.
# After each time the scheduler tries to execute the next operation (using the above
# mentioned method), a users' callback is called. If the operation has been successfully
# completed, the callback `on_operation_complete_callback(..)` is called.
# Otherwise, the callback `on_operation_failed_callback(..)` is called.
# When `try_perform_next_operation(..)` is called (by the scheduler) but the queue
# `_waiting_operations_queue` is empty, the scheduler calls to the user callback
# `_ask_user_for_next_operation_callback(..)`. It gives the user an opportunity to add
# the next operation for that transaction. However, the user does not have to do so.
# The user can also take advantage of the callback `on_operation_complete_callback(..)`
# in order to add the next operation to be performed.
# All of these mentioned users' callbacks are set on the transaction creation.
class Transaction:
    def __init__(self, transaction_id, is_read_only: bool=False,
                 on_operation_complete_callback=None,
                 on_operation_failed_callback=None,
                 on_transaction_aborted_callback=None,
                 ask_user_for_next_operation_callback=None):
        """Create a transaction with the given id and user callbacks.

        The callbacks let the user react to operation completion/failure,
        transaction abort, and requests for the next operation to enqueue.
        """
        self._transaction_id = transaction_id
        self._is_read_only = is_read_only
        self._waiting_operations_queue = []  # list of instances of `Operation`.
        self._is_completed = False
        self._is_aborted = False
        # Assigned once by the scheduler via the `timestamp` property setter.
        self._timestamp = None

        # To be called after an operation has been completed.
        self._on_operation_complete_callback = on_operation_complete_callback
        # To be called after an operation has failed (and now waiting till next attempt) due to locks.
        self._on_operation_failed_callback = on_operation_failed_callback
        # To be called after a transaction has been aborted by the scheduler.
        self._on_transaction_aborted_callback = on_transaction_aborted_callback
        # To be called by the scheduler when the waiting operations queue is empty and
        # `has_waiting_operation_to_perform()` is called (it is called by the scheduler inside of `run()`.
        self._ask_user_for_next_operation_callback = ask_user_for_next_operation_callback

        # Transactions are stored in a list, stored by transaction id, so that the scheduler can
        # iterate over the transactions by the order of their transaction id.
        # Each transaction stores a pointer to its own node in the list, so that given a transaction
        # we could find efficiently the next & previous transactions in the list the transaction
        # belongs to.
        self.transactions_by_tid_list_node = None

        # When a transaction tries to perform its operation, and the scheduler cannot acquire locks
        # because of other transactions, it assigns to this public field the set of transactions
        # that the current transaction is waiting for. It is used later in the run-log printings.
        # The user can access this field later.
        self.waits_for = None
@property
def transaction_id(self):
return self._transaction_id
@property
def is_read_only(self):
return self._is_read_only
@property
def is_completed(self):
return self._is_completed
@property
def is_aborted(self):
return self._is_aborted
@property
def is_finished(self):
return self._is_aborted or self._is_completed
@property
def timestamp(self):
assert self._timestamp is not None
return self._timestamp
@timestamp.setter
def timestamp(self, ts: Timestamp):
assert self._timestamp is None # can be set only once in a life of a transaction.
self._timestamp = ts
@property
def has_timestamp(self):
return self._timestamp is not None
def peek_next_operation(self):
assert len(self._waiting_operations_queue) > 0
return self._waiting_operations_queue[0]
def ask_user_for_next_operation(self, scheduler):
if self._ask_user_for_next_operation_callback is not None:
self._ask_user_for_next_operation_callback(self, scheduler)
def has_waiting_operation_to_perform(self, scheduler):
if len(self._waiting_operations_queue) < 1:
self.ask_user_for_next_operation(scheduler)
return len(self._waiting_operations_queue) > 0
# Returns the operation that we tried to perform (if exists one in the waiting queue).
# If there is no operation in the operations waiting queue, ask the user for one.
# To check whether it has been performed, use `operation.is_completed`.
def try_perform_next_operation(self, scheduler):
assert len(self._waiting_operations_queue) > 0
# Reset the "wait_for" field. It might be set by the scheduler if the operation
# would fail to perform because it waits for another operation. In that case
# the scheduler would assign to this field a set of transaction we wait for.
self.waits_for = None
next_operation = self._waiting_operations_queue[0]
next_operation.try_perform(scheduler)
# The scheduler might abort the transaction when `scheduler.try_write(..)`
# calls `scheduler.try_write(..)` or `scheduler.try_read(..)`.
# In that case, `scheduler.try_write(..)` would return and we would each here.
# We should just return. The caller scheduler would continue handle this case.
if self.is_aborted:
return next_operation
# The call to `next_operation.try_perform(..)` has failed, bacauuse it
# called one of `scheduler.try_write(..)` or `scheduler.try_read(..)`,
# and this inner call has been failed (for example: couldn't acquire lock).
if not next_operation.is_completed:
if self._on_operation_failed_callback:
self._on_operation_failed_callback(self, scheduler, next_operation)
return next_operation
# The operation has been completed. Remove it from the waiting queue.
queue_head = self._waiting_operations_queue.pop(0) # remove the list head
assert queue_head == next_operation
if next_operation.get_type() == 'commit':
self._is_completed = True
if self._on_operation_complete_callback:
# The user callback might now add the next operation.
self._on_operation_complete_callback(self, scheduler, next_operation)
return next_operation
# Called by the scheduler when it decides to abort this transaction.
# Notice: Called by the scheduler after removing this transaction from it's transactions list.
# It might be called in the following trace:
# transaction.try_perform_next_operation(..)
# next_operation.try_perform(..)
# scheduler.try_write(..)
# transaction.abort(..)
def abort(self, scheduler, reason):
assert not self._is_aborted and not self._is_completed
assert self._transaction_id is not None
assert scheduler.get_transaction_by_id(self._transaction_id) is None
self._is_aborted = True
# Call the user callback.
if self._on_transaction_aborted_callback:
self._on_transaction_aborted_callback(self, scheduler, reason)
# Called by the user.
def add_operation(self, operation: Operation):
assert not self._is_read_only or operation.get_type() != 'write'
self._waiting_operations_queue.append(operation)
operation.transaction_id = self.transaction_id
| 3.03125 | 3 |
reinvent-2019/polyglot-bot/pi/recorder.py | kienpham2000/aws-builders-fair-projects | 2 | 12759073 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# recorder.py
#
# Copyright 2019 <<EMAIL>>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301, USA.
#
#
import os
import time
from AWSIoTPythonSDK.MQTTLib import AWSIoTMQTTClient
import logging
import time
import argparse
import json
from random import randint
# Configure logging for the AWS IoT SDK (DEBUG to stderr).
logger = logging.getLogger("AWSIoTPythonSDK.core")
logger.setLevel(logging.DEBUG)
streamHandler = logging.StreamHandler()
formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
streamHandler.setFormatter(formatter)
logger.addHandler(streamHandler)
# AWS IoT connection parameters, hard-coded for this device.
host = 'a6zwxk9vm9bfa-ats.iot.us-west-2.amazonaws.com'
rootCAPath = 'root-CA.crt'
certificatePath = 'pi.cert.pem'
privateKeyPath = 'pi.private.key'
port = 8883
clientId = 'basicPubSub'
topic = 'sdk/test/Python'
myAWSIoTMQTTClient = None
fileId= ''  # NOTE(review): module-level value is never read; callbacks create local fileId names.
def recordAudio(filename, key):
    """Record 3 seconds of 48 kHz audio to the Desktop, upload it to S3 under
    `key`, and — for greetings — launch the intro playback in the background."""
    print("Starting Recorder")
    local_path = '/home/pi/Desktop/' + filename
    record_and_upload = (
        'arecord ' + local_path
        + ' -t wav -D sysdefault:CARD=1 -d 3 -r 48000;aws s3 cp '
        + local_path + ' s3://bdfairdev/' + key + '/' + filename
    )
    os.system(record_and_upload)
    print("Recording complete and sent to S3")
    if key != 'greetings':
        return
    # Play the canned intro and start the greeting flow, both detached.
    os.system("ffplay -nodisp -autoexit /home/pi/PolyglotRobot/generalIntro1.mp3 >/dev/null 2>&1 &")
    os.system(" python3 /home/pi/PolyglotRobot/avainitialgreetings.py &")
def recordRawAudio(filename, key):
    """Record 3 seconds of raw stereo audio (S16_LE, 22050 Hz) to the Desktop
    and upload it to S3 under `key`."""
    print("Starting Recorder")
    local_path = '/home/pi/Desktop/' + filename
    record_and_upload = (
        'arecord ' + local_path
        + ' -c 2 -f S16_LE -r 22050 -t wav -D sysdefault:CARD=1 -d 3;aws s3 cp '
        + local_path + ' s3://bdfairdev/' + key + '/' + filename
    )
    os.system(record_and_upload)
    print("Recording complete and sent to S3")
# Custom MQTT message callback, invoked by the AWS IoT client for every
# message received on the subscribed topic.
def customCallback(client, userdata, message):
    """Handle a response message: fetch synthesized audio from S3, play it,
    and record the next utterance tagged with the detected language."""
    print("Received a new message: ")
    print(message.payload)
    payload = json.loads(message.payload)
    langDetected = payload["language"]
    output=payload["s3output"]
    outputType=payload["type"]
    print(output)
    # Pull the synthesized speech from S3 to a fixed local path.
    cmd ='aws s3 cp ' + output + ' /home/pi/Desktop/output.mp3'
    os.system(cmd)
    if outputType == 'weather':
        # Play the language-specific greeting first, detached.
        cmd='python3 ' + langDetected+'_avagreeting.py & >/dev/null 2>&1'
        print(cmd)
        os.system(cmd)
        time.sleep(2)
    cmd='ffplay -nodisp -autoexit /home/pi/Desktop/output.mp3 >/dev/null 2>&1'
    os.system(cmd)
    print("from topic: ")
    print(message.topic)
    print("--------------\n\n")
    # NOTE(review): these create a *local* fileId, shadowing the module-level
    # fileId defined above — presumably intentional, but verify.
    fileId = str(randint(123456,234532))
    fileId= fileId +'.wav'
    if outputType == 'goodbye':
        # Restart the whole greeting flow before recording again.
        main(None)
    recordRawAudio(langDetected+'_'+fileId, outputType)
def waitForResponse():
    """Connect to AWS IoT, subscribe to `topic` with `customCallback`, then
    block forever so the MQTT client thread can keep delivering messages.

    Never returns; all real work happens inside the subscription callback.
    """
    myAWSIoTMQTTClient = AWSIoTMQTTClient(clientId)
    myAWSIoTMQTTClient.configureEndpoint(host, port)
    myAWSIoTMQTTClient.configureCredentials(rootCAPath, privateKeyPath, certificatePath)
    myAWSIoTMQTTClient.configureAutoReconnectBackoffTime(1, 32, 20)
    myAWSIoTMQTTClient.configureOfflinePublishQueueing(-1)  # Infinite offline publish queueing
    myAWSIoTMQTTClient.configureDrainingFrequency(2)  # Draining: 2 Hz
    myAWSIoTMQTTClient.configureConnectDisconnectTimeout(10)  # 10 sec
    myAWSIoTMQTTClient.configureMQTTOperationTimeout(5)  # 5 sec
    myAWSIoTMQTTClient.connect()
    myAWSIoTMQTTClient.subscribe(topic, 1, customCallback)
    time.sleep(2)
    # Keep the process alive forever; messages are handled asynchronously.
    # (The original kept an unused `loopCount` here and a comment claiming it
    # published in the loop — it never publishes.)
    while True:
        time.sleep(2)
def main(args):
    """Entry point: record one greeting clip, then wait for responses forever."""
    time.sleep(2)
    # Random file name so repeated recordings don't collide in S3.
    wav_name = str(randint(123456,234532)) + '.wav'
    recordAudio(wav_name, 'greetings')
    waitForResponse()
    return 0
if __name__ == '__main__':
    import sys
    # Runs until interrupted: waitForResponse() inside main() never returns.
    sys.exit(main(sys.argv))
| 2.125 | 2 |
malib/environments/pbeauty_game.py | wwxFromTju/malib | 6 | 12759074 | <reponame>wwxFromTju/malib
import numpy as np
from malib.spaces import Discrete, Box, MASpace, MAEnvSpec
from malib.environments.base_game import BaseGame
from malib.error import EnvironmentNotFound, RewardTypeNotFound, WrongActionInputLength
class PBeautyGame(BaseGame):
    """N-player p-beauty contest (plus a stub 'entry' game).

    Each agent picks an action in ``action_range``; for 'pbeauty' actions are
    mapped to [0, 100] and rewarded by their distance to ``p * mean(actions)``,
    with several reward shapings available (see `get_game_list`).
    """
    def __init__(
        self,
        agent_num,
        game_name="pbeauty",
        p=0.67,
        reward_type="abs",
        action_range=(-1.0, 1.0),
    ):
        self.agent_num = agent_num
        self.p = p
        self.game_name = game_name
        self.reward_type = reward_type
        self.action_range = action_range
        # One scalar continuous action per agent; observations are trivial.
        self.action_spaces = MASpace(
            tuple(Box(low=-1.0, high=1.0, shape=(1,)) for _ in range(self.agent_num))
        )
        self.observation_spaces = MASpace(
            tuple(Discrete(1) for _ in range(self.agent_num))
        )
        self.env_specs = MAEnvSpec(self.observation_spaces, self.action_spaces)
        self.t = 0
        self.rewards = np.zeros((self.agent_num,))
        if self.game_name not in PBeautyGame.get_game_list():
            raise EnvironmentNotFound(f"The game {self.game_name} doesn't exist")
        if self.game_name == "pbeauty":
            if (
                self.reward_type
                not in PBeautyGame.get_game_list()[self.game_name]["reward_type"]
            ):
                raise RewardTypeNotFound(
                    f"The reward type {self.reward_type} doesn't exist"
                )
    def step(self, actions):
        """Advance one round; returns (state_n, reward_n, done_n, info)."""
        if len(actions) != self.agent_num:
            raise WrongActionInputLength(
                f"Expected number of actions is {self.agent_num}"
            )
        actions = np.array(actions).reshape((self.agent_num,))
        reward_n = np.zeros((self.agent_num,))
        if self.game_name == "pbeauty":
            # Map actions from [-1, 1] to [0, 100].
            actions = (actions + 1.0) * 50.0
            action_mean = np.mean(actions) * self.p
            deviation_abs = np.abs(actions - action_mean)
            if self.reward_type == "abs":
                reward_n = -deviation_abs
            elif self.reward_type == "one":
                # Winner-takes-all: only the closest agent is rewarded.
                i = np.argmin(deviation_abs)
                reward_n[i] = 1.0
            elif self.reward_type == "sqrt":
                reward_n = -np.sqrt(deviation_abs)
            elif self.reward_type == "square":
                reward_n = -np.square(deviation_abs)
        elif self.game_name == "entry":
            # TODO: the 'entry' game reward is not implemented yet.
            actions = (actions + 1.0) / 2.0
        state_n = np.array(list([[0.0 * i] for i in range(self.agent_num)]))
        info = {}
        # Every episode lasts exactly one round.
        done_n = np.array([True] * self.agent_num)
        self.t += 1
        return state_n, reward_n, done_n, info
    def reset(self):
        """Reset to the (trivial, all-zero) initial observations."""
        return np.array(list([[0.0 * i] for i in range(self.agent_num)]))
    def render(self, mode="human", close=False):
        if mode == "human":
            print(self.__str__())
    def get_rewards(self):
        return self.rewards
    def terminate(self):
        pass
    @staticmethod
    def get_game_list():
        """Supported game names and their valid reward types."""
        return {
            "pbeauty": {"reward_type": ["abs", "one", "sqrt", "square"]},
            "entry": {"reward_type": []},
        }
    def __str__(self):
        content = "Game Name {}, Number of Agent {}, Action Range {}\n".format(
            self.game_name, self.agent_num, self.action_range
        )
        return content
| 2.5625 | 3 |
wavespec/Test/LS.py | mattkjames7/wavespec | 1 | 12759075 | '''
Test the FFT functions
'''
import numpy as np
from .. import LombScargle as LS
import matplotlib.pyplot as plt
import matplotlib.colors as colors
from mpl_toolkits.axes_grid1 import make_axes_locatable
from ..Tools.mode import mode
from ..Tools.DetectGaps import DetectGaps
from ..Spectrogram.SpectrogramPlotter import SpectrogramPlotter
def Spectrum():
    """Demo: Lomb-Scargle periodogram of two summed cosines on an unevenly
    sampled time base; plots the time series (top) and power spectrum (bottom)."""
    # pick two frequencies
    f0 = 0.002
    f1 = 0.005
    # amplitudes
    A0 = 2.0
    A1 = 1.5
    # phases
    p0 = np.pi/2.0
    p1 = 0.0
    # time series: uniform grid with random jitter added, then re-sorted
    t = np.arange(1000.0)
    dt = np.random.rand(t.size)*5
    t = t + dt
    t.sort()
    v0 = A0*np.cos(2*np.pi*f0*t + p0)
    v1 = A1*np.cos(2*np.pi*f1*t + p1)
    v = v0 + v1
    # frequencies to evaluate the periodogram at
    freq = np.arange(2000,dtype='float32')/(np.float32(4000*1.0))
    # spectrum (stray debug print of freq.max() removed)
    power,A,phase,fr,fi = LS.LombScargle(t,v,freq)
    # figure
    fig = plt
    fig.figure(figsize=(8,11))
    ax0 = fig.subplot2grid((2,1),(0,0))
    ax1 = fig.subplot2grid((2,1),(1,0))
    ax0.plot(t,v0,color='red',linestyle='--')
    ax0.plot(t,v1,color='orange',linestyle='--')
    ax0.plot(t,v,color='black',linestyle='-')
    ax1.plot(freq*1000.0,power,color='blue')
    ax0.set_xlabel('$t$ (s)')
    ax1.set_ylabel('Power')
    ax1.set_xlabel('Frequency (mHz)')
    fmx = np.min([freq.max(),1.5*np.max([f0,f1])])
    ax1.set_xlim(0,fmx*1000)
def Spectrogram():
    """Demo: sliding-window Lomb-Scargle spectrogram, computed and plotted
    in one step via LS.PlotSpectrogram."""
    # pick two frequencies
    f0 = 0.002
    f1 = 0.005
    # amplitudes
    A0 = 2.0
    A1 = 1.5
    # phases
    p0 = np.pi/2.0
    p1 = 0.0
    # time series: uniform grid with random jitter added, then re-sorted
    t = np.arange(10800.0)
    dt = np.random.rand(t.size)*5
    t = t + dt
    t.sort()
    v0 = A0*np.cos(2*np.pi*f0*t + p0)
    v1 = A1*np.cos(2*np.pi*f1*t + p1)
    v = v0 + v1
    # window length and window slip (same units as t)
    wind = 1800
    slip = 200
    # frequencies
    freq = np.arange(900,dtype='float32')/(np.float32(1800*1.0))
    # figure
    fig = plt
    fig.figure(figsize=(8,11))
    ax0 = fig.subplot2grid((2,1),(0,0))
    ax1 = fig.subplot2grid((2,1),(1,0))
    ax0.plot(t,v0,color='red',linestyle='--')
    ax0.plot(t,v1,color='orange',linestyle='--')
    ax0.plot(t,v,color='black',linestyle='-')
    ax0.set_xlabel('Time (s)')
    # spectrogram (compute + plot in one call)
    ax1,Nw,Freq,Spec = LS.PlotSpectrogram(t,v,wind,slip,Freq=freq,FreqAxisUnits='mHz',fig=fig,maps=[1,2,0,1])
    fmx = np.min([Freq.max(),1.5*np.max([f0,f1])])
    ax1.set_ylim(0,fmx*1000)
def Spectrogram2():
    """Demo: compute the Lomb-Scargle spectrogram first (LS.Spectrogram),
    then plot the precomputed result with LS.PlotSpectrogram."""
    # pick two frequencies
    f0 = 0.002
    f1 = 0.005
    # amplitudes
    A0 = 2.0
    A1 = 1.5
    # phases
    p0 = np.pi/2.0
    p1 = 0.0
    # time series: uniform grid with random jitter added, then re-sorted
    t = np.arange(10800.0)
    dt = np.random.rand(t.size)*5
    t = t + dt
    t.sort()
    v0 = A0*np.cos(2*np.pi*f0*t + p0)
    v1 = A1*np.cos(2*np.pi*f1*t + p1)
    v = v0 + v1
    # window length and window slip (same units as t)
    wind = 1800
    slip = 200
    # frequencies
    freq = np.arange(900,dtype='float32')/(np.float32(1800*1.0))
    # figure
    fig = plt
    fig.figure(figsize=(8,11))
    ax0 = fig.subplot2grid((2,1),(0,0))
    ax1 = fig.subplot2grid((2,1),(1,0))
    ax0.plot(t,v0,color='red',linestyle='--')
    ax0.plot(t,v1,color='orange',linestyle='--')
    ax0.plot(t,v,color='black',linestyle='-')
    ax0.set_xlabel('Time (s)')
    Nw,Freq,Spec = LS.Spectrogram(t,v,wind,slip,Freq=freq)
    # spectrogram: plot from the precomputed Freq/Spec
    ax1,Nw,Freq,Spec = LS.PlotSpectrogram(Freq,Spec,FreqAxisUnits='mHz',fig=fig,maps=[1,2,0,1])
    fmx = np.min([Freq.max(),1.5*np.max([f0,f1])])
    ax1.set_ylim(0,fmx*1000)
def Spectrogram3():
    """Demo: compute the Lomb-Scargle spectrogram (LS.Spectrogram), then plot
    it directly with the generic SpectrogramPlotter."""
    # pick two frequencies
    f0 = 0.002
    f1 = 0.005
    # amplitudes
    A0 = 2.0
    A1 = 1.5
    # phases
    p0 = np.pi/2.0
    p1 = 0.0
    # time series: uniform grid with random jitter added, then re-sorted
    t = np.arange(10800.0)
    dt = np.random.rand(t.size)*5
    t = t + dt
    t.sort()
    v0 = A0*np.cos(2*np.pi*f0*t + p0)
    v1 = A1*np.cos(2*np.pi*f1*t + p1)
    v = v0 + v1
    # window length and window slip (same units as t)
    wind = 1800
    slip = 200
    # frequencies
    freq = np.arange(900,dtype='float32')/(np.float32(1800*1.0))
    # figure
    fig = plt
    fig.figure(figsize=(8,11))
    ax0 = fig.subplot2grid((2,1),(0,0))
    ax1 = fig.subplot2grid((2,1),(1,0))
    ax0.plot(t,v0,color='red',linestyle='--')
    ax0.plot(t,v1,color='orange',linestyle='--')
    ax0.plot(t,v,color='black',linestyle='-')
    ax0.set_xlabel('Time (s)')
    Nw,Freq,Spec = LS.Spectrogram(t,v,wind,slip,Freq=freq)
    # spectrogram: frequency axis converted to mHz for the plotter
    ax1 = SpectrogramPlotter(Spec.Tspec,Freq*1000,Spec.Pow,fig=fig,maps=[1,2,0,1])
    fmx = np.min([Freq.max(),1.5*np.max([f0,f1])])
    ax1.set_ylim(0,fmx*1000)
def Spectrogram3D():
    """Demo: 3-component Lomb-Scargle spectrogram (x/y/z signals), plotted
    manually with pcolormesh while skipping gaps in the spectrogram.

    Returns the computed Spec object.
    """
    # pick some frequencies
    fx0 = 0.002
    fx1 = 0.007
    fy0 = 0.007
    fy1 = 0.010
    # amplitudes
    A0 = 2.0
    A1 = 1.5
    # phases
    p0 = np.pi/2.0
    p1 = 0.0
    # time series: uniform grid with random jitter added, then re-sorted
    t = np.arange(10800.0)
    dt = np.random.rand(t.size)*5
    t = t + dt
    t.sort()
    x0 = A0*np.cos(2*np.pi*fx0*t + p0)
    x1 = A1*np.cos(2*np.pi*fx1*t + p1)
    x = x0 + x1
    y0 = A0*np.cos(2*np.pi*fy0*t + p0)
    y1 = A1*np.cos(2*np.pi*fy1*t + p1)
    y = y0 + y1
    z = np.zeros(t.size,dtype='float32')
    # frequencies
    freq = np.arange(900,dtype='float32')/(np.float32(1800*1.0))
    # spectrogram
    wind = 1800
    slip = 200
    Nw,Freq,Spec = LS.Spectrogram3D(t,x,y,z,wind,slip,Freq=freq,CombineComps=True)
    Nf = Freq.size - 1
    S = Spec.xyPow
    f = Freq[:Nf+1]*1000.0
    ts = Spec.Tspec
    xlabel = 'Time (s)'
    # half the typical spectrogram time step, used to centre the mesh cells
    dt = mode(ts[1:] - ts[:-1])/2.0
    scale = [np.nanmin(S),np.nanmax(S)]
    norm = colors.Normalize(vmin=scale[0],vmax=scale[1])
    cmap = plt.cm.get_cmap('gnuplot')
    # find contiguous finite (gap-free) sections of the spectrogram
    # (removed the unused `gaps` array the original also computed here)
    ngd,T0,T1 = DetectGaps(S[:,1])
    # figure
    fig = plt
    fig.figure(figsize=(8,11))
    ax0 = fig.subplot2grid((2,1),(0,0))
    ax1 = fig.subplot2grid((2,1),(1,0))
    ax0.plot(t,x,color='red')
    ax0.plot(t,y,color='orange')
    sm = None
    for i in range(0,ngd):
        # select the good portion of the spectrogram
        use = np.arange(T0[i],T1[i]+1)
        tax = np.append(ts[use]-dt,ts[use[-1]]+dt)
        Stmp = S[use]
        # mesh the axes
        tm,fm = np.meshgrid(tax,f)
        # plot the section
        sm = ax1.pcolormesh(tm.T,fm.T,Stmp,cmap=cmap,norm=norm)
    # colour bar (only if at least one section was plotted)
    fig.subplots_adjust(right=0.8)
    box = ax1.get_position()
    if sm is not None:
        cax = plt.axes([0.05*box.width + box.x1,box.y0+0.1*box.height,box.width*0.025,box.height*0.8])
        cbar = fig.colorbar(sm,cax=cax)
        cbar.set_label('Power')
    # axis labels
    ax1.set_xlabel(xlabel)
    ax1.set_ylabel('$f$ (mHz)')
    fmx = np.min([Freq.max(),1.5*np.max([fx0,fx1,fy0,fy1])])
    ax1.set_ylim(0,fmx*1000)
    return Spec
| 2.703125 | 3 |
Practicas/Practica 2/NodoBroadcast.py | JohannGordillo/Computacion-Distribuida | 0 | 12759076 | <filename>Practicas/Practica 2/NodoBroadcast.py
"""
======================================================================
>> Autor: <NAME>
>> Email: <EMAIL>
>> Fecha: 21/10/2020
======================================================================
Universidad Nacional Autónoma de México
Facultad de Ciencias
Computación Distribuida [2021-1]
Ejercicio 3.
Algoritmo Distribuido para hacer Broadcast.
======================================================================
"""
import simpy
from Canales.Canal import Canal
from Nodo import Nodo
class NodoBroadcast(Nodo):
    """`Nodo` implementation for the distributed broadcast algorithm:
    the root node floods a message down a known spanning tree."""
    def __init__(self, id: int, neighbors: list,
                 input_channel: simpy.Store,
                 output_channel: simpy.Store,
                 children: list):
        """Node constructor."""
        self.id = id
        self.neighbors = neighbors
        self.input_channel = input_channel
        self.output_channel = output_channel
        self.children = children
    def broadcast(self, env: simpy.Environment):
        """SimPy process: take part in building/flooding the spanning tree."""
        # Only the distinguished root node (id = 0) sends the first message.
        if self.id == 0:
            # The message to be broadcast.
            self.data = "Hello, World"
            print(f'El proceso {self.id} inicializa su mensaje {self.data} en la ronda {env.now}')
            yield env.timeout(1)
            self.output_channel.send(self.data, self.children)
        # For the non-distinguished nodes, data starts out as None.
        else:
            self.data = None
            while True:
                # Wait for the message to arrive.
                self.data = yield self.input_channel.get()
                print(f'El proceso {self.id} recibió el mensaje {self.data} en la ronda {env.now}')
                # Forward the message to our children.
                if len(self.children) > 0:
                    yield env.timeout(1)
                    self.output_channel.send(self.data, self.children)
if __name__ == "__main__":
# Tomemos como ejemplo la siguiente gráfica:
example = '''Para el siguiente árbol:
(6)
(1) /
/ /
/ /
(0)-----(2)-----(3)--(4)
\
\
\
(5)
'''
# Creamos los nodos.
graph = list()
# Lista de adyacencias.
adjacencies = [[1, 2], [0], [0, 3], [2, 4, 5, 6],
[3], [3], [3]]
# Hijos de cada nodo.
children = [[1, 2], [], [3], [4, 5, 6], [], [], []]
# Orden de la gráfica.
order = len(adjacencies)
# Inicializamos el ambiente y el canal.
env = simpy.Environment()
pipe = Canal(env)
# Llenado de la gráfica.
for i in range(order):
input_channel = pipe.create_input_channel()
n = NodoBroadcast(i, adjacencies[i], input_channel, pipe, children[i])
graph.append(n)
# Y le decimos al ambiente que lo procese.
for n in graph:
env.process(n.broadcast(env))
# Imprimimos la gráfica de ejemplo.
print(example)
env.run(until=10)
print("\nAl finalizar el algoritmo:")
for n in graph:
print(f"La información del proceso {n.id} es {n.data}")
| 3.140625 | 3 |
app/v1/__init__.py | SyedaZehra95/wfh-demo | 0 | 12759077 | <filename>app/v1/__init__.py
from flask import Blueprint
from flask_restplus import Api
# Versioned API blueprint; templates live alongside this package.
v1_blueprint = Blueprint('v1_blueprint', __name__,
                         template_folder='templates')
# Bases Authorization (API-key header scheme, currently disabled)
# authorizations = {
#     'apiKey': {
#         'type': 'apiKey',
#         'in': 'header',
#         'name': 'Authorization'
#     }
# }
v1_api = Api(v1_blueprint,
             title='WORK_FROM_HOME_API',
             version='1.0',
             description='auth: <NAME>',
             default="auth",
             default_label=''
             # authorizations=authorizations,
             # security='apiKey')
             )
# Namespaces are imported here, after v1_api exists — presumably because the
# modules depend on this package (verify before moving these to the top).
from .modules.registration.resources import registration_ns
from .modules.super_user.resource import super_user_ns
from .modules.applications.resource import applications_ns
from .modules.projects.resources import projects_ns
v1_api.add_namespace(registration_ns)
v1_api.add_namespace(super_user_ns)
v1_api.add_namespace(applications_ns)
v1_api.add_namespace(projects_ns)
| 2.046875 | 2 |
generate_through_all_networks.py | perc3ptr0n/stylegan2-ada-pytorch | 0 | 12759078 | <gh_stars>0
import os
import sys
from glob import glob
import click
from typing import List, Optional
# to add different noises testing
@click.command()
@click.pass_context
@click.option('--outdir', type=str, help='Directory where generated images are written.', required=True, metavar='DIR')
@click.option('--seeds', default='[0,1]', type=str,
              help='Specify desired seeds number separated by comma (,) or hyphen (-)', required=True,)
@click.option('--models-folder', help='Specify folder where models for testing', required=True,)
def run_models_testing(
        ctx: click.Context,
        models_folder: str,
        seeds: Optional[List[int]],
        outdir: str
):
    """Call generate.py command for same set of seeds but with different models
    to compare more deeply models evolution during training.
    """
    # Normalize a trailing slash so the glob pattern stays well-formed.
    if models_folder[-1] == '/':
        models_folder = models_folder[:-1]
    models_list = glob('%s/*.pkl' % models_folder)
    # BUG FIX: the original tested len(models_folder) — the path string, which
    # is never empty here — instead of the list of discovered model files.
    if len(models_list) == 0:
        ctx.fail('This directory \'%s\' does not contain any model (*.pkl)! Aborted.' % models_folder)
    else:
        for model in models_list:
            # Run generate.py once per checkpoint with the same seeds.
            command = 'python generate.py --outdir=%s --seeds=%s --class=1 --network=%s' % (outdir, seeds, model)
            os.system(command)
#----------------------------------------------------------------------------
if __name__ == "__main__":
    run_models_testing()  # click parses sys.argv
#----------------------------------------------------------------------------
| 2.390625 | 2 |
tests/content_type_field_validation_test.py | bram-rongen/contentful-management.py | 30 | 12759079 | from unittest import TestCase
from contentful_management.content_type_field_validation import ContentTypeFieldValidation
class ContentTypeFieldValidationTest(TestCase):
    def test_content_type_field_validation(self):
        """Validation objects expose their JSON keys as attributes, round-trip
        through to_json(), and include dynamically assigned attributes."""
        validation = ContentTypeFieldValidation({
            'size': {'min': 3}
        })
        self.assertEqual(str(validation), "<ContentTypeFieldValidation size='{'min': 3}'>")
        self.assertEqual(validation.to_json(), {
            'size': {'min': 3}
        })
        self.assertEqual(validation.size, {'min': 3})
        # Unknown attributes raise rather than returning None.
        with self.assertRaises(Exception):
            validation.foobar
        # Newly assigned attributes appear in subsequent serializations.
        validation.foo = 'bar'
        self.assertEqual(validation.to_json(), {
            'size': {'min': 3},
            'foo': 'bar'
        })
| 3.234375 | 3 |
gammapy/spectrum/make.py | QRemy/gammapy | 0 | 12759080 | <reponame>QRemy/gammapy
# Licensed under a 3-clause BSD style license - see LICENSE.rst
import logging
import numpy as np
from astropy import units as u
from regions import CircleSkyRegion
from gammapy.maps import WcsGeom
from .core import CountsSpectrum
from .dataset import SpectrumDataset
__all__ = ["SpectrumDatasetMaker"]
log = logging.getLogger(__name__)
class SpectrumDatasetMaker:
    """Make spectrum for a single IACT observation.
    The irfs and background are computed at a single fixed offset,
    which is recommended only for point-sources.
    Parameters
    ----------
    containment_correction : bool
        Apply containment correction for point sources and circular on regions.
    selection : list
        List of str, selecting which maps to make.
        Available: 'counts', 'aeff', 'background', 'edisp'
        By default, all spectra are made.
    """
    available_selection = ["counts", "background", "aeff", "edisp"]
    def __init__(self, containment_correction=False, selection=None):
        self.containment_correction = containment_correction
        # By default build every component.
        if selection is None:
            selection = self.available_selection
        self.selection = selection
    # TODO: move this to a RegionGeom class
    @staticmethod
    def geom_ref(region):
        """Reference geometry to project region"""
        frame = region.center.frame.name
        # Minimal 1x1-pixel TAN-projected geometry centred on the region;
        # only its WCS is used (see `make_counts`).
        return WcsGeom.create(
            skydir=region.center, npix=(1, 1), binsz=1, proj="TAN", frame=frame
        )
    def make_counts(self, region, energy_axis, observation):
        """Make counts.
        Parameters
        ----------
        region : `~regions.SkyRegion`
            Region to compute counts spectrum for.
        energy_axis : `~gammapy.maps.MapAxis`
            Reconstructed energy axis.
        observation: `~gammapy.data.DataStoreObservation`
            Observation to compute counts for.
        Returns
        -------
        counts : `~gammapy.spectrum.CountsSpectrum`
            Counts spectrum
        """
        edges = energy_axis.edges
        counts = CountsSpectrum(
            energy_hi=edges[1:], energy_lo=edges[:-1], region=region
        )
        # Select the observation's events inside the region and histogram them.
        events_region = observation.events.select_region(
            region, wcs=self.geom_ref(region).wcs
        )
        counts.fill_events(events_region)
        return counts
    @staticmethod
    def make_background(region, energy_axis, observation):
        """Make background.
        Parameters
        ----------
        region : `~regions.SkyRegion`
            Region to compute background spectrum for.
        energy_axis : `~gammapy.maps.MapAxis`
            Reconstructed energy axis.
        observation: `~gammapy.data.DataStoreObservation`
            Observation to compute background for.
        Returns
        -------
        background : `~gammapy.spectrum.CountsSpectrum`
            Background spectrum
        """
        if not isinstance(region, CircleSkyRegion):
            raise TypeError(
                "Background computation only supported for circular regions."
            )
        offset = observation.pointing_radec.separation(region.center)
        e_reco = energy_axis.edges
        bkg = observation.bkg
        # Background rate integrated over each reco-energy bin at the
        # region's offset in the field of view.
        data = bkg.evaluate_integrate(
            fov_lon=0 * u.deg, fov_lat=offset, energy_reco=e_reco
        )
        # Solid angle of the circular region (spherical-cap formula).
        solid_angle = 2 * np.pi * (1 - np.cos(region.radius)) * u.sr
        data *= solid_angle
        data *= observation.observation_time_duration
        return CountsSpectrum(
            energy_hi=e_reco[1:], energy_lo=e_reco[:-1], data=data.to_value(""), unit=""
        )
    def make_aeff(self, region, energy_axis_true, observation):
        """Make effective area.
        Parameters
        ----------
        region : `~regions.SkyRegion`
            Region to compute effective area for.
        energy_axis_true : `~gammapy.maps.MapAxis`
            True energy axis.
        observation: `~gammapy.data.DataStoreObservation`
            Observation to compute effective area for.
        Returns
        -------
        aeff : `~gammapy.irf.EffectiveAreaTable`
            Effective area table.
        """
        offset = observation.pointing_radec.separation(region.center)
        aeff = observation.aeff.to_effective_area_table(
            offset, energy=energy_axis_true.edges
        )
        if self.containment_correction:
            if not isinstance(region, CircleSkyRegion):
                raise TypeError(
                    "Containment correction only supported for circular regions."
                )
            # Scale the effective area by the PSF containment fraction
            # within the on region.
            psf = observation.psf.to_energy_dependent_table_psf(theta=offset)
            containment = psf.containment(aeff.energy.center, region.radius)
            aeff.data.data *= containment.squeeze()
        return aeff
    @staticmethod
    def make_edisp(position, energy_axis, energy_axis_true, observation):
        """Make energy dispersion.
        Parameters
        ----------
        position : `~astropy.coordinates.SkyCoord`
            Position to compute energy dispersion for.
        energy_axis : `~gammapy.maps.MapAxis`
            Reconstructed energy axis.
        energy_axis_true : `~gammapy.maps.MapAxis`
            True energy axis.
        observation: `~gammapy.data.DataStoreObservation`
            Observation to compute edisp for.
        Returns
        -------
        edisp : `~gammapy.irf.EDispKernel`
            Energy dispersion
        """
        offset = observation.pointing_radec.separation(position)
        return observation.edisp.to_energy_dispersion(
            offset, e_reco=energy_axis.edges, e_true=energy_axis_true.edges
        )
    def run(self, dataset, observation):
        """Make spectrum dataset.
        Parameters
        ----------
        dataset : `~gammapy.spectrum.SpectrumDataset`
            Spectrum dataset.
        observation: `~gammapy.data.DataStoreObservation`
            Observation to reduce.
        Returns
        -------
        dataset : `~gammapy.spectrum.SpectrumDataset`
            Spectrum dataset.
        """
        kwargs = {
            "name": f"{observation.obs_id}",
            "gti": observation.gti,
            "livetime": observation.observation_live_time_duration,
        }
        # Axes and on region are taken from the reference dataset.
        energy_axis = dataset.counts.energy
        energy_axis_true = dataset.aeff.data.axis("energy")
        region = dataset.counts.region
        # Build only the requested components.
        if "counts" in self.selection:
            kwargs["counts"] = self.make_counts(region, energy_axis, observation)
        if "background" in self.selection:
            kwargs["background"] = self.make_background(
                region, energy_axis, observation
            )
        if "aeff" in self.selection:
            kwargs["aeff"] = self.make_aeff(region, energy_axis_true, observation)
        if "edisp" in self.selection:
            kwargs["edisp"] = self.make_edisp(
                region.center, energy_axis, energy_axis_true, observation
            )
        return SpectrumDataset(**kwargs)
| 2.109375 | 2 |
Task5/model.py | qs956/Taidi-face-recognition | 0 | 12759081 | <filename>Task5/model.py
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Author: 罗梓颖
import tensorflow as tf
import numpy as np
import cv2
from PIL import Image
pic_size = (256,256)  # input size expected by the recognition network
# Label-index -> name mapping saved during training; loaded at import time.
name_map = np.load(r'../Task4/model/name_map.npy')
class FaceDetector:
    """Face detector restored from a frozen TensorFlow graph (.pb file)."""
    def __init__(self, model_path='../Task2/model.pb', gpu_memory_fraction=0.75, visible_device_list='0'):
        """
        Arguments:
            model_path: a string, path to a pb file.
            gpu_memory_fraction: a float number.
            visible_device_list: a string.
        """
        # NOTE(review): gpu_memory_fraction and visible_device_list are
        # accepted but never applied to config_proto below — confirm intent.
        with tf.gfile.GFile(model_path, 'rb') as f:
            graph_def = tf.GraphDef()
            graph_def.ParseFromString(f.read())
        graph = tf.Graph()
        with graph.as_default():
            tf.import_graph_def(graph_def, name='import')
        self.input_image = graph.get_tensor_by_name('import/image_tensor:0')
        self.output_ops = [
            graph.get_tensor_by_name('import/boxes:0'),
            graph.get_tensor_by_name('import/scores:0'),
            graph.get_tensor_by_name('import/num_boxes:0'),
        ]
        config_proto = tf.ConfigProto(log_device_placement=False)
        self.sess = tf.Session(graph=graph, config=config_proto)
    def __call__(self, image, score_threshold = 0.75):
        """Detect faces.
        Arguments:
            image: a numpy uint8 array with shape [height, width, 3],
                that represents a RGB image.
            score_threshold: a float number.
        Returns:
            boxes: a float numpy array of shape [num_faces, 4].
            scores: a float numpy array of shape [num_faces].
        Note that box coordinates are in the order: ymin, xmin, ymax, xmax!
        """
        h, w, _ = image.shape
        # Add the batch dimension expected by the graph.
        image = np.expand_dims(image, 0)
        boxes, scores, num_boxes = self.sess.run(
            self.output_ops, feed_dict={self.input_image: image}
        )
        # Strip the batch dimension and keep only the reported boxes.
        num_boxes = num_boxes[0]
        boxes = boxes[0][:num_boxes]
        scores = scores[0][:num_boxes]
        # Filter detections by confidence.
        to_keep = scores > score_threshold
        boxes = boxes[to_keep]
        scores = scores[to_keep]
        # Scale box coordinates up to pixel units.
        scaler = np.array([h, w, h, w], dtype='float32')
        boxes = boxes * scaler
        return boxes, scores
# class recon_pre:
# def __init__(self,output_size = (256,256)):
# self.output_size = output_size
# def __call__(self,img):
# return cv2.resize(img,output_size)
class ResNet_recon:
    """Face-recognition model restored from a saved TensorFlow checkpoint."""
    def __init__(self,mu_std_path = r'../Task4/model/mu_std.npy',graph_path = r'../Task4/model//model.meta',ckpt_path = r'../Task4/model/'):
        # Training-set mean / std, loaded for input normalization.
        temp = np.load(mu_std_path)
        self.mu,self.sigma = temp[0],temp[1]
        self.G = tf.Graph()
        with self.G.as_default():
            self.sess = tf.Session(graph = self.G)
            self.saver = tf.train.import_meta_graph(graph_path)
            # Grab the input placeholder and output tensors by name.
            self.inputs = self.sess.graph.get_tensor_by_name('x_input:0')
            self.y_pred = self.sess.graph.get_tensor_by_name('output/y_pred:0')
            self.y_proba = self.sess.graph.get_tensor_by_name('output/y_proba:0')
            self.saver.restore(self.sess, tf.train.latest_checkpoint(ckpt_path))
    def __call__(self,inputs):
        """Run the network; returns (predicted labels, class probabilities)."""
        # NOTE(review): the mean/std normalization below is disabled; mu/sigma
        # are loaded but unused — confirm the checkpoint expects raw pixels.
        # inputs -= self.mu.astype(np.float32)
        # inputs /= self.sigma.astype(np.float32)
        y_pred,y_proba = self.sess.run([self.y_pred,self.y_proba],feed_dict = {self.inputs:inputs})
        return y_pred,y_proba
def main(img,face_detect,face_recon,name):
    """Detect the most confident face in `img`, classify it, and draw the box
    plus predicted name/probability onto the frame (which is returned)."""
    # Face detection
    boxes,scores = face_detect(img)
    # Grayscale processing of the best-scoring face crop
    if (len(scores) != 0):
        ymin,xmin,ymax,xmax = boxes[np.argmax(scores)]
        pic_crop = Image.fromarray(img).crop([xmin,ymin,xmax,ymax])
        pic_crop = np.asarray(pic_crop.convert('L'))
        cv2.imshow("gray", pic_crop)
        if cv2.waitKey(1) & 0xFF == ord('q'):
            cv2.destroyAllWindows()
        # pic_crop = img[int(ymin):int(ymax),int(xmin):int(xmax)]
        # pic_crop = cv2.cvtColor(pic_crop,cv2.COLOR_BGR2GRAY)
    else:
        # No face found: return the frame unchanged.
        return img
    # NOTE(review): box coordinates are floats here; some OpenCV versions
    # require ints for rectangle/putText — confirm against the cv2 in use.
    cv2.rectangle(img, (xmin,ymin), (xmax,ymax), (0,0,255), 4)
    # Data preprocessing: resize to the network input size
    pic_crop = cv2.resize(pic_crop,pic_size)
    # pic_crop = pic_crop.astype(np.float32)
    # Face recognition on the flattened grayscale crop
    y_pred,y_proba = face_recon(pic_crop.reshape((1,pic_size[0]*pic_size[1])))
    print('pred:',name[y_pred][0],'proba:',y_proba[0])
    text = str(name[y_pred][0]) + str(y_proba[0])
    cv2.putText(img, text, (xmin,ymin), cv2.FONT_HERSHEY_SIMPLEX, 0.5, (0,0,255), 1)
    return img
if __name__ == '__main__':
    print('正在加载模型,请稍后......')
    face_detect = FaceDetector()
    face_recon = ResNet_recon()
    name = name_map[0]
    print('模型加载完成!')
    cap = cv2.VideoCapture(0)
    while True:
        ret, frame = cap.read()
        if not ret:
            # Camera read failed (device busy/unplugged); stop cleanly
            # instead of feeding None into the detector.
            break
        img = main(frame,face_detect,face_recon,name)
        cv2.imshow("按q退出", img)
        if cv2.waitKey(1) & 0xFF == ord('q'):
            break
    cap.release()  # release the camera
    cv2.destroyAllWindows()
rose/profile/profile.py | good-praxis/pronoun-discord-bot | 1 | 12759082 | <reponame>good-praxis/pronoun-discord-bot
import rose.profile.field_factory as factory
from rose.profile.fields import ProfileField
class Profile:
    """A user's collection of named profile fields.

    Mutations are reported through ``callback`` so the owner (e.g. a
    persistence layer) can react to changes.
    """

    def __init__(self, id, callback=lambda arg: arg):
        self._id = id
        self._fields = dict()
        # Invoked with this profile after every mutation (see add_field).
        self._change_callback = callback

    @property
    def fields(self):
        """Mapping of field name to profile field object."""
        # The original code redefined an identical getter via @fields.getter;
        # that dead duplicate has been removed.
        return self._fields

    def generate_profile(self, username):
        """Return a human-readable, alphabetically sorted profile listing."""
        if self._fields:
            # ProfileField entries render their URL, all others their username.
            ret = ["{}: {}\n".format(k, v.url) if isinstance(v, ProfileField) else
                   "{}: {}\n".format(k, v.username) for k, v in self._fields.items()]
            ret.sort()
            return self.profile_header(username) + "".join(ret)
        else:
            return self.profile_header(username) + "Oh, there hasn't been anything added yet!"

    def profile_header(self, username):
        """Return the bold markdown header for *username*'s profile."""
        return "**Profile of {}**\n\n".format(username)

    def add_field(self, key, value):
        """Create a field object for *value*, store it under *key*, notify."""
        self._fields[key] = (factory.get_field_for(key, value))
        self._change_callback(self)
| 2.609375 | 3 |
python/plotserv/api_core.py | advancedplotting/aplot | 10 | 12759083 | <reponame>advancedplotting/aplot
# Copyright (c) 2014-2015, Heliosphere Research LLC
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# 3. Neither the name of the copyright holder nor the names of its
# contributors may be used to endorse or promote products derived from this
# software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
from matplotlib import pyplot as plt
from cStringIO import StringIO
from PIL import Image
import numpy as np
import os.path as op
import time
from .terminals import remove_none
from .core import resource
from . import errors
@resource('init')
def init(ctx, a):
    """No-op handler for Init.vi; figure creation happens in 'new'."""
    pass
@resource('new')
def new(ctx, a):
    """ Create a new figure, and initialize the axes.

    Plot kind, axis scales, colors, margins and aspect ratio are read from
    the request arguments; unset margins fall back to sensible defaults.

    Returns a string integer with the new plot ID.
    """
    import random
    PLOT_TYPES = {0: 'rect', 1: 'polar'}
    SCALES = {0: 'linear', 1: 'linear', 2: 'log', 3: 'symlog'}
    kind = a.enum('kind', PLOT_TYPES)
    xscale = a.enum('xscale', SCALES)
    yscale = a.enum('yscale', SCALES)
    bgcolor = a.color('bgcolor')
    axiscolor = a.color('axiscolor')
    left = a.float('left')
    right = a.float('right')
    top = a.float('top')
    bottom = a.float('bottom')
    aspect = a.float('aspect')
    # Check polar arguments for consistency
    if kind == 'polar' and xscale != 'linear':
        raise errors.LogNotSupported("Polar plots support only linear scales for X")
    # Right/top default margins are smaller as there are no labels
    left = left if left is not None else 0.12
    bottom = bottom if bottom is not None else 0.12
    right = right if right is not None else 0.10
    top = top if top is not None else 0.10
    width = (1.-left-right)
    height = (1.-bottom-top)
    # Catch reversed margins
    if width < 0:
        width = -1*width
        left = right
    if height < 0:
        # Fixed: previously assigned -1*width here, giving a wrong axes
        # height whenever the vertical margins were reversed.
        height = -1*height
        bottom = top
    # Non-positive (or missing) aspect means "use matplotlib's default".
    # Explicit None check keeps this safe on Python 3, where None <= 0 raises.
    if aspect is None or aspect <= 0:
        aspect = None
    k = { 'axisbg': axiscolor,
          'polar': kind == 'polar',
          'aspect': aspect, }
    remove_none(k)
    plotid = random.randint(1,2**30)
    f = ctx.new(plotid)
    ctx.polar = (kind == 'polar')
    plt.axes((left, bottom, width, height), **k)
    if bgcolor is not None:
        f.set_facecolor(bgcolor)
    else:
        f.set_facecolor('w')
    # Manually setting the scale to linear screws up the default axis range
    if xscale != 'linear':
        plt.xscale(xscale)#, nonposx='clip')
    if yscale != 'linear':
        plt.yscale(yscale)#, nonposy='clip')
    ctx.xscale = xscale
    ctx.yscale = yscale
    return str(plotid)
@resource('close')
def close(ctx, a):
    """Close the given Plot ID; any failure (e.g. unknown ID) is ignored."""
    target = a.plotid()
    try:
        ctx.set(target)
        ctx.close()
    except Exception:
        # Best-effort close: a missing or already-closed plot is not an error.
        pass
@resource('isvalid')
def isvalid(ctx, a):
    """ Test if an identifier is known.

    Returns a string '1' if valid, '0' otherwise.
    """
    return "1" if ctx.isvalid(a.plotid()) else "0"
@resource('view')
def view(ctx, a):
    """ Represents View.vi, optimized for rendering to a Picture.

    Renders the figure and returns height/width headers followed by raw
    BGRA pixel data, ready to blit into a LabVIEW Picture.
    """
    plotid = a.plotid()
    f = ctx.set(plotid)
    sio = StringIO()
    # Step 1: Save the figure to a raw RGBA buffer
    plt.savefig(sio, format='rgba', dpi=f.get_dpi(), facecolor=f.get_facecolor())
    sio.seek(0)
    # Step 2: Import the image into PIL
    xsize, ysize = f.canvas.get_width_height()
    img = Image.fromstring("RGBA", (xsize, ysize), sio.read())
    sio.close()
    # Step 3: Process the alpha channel out
    # (paste onto a white background using the alpha band as the mask)
    img.load()
    newimg = Image.new('RGB', img.size, (255, 255, 255))
    newimg.paste(img, mask=img.split()[3])
    # Step 4: Generate ARGB buffer (in little-endian format)
    # NOTE(review): `a` below shadows the argument-accessor parameter `a`.
    # Harmless here (no further arguments are read), but worth renaming.
    r, g, b = tuple(np.fromstring(x.tostring(), dtype='u1') for x in newimg.split())
    a = np.empty((xsize*ysize,4), dtype='u1')
    a[:,0] = b
    a[:,1] = g
    a[:,2] = r
    a[:,3] = 0
    # Step 5: Return to LabVIEW, with size headers
    # (height then width as uint32, followed by the pixel buffer)
    sio = StringIO()
    sio.write(np.array(ysize, 'u4').tostring())
    sio.write(np.array(xsize, 'u4').tostring())
    sio.write(a.tostring())
    sio.seek(0)
    return sio.read()
@resource('save')
def save(ctx, a):
    """ Represents Save.vi.

    Renders the figure for the given plot ID and returns the encoded file
    bytes in the format implied by the extension of 'path'.
    """
    # Supported output formats, keyed by (lowercased) file extension.
    EXTENSIONS = { '.pdf': 'pdf',
                   '.png': 'png',
                   '.bmp': 'bmp',
                   '.tif': 'tiff',
                   '.tiff': 'tiff',
                   '.jpg': 'jpeg',
                   '.jpeg': 'jpeg',
                   '.gif': 'gif', }
    plotid = a.plotid()
    name = a.string('path')
    f = ctx.set(plotid)
    root, ext = op.splitext(name)
    ext = ext.lower()
    if len(ext) == 0:
        raise errors.UnrecognizedFileExtension('No file extension: "%s"' % name)
    if ext not in EXTENSIONS:
        raise errors.UnrecognizedFileExtension('Unknown file extension: "%s"' % ext)
    format = EXTENSIONS[ext]
    vector_formats = ('pdf',)
    sio = StringIO()
    # PDF doesn't need further processing by PIL,
    # so we can short-circuit and return here.
    if format in vector_formats:
        plt.savefig(sio, format=format)
        sio.seek(0)
        return sio.read()
    # Step 1: Save the figure to a raw RGBA buffer
    plt.savefig(sio, format='rgba', dpi=f.get_dpi(), facecolor=f.get_facecolor())
    sio.seek(0)
    # Step 2: Import the image into PIL
    xsize, ysize = f.canvas.get_width_height()
    img = Image.fromstring("RGBA", (xsize, ysize), sio.read())
    # Step 3: Process the alpha channel out
    # (flatten onto a white background using the alpha band as the mask)
    img.load()
    newimg = Image.new('RGB', img.size, (255, 255, 255))
    newimg.paste(img, mask=img.split()[3])
    img = newimg
    # Step 4: Export from PIL to the destination format
    sio = StringIO()
    img.save(sio, format=format)
    sio.seek(0)
    return sio.read()
| 1.710938 | 2 |
greenbutton/enums.py | rybalko/greenbutton-python | 0 | 12759084 | #!/usr/bin/python
# -*- coding: utf-8 -*-
from enum import Enum
class AccumulationBehaviourType(Enum):
    """How a reading value accumulates over time."""
    notApplicable = 0
    bulkQuantity = 1
    cumulative = 3
    deltaData = 4
    indicating = 6
    summation = 9
    instantaneous = 12
class CommodityType(Enum):
    """Commodity being measured (electricity, gas, water, ...)."""
    notApplicable = 0
    electricity = 1
    air = 4
    naturalGas = 7
    propane = 8
    potableWater = 9
    steam = 10
    wastewater = 11
    heatingFluid = 12
    coolingFluid = 13
class ConsumptionTierType(Enum):
    """Block/tier index for tiered consumption schedules."""
    notApplicable = 0
    blockTier1 = 1
    blockTier2 = 2
    blockTier3 = 3
    blockTier4 = 4
    blockTier5 = 5
    blockTier6 = 6
    blockTier7 = 7
    blockTier8 = 8
    blockTier9 = 9
    blockTier10 = 10
    blockTier11 = 11
    blockTier12 = 12
    blockTier13 = 13
    blockTier14 = 14
    blockTier15 = 15
    blockTier16 = 16
class CurrencyCode(Enum):
    """Numeric currency codes, with display symbol and UOM-id helpers."""
    na = 0
    aus = 36
    cad = 124
    usd = 840
    eur = 978
    @property
    def symbol(self):
        """Display symbol; the generic currency sign is the fallback."""
        dollar_codes = (CurrencyCode.aus, CurrencyCode.cad, CurrencyCode.usd)
        if self in dollar_codes:
            return '$'
        if self is CurrencyCode.eur:
            return '€'
        return '¤'
    @property
    def uom_id(self):
        """Unit-of-measure identifier string, or None when unmapped."""
        return CURRENCY_UOM_IDS.get(self)
class DataQualifierType(Enum):
    """Statistical qualifier applied to a reading."""
    notApplicable = 0
    average = 2
    maximum = 8
    minimum = 9
    normal = 12
class FlowDirectionType(Enum):
    """Direction of flow for the measured commodity."""
    notApplicable = 0
    forward = 1
    reverse = 19
class KindType(Enum):
    """Kind of quantity a reading represents."""
    notApplicable = 0
    currency = 3
    current = 4
    currentAngle = 5
    date = 7
    demand = 8
    energy = 12
    frequency = 15
    power = 37
    powerFactor = 38
    quantityPower = 40
    voltage = 54
    voltageAngle = 55
    distortionPowerFactor = 64
    volumetricFlow = 155
class PhaseCode(Enum):
    """Electrical phase(s) a measurement applies to."""
    notApplicable = 0
    c = 32
    ca = 40
    b = 64
    bc = 66
    a = 128
    an = 129
    ab = 132
    abc = 224
    s2 = 256
    s2n = 257
    s1 = 512
    s1n = 513
    s1s2 = 768
    s1s2n = 769
class QualityOfReading(Enum):
    """Validation/estimation status of a reading value."""
    valid = 0
    manuallyEdited = 7
    estimatedUsingReferenceDay = 8
    estimatedUsingLinearInterpolation = 9
    questionable = 10
    derived = 11
    projected = 12
    mixed = 13
    raw = 14
    normalizedForWeather = 15
    other = 16
    validated = 17
    verified = 18
class ServiceKind(Enum):
    """Kind of utility service being metered."""
    electricity = 0
    naturalGas = 1
    water = 2
    pressure = 4
    heat = 5
    cold = 6
    communication = 7
    time = 8
class TimeAttributeType(Enum):
    """Time period or interval length a reading covers."""
    notApplicable = 0
    tenMinutes = 1
    fifteenMinutes = 2
    twentyFourHours = 4
    thirtyMinutes = 5
    sixtyMinutes = 7
    daily = 11
    monthly = 13
    present = 15
    previous = 16
    weekly = 24
    forTheSpecifiedPeriod = 32
    daily30MinuteFixedBlock = 79
class UomType(Enum):
    """Unit-of-measure codes; see UOM_SYMBOLS below for display strings."""
    notApplicable = 0
    amps = 5
    volts = 29
    joules = 31
    hertz = 33
    watts = 38
    cubicMeters = 42
    voltAmps = 61
    voltAmpsReactive = 63
    cosine = 65
    voltsSquared = 67
    ampsSquared = 69
    voltAmpHours = 71
    wattHours = 72
    voltAmpReactiveHours = 73
    ampHours = 106
    cubicFeet = 119
    cubicFeetPerHour = 122
    cubicMetersPerHour = 125
    usGallons = 128
    usGallonsPerHour = 129
    imperialGallons = 130
    imperialGallonsPerHour = 131
    britishThermalUnits = 132
    britishThermalUnitsPerHour = 133
    liters = 134
    litersPerHour = 137
    gaugePascals = 140
    absolutePascals = 155
    therms = 169
# Human-readable display symbol for each unit of measure.
UOM_SYMBOLS = {
    UomType.notApplicable: '',
    UomType.amps: 'A',
    UomType.volts: 'V',
    UomType.joules: 'J',
    UomType.hertz: 'Hz',
    UomType.watts: 'W',
    UomType.cubicMeters: 'm³',
    UomType.voltAmps: 'VA',
    UomType.voltAmpsReactive: 'VAr',
    UomType.cosine: 'cos',
    UomType.voltsSquared: 'V²',
    UomType.ampsSquared: 'A²',
    UomType.voltAmpHours: 'VAh',
    UomType.wattHours: 'Wh',
    UomType.voltAmpReactiveHours: 'VArh',
    UomType.ampHours: 'Ah',
    UomType.cubicFeet: 'ft³',
    UomType.cubicFeetPerHour: 'ft³/h',
    UomType.cubicMetersPerHour: 'm³/h',
    UomType.usGallons: 'US gal',
    UomType.usGallonsPerHour: 'US gal/h',
    UomType.imperialGallons: 'IMP gal',
    UomType.imperialGallonsPerHour: 'IMP gal/h',
    UomType.britishThermalUnits: 'BTU',
    UomType.britishThermalUnitsPerHour: 'BTU/h',
    UomType.liters: 'L',
    UomType.litersPerHour: 'L/h',
    UomType.gaugePascals: 'Pag',
    UomType.absolutePascals: 'Pa',
    UomType.therms: 'thm',
}
# Unit-of-measure identifier strings ("<quantity kind>_<unit token>").
# Only a subset of UomType has an identifier here.
UOM_IDS = {
    UomType.amps: 'electric current_A',
    UomType.volts: 'electric potential_V',
    UomType.joules: 'energy_J',
    UomType.hertz: 'frequency_Hz',
    UomType.watts: 'power_W',
    UomType.cubicMeters: 'volume_m**3',
    UomType.voltAmps: 'apparent power_VA',
    UomType.voltAmpsReactive: 'reactive power_VAR',
    UomType.voltAmpHours: 'apparent energy_VAh',
    UomType.wattHours: 'energy_Wh',
    UomType.voltAmpReactiveHours: 'reactive energy_VARh',
    UomType.ampHours: 'electric charge_Ah',
    UomType.cubicFeet: 'volume_ft**3',
    UomType.usGallons: 'volume_gal',
    UomType.britishThermalUnits: 'energy_BTU',
    UomType.britishThermalUnitsPerHour: 'power_BTU-h',
    UomType.liters: 'volume_L',
    UomType.litersPerHour: 'volumetric flow_L-h',
    UomType.absolutePascals: 'pressure_Pa',
    UomType.therms: 'energy_therm',
}
# Currency codes mapped to their unit-of-measure identifier strings
# (used by CurrencyCode.uom_id).
CURRENCY_UOM_IDS = {
    CurrencyCode.aus: 'currency_AUD',
    CurrencyCode.cad: 'currency_CAD',
    CurrencyCode.usd: 'currency_USD',
    CurrencyCode.eur: 'currency_EUR'
}
| 2.765625 | 3 |
auto/parsing/manager.py | alexei-alexov/ds | 0 | 12759085 | # AAAAA
from parsing.item_parser import ItemParser
from parsing.list_parser import ListParser
import csv
import threading
import time
class Manager:
    """Manage data provided by parsers and to parsers.

    Runs three pipeline stages concurrently:
    a list parser producing car ids, a pool of car-page parsers, and a
    writer thread appending finished records to data.csv.
    """
    # Column order used when writing each record to data.csv.
    order = [
        'bodyType', 'brand', 'color', 'fuelType', 'modelDate', 'name', 'numberOfDoors', 'productionDate',
        'vehicleConfiguration', 'vehicleTransmission', 'engineDisplacement', 'enginePower', 'description',
        'mileage', 'Комплектация', 'Привод', 'Руль', 'Состояние', 'Владельцы', 'ПТС', 'Таможня', 'Владение', 'id',
        'Price'
    ]
    def __init__(self):
        pass
    def load(self):
        """Run the scraping pipeline: list pages -> car pages -> CSV rows."""
        # 600
        list_parser = ListParser(pages=(600, 5000))
        item_parser = ItemParser()
        # Two distinct completion flags: the original single `finished` flag
        # was set as soon as listing ended, which could make the writer exit
        # while car parsers were still producing records.
        listing_done = False   # list parser has produced its last id
        parsing_done = False   # all car parsers have exited
        parsed_data = []
        ids_to_parse = []
        def save_data():
            # Drain parsed_data into data.csv until parsing is finished.
            _id = 1
            with open('data.csv', 'a', newline='', encoding='utf-8') as csv_f:
                writer = csv.writer(csv_f, delimiter=',', quotechar='"')
                while not parsing_done or parsed_data:
                    try:
                        item = parsed_data.pop()
                        item['id'] = _id
                        _id += 1
                        writer.writerow([item.get(key) for key in self.order])
                        csv_f.flush()
                        if _id % 20 == 0:
                            print("Saved %d records" % (_id,))
                    except IndexError:
                        time.sleep(0.5)
                        continue
            print("Saved %d records" % (_id,))
        save_thread = threading.Thread(target=save_data)
        save_thread.start()
        def parse_list():
            # Feed car ids into ids_to_parse, throttling the backlog.
            nonlocal listing_done
            generator = list_parser.parse()
            while True:
                if len(ids_to_parse) >= 1000:
                    time.sleep(1)
                    continue
                try:
                    ids_to_parse.append(next(generator))
                except Exception as error:
                    # StopIteration (normal exhaustion) and real errors both
                    # end the listing phase. Fixed: without the break this
                    # looped forever re-calling next() on a dead generator.
                    print(error)
                    listing_done = True
                    break
        list_parser_thread = threading.Thread(target=parse_list)
        list_parser_thread.start()
        def parse_car():
            # Consume ids until the listing is done and the queue is drained.
            while not listing_done or ids_to_parse:
                try:
                    car_id = ids_to_parse.pop()
                except IndexError:
                    time.sleep(0.5)
                    continue
                try:
                    parsed_data.append(item_parser.parse_page(car_id))
                except Exception as error:
                    print('Cannot parse car page: %s' % (error,))
        car_parsers = [threading.Thread(target=parse_car) for _ in range(3)]
        for car_parser in car_parsers: car_parser.start()
        for car_parser in car_parsers: car_parser.join()
        # Only now is it safe to let the writer drain and exit: every car
        # parser has finished appending to parsed_data.
        parsing_done = True
        save_thread.join()
        print("DONE")
if __name__ == '__main__':
    # Entry point: build a manager and run the full scraping pipeline.
    Manager().load()
AI/trie_test.py | ConnorB737/capstone | 0 | 12759086 | from AI import trie
import itertools
def test_trie():
    """The trie must recognise at least one permutation of the letters."""
    target_trie = trie.Trie()
    letters = "ABCDEFG"
    words = []
    # Try every permutation from length 7 down to length 2 and collect
    # those the trie recognises.
    for length in range(7, 1, -1):
        for combo in itertools.permutations(letters, length):
            candidate = ''.join(combo)
            if target_trie.search(candidate):
                words.append(candidate)
    assert words != []
| 3.59375 | 4 |
stage_1_disentangle/split_data.py | grtzsohalf/Audio-Phonetic-and-Semantic-Embedding | 2 | 12759087 | from utils import *
# Parameters forwarded to utils.split_data in main() below.
feat_dir = '/home/grtzsohalf/Desktop/LibriSpeech'
seq_len = 70
n_files = 200
proportion = 0.975
def main():
    """Split the feature data using the module-level parameters above."""
    split_data(feat_dir, n_files, proportion, seq_len)
if __name__=='__main__':
    main()
| 1.882813 | 2 |
bsstudio/widgets/mplwidget.py | bsobhani/bsstudio | 0 | 12759088 | <filename>bsstudio/widgets/mplwidget.py
# Imports
from PyQt5 import QtWidgets, QtCore
from matplotlib.figure import Figure
import matplotlib.pyplot as plt
from matplotlib.backends.backend_qt5agg import FigureCanvasQTAgg as Canvas
from matplotlib.backends.backend_qt5agg import NavigationToolbar2QT as NavigationToolbar
from matplotlib.backends.backend_qt5 import StatusbarQt
from matplotlib.backend_bases import StatusbarBase
import matplotlib
from .Base import BaseWidget
# Ensure using PyQt5 backend
matplotlib.use('QT5Agg')
#def on_mouse_move(event):
# print('Event received:',event.x,event.y)
# Matplotlib canvas class to create figure
class MplCanvas(Canvas):
    """Qt5Agg canvas owning a single figure with a single axes."""
    def __init__(self):
        self.fig = Figure()
        self.ax = self.fig.add_subplot(111)
        Canvas.__init__(self, self.fig)
        # Let the canvas grow with its parent layout.
        Canvas.setSizePolicy(self, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Expanding)
        Canvas.updateGeometry(self)
        #self.mpl_connect('motion_notify_event',on_mouse_move)
        #self.ax.plot([0, 1], [1, 1])
    def wipe(self):
        """Clear the whole figure and recreate a fresh, empty axes."""
        self.fig.clf()
        self.ax = self.fig.add_subplot(111)
# Matplotlib widget
class MplWidget(QtWidgets.QWidget, BaseWidget):
    """Composite widget: a matplotlib canvas plus its navigation toolbar."""
    def __init__(self, parent=None):
        QtWidgets.QWidget.__init__(self, parent) # Inherit from QWidget
        self.canvas = MplCanvas() # Create canvas object
        self.toolbar = NavigationToolbar(self.canvas, self)
        #self.statusbar = StatusbarBase(self.canvas.manager.toolmanager)
        #self.statusbar = StatusbarQt(self, None)
        self.vbl = QtWidgets.QVBoxLayout() # Set box for plotting
        self.vbl.addWidget(self.toolbar)
        self.vbl.addWidget(self.canvas)
        #self.vbl.addWidget(self.statusbar)
        #self.vbl.addWidget(QtCore.Qt.BottomToolBarArea, NavigationToolbar(self.canvas, self))
        self.setLayout(self.vbl)
        # Coordinate readout format shown in the toolbar while hovering.
        def format_coord(x, y):
            return "x={:0.2f},y={:0.2f}".format(x,y)
        self.canvas.ax.format_coord = format_coord
        #self.canvas.ax.figure.tight_layout()
        #self.canvas.window().statusbar().setVisible(True)
    def sizeHint(self):
        """Default size (px) used by Qt layouts for this widget."""
        return QtCore.QSize(400, 300)
| 2.421875 | 2 |
snmp/datadog_checks/snmp/config.py | tanner-bruce/integrations-core | 0 | 12759089 | # (C) Datadog, Inc. 2010-present
# All rights reserved
# Licensed under Simplified BSD License (see LICENSE)
import ipaddress
from collections import defaultdict
from pyasn1.type.univ import OctetString
from pysnmp import hlapi
from pysnmp.smi import builder, view
from datadog_checks.base import ConfigurationError, is_affirmative
from .resolver import OIDResolver
def to_oid_tuple(oid_string):
    """Return an OID tuple from an OID string, e.g. '1.3.6.1' -> (1, 3, 6, 1)."""
    parts = oid_string.lstrip('.').split('.')
    return tuple(int(part) for part in parts)
class ParsedMetric(object):
    """A scalar metric parsed from the instance configuration."""

    __slots__ = ('name', 'metric_tags', 'forced_type', 'enforce_scalar')

    def __init__(self, name, metric_tags, forced_type, enforce_scalar=True):
        # Assign each constructor argument to the slot of the same name.
        values = (name, metric_tags, forced_type, enforce_scalar)
        for attr, value in zip(self.__slots__, values):
            setattr(self, attr, value)
class ParsedTableMetric(object):
    """A table (columnar) metric parsed from the instance configuration."""

    __slots__ = ('name', 'index_tags', 'column_tags', 'forced_type')

    def __init__(self, name, index_tags, column_tags, forced_type):
        # Assign each constructor argument to the slot of the same name.
        values = (name, index_tags, column_tags, forced_type)
        for attr, value in zip(self.__slots__, values):
            setattr(self, attr, value)
class InstanceConfig:
    """Parse and hold configuration about a single instance."""

    DEFAULT_RETRIES = 5
    DEFAULT_TIMEOUT = 1
    DEFAULT_ALLOWED_FAILURES = 3
    DEFAULT_BULK_THRESHOLD = 0

    def __init__(self, instance, warning, log, global_metrics, mibs_path, profiles, profiles_by_oid):
        # `instance` is the raw config dict; `warning`/`log` are the check's
        # reporting callables, forwarded to parse_metrics.
        self.instance = instance
        self.tags = instance.get('tags', [])
        self.metrics = instance.get('metrics', [])
        profile = instance.get('profile')
        if is_affirmative(instance.get('use_global_metrics', True)):
            self.metrics.extend(global_metrics)
        if profile:
            if profile not in profiles:
                raise ConfigurationError("Unknown profile '{}'".format(profile))
            self.metrics.extend(profiles[profile]['definition']['metrics'])
        self.enforce_constraints = is_affirmative(instance.get('enforce_mib_constraints', True))
        self._snmp_engine, mib_view_controller = self.create_snmp_engine(mibs_path)
        self._resolver = OIDResolver(mib_view_controller, self.enforce_constraints)
        self.ip_address = None
        self.ip_network = None
        # Host-discovery state: hosts found on the network, plus a failure
        # counter used together with `allowed_failures` to evict dead hosts.
        self.discovered_instances = {}
        self.failing_instances = defaultdict(int)
        self.allowed_failures = int(instance.get('discovery_allowed_failures', self.DEFAULT_ALLOWED_FAILURES))
        self.bulk_threshold = int(instance.get('bulk_threshold', self.DEFAULT_BULK_THRESHOLD))
        timeout = int(instance.get('timeout', self.DEFAULT_TIMEOUT))
        retries = int(instance.get('retries', self.DEFAULT_RETRIES))
        ip_address = instance.get('ip_address')
        network_address = instance.get('network_address')
        # Exactly one of ip_address / network_address must be configured.
        if not ip_address and not network_address:
            raise ConfigurationError('An IP address or a network address needs to be specified')
        if ip_address and network_address:
            raise ConfigurationError('Only one of IP address and network address must be specified')
        if ip_address:
            self._transport = self.get_transport_target(instance, timeout, retries)
            self.ip_address = ip_address
            self.tags.append('snmp_device:{}'.format(self.ip_address))
        if network_address:
            if isinstance(network_address, bytes):
                network_address = network_address.decode('utf-8')
            self.ip_network = ipaddress.ip_network(network_address)
        if not self.metrics and not profiles_by_oid:
            raise ConfigurationError('Instance should specify at least one metric or profiles should be defined')
        self._auth_data = self.get_auth_data(instance)
        self.all_oids, self.bulk_oids, self.parsed_metrics = self.parse_metrics(self.metrics, warning, log)
        self._context_data = hlapi.ContextData(*self.get_context_data(instance))

    def resolve_oid(self, oid):
        """Resolve an OID through this instance's MIB-aware resolver."""
        return self._resolver.resolve_oid(oid)

    def refresh_with_profile(self, profile, warning, log):
        """Merge a matched profile's metrics in and re-parse all OIDs."""
        self.metrics.extend(profile['definition']['metrics'])
        self.all_oids, self.bulk_oids, self.parsed_metrics = self.parse_metrics(self.metrics, warning, log)

    def call_cmd(self, cmd, *args, **kwargs):
        """Invoke a pysnmp command with this instance's engine/auth/transport/context."""
        return cmd(self._snmp_engine, self._auth_data, self._transport, self._context_data, *args, **kwargs)

    @staticmethod
    def create_snmp_engine(mibs_path):
        """
        Create a command generator to perform all the snmp query.
        If mibs_path is not None, load the mibs present in the custom mibs
        folder. (Need to be in pysnmp format)
        """
        snmp_engine = hlapi.SnmpEngine()
        mib_builder = snmp_engine.getMibBuilder()
        if mibs_path is not None:
            mib_builder.addMibSources(builder.DirMibSource(mibs_path))
        mib_view_controller = view.MibViewController(mib_builder)
        return snmp_engine, mib_view_controller

    @staticmethod
    def get_transport_target(instance, timeout, retries):
        """
        Generate a Transport target object based on the instance's configuration
        """
        ip_address = instance['ip_address']
        port = int(instance.get('port', 161))  # Default SNMP port
        return hlapi.UdpTransportTarget((ip_address, port), timeout=timeout, retries=retries)

    @staticmethod
    def get_auth_data(instance):
        """
        Generate a Security Parameters object based on the instance's
        configuration.
        """
        if 'community_string' in instance:
            # SNMP v1 - SNMP v2
            # See http://snmplabs.com/pysnmp/docs/api-reference.html#pysnmp.hlapi.CommunityData
            if int(instance.get('snmp_version', 2)) == 1:
                return hlapi.CommunityData(instance['community_string'], mpModel=0)
            return hlapi.CommunityData(instance['community_string'], mpModel=1)
        if 'user' in instance:
            # SNMP v3
            user = instance['user']
            auth_key = None
            priv_key = None
            auth_protocol = None
            priv_protocol = None
            # Defaults below (MD5 auth / DES priv) may be overridden by the
            # explicit authProtocol / privProtocol settings further down.
            if 'authKey' in instance:
                auth_key = instance['authKey']
                auth_protocol = hlapi.usmHMACMD5AuthProtocol
            if 'privKey' in instance:
                priv_key = instance['privKey']
                auth_protocol = hlapi.usmHMACMD5AuthProtocol
                priv_protocol = hlapi.usmDESPrivProtocol
            if 'authProtocol' in instance:
                auth_protocol = getattr(hlapi, instance['authProtocol'])
            if 'privProtocol' in instance:
                priv_protocol = getattr(hlapi, instance['privProtocol'])
            return hlapi.UsmUserData(user, auth_key, priv_key, auth_protocol, priv_protocol)
        raise ConfigurationError('An authentication method needs to be provided')

    @staticmethod
    def get_context_data(instance):
        """
        Generate a Context Parameters object based on the instance's
        configuration.
        We do not use the hlapi currently, but the rfc3413.oneliner.cmdgen
        accepts Context Engine Id (always None for now) and Context Name parameters.
        """
        context_engine_id = None
        context_name = ''
        if 'user' in instance:
            if 'context_engine_id' in instance:
                context_engine_id = OctetString(instance['context_engine_id'])
            if 'context_name' in instance:
                context_name = instance['context_name']
        return context_engine_id, context_name

    def parse_metrics(self, metrics, warning, log):
        """Parse configuration and returns data to be used for SNMP queries.
        `oids` is a dictionary of SNMP tables to symbols to query.
        """
        # Maps (MIB, table) -> (table ObjectType, list of symbol ObjectTypes).
        table_oids = {}
        parsed_metrics = []

        def extract_symbol(mib, symbol):
            # A symbol may be a plain name or a {'OID': ..., 'name': ...}
            # dict; in the latter case register the OID for manual resolution.
            if isinstance(symbol, dict):
                symbol_oid = symbol['OID']
                symbol = symbol['name']
                self._resolver.register(to_oid_tuple(symbol_oid), symbol)
                identity = hlapi.ObjectIdentity(symbol_oid)
            else:
                identity = hlapi.ObjectIdentity(mib, symbol)
            return identity, symbol

        def get_table_symbols(mib, table):
            # Return (symbols list, table name), creating the table entry in
            # table_oids on first sight.
            identity, table = extract_symbol(mib, table)
            key = (mib, table)
            if key in table_oids:
                return table_oids[key][1], table
            table_object = hlapi.ObjectType(identity)
            symbols = []
            table_oids[key] = (table_object, symbols)
            return symbols, table

        # Check the metrics completely defined
        for metric in metrics:
            forced_type = metric.get('forced_type')
            metric_tags = metric.get('metric_tags', [])
            if 'MIB' in metric:
                if not ('table' in metric or 'symbol' in metric):
                    raise ConfigurationError('When specifying a MIB, you must specify either table or symbol')
                if 'symbol' in metric:
                    # Scalar (non-table) MIB symbol.
                    to_query = metric['symbol']
                    try:
                        _, parsed_metric_name = get_table_symbols(metric['MIB'], to_query)
                    except Exception as e:
                        warning("Can't generate MIB object for variable : %s\nException: %s", metric, e)
                    else:
                        parsed_metric = ParsedMetric(parsed_metric_name, metric_tags, forced_type)
                        parsed_metrics.append(parsed_metric)
                    continue
                elif 'symbols' not in metric:
                    raise ConfigurationError('When specifying a table, you must specify a list of symbols')
                symbols, _ = get_table_symbols(metric['MIB'], metric['table'])
                index_tags = []
                column_tags = []
                for metric_tag in metric_tags:
                    if not ('tag' in metric_tag and ('index' in metric_tag or 'column' in metric_tag)):
                        raise ConfigurationError(
                            'When specifying metric tags, you must specify a tag, and an index or column'
                        )
                    tag_key = metric_tag['tag']
                    if 'column' in metric_tag:
                        # In case it's a column, we need to query it as well
                        mib = metric_tag.get('MIB', metric['MIB'])
                        identity, column = extract_symbol(mib, metric_tag['column'])
                        column_tags.append((tag_key, column))
                        try:
                            object_type = hlapi.ObjectType(identity)
                        except Exception as e:
                            warning("Can't generate MIB object for variable : %s\nException: %s", metric, e)
                        else:
                            if 'table' in metric_tag:
                                tag_symbols, _ = get_table_symbols(mib, metric_tag['table'])
                                tag_symbols.append(object_type)
                            elif mib != metric['MIB']:
                                raise ConfigurationError(
                                    'When tagging from a different MIB, the table must be specified'
                                )
                            else:
                                symbols.append(object_type)
                    elif 'index' in metric_tag:
                        index_tags.append((tag_key, metric_tag['index']))
                        if 'mapping' in metric_tag:
                            # Need to do manual resolution
                            for symbol in metric['symbols']:
                                self._resolver.register_index(
                                    symbol['name'], metric_tag['index'], metric_tag['mapping']
                                )
                            for tag in metric['metric_tags']:
                                if 'column' in tag:
                                    self._resolver.register_index(
                                        tag['column']['name'], metric_tag['index'], metric_tag['mapping']
                                    )
                for symbol in metric['symbols']:
                    identity, parsed_metric_name = extract_symbol(metric['MIB'], symbol)
                    try:
                        symbols.append(hlapi.ObjectType(identity))
                    except Exception as e:
                        warning("Can't generate MIB object for variable : %s\nException: %s", metric, e)
                    parsed_metric = ParsedTableMetric(parsed_metric_name, index_tags, column_tags, forced_type)
                    parsed_metrics.append(parsed_metric)
            elif 'OID' in metric:
                # Raw OID metric: register the name so results resolve nicely.
                oid_object = hlapi.ObjectType(hlapi.ObjectIdentity(metric['OID']))
                table_oids[metric['OID']] = (oid_object, [])
                self._resolver.register(to_oid_tuple(metric['OID']), metric['name'])
                parsed_metric = ParsedMetric(metric['name'], metric_tags, forced_type, enforce_scalar=False)
                parsed_metrics.append(parsed_metric)
            else:
                raise ConfigurationError('Unsupported metric in config file: {}'.format(metric))
        oids = []
        all_oids = []
        bulk_oids = []
        # Use bulk for SNMP version > 1 and there are enough symbols
        bulk_limit = self.bulk_threshold if self._auth_data.mpModel else 0
        for table, symbols in table_oids.values():
            if not symbols:
                # No table to browse, just one symbol
                oids.append(table)
            elif bulk_limit and len(symbols) > bulk_limit:
                bulk_oids.append(table)
            else:
                all_oids.append(symbols)
        if oids:
            all_oids.insert(0, oids)
        return all_oids, bulk_oids, parsed_metrics
| 2.015625 | 2 |
rossml/elements.py | rodrigomoliveira1/ross-ml | 0 | 12759090 | <reponame>rodrigomoliveira1/ross-ml
"""Bearing and seal elements module.
This module defines bearing and seal elements through artificial neural networks (ANN).
There is a general class to create an element with any ANN built with ROSS-ML and
other specific classes, which have fixed features that the model must match.
"""
from collections.abc import Iterable
from itertools import repeat
import numpy as np
import pandas as pd
from ross import BearingElement
from rossml.pipeline import Model
__all__ = ["AnnBearingElement", "SealLabyrinthElement", "TiltPadBearingElement"]
class AnnBearingElement(BearingElement):
"""Create bearings or seals via Neural Netwarks.
This class creates a element from a trained neural network. In order to create it
properly, one should bear in mind the name of the trained network, and the
parameters used. The results will be given in a dataframe form. When the number of
features is different from the trained one, a error is displayed and the same
occurs when the variable name is wrong due to a typo.
Parameters
----------
arq : str
The neural network folder's name, which must be located at ross-ml package.
The model files are loaded from this folder.
n : int
The node in which the element will be located in the rotor.
n_link : int, optional
Node to which the bearing will connect. If None the bearing is
connected to ground.
Default is None.
scale_factor : float, optional
The scale factor is used to scale the bearing drawing.
Default is 1.
kwargs : optional
The required arguments to the neural network predict the rotordynamic
coefficients. It must match the features from the neural network.
It may varies with the loaded model.
Returns
-------
A AnnBearingElement object.
Raises
------
KeyError
Error raised if kwargs does not match the features from the neural netowork
model.
ValueError
Error raised if some kwargs does not have the same length than 'speeds' kwargs.
It is raised only if other kwargs rathen than 'speeds' is passed as an iterable,
with missmatching size.
Examples
--------
>>> import rossml as rsml
Specify the neural netowark to be used. "arq" must match one of the folders name
of a neural network previously saved inside rossml. You can check the available
models with:
>>> rsml.available_models()
['test_model']
Now, select one of the available options.
>>> nn_model = "test_model"
Or build a neu neural network model (see Pipeline documentation).
Before setting data to the neural network, check what are the required features
according to the loaded model.
First, load the neural network:
>>> model = rsml.Model(nn_model)
Check for the features:
>>> features = model.features
Now, enter the respective values for each key in "model.features". Or use other
classes with preset parameters.
>>> seal = rsml.AnnBearingElement(
... n=0,
... arq=nn_model,
... seal_radius=141.61,
... number_of_teeth=22,
... tooth_pitch=8.58,
... tooth_height=8.37,
... radial_clearance=0.16309,
... ethane=0.14548,
... propane=0.00542,
... isobutan=.17441,
... butane=0.22031,
... nitrogen=0.10908,
... methane=0.11907,
... hydrogen=0.11584,
... oxygen=0.05794,
... co2=0.05224,
... reservoir_temperature=25.0,
... reservoir_pressure=568.90,
... sump_pressure=5.3,
... inlet_tangential_velocity_ratio=0.617,
... whirl_speed=8310.5,
... speeds=[7658.3],
... )
>>> seal # doctest: +ELLIPSIS
AnnBearingElement(n=0...
If kwargs has different argumments than model.features, an error is raised
informing how many which are the features and how many kwargs has been entered.
Users can copy the list directly from the error message to set the correct keys in
kwargs.
>>> seal = rsml.AnnBearingElement(
... n=0,
... arq=nn_model,
... seal_radius=141.61,
... number_of_teeth=22,
... tooth_pitch=8.58,
... tooth_height=8.37,
... radial_clearance=0.16309,
... methane=0.11907,
... hydrogen=0.11584,
... oxygen=0.05794,
... co2=0.05224,
... reservoir_temperature=25.0,
... reservoir_pressure=568.90,
... sump_pressure=5.3,
... inlet_tangential_velocity_ratio=0.617,
... whirl_speed=8310.5,
... speeds=[7658.3],
... ) # doctest: +ELLIPSIS
KeyError...
"""
def __init__(self, arq=None, n=None, n_link=None, scale_factor=1.0, **kwargs):
    """Build a bearing element whose rotordynamic coefficients are predicted
    by the neural network stored in the ``arq`` model folder.

    ``kwargs`` must contain one entry per model feature; scalar entries are
    broadcast to the length of ``speeds`` before prediction.
    """
    # Load the previously trained neural network model and its feature list.
    model = Model(arq)
    features = model.features
    # Checking data consistency: every model feature must be supplied.
    if any(key not in kwargs.keys() for key in features):
        raise KeyError(
            f"Model '{arq}' has the following {len(list(features))} features: "
            f"{list(features)}, and {len(kwargs)} are given. "
            f"Check the **kwargs dictionary for the same keys."
        )
    # Every iterable argument must match the number of requested speeds.
    size = len(kwargs["speeds"])
    if any(len(v) != size for k, v in kwargs.items() if isinstance(v, Iterable)):
        raise ValueError(
            "Some keyword arguments does not have the same length than 'speeds'."
        )
    # Broadcast scalar arguments to the length of "speeds" so every column fed
    # to the model has the same number of rows, then reorder the columns into
    # the order the neural network expects.
    # (Bug fix: the reordering previously read from ``kwargs`` again, which
    # discarded the broadcast values built in the loop above.)
    reordered_dict = {}
    for key, value in kwargs.items():
        if isinstance(value, Iterable):
            reordered_dict[key] = value
        else:
            # Materialize as a list so pandas can build a proper column.
            reordered_dict[key] = list(repeat(value, size))
    reordered_dict = {k: reordered_dict[k] for k in features}
    data = pd.DataFrame(reordered_dict)
    results = model.predict(data)
    super().__init__(
        n=n,
        frequency=np.array(kwargs["speeds"]),
        kxx=np.array(results["kxx"], dtype=np.float64),
        kxy=np.array(results["kxy"], dtype=np.float64),
        kyx=np.array(results["kyx"], dtype=np.float64),
        kyy=np.array(results["kyy"], dtype=np.float64),
        cxx=np.array(results["cxx"], dtype=np.float64),
        # NOTE(review): "cyx" feeds both cxy and cyx below. This looks like a
        # copy/paste slip, but it is kept until the model output is confirmed
        # to contain a "cxy" key — TODO confirm against the trained models.
        cxy=np.array(results["cyx"], dtype=np.float64),
        cyx=np.array(results["cyx"], dtype=np.float64),
        cyy=np.array(results["cyy"], dtype=np.float64),
        tag=arq,
        n_link=n_link,
        scale_factor=scale_factor,
    )
class SealLabyrinthElement(AnnBearingElement):
    """Create labyrinth seal elements via Neural Networks.

    This class creates a Labyrinth Seal Element from a trained neural network.
    The parameters inserted are used to predict the rotordynamic coefficients.

    Parameters
    ----------
    n : int
        The node in which the element will be located in the rotor.
    arq : str
        The neural network folder's name, which must be located at ross-ml package.
        The model files are loaded from this folder.
    seal_radius : float, list, array
        The seal radius.
    number_of_teeth : int, list, array
        Number of teeth present on the seal.
    tooth_pitch : float, list, array
        The pitch between two teeth.
    tooth_height : float, list, array
        The tooth height.
    radial_clearance : float, list, array
        The seal radial clearance.
    methane : float, list, array
        The proportion of methane in the gas.
    ethane : float, list, array
        The proportion of ethane in the gas.
    propane : float, list, array
        The proportion of propane in the gas.
    isobutan : float, list, array
        The proportion of isobutane in the gas.
    butane : float, list, array
        The proportion of butane in the gas.
    hydrogen : float, list, array
        The proportion of hydrogen in the gas.
    nitrogen : float, list, array
        The proportion of nitrogen in the gas.
    oxygen : float, list, array
        The proportion of oxygen in the gas.
    co2 : float, list, array
        The proportion of co2 in the gas.
    reservoir_temperature : float, list, array
        The reservoir temperature.
    reservoir_pressure : float, list, array
        The reservoir pressure.
    sump_pressure : float, list, array
        The sump pressure.
    inlet_tangential_velocity_ratio : float, list, array
        The inlet tangential velocity ratio.
    whirl_speed : float, list, array
        Whirl speed value.
    speeds : list, array
        Array with the frequencies of interest.
    n_link : int, optional
        Node to which the bearing will connect. If None the bearing is
        connected to ground.
        Default is None.
    scale_factor : float, optional
        The scale factor is used to scale the bearing drawing.
        Default is 1.

    Returns
    -------
    A SealLabyrinthElement object.

    Examples
    --------
    >>> import rossml as rsml
    >>> seal = rsml.SealLabyrinthElement(
    ...     arq="test_model", n=0,
    ...     seal_radius=141.60965632804,
    ...     number_of_teeth=22,
    ...     tooth_pitch=8.58370220849736,
    ...     tooth_height=8.369750873237724,
    ...     radial_clearance=0.16309012139139262,
    ...     methane=0.11907483818379025,
    ...     hydrogen=0.11584137533357493,
    ...     oxygen=0.05794358525114542,
    ...     co2=0.052243764797109724,
    ...     ethane=0.1454836854619626,
    ...     propane=0.005424100482971933,
    ...     isobutan=0.17441663090080106,
    ...     butane=0.22031291584401053,
    ...     nitrogen=0.10908265522969644,
    ...     reservoir_temperature=25.037483348934998,
    ...     reservoir_pressure=568.9058098384347,
    ...     sump_pressure=5.299447680455862,
    ...     inlet_tangential_velocity_ratio=0.6171344358228346,
    ...     whirl_speed=8310.497837226783,
    ...     speeds=[7658.340362809778],
    ... )
    >>> seal # doctest: +ELLIPSIS
    SealLabyrinthElement(n=0...
    """

    def __init__(
        self,
        n,
        arq,
        seal_radius,
        number_of_teeth,
        tooth_pitch,
        tooth_height,
        radial_clearance,
        methane,
        ethane,
        propane,
        isobutan,
        butane,
        hydrogen,
        nitrogen,
        oxygen,
        co2,
        reservoir_temperature,
        reservoir_pressure,
        sump_pressure,
        inlet_tangential_velocity_ratio,
        whirl_speed,
        speeds,
        n_link=None,
        scale_factor=1.0,
    ):
        # Collect every seal feature into one mapping and hand it to the ANN
        # base class, which runs the model and builds the dynamic coefficients.
        seal_features = {
            "seal_radius": seal_radius,
            "number_of_teeth": number_of_teeth,
            "tooth_pitch": tooth_pitch,
            "tooth_height": tooth_height,
            "radial_clearance": radial_clearance,
            "methane": methane,
            "ethane": ethane,
            "propane": propane,
            "isobutan": isobutan,
            "butane": butane,
            "hydrogen": hydrogen,
            "nitrogen": nitrogen,
            "oxygen": oxygen,
            "co2": co2,
            "reservoir_temperature": reservoir_temperature,
            "reservoir_pressure": reservoir_pressure,
            "sump_pressure": sump_pressure,
            "inlet_tangential_velocity_ratio": inlet_tangential_velocity_ratio,
            "whirl_speed": whirl_speed,
            "speeds": speeds,
        }
        super().__init__(
            arq=arq, n=n, n_link=n_link, scale_factor=scale_factor, **seal_features
        )
class TiltPadBearingElement(AnnBearingElement):
    """Create Tilting Pad Bearing Element via Neural Networks.

    This class creates a Tilting Pad Bearing Element from a trained neural network.
    The parameters inserted are used to predict the rotordynamic coefficients.

    Parameters
    ----------
    n : int
        The node in which the element will be located in the rotor.
    arq : str
        The neural network folder's name, which must be located at ross-ml package.
        The model files are loaded from this folder.
    diameter : float
        Rotor diameter.
    axial_length : float
        Axial length.
    number_of_pads : int
        Number of pads.
    pad_leading_edge : float
        Angular position of first pad leading edge, relative to negative X axis.
    pad_thickness : float
        Pad thickness.
    ambient_pressure : float
        Ambient pressure.
    supply_pressure : float
        Oil supply pressure.
    cavitation_pressure : float
        Cavitation pressure.
    supply_temperature : float
        Oil supply temperature.
    viscosity_at_supply : float
        Oil viscosity at supply temperature.
    density : float
        Oil density.
    specific_heat : float
        Oil specific heat.
    thermal_condutivity : float
        Oil thermal conductivity. (The parameter name keeps this historical
        spelling; renaming it would break existing keyword callers.)
    alpha_v : float
        Oil viscosity exponent
        visc = visc_supply * exp(-alpha_v * (T - T_supply))
    inertia_effects : boolean
        Key to consider or not the inertia effects.
    frequency_analysis_option : int
        The frequency analysis type.
        Options are:
            1: synchronous
            2: asynchronous.
    shaft_speed : float
        Shaft speed for asynchronous analysis only.
    thermal_analysis_type : int
        Options are 1 to 7.
        See THD equations for reference. Haussen (4) recommended.
    journal_heat_transfer : int
        Heat transfer analysis type for the journal bearing:
            1: Adiabatic journal.
            2: Known journal temperature.
            3: Calc journal temperature.
    journal_temperature : float
        Journal temperature if journal_heat_transfer == 2.
    pad_heat_transfer : int
        Heat transfer analysis type for the pad:
            1: Adiabatic pad.
            2: Known pad temperature.
            3: Calc pad temperature.
    sump_temperature : float
        Pad temperature if pad_heat_transfer == 2.
    pad_thermal_conductivity : float
        The pad thermal conductivity property.
    reynolds_pad_back : float
        Reynolds number at the pad back. (TODO confirm exact meaning/units.)
    housing_inner_diameter : float
        The housing inner diameter. It must be >= diameter + pad_thickness.
    percent_heat_in_oil : float
        Percentage of heat carried by the oil.
    groove_convection_coef : float
        The groove convection coefficient.
    inlet_taper_angle : float
        Pad inlet taper angle.
    inlet_taper_arc_length : float
        (degrees)
    case_analysis_option : int
        Options are:
            1: vary eccentricity
            2: vary load
    x_initial_eccentricity : float
        X eccentricity ratio initial guess. X0/cp.
    y_initial_eccentricity : float
        Y eccentricity ratio initial guess. Y0/cp.
    pad_option : int
        Options are:
            1: equal pads
            2: unequal pads
    analysis_model : int
        Thermal analysis type. Options are:
            1: TEHD
            2: THD
    pad_preload : float, list
        Pad preload. May be list if pad_option == 2
    pad_offset : float, list
        Pad offset. May be list if pad_option == 2
    pad_arc_length : float, list
        Pad arc length (degrees). May be list if pad_option == 2.
    pad_clearance : float, list
        Pad clearance. May be list if pad_option == 2.
    bearing_type : int
        Options are:
            1: tilting pad
            2: rigid pad
    pad_mass : float
        Pad mass (kg).
    pad_moment_of_inertia : float
        Moment of inertia about pivot (kg-m^2).
    pad_mass_center_location : float
        Distance from pivot to CM (m).
    pad_thickness_at_pivot : float
        Pad thickness at the pivot (m).
    pivot_type
        Pivot model selector. (TODO confirm accepted values against the model.)
    pivot_info : list, array
        4x3 list indicating pivot parameters for chosen pad_type.
    groove_mixing_model : int
        Options are:
            1: hot oil carry over
            2: improved model
    hot_oil_carry_over : float
        Hot oil carry over. Insert this option if groove_mixing_model = 1.
    groove_mixing_efficiency : float
        Groove mixing efficiency. Insert this option if groove_mixing_model = 2.
    oil_flowrate : float
        Oil flowrate (LPM). Insert this option if groove_mixing_model = 2.
    pad_deformation_model : int
        Pad deformation type. Options are:
            1: pressure
            2: thermal
            3: both
    shaft_and_housing : int, optional
        Options are:
            1: rigid shaft + housing
            2: shaft expands
            3: both expand
            4: shaft expands housing contracts
    temperature_cold_condition : float, optional
        Temperature cold condition (degC)
    shaft_thermal_expansion_coef : float, optional
        Shaft thermal expansion coefficient (1/degC). Default is None.
    pad_thermal_expansion_coef : float, optional
        Pad thermal expansion coefficient (1/degC). Default is None.
    housing_thermal_expansion_coef : float, optional
        Housing thermal expansion coefficient (1/degC). Default is None.
    pad_flexibility : int
        Options are:
            1: approximate
            2: 3D FEM
    pad_elastic_modulus : float, optional
        Pad elastic modulus (N/m^2). Default is None.
    pad_liner_compliance : int, optional
        Options are:
            1: liner deformation
            2: rigid liner
    liner_elastic_modulus : float, optional
        Liner elastic modulus (N/m^2). Default is None.
    liner_poisson_ratio : float
        Liner Poisson ratio. Default is None.
    liner_thickness : float, optional
        Liner thickness (m). Default is None.
    liner_thermal_expansion_coef : float, optional
        Liner thermal expansion coefficient (1/degC). Default is None.
    liner_thermal_conductivity : float, optional
        Liner thermal conductivity (W/m-degC). Default is None.
    grid_ratio : float, optional
        Mesh ratio circ/axial. Default is None.
    number_of_circumferential_points : int, optional
        Number of circumferential points. Default is None.
    number_of_axial_points : int
        Number of axial points.
    bearing_load_x : float, optional
        Bearing load X direction (N).
    bearing_load_y : float, optional
        Bearing load Y direction (N) (gravity load is negative).
    speeds : list, array
        Array with the frequencies of interest. Although the default is None,
        a value is required in practice: the base class reads "speeds" to size
        the model input.
    n_link : int, optional
        Node to which the bearing will connect. If None the bearing is
        connected to ground.
        Default is None.
    scale_factor : float, optional
        The scale factor is used to scale the bearing drawing.
        Default is 1.

    Returns
    -------
    A TiltPadBearingElement object.
    """

    def __init__(
        self,
        n,
        arq,
        diameter,
        axial_length,
        number_of_pads,
        pad_leading_edge,
        pad_thickness,
        ambient_pressure,
        supply_pressure,
        cavitation_pressure,
        supply_temperature,
        viscosity_at_supply,
        density,
        specific_heat,
        thermal_condutivity,  # (sic) spelling kept for backward compatibility
        alpha_v,
        inertia_effects,
        frequency_analysis_option,
        shaft_speed,
        thermal_analysis_type,
        journal_heat_transfer,
        journal_temperature,
        pad_heat_transfer,
        sump_temperature,
        pad_thermal_conductivity,
        reynolds_pad_back,
        housing_inner_diameter,
        percent_heat_in_oil,
        groove_convection_coef,
        inlet_taper_angle,
        inlet_taper_arc_length,
        case_analysis_option,
        x_initial_eccentricity,
        y_initial_eccentricity,
        pad_option,
        analysis_model,
        pad_preload,
        pad_offset,
        pad_arc_length,
        pad_clearance,
        bearing_type,
        pad_mass,
        pad_moment_of_inertia,
        pad_mass_center_location,
        pad_thickness_at_pivot,
        pivot_type,
        pivot_info,
        groove_mixing_model,
        hot_oil_carry_over,
        groove_mixing_efficiency,
        oil_flowrate,
        pad_deformation_model,
        shaft_and_housing,
        temperature_cold_condition,
        shaft_thermal_expansion_coef,
        pad_thermal_expansion_coef,
        housing_thermal_expansion_coef,
        pad_flexibility,
        pad_elastic_modulus,
        pad_liner_compliance,
        liner_elastic_modulus,
        liner_poisson_ratio,
        liner_thickness,
        liner_thermal_expansion_coef,
        liner_thermal_conductivity,
        grid_ratio,
        number_of_circumferential_points,
        number_of_axial_points,
        bearing_load_x,
        bearing_load_y,
        speeds=None,
        n_link=None,
        scale_factor=1.0,
    ):
        # Forward every geometry / lubricant / analysis parameter to the ANN
        # base class, which feeds them to the trained model as features and
        # builds the frequency-dependent stiffness/damping coefficients.
        super().__init__(
            arq=arq,
            n=n,
            n_link=n_link,
            scale_factor=scale_factor,
            diameter=diameter,
            axial_length=axial_length,
            number_of_pads=number_of_pads,
            pad_leading_edge=pad_leading_edge,
            pad_thickness=pad_thickness,
            ambient_pressure=ambient_pressure,
            supply_pressure=supply_pressure,
            cavitation_pressure=cavitation_pressure,
            supply_temperature=supply_temperature,
            viscosity_at_supply=viscosity_at_supply,
            density=density,
            specific_heat=specific_heat,
            thermal_condutivity=thermal_condutivity,
            alpha_v=alpha_v,
            inertia_effects=inertia_effects,
            frequency_analysis_option=frequency_analysis_option,
            shaft_speed=shaft_speed,
            thermal_analysis_type=thermal_analysis_type,
            journal_heat_transfer=journal_heat_transfer,
            journal_temperature=journal_temperature,
            pad_heat_transfer=pad_heat_transfer,
            sump_temperature=sump_temperature,
            pad_thermal_conductivity=pad_thermal_conductivity,
            reynolds_pad_back=reynolds_pad_back,
            housing_inner_diameter=housing_inner_diameter,
            percent_heat_in_oil=percent_heat_in_oil,
            groove_convection_coef=groove_convection_coef,
            inlet_taper_angle=inlet_taper_angle,
            inlet_taper_arc_length=inlet_taper_arc_length,
            case_analysis_option=case_analysis_option,
            x_initial_eccentricity=x_initial_eccentricity,
            y_initial_eccentricity=y_initial_eccentricity,
            pad_option=pad_option,
            analysis_model=analysis_model,
            pad_preload=pad_preload,
            pad_offset=pad_offset,
            pad_arc_length=pad_arc_length,
            pad_clearance=pad_clearance,
            bearing_type=bearing_type,
            pad_mass=pad_mass,
            pad_moment_of_inertia=pad_moment_of_inertia,
            pad_mass_center_location=pad_mass_center_location,
            pad_thickness_at_pivot=pad_thickness_at_pivot,
            pivot_type=pivot_type,
            pivot_info=pivot_info,
            groove_mixing_model=groove_mixing_model,
            hot_oil_carry_over=hot_oil_carry_over,
            groove_mixing_efficiency=groove_mixing_efficiency,
            oil_flowrate=oil_flowrate,
            pad_deformation_model=pad_deformation_model,
            shaft_and_housing=shaft_and_housing,
            temperature_cold_condition=temperature_cold_condition,
            shaft_thermal_expansion_coef=shaft_thermal_expansion_coef,
            pad_thermal_expansion_coef=pad_thermal_expansion_coef,
            housing_thermal_expansion_coef=housing_thermal_expansion_coef,
            pad_flexibility=pad_flexibility,
            pad_elastic_modulus=pad_elastic_modulus,
            pad_liner_compliance=pad_liner_compliance,
            liner_elastic_modulus=liner_elastic_modulus,
            liner_poisson_ratio=liner_poisson_ratio,
            liner_thickness=liner_thickness,
            liner_thermal_expansion_coef=liner_thermal_expansion_coef,
            liner_thermal_conductivity=liner_thermal_conductivity,
            grid_ratio=grid_ratio,
            number_of_circumferential_points=number_of_circumferential_points,
            number_of_axial_points=number_of_axial_points,
            bearing_load_x=bearing_load_x,
            bearing_load_y=bearing_load_y,
        )
| 3.78125 | 4 |
refresh_all.py | flavio-a/GeoguessrBot | 2 | 12759091 | import bot
if __name__ == "__main__":
    # Refresh every stored match link so cached match data stays current.
    print('Refreshing all...')
    links = bot.db.getLinksList()
    for link in links:
        bot.refreshMatch(link)
    # Bug fix: "succesfully" typo in the user-facing message.
    print('Refreshed successfully')
| 2.375 | 2 |
networks/stylegan_networks.py | styler00dollar/Colab-latent-composition | 1 | 12759092 | <filename>networks/stylegan_networks.py
import torch, os
from utils import customnet
from argparse import Namespace
from utils.pt_stylegan2 import get_generator
from collections import OrderedDict
import torch.nn as nn
from torch.nn.functional import interpolate
def stylegan_setting(domain):
    """Return generator hyper-parameters for the given StyleGAN2 domain.

    The result maps: output resolution (``outdim``), per-layer latent size
    (``nz``), total W+ latent size (``nlatent``) and encoder depth.
    """
    nz = 512
    # Per-domain (output resolution, number of W+ latent layers); any
    # unlisted domain falls back to 256 px with 14 layers.
    resolution, num_layers = {
        'ffhq': (1024, 18),
        'car': (512, 16),
    }.get(domain, (256, 14))
    return dict(outdim=resolution, nz=nz, nlatent=nz * num_layers,
                resnet_depth=34)
def load_stylegan(domain, size=256):
    """Load the pretrained StyleGAN2 generator for *domain* in eval mode."""
    checkpoint = f'pretrained_models/sgans/stylegan2-{domain}-config-f.pt'
    # optimize_to_w=True: the generator is configured to operate in W space.
    return get_generator(
        checkpoint, cfg=Namespace(optimize_to_w=True), size=size
    ).eval()
def load_stylegan_encoder(domain, nz=512*14, outdim=256, use_RGBM=True, use_VAE=False,
                          resnet_depth=34, ckpt_path=None):
    """Build the ResNet encoder for a StyleGAN domain and load its weights.

    If ``ckpt_path`` is None, a default per-domain checkpoint path is used.
    """
    halfsize = False  # hardcoded: encoder always consumes full-resolution input
    if use_VAE:
        # A VAE head predicts mean and log-variance, doubling the latent size.
        nz = nz * 2
    # RGBM / VAE variants take an extra mask channel on top of RGB.
    channels_in = 4 if use_RGBM or use_VAE else 3
    print(f"Using halfsize?: {halfsize}")
    print(f"Input channels: {channels_in}")
    encoder = get_stylegan_encoder(ndim_z=nz, resnet_depth=resnet_depth,
                                   halfsize=halfsize, channels_in=channels_in)
    if ckpt_path is None:
        # RGBM is the default folder suffix; only a pure-VAE setup differs
        # (matches the original if/elif/else, where the else also gave RGBM).
        suffix = 'VAE' if (use_VAE and not use_RGBM) else 'RGBM'
        ckpt_path = f'pretrained_models/sgan_encoders/{domain}_{suffix}/model_final.pth'
        print(f"Using default checkpoint path: {ckpt_path}")
    state = torch.load(ckpt_path)
    encoder.load_state_dict(state['state_dict'])
    return encoder.eval()
## code from Jonas
def get_stylegan_encoder(ndim_z=512, add_relu=False, resnet_depth=34, halfsize=True, channels_in=3):
    """
    Return encoder. Change to get a different encoder.

    Builds a ResNet whose final FC layer is replaced by a 1x1 conv projecting
    to ``ndim_z`` channels, followed by a reshape to W+ latents. Weights are
    He-initialized.
    """
    def make_resnet(halfsize=True, resize=True, ndim_z=512, add_relu=False, resnet_depth=34, channels_in=3):
        # A resnet with the final FC layer removed.
        # Instead, we have a final conv5, leaky relu, and global average pooling.
        native_size = 128 if halfsize else 256
        # Make an encoder model.
        def change_out(layers):
            # Mutates the (name, module) layer list customnet passes in:
            # inserts 'layer5' right after 'layer4', optionally prepends a
            # resize layer, drops the final FC, and appends the W+ reshape.
            numch = 512 if resnet_depth < 50 else 2048  # bottleneck nets widen to 2048
            ind = [i for i, (n, l) in enumerate(layers) if n == 'layer4'][0] + 1
            newlayer = ('layer5',
                        torch.nn.Sequential(OrderedDict([
                            ('conv5', torch.nn.Conv2d(numch, ndim_z, kernel_size=1)),
                        ])))
            layers.insert(ind, newlayer)
            if resize:
                # Force inputs to the network's native resolution first.
                layers[:0] = [('downsample',
                               InterpolationLayer(size=(native_size, native_size)))]
            # Remove FC layer
            layers = layers[:-1]
            if add_relu:
                layers.append( ('postrelu', torch.nn.LeakyReLU(0.2) ))
            # add reshape layer
            layers.append(('to_wplus', customnet.EncoderToWplus()))
            return layers
        encoder = customnet.CustomResNet(
            resnet_depth, modify_sequence=change_out, halfsize=halfsize,
            channels_in=channels_in)
        # Init using He initialization
        def init_weights(m):
            if type(m) == torch.nn.Linear or type(m) == torch.nn.Conv2d:
                torch.nn.init.kaiming_normal_(m.weight, nonlinearity='relu')
                if m.bias is not None:
                    m.bias.data.fill_(0.01)
        encoder.apply(init_weights)
        return encoder
    encoder = make_resnet(ndim_z=ndim_z, add_relu=add_relu ,resnet_depth=resnet_depth,
                          channels_in=channels_in, halfsize=halfsize)
    return encoder
class InterpolationLayer(nn.Module):
    """Resize activations to a fixed spatial size using area interpolation."""

    def __init__(self, size):
        super().__init__()
        # Target (height, width) applied to every forward pass.
        self.size = size

    def forward(self, x):
        # 'area' mode behaves like adaptive average pooling when downsampling.
        return interpolate(x, size=self.size, mode='area')
| 2.296875 | 2 |
py_challenge/level_7.py | bertrandvidal/stuff | 0 | 12759093 | <reponame>bertrandvidal/stuff
# NOTE: this is Python 2 code (statement-form `print`).
from challenge import open_next_level
import requests
import zipfile
import os
import tempfile

# Download the challenge zip archive into a fresh temp file.
tmp = tempfile.mkdtemp()
(_, archive_path) = tempfile.mkstemp(dir=tmp)
# NOTE(review): the archive is opened in text mode "w" although
# response.content is binary — works on POSIX, would corrupt on Windows.
with open(archive_path, "w") as channel:
    response = requests.get("http://www.pythonchallenge.com/pc/def/channel.zip")
    response.raise_for_status()
    channel.write(response.content)

archive = zipfile.ZipFile(archive_path)
# Follow the chain: each "<n>.txt" entry ends with the number of the next
# file; collect the per-entry zip comments along the way.
nothing = 90052
comments = []
while nothing:
    current = "%s.txt" % nothing
    data = archive.read(current)
    comments.append(archive.getinfo(current).comment)
    nothing = data.split()[-1]
    try:
        int(nothing)
    except ValueError as val_error:
        # A non-numeric tail marks the end of the chain.
        print "This might be the one", val_error
        break
# The concatenated comments spell out the answer ("oxygen").
print "".join(comments)
open_next_level("oxygen")
| 2.84375 | 3 |
tests/test_async_media.py | rguillermo/wpclient | 0 | 12759094 | import asyncio
from tests.fixtures import auth_data, media_data
from wpclient.client import Client
from wpclient.models.media import MediaModel
async def create(auth_data, media_data):
    """Upload one media item and return the client's result list."""
    async with Client(**auth_data) as client:
        # Queue the creation coroutine, run it, then collect the result.
        client.add(client.media.create(MediaModel.parse_obj(media_data)))
        await client.perform()
        return client.result
async def get(auth_data, id):
    """Fetch a single media item by id and return the client's result list."""
    async with Client(**auth_data) as client:
        client.add(client.media.get(id))
        await client.perform()
        return client.result
async def update(auth_data, id, media_updated):
    """Update a media item and return the client's result list."""
    async with Client(**auth_data) as client:
        client.add(client.media.update(id, media_updated))
        await client.perform()
        return client.result
async def delete(auth_data, id):
    """Delete a media item and return the client's result list."""
    async with Client(**auth_data) as client:
        client.add(client.media.delete(id))
        await client.perform()
        return client.result
def test_async_crud(auth_data, media_data):
    """End-to-end CRUD check against the live WordPress media endpoint."""
    # Create: the first (only) result should be a MediaModel instance.
    r = asyncio.run(create(auth_data, media_data))
    media = r[0]
    assert isinstance(media, MediaModel)
    # Read: the stored alt text must match what was uploaded.
    r = asyncio.run(get(auth_data, media.id))
    media = r[0]
    assert media.alt_text == media_data['alt_text']
    # Update: change the alt text and confirm the server echoes it back.
    media_updated = media_data.copy()
    media_updated['alt_text'] = 'My updated alt text'
    r = asyncio.run(update(auth_data, media.id, media_updated))
    media = r[0]
    assert media.alt_text == media_updated['alt_text']
    # Delete: the API reports success as a boolean True.
    r = asyncio.run(delete(auth_data, media.id))
    assert r[0] == True
| 2.234375 | 2 |
data_steward/cdr_cleaner/cleaning_rules/clean_ppi_numeric_fields_using_parameters.py | dcampbell-vumc/curation | 0 | 12759095 | import constants.cdr_cleaner.clean_cdr as cdr_consts
# BigQuery UPDATE that blanks out implausible PPI numeric answers: for each
# listed observation_concept_id, any value_as_number outside its allowed range
# is set to NULL and value_as_concept_id is set to 2000000010 (the project's
# "invalid value" sentinel concept). The string itself is executed verbatim
# after {project}/{dataset} substitution, so it must not be edited lightly.
CLEAN_PPI_NUMERIC_FIELDS = """
UPDATE
`{project}.{dataset}.observation` u1
SET
u1.value_as_number = NULL,
u1.value_as_concept_id = 2000000010
FROM
(
SELECT
*
FROM
`{project}.{dataset}.observation`
WHERE
observation_concept_id = 1585889 AND (value_as_number < 0 OR value_as_number > 20)
UNION ALL
SELECT
*
FROM
`{project}.{dataset}.observation`
WHERE
observation_concept_id = 1585890 AND (value_as_number < 0 OR value_as_number > 20)
UNION ALL
SELECT
*
FROM
`{project}.{dataset}.observation`
WHERE
observation_concept_id = 1585795 AND (value_as_number < 0 OR value_as_number > 99)
UNION ALL
SELECT
*
FROM
`{project}.{dataset}.observation`
WHERE
observation_concept_id = 1585802 AND (value_as_number < 0 OR value_as_number > 99)
UNION ALL
SELECT
*
FROM
`{project}.{dataset}.observation`
WHERE
observation_concept_id = 1585820 AND (value_as_number < 0 OR value_as_number > 255)
UNION ALL
SELECT
*
FROM
`{project}.{dataset}.observation`
WHERE
observation_concept_id = 1585864 AND (value_as_number < 0 OR value_as_number > 99)
UNION ALL
SELECT
*
FROM
`{project}.{dataset}.observation`
WHERE
observation_concept_id = 1585870 AND (value_as_number < 0 OR value_as_number > 99)
UNION ALL
SELECT
*
FROM
`{project}.{dataset}.observation`
WHERE
observation_concept_id = 1585873 AND (value_as_number < 0 OR value_as_number > 99)
UNION ALL
SELECT
*
FROM
`{project}.{dataset}.observation`
WHERE
observation_concept_id = 1586159 AND (value_as_number < 0 OR value_as_number > 99)
UNION ALL
SELECT
*
FROM
`{project}.{dataset}.observation`
WHERE
observation_concept_id = 1586162 AND (value_as_number < 0 OR value_as_number > 99) ) a
WHERE
u1.observation_id = a.observation_id
"""
def get_clean_ppi_num_fields_using_parameters_queries(project_id, dataset_id):
    """
    runs the query which updates the ppi numeric fields in observation table based on the
    upper and lower bounds specified.

    :param project_id: Name of the project
    :param dataset_id: Name of the dataset where the queries should be run
    :return: a single-element list of query dicts for the cleaning engine
    """
    formatted_query = CLEAN_PPI_NUMERIC_FIELDS.format(
        dataset=dataset_id, project=project_id
    )
    return [{cdr_consts.QUERY: formatted_query}]
if __name__ == '__main__':
    # Command-line entry point: parse standard cleaner args, enable console
    # logging, then run the PPI numeric-field cleaning queries.
    import cdr_cleaner.args_parser as parser
    import cdr_cleaner.clean_cdr_engine as clean_engine

    ARGS = parser.parse_args()
    clean_engine.add_console_logging(ARGS.console_log)
    query_list = get_clean_ppi_num_fields_using_parameters_queries(
        ARGS.project_id, ARGS.dataset_id)
    clean_engine.clean_dataset(ARGS.project_id, query_list)
| 1.070313 | 1 |
tardis/adapters/sites/fakesite.py | maxfischer2781/tardis | 11 | 12759096 | from ...exceptions.tardisexceptions import TardisError
from ...interfaces.siteadapter import ResourceStatus
from ...interfaces.siteadapter import SiteAdapter
from ...utilities.attributedict import AttributeDict
from ...utilities.staticmapping import StaticMapping
from contextlib import contextmanager
from datetime import datetime
from datetime import timedelta
from functools import partial
from uuid import uuid4
import asyncio
class FakeSiteAdapter(SiteAdapter):
    """Simulated site adapter for testing: resources "boot" after a
    configurable delay and API calls sleep to mimic network latency."""

    def __init__(self, machine_type: str, site_name: str) -> None:
        self._machine_type = machine_type
        self._site_name = site_name
        # Distributions (objects with .get_value()) taken from the site
        # configuration provided by the SiteAdapter base class.
        self._api_response_delay = self.configuration.api_response_delay
        self._resource_boot_time = self.configuration.resource_boot_time
        # Identity mapping: fake responses already use tardis field names.
        key_translator = StaticMapping(
            remote_resource_uuid="remote_resource_uuid",
            resource_status="resource_status",
            created="created",
            updated="updated",
            resource_boot_time="resource_boot_time",
        )
        # Rebind the inherited handle_response with the translators pre-bound,
        # so the methods below can call it with just the raw response.
        self.handle_response = partial(
            self.handle_response,
            key_translator=key_translator,
            translator_functions=StaticMapping(),
        )
        # drone_uuid -> terminal ResourceStatus for stopped/terminated drones.
        self._stopped_n_terminated_resources = {}

    async def deploy_resource(
        self, resource_attributes: AttributeDict
    ) -> AttributeDict:
        """Pretend to start a resource; it will report Booting until its
        randomly drawn boot time has elapsed."""
        await self._api_response_delay.get_value())
        now = datetime.now()
        response = AttributeDict(
            remote_resource_uuid=uuid4().hex,
            resource_status=ResourceStatus.Booting,
            created=now,
            updated=now,
            resource_boot_time=self._resource_boot_time.get_value(),
        )
        return self.handle_response(response)

    def get_resource_boot_time(self, resource_attributes: AttributeDict) -> float:
        """Return the resource's boot time, re-drawing it if missing."""
        try:
            return resource_attributes.resource_boot_time
        except AttributeError:
            # In case tardis is restarted, resource_boot_time is not set, so re-set
            resource_boot_time = resource_attributes[
                "resource_boot_time"
            ] = self._resource_boot_time.get_value()
            return resource_boot_time

    async def resource_status(
        self, resource_attributes: AttributeDict
    ) -> AttributeDict:
        """Report Stopped/Deleted if previously requested, Running once the
        boot time has elapsed, otherwise echo the current attributes."""
        await asyncio.sleep(self._api_response_delay.get_value())
        try:  # check if resource has been stopped or terminated
            resource_status = self._stopped_n_terminated_resources[
                resource_attributes.drone_uuid
            ]
        except KeyError:
            pass
        else:
            return self.handle_response(AttributeDict(resource_status=resource_status))
        created_time = resource_attributes.created
        resource_boot_time = self.get_resource_boot_time(resource_attributes)
        # check if resource is already running
        if (datetime.now() - created_time) > timedelta(seconds=resource_boot_time):
            return self.handle_response(
                AttributeDict(resource_status=ResourceStatus.Running)
            )
        return self.handle_response(resource_attributes)

    async def stop_resource(self, resource_attributes: AttributeDict):
        """Mark the drone Stopped; subsequent status calls will see it."""
        await asyncio.sleep(self._api_response_delay.get_value())
        self._stopped_n_terminated_resources[
            resource_attributes.drone_uuid
        ] = ResourceStatus.Stopped
        return self.handle_response(
            AttributeDict(resource_status=ResourceStatus.Stopped)
        )

    async def terminate_resource(self, resource_attributes: AttributeDict):
        """Mark the drone Deleted; subsequent status calls will see it."""
        await asyncio.sleep(self._api_response_delay.get_value())
        self._stopped_n_terminated_resources[
            resource_attributes.drone_uuid
        ] = ResourceStatus.Deleted
        return self.handle_response(
            AttributeDict(resource_status=ResourceStatus.Deleted)
        )

    @contextmanager
    def handle_exceptions(self) -> None:
        # Wrap any failure in the tardis exception hierarchy.
        try:
            yield
        except Exception as ex:
            raise TardisError from ex
| 2.046875 | 2 |
easySpeech/speech.py | SaptakBhoumik/easySpeech | 15 | 12759097 | # import required libraries
import sounddevice as sd
from .recognize import *
from .record import *
from .ml import *
def speech(using, freq=44100, duration=5, key=None, language="en-US", show_all=False):
    """Record from the default microphone and transcribe with the chosen engine."""
    # Capture `duration` seconds of stereo audio at `freq` Hz, then persist it
    # so the recognition engines can read it back from disk.
    samples = sd.rec(int(duration * freq), samplerate=freq, channels=2)
    sd.wait()
    write("recording.wav", samples, freq, sampwidth=2)

    engine = using.lower()
    if engine == 'google':
        recognizer = Recognizer()
        with AudioFile('recording.wav') as source:
            audio = recognizer.record(source)
        return recognizer.recognize_google(audio, key, language, show_all)
    if engine == 'ml':
        return ml('recording.wav')
    return 'engine not found'
def google_audio(file, key=None, language="en-US", show_all=False):
    """Transcribe an existing audio file with the Google speech engine."""
    recognizer = Recognizer()
    with AudioFile(file) as source:
        audio = recognizer.record(source)
    return recognizer.recognize_google(audio, key, language, show_all)
def recorder(name, duration=5, freq=44100):
    """Record `duration` seconds of stereo audio and save it to `name`."""
    samples = sd.rec(int(duration * freq), samplerate=freq, channels=2)
    sd.wait()
    write(name, samples, freq, sampwidth=2)
src/PlateRecognition.py | rohanabhishek/License-Plate-Recognition | 0 | 12759098 | <reponame>rohanabhishek/License-Plate-Recognition
"""Locate a license plate in a car photo, segment its characters, and read
them with a small fully-connected neural network (loaded from my_model.npy).

Usage: python PlateRecognition.py <image_path>
"""
import sys
import cv2
import numpy as np
import matplotlib.pyplot as plt
from skimage.transform import resize
from skimage import measure
from skimage.measure import regionprops
import matplotlib.patches as patches
from skimage.color import rgb2gray
from skimage.io import imread
from skimage.filters import threshold_otsu
import pytesseract
from PIL import Image
import imutils
from tasks import *

plate_like_objects = []
filename = sys.argv[1]
# Image converted to binary
car_image = imread(filename, as_gray=True)
print(car_image.shape)
# Scale the [0, 1] grayscale back to [0, 255] before thresholding.
gray_car_image = car_image*255
# thershold value obtained using Otsu's method
threshold_value = threshold_otsu(gray_car_image)
binary_car_image = gray_car_image > threshold_value
# get all the connected regions and group them together
label_image = measure.label(binary_car_image)
# constraints on maximum and minimum values on width, height
# (expressed as fractions of the full image size)
plate_dimensions = (0.04*label_image.shape[0], 0.5*label_image.shape[0], 0.2*label_image.shape[1], 0.6*label_image.shape[1])
min_height, max_height, min_width, max_width = plate_dimensions
plate_objects_cordinates = []
fig, (ax1) = plt.subplots(1)
ax1.imshow(gray_car_image, cmap="gray")
# regionprops creates a list of properties of all the labelled regions
for region in regionprops(label_image):
    if region.area < 50:
        #if the region is very small
        continue
    # the bounding box coordinates
    min_row, min_col, max_row, max_col = region.bbox
    region_height = max_row - min_row
    region_width = max_col - min_col
    # checking the conditions of a typical license plate
    if region_height >= min_height and region_height <= max_height and region_width >= min_width and region_width <= max_width and region_width > region_height:
        plate_like_objects.append(gray_car_image[min_row:max_row,
                                  min_col:max_col])
        plate_objects_cordinates.append((min_row, min_col,
                                         max_row, max_col))
        rectBorder = patches.Rectangle((min_col, min_row), max_col - min_col, max_row - min_row, edgecolor="red",
                                       linewidth=2, fill=False)
        # red rectangular border added
        ax1.add_patch(rectBorder)
        Cropped = gray_car_image[min_row:max_row, min_col:max_col]
        # Alternative OCR path kept for reference:
        # text = pytesseract.image_to_string(Cropped, config='--psm 11')
        # print("Predicted Number by pytessaract : ",text)
plt.show()

# Rebuild the character-recognition network (36 classes: 0-9 + A-Z) and load
# its saved weights/biases layer by layer from my_model.npy.
modelName = 'my_model.npy'
nn1 = nn.NeuralNetwork(36, 0.001, 200, 10)
nn1.addLayer(FullyConnectedLayer(400, 50, "relu"))
nn1.addLayer(FullyConnectedLayer(50, 36, "softmax"))
model = np.load(modelName,allow_pickle=True)
k,i = 0,0
for l in nn1.layers:
    # Pooling/flatten layers have no parameters, so they are skipped when
    # walking the saved (weights, biases) pairs.
    if type(l).__name__ != "AvgPoolingLayer" and type(l).__name__ != "FlattenLayer":
        nn1.layers[i].weights = model[k]
        nn1.layers[i].biases = model[k+1]
        k+=2
    i+=1
print("Model Loaded... ")

list_of_plates = []  # list of characters in all paltes
list_of_columns = []  # to re-order characters as they are in LP
for lp in plate_like_objects:
    # invert image
    license_plate = (255-lp)
    # reaply threshold on the extracted region
    threshold_value = threshold_otsu(license_plate)
    license_plate = license_plate > threshold_value
    labelled_plate = measure.label(license_plate)
    fig, ax1 = plt.subplots(1)
    license_plate = rgb2gray(license_plate)
    ax1.imshow(license_plate, cmap="gray")
    # character dimension constraints (fractions of the plate crop size)
    character_dimensions = (0.3*license_plate.shape[0], 1.0*license_plate.shape[0], 0.01*license_plate.shape[1], 0.6*license_plate.shape[1])
    min_height, max_height, min_width, max_width = character_dimensions
    characters = []
    column_list = []
    for regions in regionprops(labelled_plate):
        y0, x0, y1, x1 = regions.bbox
        region_height = y1 - y0
        region_width = x1 - x0
        if region_height > min_height and region_height < max_height and region_width > min_width and region_width < max_width:
            roi = license_plate[y0:y1, x0:x1]
            # draw a red bordered rectangle over the character.
            rect_border = patches.Rectangle((x0, y0), x1 - x0, y1 - y0, edgecolor="red",
                                            linewidth=2, fill=False)
            ax1.add_patch(rect_border)
            # resize the characters to 20X20 and then append each character into the characters list
            resized_char = Image.fromarray(roi).resize((20, 20))
            characters.append(resized_char)
            # to keep track of the arrangement of the characters(based on x-coordinate)
            column_list.append(x0)
    list_of_plates.append(characters)
    list_of_columns.append(column_list)
    plt.show()

list_of_numbers = []
for i in range(len(list_of_plates)):
    characters = list_of_plates[i]
    plate_num = []
    for resized_char in characters:
        roi = np.array(resized_char)
        # reshape to an array as one input (20x20 -> 400 features)
        roi = roi.reshape((1,400))
        # predict result using neural network
        valActivations = nn1.feedforward(roi)
        # get the class with highest prediction
        pred = np.argmax(valActivations[-1], axis=1)
        # check with threshold to remove non-characters
        if(valActivations[-1][0][pred]<0.5):
            plate_num.append('')
            continue
        # classes 0-9 are digits, 10-35 map to 'A'-'Z'
        if(pred<10):
            plate_num.append(str(pred[0]))
        else:
            plate_num.append(str(chr(65+pred[0]-10)))
    column = np.array(list_of_columns[i])
    # sort characters as they are in LP
    sort_idx = np.argsort(column)
    plate_num = np.array(plate_num)[sort_idx]
    # output licence plate number
    plate_num = "".join(plate_num)
    list_of_numbers.append(plate_num)
print('Predictions - ',end=' ')
print(list_of_numbers)
# Heuristic: the longest recognized string is taken as the final plate.
final_num = sorted(list_of_numbers, key=len)
print('Final Licence plate - ' + final_num[-1])
pycordia/interactions/components.py | classPythonAddike/pycordia | 23 | 12759099 | from pycordia import errors, utils
import typing
import enum
class ComponentType(enum.Enum):
    """Integer discriminators for Discord message component types
    (the ``type`` field of a component payload)."""
    action_row = 1
    button = 2
    select_menu = 3


class ButtonStyles(enum.Enum):
    """Visual styles for a button; ``link`` renders a URL-opening button."""
    primary = 1
    secondary = 2
    success = 3
    danger = 4
    link = 5


class SelectMenuOption:
    """One selectable choice inside a select menu.

    :param label: Text shown to the user.
    :param value: Value sent back in the interaction payload.
    :param description: Optional longer description shown under the label.
    :param emoji: Optional partial-emoji dict displayed next to the label.
    :param default: Whether this option is pre-selected.
    """

    def __init__(self, *, label: str, value: str,
        description: typing.Optional[str] = None,
        emoji: typing.Optional[dict] = None,
        default: bool = False
    ) -> None:
        self.label = label
        self.value = value
        self.description = description
        self.emoji = emoji
        self.default = default

    @classmethod
    def from_json(cls, data: dict):
        """Build an option from a raw payload dict."""
        return utils.obj_from_dict(data, cls)

    def to_json(self):
        """Serialize the option back to a payload dict."""
        return utils.obj_to_dict(self)


class SelectMenu:
    """A select menu component.

    NOTE(review): the original ``__init__`` discarded all of its
    arguments (its body was ``pass``); it now stores them, mirroring
    the sibling component classes.
    """

    def __init__(self, *, custom_id: str, placeholder: typing.Optional[str] = None,
        min_values: int = 1, max_values: int = 1, disabled: bool = False
    ) -> None:
        self.custom_id = custom_id
        self.placeholder = placeholder
        self.min_values = min_values
        self.max_values = max_values
        self.disabled = disabled


class Button:
    """A clickable button component.

    Exactly one of ``custom_id`` (regular button) or ``url``
    (link-style button) must be provided.
    """

    def __init__(self, *, custom_id: typing.Optional[str] = None, disabled: bool = False,
        style: ButtonStyles, label: str, emoji=None, url: typing.Optional[str] = None
    ) -> None:
        self.__on_click_func = None

        self.custom_id = custom_id
        self.disabled = disabled
        self.style = style
        self.label = label
        self.emoji = emoji
        self.url = url

        self.__verify_component()

    @classmethod
    def from_json(cls, data: dict):
        """Build a button from a raw payload dict."""
        obj = utils.obj_from_dict(data, cls)
        # Bug fix: the raw ``style`` integer maps to ButtonStyles (values
        # 1-5), not ComponentType (values 1-3, so styles 4/5 would raise).
        obj.style = ButtonStyles(obj.style)
        return obj

    def to_json(self):
        """Serialize the button back to a payload dict."""
        obj = utils.obj_to_dict(self)
        obj["type"] = ComponentType.button.value
        obj["style"] = obj["style"].value
        return obj

    def __verify_component(self):
        """Validate the custom_id/url combination.

        Bug fix: the original second branch raised for *any* missing
        custom_id, wrongly rejecting valid link buttons (url set,
        no custom_id).
        """
        if self.url and self.custom_id:
            raise errors.ComponentError(
                "A link button cannot have a custom ID."
            )
        elif not self.url and not self.custom_id:
            raise errors.ComponentError(
                "Non-link buttons must contain a custom ID"
            )

    def on_click(self, fun):
        """Register ``fun`` as this button's click callback."""
        self.__on_click_func = fun

        def wrapper():
            fun()
        return wrapper
# class ActionRow:
# def __init__(self, *components: typing.List[typing.Union[SelectMenu, Button]]):
# self.__verify_components(components)
# self.component_type = ComponentType.action_row
# self.__components = [*components]
# @classmethod
# def from_list(cls, data: list):
# comps = []
# for elem in data:
# comp_type = int(elem["type"])
# if comp_type == ComponentType.action_row.value:
# raise errors.ComponentError(
# "An ActionRow cannot contain another ActionRow"
# )
# elif comp_type == ComponentType.button.value:
# comps.append(Button(**elem))
# elif comp_type == ComponentType.
# return ActionRow(*comps)
# @property
# def components(self):
# return self.__components
# def __verify_components(self, components):
# for comp in components:
# if isinstance(comp, ActionRow):
# raise errors.ComponentError(
# "An ActionRow cannot contain another ActionRow"
# ) | 2.578125 | 3 |
example/re_extract.py | alex/httpfuzz | 12 | 12759100 | import sre_parse
import re
import pytest
def extract_literals(r):
    """Return every literal string fragment found in compiled pattern *r*."""
    found = []
    extract_literals_from_ops(sre_parse.parse(r.pattern), found)
    return found
def extract_literals_from_ops(ops, results):
    """Walk a parsed regex op list, appending every literal run to *results*.

    Recurses into branches, groups, repeats, and lookaround assertions.
    Consecutive LITERAL ops are joined into a single string.

    :param ops: op list as produced by :func:`sre_parse.parse`
    :param results: list mutated in place (also returned for convenience)
    """
    i = 0
    while i < len(ops):
        op, val = ops[i]
        if op == sre_parse.LITERAL:
            # Collect the maximal run of consecutive literal characters.
            start_i = i
            while i < len(ops) and ops[i][0] == sre_parse.LITERAL:
                i += 1
            results.append("".join(chr(c) for _, c in ops[start_i:i]))
            continue  # i already points past the literal run
        elif op == sre_parse.BRANCH:
            _, branches = val
            for branch in branches:
                extract_literals_from_ops(branch, results)
        elif op == sre_parse.SUBPATTERN:
            _, _, _, sub_ops = val
            extract_literals_from_ops(sub_ops, results)
        elif op == sre_parse.MAX_REPEAT or op == sre_parse.MIN_REPEAT:
            # Bug fix: MIN_REPEAT (non-greedy repeats such as ``(?:abc)+?``)
            # was previously ignored, silently losing its literals.
            _, _, sub_ops = val
            extract_literals_from_ops(sub_ops, results)
        elif op == sre_parse.ASSERT or op == sre_parse.ASSERT_NOT:
            _, sub_ops = val
            extract_literals_from_ops(sub_ops, results)
        i += 1
    return results
@pytest.mark.parametrize(
    ("r", "expected"),
    [
        # anchors carry no literals of their own
        (r"^abc$", ["abc"]),
        # top-level alternation: both branches extracted in order
        (r"abc|def", ["abc", "def"]),
        # non-literal branch (\d+) contributes nothing
        (r"(abc|\d+)", ["abc"]),
        # greedy repeats around a group
        (r"(?:abc){3,}", ["abc"]),
        (r"(?:abc){,3}", ["abc"]),
        # lookahead / lookbehind assertions, positive and negative
        (r"(?=abc)", ["abc"]),
        (r"(?!abc)", ["abc"]),
        (r"(?<=abc)", ["abc"]),
        (r"(?<!abc)", ["abc"]),
    ]
)
def test_extract_literals(r, expected):
    """Literal fragments of each pattern are extracted in source order."""
    actual = extract_literals(re.compile(r))
    assert actual == expected
healthier/entries/consumption.py | muatik/healthier | 9 | 12759101 | import json
from entries.fcd import FCD
from entries.models import Nutrient, Recipe
def calculate_consumption(ndbno, measure, quantity):
    """Compute nutrient intake for *quantity* units of *measure* of food *ndbno*.

    Looks up the food's nutrients via FCD and scales each matching,
    non-zero measure value by *quantity*.

    :return: list of intake dicts (category "i") ready for persistence
    """
    consumed = []
    for nutrient in FCD.get_nutrients(ndbno):
        for candidate in nutrient["measures"]:
            # skip measures with a different label or a zero value
            if candidate["label"] != measure or candidate["value"] == 0:
                continue
            consumed.append({
                "category": "i",
                "label": nutrient["name"],
                "unit": nutrient["unit"],
                "quantity": float(candidate["value"]) * quantity,
            })
    return consumed
def insert_food_consumption(entry, data):
    """Persist *entry* and its per-nutrient intake rows for a food consumption.

    :param entry: entry model instance, already populated with measure/quantity
    :param data: request payload; must contain the food's ``ndbno``
    """
    # Bug fix: the original first assigned a plain dict to ``entry.extra``
    # and then immediately overwrote it with the JSON string -- the first
    # assignment was dead code and has been removed.
    entry.extra = json.dumps({"ndbno": data["ndbno"]})
    entry.save()
    nutrients = calculate_consumption(
        data["ndbno"], entry.measure, entry.quantity)
    for nutrient_data in nutrients:
        try:
            nutrient = Nutrient(**nutrient_data)
            nutrient.entry = entry
            nutrient.save()
        except Exception:
            # Best-effort: skip nutrient rows that fail to persist.
            # NOTE(review): this silently drops failures -- consider logging.
            pass
def insert_recipe(entry, data):
    """Expand a recipe entry into scaled per-nutrient rows.

    Every nutrient of every ingredient is multiplied by the entry's
    quantity and saved against *entry*.
    """
    recipe = Recipe.objects.get(id=data["id"])
    for ingredient in recipe.recipeingredient_set.all():
        for nutrient_data in ingredient.getNutrients():
            row = Nutrient(**nutrient_data)
            row.quantity = row.quantity * entry.quantity
            row.entry = entry
            row.save()
def insert_nutrients(entry, data):
    """Dispatch nutrient insertion based on the entry's category and payload.

    Consumption ("c") entries are either recipes (have ``id`` but no
    ``ndbno``) or foods (have ``ndbno``); anything else is an activity.
    """
    is_consumption = data["category"] == "c"
    if is_consumption and "ndbno" not in data and "id" in data:
        insert_recipe(entry, data)
    elif is_consumption and "ndbno" in data:
        entry.insert_food_nutrients(data)
    else:
        entry.insert_activity_nutrients()
Day05/py_password_generator.py | MHKomeili/100DaysofCode | 0 | 12759102 | import random
letters = ["A", "B", "C", "D", "E", "F", "G", "H", "I", "J", "K", "L",
           "M", "N", "O", "P", "Q", "R", "S", "T", "U", "V", "W", "X", "Y", "Z"]
# NOTE(review): "0" is absent from the digit pool -- presumably to avoid
# confusion with the letter "O"; confirm this is intentional.
numbers = ["1", "2", "3", "4", "5", "6", "7", "8", "9"]
symbols = ["@", "#", "%", "&", "*", "?", "-", "_"]


def generate_password(nr_letters, nr_numbers, nr_symbols):
    """Return a random password with the requested character counts.

    :param nr_letters: number of uppercase letters to include
    :param nr_numbers: number of digits (1-9) to include
    :param nr_symbols: number of symbol characters to include
    :return: shuffled password string of length nr_letters+nr_numbers+nr_symbols
    """
    chosen_chars = (
        [random.choice(letters) for _ in range(nr_letters)]
        + [random.choice(numbers) for _ in range(nr_numbers)]
        + [random.choice(symbols) for _ in range(nr_symbols)]
    )
    # shuffle so the character classes are interleaved, not grouped
    random.shuffle(chosen_chars)
    return ''.join(chosen_chars)


if __name__ == "__main__":
    # Guarded so importing this module does not prompt for input.
    print("Welcome to PyPasswordGenerator")
    nr_letters = int(input('How many letters would you like in your password?\n'))
    nr_numbers = int(input('How many numbers would you like in your password?\n'))
    nr_symbols = int(input('How many symbols would you like in your password?\n'))
    password = generate_password(nr_letters, nr_numbers, nr_symbols)
    print(f"Here is your password: \t{password} \nkeep it safe.")
php_proxy/setup.py | jt6562/XX-Net | 2 | 12759103 | <filename>php_proxy/setup.py<gh_stars>1-10
#!/usr/bin/env python
# Use the print() function so this runs under both Python 2 and 3
# (the original bare ``print`` statement is a SyntaxError on Python 3).
print("setup ok!")
| 1.132813 | 1 |
oop/oop_002_myclass.py | xmark2/practice | 0 | 12759104 | <gh_stars>0
class MyClass(object):
    """Minimal per-instance value holder with explicit accessor methods."""

    def set_val(self, val):
        """Remember *val* on this instance."""
        self.val = val

    def get_val(self):
        """Return the value previously stored via :meth:`set_val`."""
        return self.val
# Demonstrate that each instance keeps its own independent state.
first = MyClass()
second = MyClass()
first.set_val(10)
second.set_val(100)
print(first.get_val())
print(second.get_val())
pybarst/core/__init__.py | matham/pybarst | 2 | 12759105 | <reponame>matham/pybarst<filename>pybarst/core/__init__.py
__all__ = ('join', 'default_server_timeout', 'BarstException')
from pybarst.core.exception import BarstException
# Default connection timeout for client pipes (presumably milliseconds --
# confirm against the Barst server API).
default_server_timeout = 2000
'''
The default time a pipe waits when trying to open a connection to the
server before returning a timeout error.
'''
def join(*args):
    """Return *args* concatenated with ``:`` separators.

    Used to derive a sub-channel's pipe name from the base pipe name and
    sub-channel numbers; mostly for internal use rather than by callers.
    """
    return ":".join(args)
sightengine/client.py | Tiffanyzhao12345/client-python | 18 | 12759106 | # -*- coding: utf-8 -*-
"""
Copyright (c) 2017 Sightengine
http://sightengine.com/
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""
import requests, json, os
import sightengine
from .check import Check
# SDK version string, reported to the API via the User-Agent header below.
VERSION = sightengine.__version__

# Default request headers with a library-identifying User-Agent so the
# API can attribute traffic to this SDK version.
headers = requests.utils.default_headers()
headers.update(
    {
        'User-Agent': 'SE-SDK-Python ' + VERSION,
    }
)
class SightengineClient(object):
    """Client for the Sightengine image/video moderation API."""

    # NOTE(review): class-level mutable dict is shared by all instances and
    # is never used in this file -- confirm before removing.
    modelVersions = {}

    def __init__(self, api_user, api_secret):
        """
        :param api_user: API user id issued by Sightengine.
        :param api_secret: API secret issued by Sightengine.
        """
        self.api_user = api_user
        self.api_secret = api_secret
        self.endpoint = 'https://api.sightengine.com/'

    def feedback(self, model, modelClass, image):
        """Submit model feedback for *image* (remote URL or local file path).

        :param model: model version identifier (required).
        :param modelClass: class label the feedback applies to.
        :param image: http(s) URL, or a local file path to upload.
        :return: decoded JSON response from the API.
        :raises Exception: if *model* is falsy.
        """
        if not model:
            # Bug fix: the original concatenated the falsy ``model`` into the
            # message, which raised TypeError for None instead of this error.
            raise Exception('Please provide the version of the model')
        if image.lower().startswith(('http://', 'https://')):
            url = self.endpoint + '1.0/feedback.json'
            r = requests.get(url, params={'model': model, 'class': modelClass, 'url': image, 'api_user': self.api_user, 'api_secret': self.api_secret}, headers=headers)
        else:
            url = self.endpoint + '1.0/feedback.json'
            # Bug fix: close the uploaded file (the original leaked the handle).
            with open(image, 'rb') as media:
                r = requests.post(url, files={'media': media}, data={'model': model, 'class': modelClass, 'api_user': self.api_user, 'api_secret': self.api_secret}, headers=headers)
        output = json.loads(r.text)
        return output

    def check(self, *args):
        """Return a :class:`Check` request builder for the given models."""
        return Check(self.api_user, self.api_secret, *args)
| 1.84375 | 2 |
composer/core/serializable.py | murthyn/composer | 0 | 12759107 | <gh_stars>0
# Copyright 2021 MosaicML. All Rights Reserved.
"""Serialization interface used by checkpointing."""
from __future__ import annotations
from typing import Any, Dict
__all__ = ["Serializable"]
class Serializable:
    """Interface for serialization; used by checkpointing."""

    def state_dict(self) -> Dict[str, Any]:
        """Return a dictionary representing the internal state.

        The returned dictionary must be picklable via :func:`torch.save`.

        Returns:
            Dict[str, Any]: The state of the object (empty by default).
        """
        return dict()

    def load_state_dict(self, state: Dict[str, Any]) -> None:
        """Restore the object's state.

        Args:
            state (Dict[str, Any]): State as previously returned by
                :meth:`.state_dict`. The default implementation is a no-op.
        """
| 2.421875 | 2 |
Chapter04/testscript3.py | PacktPublishing/Practical-Network-Automation | 20 | 12759108 | <reponame>PacktPublishing/Practical-Network-Automation<filename>Chapter04/testscript3.py
import cgi
def build_table(value):
    """Return the HTML multiplication table (x1 through x10) for *value*.

    Output format matches the original inline script: a bordered table
    with a header row followed by ten product rows.
    """
    parts = ["<table border='1' style='border-collapse: collapse'><tr><th>Table for "
             + str(value) + "</th></tr>"]
    for multiplier in range(1, 11):
        product = value * multiplier
        parts.append(
            "<tr><td>" + str(value) + "</td><td>*</td><td>" + str(multiplier)
            + "</td><td>=</td><td><font color='blue'><b>" + str(product)
            + "</b></font></td></tr>"
        )
    parts.append("</table>")
    return "".join(parts)


if __name__ == "__main__":
    # CGI entry point: read the ``number`` query parameter and emit the page.
    # Guarded so importing this module outside a CGI context does not crash
    # on the missing parameter.
    form = cgi.FieldStorage()
    value = int(form.getvalue('number'))
    print('Content-Type: text/html')
    print('')
    print(build_table(value))
| 3 | 3 |
src/oci/core/models/public_ip.py | LaudateCorpus1/oci-python-sdk | 0 | 12759109 | <gh_stars>0
# coding: utf-8
# Copyright (c) 2016, 2022, Oracle and/or its affiliates. All rights reserved.
# This software is dual-licensed to you under the Universal Permissive License (UPL) 1.0 as shown at https://oss.oracle.com/licenses/upl or Apache License 2.0 as shown at http://www.apache.org/licenses/LICENSE-2.0. You may choose either license.
from oci.util import formatted_flat_dict, NONE_SENTINEL, value_allowed_none_or_none_sentinel # noqa: F401
from oci.decorators import init_model_state_from_kwargs
@init_model_state_from_kwargs
class PublicIp(object):
"""
A *public IP* is a conceptual term that refers to a public IP address and related properties.
The `publicIp` object is the API representation of a public IP.
There are two types of public IPs:
1. Ephemeral
2. Reserved
For more information and comparison of the two types,
see `Public IP Addresses`__.
__ https://docs.cloud.oracle.com/iaas/Content/Network/Tasks/managingpublicIPs.htm
"""
#: A constant which can be used with the assigned_entity_type property of a PublicIp.
#: This constant has a value of "PRIVATE_IP"
ASSIGNED_ENTITY_TYPE_PRIVATE_IP = "PRIVATE_IP"
#: A constant which can be used with the assigned_entity_type property of a PublicIp.
#: This constant has a value of "NAT_GATEWAY"
ASSIGNED_ENTITY_TYPE_NAT_GATEWAY = "NAT_GATEWAY"
#: A constant which can be used with the lifecycle_state property of a PublicIp.
#: This constant has a value of "PROVISIONING"
LIFECYCLE_STATE_PROVISIONING = "PROVISIONING"
#: A constant which can be used with the lifecycle_state property of a PublicIp.
#: This constant has a value of "AVAILABLE"
LIFECYCLE_STATE_AVAILABLE = "AVAILABLE"
#: A constant which can be used with the lifecycle_state property of a PublicIp.
#: This constant has a value of "ASSIGNING"
LIFECYCLE_STATE_ASSIGNING = "ASSIGNING"
#: A constant which can be used with the lifecycle_state property of a PublicIp.
#: This constant has a value of "ASSIGNED"
LIFECYCLE_STATE_ASSIGNED = "ASSIGNED"
#: A constant which can be used with the lifecycle_state property of a PublicIp.
#: This constant has a value of "UNASSIGNING"
LIFECYCLE_STATE_UNASSIGNING = "UNASSIGNING"
#: A constant which can be used with the lifecycle_state property of a PublicIp.
#: This constant has a value of "UNASSIGNED"
LIFECYCLE_STATE_UNASSIGNED = "UNASSIGNED"
#: A constant which can be used with the lifecycle_state property of a PublicIp.
#: This constant has a value of "TERMINATING"
LIFECYCLE_STATE_TERMINATING = "TERMINATING"
#: A constant which can be used with the lifecycle_state property of a PublicIp.
#: This constant has a value of "TERMINATED"
LIFECYCLE_STATE_TERMINATED = "TERMINATED"
#: A constant which can be used with the lifetime property of a PublicIp.
#: This constant has a value of "EPHEMERAL"
LIFETIME_EPHEMERAL = "EPHEMERAL"
#: A constant which can be used with the lifetime property of a PublicIp.
#: This constant has a value of "RESERVED"
LIFETIME_RESERVED = "RESERVED"
#: A constant which can be used with the scope property of a PublicIp.
#: This constant has a value of "REGION"
SCOPE_REGION = "REGION"
#: A constant which can be used with the scope property of a PublicIp.
#: This constant has a value of "AVAILABILITY_DOMAIN"
SCOPE_AVAILABILITY_DOMAIN = "AVAILABILITY_DOMAIN"
def __init__(self, **kwargs):
"""
Initializes a new PublicIp object with values from keyword arguments.
The following keyword arguments are supported (corresponding to the getters/setters of this class):
:param assigned_entity_id:
The value to assign to the assigned_entity_id property of this PublicIp.
:type assigned_entity_id: str
:param assigned_entity_type:
The value to assign to the assigned_entity_type property of this PublicIp.
Allowed values for this property are: "PRIVATE_IP", "NAT_GATEWAY", 'UNKNOWN_ENUM_VALUE'.
Any unrecognized values returned by a service will be mapped to 'UNKNOWN_ENUM_VALUE'.
:type assigned_entity_type: str
:param availability_domain:
The value to assign to the availability_domain property of this PublicIp.
:type availability_domain: str
:param compartment_id:
The value to assign to the compartment_id property of this PublicIp.
:type compartment_id: str
:param defined_tags:
The value to assign to the defined_tags property of this PublicIp.
:type defined_tags: dict(str, dict(str, object))
:param display_name:
The value to assign to the display_name property of this PublicIp.
:type display_name: str
:param freeform_tags:
The value to assign to the freeform_tags property of this PublicIp.
:type freeform_tags: dict(str, str)
:param id:
The value to assign to the id property of this PublicIp.
:type id: str
:param ip_address:
The value to assign to the ip_address property of this PublicIp.
:type ip_address: str
:param lifecycle_state:
The value to assign to the lifecycle_state property of this PublicIp.
Allowed values for this property are: "PROVISIONING", "AVAILABLE", "ASSIGNING", "ASSIGNED", "UNASSIGNING", "UNASSIGNED", "TERMINATING", "TERMINATED", 'UNKNOWN_ENUM_VALUE'.
Any unrecognized values returned by a service will be mapped to 'UNKNOWN_ENUM_VALUE'.
:type lifecycle_state: str
:param lifetime:
The value to assign to the lifetime property of this PublicIp.
Allowed values for this property are: "EPHEMERAL", "RESERVED", 'UNKNOWN_ENUM_VALUE'.
Any unrecognized values returned by a service will be mapped to 'UNKNOWN_ENUM_VALUE'.
:type lifetime: str
:param private_ip_id:
The value to assign to the private_ip_id property of this PublicIp.
:type private_ip_id: str
:param scope:
The value to assign to the scope property of this PublicIp.
Allowed values for this property are: "REGION", "AVAILABILITY_DOMAIN", 'UNKNOWN_ENUM_VALUE'.
Any unrecognized values returned by a service will be mapped to 'UNKNOWN_ENUM_VALUE'.
:type scope: str
:param time_created:
The value to assign to the time_created property of this PublicIp.
:type time_created: datetime
:param public_ip_pool_id:
The value to assign to the public_ip_pool_id property of this PublicIp.
:type public_ip_pool_id: str
"""
self.swagger_types = {
'assigned_entity_id': 'str',
'assigned_entity_type': 'str',
'availability_domain': 'str',
'compartment_id': 'str',
'defined_tags': 'dict(str, dict(str, object))',
'display_name': 'str',
'freeform_tags': 'dict(str, str)',
'id': 'str',
'ip_address': 'str',
'lifecycle_state': 'str',
'lifetime': 'str',
'private_ip_id': 'str',
'scope': 'str',
'time_created': 'datetime',
'public_ip_pool_id': 'str'
}
self.attribute_map = {
'assigned_entity_id': 'assignedEntityId',
'assigned_entity_type': 'assignedEntityType',
'availability_domain': 'availabilityDomain',
'compartment_id': 'compartmentId',
'defined_tags': 'definedTags',
'display_name': 'displayName',
'freeform_tags': 'freeformTags',
'id': 'id',
'ip_address': 'ipAddress',
'lifecycle_state': 'lifecycleState',
'lifetime': 'lifetime',
'private_ip_id': 'privateIpId',
'scope': 'scope',
'time_created': 'timeCreated',
'public_ip_pool_id': 'publicIpPoolId'
}
self._assigned_entity_id = None
self._assigned_entity_type = None
self._availability_domain = None
self._compartment_id = None
self._defined_tags = None
self._display_name = None
self._freeform_tags = None
self._id = None
self._ip_address = None
self._lifecycle_state = None
self._lifetime = None
self._private_ip_id = None
self._scope = None
self._time_created = None
self._public_ip_pool_id = None
@property
def assigned_entity_id(self):
"""
Gets the assigned_entity_id of this PublicIp.
The `OCID`__ of the entity the public IP is assigned to, or in the process of
being assigned to.
__ https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm
:return: The assigned_entity_id of this PublicIp.
:rtype: str
"""
return self._assigned_entity_id
@assigned_entity_id.setter
def assigned_entity_id(self, assigned_entity_id):
"""
Sets the assigned_entity_id of this PublicIp.
The `OCID`__ of the entity the public IP is assigned to, or in the process of
being assigned to.
__ https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm
:param assigned_entity_id: The assigned_entity_id of this PublicIp.
:type: str
"""
self._assigned_entity_id = assigned_entity_id
@property
def assigned_entity_type(self):
"""
Gets the assigned_entity_type of this PublicIp.
The type of entity the public IP is assigned to, or in the process of being
assigned to.
Allowed values for this property are: "PRIVATE_IP", "NAT_GATEWAY", 'UNKNOWN_ENUM_VALUE'.
Any unrecognized values returned by a service will be mapped to 'UNKNOWN_ENUM_VALUE'.
:return: The assigned_entity_type of this PublicIp.
:rtype: str
"""
return self._assigned_entity_type
@assigned_entity_type.setter
def assigned_entity_type(self, assigned_entity_type):
"""
Sets the assigned_entity_type of this PublicIp.
The type of entity the public IP is assigned to, or in the process of being
assigned to.
:param assigned_entity_type: The assigned_entity_type of this PublicIp.
:type: str
"""
allowed_values = ["PRIVATE_IP", "NAT_GATEWAY"]
if not value_allowed_none_or_none_sentinel(assigned_entity_type, allowed_values):
assigned_entity_type = 'UNKNOWN_ENUM_VALUE'
self._assigned_entity_type = assigned_entity_type
@property
def availability_domain(self):
"""
Gets the availability_domain of this PublicIp.
The public IP's availability domain. This property is set only for ephemeral public IPs
that are assigned to a private IP (that is, when the `scope` of the public IP is set to
AVAILABILITY_DOMAIN). The value is the availability domain of the assigned private IP.
Example: `Uocm:PHX-AD-1`
:return: The availability_domain of this PublicIp.
:rtype: str
"""
return self._availability_domain
@availability_domain.setter
def availability_domain(self, availability_domain):
"""
Sets the availability_domain of this PublicIp.
The public IP's availability domain. This property is set only for ephemeral public IPs
that are assigned to a private IP (that is, when the `scope` of the public IP is set to
AVAILABILITY_DOMAIN). The value is the availability domain of the assigned private IP.
Example: `Uocm:PHX-AD-1`
:param availability_domain: The availability_domain of this PublicIp.
:type: str
"""
self._availability_domain = availability_domain
@property
def compartment_id(self):
"""
Gets the compartment_id of this PublicIp.
The `OCID`__ of the compartment containing the public IP. For an ephemeral public IP, this is
the compartment of its assigned entity (which can be a private IP or a regional entity such
as a NAT gateway). For a reserved public IP that is currently assigned,
its compartment can be different from the assigned private IP's.
__ https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm
:return: The compartment_id of this PublicIp.
:rtype: str
"""
return self._compartment_id
@compartment_id.setter
def compartment_id(self, compartment_id):
"""
Sets the compartment_id of this PublicIp.
The `OCID`__ of the compartment containing the public IP. For an ephemeral public IP, this is
the compartment of its assigned entity (which can be a private IP or a regional entity such
as a NAT gateway). For a reserved public IP that is currently assigned,
its compartment can be different from the assigned private IP's.
__ https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm
:param compartment_id: The compartment_id of this PublicIp.
:type: str
"""
self._compartment_id = compartment_id
@property
def defined_tags(self):
"""
Gets the defined_tags of this PublicIp.
Defined tags for this resource. Each key is predefined and scoped to a
namespace. For more information, see `Resource Tags`__.
Example: `{\"Operations\": {\"CostCenter\": \"42\"}}`
__ https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm
:return: The defined_tags of this PublicIp.
:rtype: dict(str, dict(str, object))
"""
return self._defined_tags
@defined_tags.setter
def defined_tags(self, defined_tags):
"""
Sets the defined_tags of this PublicIp.
Defined tags for this resource. Each key is predefined and scoped to a
namespace. For more information, see `Resource Tags`__.
Example: `{\"Operations\": {\"CostCenter\": \"42\"}}`
__ https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm
:param defined_tags: The defined_tags of this PublicIp.
:type: dict(str, dict(str, object))
"""
self._defined_tags = defined_tags
@property
def display_name(self):
"""
Gets the display_name of this PublicIp.
A user-friendly name. Does not have to be unique, and it's changeable.
Avoid entering confidential information.
:return: The display_name of this PublicIp.
:rtype: str
"""
return self._display_name
@display_name.setter
def display_name(self, display_name):
"""
Sets the display_name of this PublicIp.
A user-friendly name. Does not have to be unique, and it's changeable.
Avoid entering confidential information.
:param display_name: The display_name of this PublicIp.
:type: str
"""
self._display_name = display_name
@property
def freeform_tags(self):
"""
Gets the freeform_tags of this PublicIp.
Free-form tags for this resource. Each tag is a simple key-value pair with no
predefined name, type, or namespace. For more information, see `Resource Tags`__.
Example: `{\"Department\": \"Finance\"}`
__ https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm
:return: The freeform_tags of this PublicIp.
:rtype: dict(str, str)
"""
return self._freeform_tags
@freeform_tags.setter
def freeform_tags(self, freeform_tags):
"""
Sets the freeform_tags of this PublicIp.
Free-form tags for this resource. Each tag is a simple key-value pair with no
predefined name, type, or namespace. For more information, see `Resource Tags`__.
Example: `{\"Department\": \"Finance\"}`
__ https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm
:param freeform_tags: The freeform_tags of this PublicIp.
:type: dict(str, str)
"""
self._freeform_tags = freeform_tags
@property
def id(self):
"""
Gets the id of this PublicIp.
The public IP's Oracle ID (`OCID`__).
__ https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm
:return: The id of this PublicIp.
:rtype: str
"""
return self._id
@id.setter
def id(self, id):
"""
Sets the id of this PublicIp.
The public IP's Oracle ID (`OCID`__).
__ https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm
:param id: The id of this PublicIp.
:type: str
"""
self._id = id
@property
def ip_address(self):
"""
Gets the ip_address of this PublicIp.
The public IP address of the `publicIp` object.
Example: `203.0.113.2`
:return: The ip_address of this PublicIp.
:rtype: str
"""
return self._ip_address
@ip_address.setter
def ip_address(self, ip_address):
"""
Sets the ip_address of this PublicIp.
The public IP address of the `publicIp` object.
Example: `203.0.113.2`
:param ip_address: The ip_address of this PublicIp.
:type: str
"""
self._ip_address = ip_address
@property
def lifecycle_state(self):
"""
Gets the lifecycle_state of this PublicIp.
The public IP's current state.
Allowed values for this property are: "PROVISIONING", "AVAILABLE", "ASSIGNING", "ASSIGNED", "UNASSIGNING", "UNASSIGNED", "TERMINATING", "TERMINATED", 'UNKNOWN_ENUM_VALUE'.
Any unrecognized values returned by a service will be mapped to 'UNKNOWN_ENUM_VALUE'.
:return: The lifecycle_state of this PublicIp.
:rtype: str
"""
return self._lifecycle_state
@lifecycle_state.setter
def lifecycle_state(self, lifecycle_state):
"""
Sets the lifecycle_state of this PublicIp.
The public IP's current state.
:param lifecycle_state: The lifecycle_state of this PublicIp.
:type: str
"""
allowed_values = ["PROVISIONING", "AVAILABLE", "ASSIGNING", "ASSIGNED", "UNASSIGNING", "UNASSIGNED", "TERMINATING", "TERMINATED"]
if not value_allowed_none_or_none_sentinel(lifecycle_state, allowed_values):
lifecycle_state = 'UNKNOWN_ENUM_VALUE'
self._lifecycle_state = lifecycle_state
@property
def lifetime(self):
"""
Gets the lifetime of this PublicIp.
Defines when the public IP is deleted and released back to Oracle's public IP pool.
* `EPHEMERAL`: The lifetime is tied to the lifetime of its assigned entity. An ephemeral
public IP must always be assigned to an entity. If the assigned entity is a private IP,
the ephemeral public IP is automatically deleted when the private IP is deleted, when
the VNIC is terminated, or when the instance is terminated. If the assigned entity is a
:class:`NatGateway`, the ephemeral public IP is automatically
deleted when the NAT gateway is terminated.
* `RESERVED`: You control the public IP's lifetime. You can delete a reserved public IP
whenever you like. It does not need to be assigned to a private IP at all times.
For more information and comparison of the two types,
see `Public IP Addresses`__.
__ https://docs.cloud.oracle.com/iaas/Content/Network/Tasks/managingpublicIPs.htm
Allowed values for this property are: "EPHEMERAL", "RESERVED", 'UNKNOWN_ENUM_VALUE'.
Any unrecognized values returned by a service will be mapped to 'UNKNOWN_ENUM_VALUE'.
:return: The lifetime of this PublicIp.
:rtype: str
"""
return self._lifetime
@lifetime.setter
def lifetime(self, lifetime):
"""
Sets the lifetime of this PublicIp.
Defines when the public IP is deleted and released back to Oracle's public IP pool.
* `EPHEMERAL`: The lifetime is tied to the lifetime of its assigned entity. An ephemeral
public IP must always be assigned to an entity. If the assigned entity is a private IP,
the ephemeral public IP is automatically deleted when the private IP is deleted, when
the VNIC is terminated, or when the instance is terminated. If the assigned entity is a
:class:`NatGateway`, the ephemeral public IP is automatically
deleted when the NAT gateway is terminated.
* `RESERVED`: You control the public IP's lifetime. You can delete a reserved public IP
whenever you like. It does not need to be assigned to a private IP at all times.
For more information and comparison of the two types,
see `Public IP Addresses`__.
__ https://docs.cloud.oracle.com/iaas/Content/Network/Tasks/managingpublicIPs.htm
:param lifetime: The lifetime of this PublicIp.
:type: str
"""
allowed_values = ["EPHEMERAL", "RESERVED"]
if not value_allowed_none_or_none_sentinel(lifetime, allowed_values):
lifetime = 'UNKNOWN_ENUM_VALUE'
self._lifetime = lifetime
@property
def private_ip_id(self):
"""
Gets the private_ip_id of this PublicIp.
Deprecated. Use `assignedEntityId` instead.
The `OCID`__ of the private IP that the public IP is currently assigned to, or in the
process of being assigned to.
**Note:** This is `null` if the public IP is not assigned to a private IP, or is
in the process of being assigned to one.
__ https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm
:return: The private_ip_id of this PublicIp.
:rtype: str
"""
return self._private_ip_id
@private_ip_id.setter
def private_ip_id(self, private_ip_id):
"""
Sets the private_ip_id of this PublicIp.
Deprecated. Use `assignedEntityId` instead.
The `OCID`__ of the private IP that the public IP is currently assigned to, or in the
process of being assigned to.
**Note:** This is `null` if the public IP is not assigned to a private IP, or is
in the process of being assigned to one.
__ https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm
:param private_ip_id: The private_ip_id of this PublicIp.
:type: str
"""
self._private_ip_id = private_ip_id
@property
def scope(self):
"""
Gets the scope of this PublicIp.
Whether the public IP is regional or specific to a particular availability domain.
* `REGION`: The public IP exists within a region and is assigned to a regional entity
(such as a :class:`NatGateway`), or can be assigned to a private
IP in any availability domain in the region. Reserved public IPs and ephemeral public IPs
assigned to a regional entity have `scope` = `REGION`.
* `AVAILABILITY_DOMAIN`: The public IP exists within the availability domain of the entity
it's assigned to, which is specified by the `availabilityDomain` property of the public IP object.
Ephemeral public IPs that are assigned to private IPs have `scope` = `AVAILABILITY_DOMAIN`.
Allowed values for this property are: "REGION", "AVAILABILITY_DOMAIN", 'UNKNOWN_ENUM_VALUE'.
Any unrecognized values returned by a service will be mapped to 'UNKNOWN_ENUM_VALUE'.
:return: The scope of this PublicIp.
:rtype: str
"""
return self._scope
@scope.setter
def scope(self, scope):
    """
    Sets the scope of this PublicIp.

    Whether the public IP is regional (`REGION`) or bound to a single
    availability domain (`AVAILABILITY_DOMAIN`); see the getter for the
    full semantics of each value.  Values outside the allowed set are
    coerced to 'UNKNOWN_ENUM_VALUE'.

    :param scope: The scope of this PublicIp.
    :type: str
    """
    valid_scopes = ["REGION", "AVAILABILITY_DOMAIN"]
    if value_allowed_none_or_none_sentinel(scope, valid_scopes):
        self._scope = scope
    else:
        self._scope = 'UNKNOWN_ENUM_VALUE'
@property
def time_created(self):
    """
    Gets the time_created of this PublicIp.

    The date and time the public IP was created, in `RFC3339`__ format,
    e.g. `2016-08-25T21:10:29.600Z`.

    __ https://tools.ietf.org/html/rfc3339

    :return: The time_created of this PublicIp.
    :rtype: datetime
    """
    return self._time_created
@time_created.setter
def time_created(self, time_created):
    """
    Sets the time_created of this PublicIp.

    The creation timestamp of the public IP, in `RFC3339`__ format,
    e.g. `2016-08-25T21:10:29.600Z`.

    __ https://tools.ietf.org/html/rfc3339

    :param time_created: The time_created of this PublicIp.
    :type: datetime
    """
    self._time_created = time_created
@property
def public_ip_pool_id(self):
    """
    Gets the public_ip_pool_id of this PublicIp.

    The `OCID`__ of the public IP pool object created in the current
    tenancy.

    __ https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm

    :return: The public_ip_pool_id of this PublicIp.
    :rtype: str
    """
    return self._public_ip_pool_id
@public_ip_pool_id.setter
def public_ip_pool_id(self, public_ip_pool_id):
    """
    Sets the public_ip_pool_id of this PublicIp.

    The `OCID`__ of the public IP pool object created in the current
    tenancy.

    __ https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm

    :param public_ip_pool_id: The public_ip_pool_id of this PublicIp.
    :type: str
    """
    self._public_ip_pool_id = public_ip_pool_id
def __repr__(self):
    # Render the model as a flat dict of its attributes (SDK helper).
    return formatted_flat_dict(self)
def __eq__(self, other):
if other is None:
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
return not self == other
| 1.742188 | 2 |
from os import listdir
from os.path import isdir, exists
from shutil import copyfile, copytree, rmtree
from tkinter import Tk, filedialog


def _choose_target():
    """Prompt for the world's 'datapacks' directory.

    Returns the chosen directory path, or None if the user quits.
    """
    target = ""
    while target == "":
        mode = input("First, enter 1 to manually enter the path, or enter 2 to use a gui, or q to quit:")
        if mode == "1":
            print("Enter the full path to the directory where you wish to install datapacks (you don't need quote marks around multi-word folders, nor do you need backquoted spaces)")
            print("Example: /Users/xxxx/Library/Application Support/minecraft/saves/WORLD/datapacks")
            target = input("Enter path: ")
            # Reject paths that are not existing directories and re-prompt.
            if not isdir(target):
                target = ""
        if mode == "2":
            # askdirectory() returns "" on cancel, so the loop re-prompts.
            target = filedialog.askdirectory()
        if mode == "q":
            return None
    return target


def main():
    """Interactively install datapacks from ./datapacks into a world."""
    packs = listdir("./datapacks")
    Tk().withdraw()  # hide the implicit root window; only the dialog is needed
    print("Welcome to Matt's Minecraft Datapack install utility")
    print("First, choose the 'datapacks' folder of the world where you want to install datapacks")
    target = _choose_target()
    if target is None:
        # Bug fix: the old code fell through here and printed
        # "Data packs will be installed to NO" even though the user quit.
        return
    print("Data packs will be installed to " + target)
    while True:
        # todo: just enter the world name, this finds the proper directory
        # print("(minecraft/saves/WORLD/datapacks)")
        print("Available datapacks:")
        for datapack in packs:
            print("* " + datapack)
        choice = input("Select a datapack to install in this world, or type 'exit' to quit: ")
        if choice == "exit":
            break
        if choice in packs:
            copy_to = target + "/" + choice
            if exists(copy_to):
                print("This datapack is already installed, so I'll delete it and reinstall")
                rmtree(copy_to)
            copytree("./datapacks/" + choice, copy_to)
        else:
            print("'" + choice + "' is not in the list of datapacks")


if __name__ == "__main__":
    main()
test/protocols_flavor.py | cihangir/vitess | 0 | 12759111 | #!/usr/bin/env python
import logging
import os
class ProtocolsFlavor(object):
    """Describes the set of RPC protocols a test run should use.

    Subclasses override these hooks to select concrete protocols; this
    base class supplies neutral defaults.
    """

    def binlog_player_protocol_flags(self):
        """Flags that select the binlog player protocol (none by default)."""
        return []

    def vtctl_client_protocol(self):
        """Protocol for vtctl connections; must be supported in python and go."""
        return ""

    def tablet_manager_protocol_flags(self):
        """Flags that select the tablet manager protocol."""
        return ['-tablet_manager_protocol', 'bson']

    def tabletconn_protocol_flags(self):
        """Flags that select the query service protocol."""
        return ['-tablet_protocol', 'gorpc']

    def rpc_timeout_message(self):
        """Error message the protocol uses to indicate a timeout."""
        raise NotImplementedError('Implementations need to overwrite this')
class GoRpcProtocolsFlavor(ProtocolsFlavor):
    """Protocol selection that uses go rpc for every component."""

    def binlog_player_protocol_flags(self):
        """Binlog player speaks gorpc."""
        return ['-binlog_player_protocol', 'gorpc']

    def vtctl_client_protocol(self):
        """vtctl connections use gorpc."""
        return 'gorpc'

    def tablet_manager_protocol_flags(self):
        """Tablet manager uses the bson protocol."""
        return ['-tablet_manager_protocol', 'bson']

    def tabletconn_protocol_flags(self):
        """Query service uses gorpc."""
        return ['-tablet_protocol', 'gorpc']

    def rpc_timeout_message(self):
        """Substring gorpc emits on RPC timeouts."""
        return 'timeout waiting for'
# Registry of selectable protocol flavors, keyed by command-line name.
__knows_protocols_flavor_map = {
    'gorpc': GoRpcProtocolsFlavor,
}

# The flavor instance selected for this test run (see set_protocols_flavor).
__protocols_flavor = None


def protocols_flavor():
    """Return the currently selected ProtocolsFlavor instance (or None)."""
    return __protocols_flavor


def set_protocols_flavor(flavor):
    """Instantiate and install the flavor named *flavor* ('gorpc' if empty).

    Logs an error and exits the process when the name is unknown.
    """
    global __protocols_flavor

    flavor = flavor or 'gorpc'
    klass = __knows_protocols_flavor_map.get(flavor)
    if klass is None:
        logging.error('Unknown protocols flavor %s', flavor)
        exit(1)
    __protocols_flavor = klass()

    logging.debug('Using protocols flavor %s', flavor)
| 2.359375 | 2 |
"""
@<NAME> (https://linktr.ee/pedrosantanaabreu)
@Icev (https://somosicev.com)

Fuel-consumption exercise: given the time spent (hours) and the average
speed (km/h) of a car that does 12 km per litre, compute the distance
travelled (DISTANCE = TIME * SPEED) and the litres of fuel used
(LITRES = DISTANCE / 12), then report the average speed, trip time,
distance travelled and fuel consumed.
"""

# Read the trip parameters (prompts kept in Portuguese, as presented to the user).
trip_hours = float(input("Digite o tempo gasto em horas | "))
avg_speed = float(input("Digite a velocidade média em Km/h | "))

# Distance covered and fuel burned at 12 km per litre.
distance_km = trip_hours * avg_speed
litres_used = distance_km / 12

# Report the results (same lines and formatting as before).
print(f"\nVelocidade média | {avg_speed:.2f} Km/h")
print(f"Tempo gasto na viagem | {trip_hours:.1f} Hrs")
print(f"Distância percorrida | {distance_km:.2f} Km")
print(f"Quantidade de litros utilizada na viagem | {litres_used:.1f} L")
test/test_k_everywhere.py | ivanlyon/exercises | 0 | 12759113 | import unittest
from kattis import k_everywhere
###############################################################################
class SampleInput(unittest.TestCase):
    """Checks k_everywhere.uniques against the problem statement samples."""

    def test_sample_input_1(self):
        """Sample 1: seven entries covering four distinct destinations."""
        entries = [
            'saskatoon',
            'toronto',
            'winnipeg',
            'toronto',
            'vancouver',
            'saskatoon',
            'toronto',
        ]
        self.assertEqual(k_everywhere.uniques(entries), 4)

    def test_sample_input_2(self):
        """Sample 2: a single destination repeated three times."""
        entries = ['edmonton'] * 3
        self.assertEqual(k_everywhere.uniques(entries), 1)
###############################################################################
# Allow running this test module directly from the command line.
if __name__ == '__main__':
    unittest.main()
| 3.4375 | 3 |
supriya/utils.py | butayama/supriya | 0 | 12759114 | <reponame>butayama/supriya
"""
Utility functions.
These will be migrated out into a base package at some point.
"""
import importlib
import itertools
import pathlib
from collections.abc import Iterable
def locate(path: str) -> pathlib.Path:
    """Resolve *path* to a filesystem path.

    A plain path is returned unchanged.  A "module.name:relative/path"
    spec is resolved relative to the named module's directory, falling
    back to the package's ``__path__`` for namespace packages.
    """
    if ":" not in path:
        return pathlib.Path(path)
    module_name, _, relative = path.partition(":")
    module = importlib.import_module(module_name)
    if hasattr(module, "__file__"):
        anchor = pathlib.Path(module.__file__).parent
    else:
        anchor = pathlib.Path(module.__path__[0])  # type: ignore
    return anchor / relative
def flatten_iterable(iterable):
    """Recursively yield the scalar elements of a nested iterable.

    Strings and bytes are treated as atoms: the original implementation
    recursed into them (a one-character string is itself an iterable of
    one-character strings), which blew the recursion limit as soon as a
    string appeared anywhere in the input.
    """
    for item in iterable:
        if isinstance(item, Iterable) and not isinstance(item, (str, bytes)):
            yield from flatten_iterable(item)
        else:
            yield item
def group_iterable_by_count(iterable, count):
    """Yield lists of up to *count* consecutive elements; the final list may be shorter."""
    iterator = iter(iterable)
    chunk = list(itertools.islice(iterator, count))
    while chunk:
        yield chunk
        chunk = list(itertools.islice(iterator, count))
def iterate_nwise(iterable, n=2):
    """Return an iterator over overlapping *n*-tuples of consecutive elements."""
    copies = itertools.tee(iterable, n)
    staggered = [
        itertools.islice(copy, offset, None)
        for offset, copy in enumerate(copies)
    ]
    return zip(*staggered)
def repeat_sequence_to_length(sequence, length):
    """Yield elements of *sequence*, cycling, until *length* items have been produced."""
    produced = 0
    for item in itertools.cycle(sequence):
        if produced >= length:
            break
        yield item
        produced += 1
def zip_sequences(*args):
    """Zip the sequences together, cycling shorter ones, for as many
    tuples as the longest sequence has elements."""
    last_index = max(len(seq) for seq in args) - 1
    repeating = [itertools.cycle(seq) for seq in args]
    for index, combo in enumerate(zip(*repeating)):
        yield combo
        if index == last_index:
            return
| 2.390625 | 2 |
apps/core/models/chartbuilder/parameters.py | bispojr/observatorio-ufj-covid19 | 3 | 12759115 | <reponame>bispojr/observatorio-ufj-covid19<filename>apps/core/models/chartbuilder/parameters.py
from django.db import models
import json
class Parameters():
    """Static chart-building parameters: category sets and their colors."""

    # Chart color per COVID case category.  Keys are Portuguese UI labels
    # and are runtime data shared with categorias()/cores() below.
    corGrafico = {
        "Confirmados": "red",
        "Descartados": "pink",
        "Investigados": "yellow",
        "Notificados": "green",
        "Isolados": "gray",
        "Internados": "blue",
        "Monitorados": "brown",
        "Recuperados": "purple",
        "Óbitos": "black"
    }

    def cores(self, tipo):
        # Return the color list matching the categories of chart type `tipo`.
        # NOTE(review): this passes `self` twice (self.categorias(self, tipo)),
        # which only resolves correctly if these methods are invoked unbound on
        # the class itself, e.g. Parameters.cores(Parameters, tipo); a normal
        # instance call would shift the arguments and always yield [].
        # Confirm the calling convention against the call sites before changing.
        cores = []
        for cat in self.categorias(self, tipo):
            cores.append(self.corGrafico[cat])
        return cores

    def categorias(self, tipo, comData = False):
        # Return the category list for chart type `tipo` ("resumo",
        # "monitorados" or "todas"); unknown types yield an empty list.
        # When comData is True, "Data" is prepended as the x-axis column.
        categorias = []
        if(tipo == "resumo"):
            categorias = [
                "Confirmados", "Recuperados",
                "Internados", "Óbitos"
            ]
        if(tipo == "monitorados"):
            categorias = [
                "Monitorados", "Descartados"
            ]
        if(tipo == "todas"):
            categorias = [
                "Confirmados", "Descartados", "Investigados",
                "Notificados", "Isolados","Internados",
                "Monitorados", "Recuperados", "Óbitos"
            ]
        if comData == True:
            categorias.insert(0, "Data")
return categorias | 2.203125 | 2 |
anthill/tools/services/dlc/__init__.py | 0x55AAh/anthill_gaming | 1 | 12759116 | <reponame>0x55AAh/anthill_gaming<filename>anthill/tools/services/dlc/__init__.py
from .. import Service
class DLC(Service):
    """Downloadable-content (DLC) service stub: inherits all behavior from Service."""
    pass
| 1.054688 | 1 |
lesson8/solution-1.1.py | ZemlikSasha/lessons | 0 | 12759117 | """
Спортсмен поставил перед собой задачу пробежать в общей сложности Х километров. В первый день спортсмен пробежал
Y километров, а затем он каждый день увеличивал пробег на 10% от предыдущего значения. Определите номер дня в который
спортсмен достигнет своей цели. Оформите решение в виде программы, которая на вход принимает числа X и Y и выводит
номер найденного дня.
"""
def days_to_goal(target_distance: int, daily_distance: int) -> int:
    """Return the number of days needed to run *target_distance* km in total.

    The athlete runs *daily_distance* km on day one and increases each
    subsequent day's run by 10% of the previous day's distance.

    :param target_distance: total distance goal in km
    :param daily_distance: distance run on the first day in km
    :raises ValueError: if the goal is positive but the daily distance is
        not, since the goal could never be reached (the previous version
        looped forever in that case).
    """
    if target_distance > 0 and daily_distance <= 0:
        raise ValueError("daily_distance must be positive to reach a positive goal")
    result_days = 0
    current_distance = 0.0
    while current_distance < target_distance:
        current_distance += daily_distance
        daily_distance *= 1.1
        result_days += 1
    return result_days
def main():
    """Read the goal and first-day distances from stdin and print the day count."""
    goal_km = int(input("Enter target distance (km): "))
    first_day_km = int(input("Enter daily distance (km): "))
    print("Days to goal: ", days_to_goal(goal_km, first_day_km))
# Run the interactive prompt only when executed as a script.
if __name__ == "__main__":
    main()
| 4.71875 | 5 |
hw/foboot-bitstream.py | zeldin/foboot | 0 | 12759118 | #!/usr/bin/env python3
# This variable defines all the external programs that this module
# relies on. lxbuildenv reads this variable in order to ensure
# the build will finish without exiting due to missing third-party
# programs.
LX_DEPENDENCIES = ["riscv", "icestorm", "yosys", "nextpnr-ice40"]
# Import lxbuildenv to integrate the deps/ directory
import lxbuildenv
# Disable pylint's E1101, which breaks completely on migen
#pylint:disable=E1101
#from migen import *
from migen import Module, Signal, Instance, ClockDomain, If
from migen.fhdl.specials import TSTriple
from migen.fhdl.decorators import ClockDomainsRenamer
from litex.build.lattice.platform import LatticePlatform
from litex.build.generic_platform import Pins, Subsignal
from litex.soc.integration.doc import AutoDoc, ModuleDoc
from litex.soc.integration.soc_core import SoCCore
from litex.soc.cores.cpu import CPUNone
from litex.soc.integration.builder import Builder
from litex.soc.interconnect import wishbone
from litex.soc.cores import spi_flash
from valentyusb.usbcore import io as usbio
from valentyusb.usbcore.cpu import epmem, unififo, epfifo, dummyusb, eptri
from valentyusb.usbcore.endpoint import EndpointType
import litex.soc.doc as lxsocdoc
import spibone
import argparse
import os
import subprocess
from rtl.version import Version
from rtl.romgen import RandomFirmwareROM, FirmwareROMHex
from rtl.messible import Messible
class BaseSoC(SoCCore, AutoDoc):
    """Fomu Bootloader and Base SoC

    Fomu is an FPGA that fits in your USB port.  This reference manual
    documents the basic SoC that runs the bootloader, and that can be
    reused to run your own RISC-V programs.

    This reference manual only describes a particular version of the SoC.
    The register sets described here are guaranteed to be available
    with a given ``major version``, but are not guaranteed to be available on
    any other version.  Naturally, you are free to create your own SoC
    that does not provide these hardware blocks. To see what the version of the
    bitstream you're running, check the ``VERSION`` registers.
    """

    # Fixed CSR page index for each peripheral, so register addresses stay
    # stable across builds.
    csr_map = {
        "ctrl": 0,  # provided by default (optional)
        "crg": 1,  # user
        "uart_phy": 2,  # provided by default (optional)
        "uart": 3,  # provided by default (optional)
        "identifier_mem": 4,  # provided by default (optional)
        "timer0": 5,  # provided by default (optional)
        "cpu_or_bridge": 8,
        "usb": 9,
        "picorvspi": 10,
        "touch": 11,
        "reboot": 12,
        "rgb": 13,
        "version": 14,
        "lxspi": 15,
        "messible": 16,
        "button": 17,
    }

    # NOTE(review): this rebinds the *class-level* SoCCore.mem_map, so it
    # affects every SoCCore constructed after this module is imported, not
    # just BaseSoC instances.
    SoCCore.mem_map = {
        "rom": 0x00000000,  # (default shadow @0x80000000)
        "sram": 0x10000000,  # (default shadow @0xa0000000)
        "spiflash": 0x20000000,  # (default shadow @0xa0000000)
        "main_ram": 0x40000000,  # (default shadow @0xc0000000)
        "csr": 0xe0000000,  # (default shadow @0xe0000000)
        "vexriscv_debug": 0xf00f0000,
    }

    # IRQ numbers for peripherals that raise interrupts, merged with the
    # defaults SoCCore already defines.
    interrupt_map = {
        "timer0": 2,
        "usb": 3,
    }
    interrupt_map.update(SoCCore.interrupt_map)

    def __init__(self, platform, boot_source="rand",
                 debug=None, bios_file=None,
                 use_dsp=False, placer="heap", output_dir="build",
                 pnr_seed=0,
                 **kwargs):
        """Assemble the SoC: CPU, RAM/ROM, SPI flash, USB, and board peripherals.

        :param platform: board abstraction (Fomu / OrangeCrab / OrangeCart).
        :param boot_source: "rand", "bios" or "spi" — what the CPU boots from.
        :param debug: None, "uart", "usb" or "spi" — which wishbone debug
            bridge (if any) to attach.
        :param bios_file: optional pre-built BIOS hex file (boot_source="bios").
        :param use_dsp: pass -dsp to yosys synthesis.
        :param placer: nextpnr placer ("heap" or "sa").
        :param output_dir: build output directory.
        :param pnr_seed: nextpnr seed, also recorded in the VERSION block.
        """
        # Disable integrated RAM as we'll add it later
        self.integrated_sram_size = 0
        if hasattr(platform, "get_integrated_sram_size"):
            self.integrated_sram_size = platform.get_integrated_sram_size()
        self.output_dir = output_dir

        clk_freq = int(12e6)
        platform.add_crg(self)

        SoCCore.__init__(self, platform, clk_freq, integrated_sram_size=self.integrated_sram_size, with_uart=False, csr_data_width=32, **kwargs)

        # Attach the requested debug bridge (uart/usb/spi); usb is handled
        # later when the USB controller itself is instantiated.
        usb_debug = False
        if debug is not None:
            if debug == "uart":
                from litex.soc.cores.uart import UARTWishboneBridge
                self.submodules.uart_bridge = UARTWishboneBridge(platform.request("serial"), clk_freq, baudrate=115200)
                self.add_wb_master(self.uart_bridge.wishbone)
            elif debug == "usb":
                usb_debug = True
            elif debug == "spi":
                import spibone
                # Add SPI Wishbone bridge
                debug_device = [
                    ("spidebug", 0,
                        Subsignal("mosi", Pins("dbg:0")),
                        Subsignal("miso", Pins("dbg:1")),
                        Subsignal("clk", Pins("dbg:2")),
                        Subsignal("cs_n", Pins("dbg:3")),
                    )
                ]
                platform.add_extension(debug_device)
                spi_pads = platform.request("spidebug")
                self.submodules.spibone = ClockDomainsRenamer("usb_12")(spibone.SpiWishboneBridge(spi_pads, wires=4))
                self.add_wb_master(self.spibone.wishbone)
            # With debug enabled, build the debug CPU variant and expose its
            # debug bus in the memory map.
            if hasattr(self, "cpu") and not isinstance(self.cpu, CPUNone):
                platform.add_cpu_variant(self, debug=True)
                self.register_mem("vexriscv_debug", 0xf00f0000, self.cpu.debug_bus, 0x100)
        else:
            if hasattr(self, "cpu") and not isinstance(self.cpu, CPUNone):
                platform.add_cpu_variant(self)

        if hasattr(platform, "add_sram"):
            # SPRAM- UP5K has single port RAM, might as well use it as SRAM to
            # free up scarce block RAM.
            spram_size = platform.add_sram(self)
            self.register_mem("sram", self.mem_map["sram"], self.spram.bus, spram_size)

        # Add a Messible for device->host communications
        self.submodules.messible = Messible()

        # Select where the CPU's boot ROM comes from: a random-filled ROM
        # (later replaced via ecpbram), a compiled/supplied BIOS, or SPI flash.
        if boot_source == "rand":
            kwargs['cpu_reset_address'] = 0
            bios_size = 0x2000
            self.submodules.random_rom = RandomFirmwareROM(bios_size)
            self.add_constant("ROM_DISABLE", 1)
            self.register_rom(self.random_rom.bus, bios_size)
        elif boot_source == "bios":
            kwargs['cpu_reset_address'] = 0
            if bios_file is None:
                self.integrated_rom_size = bios_size = 0x4000
                self.submodules.rom = wishbone.SRAM(bios_size, read_only=True, init=[])
                self.register_rom(self.rom.bus, bios_size)
            else:
                bios_size = 0x4000
                self.submodules.firmware_rom = FirmwareROMHex(bios_size, bios_file)
                self.add_constant("ROM_DISABLE", 1)
                self.register_rom(self.firmware_rom.bus, bios_size)
        elif boot_source == "spi":
            kwargs['cpu_reset_address'] = 0
            self.integrated_rom_size = bios_size = 0x2000
            gateware_size = 0x1a000
            self.flash_boot_address = self.mem_map["spiflash"] + gateware_size
            self.submodules.rom = wishbone.SRAM(bios_size, read_only=True, init=[])
            self.register_rom(self.rom.bus, bios_size)
        else:
            raise ValueError("unrecognized boot_source: {}".format(boot_source))

        # The litex SPI module supports memory-mapped reads, as well as a bit-banged mode
        # for doing writes.
        spi_pads = platform.request("spiflash4x")
        self.submodules.lxspi = spi_flash.SpiFlashDualQuad(spi_pads, dummy=platform.spi_dummy, endianness="little")
        self.lxspi.add_clk_primitive(platform.device)
        self.register_mem("spiflash", self.mem_map["spiflash"], self.lxspi.bus, size=platform.spi_size)

        # Add USB pads, as well as the appropriate USB controller. If no CPU is
        # present, use the DummyUsb controller.
        usb_pads = platform.request_usb()
        usb_iobuf = usbio.IoBuf(usb_pads.d_p, usb_pads.d_n, usb_pads.pullup)
        if hasattr(self, "cpu") and not isinstance(self.cpu, CPUNone):
            self.submodules.usb = eptri.TriEndpointInterface(usb_iobuf, debug=usb_debug)
        else:
            self.submodules.usb = dummyusb.DummyUsb(usb_iobuf, debug=usb_debug)

        if usb_debug:
            self.add_wb_master(self.usb.debug_bridge.wishbone)

        # For the EVT board, ensure the pulldown pin is tristated as an input
        if hasattr(usb_pads, "pulldown"):
            pulldown = TSTriple()
            self.specials += pulldown.get_tristate(usb_pads.pulldown)
            self.comb += pulldown.oe.eq(0)

        # Add GPIO pads for the touch buttons
        if hasattr(platform, "add_touch"):
            platform.add_touch(self)
        if hasattr(platform, "add_button"):
            platform.add_button(self)

        # Reserve the last 512 KiB of flash for the bootloader; firmware may
        # not write past FLASH_MAX_ADDR.
        bootloader_size = 512*1024
        self.add_constant("FLASH_MAX_ADDR", value=platform.spi_size - bootloader_size)

        # Allow the user to reboot the FPGA. Additionally, connect the CPU
        # RESET line to a register that can be modified, to allow for
        # us to debug programs even during reset.
        platform.add_reboot(self)
        if hasattr(self, "cpu") and not isinstance(self.cpu, CPUNone):
            self.cpu.cpu_params.update(
                i_externalResetVector=self.reboot.addr.storage,
            )

        platform.add_rgb(self)

        # VERSION block: records git revision, board model code and pnr seed.
        self.submodules.version = Version(platform.revision, platform.hw_platform, self, pnr_seed, models=[
            ("0x45", "E", "Fomu EVT"),
            ("0x44", "D", "Fomu DVT"),
            ("0x50", "P", "Fomu PVT (production)"),
            ("0x48", "H", "Fomu Hacker"),
            ("0x11", "1", "OrangeCrab r0.1"),
            ("0x12", "2", "OrangeCrab r0.2"),
            ("0x63", "c", "OrangeCart"),
            ("0x3f", "?", "Unknown model"),
        ])

        if hasattr(platform, "build_templates"):
            platform.build_templates(use_dsp, pnr_seed, placer)

        # Embed the current git tag into the firmware's CONFIG_* constants.
        git_version_subprocess = subprocess.Popen("git describe --tags", shell=True, stdout=subprocess.PIPE)
        git_version = git_version_subprocess.stdout.read().decode("utf-8").strip()
        for (name,value) in platform.get_config(git_version):
            self.add_constant("CONFIG_" + name, value)
def main():
    """Parse command-line options, build the Fomu/OrangeCrab SoC, and emit
    gateware, software and documentation into ./build."""
    parser = argparse.ArgumentParser(
        description="Build Fomu Main Gateware")
    parser.add_argument(
        "--boot-source", choices=["spi", "rand", "bios"], default="bios",
        help="where to have the CPU obtain its executable code from"
    )
    parser.add_argument(
        "--document-only", default=False, action="store_true",
        help="Don't build gateware or software, only build documentation"
    )
    parser.add_argument(
        "--platform", choices=["fomu", "orangecrab", "orangecart"], required=True,
        help="build foboot for a particular hardware"
    )
    parser.add_argument(
        "--bios", help="use specified file as a BIOS, rather than building one"
    )
    parser.add_argument(
        "--with-debug", help="enable debug support", choices=["usb", "uart", "spi", None]
    )
    parser.add_argument(
        "--with-dsp", help="use dsp inference in yosys (not all yosys builds have -dsp)", action="store_true"
    )
    parser.add_argument(
        "--no-cpu", help="disable cpu generation for debugging purposes", action="store_true"
    )
    parser.add_argument(
        "--placer", choices=["sa", "heap"], default="heap", help="which placer to use in nextpnr"
    )
    parser.add_argument(
        "--seed", default=0, help="seed to use in nextpnr"
    )
    parser.add_argument(
        "--export-random-rom-file", help="Generate a random ROM file and save it to a file"
    )
    # NOTE(review): --skip-gateware lacks action="store_true", so it takes an
    # explicit value (any non-empty string is truthy below) — confirm intended.
    parser.add_argument(
        "--skip-gateware", help="Skip generating gateware", default=False
    )
    # First pass: only --platform is needed to pick the platform module;
    # remaining/unknown args are parsed again after platform args are added.
    args, _ = parser.parse_known_args()

    # Select platform based arguments
    if args.platform == "orangecrab":
        from rtl.platform.orangecrab import Platform, add_platform_args
    elif args.platform == "orangecart":
        from rtl.platform.orangecart import Platform, add_platform_args
    elif args.platform == "fomu":
        from rtl.platform.fomu import Platform, add_platform_args

    # Add any platform independent args
    add_platform_args(parser)
    args = parser.parse_args()

    # load our platform file
    if args.platform == "orangecrab":
        platform = Platform(revision=args.revision, device=args.device)
    elif args.platform == "orangecart":
        platform = Platform(device=args.device)
    elif args.platform == "fomu":
        platform = Platform(revision=args.revision)

    output_dir = 'build'

    # Always pre-generate the placeholder random ROM image with ecpbram.
    #if args.export_random_rom_file is not None:
    rom_rand = os.path.join(output_dir, "gateware", "rand_rom.hex")
    os.system(f"ecpbram --generate {rom_rand} --seed {0} --width {32} --depth {int(0x4000/4)}")

    # Software is only compiled when the boot source needs a BIOS we build.
    compile_software = False
    if (args.boot_source == "bios" or args.boot_source == "spi") and args.bios is None:
        compile_software = True

    compile_gateware = True
    if args.skip_gateware:
        compile_gateware = False

    cpu_type = "vexriscv"
    cpu_variant = "minimal"
    if args.with_debug:
        cpu_variant = cpu_variant + "+debug"

    if args.no_cpu:
        cpu_type = None
        cpu_variant = None

    if args.document_only:
        compile_gateware = False
        compile_software = False

    os.environ["LITEX"] = "1" # Give our Makefile something to look for
    soc = BaseSoC(platform, cpu_type=cpu_type, cpu_variant=cpu_variant,
                  debug=args.with_debug, boot_source=args.boot_source,
                  bios_file=args.bios,
                  use_dsp=args.with_dsp, placer=args.placer,
                  pnr_seed=int(args.seed),
                  output_dir=output_dir)
    builder = Builder(soc, output_dir=output_dir, csr_csv="build/csr.csv", csr_svd="build/soc.svd",
                      compile_software=compile_software, compile_gateware=compile_gateware)
    if compile_software:
        builder.software_packages = [
            ("bios", os.path.abspath(os.path.join(os.path.dirname(__file__), "..", "sw")))
        ]
    vns = builder.build()
    soc.do_exit(vns)
    lxsocdoc.generate_docs(soc, "build/documentation/", project_name="Fomu Bootloader", author="<NAME>")

    if not args.document_only:
        platform.finalise(output_dir)

if __name__ == "__main__":
    main()
def export_random_rom_file(filename):
    """Write the bootloader's pseudo-random ROM image to *filename*.

    Emits 0x2000 bytes as 2048 zero-padded 8-digit hex words, one per
    line, produced by an xorshift32 generator (13/17/5 shifts) so that
    the file matches the sequence the gateware generates.  Returns 0.
    """
    rom_bytes = 0x2000

    def _xorshift32(state):
        # One round of the xorshift32 PRNG, masked to 32 bits at each step.
        state = state ^ (state << 13) & 0xffffffff
        state = state ^ (state >> 17) & 0xffffffff
        state = state ^ (state << 5) & 0xffffffff
        return state & 0xffffffff

    def _next_word(state):
        # Build one 32-bit word from 32 successive low bits of the
        # generator, LSB first; the word also serves as the next seed.
        word = 0
        for bit in range(32):
            state = _xorshift32(state)
            if state & 1:
                word |= 1 << bit
        return word & 0xffffffff

    state = 1
    with open(filename, "w", newline="\n") as rom_file:
        for _ in range(rom_bytes // 4):
            state = _next_word(state)
            rom_file.write(f"{state:08x}\n")
    return 0
| 1.679688 | 2 |
assignments/assignment02/code/tests/test_load.py | juanshishido/info290-dds | 0 | 12759119 | import unittest
from code.permutation import load
class TestLoadData(unittest.TestCase):
features, box_office = load()
def test_load_returns_not_None(self):
self.assertIsNotNone(load())
def test_load_features_shape(self):
self.assertEquals((39360, 2), self.features.shape)
def test_load_box_office_shape(self):
self.assertEquals((8304, 2), self.box_office.shape)
| 3.03125 | 3 |
tfat/tests/test_report_model.py | AdamCottrill/TFAT | 0 | 12759120 | """
=============================================================
c:/1work/Python/djcode/tfat/tfat/tests/test_report_model.py
Created: 18 Jun 2015 12:05:51
DESCRIPTION:
Tests of the methods associated with the tag report model.
<NAME>
=============================================================
"""
from tfat.models import Recovery
from tfat.tests.factories import *
import pytest
@pytest.mark.django_db
def test_report_str_complete():
"""The default string representation for a tag report is the anglers
fist and last name plus the date the report was filed.
"""
elements = {
"first_name": "Homer",
"last_name": "Simpson",
"obs_date": datetime(2013, 10, 16),
}
angler = JoePublicFactory(
first_name=elements["first_name"], last_name=elements["last_name"]
)
report = Report(reported_by=angler, report_date=elements["obs_date"])
# convert our date the expected string format
elements["obs_date"] = elements["obs_date"].strftime("%b-%d-%Y")
should_be = "{first_name} {last_name} on {obs_date}"
assert str(report) == should_be.format(**elements)
@pytest.mark.django_db
def test_report_str_no_date():
"""the string representation of a report without a date is the anglers
first and last name plus the report id.
"""
elements = {"first_name": "Homer", "last_name": "Simpson"}
angler = JoePublicFactory(
first_name=elements["first_name"], last_name=elements["last_name"]
)
report = Report(reported_by=angler, report_date=None)
elements["id"] = report.id
should_be = "{first_name} {last_name} <Report id={id}>"
assert str(report) == should_be.format(**elements)
@pytest.mark.django_db
def test_report_str_no_date_or_angler():
"""The string representation of a report without an angler or date is
just the report id
"""
report = Report(reported_by=None, report_date=None)
assert str(report) == "<Report id={}>".format(report.id)
@pytest.mark.django_db
def test_get_recoveries():
"""the get_recoveries() method of the report object should return a list of
tag numbers associated with the report.
"""
report = ReportFactory()
species = SpeciesFactory()
tag1 = RecoveryFactory(report=report, species=species)
tag2 = RecoveryFactory(report=report, species=species)
tag3 = RecoveryFactory(report=report, species=species)
tags = report.get_recoveries()
assert tag1 in tags
assert tag2 in tags
assert tag3 in tags
@pytest.mark.django_db
def test_get_recoveries_with_latlon():
"""the get_recoveries() method of the report object should return a list of
only those tag numbers associated with the lat-lon data.
"""
report = ReportFactory()
species = SpeciesFactory()
tag1 = RecoveryFactory(report=report, species=species, dd_lat=45.0, dd_lon=-81.0)
tag2 = RecoveryFactory(report=report, species=species, dd_lat=45.0, dd_lon=None)
tag3 = RecoveryFactory(report=report, species=species, dd_lat=None, dd_lon=-81.0)
tag4 = RecoveryFactory(report=report, species=species, dd_lat=None, dd_lon=None)
tags = report.get_recoveries_with_latlon()
assert tag1 == tags[0]
assert len(tags) == 1
@pytest.mark.django_db
def test_get_recoveries_no_tags():
"""the get_recoveries() method of the report object should gracefully return
None if no tags where associated with this report. (I'm not sure why
there is a report if there are not tags')
"""
report = ReportFactory()
tags = report.get_recoveries()
assert len(tags) == len([])
| 2.46875 | 2 |
tests/acceptance/glancesync_cmd_client/remote_client.py | telefonicaid/fiware-glancesync | 0 | 12759121 | # -*- coding: utf-8 -*-
# Copyright 2015-2016 Telefónica Investigación y Desarrollo, S.A.U
#
# This file is part of FIWARE project.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
#
# You may obtain a copy of the License at:
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
#
# See the License for the specific language governing permissions and
# limitations under the License.
#
# For those usages not covered by the Apache version 2.0 License please
# contact with <EMAIL>
from qautils.remote.fabric_utils import FabricUtils
from subprocess import Popen, PIPE
import os
__copyright__ = "Copyright 2015-2016"
__license__ = " Apache License, Version 2.0"
COMMAND_SYNC = "sync.py"
OUTPUT_PARALLEL_LOGS = "sync_*"
class GlanceSyncRemoteCmdClient:
""" Remote GlanceSync client for testing purposes """
def __init__(self, master_hostname, master_username, master_password, configuration_file_path,
master_keyfile=None, glancesyc_bin_path=None):
"""
Init GlanceSync client.
:param master_hostname (string): Hostname of Master.
:param master_username (string): Username.
:param master_password (string): Password.
:param configuration_file_path (string): Path where configuration file is located
:param master_keyfile (string): SSH private key file
:param glancesyc_bin_path (string): Path where GlanceSyn binary are located
:return:
"""
self.fabric_utils = FabricUtils(master_hostname, master_username, master_password, master_keyfile)
self.conf_file_path = configuration_file_path
self.conf_file_backup_path = None
self.bin_path = glancesyc_bin_path
self.host = master_hostname
def change_configuration_file(self, section, key, value):
"""
Change properties in the configuration file.
:param section (String): Section.
:param key (String): Property name.
:param value (String): Property value.
:return (String): Command output
"""
command = "crudini --set {config_file} {section} {key} {value}".format(config_file=self.conf_file_path,
section=section, key=key, value=value)
return self.execute_command(command)
def backup_glancesync_config_file(self, backup_dir):
"""
Create a backup of configuration file.
:param backup_dir (String): Copy the GlanceSync configuration file to tmp backup_dir
:return: None
"""
self.conf_file_backup_path = "{backup_dir}/glancesync.conf.backup".format(backup_dir=backup_dir)
command = "cp -f {config_file} {backup_file}".format(config_file=self.conf_file_path,
backup_file=self.conf_file_backup_path)
return self.execute_command(command)
def restore_backup(self):
"""
Restore backup of the configuration file.
:return: None
"""
if self.conf_file_backup_path:
command = "cp -f {backup_file} {config_file}".format(backup_file=self.conf_file_backup_path,
config_file=self.conf_file_path)
return self.execute_command(command)
def get_output_log_list(self):
"""
This method return the content of executing a 'ls' command filtering by output parallel logs dir name
:return (String): Command output
"""
command = "ls -d {output_files_pater}*/*".format(bin_path=self.bin_path,
output_files_pater=OUTPUT_PARALLEL_LOGS)
return self.execute_command(command)
def get_output_log_content(self, file_absolute_path):
"""
This method return the content of the given file.
:param file_absolute_path: Absolute path of the file (given by get_output_log_list function)
:return (String): Command output (content of the file)
"""
command = "cat {file_absolute_path}".format(file_absolute_path=file_absolute_path)
return self.execute_command(command)
def clean_all_parallel_output_logs(self):
"""
Remove all output files coming from a parallel execution
:return (String): Command output
"""
command = "rm -rf {output_files_pater}".format(bin_path=self.bin_path,
output_files_pater=OUTPUT_PARALLEL_LOGS)
return self.execute_command(command)
def sync(self, list_nodes=None, options=None):
"""
Execute SYNC command. If options are given, they will be passed to the GlanceSync CLI.
:param list_nodes (String): String with the list of nodes. e.i:
"Burgos"
"master:Burgos"
"Burgos target2:Madrid"
"master:Burgos target2:Madrid"
:param options (String): GlanceSync CLI options.
:return (String): Command output
"""
command = "{}/{}".format(self.bin_path, COMMAND_SYNC) if self.bin_path is not None else "sync"
command = "{command} {options}".format(command=command, options=options) if options else command
command = "{command} {list_nodes}".format(command=command, list_nodes=list_nodes) if list_nodes else command
return self.execute_command(command)
def execute_command(self, command):
if self.host == "localhost" or self.host == "fiwareglancesync":
return self.execute_command_locally(command)
else:
return self.fabric_utils.execute_command(command)
    def execute_command_locally(self, command):
        """Run command in a local shell and return its stdout.

        Returns 'ok' when the command produced no output, and None on error.
        NOTE(review): stderr is not piped here, so `err` from communicate()
        is always None and the error branch looks unreachable — confirm
        whether stderr=PIPE was intended.
        """
        p = Popen(command, shell=True, stdout=PIPE)
        metadatajson, err = p.communicate()
        if err:
            return None
        if not metadatajson:
            return 'ok'
        return metadatajson
| 1.882813 | 2 |
Security/02 - Terminology and Concepts/02 - Security Key Spaces.py | srgeyK87/Hacker-Rank-30-days-challlenge | 275 | 12759122 | <filename>Security/02 - Terminology and Concepts/02 - Security Key Spaces.py<gh_stars>100-1000
# ========================
# Information
# ========================
# Direct Link: https://www.hackerrank.com/challenges/security-key-spaces/problem
# Difficulty: Easy
# Max Score: 10
# Language: Python
# ========================
# Solution
# ========================
# Caesar-style shift applied digit-wise: add e to each digit, modulo 10.
num = input()
e = int(input())
print(''.join(str((int(digit) + e) % 10) for digit in num))
| 3.546875 | 4 |
steenpapierschaar/game.py | coderdojo-denhaag/python | 0 | 12759123 | <filename>steenpapierschaar/game.py
# import the random module
import random
# Print the game instructions (all user-facing text is in Dutch)
print("Blad steen schaar win je op de volgende manier: \n"
      + "Steen vs papier->Papier wint \n"
      + "Steen vs schaar->Steen wint \n"
      + "Blad vs schaar->Schaar wint \n")
while True:
    print("Kies uit \n 1. steen \n 2. papier \n 3. schaar \n")
    # read the player's choice (1=rock, 2=paper, 3=scissors)
    choice = int(input("Jouw beurt: "))
    # re-prompt while the input is out of range
    while choice > 3 or choice < 1:
        choice = int(input("Geef een geldige input: "))
    # map the player's choice to its (Dutch) name
    if choice == 1:
        choice_name = 'steen'
    elif choice == 2:
        choice_name = 'papier'
    else:
        choice_name = 'schaar'
    # print user choice
    print("Uw keuze is: " + choice_name)
    print("\nNu is het de beurt aan de computer.......")
    # the computer picks a random number via the random module
    comp_choice = random.randint(1, 3)
    # re-roll while the computer's choice equals the player's,
    # so by design a draw can never happen
    while comp_choice == choice:
        comp_choice = random.randint(1, 3)
    # map the computer's choice to its name
    if comp_choice == 1:
        comp_choice_name = 'steen'
    elif comp_choice == 2:
        comp_choice_name = 'papier'
    else:
        comp_choice_name = 'schaar'
    print("De keuze van de computer is: " + comp_choice_name)
    print(choice_name + " vs " + comp_choice_name)
    # winning conditions: paper beats rock, rock beats scissors,
    # otherwise scissors beat paper
    if ((choice == 1 and comp_choice == 2) or
            (choice == 2 and comp_choice == 1)):
        print("blad wint => ", end="")
        result = "papier"
    elif ((choice == 1 and comp_choice == 3) or
            (choice == 3 and comp_choice == 1)):
        print("steen wint =>", end="")
        result = "steen"
    else:
        print("schaar wint =>", end="")
        result = "schaar"
    # announce whether the player or the computer won this round
    if result == choice_name:
        print(" ** Jij hebt gewonnen ** ")
    else:
        print(" ** De computer heeft gewonnen ** ")
    print("Wil je nog eens spelen? (J/N)")
    ans = input()
    # if the player enters n or N the game ends
    if ans == 'n' or ans == 'N':
        break
# out of the loop: thank the player for playing
print("\nBedankt om te spelen!")
| 4.125 | 4 |
tests/broker/test_vulcan2.py | ned21/aquilon | 7 | 12759124 | #!/usr/bin/env python
# -*- cpy-indent-level: 4; indent-tabs-mode: nil -*-
# ex: set expandtab softtabstop=4 shiftwidth=4:
#
# Copyright (C) 2012,2013,2014,2015,2016,2017 Contributor
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Module for testing the vulcan2 related commands."""
import unittest
# When run directly, make test dependencies importable before the broker
# test modules below are loaded.
if __name__ == "__main__":
    import utils
    utils.import_depends()
from brokertest import TestBrokerCommand
from notificationtest import VerifyNotificationsMixin
class TestVulcan20(VerifyNotificationsMixin, TestBrokerCommand):
    """Broker tests for vulcan2 metacluster-aligned services.

    Exercises binding/unbinding a service at the metacluster level and the
    conflicts that arise when cluster- and metacluster-level service
    instances disagree. Test methods are numbered because they run in
    order and depend on each other's broker-side state.
    """

    def add_utcluster(self, name, metacluster):
        """Helper: create an ESX cluster attached to the given metacluster."""
        command = ["add_esx_cluster", "--cluster=%s" % name,
                   "--metacluster=%s" % metacluster, "--room=utroom1",
                   "--buildstatus=build",
                   "--domain=unittest", "--down_hosts_threshold=0",
                   "--archetype=esx_cluster",
                   "--personality=vulcan2-server-dev"]
        self.noouttest(command)

    # metacluster aligned svc tests
    def test_400_addvcenterservices(self):
        """Require vcenter for vmhost and metacluster personalities."""
        command = ["add_required_service", "--service", "vcenter",
                   "--archetype", "vmhost", "--personality", "vulcan2-server-dev"]
        self.noouttest(command)
        command = ["add_required_service", "--service", "vcenter",
                   "--archetype", "metacluster", "--personality", "vulcan2"]
        self.noouttest(command)

    def test_410_bindvcenterservices(self):
        """Bind vcenter to metacluster utmc8 and verify who gets the binding."""
        command = ["bind_client", "--metacluster", "utmc8",
                   "--service", "vcenter", "--instance", "ut"]
        err = self.statustest(command)
        # The service should be bound to the metacluster and to the hosts, but
        # not to the clusters as they do not require it
        self.matchoutput(err, "Metacluster utmc8 adding binding for "
                         "service instance vcenter/ut", command)
        self.matchoutput(err, "Host evh80.aqd-unittest.ms.com adding binding "
                         "for service instance vcenter/ut", command)
        self.matchoutput(err, "Host evh81.aqd-unittest.ms.com adding binding "
                         "for service instance vcenter/ut", command)
        self.matchclean(err, "utecl", command)
        command = ["show", "host", "--hostname", "evh80.aqd-unittest.ms.com"]
        out = self.commandtest(command)
        self.matchoutput(out,
                         "Uses Service: vcenter Instance: ut",
                         command)
        command = "show metacluster --metacluster utmc8"
        out = self.commandtest(command.split(" "))
        self.matchoutput(out, "Member Alignment: Service vcenter Instance ut", command)

    def test_420_failmaxclientcount(self):
        """Adding a cluster beyond vcenter's max_clients must fail cleanly."""
        command = ["update_service", "--service", "vcenter", "--instance", "ut",
                   "--max_clients", "17"]
        self.noouttest(command)
        command = ["map", "service", "--service", "vcenter", "--instance", "ut",
                   "--building", "ut"] + self.valid_just_sn
        self.noouttest(command)
        self.add_utcluster("utpgcl2", "utmc8")
        command = ["make", "cluster", "--cluster", "utmc8"]
        out = self.badrequesttest(command)
        self.matchoutput(out, "Please use the --metacluster option for "
                         "metaclusters.", command)
        self.matchoutput(out,
                         "The available instances ['ut'] for service vcenter "
                         "are at full capacity.",
                         command)
        # undo the map and the scratch cluster so later tests start clean
        command = ["unmap", "service", "--service", "vcenter",
                   "--instance", "ut", "--building", "ut"] + self.valid_just_sn
        self.noouttest(command)
        self.statustest(["del_cluster", "--cluster=utpgcl2"])

    def test_430_unbindvcenterservices(self):
        """Drop the vcenter requirements and unbind it from utmc8."""
        command = ["del_required_service", "--service", "vcenter",
                   "--archetype", "metacluster", "--personality", "vulcan2"]
        self.noouttest(command)
        command = ["del_required_service", "--service", "vcenter",
                   "--archetype", "vmhost", "--personality", "vulcan2-server-dev"]
        self.noouttest(command)
        self.noouttest(["unbind_client", "--metacluster", "utmc8",
                        "--service", "vcenter"])

    def test_440_unmapvcenterservices(self):
        """Unmap vcenter and check hosts lose the binding on remake."""
        command = ["unmap", "service", "--service", "vcenter",
                   "--instance", "ut", "--building", "ut",
                   "--personality", "vulcan2-server-dev", "--archetype", "vmhost"]
        self.noouttest(command)
        command = ["make", "--hostname", "evh80.aqd-unittest.ms.com"]
        err = self.statustest(command)
        self.matchoutput(err, "Host evh80.aqd-unittest.ms.com removing "
                         "binding for service instance vcenter/ut", command)
        command = ["show", "host", "--hostname", "evh80.aqd-unittest.ms.com"]
        out = self.commandtest(command)
        self.matchclean(out,
                        "Uses Service: vcenter Instance: ut",
                        command)

    #
    # service binding conflicts
    #
    def test_500_add_mc_esx_service(self):
        """Introduce a metacluster-level esx_management_server instance and rebind."""
        command = ["add", "service", "--service", "esx_management_server", "--instance", "ut.mc"]
        self.noouttest(command)
        command = ["add_required_service", "--service", "esx_management_server",
                   "--archetype", "metacluster", "--personality", "vulcan2"]
        self.noouttest(command)
        command = ["map", "service", "--service", "esx_management_server", "--instance", "ut.mc",
                   "--building", "ut", "--personality", "vulcan2",
                   "--archetype", "metacluster"]
        self.noouttest(command)
        command = ["rebind_client", "--metacluster", "utmc8",
                   "--service", "esx_management_server", "--instance", "ut.mc"]
        err = self.statustest(command)
        self.matchoutput(err,
                         "Metacluster utmc8 adding binding for service "
                         "instance esx_management_server/ut.mc",
                         command)
        # clusters and hosts must all be flipped from ut.[ab] to ut.mc
        for cluster in ["utecl12", "utecl13"]:
            self.searchoutput(err,
                              "ESX Cluster %s removing binding for service "
                              "instance esx_management_server/ut.[ab]" % cluster,
                              command)
            self.matchoutput(err,
                             "ESX Cluster %s adding binding for service "
                             "instance esx_management_server/ut.mc" % cluster,
                             command)
        for host in ["evh80", "evh81"]:
            self.searchoutput(err,
                              "Host %s.aqd-unittest.ms.com removing binding for "
                              "service instance esx_management_server/ut.[ab]" % host,
                              command)
            self.matchoutput(err,
                             "Host %s.aqd-unittest.ms.com adding binding for "
                             "service instance esx_management_server/ut.mc" % host,
                             command)

    def test_510_fail_make_host(self):
        """make on a host must fail while ut.mc is not in its service map."""
        command = ["make", "--hostname", "evh80.aqd-unittest.ms.com"]
        out = self.badrequesttest(command)
        self.matchoutput(out,
                         "ESX Metacluster utmc8 is set to use service instance "
                         "esx_management_server/ut.mc, but that instance is "
                         "not in a service map for "
                         "host evh80.aqd-unittest.ms.com.",
                         command)

    def test_510_fail_make_cluster(self):
        """make cluster must fail for both the cluster and its hosts."""
        command = ["make", "cluster", "--cluster", "utecl12"]
        out = self.badrequesttest(command)
        self.matchoutput(out,
                         "ESX Metacluster utmc8 is set to use service instance "
                         "esx_management_server/ut.mc, but that instance is "
                         "not in a service map for ESX cluster utecl12.",
                         command)
        self.matchoutput(out,
                         "ESX Metacluster utmc8 is set to use service instance "
                         "esx_management_server/ut.mc, but that instance is "
                         "not in a service map for "
                         "host evh80.aqd-unittest.ms.com.",
                         command)

    def test_520_verify_client_count(self):
        """ut.mc should count the 16 clients bound in test_500."""
        command = ["show_service", "--service=esx_management_server",
                   "--instance=ut.mc"]
        out = self.commandtest(command)
        self.searchoutput(out, r"^ Client Count: 16$", command)

    def test_530_verify_mixed_client_count(self):
        """Binding one more cluster raises the mixed client count to 24."""
        self.add_utcluster("utpgcl3", "utmc8")
        command = ["bind_client", "--cluster", "utpgcl3", "--service",
                   "esx_management_server", "--instance", "ut.mc"]
        err = self.statustest(command)
        self.matchoutput(err, "ESX Cluster utpgcl3 adding binding for service "
                         "instance esx_management_server/ut.mc", command)
        command = ["show_service", "--service=esx_management_server",
                   "--instance=ut.mc"]
        out = self.commandtest(command)
        self.searchoutput(out, r"^ Client Count: 24$", command)

    # Can't unbind an aligned service here and we don't want to unalign it
    def test_538_del_utpgcl3(self):
        """Remove the scratch cluster added in test_530."""
        self.statustest(["del_cluster", "--cluster=utpgcl3"])

    def test_540_remove_mc_esx_service(self):
        """Tear down ut.mc; clusters fall back to the ut.[ab] instances."""
        command = ["del_required_service", "--service", "esx_management_server",
                   "--archetype", "metacluster", "--personality", "vulcan2"]
        self.noouttest(command)
        command = ["unbind_client", "--metacluster", "utmc8",
                   "--service", "esx_management_server"]
        self.noouttest(command)
        command = ["unmap", "service", "--service", "esx_management_server", "--instance", "ut.mc",
                   "--building", "ut", "--personality", "vulcan2",
                   "--archetype", "metacluster"]
        self.noouttest(command)
        out = self.statustest(["make_cluster", "--cluster", "utecl12"])
        self.matchoutput(out, "removing binding for service instance "
                         "esx_management_server/ut.mc", command)
        self.searchoutput(out, "adding binding for service instance "
                          "esx_management_server/ut.[ab]", command)
        out = self.statustest(["make_cluster", "--cluster", "utecl13"])
        self.matchoutput(out, "removing binding for service instance "
                         "esx_management_server/ut.mc", command)
        self.searchoutput(out, "adding binding for service instance "
                          "esx_management_server/ut.[ab]", command)
        command = ["del", "service", "--service", "esx_management_server", "--instance", "ut.mc"]
        self.noouttest(command)
# Allow running this module directly: build and run just this suite.
if __name__ == '__main__':
    suite = unittest.TestLoader().loadTestsFromTestCase(TestVulcan20)
    unittest.TextTestRunner(verbosity=2).run(suite)
| 1.789063 | 2 |
DataStructures and Algorithms/Recursion/Reverse.py | abhishekratnam/Datastructuresandalgorithmsinpython | 0 | 12759125 | <gh_stars>0
def reverse(S, start=0, stop=None):
    """Reverse the elements of the implicit slice S[start:stop] in place.

    Classic linear recursion: swap the outermost pair, then recurse on the
    shrunken slice. Recursion depth is (stop - start) // 2, so very long
    slices can hit Python's recursion limit.

    Generalized (backward compatible): start and stop now default to the
    whole sequence, so reverse(S) reverses S entirely; existing
    three-argument calls behave exactly as before.

    :param S: mutable sequence (e.g. a list)
    :param start: index of the first element of the slice (default 0)
    :param stop: index one past the last element (default len(S))
    """
    if stop is None:
        stop = len(S)
    if start < stop - 1:
        S[start], S[stop - 1] = S[stop - 1], S[start]
        reverse(S, start + 1, stop - 1)
def reverse_iterative(S):
    """Reverse the elements of S in place with an iterative two-pointer swap.

    Uses O(1) extra space. (The previous docstring incorrectly described
    this as "tail recursion" — it is a plain while loop.)

    :param S: mutable sequence (e.g. a list)
    """
    start, stop = 0, len(S)
    while start < stop - 1:
        S[start], S[stop - 1] = S[stop - 1], S[start]
        start, stop = start + 1, stop - 1
| 3.890625 | 4 |
submarine-sdk/pysubmarine/tests/environment/test_environment_client.py | 10088/submarine | 0 | 12759126 | # Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pytest
from submarine.client.api.environment_client import EnvironmentClient
from submarine.client.models.environment_spec import EnvironmentSpec
from submarine.client.models.kernel_spec import KernelSpec
@pytest.mark.e2e
def test_environment_e2e():
    """End-to-end smoke test of the Environment REST client.

    Requires a running Submarine server on localhost:8080: creates an
    environment from a conda kernel spec, exercises the read endpoints,
    then deletes the environment again.
    """
    submarine_client = EnvironmentClient(host="http://localhost:8080")
    # Minimal kernel spec: named conda env with default channel and no deps.
    kernel_spec = KernelSpec(
        name="submarine_jupyter_py3",
        channels=["defaults"],
        conda_dependencies=[],
        pip_dependencies=[],
    )
    environment_spec = EnvironmentSpec(
        name="mytest",
        kernel_spec=kernel_spec,
        docker_image="apache/submarine:jupyter-notebook-gpu-0.7.0",
    )
    environment = submarine_client.create_environment(environment_spec=environment_spec)
    # The server echoes back the spec; use its name for the follow-up calls.
    environment_name = environment["environmentSpec"]["name"]
    submarine_client.get_environment(environment_name)
    submarine_client.list_environments()
    # Clean up so the test is repeatable.
    submarine_client.delete_environment(environment_name)
| 1.84375 | 2 |
test/PR_test/unit_test/backend/test_categorical_crossentropy.py | Phillistan16/fastestimator | 0 | 12759127 | <reponame>Phillistan16/fastestimator
# Copyright 2020 The FastEstimator Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
import unittest
import numpy as np
import tensorflow as tf
import torch
import fastestimator as fe
class TestCategoricalCrossEntropy(unittest.TestCase):
    """Check fe.backend.categorical_crossentropy against hand-computed values.

    Covers both TensorFlow and PyTorch tensors, with from_logits True/False
    and with average_loss True (scalar) / False (per-sample vector).
    """

    @classmethod
    def setUpClass(cls):
        # Identical 3-sample, 3-class one-hot labels and predictions in
        # both frameworks so the expected values can be shared.
        cls.tf_true = tf.constant([[0, 1, 0], [1, 0, 0], [0, 0, 1]])
        cls.tf_pred = tf.constant([[0.1, 0.8, 0.1], [0.9, 0.05, 0.05], [0.1, 0.2, 0.7]])
        cls.torch_true = torch.tensor([[0, 1, 0], [1, 0, 0], [0, 0, 1]])
        cls.torch_pred = torch.tensor([[0.1, 0.8, 0.1], [0.9, 0.05, 0.05], [0.1, 0.2, 0.7]])

    def test_categorical_crossentropy_average_loss_true_tf(self):
        """TF, probabilities in, scalar mean loss out."""
        obj1 = fe.backend.categorical_crossentropy(y_pred=self.tf_pred, y_true=self.tf_true).numpy()
        obj2 = 0.22839302
        self.assertTrue(np.allclose(obj1, obj2))

    def test_categorical_crossentropy_average_loss_false_tf(self):
        """TF, probabilities in, per-sample losses out."""
        obj1 = fe.backend.categorical_crossentropy(y_pred=self.tf_pred, y_true=self.tf_true, average_loss=False).numpy()
        obj2 = np.array([0.22314353, 0.10536055, 0.35667497])
        self.assertTrue(np.allclose(obj1, obj2))

    def test_categorical_crossentropy_from_logits_average_loss_true_tf(self):
        """TF, logits in, scalar mean loss out."""
        obj1 = fe.backend.categorical_crossentropy(y_pred=self.tf_pred,
                                                   y_true=self.tf_true,
                                                   average_loss=True,
                                                   from_logits=True).numpy()
        obj2 = 0.69182307
        self.assertTrue(np.allclose(obj1, obj2))

    def test_categorical_crossentropy_from_logits_average_loss_false_tf(self):
        """TF, logits in, per-sample losses out."""
        obj1 = fe.backend.categorical_crossentropy(y_pred=self.tf_pred,
                                                   y_true=self.tf_true,
                                                   average_loss=False,
                                                   from_logits=True).numpy()
        obj2 = np.array([0.6897267, 0.6177929, 0.7679496])
        self.assertTrue(np.allclose(obj1, obj2))

    def test_categorical_crossentropy_average_loss_true_torch(self):
        """Torch, probabilities in, scalar mean loss out."""
        obj1 = fe.backend.categorical_crossentropy(y_pred=self.torch_pred, y_true=self.torch_true).numpy()
        obj2 = 0.22839302
        self.assertTrue(np.allclose(obj1, obj2))

    def test_categorical_crossentropy_average_loss_false_torch(self):
        """Torch, probabilities in, per-sample losses out."""
        obj1 = fe.backend.categorical_crossentropy(y_pred=self.torch_pred, y_true=self.torch_true,
                                                   average_loss=False).numpy()
        obj2 = np.array([0.22314353, 0.10536055, 0.35667497])
        self.assertTrue(np.allclose(obj1, obj2))

    def test_categorical_crossentropy_from_logits__average_loss_true_torch(self):
        """Torch, logits in, scalar mean loss out."""
        obj1 = fe.backend.categorical_crossentropy(y_pred=self.torch_pred,
                                                   y_true=self.torch_true,
                                                   average_loss=True,
                                                   from_logits=True).numpy()
        obj2 = 0.69182307
        self.assertTrue(np.allclose(obj1, obj2))

    def test_categorical_crossentropy_from_logits_average_loss_false_torch(self):
        """Torch, logits in, per-sample losses out."""
        obj1 = fe.backend.categorical_crossentropy(y_pred=self.torch_pred,
                                                   y_true=self.torch_true,
                                                   average_loss=False,
                                                   from_logits=True).numpy()
        obj2 = np.array([0.6897267, 0.6177929, 0.7679496])
        self.assertTrue(np.allclose(obj1, obj2))
| 2.046875 | 2 |
src/other/ext/stepcode/misc/wiki-scripts/update-matrix.py | dservin/brlcad | 262 | 12759128 | <gh_stars>100-1000
#!/usr/bin/env python
#you probably want to run the ctest script that calls this instead:
# ctest -S ctest_matrix.cmake
#must be ran from scl/build_matrix
from __future__ import print_function
from xml.etree import ElementTree as ET
import os
from datetime import date
import subprocess
import codecs
import io
#ctest xml file layout
#<Site ...=...>
# <testing>
# <StartDateTime>..</>
# <StartTestTime>..</>
# <TestList>..</>
# <Test status=..>..</>
# <EndDateTime>Dec 28 17:49 EST</EndDateTime>
# <EndTestTime>1325112579</EndTestTime>
# <ElapsedMinutes>1.9</ElapsedMinutes>
# </Testing>
#</Site>
#summary (aka 's') is a table near the top of the document
#body (aka 'b') contains the details for all schemas
def main():
    """Regenerate the schema build-matrix wiki page and push it to the wiki repo.

    Finds this run's CTest XML, renders it as HTML/markdown into the wiki's
    Schema-build-matrix.md, then commits and pushes the change.
    """
    xml_file = find_xml()
    wikipath, matrix = find_wiki()
    #codecs.open is deprecated but io.open doesn't seem to work, and open() complains of unicode
    out = codecs.open(matrix,encoding='utf-8',mode='w')
    out.write( header() )
    out.write( read_tests(xml_file) )
    out.close()
    git_push(wikipath, matrix)
def find_xml():
    """Locate this year's CTest results XML under Testing/.

    Exits with an error if more than one candidate directory exists, and
    now also exits cleanly when none is found (previously the function fell
    through and crashed with an unbound local variable).

    :return: path to Testing/<run-dir>/Test.xml
    """
    xml = None
    count = 0
    for dirname in os.listdir("Testing"):
        # CTest names the run directory after the start timestamp, which
        # contains the current year.
        if str(date.today().year) in dirname:
            count += 1
            if count > 1:
                print("Too many directories, exiting")
                exit(1)
            xml = os.path.join("Testing", dirname, "Test.xml")
    if xml is None:
        print("No test results directory found, exiting")
        exit(1)
    return xml
def find_wiki():
    """Locate the wiki checkout, freshen it with 'git pull', and find the matrix file.

    Exits with an error when the wiki repo or the matrix file is missing.
    :return: tuple (wiki repo path, path of Schema-build-matrix.md)
    """
    #find wiki and matrix file, issue 'git pull'
    # NOTE(review): assumes the wiki checkout lives two levels above the cwd.
    wikipath = os.path.abspath("../../wiki-scl")
    if not os.path.isdir(os.path.join(wikipath,".git")):
        print("Can't find wiki or not a git repo")
        exit(1)
    p = subprocess.call(["git", "pull", "origin"], cwd=wikipath)
    if not p == 0:
        print("'git pull' exited with error")
        exit(1)
    matrix = os.path.join(wikipath, "Schema-build-matrix.md")
    if not os.path.isfile(matrix):
        print("Matrix file doesn't exist or isn't a file")
        exit(1)
    return wikipath,matrix
def git_push(path,f):
    """Stage, commit and push file f in the git repo at path.

    Exits with an error if any of the three git commands fails.
    :param path: working directory of the git repository
    :param f: path of the file to add and commit
    """
    p = subprocess.call(["git", "add", f], cwd=path)
    if not p == 0:
        print("'git add' exited with error")
        exit(1)
    msg = date.today().__str__() + " - schema matrix updated by update-matrix.py"
    p = subprocess.call(["git", "commit", "-m", msg ], cwd=path)
    if not p == 0:
        print("'git commit' exited with error")
        exit(1)
    p = subprocess.call(["git", "push", "origin"], cwd=path)
    if not p == 0:
        print("'git push' exited with error")
        exit(1)
def header():
    """Build the page header: creation date, latest commit link, summary table opener.

    NOTE(review): subprocess.check_output returns bytes on Python 3, so the
    slicing and str concatenation below only work under Python 2 (consistent
    with the __future__ print_function import at the top of this file) —
    confirm the intended interpreter before modernizing.
    """
    h = "## Created " + date.today().__str__() + "\n" + "### Current as of commit "
    # one-line git log formatted for direct embedding in the wiki page
    l = subprocess.check_output(["git", "log", """--pretty=format:%H Commit Summary: %s<br>Author: %aN<br>Date: %aD""", "-n1"])
    h += "[" + l[:8] + "](http://github.com/mpictor/StepClassLibrary/commit/" + l[:l.find(" ")]
    h += ") --\n<font color=grey>" + l[l.find(" ")+1:] + "</font>\n\n----\n"
    h += "### Summary\n<table width=100%><tr><th>Schema</th><th>Generate</th><th>Build</th></tr>"
    return h
def read_tests(xml):
    """Read all <Test> elements from the CTest XML and render the page body.

    :param xml: path of the Test.xml file
    :return: mixed HTML/markdown string (summary table followed by the
        per-schema details). Returns an empty string when the file cannot be
        parsed — previously None was returned, which made the caller's
        file.write() crash with a TypeError.
    """
    try:
        tree = ET.parse(xml)
    except Exception as inst:
        print("Unexpected error opening %s: %s" % (xml, inst))
        return ""
    root = tree.getroot()
    testing = root.find("Testing")
    tests = testing.findall("Test")
    summary = ""
    body = ""
    for test in tests:
        # schema_info returns ("", "") for tests that are not generate_cpp_*
        s, b = schema_info(test, tests)
        summary += s
        body += b
    summary += "</table>\n\n"
    return summary + body
def schema_info(test,tests):
    """Render the summary row and detail section for one schema's tests.

    Only acts on generate_cpp_* tests; the matching build_cpp_sdai_* test is
    looked up in `tests` when generation passed.
    :param test: one <Test> element
    :param tests: all <Test> elements (searched for the build counterpart)
    :return: tuple (summary row html, body section html); both empty for
        non-generate tests
    """
    # this returns html & markdown formatted summary and body strings
    # for the generate and build tests for a single schema
    s=""
    b=""
    name = test.find("Name").text
    if name.startswith("generate_cpp_"):
        #print this one. if it passes, find and print build.
        ap = name[len("generate_cpp_"):]
        s += "<tr><td><a href=#" + ap + ">" + ap.title() + "</a></td><td>"
        s += test_status(test) + "</td><td>"
        # anchor target for the summary-table link above
        b += "----\n<a name=\"wiki-" + ap + "\"></a>\n"
        b += "### Schema " + ap.title()
        b += "<table width=100%>"
        b += test_table("generation",test)
        if test.get("Status") == "passed":
            for build in tests:
                if build.find("Name").text == "build_cpp_sdai_" + ap:
                    s += test_status(build) + "</td></tr>\n"
                    b += test_table("compilation",build)
                    break
        else:
            # generation failed: no build was attempted
            s += "----</td></tr>\n"
        b += "</table>\n"
    return s,b
def test_table(ttype, test):
    """Render the detail row(s) for one generation/compilation test.

    Parses the test's captured output for warnings and errors and formats
    them as an HTML table (capped at ~20 entries per category).
    :param ttype: label for the row, "generation" or "compilation"
    :param test: CTest <Test> element whose output is parsed
    :return: HTML/markdown text to append to the page body
    """
    # populate the table for one test
    # returns: html & markdown formatted text to be added to 'body'
    b = "<tr><td>Code " + ttype
    output = test.find("Results").find("Measurement").find("Value").text
    w = output.count("WARNING")
    w += output.count("warning")
    lines = output.split("\n")
    # NOTE(review): lines[-2] assumes the captured output has at least two
    # lines — confirm this always holds for ctest output.
    if "The rest of the test output was removed since it exceeds the threshold of" in lines[-2]:
        trunc1 = "at least "
        trunc2 = "(ctest truncated output)"
    else:
        trunc1 = ""
        trunc2 = ""
    if test.get("Status") == "passed":
        #print summary in b
        b += " succeeded with " + trunc1 + w.__str__() + " warnings " + trunc2
        if w == 0: #nothing to print in the table, so skip it
            b += "</td></tr>\n"
            return b
    else:
        #print warnings and errors in b
        e = output.count("ERROR")
        e += output.count("error")
        b += " failed with %s%d warnings and %d errors %s" %(trunc1, w, e, trunc2)
    b += "<br>\n<table border=1 width=100%>\n"
    b += "<tr><th>Line</th><th>Text</th></tr>\n"
    # ERRORs
    # 242_n2813_mim_lf.exp:2278: --ERROR: Expected a type...
    # gcc errors look like ???
    l=0
    for line in lines:
        if ": --ERROR:" in line:
            # express-parser style: file:line: --ERROR: text
            l += 1
            c1 = line.find(":")
            c2 = line.find(":",c1+1)
            b += "<tr><td>" + line[c1+1:c2] + "</td><td>" + line[c2+4:] + "</td></tr>\n"
        elif ": error:" in line:
            # gcc style: file:line:col: error: text
            l += 1
            c1 = line.find(":")
            c2 = line.find(":",c1+1)
            c3 = line.find(":",c2+1) #skip the character number
            b += "<tr><td>" + line[c1+1:c2] + "</td><td>" + line[c3+2:] + "</td></tr>\n"
        if l > 20:
            b += "<tr><td>-</td><td><font color=red>-- maximum number of errors printed --</font></td></tr>\n"
            break
    # WARNINGs
    # ap239_arm_lf.exp:2731: WARNING: Implicit downcast...
    # WARNING: in SELECT TYPE date_or_date... (multi-line warning)
    # compstructs.cc:28:23: warning: unused
    l=0
    for line in lines:
        if ": WARNING" in line:
            l += 1
            c1 = line.find(":")
            c2 = line.find(":",c1+1)
            b += "<tr><td>" + line[c1+1:c2] + "</td><td>" + line[c2+2:] + "</td></tr>\n"
        elif "WARNING" in line:
            # warning with no file:line prefix (e.g. multi-line warnings);
            # NOTE(review): this branch does not increment l, so such lines
            # are not counted against the 20-entry cap.
            b += "<tr><td>????</td><td>" + line + "</td></tr>\n"
        elif ": warning:" in line:
            l += 1
            c1 = line.find(":")
            c2 = line.find(":",c1+1)
            c3 = line.find(":",c2+1) #skip the character number
            b += "<tr><td>" + line[c1+1:c2] + "</td><td>" + line[c3+2:] + "</td></tr>\n"
        if l > 20:
            b += "<tr><td>-</td><td><font color=red>-- maximum number of warnings printed --</font></td></tr>\n"
            break
    b += "</table></td></tr>\n"
    return b
def test_status(test):
t = ""
for m in test.find("Results").findall("NamedMeasurement"):
if m.get("name") == "Execution Time":
t = " in " + m.find("Value").text + "s"
break
if test.get("Status") == "passed":
s = "<font color=green>PASS</font>"
elif test.get("Status") == "failed":
s = "<font color=red>FAIL</font>"
else:
s = "<font color=cyan>" + test.get("Status") + "</font>"
return s + t
# Entry point when run as a script (normally invoked via ctest_matrix.cmake).
if __name__ == "__main__":
    main()
| 2.3125 | 2 |
general_problems/palindrome_num.py | styam/coading_practice | 0 | 12759129 | <gh_stars>0
def palindrome_num(num=None):
    """Check whether a non-negative integer is a palindrome.

    Generalized (backward compatible): the number may now be passed as an
    argument; when omitted it is read interactively via input(), exactly as
    before. The verdict is still printed, and is additionally returned.

    :param num: integer to test, or None to prompt the user
    :return: True if the number reads the same reversed, False otherwise
    """
    if num is None:
        num = int(input("Enter a number:"))
    temp = num
    rev = 0
    # Peel off digits from the right and build the reversed number.
    while num > 0:
        dig = num % 10
        rev = rev * 10 + dig
        num = num // 10
    is_pal = temp == rev
    if is_pal:
        print("The number is palindrome!")
    else:
        print("Not a palindrome!")
    return is_pal

if __name__ == "__main__":
    # Guarded so that importing this module no longer blocks on input().
    palindrome_num()
web/cable_bill.py | pearsonk27/billpay | 0 | 12759130 | <reponame>pearsonk27/billpay<gh_stars>0
from selenium import webdriver
from web.views import bill_steps
def run():
    """Drive the Norwood UniPay site with Selenium to pay the cable bill.

    Walks the whole flow via BillSteps: open the town start page, navigate
    to the cable bill page, look up the account, fill in the amount due,
    check out as a guest, and submit the card payment.
    """
    steps = bill_steps.BillSteps(webdriver.Chrome(), "cableBill")
    steps.go_to_norwood_start_page()
    # driver.get("https://unipaygold.unibank.com/transactioninfo.aspx?customerid=444")
    # Given website setup, you cannot get to the right link without starting the session with norwood id (444)
    # Link to cable bill is done with javascript postback
    steps.go_to_cable_bill_page()
    # Enter Account Number
    steps.enter_account_id()
    # Assert you have the right account
    steps.assert_name()
    # Get amount due
    amount = steps.get_amount_due()
    # Type amount in Pay Amount
    steps.set_amount_due(amount)
    # Assert that the total amount is the amount you have in memory
    steps.assert_total_is_amount(amount)
    # Submit form
    # driver.execute_script("javascript:WebForm_DoPostBackWithOptions(new WebForm_PostBackOptions("ctl00$ctl00$LayoutArea$MainContent$Cart1$rptCart$ctl02$btnCheckout", "", true, "", "", false, false))")
    steps.go_to_checkout()
    steps.checkout_as_guest()
    # Fill in the billing address fields (multi-field setter)
    steps.set_address_info()
    # Next page is credit card info
    steps.toggle_credit_card_radio_button()
    steps.set_payment_info()
    steps.confirm_payment()
    steps.finish()
| 2.9375 | 3 |
bot.py | HugoS99/FaceBot | 0 | 12759131 | <filename>bot.py
from selenium import webdriver
import random
import os
from selenium.webdriver.common.keys import Keys
from secretsP import loginEmail, password
from time import sleep
from urllib.request import (urlretrieve)
import pathlib
import string
class FaceBot:
    """Selenium bot that automates a Facebook Messenger conversation.

    Capabilities: send a random line or joke from a text file, send a random
    meme image, reply to questions with the top Google result, send the top
    Google Images hit, or post song lyrics scraped from Genius. All element
    lookups use hard-coded absolute XPaths, so the bot is fragile against
    any Facebook/Google/Genius layout change.
    """

    # link to the current conversation
    url = 'addlatter'

    def __init__(self):
        # NOTE(review): expects chromedriver.exe one directory above the cwd.
        self.driver = webdriver.Chrome('../chromedriver.exe')

    def login(self):
        """Log in to Facebook with the credentials imported from secretsP."""
        self.driver.get(self.url)
        sleep(1)
        emailin = self.driver.find_element_by_xpath(
            '/html/body/div[1]/div[4]/div[1]/div/div[2]/div[2]/form/div[2]/div[1]/input')
        emailin.send_keys(loginEmail)
        passin = self.driver.find_element_by_xpath(
            '/html/body/div[1]/div[4]/div[1]/div/div[2]/div[2]/form/div[2]/div[2]/input')
        passin.send_keys(password)
        button = self.driver.find_element_by_xpath(
            '/html/body/div[1]/div[4]/div[1]/div/div[2]/div[2]/form/div[2]/div[3]/button')
        button.click()

    def messagerandom(self):
        """Send a random line from pick.txt into the conversation."""
        emailin = self.driver.find_element_by_xpath(
            '/html/body/div[1]/div[3]/div[1]/div/div/div/div[2]/span/div[2]/div[2]/div[2]/div[2]/div[2]/div/div/div[1]/div/div[2]/div/div/div/div')
        emailin.send_keys(random.choice(list(open('pick.txt'))))
        emailin.send_keys(Keys.ENTER)

    def message(self,text):
        """Type `text` into the message box and send it."""
        emailin = self.driver.find_element_by_xpath(
            '/html/body/div[1]/div[3]/div[1]/div/div/div/div[2]/span/div[2]/div[2]/div[2]/div[2]/div[2]/div/div/div[1]/div/div[2]/div/div/div/div')
        emailin.send_keys(text)
        emailin.send_keys(Keys.ENTER)

    def messagejoke(self):
        """Send a random line from shittyjokes.txt into the conversation."""
        emailin = self.driver.find_element_by_xpath(
            '/html/body/div[1]/div[3]/div[1]/div/div/div/div[2]/span/div[2]/div[2]/div[2]/div[2]/div[2]/div/div/div[1]/div/div[2]/div/div/div/div')
        emailin.send_keys(random.choice(list(open('shittyjokes.txt'))))
        emailin.send_keys(Keys.ENTER)

    def sendimage(self):
        """Attach a random image from the hard-coded memes folder and send it."""
        sendimage = self.driver.find_element_by_xpath(
            '/html/body/div[1]/div[3]/div[1]/div/div/div/div[2]/span/div[2]/div[2]/div[2]/div[1]/div[1]/form/div/span/input')
        # NOTE(review): machine-specific absolute path; also the unescaped
        # backslashes (\h, \D, \m) only survive because they are not
        # recognized escapes — consider a raw string.
        dirname = "C:\\Users\hugos\Documents\PhytonShit\\bot\memes"
        filename = random.choice(os.listdir(dirname))
        sendimage.send_keys(os.path.join(dirname, filename))
        emailin = self.driver.find_element_by_xpath(
            '/html/body/div[1]/div[3]/div[1]/div/div/div/div[2]/span/div[2]/div[2]/div[2]/div[3]/div[2]/div/div/div[1]/div/div[2]/div/div/div/div')
        emailin.send_keys('')
        emailin.send_keys(Keys.ENTER)

    def lastmessage(self):
        """Return (and print) the text of the last message in the conversation."""
        lastmessage = self.driver.find_element_by_xpath(
            '(/html/body//a[@data-href=\'' + self.url + '\']/div/div[2]/div[2]/span/span)[last()]').text
        print(lastmessage)
        return lastmessage

    def replay_to_last_message(self):
        """If the last message is a question, reply with the first Google hit's URL."""
        message= self.lastmessage().strip()
        if message.endswith('?'):
            # second browser window just for the Google search
            googleSearch= webdriver.Chrome('../chromedriver.exe')
            googleSearch.get('https://www.google.com')
            googleQuery = googleSearch.find_element_by_xpath('/html/body/div/div[3]/form/div[2]/div[1]/div[1]/div/div[2]/input')
            googleQuery.send_keys(message)
            googleQuery.send_keys(Keys.ENTER)
            # sleep(10)
            # googleSearch.close()
            linkList = []
            links = googleSearch.find_elements_by_xpath('//div[@class=\'bkWMgd\']//a[@href]')
            for link in links:
                linkList.append(link.get_attribute('href'))
            # NOTE(review): raises IndexError when no results are found.
            self.message(linkList[0])
            googleSearch.quit()

    def send_google_message(self):
        """Google-Image-search the last message, download the top hit, send it."""
        message = self.lastmessage().strip()
        googleSearch = webdriver.Chrome('../chromedriver.exe')
        googleSearch.get('https://www.google.com')
        googleQuery = googleSearch.find_element_by_xpath(
            '/html/body/div/div[3]/form/div[2]/div[1]/div[1]/div/div[2]/input')
        googleQuery.send_keys(message)
        googleQuery.send_keys(Keys.ENTER)
        # switch to the Images tab of the results page
        googleSearch.find_element_by_xpath('/html/body/div[7]/div[3]/div[4]/div/div/div[1]/div/div/div[1]/div/div[2]/a').click()
        # sleep(10)
        # googleSearch.close()
        images = []
        imagesResult = googleSearch.find_elements_by_xpath('//img[@class="rg_i Q4LuWd tx8vtf"]')
        for image in imagesResult:
            images.append(image.get_attribute('src'))
        # download the first thumbnail to a random temp file next to this script
        pathtotempfile=pathlib.Path(__file__).parent.absolute()
        filename = ''.join([random.choice(string.ascii_letters + string.digits) for n in range(32)])
        filetemp=os.path.join(pathtotempfile, filename)
        sendimage = self.driver.find_element_by_xpath(
            '//input[@data-testid=\'photo_input\']')
        urlretrieve(images[0],filetemp+'.jpg')
        sendimage.send_keys(filetemp+'.jpg')
        emailin = self.driver.find_element_by_xpath(
            '/html/body/div[1]/div[3]/div[1]/div/div/div/div[2]/span/div[2]/div[2]/div[2]/div[3]/div[2]/div/div/div[1]/div/div[2]/div/div/div/div')
        emailin.send_keys('')
        emailin.send_keys(Keys.ENTER)
        #os.remove(filetemp+'.jpg')
        googleSearch.quit()
        sleep(1)
        os.remove(filetemp + '.jpg')

    def get_song_lyrics(self):
        """Search Genius for the last message and send the scraped lyrics."""
        message = self.lastmessage().strip()
        geniusWebDriver = webdriver.Chrome('../chromedriver.exe')
        geniusWebDriver.get('https://genius.com/')
        geniusInput=geniusWebDriver.find_element_by_xpath('/html/body/div[2]/div/div[1]/form/input')
        geniusInput.send_keys(message)
        geniusInput.send_keys(Keys.ENTER)
        sleep(2)
        # open the second song card of the search results
        temp=geniusWebDriver.find_elements_by_xpath('//mini-song-card/a')
        temp[1].click()
        #get lyrics from website
        sleep(2)
        result=''
        liricsf=geniusWebDriver.find_elements_by_xpath('//section//p/*')
        for liric in liricsf:
            try:
                temp=result+' '+liric.text
                print(temp)
                result=temp.replace('\n','')
            except:
                # best-effort scrape: skip elements whose text can't be read
                print('Error on read')
        self.message(result)
        geniusWebDriver.close()
pyaltitude/team.py | sdizazzo/pyaltitude | 1 | 12759132 | from . import base
import logging
# Module-level logger, named after this module so log records are attributable.
logger = logging.getLogger(__name__)
class Team(base.Base):
    """Team entity; currently inherits all behaviour from base.Base unchanged."""
    pass
| 1.4375 | 1 |
backend/urlshortener/models.py | bright2227/URLShortener | 0 | 12759133 | <reponame>bright2227/URLShortener
from django.db import models
class Shortener(models.Model):
    """A shortened URL: stores only the original (long) target URL."""
    # The destination URL that the short code redirects to.
    long_url = models.URLField(max_length=200)
class PurgeRecord(models.Model):
    """Marks a Shortener for purging; records when it was marked."""
    # One purge record per Shortener; deleting the Shortener cascades here.
    marked = models.OneToOneField("Shortener", on_delete=models.CASCADE)
    # Set automatically at insert time.
    created = models.DateTimeField(auto_now_add=True)
| 2.109375 | 2 |
cogs/utils/driver/__init__.py | FrostiiWeeb/OpenRobot-Bot | 8 | 12759134 | from .proxy import get_proxy
from selenium import webdriver
from selenium.webdriver.chrome.options import Options
class Driver:
    """Context manager that builds a headless Chrome WebDriver.

    Optionally routes traffic through a proxy (an explicit ``proxy`` address,
    or one fetched from the pool when ``use_proxy`` is set) and can load an
    ad-block extension.  The driver is shut down on context exit.
    """

    def __init__(self, *, ad_block: bool = False, use_proxy: bool = False, proxy: str = None):
        self.ad_block = ad_block
        self.use_proxy = use_proxy
        self.proxy = proxy
        self.driver = None

    def __exit__(self, exc_type, exc_value, traceback):
        if self.driver:
            # BUGFIX: quit() shuts down the whole driver and the chromedriver
            # process; close() only closed the window and leaked the process.
            self.driver.quit()

    def __enter__(self):
        if self.use_proxy or self.proxy:
            # An explicitly supplied proxy wins over an auto-fetched one.
            addr = self.proxy if self.proxy else self.get_proxy()
        else:
            # BUGFIX: dropped the unused `proxy` local from the old tuple unpack.
            addr = None

        chrome_options = Options()
        if addr:
            chrome_options.add_argument(f"--proxy-server={addr}")
        chrome_options.add_argument("--headless")
        chrome_options.add_argument("--no-sandbox")
        if self.ad_block:
            chrome_options.add_extension("/home/ubuntu/adblock_ext.crx")
        self.driver = webdriver.Chrome(
            "/home/ubuntu/chromedriver", options=chrome_options
        )
        return self.driver

    @staticmethod
    def get_proxy():
        """Return a ``host:port`` string from the proxy pool, or None if empty."""
        proxy = get_proxy()
        if not proxy:
            return None
        return proxy["host"] + ":" + str(proxy["port"])
| 2.71875 | 3 |
src/tashi/aws/trans.py | apache/tashi | 6 | 12759135 | <reponame>apache/tashi<filename>src/tashi/aws/trans.py<gh_stars>1-10
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import types
def transArgsHelper(functionName, args):
    """Recursively normalise a CGI argument dict into EC2-API nested form.

    Strings pass through unchanged.  For dicts: AWS signature/auth keys are
    stripped, the first character of every key is lower-cased, dotted keys
    ("foo.bar") are expanded into nested dicts, and values are normalised
    recursively.

    Args:
        functionName: name of the EC2 action (unused here; passed through to
            the recursive calls for symmetry with transArgs).
        args: a string or a (possibly nested) dict of request arguments.
    Returns:
        The normalised argument structure (mutated in place for dicts).
    """
    # BUGFIX/modernisation: was `type(args) == types.StringType` (Python 2 only).
    if isinstance(args, str):
        return args
    # Drop AWS signature/auth parameters that are not real call arguments.
    for key in ['Action', 'AWSAccessKeyId', 'SignatureVersion', 'Timestamp', 'Version', 'Signature']:
        if key in args:
            del args[key]
    # Lower-case the first character of each remaining key.  Iterate over a
    # snapshot because the dict is mutated (fixes the Python 3 "dict changed
    # size during iteration" error; in Python 2 keys() already returned a list).
    for key in list(args.keys()):
        firstChar = key[0]
        if (firstChar.lower() != firstChar):
            args[firstChar.lower() + key[1:]] = args[key]
            del args[key]
    # Expand dotted keys into nested dicts: {"a.b": v} -> {"a": {"b": v}}.
    for key in list(args.keys()):
        if ("." in key):
            (base, sep, sub) = key.partition(".")
            args[base] = args.get(base, {})
            args[base][sub] = args[key]
            del args[key]
    # Normalise every value recursively.
    for key in list(args.keys()):
        args[key] = transArgsHelper(functionName, args[key])
    return args
def transArgs(functionName, args):
    """Normalise EC2 query arguments, with special handling for TerminateInstances.

    TerminateInstances expects its instance id wrapped as instancesSet.item.
    """
    args = transArgsHelper(functionName, args)
    if (functionName == 'TerminateInstances'):
        # pop() replaces the separate read + del of the original.
        args['instancesSet'] = {'item': args.pop('instanceId')}
    return args
def transNode(node):
    """Recursively strip the 'ns1:' prefix from a DOM node's name and its children's.

    Any error during the walk (e.g. a node type whose name cannot be reassigned)
    is deliberately ignored so the traversal stays best-effort.
    """
    try:
        for idx, child in enumerate(node.childNodes):
            node.childNodes[idx] = transNode(child)
        node.nodeName = node.nodeName.replace("ns1:", "")
    except:
        pass
    return node
def transResult(functionName, doc):
    """Extract and clean the SOAP response element for *functionName* from *doc*.

    Strips the ns1 prefix via transNode, stamps the EC2 2009-03-01 namespace,
    and returns the response serialized as an XML string.  On any failure a
    SOAP client fault is sent through _CGISendFault and 0 is returned.
    """
    try:
        newRoot = transNode(doc.getElementsByTagName("ns1:" + functionName + "Response")[0])
        newRoot.setAttribute('xmlns', 'http://ec2.amazonaws.com/doc/2009-03-01/')
        newRoot.removeAttribute('xsi:type')
        response = newRoot.cloneNode(True)
        responseStr = '<?xml version="1.0"?>\n' + str(response.toxml())
        return responseStr
    except Exception as e:  # BUGFIX: 'except Exception, e' is Python-2-only syntax
        _CGISendFault(Fault(Fault.Client, str(e)))
        return 0
| 2.015625 | 2 |
storage/twitter.py | veanome/armchair-expert | 55 | 12759136 | <filename>storage/twitter.py
import datetime
from typing import List, Tuple
import tweepy
from sqlalchemy import Column, Integer, DateTime, BigInteger, String, BLOB
from sqlalchemy import func
from sqlalchemy import create_engine
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker, scoped_session
from tweepy import Status
from config.twitter import TWITTER_TRAINING_DB_PATH, TwitterApiCredentials
from storage.storage_common import TrainingDataManager
# Declarative base shared by all ORM models defined in this module.
Base = declarative_base()
class ScraperStatus(Base):
    """Per-screen-name scraper progress: the newest tweet id already fetched."""
    __tablename__ = "scraperstatus"
    id = Column(Integer, index=True, primary_key=True)
    # Twitter handle this progress row belongs to.
    screen_name = Column(String, nullable=False)
    # Highest status id scraped so far; used as `since_id` on the next scrape.
    since_id = Column(BigInteger, nullable=False)
class Tweet(Base):
    """A scraped tweet persisted as training data."""
    __tablename__ = "tweet"
    id = Column(Integer, index=True, primary_key=True)
    # Twitter's status id — unique per tweet, used for de-duplication.
    status_id = Column(BigInteger, nullable=False, index=True, unique=True)
    # Id of the tweet's author.
    user_id = Column(BigInteger, nullable=False)
    in_reply_to_status_id = Column(BigInteger)
    in_reply_to_user_id = Column(BigInteger)
    # Stored as int(bool): 1 if this tweet is a retweet.
    retweeted = Column(Integer, nullable=False)
    timestamp = Column(DateTime, nullable=False, default=datetime.datetime.utcnow)
    # 1 once the tweet has been consumed for training, else 0.
    trained = Column(Integer, nullable=False, default=0)
    # Tweet text stored UTF-8-encoded as bytes.
    text = Column(BLOB, nullable=False)
    def __repr__(self):
        # Decode the stored bytes back to str for display.
        return self.text.decode()
# Module-level database plumbing: SQLite engine, schema creation, and a
# thread-local (scoped) session factory shared by the classes below.
engine = create_engine('sqlite:///%s' % TWITTER_TRAINING_DB_PATH)
Base.metadata.create_all(engine)
session_factory = sessionmaker()
session_factory.configure(bind=engine)
Session = scoped_session(session_factory)
class TwitterTrainingDataManager(TrainingDataManager):
    """Persists tweepy Status objects as Tweet rows, de-duplicated by status id."""

    def __init__(self):
        TrainingDataManager.__init__(self, Tweet)
        self._session = Session()

    def store(self, data: Status):
        """Insert *data* as a new Tweet row unless its status id is already stored."""
        existing = self._session.query(Tweet).filter(Tweet.status_id == data.id).first()
        if existing is not None:
            return
        row = Tweet(status_id=data.id, user_id=data.user.id,
                    in_reply_to_user_id=data.in_reply_to_user_id,
                    in_reply_to_status_id=data.in_reply_to_status_id,
                    retweeted=int(data.retweeted),
                    timestamp=data.created_at, text=data.text.encode())
        self._session.add(row)
        self._session.commit()
class TwitterScraper(object):
    """Downloads a user's timeline via the Twitter API into the Tweet table.

    Progress is tracked two ways: the max status_id already present in the
    Tweet table (used to seed the resume point) and a per-screen-name
    ScraperStatus row whose since_id is advanced after each scrape.
    """
    def __init__(self, credentials: TwitterApiCredentials, screen_name: str):
        self._credentials = credentials
        self.screen_name = screen_name
        self.session = Session()
        # Resume point: highest tweet id already stored (0 if the table is empty).
        row = self.session.query(func.max(Tweet.status_id)).first()
        if row is not None:
            since_id = row[0] if row[0] is not None else 0
        else:
            since_id = 0
        self._latest_tweet_processed_id = since_id
        # Load — or create on first run — the persistent progress row.
        self.scraper_status = self.session.query(ScraperStatus).filter(
            ScraperStatus.screen_name == self.screen_name).first()
        if self.scraper_status is None:
            self.scraper_status = ScraperStatus(screen_name=screen_name, since_id=since_id)
            self.session.add(self.scraper_status)
            self.session.commit()
    def _auth(self):
        # Build an OAuth1 handler from the stored API credentials.
        auth = tweepy.OAuthHandler(self._credentials.consumer_key, self._credentials.consumer_secret)
        auth.set_access_token(self._credentials.access_token, self._credentials.access_token_secret)
        return auth
    def scrape(self, wait_on_rate_limit=True, learn_retweets=False):
        """Fetch new timeline tweets (after the stored since_id) and store them.

        :param wait_on_rate_limit: let tweepy sleep through API rate limits.
        :param learn_retweets: also store retweets (skipped by default).
        """
        auth = self._auth()
        api = tweepy.API(auth, wait_on_rate_limit=wait_on_rate_limit)
        # since_id == 0 means "never scraped" — fetch the whole timeline.
        if self.scraper_status.since_id == 0:
            tweets = tweepy.Cursor(api.user_timeline, screen_name=self.screen_name, count=100,
                                   lang="en").items()
        else:
            tweets = tweepy.Cursor(api.user_timeline, screen_name=self.screen_name, count=100,
                                   lang="en", since_id=self.scraper_status.since_id).items()
        for tweet in tweets:
            tweet_row = self.session.query(Tweet).filter(Tweet.status_id == tweet.id).first()
            if tweet_row is None:
                if not tweet.retweeted or (tweet.retweeted and learn_retweets):
                    tweet_row = Tweet(status_id=tweet.id, user_id=tweet.author.id,
                                      in_reply_to_status_id=tweet.in_reply_to_status_id,
                                      in_reply_to_user_id=tweet.in_reply_to_user_id, retweeted=tweet.retweeted,
                                      timestamp=tweet.created_at, text=tweet.text.encode())
                    self.session.add(tweet_row)
            # Store the highest ID so we can set it to since_id later
            if self._latest_tweet_processed_id is None or tweet.id > self._latest_tweet_processed_id:
                self._latest_tweet_processed_id = tweet.id
            # Normally it would be asinine to commit every insert, but we are rate limited by twitter anyway
            self.session.commit()
        # Complete scraper progress
        self.scraper_status.since_id = self._latest_tweet_processed_id
        self.session.commit()
| 2.6875 | 3 |
pyJMSHTML2_config.py | msghens/pyCanvasStomp | 0 | 12759137 | <filename>pyJMSHTML2_config.py
# Not necessary. Just wanted to separate program from credentials
def apikey():
    """Return the API key used for Canvas requests (placeholder value)."""
    return 'apikey'
def stomp_username():
    """Return the STOMP broker username (placeholder value)."""
    return 'stomp user'
def stomp_password():
    """Return the STOMP broker password (redacted placeholder)."""
    return '<PASSWORD>'
| 1.960938 | 2 |
goetia/alphabets.py | camillescott/boink | 3 | 12759138 | <reponame>camillescott/boink
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# (c) <NAME>, 2019
# File : alphabets.py
# License: MIT
# Author : <NAME> <<EMAIL>>
# Date : 27.02.2020
from goetia import libgoetia
# Re-export alphabet objects from the compiled libgoetia extension so callers
# can import them directly from goetia.alphabets.
# NOTE(review): the semantics of each alphabet live in libgoetia; the names
# suggest plain DNA, DNA+N, full IUPAC codes and an unconstrained alphabet —
# confirm against the extension's definitions.
DNA_SIMPLE = libgoetia.DNA_SIMPLE
DNAN_SIMPLE = libgoetia.DNAN_SIMPLE
IUPAC_NUCL = libgoetia.IUPAC_NUCL
ANY = libgoetia.ANY
| 1.195313 | 1 |
Alertor/Alertor.py | bourdibay/EbayAlertor | 2 | 12759139 |
import sys
from PyQt5.QtWidgets import QApplication
from PyQt5.QtWidgets import QStyle
from PyQt5.QtCore import Qt
import MainWindow
# Application entry point: build the Qt app, work around a Linux Gtk issue,
# centre the main window on screen and enter the event loop.
if __name__ == '__main__':
    app = QApplication(sys.argv)
    # Got a lot of errors related to Gtk on Linux:
    # Gtk-CRITICAL **: IA__gtk_widget_style_get: assertion 'GTK_IS_WIDGET (widget)' failed
    # A possible fix is provided here, by setting the fusion style:
    # http://stackoverflow.com/questions/35351024/pyqt5-gtk-critical-ia-gtk-widget-style-get-assertion-gtk-is-widget-widg
    if sys.platform == "linux" or sys.platform == "linux2":
        app.setStyle("fusion")
    window = MainWindow.MainWindow("Alertor")
    # Put in the center of the screen.
    window.setGeometry(QStyle.alignedRect(Qt.LeftToRight,
                                          Qt.AlignCenter,
                                          window.size(),
                                          app.desktop().availableGeometry()))
    window.show()
    # exec_() blocks until the last window closes; its status is the exit code.
    sys.exit(app.exec_())
| 2.265625 | 2 |
slybot/slybot/settings.py | ruairif/portia | 0 | 12759140 | <filename>slybot/slybot/settings.py<gh_stars>0
# Scrapy settings for slybot projects: plug in slybot's spider manager,
# close-spider extension, dedupe pipeline, spiderlets middleware and exporters.
SPIDER_MANAGER_CLASS = 'slybot.spidermanager.SlybotSpiderManager'
EXTENSIONS = {'slybot.closespider.SlybotCloseSpider': 1}
ITEM_PIPELINES = {'slybot.dupefilter.DupeFilterPipeline': 1}
SPIDER_MIDDLEWARES = {'slybot.spiderlets.SpiderletsMiddleware': 999} # as close as possible to spider output
PLUGINS = ['slybot.plugins.scrapely_annotations.Annotations']
SLYDUPEFILTER_ENABLED = True
PROJECT_DIR = 'slybot-project'
FEED_EXPORTERS = {
    'csv': 'slybot.exporter.SlybotCSVItemExporter',
}
# None means "export all fields" for the CSV exporter above.
CSV_EXPORT_FIELDS = None
# Optional local overrides: a local_slybot_settings.py on the path may
# redefine any of the above; its absence is deliberately not an error.
try:
    from local_slybot_settings import *
except ImportError:
    pass
| 1.648438 | 2 |
FirstPython/syntax/syntax_basic.py | cxMax/Python-practise-sample | 0 | 12759141 | <gh_stars>0
# Python 2 syntax demo script (print statements, raw_input) — not Python 3.
print 'hello world'
# Bare expression statements: evaluated and discarded (no visible effect).
__doc__
__name__
if True:
    print 'android'
    print 'ios'
else:
    print 'c++'
    print 'python'
# Backslash line continuation + implicit adjacent-string concatenation.
total = 'hello' \
'world'
process = "these are" \
"multi" \
"line"
single = '''this is a''' \
'''simple'''
print total
# this is an annotation
'''
multi-line annotations
'''
"""
also multi-line annotations
"""
# Python 2's raw_input (renamed input() in Python 3).
raw_input("please input something : ")
# Multiple statements on one line, separated by semicolons (discouraged style).
import sys; x = "saonima"; sys.stdout.write(x);
| 2.828125 | 3 |
regal/__main__.py | psurply/ReGAL | 43 | 12759142 | <filename>regal/__main__.py<gh_stars>10-100
import argparse
import logging
import sys
import tempfile
from regal.dump import dump, tt2v
from regal.pnr import pnr
from regal.synth import synth
_default_netlist_file = "netlist.json"
_default_jedec_file = "fuses.jed"
_default_dump_device = "/dev/ttyACM0"


def get_parser():
    """Build the top-level CLI parser with one sub-command per ReGAL action.

    Each sub-command stamps its name into ``args.action`` via set_defaults;
    with no sub-command given, ``action`` stays the empty string.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument("--verbose", "-v", action="count", default=0)
    parser.set_defaults(action="")
    subcommands = parser.add_subparsers()

    # synth: Verilog -> netlist (yosys required).
    synth = subcommands.add_parser(
        name="synth",
        help="synthesize a verilog file into a netlist (requires yosys)"
    )
    synth.add_argument("-o", "--outfile", default=_default_netlist_file)
    synth.add_argument("-s", "--show", action="store_true")
    synth.add_argument("rtl", nargs="+")
    synth.set_defaults(action="synth")

    # pnr: netlist -> JEDEC fuse file.
    pnr_cmd = subcommands.add_parser(
        name="pnr",
        help="place-and-route a netlist and output a JEDEC file"
    )
    pnr_cmd.add_argument("-o", "--outfile", default=_default_jedec_file)
    pnr_cmd.add_argument("-t", "--top", default="top")
    pnr_cmd.add_argument("config")
    pnr_cmd.add_argument("netlist", default=_default_netlist_file)
    pnr_cmd.set_defaults(action="pnr")

    # build: synth + pnr in one step.
    build = subcommands.add_parser(
        name="build",
        help="synthesize and place-and-route a verilog design"
    )
    build.add_argument("-o", "--outfile", default=_default_jedec_file)
    build.add_argument("-t", "--top", default="top")
    build.add_argument("config")
    build.add_argument("rtl", nargs="+")
    build.set_defaults(action="build")

    # dump: read a truth-table back from a programmed PAL.
    dump_cmd = subcommands.add_parser(
        name="dump",
        help="dump the truth-table from a PAL"
    )
    dump_cmd.add_argument("config")
    dump_cmd.set_defaults(action="dump")

    # tt2v: truth-table -> simplified Verilog.
    tt2v_cmd = subcommands.add_parser(
        name="tt2v",
        help="simplify and convert a truth-table to a verilog file"
    )
    tt2v_cmd.add_argument("infile")
    tt2v_cmd.add_argument("outfile")
    tt2v_cmd.add_argument("--clk", default=None)
    tt2v_cmd.add_argument("--seq", nargs="+", default=[])
    tt2v_cmd.set_defaults(action="tt2v")

    return parser
def main():
    """CLI entry point: configure logging, then dispatch on the chosen action."""
    parser = get_parser()
    args = parser.parse_args()
    # Map -v count to a log level, clamped at DEBUG.
    loglevels = [logging.WARN, logging.INFO, logging.DEBUG]
    if args.verbose >= len(loglevels):
        args.verbose = len(loglevels) - 1
    logging.basicConfig(stream=sys.stderr, level=loglevels[args.verbose],
                        format='[%(levelname)s][%(name)s] %(message)s')
    # ANSI-coloured level names for terminal output.
    logging.addLevelName(logging.DEBUG, "\033[1;34mDEBUG\033[1;0m")
    logging.addLevelName(logging.INFO, "\033[1;32mINFO \033[1;0m")
    logging.addLevelName(logging.WARNING, "\033[1;33mWARN \033[1;0m")
    logging.addLevelName(logging.ERROR, "\033[1;31mERROR\033[1;0m")
    if args.action == "build":
        # Synthesize into a temp netlist, then place-and-route it.
        # NOTE(review): delete=False leaves the temp netlist on disk after the
        # run — confirm whether that is intentional (e.g. for debugging).
        netlist_file = tempfile.NamedTemporaryFile(delete=False, suffix=".json")
        netlist = netlist_file.name
        netlist_file.close()
        if not synth(netlist, *args.rtl):
            sys.exit(1)
        if not pnr(netlist, args.config, args.outfile, args.top):
            sys.exit(1)
    elif args.action == "synth":
        if not synth(args.outfile, *args.rtl, show=args.show):
            sys.exit(1)
    elif args.action == "pnr":
        if not pnr(args.netlist, args.config, args.outfile, args.top):
            sys.exit(1)
    elif args.action == "dump":
        if not (dump(args.config)):
            sys.exit(1)
    elif args.action == "tt2v":
        if not (tt2v(args.infile, args.outfile, clock=args.clk, seq=args.seq)):
            sys.exit(1)
    else:
        # No sub-command given: show usage.
        parser.print_help()


if __name__ == "__main__":
    main()
| 2.4375 | 2 |
commands/public.py | onemec/cloudmapper | 0 | 12759143 | <filename>commands/public.py
from __future__ import print_function
import json
import sys
from shared.common import parse_arguments
from shared.public import get_public_nodes
__description__ = "Find publicly exposed services and their ports"
def public(accounts, config):
    """Print, as pretty JSON on stdout, every publicly exposed node across *accounts*.

    Warnings reported while collecting nodes are echoed to stderr per account.
    """
    exposed = []
    for account in accounts:
        nodes, warnings = get_public_nodes(account, config)
        exposed.extend(nodes)
        for warning in warnings:
            print("WARNING: {}".format(warning), file=sys.stderr)
    print(json.dumps(exposed, indent=4, sort_keys=True))
def run(arguments):
    """Command entry point: parse the shared CLI arguments, then run the report."""
    _, accounts, config = parse_arguments(arguments)
    public(accounts, config)
| 2.640625 | 3 |
func_prog.py | TomHam2021/Python2a_week5 | 0 | 12759144 | <reponame>TomHam2021/Python2a_week5<gh_stars>0
'''
# Functional composition
# • Lego is a popular toy because it’s so versatile
# • Similar to how Lego pieces can be combined, in functional programming it’s common to combine functions
def f(number): return number * 2
def g(number): return number + 1
x = 2
# print(f(2)) # detta ger --> 4
# print(g(2)) # detta ger --> 3
# skicka 2 till g(+1) --> 3 och vidare till f(*2) --> 6
y = f(g(x))
# skcika 2 till f(*2) --> 4 och vidare till g(+1) --> 5
z = g(f(x))
print(y, z)
# Output: 6 5
'''
# Lambda
# • Python supports anonymous functions with lambda
# • Useful to define simple functions
# lambda = anonymous function; the lambda's parameter sits between "lambda" and ":"
# below, `number` is passed into the functions, updated and returned
# (note: Pylance's quick-fix rewrote these lambdas into def statements :D)
# f = lambda number: number * 2
# g = lambda number: number + 1
def f(number): return number * 2
def g(number): return number + 1
x = 2
# g first (2 + 1 = 3), then f (3 * 2 = 6)
y = f(g(x))
# f first (2 * 2 = 4), then g (4 + 1 = 5)
z = g(f(x))
print(y, z)
# Output: 6 5
| 4.28125 | 4 |
Lab 1.py | Pranav847/MB215Lab1 | 0 | 12759145 | <gh_stars>0
# Lab exercise: print a greeting (the <NAME> placeholder was left by the template).
print("Hello world from <NAME>")
tests/mechanisms/test_Exponential.py | Bhaskers-Blu-Org1/differential-privacy-library | 0 | 12759146 | import numpy as np
from unittest import TestCase
from diffprivlib.mechanisms import Exponential
from diffprivlib.utils import global_seed
class TestExponential(TestCase):
    """Tests for diffprivlib's Exponential mechanism: construction, parameter
    validation, utility-list handling and the output distribution."""
    def setup_method(self, method):
        # Seed the global RNG only for probabilistic tests (names ending in
        # "prob") so their distribution checks are reproducible.
        if method.__name__ .endswith("prob"):
            global_seed(314159)
        self.mech = Exponential()
    def teardown_method(self, method):
        del self.mech
    def test_not_none(self):
        self.assertIsNotNone(self.mech)
    def test_class(self):
        # Exponential must be a DPMechanism subclass.
        from diffprivlib.mechanisms import DPMechanism
        self.assertTrue(issubclass(Exponential, DPMechanism))
    def test_no_params(self):
        # Randomising before any configuration must fail.
        with self.assertRaises(ValueError):
            self.mech.randomise("A")
    def test_no_epsilon(self):
        # Utility alone is not enough — epsilon must also be set.
        utility_list = [
            ["A", "B", 1],
            ["A", "C", 2],
            ["B", "C", 2]
        ]
        self.mech.set_utility(utility_list)
        with self.assertRaises(ValueError):
            self.mech.randomise("A")
    def test_inf_epsilon(self):
        # With infinite epsilon there is no randomisation: output == input.
        utility_list = [
            ["A", "B", 1],
            ["A", "C", 2],
            ["B", "C", 2]
        ]
        self.mech.set_utility(utility_list).set_epsilon(float("inf"))
        # print(_mech.randomise("A"))
        for i in range(1000):
            self.assertEqual(self.mech.randomise("A"), "A")
    def test_neg_epsilon(self):
        with self.assertRaises(ValueError):
            self.mech.set_epsilon(-1)
    def test_complex_epsilon(self):
        with self.assertRaises(TypeError):
            self.mech.set_epsilon(1+2j)
    def test_string_epsilon(self):
        with self.assertRaises(TypeError):
            self.mech.set_epsilon("Two")
    def test_non_zero_delta(self):
        # The exponential mechanism is pure-DP: delta must be zero.
        with self.assertRaises(ValueError):
            self.mech.set_epsilon_delta(1, 0.5)
    def test_no_utility(self):
        self.mech.set_epsilon(1)
        with self.assertRaises(ValueError):
            self.mech.randomise("1")
    def test_hierarchy_first(self):
        # Setting utility before epsilon must be allowed.
        utility_list = [
            ["A", "B", 1],
            ["A", "2", 2],
            ["B", "2", 2]
        ]
        self.mech.set_utility(utility_list)
        self.assertIsNotNone(self.mech)
    def test_non_string_hierarchy(self):
        # All utility-list labels must be strings.
        utility_list = [
            ["A", "B", 1],
            ["A", 2, 2],
            ["B", 2, 2]
        ]
        with self.assertRaises(TypeError):
            self.mech.set_utility(utility_list)
    def test_missing_utilities(self):
        # Every pair in the domain needs a utility entry.
        utility_list = [
            ["A", "B", 1],
            ["A", "C", 2]
        ]
        with self.assertRaises(ValueError):
            self.mech.set_utility(utility_list)
    def test_wrong_utilities(self):
        # Wrong container type (tuple instead of list) ...
        utility_list = (
            ["A", "B", 1],
            ["A", "C", 2],
            ["B", "C", 2]
        )
        with self.assertRaises(TypeError):
            self.mech.set_utility(utility_list)
        # ... non-numeric utility ...
        utility_list = [
            ["A", "B", 1],
            ["A", "C", 2],
            ["B", "C", "2"]
        ]
        with self.assertRaises(TypeError):
            self.mech.set_utility(utility_list)
        # ... and negative utility are all rejected.
        utility_list = [
            ["A", "B", 1],
            ["A", "C", 2],
            ["B", "C", -2]
        ]
        with self.assertRaises(ValueError):
            self.mech.set_utility(utility_list)
    def test_non_string_input(self):
        utility_list = [
            ["A", "B", 1],
            ["A", "C", 2],
            ["B", "C", 2]
        ]
        self.mech.set_epsilon(1).set_utility(utility_list)
        with self.assertRaises(TypeError):
            self.mech.randomise(2)
    def test_outside_domain(self):
        utility_list = [
            ["A", "B", 1],
            ["A", "C", 2],
            ["B", "C", 2]
        ]
        self.mech.set_epsilon(1).set_utility(utility_list)
        with self.assertRaises(ValueError):
            self.mech.randomise("D")
    def test_get_utility_list(self):
        self.assertIsNone(self.mech.get_utility_list())
        utility_list = [
            ["A", "B", 1],
            ["A", "C", 2],
            ["B", "C", 2]
        ]
        self.mech.set_epsilon(1).set_utility(utility_list)
        _utility_list = self.mech.get_utility_list()
        self.assertEqual(len(_utility_list), len(utility_list))
    def test_self_in_utility(self):
        # A self-pair ("A","A") is dropped and its utility forced to 0.
        utility_list = [
            ["A", "B", 1],
            ["A", "C", 2],
            ["B", "C", 2],
            ["A", "A", 5]
        ]
        self.mech.set_epsilon(1).set_utility(utility_list)
        _utility_list = self.mech.get_utility_list()
        self.assertEqual(len(_utility_list) + 1, len(utility_list))
        self.assertEqual(self.mech._get_utility("A", "A"), 0)
    def test_distrib_prob(self):
        # Empirically check the output distribution satisfies the epsilon bound
        # and the expected geometric ratio between candidate frequencies.
        epsilon = np.log(2)
        runs = 20000
        utility_list = [
            ["A", "B", 1],
            ["A", "C", 2],
            ["B", "C", 2]
        ]
        self.mech.set_epsilon(epsilon).set_utility(utility_list)
        count = [0, 0, 0]
        for i in range(runs):
            val = self.mech.randomise("A")
            if val == "A":
                count[0] += 1
            elif val == "B":
                count[1] += 1
            elif val == "C":
                count[2] += 1
        # print("A: %d, B: %d, C: %d" % (count[0], count[1], count[2]))
        self.assertLessEqual(count[0] / runs, np.exp(epsilon) * count[2] / runs + 0.05)
        self.assertAlmostEqual(count[0] / count[1], count[1] / count[2], delta=0.1)
    def test_repr(self):
        repr_ = repr(self.mech.set_epsilon(1))
        self.assertIn(".Exponential(", repr_)
| 2.65625 | 3 |
original_version/page_parser.py | xuerenlv/PaperWork | 1 | 12759147 | <filename>original_version/page_parser.py
# -*- coding: utf-8 -*-
'''
@author <NAME>
created on 2013/10/19
'''
import traceback
try:
import storage
from errors import UnsuspectedPageStructError, JsonDataParsingError
from bs4 import BeautifulSoup
from datetime import datetime
import re
import json
except:
s = traceback.format_exc()
print s
def parse_user_profile(html, weibo_user_type=1001):
    '''
    return an object of UserInfo
    may raise UnsuspectedPageStructError
    Arguments:
    `html`: a text of html code
    `weibo_user_type`: 1001 stands for normal users while 1002 stands for media users
    '''
    soup = BeautifulSoup(html)
    user_info = storage.UserInfo()
    #domain
    # The numeric domain id is embedded in an inline $CONFIG assignment.
    regex = re.compile("\$CONFIG\['domain']='[0-9]+';")
    m = regex.search(html)
    if m is not None:
        user_info.domain = m.group()[19:-2]
    for script in soup.findAll('script'):
        text = script.text
        if 'FM.view(' in text:
            # Strip the FM.view( ... ) wrapper to get the embedded JSON payload.
            text = text[8:]
            if text.endswith(')'):
                text = text[:-1]
            if text.endswith(');'):
                text = text[:-2]
            data = json.loads(text)
            inner_html = data.get('html')
            if inner_html is None:
                continue
            inner_soup = BeautifulSoup(inner_html)
            #n_followees, n_followers and n_mblogs
            if weibo_user_type == 1001:
                # Normal users expose the counters as <strong node-type=...>.
                followee_strong = inner_soup.find('strong', attrs={'node-type': 'follow'})
                if followee_strong is not None:
                    user_info.n_followees = int(followee_strong.text.strip())
                follower_strong = inner_soup.find('strong', attrs={'node-type': 'fans'})
                if follower_strong is not None:
                    user_info.n_followers = int(follower_strong.text.strip())
                mblogs_strong = inner_soup.find('strong', attrs={'node-type': 'weibo'})
                if mblogs_strong is not None:
                    user_info.n_mblogs = int(mblogs_strong.text.strip())
            elif weibo_user_type == 1002:
                # Media users lay the same three counters out as table cells.
                td_S_lines1 = inner_soup.findAll('td', attrs={'class': 'S_line1'})
                if len(td_S_lines1) == 3:
                    try:
                        user_info.n_followees = int(td_S_lines1[0].a.strong.text.strip())
                        user_info.n_followers = int(td_S_lines1[1].a.strong.text.strip())
                        user_info.n_mblogs = int(td_S_lines1[2].a.strong.text.strip())
                    except Exception, e:
                        raise UnsuspectedPageStructError
            #other information
            # Profile fields come as label/value div pairs; walk them two at a time.
            pf_items = inner_soup.findAll('div', attrs={'class': 'pf_item clearfix'})
            for item in pf_items:
                divs = item.findAll('div')
                i = 0
                while i < len(divs):
                    anna = divs[i].text
                    if anna == u'昵称':
                        user_info.nickname = divs[i+1].text.strip()
                    elif anna == u'所在地':
                        user_info.location = divs[i+1].text.strip()
                    elif anna == u'性别':
                        user_info.sex = True if divs[i+1].text.strip() == u'男' else False
                    elif anna == u'生日':
                        user_info.birth = divs[i+1].text.strip()
                    elif anna == u'博客':
                        user_info.blog = divs[i+1].text.strip()
                    elif anna == u'简介':
                        user_info.intro = divs[i+1].text.strip()
                    elif anna == u'邮箱':
                        user_info.email = divs[i+1].text.strip()
                    elif anna == u'个性域名':
                        user_info.site = divs[i+1].text.strip()
                    elif anna == u'QQ':
                        user_info.qq = divs[i+1].text.strip()
                    elif anna == u'MSN':
                        user_info.msn = divs[i+1].text.strip()
                    elif anna == u'大学' or anna == u'高中' or anna == u'初中' or anna == u'小学':
                        # Education entry: school name, optional period and detail.
                        edu_info = storage.EduInfo()
                        school = divs[i+1].p.contents[0]
                        if school is not None:
                            edu_info.school = school.text.strip()
                            if school.nextSibling is not None:
                                edu_info.time = school.nextSibling.strip()
                        if len(divs[i+1].findAll('p')) > 1:
                            edu_info.detail = divs[i+1].findAll('p')[1].text.strip()
                        if edu_info.school is not None:
                            user_info.edu.append(edu_info)
                    elif anna == u'公司':
                        # Work history: consume all remaining value divs as companies.
                        while (i+1) < len(divs):
                            acompany = divs[i+1]
                            work_info = storage.WorkInfo()
                            all_p = acompany.findAll('p')
                            company = all_p[0].contents[0]
                            if company is not None:
                                work_info.company = company.text.strip()
                                if company.nextSibling is not None:
                                    work_info.time = company.nextSibling.strip()
                            j = 1
                            while j < len(all_p):
                                if all_p[j].text.strip().startswith(u'地区'):
                                    work_info.location = all_p[j].text.strip()[3:]
                                if all_p[j].text.strip().startswith(u'职位'):
                                    work_info.department_or_position = all_p[j].text.strip()[3:]
                                j += 1
                            user_info.work.append(work_info)
                            i += 1
                    elif anna == u'标签':
                        # Tags: one <a> per tag inside the value div.
                        tags = divs[i+1]
                        if tags is not None:
                            for a in tags.findAll('a'):
                                user_info.tags.append(a.text.strip())
                    i += 2
    return user_info
def parse_follow(html):
    '''
    return a list of follows in this page
    `html`: html code as string-type
    '''
    soup = BeautifulSoup(html)
    follows = []
    for script in soup.findAll('script'):
        text = script.text
        if 'FM.view(' in text:
            # Strip the FM.view( ... ) wrapper to reach the embedded JSON.
            text = text[8:]
            if text.endswith(')'):
                text = text[:-1]
            if text.endswith(');'):
                text = text[:-2]
            data = json.loads(text)
            inner_html = data.get('html')
            if inner_html is None:
                continue
            inner_soup = BeautifulSoup(inner_html)
            # One <li> per followed account.
            follows_li = inner_soup.findAll('li', attrs={'class': 'clearfix S_line1'})
            for item in follows_li:
                follow = storage.Follow()
                a = item.find('a', usercard=True)
                if a is not None:
                    # usercard attribute is "id=<uid>"; drop the "id=" prefix.
                    follow.uid = a['usercard'][3:]
                    follow.nickname = a.text.strip()
                follows.append(follow)
    return follows
def parse_mblog(html, uid):
    '''
    return a list of micro-blogs in this page
    may raise UnsuspectedPageStructError
    `html`: html code as string-type
    `uid`: uid of user who owns this page
    '''
    # print html
    mblogs = list()
    soup = BeautifulSoup(html)
    for script in soup.findAll('script'):
        text = script.text
        old_text = text
        if 'FM.view(' in text:
            #print "before test is: " + text
            # text = text[8:]
            # if text.endswith(')'):
            #     text = text[:-1]
            # if text.endswith(');'):
            #     text = text[:-2]
            # Extract the {...} JSON argument of FM.view() with a regex,
            # which is more robust than the slicing approach commented above.
            p = re.compile(u"view\((\{.*\})\)")
            mid_text = p.search(text)
            if mid_text is not None:
                text = mid_text.group(1)
            #print "text is: " + text
            inner_html = None
            try:
                data = json.loads(text)
                inner_html = data.get('html')
            except ValueError:
                # Malformed JSON: log both forms for debugging, then skip.
                print "old_text is: " + old_text
                print "text is: " + text
            if inner_html is None:
                continue
            mblogs += _get_mblogs(inner_html, uid)
    return mblogs
def parse_json(data, uid):
    '''
    Parse a JSON API response and return the micro-blogs it contains.
    may raise JsonDataParsingError (malformed JSON / missing 'data' key)
    or UnsuspectedPageStructError (unexpected embedded HTML structure)
    `data`: raw JSON text
    `uid`: weibo user id owning the page
    '''
    try:
        payload = json.loads(data)['data']
    except Exception:
        raise JsonDataParsingError
    return _get_mblogs(payload, uid)
def _get_mblogs(html, uid):
    '''
    get mblogs from showed html
    may raise UnsuspectedPageStructError
    `html`: html codes
    '''
    mblogs = list()
    soup = BeautifulSoup(html)
    mbdivs = soup.findAll('div', attrs={'class':'WB_feed_type SW_fun S_line2 '}) #a list of div-tags, each item of it contains a micro-blog
    for mbdiv in mbdivs:
        try:
            mblog = storage.MicroBlog()
            mblog.mid = mbdiv['mid']
            mblog.uid = uid
            mblog.content = mbdiv.find('div', attrs={'class': 'WB_text'}).text.strip()
            # Creation time comes from the last WB_time anchor's title: "YYYY-MM-DD HH:MM".
            created_time = mbdiv.findAll('a', attrs={'class': 'S_link2 WB_time'})[-1]['title']
            date, time = created_time.split()
            year, month, day = date.split('-')
            hour, minute = time.split(':')
            mblog.created_time = datetime(int(year), int(month), int(day), int(hour), int(minute))
            #n_likes, n_forwards, n_comments
            handle_divs = mbdiv.findAll('div', attrs={'class': 'WB_handle'})
            if len(handle_divs) > 0:
                handle_div = handle_divs[-1]
                handles_a = handle_div.findAll('a')
                like = handles_a[0]
                repost = handles_a[1]
                comment = handles_a[3]
                # Counts are embedded in the anchor labels; slice off the
                # surrounding text/brackets before converting to int.
                if len(like.text.strip()) > 0:
                    mblog.n_likes = int(like.text.strip()[1:-1])
                else:
                    mblog.n_likes = 0
                if len(repost.text.strip()) > 4:
                    mblog.n_forwards = int(repost.text.strip()[3:-1])
                else:
                    mblog.n_forwards = 0
                if len(comment.text.strip()) > 4:
                    mblog.n_comments = int(comment.text.strip()[3:-1])
                else:
                    mblog.n_comments = 0
            #geographical message
            geodiv = mbdiv.find('div', attrs={'class': 'map_data'})
            if geodiv is not None:
                geo = storage.Geo()
                geo.location = geodiv.text.strip()
                # Coordinates sit in a "geo=<lon>,<lat>&..." query fragment.
                regex = re.compile('geo=.*&')
                anna = regex.findall(str(geodiv))[0]
                anna = anna.split('&')[0][4:]
                longitude, latitude = anna.split(',')
                geo.longitude = float(longitude)
                geo.latitude = float(latitude)
                mblog.geo = geo
            #if this mblog is not an original mblog
            if mbdiv.get('isforward') is not None:
                mblog.is_forward = True
                ori_mblog = storage.OriMBlog()
                usercard = mbdiv.find('a', attrs={'class': 'WB_name S_func3'})
                if usercard is not None:
                    oriuid = usercard['usercard'][3:]
                    ori_mblog.uid = oriuid
                ori_mblog.mid = mbdiv.get('omid')
                # The last WB_text div holds the original (forwarded) content.
                ori_mblog.content = mbdiv.findAll('div', attrs={'class': 'WB_text'})[-1].text.strip()
                mblog.ori_mblog = ori_mblog
            else:
                mblog.is_forward = False
            mblogs.append(mblog)
        except Exception, e:
            # Any parse failure means the page layout changed.
            raise UnsuspectedPageStructError
    return mblogs
def test_info():
    # Manual smoke test: log in, fetch a media-user profile and print it.
    from weibo_login import login
    from conf import USERNAME, PASSWORD, COOKIE_FILE
    from opener import urlfetch
    if login(USERNAME, PASSWORD, COOKIE_FILE) is False:
        print "Login failed!"
        return
    #html = urlfetch("http://weibo.com/1789809794/info")
    html = urlfetch("http://weibo.com/1618051664/info")
    user_info = parse_user_profile(html, 1002)
    print user_info.nickname, user_info.location, user_info.sex, user_info.birth, \
        user_info.blog, user_info.site, user_info.intro, user_info.email, user_info.qq, user_info.msn
    print "n_followees: ", user_info.n_followees
    print "n_followers: ", user_info.n_followers
    print "n_mblogs: ", user_info.n_mblogs
    print "domain: ", user_info.domain
    for edu in user_info.edu:
        print edu.school, edu.time, edu.detail
    for work in user_info.work:
        print work.company, work.time, work.department_or_position, work.location
    for tag in user_info.tags:
        print tag
def test_follow():
    # Manual smoke test: parse a saved follow-list page and print the follows.
    ff = open('test/follow.html', 'r')
    html = ff.read()
    ff.close()
    follows = parse_follow(html)
    for follow in follows:
        print follow.uid, follow.nickname
def test_mblog():
    # Manual smoke test: log in, fetch a live timeline page and dump its mblogs.
    #ff = open('test/mblog2.html', 'r')
    #html = ff.read()
    #ff.close()
    from weibo_login import login
    from conf import USERNAME, PASSWORD, COOKIE_FILE
    from urllib2 import urlopen
    html = ''
    if login(USERNAME, PASSWORD, COOKIE_FILE):
        print 'Login!'
        from opener import urlfetch
        html = urlfetch("http://weibo.com/2584784292/weibo")
        #html = urlfetch("http://weibo.com/2803301701/mblog")
    #print html
    mblogs = parse_mblog(html, '2584784292')
    for m in mblogs:
        print 'uid: ', m.uid
        print 'mid: ', m.mid
        print 'content: ' , m.content
        print 'time: ', m.created_time
        print 'n_likes: ', m.n_likes
        print 'n_forward: ', m.n_forwards
        print 'n_comments: ', m.n_comments
        if m.geo:
            print 'longitude: ', m.geo.longitude
            print 'latitude: ', m.geo.latitude
            print 'location: ', m.geo.location
        print m.is_forward
        if m.is_forward:
            print 'ouid: ', m.ori_mblog.uid
            print 'omid: ', m.ori_mblog.mid
            print 'ocontent: ', m.ori_mblog.content
        print '======================================'
# Manual smoke-test entry point; switch the commented calls to exercise
# the other parsers.
if __name__ == '__main__':
    test_mblog()
    #test_info()
    #test_get_domain()
    #test_get_n_mblogs()
| 2.65625 | 3 |
doom_a2c/models.py | edbeeching/ego_map | 3 | 12759148 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Wed Mar 14 10:53:06 2018
@author: anonymous
"""
import math
import torch
import torch.nn as nn
import torch.nn.functional as F
import sys
sys.path.append('..')
# Support running this file both as part of the package and as a standalone
# script: absolute imports when executed directly, relative imports otherwise.
if __name__ == '__main__':
    from distributions import Categorical
    from ego_map import EgoMap
else:
    from .distributions import Categorical
    from .ego_map import EgoMap
# A temporary solution from the master branch.
# https://github.com/pytorch/pytorch/blob/7752fe5d4e50052b3b0bbc9109e599f8157febc0/torch/nn/init.py#L312
# Remove after the next version of PyTorch gets release.
def orthogonal(tensor, gain=1):
    """Fill *tensor* in-place with a (semi-)orthogonal matrix, scaled by *gain*.

    Backported from PyTorch master (see link above); for tensors with more
    than 2 dims the trailing dimensions are flattened. Returns *tensor*.
    """
    if tensor.ndimension() < 2:
        raise ValueError("Only tensors with 2 or more dimensions are supported")

    n_rows = tensor.size(0)
    n_cols = tensor[0].numel()

    noise = torch.Tensor(n_rows, n_cols).normal_(0, 1)
    # QR needs a tall (or square) matrix; transpose wide ones first.
    if n_rows < n_cols:
        noise.t_()

    q, r = torch.qr(noise)
    # Sign-correct Q with diag(R) so the decomposition is unique and Q is
    # uniformly distributed (https://arxiv.org/pdf/math-ph/0609050.pdf).
    signs = torch.diag(r, 0).sign()
    q *= signs.expand_as(q)
    if n_rows < n_cols:
        q.t_()

    tensor.view_as(q).copy_(q)
    tensor.mul_(gain)
    return tensor
def weights_init(m):
    """Orthogonally initialise conv/linear weights and zero their biases.

    Intended to be applied recursively via ``module.apply(weights_init)``;
    modules that are neither Conv* nor Linear are left untouched.
    """
    classname = m.__class__.__name__
    if classname.find('Conv') != -1 or classname.find('Linear') != -1:
        orthogonal(m.weight.data)
        # NOTE(review): leftover debug prints fire once per layer at model
        # construction -- consider switching to logging.
        print(m.__class__.__name__)
        print(m.weight.size())
        if m.bias is not None:
            m.bias.data.fill_(0)
class Lin_View(nn.Module):
    """Flatten every non-batch dimension: (B, *) -> (B, prod(*))."""

    def __init__(self):
        super(Lin_View, self).__init__()

    def forward(self, x):
        batch = x.size(0)
        return x.view(batch, -1)
class FFPolicy(nn.Module):
    """Base actor-critic policy.

    Subclasses implement ``forward`` and must return a dict containing at
    least ``'x'`` (the feature vector fed to ``self.dist``) plus any
    recurrent state / auxiliary predictions. Subclasses are also expected
    to create ``self.dist`` (a Categorical head).
    """

    def __init__(self):
        super(FFPolicy, self).__init__()

    def forward(self, inputs, states, masks):
        raise NotImplementedError

    def _run_and_score(self, inputs, states, masks, actions, **kwargs):
        """Run the network and attach log-probs/entropy for *actions*.

        Shared tail used by act() and evaluate_actions().
        """
        result = self(inputs, states, masks, **kwargs)
        x = result['x']
        action_log_probs, dist_entropy, action_probs = self.dist.logprobs_and_entropy(x, actions)
        del result['x']
        result['actions'] = actions
        result['dist_entropy'] = dist_entropy
        result['action_log_probs'] = action_log_probs
        result['action_probs'] = action_probs
        return result

    def act(self, inputs, states, masks,
            deterministic=False, **kwargs):
        """Sample (or argmax) an action for a rollout step."""
        result = self(inputs, states, masks, **kwargs)
        x = result['x']
        actions = self.dist.sample(x, deterministic=deterministic)
        action_log_probs, dist_entropy, action_probs = self.dist.logprobs_and_entropy(x, actions)
        del result['x']
        result['actions'] = actions
        result['dist_entropy'] = dist_entropy
        result['action_log_probs'] = action_log_probs
        result['action_probs'] = action_probs
        return result

    def evaluate_actions(self, inputs, states,
                         masks, actions, pred_depths=False, **kwargs):
        """Score already-taken *actions* (used during the PPO/A2C update).

        The two previous copies of this body differed only in whether
        ``pred_depths`` was forwarded to forward(); fold that into kwargs
        and share a single code path.
        """
        if pred_depths:
            kwargs['pred_depths'] = pred_depths
        return self._run_and_score(inputs, states, masks, actions, **kwargs)

    def get_action_value_and_probs(self, inputs, states, masks,
                                   deterministic=False, **kwargs):
        """Run the policy and also return the full action softmax
        (used at evaluation/visualisation time)."""
        result = self(inputs, states, masks, **kwargs)
        x = result['x']
        actions = self.dist.sample(x, deterministic=deterministic)
        result['actions'] = actions
        result['action_softmax'] = F.softmax(self.dist(x), dim=1)
        del result['x']
        return result
class CNNPolicy(FFPolicy):
    """Convolutional actor-critic policy with an optional recurrent core.

    Visual features come either from a 3-layer CNN or from a pre-trained
    VAE encoder (``params.pretrained_vae``). The feature vector can be fed
    through a GRU / stacked GRU / LSTM before the actor (``self.dist``)
    and critic (``self.critic_linear``) heads. Auxiliary heads (depth
    prediction, action prediction, loop detection) are created on demand
    from ``params``.
    """

    def __init__(self, num_inputs, input_shape, params):
        super(CNNPolicy, self).__init__()
        if params.pretrained_vae:
            # Minimal stand-in args object for the VAE constructor.
            class Args:
                pass
            args=Args()
            args.hidden2=params.hidden_size
            # NOTE(review): VAE2 is neither defined nor imported in this
            # module -- this branch raises NameError; confirm the intended
            # import.
            self.vae = VAE2(args)
        else:
            self.conv_head = nn.Sequential(nn.Conv2d(num_inputs, params.conv1_size, 8, stride=4),
                                           nn.ReLU(True),
                                           nn.Conv2d(params.conv1_size, params.conv2_size, 4, stride=2),
                                           nn.ReLU(True),
                                           nn.Conv2d(params.conv2_size, params.conv3_size, 3, stride=1),
                                           nn.ReLU(True))
        if params.action_prediction:
            # an additional conv head for action prediction
            self.cnn_action_head = nn.Conv2d(params.conv3_size, params.num_actions,1,1)
        if params.predict_depth:
            # predict depth with a 1x1 conv
            self.depth_head = nn.Conv2d(params.conv3_size, 8, 1, 1)
        # Probe the conv stack with a dummy input to size the FC layer.
        conv_input = torch.Tensor(torch.randn((1,) + input_shape))
        print(conv_input.size(), self.conv_head(conv_input).size(), self.conv_head(conv_input).size())
        self.conv_out_size = self.conv_head(conv_input).nelement()
        self.hidden_size = params.hidden_size
        if params.skip_fc:
            # Recurrent cell consumes raw conv features (no linear layer).
            if params.recurrent_policy:
                assert params.use_lstm == False, 'Cannot have both GRU and LSTM!'
                #self.gru = MaskedGRU(self.conv_out_size, self.hidden_size)
                self.gru = nn.GRUCell(self.conv_out_size, self.hidden_size)
                if params.stacked_gru:
                    #self.gru2 = MaskedGRU(self.hidden_size, self.hidden_size)
                    self.gru2 = nn.GRUCell(self.hidden_size, self.hidden_size)
            if params.use_lstm:
                self.lstm = nn.LSTMCell(self.conv_out_size, self.hidden_size)
        else:
            if not params.pretrained_vae:
                self.linear1 = nn.Linear(self.conv_out_size, self.hidden_size)
            if params.recurrent_policy:
                assert params.use_lstm == False, 'Cannot have both GRU and LSTM!'
                #self.gru = MaskedGRU(self.hidden_size, self.hidden_size)
                if params.pos_as_obs:
                    self.gru = nn.GRUCell(self.hidden_size + 4, self.hidden_size) # x, y sin(orientation), cos(orientation)
                else:
                    self.gru = nn.GRUCell(self.hidden_size, self.hidden_size)
                if params.stacked_gru:
                    #self.gru2 = MaskedGRU(self.hidden_size, self.hidden_size)
                    self.gru2 = nn.GRUCell(self.hidden_size, self.hidden_size)
            if params.use_lstm:
                self.lstm = nn.LSTMCell(self.hidden_size, self.hidden_size)
        # gru_skip concatenates the RNN input with its output, doubling the
        # feature size seen by the actor/critic heads.
        if params.gru_skip:
            self.critic_linear = nn.Linear(self.hidden_size*2, 1)
            self.dist = Categorical(self.hidden_size*2, params.num_actions)
        else:
            self.critic_linear = nn.Linear(self.hidden_size, 1)
            self.dist = Categorical(self.hidden_size, params.num_actions)
        if params.loop_detect:
            self.loop_linear = nn.Linear(self.hidden_size, 1)
        self.params = params
        self.train()
        self.reset_parameters()
        if params.pretrained_vae:
            # Load frozen encoder weights after reset_parameters so they are
            # not re-initialised.
            enc_checkpoint = torch.load(params.pretrained_vae, map_location=lambda storage, loc: storage)
            self.vae.load_state_dict(enc_checkpoint['model'])
            self.vae.eval()

    @property
    def state_size(self):
        # Size of the recurrent hidden state expected by the rollout storage
        # (1 when the policy is purely feed-forward).
        if hasattr(self, 'gru') or hasattr(self, 'lstm'):
            return self.hidden_size
        else:
            return 1

    def load_conv_head(self, old_model):
        # Copy and freeze the three conv layers (indices 0, 2, 4) from a
        # previously trained model.
        for i in range(0, 6, 2):
            self.conv_head[i].weight.data = old_model.conv_head[i].weight.data.clone()
            self.conv_head[i].bias.data = old_model.conv_head[i].bias.data.clone()
            self.conv_head[i].weight.requires_grad = False
            self.conv_head[i].bias.requires_grad = False

    def load_linear_layer(self, old_model):
        # Copy and freeze the first fully-connected layer.
        self.linear1.weight.data = old_model.linear1.weight.data.clone()
        self.linear1.bias.data = old_model.linear1.bias.data.clone()
        self.linear1.weight.requires_grad = False
        self.linear1.bias.requires_grad = False

    def reset_parameters(self):
        """Orthogonal init everywhere, ReLU gain on conv/FC, zeroed GRU biases."""
        self.apply(weights_init)
        relu_gain = nn.init.calculate_gain('relu')
        if not self.params.pretrained_vae:
            for i in range(0, 6, 2):
                self.conv_head[i].weight.data.mul_(relu_gain)
            self.linear1.weight.data.mul_(relu_gain)
        if self.params.loop_detect:
            self.loop_linear.weight.data.mul_(relu_gain)
        if hasattr(self, 'gru'):
            #self.gru.reset_parameters()
            orthogonal(self.gru.weight_ih.data)
            orthogonal(self.gru.weight_hh.data)
            self.gru.bias_ih.data.fill_(0)
            self.gru.bias_hh.data.fill_(0)
            if self.params.learn_init_state:
                self.init_state = nn.Parameter(torch.randn(1, self.hidden_size) * 0.00)
            if self.params.gru_forget_init:
                self.gru.bias_ih.data.uniform_(-self.params.gru_bias_range, self.params.gru_bias_range)
                self.gru.bias_hh.data.uniform_(-self.params.gru_bias_range, self.params.gru_bias_range)
        if hasattr(self, 'gru2'):
            #self.gru2.reset_parameters()
            orthogonal(self.gru2.weight_ih.data)
            orthogonal(self.gru2.weight_hh.data)
            self.gru2.bias_ih.data.fill_(0)
            self.gru2.bias_hh.data.fill_(0)
            if self.params.learn_init_state:
                self.init_state2 = nn.Parameter(torch.randn(1, self.hidden_size) * 0.00)
            if self.params.gru_forget_init:
                self.gru2.bias_ih.data.uniform_(-self.params.gru_bias_range, self.params.gru_bias_range)
                self.gru2.bias_hh.data.uniform_(-self.params.gru_bias_range, self.params.gru_bias_range)
        if hasattr(self, 'lstm'):
            orthogonal(self.lstm.weight_ih.data)
            orthogonal(self.lstm.weight_hh.data)
            self.lstm.bias_ih.data.fill_(1)
            self.lstm.bias_hh.data.fill_(1)
            if self.params.gate_init:
                self.lstm.bias_ih.data.fill_(0)
                self.lstm.bias_hh.data.fill_(0)
        if self.dist.__class__.__name__ == "DiagGaussian":
            self.dist.fc_mean.weight.data.mul_(0.01)

    def pred_depth(self, inputs):
        # Standalone depth prediction from raw frames (inputs scaled to [0,1]).
        x = self.conv_head(inputs * (1.0/255.0))
        return self.depth_head(x)

    def forward(self, inputs, states, masks,
                pred_depth=False, pos_deltas_origins=None, **kwargs):
        """Return a dict with 'values', 'x', recurrent states and any
        auxiliary predictions.

        Two execution paths: single-step (inputs batch matches states batch,
        used during rollouts) and sequence (inputs are T*B stacked frames,
        used during the BPTT update).
        """
        depth_preds = None
        action_preds = None
        if self.params.pos_as_obs:
            assert pos_deltas_origins is not None
        if self.params.pretrained_vae:
            with torch.no_grad():
                self.vae.eval()
                x = self.vae.encode(inputs * (1.0/128.0))[0].detach()
                x = x.view(-1, self.params.hidden_size)
        else:
            x = self.conv_head(inputs * (1.0/self.params.image_scalar))
            if pred_depth:
                depth_preds = self.depth_head(x)
            if self.params.action_prediction:
                action_preds = self.cnn_action_head(x)
            x = x.view(-1, self.conv_out_size)
        if not self.params.skip_fc:
            x = self.linear1(x)
            x = F.relu(x)
        if self.params.gru_skip:
            rnn_input = x
        # NOTE(review): 'states2' is only bound inside this branch; with no
        # GRU configured the result-dict below would raise NameError --
        # confirm recurrent_policy is always set for this class.
        if hasattr(self, 'gru'):
            if hasattr(self,'gru2'):
                states2 = kwargs['states2']
                assert states2 is not None
            else:
                states2 = None
            # x, states = self.gru(x, states, masks)
            # if hasattr(self,'gru2'):
            #     x, states2 = self.gru2(x, states2, masks)
            if inputs.size(0) == states.size(0):
                # Single-step path: one GRU step, masks reset hidden state
                # at episode boundaries.
                if self.params.pos_as_obs:
                    dxys, dthetas = pos_deltas_origins[:,3:5], pos_deltas_origins[:,5:6]
                    x = torch.cat([x, dxys, torch.sin(dthetas), torch.cos(dthetas)], dim=1)
                x = states = self.gru(x, states*masks)
                if hasattr(self,'gru2'):
                    x = states2 = self.gru2(x, states2*masks)
            else:
                # Sequence path: unroll the GRU over the time dimension.
                x = x.view(-1, states.size(0), x.size(1))
                masks = masks.view(-1, states.size(0), 1)
                outputs = []
                for i in range(x.size(0)):
                    if hasattr(self, 'gru2'):
                        hx = states = self.gru(x[i], states * masks[i])
                        hx2 = states2 = self.gru2(hx, states2 * masks[i])
                        outputs.append(hx2)
                    else:
                        inp = x[i]
                        if self.params.pos_as_obs:
                            dxys, dthetas = pos_deltas_origins[i, :,3:5], pos_deltas_origins[i, :,5:6]
                            inp = torch.cat([inp, dxys, torch.sin(dthetas), torch.cos(dthetas)], dim=1)
                        hx = states = self.gru(inp, states * masks[i])
                        outputs.append(hx)
                x = torch.cat(outputs, 0)
        loop_preds = None
        if self.params.loop_detect:
            loop_preds = F.sigmoid(self.loop_linear(x))
        if self.params.gru_skip:
            x = torch.cat([rnn_input, x], 1)
        result = {'values': self.critic_linear(x),
                  'x': x,
                  'states': states,
                  'states2': states2,
                  'depth_preds': depth_preds,
                  'action_preds': action_preds,
                  'loop_preds': loop_preds}
        return result
class EgoMap0_Policy(FFPolicy):
    """Actor-critic policy augmented with an egocentric neural map.

    CNN features are written into a spatially-registered ego-map
    (``self.ego_map``) using the agent's pose; the policy reads the map
    back through a global conv head and/or an attention-style query head,
    optionally mixing in a curiosity (forward-model error) channel.
    """

    def __init__(self, num_inputs, input_shape, params):
        super(EgoMap0_Policy, self).__init__()
        self.params = params
        # Four CNN variants: with/without "same"-style padding and
        # with/without a trailing ReLU (the ReLU can be deferred; see
        # skip_cnn_relu handling in forward()).
        if params.new_padding:
            if params.skip_cnn_relu:
                self.conv_head = nn.Sequential(nn.Conv2d(num_inputs, params.conv1_size, 8, stride=4, padding=2),
                                               nn.ReLU(True),
                                               nn.Conv2d(params.conv1_size, params.conv2_size, 4, stride=2, padding=1),
                                               nn.ReLU(True),
                                               nn.Conv2d(params.conv2_size, params.ego_num_chans, 3, stride=1, padding=1))
            else:
                self.conv_head = nn.Sequential(nn.Conv2d(num_inputs, params.conv1_size, 8, stride=4, padding=2),
                                               nn.ReLU(True),
                                               nn.Conv2d(params.conv1_size, params.conv2_size, 4, stride=2, padding=1),
                                               nn.ReLU(True),
                                               nn.Conv2d(params.conv2_size, params.ego_num_chans, 3, stride=1, padding=1),
                                               nn.ReLU())
        else:
            if params.skip_cnn_relu:
                self.conv_head = nn.Sequential(nn.Conv2d(num_inputs, params.conv1_size, 8, stride=4),
                                               nn.ReLU(True),
                                               nn.Conv2d(params.conv1_size, params.conv2_size, 4, stride=2),
                                               nn.ReLU(True),
                                               nn.Conv2d(params.conv2_size, params.ego_num_chans, 3, stride=1))
            else:
                self.conv_head = nn.Sequential(nn.Conv2d(num_inputs, params.conv1_size, 8, stride=4),
                                               nn.ReLU(True),
                                               nn.Conv2d(params.conv1_size, params.conv2_size, 4, stride=2),
                                               nn.ReLU(True),
                                               nn.Conv2d(params.conv2_size, params.ego_num_chans, 3, stride=1),
                                               nn.ReLU())
        self.ego_map = EgoMap(params)
        ac = 0 # addtional channel
        if params.ego_curiousity:
            ac = 1
        if not params.ego_skip_global:
            # Global read: convs over the full (rotated) ego-map, then an
            # MLP extension sized from a dummy forward pass.
            self.ego_head = nn.Sequential(nn.Conv2d(params.ego_num_chans + ac, params.ego_num_chans, 3, stride=1, padding=1),
                                          nn.ReLU(True),
                                          nn.Conv2d(params.ego_num_chans, params.ego_num_chans, 4, stride=2, padding=0),
                                          nn.ReLU(True),
                                          nn.Conv2d(params.ego_num_chans, params.ego_num_chans, 4, stride=2, padding=0),
                                          nn.ReLU())
            ego_head_input = torch.Tensor(torch.randn((1,) + (params.ego_num_chans + ac,
                                                              2*params.ego_half_size,
                                                              2*params.ego_half_size,)))
            ego_out_size = self.ego_head(ego_head_input).nelement()
            # This has to be defined as an extension as I do not know the size in advance
            if params.ego_use_tanh:
                ego_extension = nn.Sequential(Lin_View(),
                                              nn.Linear(ego_out_size, 256),
                                              nn.ReLU(True),
                                              nn.Linear(256, params.ego_hidden_size),
                                              nn.Tanh())
            else:
                ego_extension = nn.Sequential(Lin_View(),
                                              nn.Linear(ego_out_size, 256),
                                              nn.ReLU(True),
                                              nn.Linear(256, params.ego_hidden_size),
                                              nn.ReLU(True))
            self.ego_head = nn.Sequential(self.ego_head, ego_extension)
            ego_out_size = params.ego_hidden_size
            self.ego_out_size = ego_out_size
        # Size the FC input from a dummy pass through the frame CNN.
        conv_input = torch.Tensor(torch.randn((1,) + input_shape))
        conv_out_size = self.conv_head(conv_input).nelement()
        if params.ego_curiousity:
            conv_out_size += 4*10
        self.conv_out_size = conv_out_size
        print('conv out size', self.conv_out_size)
        if params.ego_skip and not params.ego_skip_global and not params.merge_later:
            self.linear1 = nn.Linear(ego_out_size + conv_out_size, params.hidden_size)
        elif params.ego_skip and (params.merge_later or params.ego_skip_global):
            print('params.ego_skip and (params.merge_later or params.ego_skip_global)')
            self.linear1 = nn.Linear(conv_out_size, params.hidden_size)
        else:
            self.linear1 = nn.Linear(ego_out_size, params.hidden_size)
        if params.recurrent_policy:
            assert params.use_lstm == False, 'Cannot have both GRU and LSTM!'
            if params.merge_later and not params.ego_skip_global:
                self.gru = nn.GRUCell(params.hidden_size + ego_out_size, params.hidden_size)
            else:
                self.gru = nn.GRUCell(params.hidden_size, params.hidden_size)
        if params.ego_query:
            # Attention-style read: a learned query vector attends over the
            # ego-map and returns a context vector appended to the features.
            if params.ego_query_scalar:
                query_out_size = params.ego_num_chans + 1 + ac
            else:
                query_out_size = params.ego_num_chans + ac
            if params.ego_skip_global:
                self.query_head = nn.Linear(params.hidden_size, query_out_size)
            else:
                self.query_head = nn.Linear(ego_out_size + params.hidden_size, query_out_size)
            if params.query_position:
                self.critic_linear = nn.Linear(params.hidden_size + params.ego_num_chans + 2 + ac, 1)
                self.dist = Categorical(params.hidden_size + params.ego_num_chans + 2 + ac, params.num_actions)
            else:
                self.critic_linear = nn.Linear(params.hidden_size + params.ego_num_chans + ac, 1)
                self.dist = Categorical(params.hidden_size + params.ego_num_chans + ac, params.num_actions)
        else:
            self.critic_linear = nn.Linear(params.hidden_size, 1)
            self.dist = Categorical(params.hidden_size, params.num_actions)
        print(params.num_actions, ' actions')
        self.train()
        self.reset_parameters()
        if params.ego_curiousity:
            # Load the forward model
            class Args:
                pass
            args = Args()
            args.hidden2 = 128
            args.load_vae = ''
            args.shared_action_size = 32
            args.load_forward = ''
            save_name = params.forward_model_name
            # NOTE(review): ForwardModel is neither defined nor imported in
            # this module -- this branch raises NameError; confirm the
            # intended import.
            model = ForwardModel(args)
            checkpoint = torch.load(save_name, map_location=lambda storage, loc: storage)
            model.load_state_dict(checkpoint['model'])
            model.eval()
            self.forward_model = model

    @property
    def state_size(self):
        # Size of the recurrent hidden state expected by the rollout storage
        # (1 when the policy is purely feed-forward).
        if hasattr(self, 'gru') or hasattr(self, 'lstm'):
            return self.params.hidden_size
        else:
            return 1

    def load_conv_head(self, old_model):
        # Copy and freeze the three conv layers (indices 0, 2, 4) from a
        # previously trained model.
        for i in range(0, 6, 2):
            self.conv_head[i].weight.data = old_model.conv_head[i].weight.data.clone()
            self.conv_head[i].bias.data = old_model.conv_head[i].bias.data.clone()
            self.conv_head[i].weight.requires_grad = False
            self.conv_head[i].bias.requires_grad = False

    def reset_parameters(self):
        """Orthogonal init everywhere, ReLU gain on conv/FC, zeroed GRU biases."""
        self.apply(weights_init)
        relu_gain = nn.init.calculate_gain('relu')
        for i in range(0, 6, 2):
            self.conv_head[i].weight.data.mul_(relu_gain)
            if not self.params.ego_skip_global:
                self.ego_head[0][i].weight.data.mul_(relu_gain)
        if not self.params.ego_skip_global:
            for i in range(1, 5, 2):
                self.ego_head[1][i].weight.data.mul_(relu_gain)
        self.linear1.weight.data.mul_(relu_gain)
        if hasattr(self, 'gru'):
            orthogonal(self.gru.weight_ih.data)
            orthogonal(self.gru.weight_hh.data)
            self.gru.bias_ih.data.fill_(0)
            self.gru.bias_hh.data.fill_(0)
            if self.params.learn_init_state:
                self.init_state = nn.Parameter(torch.randn(1, self.params.hidden_size) * 0.05)
        if hasattr(self, 'lstm'):
            orthogonal(self.lstm.weight_ih.data)
            orthogonal(self.lstm.weight_hh.data)
            self.lstm.bias_ih.data.fill_(0)
            self.lstm.bias_hh.data.fill_(0)
            if self.params.gate_init:
                self.lstm.bias_ih.data[self.hidden_size: 2*self.params.hidden_size].fill_(1)
                print('hidden gate initialized')
        if self.dist.__class__.__name__ == "DiagGaussian":
            self.dist.fc_mean.weight.data.mul_(0.01)

    def forward(self, inputs, states, masks,
                pred_depth=False,
                ego_states=None, ego_depths=None,
                pos_deltas_origins=None, states2=None,
                prev_obs=None, prev_actions=None, curiousity=None):
        """Write CNN features into the ego-map, read it back, and return the
        usual result dict ('values', 'x', 'states', 'ego_states', ...).

        Two execution paths: single-step (inputs batch matches states batch,
        used during rollouts) and sequence (T*B stacked frames, used during
        the BPTT update).
        """
        assert ego_states is not None, 'Trying to apply ego update with states'
        assert ego_depths is not None, 'Trying to apply ego update with no depths'
        assert pos_deltas_origins is not None, 'Trying to apply ego update with no pdo'
        if self.params.ego_curiousity:
            assert prev_obs is not None
            assert prev_actions is not None
            if curiousity is None:
                curiousity = self.calculate_curiousity(inputs, prev_obs, pos_deltas_origins, prev_actions)
        conv_out = self.conv_head(inputs * (1.0/255.0))
        if self.params.ego_curiousity: # concat the curiousity vectors for ego reading
            assert conv_out.size(0) == curiousity.size(0)
            assert masks.size(0) == curiousity.size(0)
            conv_out = torch.cat([conv_out, curiousity*masks.view(-1,1,1,1)], 1)
        if inputs.size(0) == states.size(0):
            # Single-step path: one ego-map write + rotated read.
            xxs, yys, thetas = pos_deltas_origins[:,0], pos_deltas_origins[:,1], pos_deltas_origins[:,2]
            origin_x, origin_y = pos_deltas_origins[:,6], pos_deltas_origins[:,7]
            ego_states = self.ego_map.ego_mapper(conv_out, ego_depths, ego_states * masks.view(-1,1,1,1), xxs, yys, thetas, masks, origin_x, origin_y)
            x = ego_rots = self.ego_map.rotate_for_read(ego_states, xxs, yys, thetas, origin_x, origin_y)
        else:
            # Sequence path: unroll the ego-map write/read over time.
            x = conv_out.view(-1, states.size(0), * conv_out.size()[1:])
            masks = masks.view(-1, states.size(0), 1)
            read_outputs = []
            for i in range(x.size(0)):
                xxs, yys, thetas = pos_deltas_origins[i, :,0], pos_deltas_origins[i, :,1], pos_deltas_origins[i, :,2]
                origin_x, origin_y = pos_deltas_origins[i, :,6], pos_deltas_origins[i, :,7]
                ego_states = self.ego_map.ego_mapper(x[i], ego_depths[i], ego_states * masks[i].unsqueeze(-1).unsqueeze(-1),
                                                     xxs, yys, thetas, masks[i], origin_x, origin_y)
                read = self.ego_map.rotate_for_read(ego_states, xxs, yys, thetas, origin_x, origin_y)
                read_outputs.append(read)
            x = ego_rots = torch.cat(read_outputs, 0)
        if self.params.ego_skip_global: #do not include a global CNN read on egomap
            if self.params.skip_cnn_relu: # Relu was not applied earlier so apply now
                conv_out = F.relu(conv_out.view(-1, self.conv_out_size))
            else:
                conv_out = conv_out.view(-1, self.conv_out_size)
            x = self.linear1(conv_out)
            x = F.relu(x)
        else:
            x = ego_reads = self.ego_head(x).view(-1, self.ego_out_size)
            if not self.params.ego_skip: #do not include skip connect
                x = self.linear1(x)
                x = F.relu(x)
            if self.params.ego_skip and not self.params.merge_later:
                if self.params.skip_cnn_relu: # Relu was not applied earlier so apply now
                    conv_out = F.relu(conv_out.view(-1, self.conv_out_size))
                else:
                    conv_out = conv_out.view(-1, self.conv_out_size)
                x = torch.cat([x, conv_out], dim=1)
                x = self.linear1(x)
                x = F.relu(x)
            if self.params.ego_skip and self.params.merge_later:
                if self.params.skip_cnn_relu: # Relu was not applied earlier so apply now
                    conv_out = F.relu(conv_out.view(-1, self.conv_out_size))
                else:
                    conv_out = conv_out.view(-1, self.conv_out_size)
                y = self.linear1(conv_out)
                y = F.relu(y)
                x = torch.cat([x, y], dim=1)
        if hasattr(self, 'gru'):
            if inputs.size(0) == states.size(0):
                if self.params.learn_init_state:
                    x = states = self.gru(x, states * masks + (1-masks)*self.init_state.clone().repeat(states.size(0), 1))
                else:
                    x = states = self.gru(x, states * masks)
            else:
                x = x.view(-1, states.size(0), x.size(1))
                outputs = []
                for i in range(x.size(0)):
                    if self.params.learn_init_state:
                        hx = states = self.gru(x[i], states * masks[i] + (1-masks[i])*self.init_state.clone().repeat(states.size(0), 1))
                    else:
                        hx = states = self.gru(x[i], states * masks[i])
                    outputs.append(hx)
                x = torch.cat(outputs, 0)
        # note as gru and lstm gave comparable results, just gru is used
        if self.params.ego_query:
            if self.params.ego_skip_global:
                query_vectors = self.query_head(x)
            else:
                query_vectors = self.query_head(torch.cat([x, ego_reads], dim=1))
            if self.params.query_relu:
                query_vectors = F.relu(query_vectors)
            context_vectors = self.ego_map.query(query_vectors, ego_rots)
            # print('context norms')
            # print(context_vectors.norm(), x.norm())
            x = torch.cat([x, context_vectors], dim=1)
        result = {'values': self.critic_linear(x),
                  'x': x,
                  'states': states,
                  'ego_states': ego_states,
                  'curiousity': curiousity}
        return result

    def calculate_curiousity(self, inputs, prev_obs, pos_deltas_origins, actions):
        """Per-pixel forward-model prediction error, used as a curiosity
        channel written into the ego-map. Runs entirely under no_grad."""
        with torch.no_grad():
            self.forward_model.eval()
            actions = actions.view(-1)
            # calculate
            # 1. VAE encoder + decode of current obs
            o2_hat, mus1, logvars1 = self.forward_model.vae(inputs/128.0)
            # 2. forward prediction of previous obs with actions and deltas
            o1_hat, mus0, logvars0 = self.forward_model.vae(prev_obs/128.0)
            deltas = pos_deltas_origins[:, 3:5]
            action_emb = self.forward_model.action_embedding(actions, deltas)
            # TODO: test logvar conversion
            p_in = torch.cat([action_emb, mus0, logvars0], 1)
            p_out = self.forward_model.p_mlp(p_in)
            mus_t1_hat, logvars_t1_hat = p_out[:,:self.forward_model.args.hidden2], p_out[:,self.forward_model.args.hidden2:]
            z = self.forward_model.vae.reparametrize(mus_t1_hat, logvars_t1_hat)
            o2t1_hat = self.forward_model.vae.decode(z)
            # TODO: compare with abs error
            error = ((o2_hat.detach() - o2t1_hat.detach())**2).mean(dim=1, keepdim=True)
            error = F.interpolate(error, size=(8,14))
            # Crop the 2-pixel border when the CNN used no padding.
            if not self.params.new_padding:
                error = error[:,:,2:-2,2:-2]
        return error * (1.0/100.0) # normlizing to range of around 0-5
if __name__ == '__main__':
    # Manual smoke test: build a CNNPolicy with the standard Doom frame
    # shape (3 x 64 x 112) and print the size of the intermediate conv
    # feature map. The commented-out snippets below are kept for manually
    # exercising the ego-map variants.
    from doom_a2c.arguments import parse_game_args
    params = parse_game_args()
    params.num_actions = 5
    # ego_model = EgoMap0_Policy(3, (3, 64, 112), params)
    # neural_map_model = NeuralMapPolicy(3, (3, 64, 112), params)
    # ego_states = torch.zeros(2, params.ego_num_chans,
    #                          params.ego_half_size*2 -1,
    #                          params.ego_half_size*2 -1)
    # query_vector = torch.randn(2,params.ego_num_chans)
    # result, scores = neural_map_model.query(query_vector, ego_states)
    # result
    # scores
    model = CNNPolicy(3, (3, 64, 112), params)
    example_input = torch.randn(1,3,64,112)
    out = model.conv_head[:4](example_input)
    print(out.size())
| 2.4375 | 2 |
webapp/scanner/management/commands/civicrm_test.py | fragmuffin/QR-Code-Reader | 2 | 12759149 | <reponame>fragmuffin/QR-Code-Reader<gh_stars>1-10
import os
import requests
import re
import argparse
from datetime import datetime
import pytz
import json
from django.core.management.base import BaseCommand, CommandError
from django.db.utils import IntegrityError
import scanner.models
class Command(BaseCommand):
    """Debug helper: dump every record of one CiviCRM entity as JSON."""

    help = "Test CiviCRM API (only for debugging)"

    REST_URL_BASE = 'https://www.melbpc.org.au/wp-content/plugins/civicrm/civicrm/extern/rest.php'

    def add_arguments(self, parser):
        parser.add_argument(
            '--model', default='MembershipStatus',
            help="Model to import.",
        )
        group = parser.add_argument_group('CiviCRM Options')
        group.add_argument(
            '--site-key', dest='site_key',
            help="site key (default from 'CIVICRM_SITEKEY' env var)",
        )
        group.add_argument(
            '--user-key', dest='user_key',
            help="user key (default from 'CIVICRM_USERKEY' env var)",
        )

    def handle(self, *args, **kwargs):
        """Fetch all rows of the requested entity and pretty-print them."""
        # Credentials: command-line options win, environment variables are
        # the fallback.
        self.api_key = kwargs.get('user_key', None) or os.environ.get('CIVICRM_USERKEY', None)
        self.key = kwargs.get('site_key', None) or os.environ.get('CIVICRM_SITEKEY', None)

        payload = {
            'entity': kwargs['model'],
            'action': 'get',
            'api_key': self.api_key,
            'key': self.key,
            'json': 1,
            'options[limit]': 0,
            #'return': ','.join(['id'] + list(remote_fieldmap.keys())),
        }

        response = requests.post(self.REST_URL_BASE, data=payload)
        if response.status_code != 200:
            raise ValueError("response status code: {!r}".format(response.status_code))

        print(json.dumps(response.json(), indent=2))
| 2.21875 | 2 |
hummingbot/connector/exchange/ndax/ndax_order_book_message.py | mrkoq/hummingbot | 0 | 12759150 | #!/usr/bin/env python
from collections import namedtuple
from typing import (
Dict,
List,
Optional,
)
from hummingbot.core.data_type.order_book_row import OrderBookRow
from hummingbot.core.data_type.order_book_message import (
OrderBookMessage,
OrderBookMessageType,
)
# Lightweight records mirroring the NDAX websocket payload field order:
# one for Level2 order-book rows, one for public trade events.
NdaxOrderBookEntry = namedtuple("NdaxOrderBookEntry", "mdUpdateId accountId actionDateTime actionType lastTradePrice orderId price productPairCode quantity side")
NdaxTradeEntry = namedtuple("NdaxTradeEntry", "tradeId productPairCode quantity price order1 order2 tradeTime direction takerSide blockTrade orderClientId")
class NdaxOrderBookMessage(OrderBookMessage):
    """Order-book message specialised for NDAX Level2/trade payloads.

    ``content["data"]`` holds NdaxOrderBookEntry rows for snapshots/diffs
    and NdaxTradeEntry rows for trades.
    """

    # NDAX wire-format constants
    _DELETE_ACTION_TYPE = 2
    _BUY_SIDE = 0
    _SELL_SIDE = 1

    def __new__(
        cls,
        message_type: OrderBookMessageType,
        content: Dict[str, any],
        timestamp: Optional[float] = None,
        *args,
        **kwargs,
    ):
        # Snapshots must carry an explicit timestamp; other message types
        # may fall back to the one embedded in the content.
        if timestamp is None:
            if message_type is OrderBookMessageType.SNAPSHOT:
                raise ValueError("timestamp must not be None when initializing snapshot messages.")
            timestamp = content["timestamp"]
        return super(NdaxOrderBookMessage, cls).__new__(
            cls, message_type, content, timestamp=timestamp, *args, **kwargs
        )

    @property
    def update_id(self) -> int:
        """Monotonic id for sequencing: action time for book updates,
        trade time for trades."""
        if self.type in [OrderBookMessageType.DIFF, OrderBookMessageType.SNAPSHOT]:
            first: NdaxOrderBookEntry = self.content["data"][0]
            return int(first.actionDateTime)
        elif self.type == OrderBookMessageType.TRADE:
            first: NdaxTradeEntry = self.content["data"][0]
            return int(first.tradeTime)

    @property
    def trade_id(self) -> int:
        first: NdaxTradeEntry = self.content["data"][0]
        return first.tradeId

    @property
    def trading_pair(self) -> str:
        return self.content["trading_pair"]

    @property
    def asks(self) -> List[OrderBookRow]:
        return [
            self._order_book_row_for_entry(row)
            for row in self.content["data"]
            if row.side == self._SELL_SIDE
        ]

    @property
    def bids(self) -> List[OrderBookRow]:
        return [
            self._order_book_row_for_entry(row)
            for row in self.content["data"]
            if row.side == self._BUY_SIDE
        ]

    def _order_book_row_for_entry(self, entry: NdaxOrderBookEntry) -> OrderBookRow:
        # Deletions are represented as zero-size price levels.
        if entry.actionType != self._DELETE_ACTION_TYPE:
            size = float(entry.quantity)
        else:
            size = 0.0
        return OrderBookRow(float(entry.price), size, entry.mdUpdateId)

    def __eq__(self, other) -> bool:
        return (
            type(self) == type(other)
            and self.type == other.type
            and self.timestamp == other.timestamp
        )

    def __lt__(self, other) -> bool:
        if self.timestamp != other.timestamp:
            return self.timestamp < other.timestamp
        # Same timestamp: order by enum value, i.e. snapshot < diff < trade.
        return self.type.value < other.type.value

    def __hash__(self) -> int:
        return hash((self.type, self.timestamp))
| 2.234375 | 2 |