# ----------------------------------------------------------------------------
# datadog_checks_dev/datadog_checks/dev/tooling/commands/env/__init__.py
# (vbarbaresi/integrations-core, BSD-3-Clause)
# ----------------------------------------------------------------------------
# (C) Datadog, Inc. 2018-present
# All rights reserved
# Licensed under a 3-clause BSD style license (see LICENSE)
import click
from ..console import CONTEXT_SETTINGS
from .check import check_run
from .ls import ls
from .prune import prune
from .reload import reload_env
from .shell import shell
from .start import start
from .stop import stop
from .test import test
ALL_COMMANDS = (check_run, ls, prune, reload_env, shell, start, stop, test)


# Parent command group; each imported subcommand is registered on it below.
@click.group(context_settings=CONTEXT_SETTINGS, short_help='Manage environments')
def env():
    """
    Manage environments.
    """


for command in ALL_COMMANDS:
    env.add_command(command)
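

# Hedged usage note (editor's addition): a parent click group attaches `env`
# the same way `env` attaches its subcommands above, e.g.
#
#   ddev.add_command(env)   # `ddev` here stands for the package's root CLI group
#
# after which invocations look like `ddev env ls` or `ddev env start <check> <env>`.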
# ----------------------------------------------------------------------------
# sdk/python/pulumi_azure_nextgen/marketplace/private_store_offer.py
# (pulumi/pulumi-azure-nextgen, Apache-2.0)
# ----------------------------------------------------------------------------
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union
from .. import _utilities, _tables
from . import outputs
from ._enums import *
from ._inputs import *
__all__ = ['PrivateStoreOffer']
# ----------------------------------------------------------------------------
# examples/custom_shape/stages.py
# (oksumoron/locust, MIT)
# ----------------------------------------------------------------------------
from locust import HttpUser, TaskSet, task, constant
from locust import LoadTestShape
# ----------------------------------------------------------------------------
# db/seed_ids.py
# (xtuyaowu/jtyd_python_spider, MIT)
# ----------------------------------------------------------------------------
# coding:utf-8
from sqlalchemy import text
from db.basic_db import db_session
from db.models import SeedIds
from decorators.decorator import db_commit_decorator
def get_seed():
    """
    Get all seed users to be crawled
    :return: seed user records (rows with status=0)
    """
    return db_session.query(SeedIds).filter(text('status=0')).all()


def get_seed_ids():
    """
    Get ids of users whose information has not been crawled yet
    :return: user ids
    """
    return db_session.query(SeedIds.uid).filter(text('is_crawled=0')).all()


def get_home_ids():
    """
    Get ids of users whose home pages need to be crawled
    :return: user ids
    """
    return db_session.query(SeedIds.uid).filter(text('home_crawled=0')).all()
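

# Hedged usage sketch (editor's addition; `crawl_user` is hypothetical):
#
#   for seed in get_seed():                         # SeedIds rows with status=0
#       crawl_user(seed.uid)
#   uids = [row.uid for row in get_seed_ids()]      # uids not yet crawled
#   home_uids = [row.uid for row in get_home_ids()]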
# ----------------------------------------------------------------------------
# tobler/area_weighted/area_interpolate.py
# (sjsrey/tobler, BSD-3-Clause)
# ----------------------------------------------------------------------------
"""
Area Weighted Interpolation
"""
import numpy as np
import geopandas as gpd
from ._vectorized_raster_interpolation import _fast_append_profile_in_gdf
import warnings
from scipy.sparse import dok_matrix, diags, coo_matrix
import pandas as pd
from tobler.util.util import _check_crs, _nan_check, _inf_check, _check_presence_of_crs
def _area_tables_binning(source_df, target_df, spatial_index):
"""Construct area allocation and source-target correspondence tables using a spatial indexing approach
...
NOTE: this currently relies on Geopandas' spatial index machinery
Parameters
----------
source_df : geopandas.GeoDataFrame
GeoDataFrame containing input data and polygons
    target_df : geopandas.GeoDataFrame
GeoDataFrame defining the output geometries
spatial_index : str
Spatial index to use to build the allocation of area from source to
target tables. It currently support the following values:
- "source": build the spatial index on `source_df`
- "target": build the spatial index on `target_df`
- "auto": attempts to guess the most efficient alternative.
Currently, this option uses the largest table to build the
index, and performs a `bulk_query` on the shorter table.
Returns
-------
tables : scipy.sparse.dok_matrix
"""
if _check_crs(source_df, target_df):
pass
else:
return None
df1 = source_df.copy()
df2 = target_df.copy()
# it is generally more performant to use the longer df as spatial index
if spatial_index == "auto":
if df1.shape[0] > df2.shape[0]:
spatial_index = "source"
else:
spatial_index = "target"
if spatial_index == "source":
ids_tgt, ids_src = df1.sindex.query_bulk(df2.geometry, predicate="intersects")
elif spatial_index == "target":
ids_src, ids_tgt = df2.sindex.query_bulk(df1.geometry, predicate="intersects")
else:
raise ValueError(
f"'{spatial_index}' is not a valid option. Use 'auto', 'source' or 'target'."
)
areas = df1.geometry.values[ids_src].intersection(df2.geometry.values[ids_tgt]).area
table = coo_matrix(
(areas, (ids_src, ids_tgt),),
shape=(df1.shape[0], df2.shape[0]),
dtype=np.float32,
)
table = table.todok()
return table
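

# Hedged worked example (editor's addition, not part of tobler). For one 2x2
# source square split evenly between two 1x2 target rectangles, the allocation
# table is a 1x2 dok_matrix holding the two intersection areas:
#
#   import geopandas as gpd
#   from shapely.geometry import box
#   src = gpd.GeoDataFrame({"pop": [100]}, geometry=[box(0, 0, 2, 2)], crs="EPSG:3857")
#   tgt = gpd.GeoDataFrame(geometry=[box(0, 0, 1, 2), box(1, 0, 2, 2)], crs="EPSG:3857")
#   table = _area_tables_binning(src, tgt, "auto")
#   assert table[0, 0] == table[0, 1] == 2.0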
def _area_tables(source_df, target_df):
"""
Construct area allocation and source-target correspondence tables.
Parameters
----------
source_df : geopandas.GeoDataFrame
target_df : geopandas.GeoDataFrame
Returns
-------
tables : tuple (optional)
two 2-D numpy arrays
SU: area of intersection of source geometry i with union geometry j
UT: binary mapping of union geometry j to target geometry t
Notes
-----
The assumption is both dataframes have the same coordinate reference system.
Union geometry is a geometry formed by the intersection of a source geometry and a target geometry
SU Maps source geometry to union geometry, UT maps union geometry to target geometry
"""
if _check_crs(source_df, target_df):
pass
else:
return None
source_df = source_df.copy()
    target_df = target_df.copy()
n_s = source_df.shape[0]
n_t = target_df.shape[0]
_left = np.arange(n_s)
_right = np.arange(n_t)
source_df.loc[:, "_left"] = _left # create temporary index for union
target_df.loc[:, "_right"] = _right # create temporary index for union
res_union = gpd.overlay(source_df, target_df, how="union")
n_u, _ = res_union.shape
SU = np.zeros(
(n_s, n_u)
) # holds area of intersection of source geom with union geom
UT = np.zeros((n_u, n_t)) # binary table mapping union geom to target geom
for index, row in res_union.iterrows():
# only union polygons that intersect both a source and a target geometry matter
if not np.isnan(row["_left"]) and not np.isnan(row["_right"]):
s_id = int(row["_left"])
t_id = int(row["_right"])
SU[s_id, index] = row[row.geometry.name].area
UT[index, t_id] = 1
source_df.drop(["_left"], axis=1, inplace=True)
target_df.drop(["_right"], axis=1, inplace=True)
return SU, UT
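

# Hedged note (editor's addition): with the same toy layers as in the sketch
# above, gpd.overlay yields two union polygons, so SU is 1x2 with the
# intersection areas (2.0, 2.0) and UT is the 2x2 identity (up to row order);
# np.dot(SU, UT) then reproduces the table built by _area_tables_binning.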
def _area_interpolate_binning(
source_df,
target_df,
extensive_variables=None,
intensive_variables=None,
table=None,
allocate_total=True,
spatial_index="auto",
):
"""
Area interpolation for extensive and intensive variables.
Parameters
----------
source_df : geopandas.GeoDataFrame
target_df : geopandas.GeoDataFrame
extensive_variables : list
[Optional. Default=None] Columns in dataframes for extensive variables
intensive_variables : list
[Optional. Default=None] Columns in dataframes for intensive variables
table : scipy.sparse.dok_matrix
[Optional. Default=None] Area allocation source-target correspondence
table. If not provided, it will be built from `source_df` and
`target_df` using `tobler.area_interpolate._area_tables_binning`
allocate_total : boolean
[Optional. Default=True] True if total value of source area should be
allocated. False if denominator is area of i. Note that the two cases
would be identical when the area of the source polygon is exhausted by
intersections. See Notes for more details.
spatial_index : str
[Optional. Default="auto"] Spatial index to use to build the
allocation of area from source to target tables. It currently support
the following values:
- "source": build the spatial index on `source_df`
- "target": build the spatial index on `target_df`
- "auto": attempts to guess the most efficient alternative.
Currently, this option uses the largest table to build the
index, and performs a `bulk_query` on the shorter table.
Returns
-------
estimates : geopandas.GeoDataFrame
        new geodataframe with interpolated variables as columns and target_df geometry
as output geometry
Notes
-----
The assumption is both dataframes have the same coordinate reference system.
For an extensive variable, the estimate at target polygon j (default case) is:
.. math::
v_j = \\sum_i v_i w_{i,j}
w_{i,j} = a_{i,j} / \\sum_k a_{i,k}
If the area of the source polygon is not exhausted by intersections with
target polygons and there is reason to not allocate the complete value of
an extensive attribute, then setting allocate_total=False will use the
following weights:
.. math::
v_j = \\sum_i v_i w_{i,j}
w_{i,j} = a_{i,j} / a_i
where a_i is the total area of source polygon i.
For an intensive variable, the estimate at target polygon j is:
.. math::
v_j = \\sum_i v_i w_{i,j}
w_{i,j} = a_{i,j} / \\sum_k a_{k,j}
"""
source_df = source_df.copy()
target_df = target_df.copy()
if _check_crs(source_df, target_df):
pass
else:
return None
if table is None:
table = _area_tables_binning(source_df, target_df, spatial_index)
den = source_df[source_df.geometry.name].area.values
if allocate_total:
den = np.asarray(table.sum(axis=1))
den = den + (den == 0)
den = 1.0 / den
n = den.shape[0]
den = den.reshape((n,))
den = diags([den], [0])
weights = den.dot(table) # row standardize table
dfs = []
extensive = []
if extensive_variables:
for variable in extensive_variables:
vals = _nan_check(source_df, variable)
vals = _inf_check(source_df, variable)
estimates = diags([vals], [0]).dot(weights)
estimates = estimates.sum(axis=0)
extensive.append(estimates.tolist()[0])
    extensive = np.asarray(extensive)
extensive = pd.DataFrame(extensive.T, columns=extensive_variables)
area = np.asarray(table.sum(axis=0))
den = 1.0 / (area + (area == 0))
n, k = den.shape
den = den.reshape((k,))
den = diags([den], [0])
weights = table.dot(den)
intensive = []
if intensive_variables:
for variable in intensive_variables:
vals = _nan_check(source_df, variable)
vals = _inf_check(source_df, variable)
n = vals.shape[0]
vals = vals.reshape((n,))
estimates = diags([vals], [0])
estimates = estimates.dot(weights).sum(axis=0)
intensive.append(estimates.tolist()[0])
intensive = np.asarray(intensive)
intensive = pd.DataFrame(intensive.T, columns=intensive_variables)
if extensive_variables:
dfs.append(extensive)
if intensive_variables:
dfs.append(intensive)
df = pd.concat(dfs, axis=1)
df["geometry"] = target_df[target_df.geometry.name].reset_index(drop=True)
df = gpd.GeoDataFrame(df.replace(np.inf, np.nan))
return df
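

# Hedged usage sketch (editor's addition; reuses the toy `src`/`tgt` layers
# from the sketch after _area_tables_binning):
#
#   out = _area_interpolate_binning(src, tgt, extensive_variables=["pop"])
#   # each target covers half of the source's 4.0 area, so with the default
#   # allocate_total=True the weights are 0.5 each and out["pop"] == [50.0, 50.0]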
def _area_interpolate(
source_df,
target_df,
extensive_variables=None,
intensive_variables=None,
tables=None,
allocate_total=True,
):
"""
Area interpolation for extensive and intensive variables.
Parameters
----------
source_df : geopandas.GeoDataFrame (required)
geodataframe with polygon geometries
target_df : geopandas.GeoDataFrame (required)
geodataframe with polygon geometries
extensive_variables : list, (optional)
columns in dataframes for extensive variables
intensive_variables : list, (optional)
columns in dataframes for intensive variables
tables : tuple (optional)
two 2-D numpy arrays
SU: area of intersection of source geometry i with union geometry j
UT: binary mapping of union geometry j to target geometry t
allocate_total : boolean
True if total value of source area should be allocated.
False if denominator is area of i. Note that the two cases
would be identical when the area of the source polygon is
exhausted by intersections. See Notes for more details.
Returns
-------
estimates : geopandas.GeoDataFrame
        new geodataframe with interpolated variables as columns and target_df geometry
as output geometry
Notes
-----
The assumption is both dataframes have the same coordinate reference system.
For an extensive variable, the estimate at target polygon j (default case) is:
    .. math::
        v_j = \\sum_i v_i w_{i,j}

        w_{i,j} = a_{i,j} / \\sum_k a_{i,k}

    If the area of the source polygon is not exhausted by intersections with
    target polygons and there is reason to not allocate the complete value of
    an extensive attribute, then setting allocate_total=False will use the
    following weights:

    .. math::
        v_j = \\sum_i v_i w_{i,j}

        w_{i,j} = a_{i,j} / a_i

    where a_i is the total area of source polygon i.

    For an intensive variable, the estimate at target polygon j is:

    .. math::
        v_j = \\sum_i v_i w_{i,j}

        w_{i,j} = a_{i,j} / \\sum_k a_{k,j}
"""
source_df = source_df.copy()
target_df = target_df.copy()
if _check_crs(source_df, target_df):
pass
else:
return None
if tables is None:
SU, UT = _area_tables(source_df, target_df)
else:
SU, UT = tables
den = source_df[source_df.geometry.name].area.values
if allocate_total:
den = SU.sum(axis=1)
den = den + (den == 0)
weights = np.dot(np.diag(1 / den), SU)
dfs = []
extensive = []
if extensive_variables:
for variable in extensive_variables:
vals = _nan_check(source_df, variable)
vals = _inf_check(source_df, variable)
estimates = np.dot(np.diag(vals), weights)
estimates = np.dot(estimates, UT)
estimates = estimates.sum(axis=0)
extensive.append(estimates)
extensive = np.array(extensive)
extensive = pd.DataFrame(extensive.T, columns=extensive_variables)
ST = np.dot(SU, UT)
area = ST.sum(axis=0)
den = np.diag(1.0 / (area + (area == 0)))
weights = np.dot(ST, den)
intensive = []
if intensive_variables:
for variable in intensive_variables:
vals = _nan_check(source_df, variable)
vals = _inf_check(source_df, variable)
vals.shape = (len(vals), 1)
est = (vals * weights).sum(axis=0)
intensive.append(est)
intensive = np.array(intensive)
intensive = pd.DataFrame(intensive.T, columns=intensive_variables)
if extensive_variables:
dfs.append(extensive)
if intensive_variables:
dfs.append(intensive)
df = pd.concat(dfs, axis=1)
df["geometry"] = target_df[target_df.geometry.name].reset_index(drop=True)
df = gpd.GeoDataFrame(df.replace(np.inf, np.nan))
return df
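

# Hedged note (editor's addition): this dense-matrix variant mirrors
# _area_interpolate_binning above, swapping the sparse allocation table for
# the explicit SU/UT arrays built by _area_tables; it is simpler to follow
# but its memory use grows with the number of union polygons.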
def _area_tables_raster(
source_df, target_df, raster_path, codes=[21, 22, 23, 24], force_crs_match=True
):
"""
Construct area allocation and source-target correspondence tables according to a raster 'populated' areas
Parameters
----------
source_df : geopandas.GeoDataFrame
        geodataframe with geometry column of polygon type
target_df : geopandas.GeoDataFrame
geodataframe with geometry column of polygon type
raster_path : str
the path to the associated raster image.
codes : list
list of integer code values that should be considered as 'populated'.
Since this draw inspiration using the National Land Cover Database (NLCD), the default is 21 (Developed, Open Space), 22 (Developed, Low Intensity), 23 (Developed, Medium Intensity) and 24 (Developed, High Intensity).
The description of each code can be found here: https://www.mrlc.gov/sites/default/files/metadata/landcover.html
Only taken into consideration for harmonization raster based.
force_crs_match : bool (default is True)
Whether the Coordinate Reference System (CRS) of the polygon will be reprojected to the CRS of the raster file.
        It is recommended to leave this argument as True.
Returns
-------
tables: tuple (optional)
two 2-D numpy arrays
SU: area of intersection of source geometry i with union geometry j
UT: binary mapping of union geometry j to target geometry t
Notes
-----
The assumption is both dataframes have the same coordinate reference system.
Union geometry is a geometry formed by the intersection of a source geometry and a target geometry
SU Maps source geometry to union geometry, UT maps union geometry to target geometry
"""
if _check_crs(source_df, target_df):
pass
else:
return None
source_df = source_df.copy()
target_df = target_df.copy()
n_s = source_df.shape[0]
n_t = target_df.shape[0]
_left = np.arange(n_s)
_right = np.arange(n_t)
source_df.loc[:, "_left"] = _left # create temporary index for union
target_df.loc[:, "_right"] = _right # create temporary index for union
res_union_pre = gpd.overlay(source_df, target_df, how="union")
# Establishing a CRS for the generated union
warnings.warn(
"The CRS for the generated union will be set to be the same as source_df."
)
res_union_pre.crs = source_df.crs
    # The '_fast_append_profile_in_gdf' function is imported from the _vectorized_raster_interpolation module
res_union = _fast_append_profile_in_gdf(
res_union_pre, raster_path, force_crs_match=force_crs_match
)
str_codes = [str(i) for i in codes]
str_list = ["Type_" + i for i in str_codes]
# Extract list of code names that actually appear in the appended dataset
str_list_ok = [col for col in res_union.columns if col in str_list]
res_union["Populated_Pixels"] = res_union[str_list_ok].sum(axis=1)
n_u, _ = res_union.shape
SU = np.zeros(
(n_s, n_u)
) # holds area of intersection of source geom with union geom
UT = np.zeros((n_u, n_t)) # binary table mapping union geom to target geom
for index, row in res_union.iterrows():
# only union polygons that intersect both a source and a target geometry matter
if not np.isnan(row["_left"]) and not np.isnan(row["_right"]):
s_id = int(row["_left"])
t_id = int(row["_right"])
SU[s_id, index] = row["Populated_Pixels"]
UT[index, t_id] = 1
source_df.drop(["_left"], axis=1, inplace=True)
target_df.drop(["_right"], axis=1, inplace=True)
return SU, UT
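

# Hedged usage sketch (editor's addition; the raster path is hypothetical and
# must point at an NLCD-style categorical GeoTIFF covering the layers):
#
#   SU, UT = _area_tables_raster(src, tgt, raster_path="nlcd_2016.tif",
#                                codes=[21, 22, 23, 24])
#   # SU holds counts of 'populated' pixels instead of raw intersection areas,
#   # so downstream weights allocate values toward developed land.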
# ----------------------------------------------------------------------------
# cave/com.raytheon.viz.gfe/python/autotest/VTEC_GHG_FFA_TestScript.py
# (srcarter3/awips2, Apache-2.0)
# ----------------------------------------------------------------------------
##
# This software was developed and / or modified by Raytheon Company,
# pursuant to Contract DG133W-05-CQ-1067 with the US Government.
#
# U.S. EXPORT CONTROLLED TECHNICAL DATA
# This software product contains export-restricted data whose
# export/transfer/disclosure is restricted by U.S. law. Dissemination
# to non-U.S. persons whether in the United States or abroad requires
# an export license or other authorization.
#
# Contractor Name: Raytheon Company
# Contractor Address: 6825 Pine Street, Suite 340
# Mail Stop B8
# Omaha, NE 68106
# 402.291.0100
#
# See the AWIPS II Master Rights File ("Master Rights File.pdf") for
# further licensing information.
##
# ----------------------------------------------------------------------------
# This software is in the public domain, furnished "as is", without technical
# support, and with no warranty, express or implied, as to its usefulness for
# any purpose.
#
# Headlines Timing
#
# Author:
# ----------------------------------------------------------------------------
# set up to test area names and parts of states
# without locationName defined
areaT1 = """
AreaDictionary['FLZ050']['fullStateName'] = 'Florida'
AreaDictionary['FLZ050']['partOfState'] = 'western'
AreaDictionary['FLZ057']['fullStateName'] = 'Florida'
AreaDictionary['FLZ057']['partOfState'] = 'western'
AreaDictionary['FLZ160']['fullStateName'] = 'Florida'
AreaDictionary['FLZ160']['partOfState'] = 'central'
AreaDictionary['FLZ151']['fullStateName'] = 'Florida'
AreaDictionary['FLZ151']['partOfState'] = 'central'
AreaDictionary['FLZ043']['fullStateName'] = 'Florida'
AreaDictionary['FLZ043']['partOfState'] = 'central'
AreaDictionary['FLZ162']['fullStateName'] = 'Florida'
AreaDictionary['FLZ162']['partOfState'] = 'central'
AreaDictionary['FLZ165']['fullStateName'] = 'Florida'
AreaDictionary['FLZ165']['partOfState'] = 'central'
AreaDictionary['FLZ056']['fullStateName'] = 'Florida'
AreaDictionary['FLZ056']['partOfState'] = 'southern'
AreaDictionary['FLZ052']['fullStateName'] = 'Georgia'
AreaDictionary['FLZ052']['partOfState'] = 'western'
AreaDictionary['FLZ155']['fullStateName'] = 'Georgia'
AreaDictionary['FLZ155']['partOfState'] = 'western'
AreaDictionary['FLZ061']['fullStateName'] = 'Georgia'
AreaDictionary['FLZ061']['partOfState'] = 'southern'
AreaDictionary['FLZ148']['fullStateName'] = 'Georgia'
AreaDictionary['FLZ148']['partOfState'] = 'southern'
AreaDictionary['FLZ142']['fullStateName'] = 'South Carolina'
AreaDictionary['FLZ142']['partOfState'] = 'western'
AreaDictionary['FLZ043']['fullStateName'] = 'South Carolina'
AreaDictionary['FLZ043']['partOfState'] = 'western'
"""
# with locationName defined
areaT2= """
AreaDictionary['FLZ050']['fullStateName'] = 'Florida'
AreaDictionary['FLZ050']['partOfState'] = 'western'
AreaDictionary['FLZ050']['locationName'] = 'Clearfield'
AreaDictionary['FLZ057']['fullStateName'] = 'Florida'
AreaDictionary['FLZ057']['partOfState'] = 'western'
AreaDictionary['FLZ057']['locationName'] = 'Clearfield'
AreaDictionary['FLZ160']['fullStateName'] = 'Florida'
AreaDictionary['FLZ160']['partOfState'] = 'central'
AreaDictionary['FLZ160']['locationName'] = 'Aunt Ruby'
AreaDictionary['FLZ151']['fullStateName'] = 'Florida'
AreaDictionary['FLZ151']['partOfState'] = 'central'
AreaDictionary['FLZ151']['locationName'] = 'Aunt Ruby'
AreaDictionary['FLZ043']['fullStateName'] = 'Florida'
AreaDictionary['FLZ043']['partOfState'] = 'central'
AreaDictionary['FLZ043']['locationName'] = 'Adams'
AreaDictionary['FLZ162']['fullStateName'] = 'Florida'
AreaDictionary['FLZ162']['partOfState'] = 'central'
AreaDictionary['FLZ162']['locationName'] = 'Adams'
AreaDictionary['FLZ165']['fullStateName'] = 'Florida'
AreaDictionary['FLZ165']['partOfState'] = 'central'
#AreaDictionary['FLZ165']['locationName'] = 'western'
AreaDictionary['FLZ056']['fullStateName'] = 'Florida'
AreaDictionary['FLZ056']['partOfState'] = 'southern'
AreaDictionary['FLZ056']['locationName'] = 'Tampa'
AreaDictionary['FLZ052']['fullStateName'] = 'Georgia'
AreaDictionary['FLZ052']['partOfState'] = 'western'
AreaDictionary['FLZ052']['locationName'] = 'Tampa'
AreaDictionary['FLZ155']['fullStateName'] = 'Georgia'
AreaDictionary['FLZ155']['partOfState'] = 'western'
AreaDictionary['FLZ155']['locationName'] = 'Atlanta'
AreaDictionary['FLZ061']['fullStateName'] = 'Georgia'
AreaDictionary['FLZ061']['partOfState'] = 'southern'
AreaDictionary['FLZ061']['locationName'] = 'Beach'
AreaDictionary['FLZ148']['fullStateName'] = 'Georgia'
AreaDictionary['FLZ148']['partOfState'] = 'southern'
AreaDictionary['FLZ148']['locationName'] = 'Beach'
AreaDictionary['FLZ142']['fullStateName'] = 'South Carolina'
AreaDictionary['FLZ142']['partOfState'] = 'western'
AreaDictionary['FLZ142']['locationName'] = 'South Park'
AreaDictionary['FLZ043']['fullStateName'] = 'South Carolina'
AreaDictionary['FLZ043']['partOfState'] = 'western'
AreaDictionary['FLZ043']['locationName'] = 'South Park'
"""
# for testing parishes, counties, and independent cities
areaT3 = """
AreaDictionary['FLC017']['fullStateName'] = 'Louisiana'
AreaDictionary['FLC017']['partOfState'] = 'western'
AreaDictionary['FLC017']['independentCity'] = 1
AreaDictionary['FLC105']['fullStateName'] = 'Louisiana'
AreaDictionary['FLC105']['partOfState'] = 'western'
AreaDictionary['FLC027']['fullStateName'] = 'Louisiana'
AreaDictionary['FLC027']['partOfState'] = 'western'
AreaDictionary['FLC053']['fullStateName'] = 'Florida'
AreaDictionary['FLC053']['partOfState'] = 'western'
"""
areaT3FIPS0= '#Definition["areaType"] = "FIPS"'
areaT3FIPS1= 'Definition["areaType"] = "FIPS"'
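# areaT3FIPS0/areaT3FIPS1 form the search/replace pair passed to the test
# harness through "fileChanges" below: swapping the commented line for the
# active one enables Definition["areaType"] = "FIPS", so the parish, county,
# and independent-city tests run against FIPS codes instead of zones.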
scripts = [
{
"commentary": "Clear out all Hazards Table and Grids.",
"name": "Hazard_FFA_0",
"productType": None,
"clearHazardsTable": 1,
"checkStrings": [],
},
{
"commentary": "NEW FFA",
"name": "Hazard_FFA_1",
"drtTime": "20100101_0510",
"productType": "Hazard_FFA_Local",
"cmdLineVars": "{('Flood Reason', 'floodReason'): 'ER '}",
"createGrids": [
("Fcst", "Hazards", "DISCRETE", -100, 100, "<None>", "all"),
("Fcst", "Hazards", "DISCRETE", 0, 3, "FA.A", ["FLZ149"]),
],
"checkStrings": ["URGENT - IMMEDIATE BROADCAST REQUESTED",
"Flood Watch",
"National Weather Service Tampa Bay Ruskin FL",
"FLZ149-",
"/X.NEW.KTBW.FA.A.0001.100101T0510Z-100101T0800Z/",
"/00000.0.ER.000000T0000Z.000000T0000Z.000000T0000Z.OO/",
"Coastal Pasco-",
"1210 AM EST Fri Jan 1 2010",
"...FLOOD WATCH IN EFFECT UNTIL 3 AM EST EARLY THIS MORNING...",
"The National Weather Service in Tampa Bay Ruskin has issued a",
"* Flood Watch for a portion of west central Florida, including the following area, Coastal Pasco.",
"* Until 3 AM EST early this morning",
],
},
{
"commentary": "CON FFA",
"name": "Hazard_FFA_2",
"drtTime": "20100101_0530",
"productType": "Hazard_FFA_Local",
"cmdLineVars": "{('Flood Reason', 'floodReason'): 'SM '}",
"createGrids": [
("Fcst", "Hazards", "DISCRETE", -100, 100, "<None>", "all"),
("Fcst", "Hazards", "DISCRETE", 0, 3, "FA.A", ["FLZ149"]),
],
"checkStrings": ["Flood Watch",
"National Weather Service Tampa Bay Ruskin FL",
"FLZ149-",
"/X.CON.KTBW.FA.A.0001.000000T0000Z-100101T0800Z/",
"/00000.0.SM.000000T0000Z.000000T0000Z.000000T0000Z.OO/",
"...FLOOD WATCH REMAINS IN EFFECT UNTIL 3 AM EST EARLY THIS MORNING...",
"The Flood Watch continues for",
"* A portion of west central Florida, including the following area, Coastal Pasco.",
"* Until 3 AM EST early this morning",
],
},
{
"commentary": "EXA FFA",
"name": "Hazard_FFA_3",
"drtTime": "20100101_0700",
"productType": "Hazard_FFA_Local",
"cmdLineVars": "{('Flood Reason', 'floodReason'): 'DM '}",
"createGrids": [
("Fcst", "Hazards", "DISCRETE", -100, 100, "<None>", "all"),
("Fcst", "Hazards", "DISCRETE", 0, 3, "FA.A", ["FLZ149","FLZ057"]),
],
"checkStrings": ["URGENT - IMMEDIATE BROADCAST REQUESTED",
"Flood Watch",
"National Weather Service Tampa Bay Ruskin FL",
"FLZ057-",
"/X.EXA.KTBW.FA.A.0001.000000T0000Z-100101T0800Z/",
"/00000.0.DM.000000T0000Z.000000T0000Z.000000T0000Z.OO/",
"...FLOOD WATCH IN EFFECT UNTIL 3 AM EST EARLY THIS MORNING...",
"The National Weather Service in Tampa Bay Ruskin has expanded the",
"* Flood Watch to include a portion of south central Florida, including the following area, Highlands.",
"* Until 3 AM EST early this morning",
"FLZ149-",
"/X.CON.KTBW.FA.A.0001.000000T0000Z-100101T0800Z/",
"/00000.0.DM.000000T0000Z.000000T0000Z.000000T0000Z.OO/",
"...FLOOD WATCH REMAINS IN EFFECT UNTIL 3 AM EST EARLY THIS MORNING...",
"The Flood Watch continues for",
"* A portion of west central Florida, including the following area, Coastal Pasco.",
"* Until 3 AM EST early this morning",
],
},
{
"commentary": "CAN FFA, NEW FFA",
"name": "Hazard_FFA_4",
"drtTime": "20100101_0720",
"productType": "Hazard_FFA_Local",
"cmdLineVars": "{('Flood Reason', 'floodReason'): 'IJ '}",
"createGrids": [
("Fcst", "Hazards", "DISCRETE", -100, 100, "<None>", "all"),
("Fcst", "Hazards", "DISCRETE", 0, 8, "FF.A", ["FLZ057"]),
("Fcst", "Hazards", "DISCRETE", 24, 32, "FF.A", ["FLZ057"]),
],
"checkStrings": ["URGENT - IMMEDIATE BROADCAST REQUESTED",
"Flood Watch",
"National Weather Service Tampa Bay Ruskin FL",
"FLZ057-",
"/X.CAN.KTBW.FA.A.0001.000000T0000Z-100101T0800Z/",
"/X.NEW.KTBW.FF.A.0001.100101T0720Z-100101T1300Z/",
"/X.NEW.KTBW.FF.A.0002.100102T0500Z-100102T1300Z/",
"/00000.0.IJ.000000T0000Z.000000T0000Z.000000T0000Z.OO/",
"...FLASH FLOOD WATCH IN EFFECT UNTIL 8 AM EST THIS MORNING...",
"...FLASH FLOOD WATCH IN EFFECT FROM LATE TONIGHT THROUGH SATURDAY MORNING...",
"...FLOOD WATCH IS CANCELLED...",
"The National Weather Service in Tampa Bay Ruskin has issued a",
"* Flash Flood Watch for a portion of south central Florida, including the following area, Highlands.",
"* Until 8 AM EST this morning",
"The National Weather Service in Tampa Bay Ruskin has issued a",
"* Flash Flood Watch for a portion of south central Florida, including the following area, Highlands.",
"* From late tonight through Saturday morning",
"The Flood Watch for a portion of south central Florida has been cancelled.",
"FLZ149-",
"/X.CAN.KTBW.FA.A.0001.000000T0000Z-100101T0800Z/",
"/00000.0.IJ.000000T0000Z.000000T0000Z.000000T0000Z.OO/",
"...FLOOD WATCH IS CANCELLED...",
"The Flood Watch for a portion of west central Florida has been cancelled."
],
},
{
"commentary": "EXP FFA, 2 NEW FFA",
"name": "Hazard_FFA_5",
"drtTime": "20100101_1300",
"productType": "Hazard_FFA_Local",
"cmdLineVars": "{('Flood Reason', 'floodReason'): 'FS '}",
"createGrids": [
("Fcst", "Hazards", "DISCRETE", -100, 100, "<None>", "all"),
("Fcst", "Hazards", "DISCRETE", 24, 32, "FF.A", ["FLZ057"]),
("Fcst", "Hazards", "DISCRETE", 46, 62, "FF.A", ["FLZ057"]),
("Fcst", "Hazards", "DISCRETE", 45, 46, "FA.A", ["FLZ149"]),
("Fcst", "Hazards", "DISCRETE", 46, 62, "FA.A", ["FLZ149"]),
("Fcst", "Hazards", "DISCRETE", 62, 68, "FA.A", ["FLZ149"]),
],
"checkStrings": ["URGENT - IMMEDIATE BROADCAST REQUESTED",
"Flood Watch",
"National Weather Service Tampa Bay Ruskin FL",
"FLZ057-",
"/X.EXP.KTBW.FF.A.0001.000000T0000Z-100101T1300Z/",
"/X.NEW.KTBW.FF.A.0003.100103T0300Z-100103T1900Z/",
"/X.CON.KTBW.FF.A.0002.100102T0500Z-100102T1300Z/",
"/00000.0.FS.000000T0000Z.000000T0000Z.000000T0000Z.OO/",
"...FLASH FLOOD WATCH REMAINS IN EFFECT FROM LATE TONIGHT THROUGH SATURDAY MORNING...",
"...FLASH FLOOD WATCH IN EFFECT FROM SATURDAY EVENING THROUGH SUNDAY AFTERNOON...",
"...FLASH FLOOD WATCH HAS EXPIRED...",
"The Flash Flood Watch continues for",
"* A portion of south central Florida, including the following area, Highlands.",
"* From late tonight through Saturday morning",
"The National Weather Service in Tampa Bay Ruskin has issued a",
"* Flash Flood Watch for a portion of south central Florida, including the following area, Highlands.",
"* From Saturday evening through Sunday afternoon",
"The Flash Flood Watch for a portion of south central Florida has expired.",
"FLZ149-",
"/X.NEW.KTBW.FA.A.0002.100103T0200Z-100104T0100Z/",
"/00000.0.FS.000000T0000Z.000000T0000Z.000000T0000Z.OO/",
"...FLOOD WATCH IN EFFECT FROM SATURDAY EVENING THROUGH SUNDAY EVENING...",
"The National Weather Service in Tampa Bay Ruskin has issued a",
"* Flood Watch for a portion of west central Florida, including the following area, Coastal Pasco.",
"* From Saturday evening through Sunday evening",
],
},
{
"commentary": "CON test of multiple events",
"name": "Hazard_FFA_6",
"drtTime": "20100102_0300",
"productType": "Hazard_FFA_Local",
"cmdLineVars": "{('Flood Reason', 'floodReason'): 'RS '}",
"createGrids": [
("Fcst", "Hazards", "DISCRETE", -100, 100, "<None>", "all"),
("Fcst", "Hazards", "DISCRETE", 24, 32, "FF.A", ["FLZ057"]),
("Fcst", "Hazards", "DISCRETE", 46, 62, "FF.A", ["FLZ057"]),
("Fcst", "Hazards", "DISCRETE", 45, 46, "FA.A", ["FLZ149"]),
("Fcst", "Hazards", "DISCRETE", 46, 62, "FA.A", ["FLZ149"]),
("Fcst", "Hazards", "DISCRETE", 62, 68, "FA.A", ["FLZ149"]),
],
"checkStrings": ["Flood Watch",
"National Weather Service Tampa Bay Ruskin FL",
"FLZ057-",
"/X.CON.KTBW.FF.A.0002.100102T0500Z-100102T1300Z/",
"/X.CON.KTBW.FF.A.0003.100103T0300Z-100103T1900Z/",
"/00000.0.RS.000000T0000Z.000000T0000Z.000000T0000Z.OO/",
"...FLASH FLOOD WATCH REMAINS IN EFFECT UNTIL 8 AM EST SATURDAY...",
"...FLASH FLOOD WATCH REMAINS IN EFFECT FROM SATURDAY EVENING THROUGH SUNDAY AFTERNOON...",
"The Flash Flood Watch continues for",
"* A portion of south central Florida, including the following area, Highlands.",
"* Until 8 AM EST Saturday",
"The Flash Flood Watch continues for",
"* A portion of south central Florida, including the following area, Highlands.",
"* From Saturday evening through Sunday afternoon",
"FLZ149-",
"/X.CON.KTBW.FA.A.0002.100103T0200Z-100104T0100Z/",
"/00000.0.RS.000000T0000Z.000000T0000Z.000000T0000Z.OO/",
"...FLOOD WATCH REMAINS IN EFFECT FROM SATURDAY EVENING THROUGH SUNDAY EVENING...",
"The Flood Watch continues for",
"* A portion of west central Florida, including the following area, Coastal Pasco.",
"* From Saturday evening through Sunday evening",
],
},
{
"commentary": "middle of 1st event",
"name": "Hazard_FFA_7",
"drtTime": "20100102_0700",
"productType": "Hazard_FFA_Local",
"cmdLineVars": "{('Flood Reason', 'floodReason'): 'ER '}",
"createGrids": [
("Fcst", "Hazards", "DISCRETE", -100, 100, "<None>", "all"),
("Fcst", "Hazards", "DISCRETE", 24, 32, "FF.A", ["FLZ057"]),
("Fcst", "Hazards", "DISCRETE", 46, 62, "FF.A", ["FLZ057"]),
("Fcst", "Hazards", "DISCRETE", 45, 46, "FA.A", ["FLZ149"]),
("Fcst", "Hazards", "DISCRETE", 46, 62, "FA.A", ["FLZ149"]),
("Fcst", "Hazards", "DISCRETE", 62, 68, "FA.A", ["FLZ149"]),
],
"checkStrings": ["Flood Watch",
"National Weather Service Tampa Bay Ruskin FL",
"FLZ057-",
"/X.CON.KTBW.FF.A.0002.000000T0000Z-100102T1300Z/",
"/X.CON.KTBW.FF.A.0003.100103T0300Z-100103T1900Z/",
"/00000.0.ER.000000T0000Z.000000T0000Z.000000T0000Z.OO/",
"...FLASH FLOOD WATCH REMAINS IN EFFECT UNTIL 8 AM EST THIS MORNING...",
"...FLASH FLOOD WATCH REMAINS IN EFFECT FROM THIS EVENING THROUGH SUNDAY AFTERNOON...",
"The Flash Flood Watch continues for",
"* A portion of south central Florida, including the following area, Highlands.",
"* Until 8 AM EST this morning",
"The Flash Flood Watch continues for",
"* A portion of south central Florida, including the following area, Highlands.",
"* From this evening through Sunday afternoon",
"FLZ149-",
"/X.CON.KTBW.FA.A.0002.100103T0200Z-100104T0100Z/",
"/00000.0.ER.000000T0000Z.000000T0000Z.000000T0000Z.OO/",
"...FLOOD WATCH REMAINS IN EFFECT FROM THIS EVENING THROUGH SUNDAY EVENING...",
"The Flood Watch continues for",
"* A portion of west central Florida, including the following area, Coastal Pasco.",
"* From this evening through Sunday evening",
],
},
{
"commentary": "joining two events",
"name": "Hazard_FFA_8",
"drtTime": "20100102_1200",
"productType": "Hazard_FFA_Local",
"cmdLineVars": "{('Flood Reason', 'floodReason'): 'IC '}",
"createGrids": [
("Fcst", "Hazards", "DISCRETE", -100, 100, "<None>", "all"),
("Fcst", "Hazards", "DISCRETE", 24, 45, "FF.A", ["FLZ057"]),
("Fcst", "Hazards", "DISCRETE", 45, 62, "FF.A", ["FLZ057"]),
("Fcst", "Hazards", "DISCRETE", 45, 62, "FA.A", ["FLZ149"]),
("Fcst", "Hazards", "DISCRETE", 62, 68, "FA.A", ["FLZ149"]),
],
"checkStrings": ["URGENT - IMMEDIATE BROADCAST REQUESTED",
"Flood Watch",
"National Weather Service Tampa Bay Ruskin FL",
"FLZ057-",
"/X.CAN.KTBW.FF.A.0002.000000T0000Z-100102T1300Z/",
"/X.EXT.KTBW.FF.A.0003.100102T1200Z-100103T1900Z/",
"/00000.0.IC.000000T0000Z.000000T0000Z.000000T0000Z.OO/",
"...FLASH FLOOD WATCH NOW IN EFFECT THROUGH SUNDAY AFTERNOON...",
"The Flash Flood Watch is now in effect for",
"* A portion of south central Florida, including the following area, Highlands.",
"* Through Sunday afternoon",
"FLZ149-",
"/X.CON.KTBW.FA.A.0002.100103T0200Z-100104T0100Z/",
"/00000.0.IC.000000T0000Z.000000T0000Z.000000T0000Z.OO/",
"...FLOOD WATCH REMAINS IN EFFECT FROM THIS EVENING THROUGH SUNDAY EVENING...",
"The Flood Watch continues for",
"* A portion of west central Florida, including the following area, Coastal Pasco.",
"* From this evening through Sunday evening",
],
},
{
"commentary": "into the tail end of the events",
"name": "Hazard_FFA_9",
"drtTime": "20100103_1100",
"productType": "Hazard_FFA_Local",
"cmdLineVars": "{('Flood Reason', 'floodReason'): 'SM '}",
"createGrids": [
("Fcst", "Hazards", "DISCRETE", -100, 100, "<None>", "all"),
("Fcst", "Hazards", "DISCRETE", 24, 45, "FF.A", ["FLZ057"]),
("Fcst", "Hazards", "DISCRETE", 45, 62, "FF.A", ["FLZ057"]),
("Fcst", "Hazards", "DISCRETE", 45, 62, "FA.A", ["FLZ149"]),
("Fcst", "Hazards", "DISCRETE", 62, 68, "FA.A", ["FLZ149"]),
],
"checkStrings": ["Flood Watch",
"National Weather Service Tampa Bay Ruskin FL",
"FLZ057-",
"/X.CON.KTBW.FF.A.0003.000000T0000Z-100103T1900Z/",
"/00000.0.SM.000000T0000Z.000000T0000Z.000000T0000Z.OO/",
"...FLASH FLOOD WATCH REMAINS IN EFFECT UNTIL 2 PM EST THIS AFTERNOON...",
"The Flash Flood Watch continues for",
"* A portion of south central Florida, including the following area, Highlands.",
"* Until 2 PM EST this afternoon",
"FLZ149-",
"/X.CON.KTBW.FA.A.0002.000000T0000Z-100104T0100Z/",
"/00000.0.SM.000000T0000Z.000000T0000Z.000000T0000Z.OO/",
"...FLOOD WATCH REMAINS IN EFFECT THROUGH THIS EVENING...",
"The Flood Watch continues for",
"* A portion of west central Florida, including the following area, Coastal Pasco.",
"* Through this evening",
],
},
{
"commentary": "exp 1st event, continue 2nd event",
"name": "Hazard_FFA_10",
"drtTime": "20100103_1855",
"productType": "Hazard_FFA_Local",
"cmdLineVars": "{('Flood Reason', 'floodReason'): 'DR '}",
"createGrids": [
("Fcst", "Hazards", "DISCRETE", -100, 100, "<None>", "all"),
("Fcst", "Hazards", "DISCRETE", 24, 45, "FF.A", ["FLZ057"]),
("Fcst", "Hazards", "DISCRETE", 45, 62, "FF.A", ["FLZ057"]),
("Fcst", "Hazards", "DISCRETE", 45, 62, "FA.A", ["FLZ149"]),
("Fcst", "Hazards", "DISCRETE", 62, 68, "FA.A", ["FLZ149"]),
],
"checkStrings": ["Flood Watch",
"National Weather Service Tampa Bay Ruskin FL",
"FLZ057-",
"/X.EXP.KTBW.FF.A.0003.000000T0000Z-100103T1900Z/",
"/00000.0.DR.000000T0000Z.000000T0000Z.000000T0000Z.OO/",
"...FLASH FLOOD WATCH WILL EXPIRE AT 2 PM EST THIS AFTERNOON...",
"The Flash Flood Watch for a portion of south central Florida will expire at 2 PM EST this afternoon.",
"FLZ149-",
"/X.CON.KTBW.FA.A.0002.000000T0000Z-100104T0100Z/",
"/00000.0.DR.000000T0000Z.000000T0000Z.000000T0000Z.OO/",
"...FLOOD WATCH REMAINS IN EFFECT UNTIL 8 PM EST THIS EVENING...",
"The Flood Watch continues for",
"* A portion of west central Florida, including the following area, Coastal Pasco.",
"* Until 8 PM EST this evening",
],
},
{
"commentary": "cancel 2nd event",
"name": "Hazard_FFA_11",
"drtTime": "20100104_0000",
"productType": "Hazard_FFA_Local",
"cmdLineVars": "{('Flood Reason', 'floodReason'): 'GO '}",
"createGrids": [
("Fcst", "Hazards", "DISCRETE", -100, 100, "<None>", "all"),
],
"checkStrings": ["Flood Watch",
"National Weather Service Tampa Bay Ruskin FL",
"FLZ149-",
"/X.CAN.KTBW.FA.A.0002.000000T0000Z-100104T0100Z/",
"/00000.0.GO.000000T0000Z.000000T0000Z.000000T0000Z.OO/",
"...FLOOD WATCH IS CANCELLED...",
"The Flood Watch for a portion of west central Florida has been cancelled.",
],
},
{
"commentary": "Deleting hazard grids.",
"name": "Hazard_FFA_12",
"productType": None,
"checkStrings": [],
"clearHazardsTable": 1,
},
# Begin detailed phrasing of location tests
{
"commentary": "one state, single area, w/o location",
"name": "Hazard_FFA_13a",
"drtTime": "20100101_0510",
"productType": "Hazard_FFA_Local",
"cmdLineVars": "{('Flood Reason', 'floodReason'): 'ER '}",
"decodeVTEC": 0,
"vtecMode": "O",
"fileChanges": [("AreaDictionary", "TextUtility", "add", areaT1, "delete"),],
"createGrids": [
("Fcst", "Hazards", "DISCRETE", -100, 100, "<None>", "all"),
("Fcst", "Hazards", "DISCRETE", 0, 3, "FA.A", ["FLZ050"]),
],
"checkStrings": [
"WGUS62 KTBW 010510",
"FFATBW",
"URGENT - IMMEDIATE BROADCAST REQUESTED",
"Flood Watch",
"National Weather Service Tampa Bay Ruskin FL",
"1210 AM EST Fri Jan 1 2010",
"...|*Overview headline (must edit)*|...",
".|*Overview (must edit)*|.",
"FLZ050-010800-",
"/O.NEW.KTBW.FA.A.0001.100101T0510Z-100101T0800Z/",
"/00000.0.ER.000000T0000Z.000000T0000Z.000000T0000Z.OO/",
"Pinellas-",
"1210 AM EST Fri Jan 1 2010",
"...FLOOD WATCH IN EFFECT UNTIL 3 AM EST EARLY THIS MORNING...",
"The National Weather Service in Tampa Bay Ruskin has issued a",
"* Flood Watch for a portion of western Florida, including the following area, Pinellas.",
"* Until 3 AM EST early this morning",
"* |* Basis for the watch *|",
"* |* (optional) potential impacts of flooding *|",
"PRECAUTIONARY/PREPAREDNESS ACTIONS...",
"A Flood Watch means there is a potential for flooding based on current forecasts.",
"You should monitor later forecasts and be alert for possible Flood Warnings. Those living in areas prone to flooding should be prepared to take action should flooding develop.",
"&&",
"$$",
],
},
{
"commentary": "one state, single area, w location",
"name": "Hazard_FFA_13b",
"drtTime": "20100101_0510",
"productType": "Hazard_FFA_Local",
"cmdLineVars": "{('Flood Reason', 'floodReason'): 'ER '}",
"decodeVTEC": 0,
"vtecMode": "O",
"fileChanges": [("AreaDictionary", "TextUtility", "add", areaT2, "delete"),],
"createGrids": [
("Fcst", "Hazards", "DISCRETE", -100, 100, "<None>", "all"),
("Fcst", "Hazards", "DISCRETE", 0, 3, "FA.A", ["FLZ050"]),
],
"checkStrings": [
"WGUS62 KTBW 010510",
"FFATBW",
"URGENT - IMMEDIATE BROADCAST REQUESTED",
"Flood Watch",
"National Weather Service Tampa Bay Ruskin FL",
"1210 AM EST Fri Jan 1 2010",
"...|*Overview headline (must edit)*|...",
".|*Overview (must edit)*|.",
"FLZ050-010800-",
"/O.NEW.KTBW.FA.A.0001.100101T0510Z-100101T0800Z/",
"/00000.0.ER.000000T0000Z.000000T0000Z.000000T0000Z.OO/",
"Pinellas-",
"1210 AM EST Fri Jan 1 2010",
"...FLOOD WATCH IN EFFECT UNTIL 3 AM EST EARLY THIS MORNING...",
"The National Weather Service in Tampa Bay Ruskin has issued a",
"* Flood Watch for a portion of western Florida, including the following area, Clearfield.",
"* Until 3 AM EST early this morning",
"* |* Basis for the watch *|",
"* |* (optional) potential impacts of flooding *|",
"PRECAUTIONARY/PREPAREDNESS ACTIONS...",
"A Flood Watch means there is a potential for flooding based on current forecasts.",
"You should monitor later forecasts and be alert for possible Flood Warnings. Those living in areas prone to flooding should be prepared to take action should flooding develop.",
"&&",
"$$",
],
},
{
"commentary": "two states, single area, w/o location",
"name": "Hazard_FFA_14a",
"drtTime": "20100101_0510",
"productType": "Hazard_FFA_Local",
"cmdLineVars": "{('Flood Reason', 'floodReason'): 'ER '}",
"decodeVTEC": 0,
"vtecMode": "O",
"fileChanges": [("AreaDictionary", "TextUtility", "add", areaT1, "delete"),],
"createGrids": [
("Fcst", "Hazards", "DISCRETE", -100, 100, "<None>", "all"),
("Fcst", "Hazards", "DISCRETE", 0, 3, "FA.A", ["FLZ050","FLZ057",
"FLZ052","FLZ155"]),
],
"checkStrings": [
"WGUS62 KTBW 010510",
"FFATBW",
"URGENT - IMMEDIATE BROADCAST REQUESTED",
"Flood Watch",
"National Weather Service Tampa Bay Ruskin FL",
"1210 AM EST Fri Jan 1 2010",
"...|*Overview headline (must edit)*|...",
".|*Overview (must edit)*|.",
"FLZ050-052-057-155-010800-",
"/O.NEW.KTBW.FA.A.0001.100101T0510Z-100101T0800Z/",
"/00000.0.ER.000000T0000Z.000000T0000Z.000000T0000Z.OO/",
"Pinellas-Polk-Highlands-Coastal Manatee-",
# "Including the cities of St. Petersburg, Clearwater, Largo, ",
# "Lakeland, Winter Haven, Bradenton, Bayshore Gardens, ",
# "Palmetto, Sebring, Avon Park, Placid Lakes",
"1210 AM EST Fri Jan 1 2010",
"...FLOOD WATCH IN EFFECT UNTIL 3 AM EST EARLY THIS MORNING...",
"The National Weather Service in Tampa Bay Ruskin has issued a",
"* Flood Watch for portions of western Florida and western Georgia, including the following areas, in western Florida, Highlands and Pinellas. In western Georgia, Coastal Manatee and Polk.",
"* Until 3 AM EST early this morning",
"* |* Basis for the watch *|",
"* |* (optional) potential impacts of flooding *|",
"PRECAUTIONARY/PREPAREDNESS ACTIONS...",
"A Flood Watch means there is a potential for flooding based on current forecasts.",
"You should monitor later forecasts and be alert for possible Flood Warnings. Those living in areas prone to flooding should be prepared to take action should flooding develop.",
"&&",
"$$",
],
},
{
"commentary": "two states, single area, w location",
"name": "Hazard_FFA_14b",
"drtTime": "20100101_0510",
"productType": "Hazard_FFA_Local",
"cmdLineVars": "{('Flood Reason', 'floodReason'): 'ER '}",
"decodeVTEC": 0,
"vtecMode": "O",
"fileChanges": [("AreaDictionary", "TextUtility", "add", areaT2, "delete"),],
"createGrids": [
("Fcst", "Hazards", "DISCRETE", -100, 100, "<None>", "all"),
("Fcst", "Hazards", "DISCRETE", 0, 3, "FA.A", ["FLZ050","FLZ057",
"FLZ052","FLZ155"]),
],
"checkStrings": [
"WGUS62 KTBW 010510",
"FFATBW",
"URGENT - IMMEDIATE BROADCAST REQUESTED",
"Flood Watch",
"National Weather Service Tampa Bay Ruskin FL",
"1210 AM EST Fri Jan 1 2010",
"...|*Overview headline (must edit)*|...",
".|*Overview (must edit)*|.",
"FLZ050-052-057-155-010800-",
"/O.NEW.KTBW.FA.A.0001.100101T0510Z-100101T0800Z/",
"/00000.0.ER.000000T0000Z.000000T0000Z.000000T0000Z.OO/",
"Pinellas-Polk-Highlands-Coastal Manatee-",
"1210 AM EST Fri Jan 1 2010",
"...FLOOD WATCH IN EFFECT UNTIL 3 AM EST EARLY THIS MORNING...",
"The National Weather Service in Tampa Bay Ruskin has issued a",
"* Flood Watch for portions of western Florida and western Georgia, including the following areas, in western Florida, Clearfield. In western Georgia, Atlanta and Tampa.",
"* Until 3 AM EST early this morning",
"* |* Basis for the watch *|",
"* |* (optional) potential impacts of flooding *|",
"PRECAUTIONARY/PREPAREDNESS ACTIONS...",
"A Flood Watch means there is a potential for flooding based on current forecasts.",
"You should monitor later forecasts and be alert for possible Flood Warnings. Those living in areas prone to flooding should be prepared to take action should flooding develop.",
"&&",
"$$",
],
},
{
"commentary": "one state, multiple areas, w/o location",
"name": "Hazard_FFA_15a",
"drtTime": "20100101_0510",
"productType": "Hazard_FFA_Local",
"cmdLineVars": "{('Flood Reason', 'floodReason'): 'ER '}",
"decodeVTEC": 0,
"vtecMode": "O",
"fileChanges": [("AreaDictionary", "TextUtility", "add", areaT1, "delete"),],
"createGrids": [
("Fcst", "Hazards", "DISCRETE", -100, 100, "<None>", "all"),
("Fcst", "Hazards", "DISCRETE", 0, 3, "FA.A", ["FLZ050","FLZ160",
"FLZ057","FLZ151","FLZ056"]),
],
"checkStrings": [
"WGUS62 KTBW 010510",
"FFATBW",
"URGENT - IMMEDIATE BROADCAST REQUESTED",
"Flood Watch",
"National Weather Service Tampa Bay Ruskin FL",
"1210 AM EST Fri Jan 1 2010",
"...|*Overview headline (must edit)*|...",
".|*Overview (must edit)*|.",
"FLZ050-056-057-151-160-010800-",
"/O.NEW.KTBW.FA.A.0001.100101T0510Z-100101T0800Z/",
"/00000.0.ER.000000T0000Z.000000T0000Z.000000T0000Z.OO/",
"Pinellas-Hardee-Highlands-Coastal Hillsborough-Coastal Sarasota-",
"1210 AM EST Fri Jan 1 2010",
"...FLOOD WATCH IN EFFECT UNTIL 3 AM EST EARLY THIS MORNING...",
"The National Weather Service in Tampa Bay Ruskin has issued a",
"* Flood Watch for portions of central Florida, southern Florida, and western Florida, including the following areas, in central Florida, Coastal Hillsborough and Coastal Sarasota. In southern Florida, Hardee. In western Florida, Highlands and Pinellas.",
"* Until 3 AM EST early this morning",
"* |* Basis for the watch *|",
"* |* (optional) potential impacts of flooding *|",
"PRECAUTIONARY/PREPAREDNESS ACTIONS...",
"A Flood Watch means there is a potential for flooding based on current forecasts.",
"You should monitor later forecasts and be alert for possible Flood Warnings. Those living in areas prone to flooding should be prepared to take action should flooding develop.",
"&&",
"$$",
],
},
{
"commentary": "one state, multiple areas, w location",
"name": "Hazard_FFA_15b",
"drtTime": "20100101_0510",
"productType": "Hazard_FFA_Local",
"cmdLineVars": "{('Flood Reason', 'floodReason'): 'ER '}",
"decodeVTEC": 0,
"vtecMode": "O",
"fileChanges": [("AreaDictionary", "TextUtility", "add", areaT2, "delete"),],
"createGrids": [
("Fcst", "Hazards", "DISCRETE", -100, 100, "<None>", "all"),
("Fcst", "Hazards", "DISCRETE", 0, 3, "FA.A", ["FLZ050","FLZ160",
"FLZ057","FLZ151","FLZ056"]),
],
"checkStrings": [
"WGUS62 KTBW 010510",
"FFATBW",
"URGENT - IMMEDIATE BROADCAST REQUESTED",
"Flood Watch",
"National Weather Service Tampa Bay Ruskin FL",
"1210 AM EST Fri Jan 1 2010",
"...|*Overview headline (must edit)*|...",
".|*Overview (must edit)*|.",
"FLZ050-056-057-151-160-010800-",
"/O.NEW.KTBW.FA.A.0001.100101T0510Z-100101T0800Z/",
"/00000.0.ER.000000T0000Z.000000T0000Z.000000T0000Z.OO/",
"Pinellas-Hardee-Highlands-Coastal Hillsborough-Coastal Sarasota-",
"1210 AM EST Fri Jan 1 2010",
"...FLOOD WATCH IN EFFECT UNTIL 3 AM EST EARLY THIS MORNING...",
"The National Weather Service in Tampa Bay Ruskin has issued a",
"* Flood Watch for portions of central Florida, southern Florida, and western Florida, including the following areas, in central Florida, Aunt Ruby. In southern Florida, Tampa. In western Florida, Clearfield.",
"* Until 3 AM EST early this morning",
"* |* Basis for the watch *|",
"* |* (optional) potential impacts of flooding *|",
"PRECAUTIONARY/PREPAREDNESS ACTIONS...",
"A Flood Watch means there is a potential for flooding based on current forecasts.",
"You should monitor later forecasts and be alert for possible Flood Warnings. Those living in areas prone to flooding should be prepared to take action should flooding develop.",
"&&",
"$$",
],
},
{
"commentary": "two states, single area 1st, mulitple area 2nd, w/o location",
"name": "Hazard_FFA_16a",
"drtTime": "20100101_0510",
"productType": "Hazard_FFA_Local",
"cmdLineVars": "{('Flood Reason', 'floodReason'): 'ER '}",
"decodeVTEC": 0,
"vtecMode": "O",
"fileChanges": [("AreaDictionary", "TextUtility", "add", areaT1, "delete"),],
"createGrids": [
("Fcst", "Hazards", "DISCRETE", -100, 100, "<None>", "all"),
("Fcst", "Hazards", "DISCRETE", 0, 3, "FA.A", ["FLZ050","FLZ052",
"FLZ155","FLZ061"]),
],
"checkStrings": [
"WGUS62 KTBW 010510",
"FFATBW",
"URGENT - IMMEDIATE BROADCAST REQUESTED",
"Flood Watch",
"National Weather Service Tampa Bay Ruskin FL",
"1210 AM EST Fri Jan 1 2010",
"...|*Overview headline (must edit)*|...",
".|*Overview (must edit)*|.",
"FLZ050-052-061-155-010800-",
"/O.NEW.KTBW.FA.A.0001.100101T0510Z-100101T0800Z/",
"/00000.0.ER.000000T0000Z.000000T0000Z.000000T0000Z.OO/",
"Pinellas-Polk-DeSoto-Coastal Manatee-",
"1210 AM EST Fri Jan 1 2010",
"...FLOOD WATCH IN EFFECT UNTIL 3 AM EST EARLY THIS MORNING...",
"The National Weather Service in Tampa Bay Ruskin has issued a",
"* Flood Watch for portions of western Florida and Georgia, including the following areas, in western Florida, Pinellas. In Georgia, Coastal Manatee, DeSoto, and Polk.",
"* Until 3 AM EST early this morning",
"* |* Basis for the watch *|",
"* |* (optional) potential impacts of flooding *|",
"PRECAUTIONARY/PREPAREDNESS ACTIONS...",
"A Flood Watch means there is a potential for flooding based on current forecasts.",
"You should monitor later forecasts and be alert for possible Flood Warnings. Those living in areas prone to flooding should be prepared to take action should flooding develop.",
"&&",
"$$",
],
},
{
"commentary": "two states, single area 1st, mulitple area 2nd, w location",
"name": "Hazard_FFA_16b",
"drtTime": "20100101_0510",
"productType": "Hazard_FFA_Local",
"cmdLineVars": "{('Flood Reason', 'floodReason'): 'ER '}",
"decodeVTEC": 0,
"vtecMode": "O",
"fileChanges": [("AreaDictionary", "TextUtility", "add", areaT2, "delete"),],
"createGrids": [
("Fcst", "Hazards", "DISCRETE", -100, 100, "<None>", "all"),
("Fcst", "Hazards", "DISCRETE", 0, 3, "FA.A", ["FLZ050","FLZ052",
"FLZ155","FLZ061"]),
],
"checkStrings": [
"WGUS62 KTBW 010510",
"FFATBW",
"URGENT - IMMEDIATE BROADCAST REQUESTED",
"Flood Watch",
"National Weather Service Tampa Bay Ruskin FL",
"1210 AM EST Fri Jan 1 2010",
"...|*Overview headline (must edit)*|...",
".|*Overview (must edit)*|.",
"FLZ050-052-061-155-010800-",
"/O.NEW.KTBW.FA.A.0001.100101T0510Z-100101T0800Z/",
"/00000.0.ER.000000T0000Z.000000T0000Z.000000T0000Z.OO/",
"Pinellas-Polk-DeSoto-Coastal Manatee-",
"1210 AM EST Fri Jan 1 2010",
"...FLOOD WATCH IN EFFECT UNTIL 3 AM EST EARLY THIS MORNING...",
"The National Weather Service in Tampa Bay Ruskin has issued a",
"* Flood Watch for portions of western Florida and Georgia, including the following areas, in western Florida, Clearfield. In Georgia, Atlanta, Beach, and Tampa.",
"* Until 3 AM EST early this morning",
"* |* Basis for the watch *|",
"* |* (optional) potential impacts of flooding *|",
"PRECAUTIONARY/PREPAREDNESS ACTIONS...",
"A Flood Watch means there is a potential for flooding based on current forecasts.",
"You should monitor later forecasts and be alert for possible Flood Warnings. Those living in areas prone to flooding should be prepared to take action should flooding develop.",
"&&",
"$$",
],
},
{
"commentary": "two states, multiple areas, w/o location",
"name": "Hazard_FFA_17a",
"drtTime": "20100101_0510",
"productType": "Hazard_FFA_Local",
"cmdLineVars": "{('Flood Reason', 'floodReason'): 'ER '}",
"decodeVTEC": 0,
"vtecMode": "O",
"fileChanges": [("AreaDictionary", "TextUtility", "add", areaT1, "delete"),],
"createGrids": [
("Fcst", "Hazards", "DISCRETE", -100, 100, "<None>", "all"),
("Fcst", "Hazards", "DISCRETE", 0, 3, "FA.A", ["FLZ050","FLZ057",
"FLZ160","FLZ151","FLZ052","FLZ155","FLZ061","FLZ148"]),
],
"checkStrings": [
"WGUS62 KTBW 010510",
"FFATBW",
"URGENT - IMMEDIATE BROADCAST REQUESTED",
"Flood Watch",
"National Weather Service Tampa Bay Ruskin FL",
"1210 AM EST Fri Jan 1 2010",
"...|*Overview headline (must edit)*|...",
".|*Overview (must edit)*|.",
"FLZ050-052-057-061-148-151-155-160-010800-",
"/O.NEW.KTBW.FA.A.0001.100101T0510Z-100101T0800Z/",
"/00000.0.ER.000000T0000Z.000000T0000Z.000000T0000Z.OO/",
"Pinellas-Polk-Highlands-DeSoto-Coastal Hernando-",
"Coastal Hillsborough-Coastal Manatee-Coastal Sarasota-",
"1210 AM EST Fri Jan 1 2010",
"...FLOOD WATCH IN EFFECT UNTIL 3 AM EST EARLY THIS MORNING...",
"The National Weather Service in Tampa Bay Ruskin has issued a",
"* Flood Watch for portions of Florida and Georgia, including the following areas, in Florida, Coastal Hillsborough, Coastal Sarasota, Highlands, and Pinellas. In Georgia, Coastal Hernando, Coastal Manatee, DeSoto, and Polk.",
"* Until 3 AM EST early this morning",
"* |* Basis for the watch *|",
"* |* (optional) potential impacts of flooding *|",
"PRECAUTIONARY/PREPAREDNESS ACTIONS...",
"A Flood Watch means there is a potential for flooding based on current forecasts.",
"You should monitor later forecasts and be alert for possible Flood Warnings. Those living in areas prone to flooding should be prepared to take action should flooding develop.",
"&&",
"$$",
],
},
{
"commentary": "two states, multiple areas, w location",
"name": "Hazard_FFA_17b",
"drtTime": "20100101_0510",
"productType": "Hazard_FFA_Local",
"cmdLineVars": "{('Flood Reason', 'floodReason'): 'ER '}",
"decodeVTEC": 0,
"vtecMode": "O",
"fileChanges": [("AreaDictionary", "TextUtility", "add", areaT2, "delete"),],
"createGrids": [
("Fcst", "Hazards", "DISCRETE", -100, 100, "<None>", "all"),
("Fcst", "Hazards", "DISCRETE", 0, 3, "FA.A", ["FLZ050","FLZ057",
"FLZ160","FLZ151","FLZ052","FLZ155","FLZ061","FLZ148"]),
],
"checkStrings": [
"WGUS62 KTBW 010510",
"FFATBW",
"URGENT - IMMEDIATE BROADCAST REQUESTED",
"Flood Watch",
"National Weather Service Tampa Bay Ruskin FL",
"1210 AM EST Fri Jan 1 2010",
"...|*Overview headline (must edit)*|...",
".|*Overview (must edit)*|.",
"FLZ050-052-057-061-148-151-155-160-010800-",
"/O.NEW.KTBW.FA.A.0001.100101T0510Z-100101T0800Z/",
"/00000.0.ER.000000T0000Z.000000T0000Z.000000T0000Z.OO/",
"Pinellas-Polk-Highlands-DeSoto-Coastal Hernando-",
"Coastal Hillsborough-Coastal Manatee-Coastal Sarasota-",
"1210 AM EST Fri Jan 1 2010",
"...FLOOD WATCH IN EFFECT UNTIL 3 AM EST EARLY THIS MORNING...",
"The National Weather Service in Tampa Bay Ruskin has issued a",
"* Flood Watch for portions of Florida and Georgia, including the following areas, in Florida, Aunt Ruby and Clearfield. In Georgia, Atlanta, Beach, and Tampa.",
"* Until 3 AM EST early this morning",
"* |* Basis for the watch *|",
"* |* (optional) potential impacts of flooding *|",
"PRECAUTIONARY/PREPAREDNESS ACTIONS...",
"A Flood Watch means there is a potential for flooding based on current forecasts.",
"You should monitor later forecasts and be alert for possible Flood Warnings. Those living in areas prone to flooding should be prepared to take action should flooding develop.",
"&&",
"$$",
],
},
{
"commentary": "parishes 1, independent 1, counties 1",
"name": "Hazard_FFA_18a",
"drtTime": "20100101_0510",
"productType": "Hazard_FFA_Local",
"cmdLineVars": "{('Flood Reason', 'floodReason'): 'ER '}",
"decodeVTEC": 0,
"vtecMode": "O",
"fileChanges": [
("AreaDictionary", "TextUtility", "add", areaT3, "delete"),
("Hazard_FFA_Local", "TextProduct", "replace",
(areaT3FIPS0, areaT3FIPS1), "delete"),
],
"createGrids": [
("Fcst", "Hazards", "DISCRETE", -100, 100, "<None>", "all"),
("Fcst", "Hazards", "DISCRETE", 0, 3, "FA.A", ["FLC017","FLC027",
"FLC053"]),
],
"checkStrings": [
"WGUS62 KTBW 010510",
"FFATBW",
"URGENT - IMMEDIATE BROADCAST REQUESTED",
"Flood Watch",
"National Weather Service Tampa Bay Ruskin FL",
"1210 AM EST Fri Jan 1 2010",
"...|*Overview headline (must edit)*|...",
".|*Overview (must edit)*|.",
"FLC017-027-053-010800-",
"/O.NEW.KTBW.FA.A.0001.100101T0510Z-100101T0800Z/",
"/00000.0.ER.000000T0000Z.000000T0000Z.000000T0000Z.OO/",
"Citrus-DeSoto-Hernando-",
"1210 AM EST Fri Jan 1 2010",
"...FLOOD WATCH IN EFFECT UNTIL 3 AM EST EARLY THIS MORNING...",
"The National Weather Service in Tampa Bay Ruskin has issued a",
"* Flood Watch for portions of western Florida and western Louisiana, including the following county, independent city, and parish, in western Florida, Hernando. In western Louisiana, Citrus and DeSoto.",
"* Until 3 AM EST early this morning",
"* |* Basis for the watch *|",
"* |* (optional) potential impacts of flooding *|",
"PRECAUTIONARY/PREPAREDNESS ACTIONS...",
"A Flood Watch means there is a potential for flooding based on current forecasts.",
"You should monitor later forecasts and be alert for possible Flood Warnings. Those living in areas prone to flooding should be prepared to take action should flooding develop.",
"&&",
"$$",
],
},
{
"commentary": "parishes 2, independent 1, counties 1",
"name": "Hazard_FFA_18b",
"drtTime": "20100101_0510",
"productType": "Hazard_FFA_Local",
"cmdLineVars": "{('Flood Reason', 'floodReason'): 'ER '}",
"decodeVTEC": 0,
"vtecMode": "O",
"fileChanges": [
("AreaDictionary", "TextUtility", "add", areaT3, "delete"),
("Hazard_FFA_Local", "TextProduct", "replace",
(areaT3FIPS0, areaT3FIPS1), "delete"),
],
"createGrids": [
("Fcst", "Hazards", "DISCRETE", -100, 100, "<None>", "all"),
("Fcst", "Hazards", "DISCRETE", 0, 3, "FA.A", ["FLC017","FLC027",
"FLC053","FLC105"]),
],
"checkStrings": [
"WGUS62 KTBW 010510",
"FFATBW",
"URGENT - IMMEDIATE BROADCAST REQUESTED",
"Flood Watch",
"National Weather Service Tampa Bay Ruskin FL",
"1210 AM EST Fri Jan 1 2010",
"...|*Overview headline (must edit)*|...",
".|*Overview (must edit)*|.",
"FLC017-027-053-105-010800-",
"/O.NEW.KTBW.FA.A.0001.100101T0510Z-100101T0800Z/",
"/00000.0.ER.000000T0000Z.000000T0000Z.000000T0000Z.OO/",
"Citrus-DeSoto-Hernando-Polk-",
"1210 AM EST Fri Jan 1 2010",
"...FLOOD WATCH IN EFFECT UNTIL 3 AM EST EARLY THIS MORNING...",
"The National Weather Service in Tampa Bay Ruskin has issued a",
"* Flood Watch for portions of western Florida and western Louisiana, including the following county, independent city, and parishes, in western Florida, Hernando. In western Louisiana, Citrus, DeSoto, and Polk.",
"* Until 3 AM EST early this morning",
"* |* Basis for the watch *|",
"* |* (optional) potential impacts of flooding *|",
"PRECAUTIONARY/PREPAREDNESS ACTIONS...",
"A Flood Watch means there is a potential for flooding based on current forecasts.",
"You should monitor later forecasts and be alert for possible Flood Warnings. Those living in areas prone to flooding should be prepared to take action should flooding develop.",
"&&",
"$$",
],
},
]
import TestScript
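
# NOTE: the runner hook was truncated in this dump; the usual GFE test-file
# pattern hands the case list above to TestScript.generalTestScript. The
# `scripts` variable name and the defaults dict below are assumptions.
def testScript(self, dataLocation):
    defaults = {"decodeVTEC": 1}
    return TestScript.generalTestScript(self, scripts, defaults, {})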
| 47.342884 | 261 | 0.588162 |
c70bf8219d2bb2dabd3039c6feeeaba05de046c4 | 1,701 | py | Python | main.py | hasanzadeh99/mapna_test_2021 | 1e2e50a9aff32e2d730bf3d0fd20393e5aea0872 | [
"MIT"
] | null | null | null | main.py | hasanzadeh99/mapna_test_2021 | 1e2e50a9aff32e2d730bf3d0fd20393e5aea0872 | [
"MIT"
] | null | null | null | main.py | hasanzadeh99/mapna_test_2021 | 1e2e50a9aff32e2d730bf3d0fd20393e5aea0872 | [
"MIT"
] | null | null | null | import time
old_input_value = False
flag_falling_edge = None
start = None
flag_output_mask = False
DELAY_CONST = 10 # delay time from falling edge ... .
output = None
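
# NOTE: response_function was truncated in this dump; a minimal sketch of a
# falling-edge delay inferred from the globals above. The real input/output
# source behind read_input()/write_output() is an assumption.
def read_input():
    return False  # placeholder for the real digital input


def write_output(value):
    print("output:", value)  # placeholder for the real digital output


def response_function():
    global old_input_value, flag_falling_edge, start, flag_output_mask, output
    new_input_value = read_input()
    if old_input_value and not new_input_value:  # falling edge detected
        flag_falling_edge = True
        start = time.time()
    old_input_value = new_input_value
    if flag_falling_edge and time.time() - start >= DELAY_CONST:
        flag_output_mask = True  # mask lifts DELAY_CONST seconds after the edge
        output = new_input_value
        write_output(output)
        flag_falling_edge = False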
if __name__ == '__main__':
DELAY_CONST=int(input("Hello \nPlease Enter Your delay value here :"))
while True:
response_function()
| 25.772727 | 79 | 0.621399 |
c70c23e78ecc9c77169196b937ad121dbbab19c4 | 1,345 | py | Python | ansiblemetrics/playbook/num_deprecated_modules.py | radon-h2020/AnsibleMetrics | 8a8e27d9b54fc1578d00526c8663184a2e686cb2 | [
"Apache-2.0"
] | 1 | 2020-04-24T16:09:14.000Z | 2020-04-24T16:09:14.000Z | ansiblemetrics/playbook/num_deprecated_modules.py | radon-h2020/AnsibleMetrics | 8a8e27d9b54fc1578d00526c8663184a2e686cb2 | [
"Apache-2.0"
] | null | null | null | ansiblemetrics/playbook/num_deprecated_modules.py | radon-h2020/AnsibleMetrics | 8a8e27d9b54fc1578d00526c8663184a2e686cb2 | [
"Apache-2.0"
] | null | null | null | from ansiblemetrics.ansible_modules import DEPRECATED_MODULES_LIST
from ansiblemetrics.ansible_metric import AnsibleMetric
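
# NOTE: the metric class was truncated in this dump; a sketch of the usual
# AnsibleMetrics shape (a count() method over parsed tasks). How the base
# class exposes tasks is an assumption here.
class NumDeprecatedModules(AnsibleMetric):
    """Number of tasks that use a deprecated module."""

    def count(self):
        deprecated = 0
        for task in self.playbook:  # assumption: base class yields parsed tasks
            for key in task:
                if key in DEPRECATED_MODULES_LIST:
                    deprecated += 1
        return deprecated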
| 25.377358 | 90 | 0.553903 |
c70c9127731c0e67539a6749c14a06e75f1c3481 | 789 | py | Python | app/api/v1/validators/validators.py | GraceKiarie/iReporter | 1011f878f9fb643798192aeed1b68c3e6de4dedc | [
"MIT"
] | 1 | 2018-12-14T09:52:39.000Z | 2018-12-14T09:52:39.000Z | app/api/v1/validators/validators.py | GraceKiarie/iReporter | 1011f878f9fb643798192aeed1b68c3e6de4dedc | [
"MIT"
] | 6 | 2018-12-08T11:15:46.000Z | 2018-12-15T11:04:36.000Z | app/api/v1/validators/validators.py | GraceKiarie/iReporter | 1011f878f9fb643798192aeed1b68c3e6de4dedc | [
"MIT"
] | 5 | 2018-12-04T11:00:54.000Z | 2019-06-13T12:53:50.000Z | """ This module does validation for data input in incidents """
import re
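
# NOTE: the validator functions were truncated in this dump; minimal
# sketches of the kind of checks the module docstring describes. Function
# names and patterns are assumptions.
def validate_string(value):
    """Reject empty or whitespace-only input."""
    return bool(value) and bool(value.strip())


def validate_coordinates(value):
    """Accept 'lat,long' decimal pairs such as '-1.28333, 36.81667'."""
    return bool(re.match(r'^-?\d+(\.\d+)?\s*,\s*-?\d+(\.\d+)?$', value))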
| 24.65625 | 70 | 0.532319 |
c70da4e644f1e748e2087d4c879dc99b2751ebd0 | 2,710 | py | Python | bin/find_latest_versions.py | ebreton/ghost-in-a-shell | 8b3382d60a86322c74c6ee1b52f068dfcfc3d79e | [
"MIT"
] | 2 | 2018-05-31T08:56:16.000Z | 2020-01-23T15:12:44.000Z | bin/find_latest_versions.py | ebreton/ghost-in-a-shell | 8b3382d60a86322c74c6ee1b52f068dfcfc3d79e | [
"MIT"
] | null | null | null | bin/find_latest_versions.py | ebreton/ghost-in-a-shell | 8b3382d60a86322c74c6ee1b52f068dfcfc3d79e | [
"MIT"
] | null | null | null | #!/usr/bin/python
from distutils.version import LooseVersion
import argparse
import logging
import requests
import re
session = requests.Session()
# authorization token
TOKEN_URL = "https://auth.docker.io/token?service=registry.docker.io&scope=repository:%s:pull"
# find all tags
TAGS_URL = "https://index.docker.io/v2/%s/tags/list"
TAG_RE = re.compile("^[\d]+(\.[\d]+)*$")
# get image digest for target
TARGET_DIGEST = "https://index.docker.io/v2/%(repository)s/manifests/%(tag)s"
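
# NOTE: find_latest was truncated in this dump; a sketch that follows the
# constants above: fetch a pull token from auth.docker.io, list the tags,
# keep the purely numeric ones and return the highest per LooseVersion.
def find_latest(repository):
    token = session.get(TOKEN_URL % repository).json()["token"]
    headers = {"Authorization": "Bearer %s" % token}
    tags = session.get(TAGS_URL % repository, headers=headers).json()["tags"]
    versions = [t for t in tags if TAG_RE.match(t)]
    logging.debug("found %d numeric tags", len(versions))
    return max(versions, key=LooseVersion)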
if __name__ == '__main__':
parser = argparse.ArgumentParser(
usage="""Version checker script
This file retreives the latest version of ghost container image from docker hub
It can be run with both python 2.7 and 3.6""")
parser.add_argument("repository", nargs='?',
help="repository name [default:library/ghost]",
default="library/ghost")
parser.add_argument('-d', '--debug', action='store_true')
parser.add_argument('-q', '--quiet', action='store_true')
args = parser.parse_args()
# set up level of logging
level = logging.INFO
if args.quiet:
level = logging.WARNING
elif args.debug:
level = logging.DEBUG
# set up logging to console
logging.basicConfig(format='%(levelname)s - %(funcName)s - %(message)s')
logger = logging.getLogger()
logger.setLevel(level)
logging.debug(args)
# version needs to be print to output in order to be retrieved by Makefile
print(find_latest(args.repository))
| 30.449438 | 94 | 0.667897 |
c70ef8c2db16a8357afdb58004c2cb5a69fd6d01 | 326 | py | Python | tests/conftest.py | badarsebard/terraform-pytest | 58c8096f0405ec1d0061723fc1dd2d099655c3c5 | [
"MIT"
] | null | null | null | tests/conftest.py | badarsebard/terraform-pytest | 58c8096f0405ec1d0061723fc1dd2d099655c3c5 | [
"MIT"
] | null | null | null | tests/conftest.py | badarsebard/terraform-pytest | 58c8096f0405ec1d0061723fc1dd2d099655c3c5 | [
"MIT"
] | 1 | 2021-11-19T16:36:31.000Z | 2021-11-19T16:36:31.000Z | from .terraform import TerraformManager
import pytest
from _pytest.tmpdir import TempPathFactory
| 25.076923 | 90 | 0.760736 |
c70f068d9386d59199952ccdcd03582e192c0909 | 2,933 | py | Python | pelicanconf.py | myrle-krantz/treasurer-site | e0beca3d0d724ae09300974f7020a5611fbd3034 | [
"Apache-2.0"
] | 1 | 2021-11-09T21:42:44.000Z | 2021-11-09T21:42:44.000Z | pelicanconf.py | myrle-krantz/treasurer-site | e0beca3d0d724ae09300974f7020a5611fbd3034 | [
"Apache-2.0"
] | 1 | 2021-11-01T11:14:10.000Z | 2021-11-01T11:14:10.000Z | pelicanconf.py | isabella232/treasurer-site | 9a2e33c85e040183df049d63814ef6b1b0bb7a46 | [
"Apache-2.0"
] | 3 | 2021-06-04T09:07:48.000Z | 2021-11-09T21:42:31.000Z | #!/usr/bin/env python
# -*- coding: utf-8 -*- #
# vim: encoding=utf-8
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import unicode_literals
from datetime import date
# import os
# import sys
PATH = 'content'
TIMEZONE = 'UTC'
DEFAULT_LANG = u'en'
AUTHOR = u'Treasurer Team'
SITENAME = u'Apache Treasurer'
SITEDOMAIN = 'treasurer.apache.org'
SITEURL = 'https://treasurer.apache.org'
# SITELOGO = 'https://treasurer.apache.org/images/logo.png'
# SITEDESC = u'<blank>'
SITEREPOSITORY = 'https://github.com/apache/treasurer-site/blob/main/content/pages/'
TRADEMARKS = u'Apache and the Apache feather logo are trademarks or registered trademarks'
CURRENTYEAR = date.today().year
# Save pages using full directory preservation
PAGES_PATHS = ['content']
# PATH_METADATA= '(?P<path_no_ext>.*)\..*'
# PAGE_SAVE_AS= '{path_no_ext}.html'
PAGE_URL = '{slug}.html'
SLUGIFY_SOURCE = 'basename'
PAGE_SAVE_AS = '{slug}.html'
# We want to serve any images
STATIC_PATHS = ['.htaccess', 'images']
# We don't use articles, but we don't want pelican to think
# that content/ contains articles.
ARTICLE_PATHS = ['articles']
# Disable these pages
ARCHIVES_SAVE_AS = ''
ARTICLE_SAVE_AS = ''
AUTHORS_SAVE_AS = ''
CATEGORIES_SAVE_AS = ''
INDEX_SAVE_AS = ''
TAGS_SAVE_AS = ''
# Enable ATOM feed and Disable other feeds
FEED_DOMAIN = SITEURL
FEED_ALL_ATOM = 'feeds/all.atom.xml'
CATEGORY_FEED_ATOM = None
TRANSLATION_FEED_ATOM = None
AUTHOR_FEED_ATOM = None
AUTHOR_FEED_RSS = None
# Pelican Plugins
# The provided location. If the buildbot does not have a new plugin then look into requirements.txt
PLUGIN_PATHS = ['./theme/plugins']
PLUGINS = ['toc', 'pelican-gfm', 'sitemap']
# TOC Generator
TOC_HEADERS = r"h[1-6]"
# Sitemap Generator
SITEMAP = {
"exclude": ["tag/", "category/"],
"format": "xml",
"priorities": {
"articles": 0.1,
"indexes": 0.1,
"pages": 0.8
},
"changefreqs": {
"articles": "never",
"indexes": "never",
"pages": "monthly"
}
}
# Unused links
LINKS = ( )
SOCIAL = ( )
DEFAULT_PAGINATION = False
# Uncomment following line if you want document-relative URLs when developing
# RELATIVE_URLS = True
| 27.411215 | 99 | 0.715309 |
c70f37923d6264953c0f43a70aaafcb143563524 | 10,935 | py | Python | TurtleArt/taturtle.py | sugar-activities/4585-activity | 38e6efd7b4fcb9cf820efaf7406ce7abde92406e | [
"MIT"
] | null | null | null | TurtleArt/taturtle.py | sugar-activities/4585-activity | 38e6efd7b4fcb9cf820efaf7406ce7abde92406e | [
"MIT"
] | null | null | null | TurtleArt/taturtle.py | sugar-activities/4585-activity | 38e6efd7b4fcb9cf820efaf7406ce7abde92406e | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
#Copyright (c) 2010,12 Walter Bender
#Permission is hereby granted, free of charge, to any person obtaining a copy
#of this software and associated documentation files (the "Software"), to deal
#in the Software without restriction, including without limitation the rights
#to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
#copies of the Software, and to permit persons to whom the Software is
#furnished to do so, subject to the following conditions:
#The above copyright notice and this permission notice shall be included in
#all copies or substantial portions of the Software.
#THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
#IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
#FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
#AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
#LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
#OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
#THE SOFTWARE.
from random import uniform
from math import sin, cos, pi, sqrt
from gettext import gettext as _
import gtk
import cairo
from taconstants import TURTLE_LAYER, DEFAULT_TURTLE_COLORS
from tasprite_factory import SVG, svg_str_to_pixbuf
from tacanvas import wrap100, COLOR_TABLE
from sprites import Sprite
from tautils import debug_output
SHAPES = 36
def generate_turtle_pixbufs(colors):
""" Generate pixbufs for generic turtles """
shapes = []
svg = SVG()
svg.set_scale(1.0)
for i in range(SHAPES):
svg.set_orientation(i * 10)
shapes.append(svg_str_to_pixbuf(svg.turtle(colors)))
return shapes
| 34.936102 | 78 | 0.561225 |
c71003847371f17bbe96951b791e894ed7483c4a | 1,384 | py | Python | django_backend/group.py | holg/django_backend | 6cef76a378664e6621619862e6db476788a58992 | [
"BSD-3-Clause"
] | null | null | null | django_backend/group.py | holg/django_backend | 6cef76a378664e6621619862e6db476788a58992 | [
"BSD-3-Clause"
] | null | null | null | django_backend/group.py | holg/django_backend | 6cef76a378664e6621619862e6db476788a58992 | [
"BSD-3-Clause"
] | null | null | null | try:
from django.forms.utils import pretty_name
except ImportError:
from django.forms.forms import pretty_name
from django.template import Context
from django.template.loader import render_to_string
from .compat import context_flatten
| 28.833333 | 77 | 0.66474 |
c7102803d3080f23edcd56ddbfc0360cc305ab8a | 971 | py | Python | src/eodc_openeo_bindings/map_comparison_processes.py | eodcgmbh/eodc-openeo-bindings | 4e80eba036771a0c81359e1ac66862f1eead407b | [
"MIT"
] | null | null | null | src/eodc_openeo_bindings/map_comparison_processes.py | eodcgmbh/eodc-openeo-bindings | 4e80eba036771a0c81359e1ac66862f1eead407b | [
"MIT"
] | 7 | 2020-02-18T17:12:31.000Z | 2020-09-24T07:19:04.000Z | src/eodc_openeo_bindings/map_comparison_processes.py | eodcgmbh/eodc-openeo-bindings | 4e80eba036771a0c81359e1ac66862f1eead407b | [
"MIT"
] | null | null | null | """
"""
from eodc_openeo_bindings.map_utils import map_default
def map_lt(process):
"""
"""
param_dict = {'y': 'float'}
return map_default(process, 'lt', 'apply', param_dict)
def map_lte(process):
"""
"""
param_dict = {'y': 'float'}
return map_default(process, 'lte', 'apply', param_dict)
def map_gt(process):
"""
"""
param_dict = {'y': 'float'}
return map_default(process, 'gt', 'apply', param_dict)
def map_gte(process):
"""
"""
param_dict = {'y': 'float'}
return map_default(process, 'gte', 'apply', param_dict)
def map_eq(process):
"""
"""
param_dict = {'y': 'numpy.array'}
# NOTE: how to map type dynamically to support strings?
if 'delta' in process['arguments']:
param_dict['delta'] = 'int'
if 'case_sensitive' in process['arguments']:
param_dict['case_sensitive'] = 'bool'
return map_default(process, 'eq', 'apply', param_dict)
| 15.918033 | 59 | 0.589083 |
c711129f24117223c3e97558213be4cfb18083e6 | 38 | py | Python | scripts/flow_tests/__init__.py | rombie/contrail-test | a68c71d6f282142501a7e2e889bbb232fdd82dc3 | [
"Apache-2.0"
] | 5 | 2020-09-29T00:36:57.000Z | 2022-02-16T06:51:32.000Z | serial_scripts/system_test/flow_tests/__init__.py | vkolli/contrail-test-perf | db04b8924a2c330baabe3059788b149d957a7d67 | [
"Apache-2.0"
] | 27 | 2019-11-02T02:18:34.000Z | 2022-02-24T18:49:08.000Z | serial_scripts/system_test/flow_tests/__init__.py | vkolli/contrail-test-perf | db04b8924a2c330baabe3059788b149d957a7d67 | [
"Apache-2.0"
] | 20 | 2019-11-28T16:02:25.000Z | 2022-01-06T05:56:58.000Z | """FLOW RELATED SYSTEM TEST CASES."""
| 19 | 37 | 0.684211 |
c711b732931b1daa135dbab87c710f6b0e8237b0 | 1,444 | py | Python | server/main.py | KejiaQiang/Spicy_pot_search | 72aaa9618e54178da513371802c2bcb751037bb0 | [
"MIT"
] | 1 | 2021-03-04T09:02:05.000Z | 2021-03-04T09:02:05.000Z | server/main.py | yanansong0930/Spicy_pot_search | 72aaa9618e54178da513371802c2bcb751037bb0 | [
"MIT"
] | null | null | null | server/main.py | yanansong0930/Spicy_pot_search | 72aaa9618e54178da513371802c2bcb751037bb0 | [
"MIT"
] | 1 | 2021-03-04T08:59:02.000Z | 2021-03-04T08:59:02.000Z | #!/usr/bin/python
# -*- coding: utf-8 -*-
from flask import Flask, request, abort, render_template
from datetime import timedelta
import pymysql
from search import start_search, decorate
page_dir = "E:/WEBPAGES_RAW"
app = Flask(__name__)
app.config['DEBUG'] = True
app.config['SEND_FILE_MAX_AGE_DEFAULT'] = timedelta(seconds=1)
connection = pymysql.connect(host="localhost",port=3306,user="root",db="spicy_pot")
cursor = connection.cursor()
app.run(host='0.0.0.0',port=80,debug=True)
| 29.469388 | 103 | 0.637812 |
c711e0dd9090b2b45a4e1e0eca15dbcffe106551 | 5,355 | py | Python | examples/3d/subduction/viz/plot_dispwarp.py | cehanagan/pylith | cf5c1c34040460a82f79b6eb54df894ed1b1ee93 | [
"MIT"
] | 93 | 2015-01-08T16:41:22.000Z | 2022-02-25T13:40:02.000Z | examples/3d/subduction/viz/plot_dispwarp.py | sloppyjuicy/pylith | ac2c1587f87e45c948638b19560813d4d5b6a9e3 | [
"MIT"
] | 277 | 2015-02-20T16:27:35.000Z | 2022-03-30T21:13:09.000Z | examples/3d/subduction/viz/plot_dispwarp.py | sloppyjuicy/pylith | ac2c1587f87e45c948638b19560813d4d5b6a9e3 | [
"MIT"
] | 71 | 2015-03-24T12:11:08.000Z | 2022-03-03T04:26:02.000Z | #!/usr/bin/env pvpython
# -*- Python -*- (syntax highlighting)
# ----------------------------------------------------------------------
#
# Brad T. Aagaard, U.S. Geological Survey
# Charles A. Williams, GNS Science
# Matthew G. Knepley, University at Buffalo
#
# This code was developed as part of the Computational Infrastructure
# for Geodynamics (http://geodynamics.org).
#
# Copyright (c) 2010-2021 University of California, Davis
#
# See LICENSE.md.md for license information.
#
# ----------------------------------------------------------------------
# Plot the undeformed domain as a gray wireframe and then the deformed
# domain, colored by the value of the x-displacemenet.
# User-specified parameters.
#
# Default values for parameters. To use different values, overwrite
# them in the ParaView Python shell or on the command line. For
# example, set OUTPUT_DIR to the absolute path if not starting
# ParaView from the terminal shell where you ran PyLith:
#
# import os
# OUTPUT_DIR = os.path.join(os.environ["HOME"], "src", "pylith", "examples", "2d", "subduction", "output")
DEFAULTS = {
"OUTPUT_DIR": "output",
"SIM": "step02",
"WARP_SCALE": 10.0e+3,
"FIELD": "displacement",
"FIELD_COMPONENT": "Magnitude",
"TIMESTEP": 0, # Use 0 for first, -1 for last.
}
# ----------------------------------------------------------------------
from paraview.simple import *
import os
# ----------------------------------------------------------------------
if __name__ == "__main__":
# Running from outside the ParaView GUI via pvpython
import argparse
parser = argparse.ArgumentParser()
parser.add_argument("--output-dir", action="store", dest="output_dir", default=DEFAULTS["OUTPUT_DIR"])
parser.add_argument("--sim", action="store", dest="sim", default=DEFAULTS["SIM"])
parser.add_argument("--warp-scale", action="store", type=float, dest="warp_scale", default=DEFAULTS["WARP_SCALE"])
parser.add_argument("--field", action="store", dest="field", default=DEFAULTS["FIELD"])
parser.add_argument("--component", action="store", dest="field_component", default=DEFAULTS["FIELD_COMPONENT"])
parser.add_argument("--timestep", action="store", dest="timestep", default=-1)
parser.add_argument("--screenshot", action="store", dest="screenshot")
args = parser.parse_args()
visualize(args)
view = GetRenderView()
view.CameraPosition = [78002.89373974672, -1531813.1739094853, 595774.2094961794]
view.CameraFocalPoint = [-45014.6313325238, 149523.68421156122, -335271.271063906]
view.CameraViewUp = [0.0, 0.0, 1.0]
view.ViewSize = [960, 540]
view.Update()
if args.screenshot:
WriteImage(args.screenshot)
Interact()
else:
# Running inside the ParaView GUI
visualize(Parameters())
# End of file
| 35 | 118 | 0.651727 |
c713402fab437e2023ffb914ab06de89a1b21a69 | 220 | py | Python | src/spaceone/inventory/manager/rds_manager.py | jean1042/plugin-aws-cloud-services | 1cf192557b03478af33ae81f40b2a49f735716bb | [
"Apache-2.0"
] | 4 | 2020-06-22T01:48:07.000Z | 2020-08-24T00:51:09.000Z | src/spaceone/inventory/manager/rds_manager.py | jean1042/plugin-aws-cloud-services | 1cf192557b03478af33ae81f40b2a49f735716bb | [
"Apache-2.0"
] | 2 | 2020-07-20T01:58:32.000Z | 2020-08-04T07:41:37.000Z | src/spaceone/inventory/manager/rds_manager.py | jean1042/plugin-aws-cloud-services | 1cf192557b03478af33ae81f40b2a49f735716bb | [
"Apache-2.0"
] | 6 | 2020-06-22T09:19:40.000Z | 2020-09-17T06:35:37.000Z | from spaceone.inventory.libs.manager import AWSManager
# todo: __init__
#
| 24.444444 | 54 | 0.777273 |
c714251263633c1447c106182ffec957c2c483cc | 1,775 | py | Python | script/upload-checksums.py | fireball-x/atom-shell | d229338e40058a9b4323b2544f62818a3c55748c | [
"MIT"
] | 4 | 2016-04-02T14:53:54.000Z | 2017-07-26T05:47:43.000Z | script/upload-checksums.py | cocos-creator/atom-shell | d229338e40058a9b4323b2544f62818a3c55748c | [
"MIT"
] | null | null | null | script/upload-checksums.py | cocos-creator/atom-shell | d229338e40058a9b4323b2544f62818a3c55748c | [
"MIT"
] | 2 | 2015-07-18T09:31:03.000Z | 2019-12-24T09:55:03.000Z | #!/usr/bin/env python
import argparse
import hashlib
import os
import tempfile
from lib.config import s3_config
from lib.util import download, rm_rf, s3put
DIST_URL = 'https://atom.io/download/atom-shell/'
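
# NOTE: main() and its helpers were truncated in this dump; a minimal
# sketch of the checksum step only. The original's download/s3put calls from
# lib.util are omitted because their exact signatures are not shown here.
def main():
    args = parse_args()
    checksums = [sha256(path) for path in args.files]
    print '\n'.join(checksums)
    return 0


def parse_args():
    parser = argparse.ArgumentParser(description='upload checksums')
    parser.add_argument('files', nargs='+', help='release files to hash')
    return parser.parse_args()


def sha256(path):
    hasher = hashlib.sha256()
    with open(path, 'rb') as f:
        for chunk in iter(lambda: f.read(1 << 20), ''):
            hasher.update(chunk)
    return '{} *{}'.format(hasher.hexdigest(), os.path.basename(path))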
if __name__ == '__main__':
import sys
sys.exit(main())
| 23.666667 | 75 | 0.668169 |
c71481b1ca69523b36b0345fe995b27fb6d37535 | 2,533 | py | Python | pythoncode/kmeansimage.py | loganpadon/PokemonOneShot | 22f9904250c8c90b4fe4573d6ca060fd9f95c1d3 | [
"MIT"
] | null | null | null | pythoncode/kmeansimage.py | loganpadon/PokemonOneShot | 22f9904250c8c90b4fe4573d6ca060fd9f95c1d3 | [
"MIT"
] | 1 | 2019-04-04T20:40:20.000Z | 2019-04-04T20:40:20.000Z | pythoncode/kmeansimage.py | loganpadon/PokemonOneShot | 22f9904250c8c90b4fe4573d6ca060fd9f95c1d3 | [
"MIT"
] | null | null | null | # import the necessary packages
from sklearn.cluster import KMeans
import skimage
import matplotlib.pyplot as plt
import argparse
import cv2
import numpy as np
# construct the argument parser and parse the arguments
ap = argparse.ArgumentParser()
ap.add_argument("-i", "--image", required = True, help = "Path to the image")
ap.add_argument("-c", "--clusters", required = True, type = int,
help = "# of clusters")
args = vars(ap.parse_args())
# load the image and convert it from BGR to RGB so that
# we can dispaly it with matplotlib
image = cv2.imread(args["image"])
image2 = cv2.cvtColor(image, cv2.COLOR_BGR2RGB)
image = skimage.color.rgb2lab(image2)
# show our image
plt.figure()
plt.axis("off")
plt.imshow(image2)
# reshape the image to be a list of pixels
imagedata = image.reshape((image.shape[0] * image.shape[1], 3))
# cluster the pixel intensities
clt = KMeans(n_clusters = args["clusters"])
clt.fit(imagedata)
hist = centroid_histogram(clt)
bar = plot_colors(hist, clt.cluster_centers_)
# show our color bar
plt.figure()
plt.axis("off")
plt.imshow(bar)
imagek=mean_image(image,clt)
plt.figure()
plt.axis("off")
plt.imshow(imagek)
plt.show()
| 28.460674 | 78 | 0.696802 |
c716271a9b4b9b525bfcb14f8c07170e7179b37f | 134 | py | Python | tests/encode.py | EddieBreeg/C_b64 | d49b155d1ae889c2ab779f54e6215f9d5e1031e6 | [
"MIT"
] | null | null | null | tests/encode.py | EddieBreeg/C_b64 | d49b155d1ae889c2ab779f54e6215f9d5e1031e6 | [
"MIT"
] | null | null | null | tests/encode.py | EddieBreeg/C_b64 | d49b155d1ae889c2ab779f54e6215f9d5e1031e6 | [
"MIT"
] | null | null | null | from sys import argv
from base64 import b64encode
with open("data", 'rb') as fIn:
b = fIn.read()
print(b64encode(b).decode()) | 22.333333 | 32 | 0.671642 |
c7162d1c243872610bbf29a5583204c35093859d | 1,691 | py | Python | src/json_sort/lib.py | cdumay/json-sort | a76fe2deaad649264e8ca0d1cc096d9741c60a04 | [
"Apache-2.0"
] | 3 | 2017-01-03T14:36:25.000Z | 2021-03-06T05:42:08.000Z | src/json_sort/lib.py | cdumay/json-sort | a76fe2deaad649264e8ca0d1cc096d9741c60a04 | [
"Apache-2.0"
] | null | null | null | src/json_sort/lib.py | cdumay/json-sort | a76fe2deaad649264e8ca0d1cc096d9741c60a04 | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
.. codeauthor:: Cdric Dumay <cedric.dumay@gmail.com>
"""
import logging
import sys, os, json
from cdumay_rest_client.client import RESTClient
from cdumay_rest_client.exceptions import NotFound, HTTPException
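
# NOTE: the original definition was truncated in this dump; this sketch is
# inferred from the otherwise-unused NotFound import and from the raise site
# in file_exists() below, which passes message/extra keyword arguments.
class NoSuchFile(NotFound):
    """Raised when the input file does not exist."""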
def oncritical(exc):
"""description of oncritical"""
if isinstance(exc, HTTPException):
logging.critical(exc.message)
else:
logging.critical(str(exc))
sys.exit(1)
def file_exists(filename):
"""description of file_exists"""
filename = os.path.realpath(filename)
logging.debug("Checking file: {}".format(filename))
if not os.path.exists(filename):
raise NoSuchFile(
message="No such file '{}'".format(filename),
extra=dict(filename=filename)
)
return filename
def file_write(dst, data):
"""description of file_write"""
if dst:
dst = os.path.realpath(dst)
logging.debug("Saving to: {}".format(dst))
out = open(dst, "w")
else:
logging.debug("Current std will be used")
out = sys.stdout
json.dump(
data, out, ensure_ascii=False, sort_keys=True, indent=2,
separators=(',', ': ')
)
def from_local(src, dst=None):
"""description of from_local"""
try:
file_write(dst, json.load(open(file_exists(src), "r")))
except Exception as exc:
oncritical(exc)
def from_remote(src, dst=None):
"""description of fromurl"""
try:
file_write(
dst, RESTClient(server=src).do_request(method="GET", path="")
)
except Exception as exc:
oncritical(exc)
| 23.486111 | 73 | 0.622708 |
c7165074ee0affcd71c302a41edf2c2139ea9a06 | 4,484 | py | Python | test/test_create_dataset.py | gregstarr/ttools | fc8dcbf094370e9885311126724697830167d931 | [
"MIT"
] | null | null | null | test/test_create_dataset.py | gregstarr/ttools | fc8dcbf094370e9885311126724697830167d931 | [
"MIT"
] | null | null | null | test/test_create_dataset.py | gregstarr/ttools | fc8dcbf094370e9885311126724697830167d931 | [
"MIT"
] | null | null | null | import numpy as np
import pytest
import apexpy
import tempfile
import os
import h5py
from ttools import create_dataset, config, io, utils
map_periods = [np.timedelta64(10, 'm'), np.timedelta64(30, 'm'), np.timedelta64(1, 'h'), np.timedelta64(2, 'h')]
def test_calculate_bins():
mlat = np.arange(10)[None, :, None] * np.ones((1, 1, 10))
mlt = np.arange(10)[None, None, :] * np.ones((1, 10, 1))
tec = np.zeros((1, 10, 10))
tec[0, 0, 0] = 10
tec[0, 0, -1] = 20
tec[0, -1, 0] = 30
times = ssmlon = np.ones(1) * np.nan
be = np.array([-.5, 4.5, 9.5])
bins = [be, be]
out_t, out_tec, out_ssm, out_n, out_std = create_dataset.calculate_bins(mlat.ravel(), mlt.ravel(), tec.ravel(),
times, ssmlon, bins)
assert np.isnan(out_t)
assert np.isnan(out_ssm)
assert out_tec.shape == (2, 2)
assert out_tec[0, 0] == 10 / 25
assert out_tec[0, 1] == 20 / 25
assert out_tec[1, 0] == 30 / 25
assert out_tec[1, 1] == 0
assert np.all(out_n == 25)
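

# NOTE: fn_pattern was truncated from this dump; a minimal sketch consistent
# with its use in test_process_dataset below (date -> hdf5 file name). The
# exact name format is an assumption.
def fn_pattern(date):
    return "{}_tec.h5".format(np.datetime_as_string(date, unit='h'))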
def test_process_dataset():
start_date = np.datetime64("2012-03-07")
end_date = np.datetime64("2012-03-08")
file_dt = np.timedelta64(12, 'h')
mlat_bins = np.array([35, 45, 55, 65])
mlt_bins = np.array([-1.5, -.5, .5, 1.5])
dates = np.arange(start_date, end_date, file_dt)
with tempfile.TemporaryDirectory() as tempdir:
files = [os.path.join(tempdir, fn_pattern(d)) for d in dates]
create_dataset.process_dataset(start_date, end_date, mlat_bins, mlt_bins, apex_dt=np.timedelta64(365, 'D'),
file_dt=file_dt, output_dir=tempdir, file_name_pattern=fn_pattern)
grid_fn = os.path.join(tempdir, 'grid.h5')
assert os.path.exists(grid_fn)
with h5py.File(grid_fn, 'r') as f:
mlt_vals = f['mlt'][()]
mlat_vals = f['mlat'][()]
assert np.all(mlt_vals == [-1, 0, 1])
assert np.all(mlat_vals == [40, 50, 60])
for f, d in zip(files, dates):
assert os.path.exists(f)
tec, times, ssmlon, n, std = io.open_tec_file(f)
assert tec.shape == (12, 3, 3)
assert utils.datetime64_to_timestamp(d) == times[0]
| 40.396396 | 115 | 0.599242 |
c717ca8a8d1e158509ebb8f364af201eeca89e64 | 296 | py | Python | docs_src/options/callback/tutorial001.py | madkinsz/typer | a1520dcda685220a9a796288f5eaaebd00d68845 | [
"MIT"
] | 7,615 | 2019-12-24T13:08:20.000Z | 2022-03-31T22:07:53.000Z | docs_src/options/callback/tutorial001.py | madkinsz/typer | a1520dcda685220a9a796288f5eaaebd00d68845 | [
"MIT"
] | 351 | 2019-12-24T22:17:54.000Z | 2022-03-31T15:35:08.000Z | docs_src/options/callback/tutorial001.py | jina-ai/typer | 8b5e14b25ddf0dd777403015883301b17bedcee0 | [
"MIT"
] | 360 | 2019-12-24T15:29:59.000Z | 2022-03-30T20:33:10.000Z | import typer
if __name__ == "__main__":
typer.run(main)
| 18.5 | 64 | 0.658784 |
c719c2fbf99902f8dda33cce99ae748883db934d | 3,276 | py | Python | qft-client-py2.py | bocajspear1/qft | 7a8f3bb5d24bf173489dc4ad6159021e9365e9c4 | [
"MIT"
] | null | null | null | qft-client-py2.py | bocajspear1/qft | 7a8f3bb5d24bf173489dc4ad6159021e9365e9c4 | [
"MIT"
] | null | null | null | qft-client-py2.py | bocajspear1/qft | 7a8f3bb5d24bf173489dc4ad6159021e9365e9c4 | [
"MIT"
] | null | null | null | import socket
import threading
from time import sleep
from threading import Thread
import json
import sys
try:
timeout = 5
if len(sys.argv) > 1:
if (len(sys.argv) -1 ) % 2 != 0:
print "\nInvalid number of arguments\n\n-t Time between tests in seconds\n"
sys.exit()
else:
if sys.argv[1] == "-t" and sys.argv[2].isdigit() and int(sys.argv[2]) > 2:
timeout = int(sys.argv[2])
else:
print "\nInvalid arguments\n\n-t Time between tests in seconds\n"
sys.exit()
print "\nqft-client.py v1.s\n\n"
json_cfg = json.loads(open("client.cfg").read())
print "Config loaded. Starting tests in 1 second...\n\n"
sleep(1)
while True:
for item in json_cfg:
if item["type"] == "tcp":
t = Thread(target=TCPTest, args=( item["remote_address"], item["port"], item["test_for"]))
elif item["type"] == "udp":
t = Thread(target=UDPTest, args=( item["remote_address"], item["port"], item["test_for"]))
            else:
                print "Invalid Type!"
                continue
            t.start()
sleep(timeout)
print "\n=======================================================\n"
except IOError as e:
print("Config file, client.cfg, not found")
sys.exit()
except ValueError as e:
print("Error in config JSON")
sys.exit()
| 30.616822 | 108 | 0.514042 |
c719cc42bfa09eeceed2d7963f0cd71faeceedf7 | 14,277 | py | Python | mdemanipulation/src/mdeoperation.py | modelia/ai-for-model-manipulation | 0b15b9d59b0f6009a5709b20db4e55b7d511ac38 | [
"BSD-3-Clause"
] | null | null | null | mdemanipulation/src/mdeoperation.py | modelia/ai-for-model-manipulation | 0b15b9d59b0f6009a5709b20db4e55b7d511ac38 | [
"BSD-3-Clause"
] | 1 | 2022-01-10T14:16:48.000Z | 2022-01-10T14:16:48.000Z | mdemanipulation/src/mdeoperation.py | modelia/ai-for-model-manipulation | 0b15b9d59b0f6009a5709b20db4e55b7d511ac38 | [
"BSD-3-Clause"
] | null | null | null | #!/usr/bin/env python2
import math
import os
import random
import sys
import time
import logging
import argparse
import numpy as np
from six.moves import xrange
import json
import torch
import torch.nn as nn
import torch.optim as optim
from torch import cuda
from torch.autograd import Variable
from torch.nn.utils import clip_grad_norm
import data_utils
import network
import cPickle as pickle
import datetime
parser = argparse.ArgumentParser()
parser.add_argument('--param_init', type=float, default=0.1,
help='Parameters are initialized over uniform distribution in (-param_init, param_init)')
parser.add_argument('--num_epochs', type=int, default=30, help='number of training epochs') #default 30
parser.add_argument('--learning_rate', type=float, default=0.005, # default 0.005
help='learning rate')
parser.add_argument('--learning_rate_decay_factor', type=float, default=0.8,
help='learning rate decays by this much')
parser.add_argument('--learning_rate_decay_steps', type=int, default=2000, # default=2000
help='decay the learning rate after certain steps')
parser.add_argument('--max_gradient_norm', type=float, default=5.0,
help='clip gradients to this norm')
parser.add_argument('--batch_size', type=int, default=64, #default 100
help='batch size')
parser.add_argument('--max_depth', type=int, default=100,
help='max depth for tree models')
parser.add_argument('--hidden_size', type=int, default=256,
help='size of each model layer')
parser.add_argument('--embedding_size', type=int, default=256,
help='size of the embedding')
parser.add_argument('--dropout_rate', type=float, default=0.75, # default=0.5
help='dropout rate')
parser.add_argument('--num_layers', type=int, default=1, # default=1,
help='number of layers in the model')
parser.add_argument('--source_vocab_size', type=int, default=0,
help='source vocabulary size (0: no limit)')
parser.add_argument('--target_vocab_size', type=int, default=0,
help='target vocabulary size (0: no limit)')
parser.add_argument('--train_dir_checkpoints', type=str, default='/home/lola/nn/checkpoints', # default='../model_ckpts/tree2tree/',
help='training directory - checkpoints')
parser.add_argument('--training_dataset', type=str, default='/home/lola/nn/models_train.json', # default='../data/CS-JS/BL/preprocessed_progs_train.json',
help='training dataset path')
parser.add_argument('--validation_dataset', type=str, default='/home/lola/nn/models_valid.json', #default='../data/CS-JS/BL/preprocessed_progs_valid.json',
help='validation dataset path')
parser.add_argument('--test_dataset', type=str, default='/home/lola/nn/models_test.json', #default='../data/CS-JS/BL/preprocessed_progs_test.json',
help='test dataset path')
parser.add_argument('--load_model', type=str, default='/home/lola/nn/neuralnetwork.pth', # default=None
help='path to the pretrained model')
parser.add_argument('--vocab_filename', type=str, default=None,
help='filename for the vocabularies')
parser.add_argument('--steps_per_checkpoint', type=int, default=500,
help='number of training steps per checkpoint')
parser.add_argument('--max_source_len', type=int, default=115,
help='max length for input')
parser.add_argument('--max_target_len', type=int, default=315,
help='max length for output')
parser.add_argument('--test', action='store_true', help='set to true for testing')
parser.add_argument('--no_attention', action='store_true', help='set to true to disable attention')
parser.add_argument('--no_pf', action='store_true', help='set to true to disable parent attention feeding')
parser.add_argument('--no_train', help='set to true to prevent the network from training', action='store_true')
args = parser.parse_args()
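
# NOTE: main() and the tree2tree model/training code were truncated in this
# dump. Below is a minimal, self-contained sketch of the kind of training
# step the imports suggest (SGD plus gradient clipping); the real model
# lives in network.py and is not reconstructed here.
def main():
    model = nn.Linear(args.embedding_size, args.hidden_size)  # stand-in for the real network
    optimizer = optim.SGD(model.parameters(), lr=args.learning_rate)
    for step in xrange(3):
        inputs = Variable(torch.randn(args.batch_size, args.embedding_size))
        targets = Variable(torch.randn(args.batch_size, args.hidden_size))
        loss = nn.functional.mse_loss(model(inputs), targets)
        optimizer.zero_grad()
        loss.backward()
        clip_grad_norm(model.parameters(), args.max_gradient_norm)
        optimizer.step()
        print('step %d loss %.4f' % (step, loss.data[0]))
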
main()
| 43.794479 | 155 | 0.665826 |
c71a546240f7c071174fd45a93cc36d20aa838b4 | 5,388 | py | Python | barbican/common/resources.py | stanzikratel/barbican-2 | 10fae57c1cae3e140c19069a48f562d62ca53663 | [
"Apache-2.0"
] | null | null | null | barbican/common/resources.py | stanzikratel/barbican-2 | 10fae57c1cae3e140c19069a48f562d62ca53663 | [
"Apache-2.0"
] | null | null | null | barbican/common/resources.py | stanzikratel/barbican-2 | 10fae57c1cae3e140c19069a48f562d62ca53663 | [
"Apache-2.0"
] | null | null | null | # Copyright (c) 2013-2014 Rackspace, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Shared business logic.
"""
from barbican.common import exception
from barbican.common import utils
from barbican.common import validators
from barbican.model import models
LOG = utils.getLogger(__name__)
def get_or_create_tenant(keystone_id, tenant_repo):
"""Returns tenant with matching keystone_id.
Creates it if it does not exist.
:param keystone_id: The external-to-Barbican ID for this tenant.
:param tenant_repo: Tenant repository.
:return: Tenant model instance
"""
tenant = tenant_repo.find_by_keystone_id(keystone_id,
suppress_exception=True)
if not tenant:
LOG.debug('Creating tenant for {0}'.format(keystone_id))
tenant = models.Tenant()
tenant.keystone_id = keystone_id
tenant.status = models.States.ACTIVE
tenant_repo.create_from(tenant)
return tenant
def create_secret(data, tenant, crypto_manager,
secret_repo, tenant_secret_repo, datum_repo, kek_repo,
ok_to_generate=False):
"""Common business logic to create a secret."""
time_keeper = utils.TimeKeeper('Create Secret Resource')
new_secret = models.Secret(data)
time_keeper.mark('after Secret model create')
new_datum = None
content_type = data.get('payload_content_type',
'application/octet-stream')
if 'payload' in data:
payload = data.get('payload')
content_encoding = data.get('payload_content_encoding')
LOG.debug('Encrypting payload...')
new_datum = crypto_manager.encrypt(payload,
content_type,
content_encoding,
new_secret,
tenant,
kek_repo,
enforce_text_only=True)
time_keeper.mark('after encrypt')
elif ok_to_generate:
LOG.debug('Generating new secret...')
# TODO(atiwari): With new typed Order API proposal
# we need to translate new_secret to meta
# currently it is working as meta will have same attributes
new_datum = crypto_manager. \
generate_symmetric_encryption_key(new_secret,
content_type,
tenant,
kek_repo)
time_keeper.mark('after secret generate')
else:
LOG.debug('Creating metadata only for the new secret. '
'A subsequent PUT is required')
# Create Secret entities in datastore.
secret_repo.create_from(new_secret)
time_keeper.mark('after Secret datastore create')
new_assoc = models.TenantSecret()
time_keeper.mark('after TenantSecret model create')
new_assoc.tenant_id = tenant.id
new_assoc.secret_id = new_secret.id
new_assoc.role = "admin"
new_assoc.status = models.States.ACTIVE
tenant_secret_repo.create_from(new_assoc)
time_keeper.mark('after TenantSecret datastore create')
if new_datum:
new_datum.secret_id = new_secret.id
datum_repo.create_from(new_datum)
time_keeper.mark('after Datum datastore create')
time_keeper.dump()
return new_secret
def create_encrypted_datum(secret, payload,
content_type, content_encoding,
tenant, crypto_manager, datum_repo, kek_repo):
"""Modifies the secret to add the plain_text secret information.
:param secret: the secret entity to associate the secret data to
:param payload: secret data to store
:param content_type: payload content mime type
:param content_encoding: payload content encoding
:param tenant: the tenant (entity) who owns the secret
:param crypto_manager: the crypto plugin manager
:param datum_repo: the encrypted datum repository
:param kek_repo: the KEK metadata repository
:retval The response body, None if N/A
"""
if not payload:
raise exception.NoDataToProcess()
if validators.secret_too_big(payload):
raise exception.LimitExceeded()
if secret.encrypted_data:
raise ValueError('Secret already has encrypted data stored for it.')
# Encrypt payload
LOG.debug('Encrypting secret payload...')
new_datum = crypto_manager.encrypt(payload,
content_type,
content_encoding,
secret,
tenant,
kek_repo)
datum_repo.create_from(new_datum)
return new_datum
| 37.416667 | 76 | 0.625464 |
c71ac734d6782f901c4c5400d878122dd11ea416 | 567 | py | Python | 7/prime.py | redfast00/euler | 98fc49a1fcb8b49415cc4384952a6447378bd4f4 | [
"MIT"
] | null | null | null | 7/prime.py | redfast00/euler | 98fc49a1fcb8b49415cc4384952a6447378bd4f4 | [
"MIT"
] | null | null | null | 7/prime.py | redfast00/euler | 98fc49a1fcb8b49415cc4384952a6447378bd4f4 | [
"MIT"
] | null | null | null | from math import sqrt
for prime in stream_primes(10001):
print(prime)
| 22.68 | 45 | 0.560847 |
c71be407b214b6130f22496ab986a3ca003cfe56 | 777 | py | Python | app/utils.py | HealYouDown/flo-league | c729cad1daddfb89e997c101bd2da505b7137d98 | [
"MIT"
] | null | null | null | app/utils.py | HealYouDown/flo-league | c729cad1daddfb89e997c101bd2da505b7137d98 | [
"MIT"
] | 3 | 2021-05-03T19:05:11.000Z | 2021-06-12T09:43:02.000Z | app/utils.py | HealYouDown/flo-league | c729cad1daddfb89e997c101bd2da505b7137d98 | [
"MIT"
] | null | null | null | import datetime
from app.models import Log
from flask_login import current_user
from app.extensions import db
# https://stackoverflow.com/questions/6558535/find-the-date-for-the-first-monday-after-a-given-date
| 28.777778 | 99 | 0.705277 |
c71c00b730b4e3cf508cdefb7968765436ad7ce3 | 68,625 | py | Python | benchmarks/SimResults/combinations_spec_mylocality/oldstuff/cmp_soplexmcfcalculixgcc/power.py | TugberkArkose/MLScheduler | e493b6cbf7b9d29a2c9300d7dd6f0c2f102e4061 | [
"Unlicense"
] | null | null | null | benchmarks/SimResults/combinations_spec_mylocality/oldstuff/cmp_soplexmcfcalculixgcc/power.py | TugberkArkose/MLScheduler | e493b6cbf7b9d29a2c9300d7dd6f0c2f102e4061 | [
"Unlicense"
] | null | null | null | benchmarks/SimResults/combinations_spec_mylocality/oldstuff/cmp_soplexmcfcalculixgcc/power.py | TugberkArkose/MLScheduler | e493b6cbf7b9d29a2c9300d7dd6f0c2f102e4061 | [
"Unlicense"
] | null | null | null | power = {'BUSES': {'Area': 1.33155,
'Bus/Area': 1.33155,
'Bus/Gate Leakage': 0.00662954,
'Bus/Peak Dynamic': 0.0,
'Bus/Runtime Dynamic': 0.0,
'Bus/Subthreshold Leakage': 0.0691322,
'Bus/Subthreshold Leakage with power gating': 0.0259246,
'Gate Leakage': 0.00662954,
'Peak Dynamic': 0.0,
'Runtime Dynamic': 0.0,
'Subthreshold Leakage': 0.0691322,
'Subthreshold Leakage with power gating': 0.0259246},
'Core': [{'Area': 32.6082,
'Execution Unit/Area': 8.2042,
'Execution Unit/Complex ALUs/Area': 0.235435,
'Execution Unit/Complex ALUs/Gate Leakage': 0.0132646,
'Execution Unit/Complex ALUs/Peak Dynamic': 0.181181,
'Execution Unit/Complex ALUs/Runtime Dynamic': 0.344996,
'Execution Unit/Complex ALUs/Subthreshold Leakage': 0.20111,
'Execution Unit/Complex ALUs/Subthreshold Leakage with power gating': 0.0754163,
'Execution Unit/Floating Point Units/Area': 4.6585,
'Execution Unit/Floating Point Units/Gate Leakage': 0.0656156,
'Execution Unit/Floating Point Units/Peak Dynamic': 0.977935,
'Execution Unit/Floating Point Units/Runtime Dynamic': 0.304033,
'Execution Unit/Floating Point Units/Subthreshold Leakage': 0.994829,
'Execution Unit/Floating Point Units/Subthreshold Leakage with power gating': 0.373061,
'Execution Unit/Gate Leakage': 0.122718,
'Execution Unit/Instruction Scheduler/Area': 2.17927,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Area': 0.328073,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Gate Leakage': 0.00115349,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Peak Dynamic': 1.20978,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Runtime Dynamic': 0.486054,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage': 0.017004,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage with power gating': 0.00962066,
'Execution Unit/Instruction Scheduler/Gate Leakage': 0.00730101,
'Execution Unit/Instruction Scheduler/Instruction Window/Area': 1.00996,
'Execution Unit/Instruction Scheduler/Instruction Window/Gate Leakage': 0.00529112,
'Execution Unit/Instruction Scheduler/Instruction Window/Peak Dynamic': 2.07911,
'Execution Unit/Instruction Scheduler/Instruction Window/Runtime Dynamic': 0.841669,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage': 0.0800117,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage with power gating': 0.0455351,
'Execution Unit/Instruction Scheduler/Peak Dynamic': 4.84781,
'Execution Unit/Instruction Scheduler/ROB/Area': 0.841232,
'Execution Unit/Instruction Scheduler/ROB/Gate Leakage': 0.000856399,
'Execution Unit/Instruction Scheduler/ROB/Peak Dynamic': 1.55892,
'Execution Unit/Instruction Scheduler/ROB/Runtime Dynamic': 0.482721,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage': 0.0178624,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage with power gating': 0.00897339,
'Execution Unit/Instruction Scheduler/Runtime Dynamic': 1.81044,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage': 0.114878,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage with power gating': 0.0641291,
'Execution Unit/Integer ALUs/Area': 0.47087,
'Execution Unit/Integer ALUs/Gate Leakage': 0.0265291,
'Execution Unit/Integer ALUs/Peak Dynamic': 0.330514,
'Execution Unit/Integer ALUs/Runtime Dynamic': 0.101344,
'Execution Unit/Integer ALUs/Subthreshold Leakage': 0.40222,
'Execution Unit/Integer ALUs/Subthreshold Leakage with power gating': 0.150833,
'Execution Unit/Peak Dynamic': 7.28395,
'Execution Unit/Register Files/Area': 0.570804,
'Execution Unit/Register Files/Floating Point RF/Area': 0.208131,
'Execution Unit/Register Files/Floating Point RF/Gate Leakage': 0.000232788,
'Execution Unit/Register Files/Floating Point RF/Peak Dynamic': 0.184753,
'Execution Unit/Register Files/Floating Point RF/Runtime Dynamic': 0.0176198,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage': 0.00399698,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage with power gating': 0.00176968,
'Execution Unit/Register Files/Gate Leakage': 0.000622708,
'Execution Unit/Register Files/Integer RF/Area': 0.362673,
'Execution Unit/Register Files/Integer RF/Gate Leakage': 0.00038992,
'Execution Unit/Register Files/Integer RF/Peak Dynamic': 0.195265,
'Execution Unit/Register Files/Integer RF/Runtime Dynamic': 0.130309,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage': 0.00614175,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage with power gating': 0.00246675,
'Execution Unit/Register Files/Peak Dynamic': 0.380018,
'Execution Unit/Register Files/Runtime Dynamic': 0.147929,
'Execution Unit/Register Files/Subthreshold Leakage': 0.0101387,
'Execution Unit/Register Files/Subthreshold Leakage with power gating': 0.00423643,
'Execution Unit/Results Broadcast Bus/Area Overhead': 0.0442632,
'Execution Unit/Results Broadcast Bus/Gate Leakage': 0.00607074,
'Execution Unit/Results Broadcast Bus/Peak Dynamic': 0.521478,
'Execution Unit/Results Broadcast Bus/Runtime Dynamic': 1.08927,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage': 0.0920413,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage with power gating': 0.0345155,
'Execution Unit/Runtime Dynamic': 3.79801,
'Execution Unit/Subthreshold Leakage': 1.83518,
'Execution Unit/Subthreshold Leakage with power gating': 0.709678,
'Gate Leakage': 0.372997,
'Instruction Fetch Unit/Area': 5.86007,
'Instruction Fetch Unit/Branch Predictor/Area': 0.138516,
'Instruction Fetch Unit/Branch Predictor/Chooser/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Chooser/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Chooser/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Chooser/Runtime Dynamic': 0.00272158,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/Gate Leakage': 0.000757657,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Runtime Dynamic': 0.00272158,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Area': 0.0257064,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Gate Leakage': 0.000154548,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Peak Dynamic': 0.0142575,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Runtime Dynamic': 0.0023766,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage': 0.00384344,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage with power gating': 0.00198631,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Area': 0.0151917,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Gate Leakage': 8.00196e-05,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Peak Dynamic': 0.00527447,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Runtime Dynamic': 0.000923356,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage': 0.00181347,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage with power gating': 0.000957045,
'Instruction Fetch Unit/Branch Predictor/Peak Dynamic': 0.0597838,
'Instruction Fetch Unit/Branch Predictor/RAS/Area': 0.0105732,
'Instruction Fetch Unit/Branch Predictor/RAS/Gate Leakage': 4.63858e-05,
'Instruction Fetch Unit/Branch Predictor/RAS/Peak Dynamic': 0.0117602,
'Instruction Fetch Unit/Branch Predictor/RAS/Runtime Dynamic': 0.00187191,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage': 0.000932505,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage with power gating': 0.000494733,
'Instruction Fetch Unit/Branch Predictor/Runtime Dynamic': 0.00969166,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage': 0.0199703,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage with power gating': 0.0103282,
'Instruction Fetch Unit/Branch Target Buffer/Area': 0.64954,
'Instruction Fetch Unit/Branch Target Buffer/Gate Leakage': 0.00272758,
'Instruction Fetch Unit/Branch Target Buffer/Peak Dynamic': 0.177867,
'Instruction Fetch Unit/Branch Target Buffer/Runtime Dynamic': 0.0258763,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage': 0.0811682,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage with power gating': 0.0435357,
'Instruction Fetch Unit/Gate Leakage': 0.0590479,
'Instruction Fetch Unit/Instruction Buffer/Area': 0.0226323,
'Instruction Fetch Unit/Instruction Buffer/Gate Leakage': 6.83558e-05,
'Instruction Fetch Unit/Instruction Buffer/Peak Dynamic': 0.606827,
'Instruction Fetch Unit/Instruction Buffer/Runtime Dynamic': 0.12527,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage': 0.00151885,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage with power gating': 0.000701682,
'Instruction Fetch Unit/Instruction Cache/Area': 3.14635,
'Instruction Fetch Unit/Instruction Cache/Gate Leakage': 0.029931,
'Instruction Fetch Unit/Instruction Cache/Peak Dynamic': 6.43323,
'Instruction Fetch Unit/Instruction Cache/Runtime Dynamic': 0.372767,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage': 0.367022,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage with power gating': 0.180386,
'Instruction Fetch Unit/Instruction Decoder/Area': 1.85799,
'Instruction Fetch Unit/Instruction Decoder/Gate Leakage': 0.0222493,
'Instruction Fetch Unit/Instruction Decoder/Peak Dynamic': 1.37404,
'Instruction Fetch Unit/Instruction Decoder/Runtime Dynamic': 0.425473,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage': 0.442943,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage with power gating': 0.166104,
'Instruction Fetch Unit/Peak Dynamic': 8.96874,
'Instruction Fetch Unit/Runtime Dynamic': 0.959077,
'Instruction Fetch Unit/Subthreshold Leakage': 0.932587,
'Instruction Fetch Unit/Subthreshold Leakage with power gating': 0.408542,
'L2/Area': 4.53318,
'L2/Gate Leakage': 0.015464,
'L2/Peak Dynamic': 0.090727,
'L2/Runtime Dynamic': 0.0127692,
'L2/Subthreshold Leakage': 0.834142,
'L2/Subthreshold Leakage with power gating': 0.401066,
'Load Store Unit/Area': 8.80969,
'Load Store Unit/Data Cache/Area': 6.84535,
'Load Store Unit/Data Cache/Gate Leakage': 0.0279261,
'Load Store Unit/Data Cache/Peak Dynamic': 4.08122,
'Load Store Unit/Data Cache/Runtime Dynamic': 1.38167,
'Load Store Unit/Data Cache/Subthreshold Leakage': 0.527675,
'Load Store Unit/Data Cache/Subthreshold Leakage with power gating': 0.25085,
'Load Store Unit/Gate Leakage': 0.0351387,
'Load Store Unit/LoadQ/Area': 0.0836782,
'Load Store Unit/LoadQ/Gate Leakage': 0.00059896,
'Load Store Unit/LoadQ/Peak Dynamic': 0.0920133,
'Load Store Unit/LoadQ/Runtime Dynamic': 0.0920133,
'Load Store Unit/LoadQ/Subthreshold Leakage': 0.00941961,
'Load Store Unit/LoadQ/Subthreshold Leakage with power gating': 0.00536918,
'Load Store Unit/Peak Dynamic': 4.51749,
'Load Store Unit/Runtime Dynamic': 1.92746,
'Load Store Unit/StoreQ/Area': 0.322079,
'Load Store Unit/StoreQ/Gate Leakage': 0.00329971,
'Load Store Unit/StoreQ/Peak Dynamic': 0.226889,
'Load Store Unit/StoreQ/Runtime Dynamic': 0.453778,
'Load Store Unit/StoreQ/Subthreshold Leakage': 0.0345621,
'Load Store Unit/StoreQ/Subthreshold Leakage with power gating': 0.0197004,
'Load Store Unit/Subthreshold Leakage': 0.591622,
'Load Store Unit/Subthreshold Leakage with power gating': 0.283406,
'Memory Management Unit/Area': 0.434579,
'Memory Management Unit/Dtlb/Area': 0.0879726,
'Memory Management Unit/Dtlb/Gate Leakage': 0.00088729,
'Memory Management Unit/Dtlb/Peak Dynamic': 0.0805237,
'Memory Management Unit/Dtlb/Runtime Dynamic': 0.0817258,
'Memory Management Unit/Dtlb/Subthreshold Leakage': 0.0155699,
'Memory Management Unit/Dtlb/Subthreshold Leakage with power gating': 0.00887485,
'Memory Management Unit/Gate Leakage': 0.00813591,
'Memory Management Unit/Itlb/Area': 0.301552,
'Memory Management Unit/Itlb/Gate Leakage': 0.00393464,
'Memory Management Unit/Itlb/Peak Dynamic': 0.399995,
'Memory Management Unit/Itlb/Runtime Dynamic': 0.061585,
'Memory Management Unit/Itlb/Subthreshold Leakage': 0.0413758,
'Memory Management Unit/Itlb/Subthreshold Leakage with power gating': 0.0235842,
'Memory Management Unit/Peak Dynamic': 0.697703,
'Memory Management Unit/Runtime Dynamic': 0.143311,
'Memory Management Unit/Subthreshold Leakage': 0.0769113,
'Memory Management Unit/Subthreshold Leakage with power gating': 0.0399462,
'Peak Dynamic': 26.1203,
'Renaming Unit/Area': 0.369768,
'Renaming Unit/FP Front End RAT/Area': 0.168486,
'Renaming Unit/FP Front End RAT/Gate Leakage': 0.00489731,
'Renaming Unit/FP Front End RAT/Peak Dynamic': 3.33511,
'Renaming Unit/FP Front End RAT/Runtime Dynamic': 0.644561,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage': 0.0437281,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage with power gating': 0.024925,
'Renaming Unit/Free List/Area': 0.0414755,
'Renaming Unit/Free List/Gate Leakage': 4.15911e-05,
'Renaming Unit/Free List/Peak Dynamic': 0.0401324,
'Renaming Unit/Free List/Runtime Dynamic': 0.0326103,
'Renaming Unit/Free List/Subthreshold Leakage': 0.000670426,
'Renaming Unit/Free List/Subthreshold Leakage with power gating': 0.000377987,
'Renaming Unit/Gate Leakage': 0.00863632,
'Renaming Unit/Int Front End RAT/Area': 0.114751,
'Renaming Unit/Int Front End RAT/Gate Leakage': 0.00038343,
'Renaming Unit/Int Front End RAT/Peak Dynamic': 0.86945,
'Renaming Unit/Int Front End RAT/Runtime Dynamic': 0.237087,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage': 0.00611897,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage with power gating': 0.00348781,
'Renaming Unit/Peak Dynamic': 4.56169,
'Renaming Unit/Runtime Dynamic': 0.914258,
'Renaming Unit/Subthreshold Leakage': 0.070483,
'Renaming Unit/Subthreshold Leakage with power gating': 0.0362779,
'Runtime Dynamic': 7.75489,
'Subthreshold Leakage': 6.21877,
'Subthreshold Leakage with power gating': 2.58311},
{'Area': 32.0201,
'Execution Unit/Area': 7.68434,
'Execution Unit/Complex ALUs/Area': 0.235435,
'Execution Unit/Complex ALUs/Gate Leakage': 0.0132646,
'Execution Unit/Complex ALUs/Peak Dynamic': 0.11996,
'Execution Unit/Complex ALUs/Runtime Dynamic': 0.29691,
'Execution Unit/Complex ALUs/Subthreshold Leakage': 0.20111,
'Execution Unit/Complex ALUs/Subthreshold Leakage with power gating': 0.0754163,
'Execution Unit/Floating Point Units/Area': 4.6585,
'Execution Unit/Floating Point Units/Gate Leakage': 0.0656156,
'Execution Unit/Floating Point Units/Peak Dynamic': 0.64733,
'Execution Unit/Floating Point Units/Runtime Dynamic': 0.304033,
'Execution Unit/Floating Point Units/Subthreshold Leakage': 0.994829,
'Execution Unit/Floating Point Units/Subthreshold Leakage with power gating': 0.373061,
'Execution Unit/Gate Leakage': 0.120359,
'Execution Unit/Instruction Scheduler/Area': 1.66526,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Area': 0.275653,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Gate Leakage': 0.000977433,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Peak Dynamic': 1.04181,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Runtime Dynamic': 0.234954,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage': 0.0143453,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage with power gating': 0.00810519,
'Execution Unit/Instruction Scheduler/Gate Leakage': 0.00568913,
'Execution Unit/Instruction Scheduler/Instruction Window/Area': 0.805223,
'Execution Unit/Instruction Scheduler/Instruction Window/Gate Leakage': 0.00414562,
'Execution Unit/Instruction Scheduler/Instruction Window/Peak Dynamic': 1.6763,
'Execution Unit/Instruction Scheduler/Instruction Window/Runtime Dynamic': 0.378972,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage': 0.0625755,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage with power gating': 0.0355964,
'Execution Unit/Instruction Scheduler/Peak Dynamic': 3.82262,
'Execution Unit/Instruction Scheduler/ROB/Area': 0.584388,
'Execution Unit/Instruction Scheduler/ROB/Gate Leakage': 0.00056608,
'Execution Unit/Instruction Scheduler/ROB/Peak Dynamic': 1.10451,
'Execution Unit/Instruction Scheduler/ROB/Runtime Dynamic': 0.191292,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage': 0.00906853,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage with power gating': 0.00364446,
'Execution Unit/Instruction Scheduler/Runtime Dynamic': 0.805218,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage': 0.0859892,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage with power gating': 0.047346,
'Execution Unit/Integer ALUs/Area': 0.47087,
'Execution Unit/Integer ALUs/Gate Leakage': 0.0265291,
'Execution Unit/Integer ALUs/Peak Dynamic': 0.169475,
'Execution Unit/Integer ALUs/Runtime Dynamic': 0.101344,
'Execution Unit/Integer ALUs/Subthreshold Leakage': 0.40222,
'Execution Unit/Integer ALUs/Subthreshold Leakage with power gating': 0.150833,
'Execution Unit/Peak Dynamic': 5.2954,
'Execution Unit/Register Files/Area': 0.570804,
'Execution Unit/Register Files/Floating Point RF/Area': 0.208131,
'Execution Unit/Register Files/Floating Point RF/Gate Leakage': 0.000232788,
'Execution Unit/Register Files/Floating Point RF/Peak Dynamic': 0.122295,
'Execution Unit/Register Files/Floating Point RF/Runtime Dynamic': 0.00985502,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage': 0.00399698,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage with power gating': 0.00176968,
'Execution Unit/Register Files/Gate Leakage': 0.000622708,
'Execution Unit/Register Files/Integer RF/Area': 0.362673,
'Execution Unit/Register Files/Integer RF/Gate Leakage': 0.00038992,
'Execution Unit/Register Files/Integer RF/Peak Dynamic': 0.116195,
'Execution Unit/Register Files/Integer RF/Runtime Dynamic': 0.0728839,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage': 0.00614175,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage with power gating': 0.00246675,
'Execution Unit/Register Files/Peak Dynamic': 0.23849,
'Execution Unit/Register Files/Runtime Dynamic': 0.0827389,
'Execution Unit/Register Files/Subthreshold Leakage': 0.0101387,
'Execution Unit/Register Files/Subthreshold Leakage with power gating': 0.00423643,
'Execution Unit/Results Broadcast Bus/Area Overhead': 0.0390912,
'Execution Unit/Results Broadcast Bus/Gate Leakage': 0.00537402,
'Execution Unit/Results Broadcast Bus/Peak Dynamic': 0.274787,
'Execution Unit/Results Broadcast Bus/Runtime Dynamic': 0.565173,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage': 0.081478,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage with power gating': 0.0305543,
'Execution Unit/Runtime Dynamic': 2.15542,
'Execution Unit/Subthreshold Leakage': 1.79543,
'Execution Unit/Subthreshold Leakage with power gating': 0.688821,
'Gate Leakage': 0.368936,
'Instruction Fetch Unit/Area': 5.85939,
'Instruction Fetch Unit/Branch Predictor/Area': 0.138516,
'Instruction Fetch Unit/Branch Predictor/Chooser/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Chooser/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Chooser/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Chooser/Runtime Dynamic': 0.00133282,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/Gate Leakage': 0.000757657,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Runtime Dynamic': 0.00133282,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Area': 0.0257064,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Gate Leakage': 0.000154548,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Peak Dynamic': 0.0142575,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Runtime Dynamic': 0.00118494,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage': 0.00384344,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage with power gating': 0.00198631,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Area': 0.0151917,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Gate Leakage': 8.00196e-05,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Peak Dynamic': 0.00527447,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Runtime Dynamic': 0.000471861,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage': 0.00181347,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage with power gating': 0.000957045,
'Instruction Fetch Unit/Branch Predictor/Peak Dynamic': 0.0597838,
'Instruction Fetch Unit/Branch Predictor/RAS/Area': 0.0105732,
'Instruction Fetch Unit/Branch Predictor/RAS/Gate Leakage': 4.63858e-05,
'Instruction Fetch Unit/Branch Predictor/RAS/Peak Dynamic': 0.0117602,
'Instruction Fetch Unit/Branch Predictor/RAS/Runtime Dynamic': 0.00104698,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage': 0.000932505,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage with power gating': 0.000494733,
'Instruction Fetch Unit/Branch Predictor/Runtime Dynamic': 0.00489756,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage': 0.0199703,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage with power gating': 0.0103282,
'Instruction Fetch Unit/Branch Target Buffer/Area': 0.64954,
'Instruction Fetch Unit/Branch Target Buffer/Gate Leakage': 0.00272758,
'Instruction Fetch Unit/Branch Target Buffer/Peak Dynamic': 0.177867,
'Instruction Fetch Unit/Branch Target Buffer/Runtime Dynamic': 0.0119197,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage': 0.0811682,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage with power gating': 0.0435357,
'Instruction Fetch Unit/Gate Leakage': 0.0589979,
'Instruction Fetch Unit/Instruction Buffer/Area': 0.0226323,
'Instruction Fetch Unit/Instruction Buffer/Gate Leakage': 6.83558e-05,
'Instruction Fetch Unit/Instruction Buffer/Peak Dynamic': 0.606827,
'Instruction Fetch Unit/Instruction Buffer/Runtime Dynamic': 0.0700652,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage': 0.00151885,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage with power gating': 0.000701682,
'Instruction Fetch Unit/Instruction Cache/Area': 3.14635,
'Instruction Fetch Unit/Instruction Cache/Gate Leakage': 0.029931,
'Instruction Fetch Unit/Instruction Cache/Peak Dynamic': 4.45674,
'Instruction Fetch Unit/Instruction Cache/Runtime Dynamic': 0.197355,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage': 0.367022,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage with power gating': 0.180386,
'Instruction Fetch Unit/Instruction Decoder/Area': 1.85799,
'Instruction Fetch Unit/Instruction Decoder/Gate Leakage': 0.0222493,
'Instruction Fetch Unit/Instruction Decoder/Peak Dynamic': 1.37404,
'Instruction Fetch Unit/Instruction Decoder/Runtime Dynamic': 0.237973,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage': 0.442943,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage with power gating': 0.166104,
'Instruction Fetch Unit/Peak Dynamic': 6.89155,
'Instruction Fetch Unit/Runtime Dynamic': 0.522211,
'Instruction Fetch Unit/Subthreshold Leakage': 0.932286,
'Instruction Fetch Unit/Subthreshold Leakage with power gating': 0.40843,
'L2/Area': 4.53318,
'L2/Gate Leakage': 0.015464,
'L2/Peak Dynamic': 0.0504299,
'L2/Runtime Dynamic': 0.0069462,
'L2/Subthreshold Leakage': 0.834142,
'L2/Subthreshold Leakage with power gating': 0.401066,
'Load Store Unit/Area': 8.80901,
'Load Store Unit/Data Cache/Area': 6.84535,
'Load Store Unit/Data Cache/Gate Leakage': 0.0279261,
'Load Store Unit/Data Cache/Peak Dynamic': 2.70196,
'Load Store Unit/Data Cache/Runtime Dynamic': 0.713329,
'Load Store Unit/Data Cache/Subthreshold Leakage': 0.527675,
'Load Store Unit/Data Cache/Subthreshold Leakage with power gating': 0.25085,
'Load Store Unit/Gate Leakage': 0.0350888,
'Load Store Unit/LoadQ/Area': 0.0836782,
'Load Store Unit/LoadQ/Gate Leakage': 0.00059896,
'Load Store Unit/LoadQ/Peak Dynamic': 0.0473909,
'Load Store Unit/LoadQ/Runtime Dynamic': 0.0473909,
'Load Store Unit/LoadQ/Subthreshold Leakage': 0.00941961,
'Load Store Unit/LoadQ/Subthreshold Leakage with power gating': 0.00536918,
'Load Store Unit/Peak Dynamic': 2.92575,
'Load Store Unit/Runtime Dynamic': 0.994436,
'Load Store Unit/StoreQ/Area': 0.322079,
'Load Store Unit/StoreQ/Gate Leakage': 0.00329971,
'Load Store Unit/StoreQ/Peak Dynamic': 0.116858,
'Load Store Unit/StoreQ/Runtime Dynamic': 0.233716,
'Load Store Unit/StoreQ/Subthreshold Leakage': 0.0345621,
'Load Store Unit/StoreQ/Subthreshold Leakage with power gating': 0.0197004,
'Load Store Unit/Subthreshold Leakage': 0.591321,
'Load Store Unit/Subthreshold Leakage with power gating': 0.283293,
'Memory Management Unit/Area': 0.4339,
'Memory Management Unit/Dtlb/Area': 0.0879726,
'Memory Management Unit/Dtlb/Gate Leakage': 0.00088729,
'Memory Management Unit/Dtlb/Peak Dynamic': 0.0414733,
'Memory Management Unit/Dtlb/Runtime Dynamic': 0.0421754,
'Memory Management Unit/Dtlb/Subthreshold Leakage': 0.0155699,
'Memory Management Unit/Dtlb/Subthreshold Leakage with power gating': 0.00887485,
'Memory Management Unit/Gate Leakage': 0.00808595,
'Memory Management Unit/Itlb/Area': 0.301552,
'Memory Management Unit/Itlb/Gate Leakage': 0.00393464,
'Memory Management Unit/Itlb/Peak Dynamic': 0.277104,
'Memory Management Unit/Itlb/Runtime Dynamic': 0.0325171,
'Memory Management Unit/Itlb/Subthreshold Leakage': 0.0413758,
'Memory Management Unit/Itlb/Subthreshold Leakage with power gating': 0.0235842,
'Memory Management Unit/Peak Dynamic': 0.504457,
'Memory Management Unit/Runtime Dynamic': 0.0746925,
'Memory Management Unit/Subthreshold Leakage': 0.0766103,
'Memory Management Unit/Subthreshold Leakage with power gating': 0.0398333,
'Peak Dynamic': 19.2571,
'Renaming Unit/Area': 0.303608,
'Renaming Unit/FP Front End RAT/Area': 0.131045,
'Renaming Unit/FP Front End RAT/Gate Leakage': 0.00351123,
'Renaming Unit/FP Front End RAT/Peak Dynamic': 2.51468,
'Renaming Unit/FP Front End RAT/Runtime Dynamic': 0.321701,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage': 0.0308571,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage with power gating': 0.0175885,
'Renaming Unit/Free List/Area': 0.0340654,
'Renaming Unit/Free List/Gate Leakage': 2.5481e-05,
'Renaming Unit/Free List/Peak Dynamic': 0.0306032,
'Renaming Unit/Free List/Runtime Dynamic': 0.0145155,
'Renaming Unit/Free List/Subthreshold Leakage': 0.000370144,
'Renaming Unit/Free List/Subthreshold Leakage with power gating': 0.000201064,
'Renaming Unit/Gate Leakage': 0.00708398,
'Renaming Unit/Int Front End RAT/Area': 0.0941223,
'Renaming Unit/Int Front End RAT/Gate Leakage': 0.000283242,
'Renaming Unit/Int Front End RAT/Peak Dynamic': 0.731965,
'Renaming Unit/Int Front End RAT/Runtime Dynamic': 0.111753,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage': 0.00435488,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage with power gating': 0.00248228,
'Renaming Unit/Peak Dynamic': 3.58947,
'Renaming Unit/Runtime Dynamic': 0.44797,
'Renaming Unit/Subthreshold Leakage': 0.0552466,
'Renaming Unit/Subthreshold Leakage with power gating': 0.0276461,
'Runtime Dynamic': 4.20167,
'Subthreshold Leakage': 6.16288,
'Subthreshold Leakage with power gating': 2.55328},
{'Area': 32.0201,
'Execution Unit/Area': 7.68434,
'Execution Unit/Complex ALUs/Area': 0.235435,
'Execution Unit/Complex ALUs/Gate Leakage': 0.0132646,
'Execution Unit/Complex ALUs/Peak Dynamic': 0.0065108,
'Execution Unit/Complex ALUs/Runtime Dynamic': 0.207803,
'Execution Unit/Complex ALUs/Subthreshold Leakage': 0.20111,
'Execution Unit/Complex ALUs/Subthreshold Leakage with power gating': 0.0754163,
'Execution Unit/Floating Point Units/Area': 4.6585,
'Execution Unit/Floating Point Units/Gate Leakage': 0.0656156,
'Execution Unit/Floating Point Units/Peak Dynamic': 0.0335685,
'Execution Unit/Floating Point Units/Runtime Dynamic': 0.304033,
'Execution Unit/Floating Point Units/Subthreshold Leakage': 0.994829,
'Execution Unit/Floating Point Units/Subthreshold Leakage with power gating': 0.373061,
'Execution Unit/Gate Leakage': 0.120359,
'Execution Unit/Instruction Scheduler/Area': 1.66526,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Area': 0.275653,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Gate Leakage': 0.000977433,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Peak Dynamic': 1.04181,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Runtime Dynamic': 0.102536,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage': 0.0143453,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage with power gating': 0.00810519,
'Execution Unit/Instruction Scheduler/Gate Leakage': 0.00568913,
'Execution Unit/Instruction Scheduler/Instruction Window/Area': 0.805223,
'Execution Unit/Instruction Scheduler/Instruction Window/Gate Leakage': 0.00414562,
'Execution Unit/Instruction Scheduler/Instruction Window/Peak Dynamic': 1.6763,
'Execution Unit/Instruction Scheduler/Instruction Window/Runtime Dynamic': 0.165386,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage': 0.0625755,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage with power gating': 0.0355964,
'Execution Unit/Instruction Scheduler/Peak Dynamic': 3.82262,
'Execution Unit/Instruction Scheduler/ROB/Area': 0.584388,
'Execution Unit/Instruction Scheduler/ROB/Gate Leakage': 0.00056608,
'Execution Unit/Instruction Scheduler/ROB/Peak Dynamic': 1.10451,
'Execution Unit/Instruction Scheduler/ROB/Runtime Dynamic': 0.0834813,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage': 0.00906853,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage with power gating': 0.00364446,
'Execution Unit/Instruction Scheduler/Runtime Dynamic': 0.351403,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage': 0.0859892,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage with power gating': 0.047346,
'Execution Unit/Integer ALUs/Area': 0.47087,
'Execution Unit/Integer ALUs/Gate Leakage': 0.0265291,
'Execution Unit/Integer ALUs/Peak Dynamic': 0.112125,
'Execution Unit/Integer ALUs/Runtime Dynamic': 0.101344,
'Execution Unit/Integer ALUs/Subthreshold Leakage': 0.40222,
'Execution Unit/Integer ALUs/Subthreshold Leakage with power gating': 0.150833,
'Execution Unit/Peak Dynamic': 4.10223,
'Execution Unit/Register Files/Area': 0.570804,
'Execution Unit/Register Files/Floating Point RF/Area': 0.208131,
'Execution Unit/Register Files/Floating Point RF/Gate Leakage': 0.000232788,
'Execution Unit/Register Files/Floating Point RF/Peak Dynamic': 0.00634181,
'Execution Unit/Register Files/Floating Point RF/Runtime Dynamic': 0.0043008,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage': 0.00399698,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage with power gating': 0.00176968,
'Execution Unit/Register Files/Gate Leakage': 0.000622708,
'Execution Unit/Register Files/Integer RF/Area': 0.362673,
'Execution Unit/Register Files/Integer RF/Gate Leakage': 0.00038992,
'Execution Unit/Register Files/Integer RF/Peak Dynamic': 0.0336025,
'Execution Unit/Register Files/Integer RF/Runtime Dynamic': 0.0318071,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage': 0.00614175,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage with power gating': 0.00246675,
'Execution Unit/Register Files/Peak Dynamic': 0.0399443,
'Execution Unit/Register Files/Runtime Dynamic': 0.0361079,
'Execution Unit/Register Files/Subthreshold Leakage': 0.0101387,
'Execution Unit/Register Files/Subthreshold Leakage with power gating': 0.00423643,
'Execution Unit/Results Broadcast Bus/Area Overhead': 0.0390912,
'Execution Unit/Results Broadcast Bus/Gate Leakage': 0.00537402,
'Execution Unit/Results Broadcast Bus/Peak Dynamic': 0.0724192,
'Execution Unit/Results Broadcast Bus/Runtime Dynamic': 0.179703,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage': 0.081478,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage with power gating': 0.0305543,
'Execution Unit/Runtime Dynamic': 1.18039,
'Execution Unit/Subthreshold Leakage': 1.79543,
'Execution Unit/Subthreshold Leakage with power gating': 0.688821,
'Gate Leakage': 0.368936,
'Instruction Fetch Unit/Area': 5.85939,
'Instruction Fetch Unit/Branch Predictor/Area': 0.138516,
'Instruction Fetch Unit/Branch Predictor/Chooser/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Chooser/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Chooser/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Chooser/Runtime Dynamic': 0.00112696,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/Gate Leakage': 0.000757657,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Runtime Dynamic': 0.00112696,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Area': 0.0257064,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Gate Leakage': 0.000154548,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Peak Dynamic': 0.0142575,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Runtime Dynamic': 0.000995662,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage': 0.00384344,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage with power gating': 0.00198631,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Area': 0.0151917,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Gate Leakage': 8.00196e-05,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Peak Dynamic': 0.00527447,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Runtime Dynamic': 0.000393137,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage': 0.00181347,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage with power gating': 0.000957045,
'Instruction Fetch Unit/Branch Predictor/Peak Dynamic': 0.0597838,
'Instruction Fetch Unit/Branch Predictor/RAS/Area': 0.0105732,
'Instruction Fetch Unit/Branch Predictor/RAS/Gate Leakage': 4.63858e-05,
'Instruction Fetch Unit/Branch Predictor/RAS/Peak Dynamic': 0.0117602,
'Instruction Fetch Unit/Branch Predictor/RAS/Runtime Dynamic': 0.000456911,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage': 0.000932505,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage with power gating': 0.000494733,
'Instruction Fetch Unit/Branch Predictor/Runtime Dynamic': 0.0037065,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage': 0.0199703,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage with power gating': 0.0103282,
'Instruction Fetch Unit/Branch Target Buffer/Area': 0.64954,
'Instruction Fetch Unit/Branch Target Buffer/Gate Leakage': 0.00272758,
'Instruction Fetch Unit/Branch Target Buffer/Peak Dynamic': 0.177867,
'Instruction Fetch Unit/Branch Target Buffer/Runtime Dynamic': 0.0103022,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage': 0.0811682,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage with power gating': 0.0435357,
'Instruction Fetch Unit/Gate Leakage': 0.0589979,
'Instruction Fetch Unit/Instruction Buffer/Area': 0.0226323,
'Instruction Fetch Unit/Instruction Buffer/Gate Leakage': 6.83558e-05,
'Instruction Fetch Unit/Instruction Buffer/Peak Dynamic': 0.606827,
'Instruction Fetch Unit/Instruction Buffer/Runtime Dynamic': 0.0305769,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage': 0.00151885,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage with power gating': 0.000701682,
'Instruction Fetch Unit/Instruction Cache/Area': 3.14635,
'Instruction Fetch Unit/Instruction Cache/Gate Leakage': 0.029931,
'Instruction Fetch Unit/Instruction Cache/Peak Dynamic': 1.94496,
'Instruction Fetch Unit/Instruction Cache/Runtime Dynamic': 0.0958958,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage': 0.367022,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage with power gating': 0.180386,
'Instruction Fetch Unit/Instruction Decoder/Area': 1.85799,
'Instruction Fetch Unit/Instruction Decoder/Gate Leakage': 0.0222493,
'Instruction Fetch Unit/Instruction Decoder/Peak Dynamic': 1.37404,
'Instruction Fetch Unit/Instruction Decoder/Runtime Dynamic': 0.103853,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage': 0.442943,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage with power gating': 0.166104,
'Instruction Fetch Unit/Peak Dynamic': 4.25787,
'Instruction Fetch Unit/Runtime Dynamic': 0.244335,
'Instruction Fetch Unit/Subthreshold Leakage': 0.932286,
'Instruction Fetch Unit/Subthreshold Leakage with power gating': 0.40843,
'L2/Area': 4.53318,
'L2/Gate Leakage': 0.015464,
'L2/Peak Dynamic': 0.0538499,
'L2/Runtime Dynamic': 0.0148173,
'L2/Subthreshold Leakage': 0.834142,
'L2/Subthreshold Leakage with power gating': 0.401066,
'Load Store Unit/Area': 8.80901,
'Load Store Unit/Data Cache/Area': 6.84535,
'Load Store Unit/Data Cache/Gate Leakage': 0.0279261,
'Load Store Unit/Data Cache/Peak Dynamic': 2.02873,
'Load Store Unit/Data Cache/Runtime Dynamic': 0.40237,
'Load Store Unit/Data Cache/Subthreshold Leakage': 0.527675,
'Load Store Unit/Data Cache/Subthreshold Leakage with power gating': 0.25085,
'Load Store Unit/Gate Leakage': 0.0350888,
'Load Store Unit/LoadQ/Area': 0.0836782,
'Load Store Unit/LoadQ/Gate Leakage': 0.00059896,
'Load Store Unit/LoadQ/Peak Dynamic': 0.0256105,
'Load Store Unit/LoadQ/Runtime Dynamic': 0.0256104,
'Load Store Unit/LoadQ/Subthreshold Leakage': 0.00941961,
'Load Store Unit/LoadQ/Subthreshold Leakage with power gating': 0.00536918,
'Load Store Unit/Peak Dynamic': 2.14967,
'Load Store Unit/Runtime Dynamic': 0.554282,
'Load Store Unit/StoreQ/Area': 0.322079,
'Load Store Unit/StoreQ/Gate Leakage': 0.00329971,
'Load Store Unit/StoreQ/Peak Dynamic': 0.063151,
'Load Store Unit/StoreQ/Runtime Dynamic': 0.126302,
'Load Store Unit/StoreQ/Subthreshold Leakage': 0.0345621,
'Load Store Unit/StoreQ/Subthreshold Leakage with power gating': 0.0197004,
'Load Store Unit/Subthreshold Leakage': 0.591321,
'Load Store Unit/Subthreshold Leakage with power gating': 0.283293,
'Memory Management Unit/Area': 0.4339,
'Memory Management Unit/Dtlb/Area': 0.0879726,
'Memory Management Unit/Dtlb/Gate Leakage': 0.00088729,
'Memory Management Unit/Dtlb/Peak Dynamic': 0.0224125,
'Memory Management Unit/Dtlb/Runtime Dynamic': 0.0232096,
'Memory Management Unit/Dtlb/Subthreshold Leakage': 0.0155699,
'Memory Management Unit/Dtlb/Subthreshold Leakage with power gating': 0.00887485,
'Memory Management Unit/Gate Leakage': 0.00808595,
'Memory Management Unit/Itlb/Area': 0.301552,
'Memory Management Unit/Itlb/Gate Leakage': 0.00393464,
'Memory Management Unit/Itlb/Peak Dynamic': 0.12093,
'Memory Management Unit/Itlb/Runtime Dynamic': 0.0157552,
'Memory Management Unit/Itlb/Subthreshold Leakage': 0.0413758,
'Memory Management Unit/Itlb/Subthreshold Leakage with power gating': 0.0235842,
'Memory Management Unit/Peak Dynamic': 0.31554,
'Memory Management Unit/Runtime Dynamic': 0.0389648,
'Memory Management Unit/Subthreshold Leakage': 0.0766103,
'Memory Management Unit/Subthreshold Leakage with power gating': 0.0398333,
'Peak Dynamic': 14.4686,
'Renaming Unit/Area': 0.303608,
'Renaming Unit/FP Front End RAT/Area': 0.131045,
'Renaming Unit/FP Front End RAT/Gate Leakage': 0.00351123,
'Renaming Unit/FP Front End RAT/Peak Dynamic': 2.51468,
'Renaming Unit/FP Front End RAT/Runtime Dynamic': 0.0166828,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage': 0.0308571,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage with power gating': 0.0175885,
'Renaming Unit/Free List/Area': 0.0340654,
'Renaming Unit/Free List/Gate Leakage': 2.5481e-05,
'Renaming Unit/Free List/Peak Dynamic': 0.0306032,
'Renaming Unit/Free List/Runtime Dynamic': 0.00482915,
'Renaming Unit/Free List/Subthreshold Leakage': 0.000370144,
'Renaming Unit/Free List/Subthreshold Leakage with power gating': 0.000201064,
'Renaming Unit/Gate Leakage': 0.00708398,
'Renaming Unit/Int Front End RAT/Area': 0.0941223,
'Renaming Unit/Int Front End RAT/Gate Leakage': 0.000283242,
'Renaming Unit/Int Front End RAT/Peak Dynamic': 0.731965,
'Renaming Unit/Int Front End RAT/Runtime Dynamic': 0.0520126,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage': 0.00435488,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage with power gating': 0.00248228,
'Renaming Unit/Peak Dynamic': 3.58947,
'Renaming Unit/Runtime Dynamic': 0.0735245,
'Renaming Unit/Subthreshold Leakage': 0.0552466,
'Renaming Unit/Subthreshold Leakage with power gating': 0.0276461,
'Runtime Dynamic': 2.10632,
'Subthreshold Leakage': 6.16288,
'Subthreshold Leakage with power gating': 2.55328},
{'Area': 32.0201,
'Execution Unit/Area': 7.68434,
'Execution Unit/Complex ALUs/Area': 0.235435,
'Execution Unit/Complex ALUs/Gate Leakage': 0.0132646,
'Execution Unit/Complex ALUs/Peak Dynamic': 0.00682822,
'Execution Unit/Complex ALUs/Runtime Dynamic': 0.208052,
'Execution Unit/Complex ALUs/Subthreshold Leakage': 0.20111,
'Execution Unit/Complex ALUs/Subthreshold Leakage with power gating': 0.0754163,
'Execution Unit/Floating Point Units/Area': 4.6585,
'Execution Unit/Floating Point Units/Gate Leakage': 0.0656156,
'Execution Unit/Floating Point Units/Peak Dynamic': 0.0364806,
'Execution Unit/Floating Point Units/Runtime Dynamic': 0.304033,
'Execution Unit/Floating Point Units/Subthreshold Leakage': 0.994829,
'Execution Unit/Floating Point Units/Subthreshold Leakage with power gating': 0.373061,
'Execution Unit/Gate Leakage': 0.120359,
'Execution Unit/Instruction Scheduler/Area': 1.66526,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Area': 0.275653,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Gate Leakage': 0.000977433,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Peak Dynamic': 1.04181,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Runtime Dynamic': 0.106185,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage': 0.0143453,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage with power gating': 0.00810519,
'Execution Unit/Instruction Scheduler/Gate Leakage': 0.00568913,
'Execution Unit/Instruction Scheduler/Instruction Window/Area': 0.805223,
'Execution Unit/Instruction Scheduler/Instruction Window/Gate Leakage': 0.00414562,
'Execution Unit/Instruction Scheduler/Instruction Window/Peak Dynamic': 1.6763,
'Execution Unit/Instruction Scheduler/Instruction Window/Runtime Dynamic': 0.171272,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage': 0.0625755,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage with power gating': 0.0355964,
'Execution Unit/Instruction Scheduler/Peak Dynamic': 3.82262,
'Execution Unit/Instruction Scheduler/ROB/Area': 0.584388,
'Execution Unit/Instruction Scheduler/ROB/Gate Leakage': 0.00056608,
'Execution Unit/Instruction Scheduler/ROB/Peak Dynamic': 1.10451,
'Execution Unit/Instruction Scheduler/ROB/Runtime Dynamic': 0.0864526,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage': 0.00906853,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage with power gating': 0.00364446,
'Execution Unit/Instruction Scheduler/Runtime Dynamic': 0.36391,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage': 0.0859892,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage with power gating': 0.047346,
'Execution Unit/Integer ALUs/Area': 0.47087,
'Execution Unit/Integer ALUs/Gate Leakage': 0.0265291,
'Execution Unit/Integer ALUs/Peak Dynamic': 0.115853,
'Execution Unit/Integer ALUs/Runtime Dynamic': 0.101344,
'Execution Unit/Integer ALUs/Subthreshold Leakage': 0.40222,
'Execution Unit/Integer ALUs/Subthreshold Leakage with power gating': 0.150833,
'Execution Unit/Peak Dynamic': 4.11398,
'Execution Unit/Register Files/Area': 0.570804,
'Execution Unit/Register Files/Floating Point RF/Area': 0.208131,
'Execution Unit/Register Files/Floating Point RF/Gate Leakage': 0.000232788,
'Execution Unit/Register Files/Floating Point RF/Peak Dynamic': 0.00689197,
'Execution Unit/Register Files/Floating Point RF/Runtime Dynamic': 0.00445387,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage': 0.00399698,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage with power gating': 0.00176968,
'Execution Unit/Register Files/Gate Leakage': 0.000622708,
'Execution Unit/Register Files/Integer RF/Area': 0.362673,
'Execution Unit/Register Files/Integer RF/Gate Leakage': 0.00038992,
'Execution Unit/Register Files/Integer RF/Peak Dynamic': 0.0347798,
'Execution Unit/Register Files/Integer RF/Runtime Dynamic': 0.0329391,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage': 0.00614175,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage with power gating': 0.00246675,
'Execution Unit/Register Files/Peak Dynamic': 0.0416718,
'Execution Unit/Register Files/Runtime Dynamic': 0.037393,
'Execution Unit/Register Files/Subthreshold Leakage': 0.0101387,
'Execution Unit/Register Files/Subthreshold Leakage with power gating': 0.00423643,
'Execution Unit/Results Broadcast Bus/Area Overhead': 0.0390912,
'Execution Unit/Results Broadcast Bus/Gate Leakage': 0.00537402,
'Execution Unit/Results Broadcast Bus/Peak Dynamic': 0.0749788,
'Execution Unit/Results Broadcast Bus/Runtime Dynamic': 0.202833,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage': 0.081478,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage with power gating': 0.0305543,
'Execution Unit/Runtime Dynamic': 1.21756,
'Execution Unit/Subthreshold Leakage': 1.79543,
'Execution Unit/Subthreshold Leakage with power gating': 0.688821,
'Gate Leakage': 0.368936,
'Instruction Fetch Unit/Area': 5.85939,
'Instruction Fetch Unit/Branch Predictor/Area': 0.138516,
'Instruction Fetch Unit/Branch Predictor/Chooser/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Chooser/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Chooser/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Chooser/Runtime Dynamic': 0.000625326,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/Gate Leakage': 0.000757657,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Runtime Dynamic': 0.000625326,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Area': 0.0257064,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Gate Leakage': 0.000154548,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Peak Dynamic': 0.0142575,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Runtime Dynamic': 0.000550159,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage': 0.00384344,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage with power gating': 0.00198631,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Area': 0.0151917,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Gate Leakage': 8.00196e-05,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Peak Dynamic': 0.00527447,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Runtime Dynamic': 0.000215984,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage': 0.00181347,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage with power gating': 0.000957045,
'Instruction Fetch Unit/Branch Predictor/Peak Dynamic': 0.0597838,
'Instruction Fetch Unit/Branch Predictor/RAS/Area': 0.0105732,
'Instruction Fetch Unit/Branch Predictor/RAS/Gate Leakage': 4.63858e-05,
'Instruction Fetch Unit/Branch Predictor/RAS/Peak Dynamic': 0.0117602,
'Instruction Fetch Unit/Branch Predictor/RAS/Runtime Dynamic': 0.000473173,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage': 0.000932505,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage with power gating': 0.000494733,
'Instruction Fetch Unit/Branch Predictor/Runtime Dynamic': 0.00227399,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage': 0.0199703,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage with power gating': 0.0103282,
'Instruction Fetch Unit/Branch Target Buffer/Area': 0.64954,
'Instruction Fetch Unit/Branch Target Buffer/Gate Leakage': 0.00272758,
'Instruction Fetch Unit/Branch Target Buffer/Peak Dynamic': 0.177867,
'Instruction Fetch Unit/Branch Target Buffer/Runtime Dynamic': 0.00579905,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage': 0.0811682,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage with power gating': 0.0435357,
'Instruction Fetch Unit/Gate Leakage': 0.0589979,
'Instruction Fetch Unit/Instruction Buffer/Area': 0.0226323,
'Instruction Fetch Unit/Instruction Buffer/Gate Leakage': 6.83558e-05,
'Instruction Fetch Unit/Instruction Buffer/Peak Dynamic': 0.606827,
'Instruction Fetch Unit/Instruction Buffer/Runtime Dynamic': 0.0316652,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage': 0.00151885,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage with power gating': 0.000701682,
'Instruction Fetch Unit/Instruction Cache/Area': 3.14635,
'Instruction Fetch Unit/Instruction Cache/Gate Leakage': 0.029931,
'Instruction Fetch Unit/Instruction Cache/Peak Dynamic': 2.01418,
'Instruction Fetch Unit/Instruction Cache/Runtime Dynamic': 0.0689457,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage': 0.367022,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage with power gating': 0.180386,
'Instruction Fetch Unit/Instruction Decoder/Area': 1.85799,
'Instruction Fetch Unit/Instruction Decoder/Gate Leakage': 0.0222493,
'Instruction Fetch Unit/Instruction Decoder/Peak Dynamic': 1.37404,
'Instruction Fetch Unit/Instruction Decoder/Runtime Dynamic': 0.107549,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage': 0.442943,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage with power gating': 0.166104,
'Instruction Fetch Unit/Peak Dynamic': 4.33045,
'Instruction Fetch Unit/Runtime Dynamic': 0.216233,
'Instruction Fetch Unit/Subthreshold Leakage': 0.932286,
'Instruction Fetch Unit/Subthreshold Leakage with power gating': 0.40843,
'L2/Area': 4.53318,
'L2/Gate Leakage': 0.015464,
'L2/Peak Dynamic': 0.0418086,
'L2/Runtime Dynamic': 0.00989266,
'L2/Subthreshold Leakage': 0.834142,
'L2/Subthreshold Leakage with power gating': 0.401066,
'Load Store Unit/Area': 8.80901,
'Load Store Unit/Data Cache/Area': 6.84535,
'Load Store Unit/Data Cache/Gate Leakage': 0.0279261,
'Load Store Unit/Data Cache/Peak Dynamic': 2.36015,
'Load Store Unit/Data Cache/Runtime Dynamic': 0.554162,
'Load Store Unit/Data Cache/Subthreshold Leakage': 0.527675,
'Load Store Unit/Data Cache/Subthreshold Leakage with power gating': 0.25085,
'Load Store Unit/Gate Leakage': 0.0350888,
'Load Store Unit/LoadQ/Area': 0.0836782,
'Load Store Unit/LoadQ/Gate Leakage': 0.00059896,
'Load Store Unit/LoadQ/Peak Dynamic': 0.0363327,
'Load Store Unit/LoadQ/Runtime Dynamic': 0.0363327,
'Load Store Unit/LoadQ/Subthreshold Leakage': 0.00941961,
'Load Store Unit/LoadQ/Subthreshold Leakage with power gating': 0.00536918,
'Load Store Unit/Peak Dynamic': 2.53172,
'Load Store Unit/Runtime Dynamic': 0.769675,
'Load Store Unit/StoreQ/Area': 0.322079,
'Load Store Unit/StoreQ/Gate Leakage': 0.00329971,
'Load Store Unit/StoreQ/Peak Dynamic': 0.0895903,
'Load Store Unit/StoreQ/Runtime Dynamic': 0.17918,
'Load Store Unit/StoreQ/Subthreshold Leakage': 0.0345621,
'Load Store Unit/StoreQ/Subthreshold Leakage with power gating': 0.0197004,
'Load Store Unit/Subthreshold Leakage': 0.591321,
'Load Store Unit/Subthreshold Leakage with power gating': 0.283293,
'Memory Management Unit/Area': 0.4339,
'Memory Management Unit/Dtlb/Area': 0.0879726,
'Memory Management Unit/Dtlb/Gate Leakage': 0.00088729,
'Memory Management Unit/Dtlb/Peak Dynamic': 0.0317959,
'Memory Management Unit/Dtlb/Runtime Dynamic': 0.0324228,
'Memory Management Unit/Dtlb/Subthreshold Leakage': 0.0155699,
'Memory Management Unit/Dtlb/Subthreshold Leakage with power gating': 0.00887485,
'Memory Management Unit/Gate Leakage': 0.00808595,
'Memory Management Unit/Itlb/Area': 0.301552,
'Memory Management Unit/Itlb/Gate Leakage': 0.00393464,
'Memory Management Unit/Itlb/Peak Dynamic': 0.125234,
'Memory Management Unit/Itlb/Runtime Dynamic': 0.0113054,
'Memory Management Unit/Itlb/Subthreshold Leakage': 0.0413758,
'Memory Management Unit/Itlb/Subthreshold Leakage with power gating': 0.0235842,
'Memory Management Unit/Peak Dynamic': 0.335963,
'Memory Management Unit/Runtime Dynamic': 0.0437282,
'Memory Management Unit/Subthreshold Leakage': 0.0766103,
'Memory Management Unit/Subthreshold Leakage with power gating': 0.0398333,
'Peak Dynamic': 14.9434,
'Renaming Unit/Area': 0.303608,
'Renaming Unit/FP Front End RAT/Area': 0.131045,
'Renaming Unit/FP Front End RAT/Gate Leakage': 0.00351123,
'Renaming Unit/FP Front End RAT/Peak Dynamic': 2.51468,
'Renaming Unit/FP Front End RAT/Runtime Dynamic': 0.0181291,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage': 0.0308571,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage with power gating': 0.0175885,
'Renaming Unit/Free List/Area': 0.0340654,
'Renaming Unit/Free List/Gate Leakage': 2.5481e-05,
'Renaming Unit/Free List/Peak Dynamic': 0.0306032,
'Renaming Unit/Free List/Runtime Dynamic': 0.0050114,
'Renaming Unit/Free List/Subthreshold Leakage': 0.000370144,
'Renaming Unit/Free List/Subthreshold Leakage with power gating': 0.000201064,
'Renaming Unit/Gate Leakage': 0.00708398,
'Renaming Unit/Int Front End RAT/Area': 0.0941223,
'Renaming Unit/Int Front End RAT/Gate Leakage': 0.000283242,
'Renaming Unit/Int Front End RAT/Peak Dynamic': 0.731965,
'Renaming Unit/Int Front End RAT/Runtime Dynamic': 0.0551057,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage': 0.00435488,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage with power gating': 0.00248228,
'Renaming Unit/Peak Dynamic': 3.58947,
'Renaming Unit/Runtime Dynamic': 0.0782462,
'Renaming Unit/Subthreshold Leakage': 0.0552466,
'Renaming Unit/Subthreshold Leakage with power gating': 0.0276461,
'Runtime Dynamic': 2.33534,
'Subthreshold Leakage': 6.16288,
'Subthreshold Leakage with power gating': 2.55328}],
'DRAM': {'Area': 0,
'Gate Leakage': 0,
'Peak Dynamic': 3.868411224021876,
'Runtime Dynamic': 3.868411224021876,
'Subthreshold Leakage': 4.252,
'Subthreshold Leakage with power gating': 4.252},
'L3': [{'Area': 61.9075,
'Gate Leakage': 0.0484137,
'Peak Dynamic': 0.371973,
'Runtime Dynamic': 0.183113,
'Subthreshold Leakage': 6.80085,
'Subthreshold Leakage with power gating': 3.32364}],
'Processor': {'Area': 191.908,
'Gate Leakage': 1.53485,
'Peak Dynamic': 75.1614,
'Peak Power': 108.274,
'Runtime Dynamic': 16.5813,
'Subthreshold Leakage': 31.5774,
'Subthreshold Leakage with power gating': 13.9484,
'Total Cores/Area': 128.669,
'Total Cores/Gate Leakage': 1.4798,
'Total Cores/Peak Dynamic': 74.7894,
'Total Cores/Runtime Dynamic': 16.3982,
'Total Cores/Subthreshold Leakage': 24.7074,
'Total Cores/Subthreshold Leakage with power gating': 10.2429,
'Total L3s/Area': 61.9075,
'Total L3s/Gate Leakage': 0.0484137,
'Total L3s/Peak Dynamic': 0.371973,
'Total L3s/Runtime Dynamic': 0.183113,
'Total L3s/Subthreshold Leakage': 6.80085,
'Total L3s/Subthreshold Leakage with power gating': 3.32364,
'Total Leakage': 33.1122,
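               # Consistency check (assumed McPAT convention, up to rounding):
               # Total Leakage 33.1122 = Gate Leakage 1.53485 + Subthreshold Leakage 31.5774,
               # and Peak Power 108.274 = Peak Dynamic 75.1614 + Total Leakage 33.1122.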
'Total NoCs/Area': 1.33155,
'Total NoCs/Gate Leakage': 0.00662954,
'Total NoCs/Peak Dynamic': 0.0,
'Total NoCs/Runtime Dynamic': 0.0,
'Total NoCs/Subthreshold Leakage': 0.0691322,
'Total NoCs/Subthreshold Leakage with power gating': 0.0259246}} | 75.082057 | 124 | 0.682157 |
c71c6e80583baf2cb3846a4c3d378463d41f4b27 | 9,582 | py | Python | packages/gtmcore/gtmcore/environment/conda.py | gigabackup/gigantum-client | 70fe6b39b87b1c56351f2b4c551b6f1693813e4f | [
"MIT"
] | 60 | 2018-09-26T15:46:00.000Z | 2021-10-10T02:37:14.000Z | packages/gtmcore/gtmcore/environment/conda.py | gigabackup/gigantum-client | 70fe6b39b87b1c56351f2b4c551b6f1693813e4f | [
"MIT"
] | 1,706 | 2018-09-26T16:11:22.000Z | 2021-08-20T13:37:59.000Z | packages/gtmcore/gtmcore/environment/conda.py | griffinmilsap/gigantum-client | 70fe6b39b87b1c56351f2b4c551b6f1693813e4f | [
"MIT"
] | 11 | 2019-03-14T13:23:51.000Z | 2022-01-25T01:29:16.000Z | from typing import List, Dict
import json
from gtmcore.http import ConcurrentRequestManager, ConcurrentRequest
from gtmcore.environment.packagemanager import PackageManager, PackageResult, PackageMetadata
from gtmcore.container import container_for_context
from gtmcore.labbook import LabBook
from gtmcore.logging import LMLogger
logger = LMLogger.get_logger()
| 40.774468 | 120 | 0.611668 |
c71da90915f08f68f935060eea6dba44dc3beaac | 1,147 | py | Python | netchos/io/io_mpl_to_px.py | brainets/netchos | ccfcd2ec85894adffbd20fbc67410dbdacfe6812 | [
"BSD-3-Clause"
] | 11 | 2021-04-20T19:45:23.000Z | 2021-11-17T15:18:33.000Z | netchos/io/io_mpl_to_px.py | brainets/netchos | ccfcd2ec85894adffbd20fbc67410dbdacfe6812 | [
"BSD-3-Clause"
] | 3 | 2021-04-26T09:01:42.000Z | 2021-06-30T12:09:15.000Z | netchos/io/io_mpl_to_px.py | brainets/netchos | ccfcd2ec85894adffbd20fbc67410dbdacfe6812 | [
"BSD-3-Clause"
] | 2 | 2021-05-06T20:28:46.000Z | 2021-05-24T10:36:44.000Z | """Conversion of Matplotlib / Seaborn inputs to plotly."""
import os.path as op
from pkg_resources import resource_filename
import json
def mpl_to_px_inputs(inputs, plt_types=None):
"""Convert typical matplotlib inputs to plotly to simplify API.
Parameters
----------
inputs : dict
Dictionary of inputs
plt_types : string or list or None
        Sub-select some plotting types (e.g. heatmap, line). If None, all
types are used
Returns
-------
outputs : dict
Dictionary of converted inputs
"""
# load reference table
file = op.join(op.dirname(__file__), "io_mpl_to_px.json")
with open(file, 'r') as f:
table = json.load(f)
# go through the desired plotting types for conversion
if plt_types is None:
plt_types = list(table.keys())
if isinstance(plt_types, str):
plt_types = [plt_types]
ref = {}
for plt_type in plt_types:
ref.update(table[plt_type])
# convert inputs
outputs = {}
for k, v in inputs.items():
if k in ref.keys():
k = ref[k]
outputs[k] = v
return outputs
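
# Illustrative usage (hypothetical keys and values; the real mapping is
# whatever the bundled io_mpl_to_px.json table defines):
#
#     inputs = {"cmap": "plasma", "vmin": 0.0, "vmax": 1.0}
#     px_kwargs = mpl_to_px_inputs(inputs, plt_types="heatmap")
#     # -> e.g. {"color_continuous_scale": "plasma", "zmin": 0.0, "zmax": 1.0}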
| 25.488889 | 77 | 0.62075 |
c71dc157e40f86937d395921d62896697e8b4c70 | 186 | py | Python | fizzbuzz_for_02.py | toastyxen/FizzBuzz | 094270e3882e743a80c5d32b3903c2483d37755f | [
"MIT"
] | null | null | null | fizzbuzz_for_02.py | toastyxen/FizzBuzz | 094270e3882e743a80c5d32b3903c2483d37755f | [
"MIT"
] | null | null | null | fizzbuzz_for_02.py | toastyxen/FizzBuzz | 094270e3882e743a80c5d32b3903c2483d37755f | [
"MIT"
] | null | null | null | """Fizzbuzz for loop variant 3"""
for x in range(1, 101):
OUTPUT = ""
if x % 3 == 0:
OUTPUT += "Fizz"
if x % 5 == 0:
OUTPUT += "Buzz"
print(OUTPUT or x)
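    # "OUTPUT or x" works because "" is falsy: multiples of 3 and/or 5 print
    # Fizz/Buzz/FizzBuzz, and every other number falls back to printing x itself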
| 18.6 | 33 | 0.473118 |
c71ef3a9007aa0aebc08a606ded35bff47c69406 | 242 | py | Python | cnn/struct/layer/parse_tensor_module.py | hslee1539/GIS_GANs | 6901c830b924e59fd06247247db3f925bab26583 | [
"MIT"
] | null | null | null | cnn/struct/layer/parse_tensor_module.py | hslee1539/GIS_GANs | 6901c830b924e59fd06247247db3f925bab26583 | [
"MIT"
] | null | null | null | cnn/struct/layer/parse_tensor_module.py | hslee1539/GIS_GANs | 6901c830b924e59fd06247247db3f925bab26583 | [
"MIT"
] | null | null | null | from tensor.main_module import Tensor
import numpy as np | 24.2 | 41 | 0.68595 |
c71f19c3cf33a6be263067d8b8a273844fc916bd | 3,337 | py | Python | openstack_dashboard/dashboards/admin/volume_types/qos_specs/forms.py | hemantsonawane95/horizon-apelby | 01a5e72219aeca8c1451701ee85e232ed0618751 | [
"Apache-2.0"
] | null | null | null | openstack_dashboard/dashboards/admin/volume_types/qos_specs/forms.py | hemantsonawane95/horizon-apelby | 01a5e72219aeca8c1451701ee85e232ed0618751 | [
"Apache-2.0"
] | null | null | null | openstack_dashboard/dashboards/admin/volume_types/qos_specs/forms.py | hemantsonawane95/horizon-apelby | 01a5e72219aeca8c1451701ee85e232ed0618751 | [
"Apache-2.0"
] | null | null | null | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import re
from django.urls import reverse
from django.utils.translation import gettext_lazy as _
from horizon import exceptions
from horizon import forms
from horizon import messages
from openstack_dashboard import api
KEY_NAME_REGEX = re.compile(r"^[a-zA-Z0-9-_:. /]+$", re.UNICODE)
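# Illustrative keys (examples assumed, not from the original source):
# "read_iops_sec" and "QoS:level 1" match KEY_NAME_REGEX, while keys
# containing characters such as "$" or "#" are rejected.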
KEY_ERROR_MESSAGES = {
    'invalid': _("The key must match the following regex: "
"'^[a-zA-Z0-9-_:. /]'")}
| 39.72619 | 77 | 0.585556 |
c71fc189fa6f73122afbe242bbfd89bd9a8a50ea | 9,050 | py | Python | data_structure/const_tree.py | alipay/StructuredLM_RTDT | 6edf2acf8747e17015523d78b6c580431a4f7b5c | [
"Apache-2.0"
] | 42 | 2021-06-01T07:07:12.000Z | 2022-03-18T02:38:53.000Z | data_structure/const_tree.py | alipay/StructuredLM_RTDT | 6edf2acf8747e17015523d78b6c580431a4f7b5c | [
"Apache-2.0"
] | 1 | 2021-12-15T03:50:24.000Z | 2021-12-15T08:46:56.000Z | data_structure/const_tree.py | alipay/StructuredLM_RTDT | 6edf2acf8747e17015523d78b6c580431a4f7b5c | [
"Apache-2.0"
] | 7 | 2021-06-02T02:28:01.000Z | 2022-01-14T06:59:29.000Z | # coding=utf-8
# Copyright (c) 2021 Ant Group
import sys
LABEL_SEP = '@'
INDENT_STRING1 = ' '
INDENT_STRING2 = ''
EMPTY_TOKEN = '___EMPTY___'
| 30.782313 | 98 | 0.575912 |
c71fcfdd300a9f0f56bf5188a7e7a694d05f3faa | 4,098 | py | Python | tests/test_minimize.py | The-Ludwig/iminuit | 8eef7b711846d6c8db9fe1fc883f6fa0977eb514 | [
"MIT"
] | null | null | null | tests/test_minimize.py | The-Ludwig/iminuit | 8eef7b711846d6c8db9fe1fc883f6fa0977eb514 | [
"MIT"
] | null | null | null | tests/test_minimize.py | The-Ludwig/iminuit | 8eef7b711846d6c8db9fe1fc883f6fa0977eb514 | [
"MIT"
] | null | null | null | import pytest
from iminuit import minimize
import numpy as np
from numpy.testing import assert_allclose, assert_equal
opt = pytest.importorskip("scipy.optimize")
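# pytest.importorskip skips this whole test module at collection time when
# scipy is not installed, instead of failing on the import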
| 26.269231 | 85 | 0.59346 |
c72190831a83ec1b623a951d123f7148309fad86 | 2,468 | py | Python | murtanto/parsing.py | amandatv20/botfb | 2be3ce0265fd86f48f24d2b496d36fd346e49d29 | [
"MIT"
] | 1 | 2021-03-24T13:54:33.000Z | 2021-03-24T13:54:33.000Z | murtanto/parsing.py | amandatv20/botfb | 2be3ce0265fd86f48f24d2b496d36fd346e49d29 | [
"MIT"
] | 2 | 2020-06-15T08:10:55.000Z | 2020-06-16T15:03:19.000Z | murtanto/parsing.py | amandatv20/botfb | 2be3ce0265fd86f48f24d2b496d36fd346e49d29 | [
"MIT"
] | null | null | null | # coded by: salism3
# 23 - 05 - 2020 23:18 (Malam Takbir)
from bs4 import BeautifulSoup as parser
from . import sorting
import re
| 31.641026 | 113 | 0.636548 |
c721ab40af9f4d2f1e869b104c622361e1311025 | 878 | py | Python | test/test_watchdog_status.py | ike709/tgs4-api-pyclient | 97918cfe614cc4ef06ef2485efff163417a8cd44 | [
"MIT"
] | null | null | null | test/test_watchdog_status.py | ike709/tgs4-api-pyclient | 97918cfe614cc4ef06ef2485efff163417a8cd44 | [
"MIT"
] | null | null | null | test/test_watchdog_status.py | ike709/tgs4-api-pyclient | 97918cfe614cc4ef06ef2485efff163417a8cd44 | [
"MIT"
] | null | null | null | # coding: utf-8
"""
TGS API
A production scale tool for BYOND server management # noqa: E501
OpenAPI spec version: 9.0.0
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import unittest
import swagger_client
from swagger_client.models.watchdog_status import WatchdogStatus # noqa: E501
from swagger_client.rest import ApiException
if __name__ == '__main__':
unittest.main()
| 21.95 | 86 | 0.702733 |
c721d7a43c6300b41e4a0357169d5ebc646135d1 | 235 | py | Python | setup.py | joesan/housing-classification-example | 93f921cf01c79ab63732ef302ab52d2c9ffedee1 | [
"FTL"
] | null | null | null | setup.py | joesan/housing-classification-example | 93f921cf01c79ab63732ef302ab52d2c9ffedee1 | [
"FTL"
] | null | null | null | setup.py | joesan/housing-classification-example | 93f921cf01c79ab63732ef302ab52d2c9ffedee1 | [
"FTL"
] | null | null | null | from setuptools import find_packages, setup
setup(
name='src',
packages=find_packages(),
version='0.1.0',
description='Python codebase for the housing classification ML problem',
author='Joesan',
license='',
)
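
# Typical usage (assumed): install the package in editable mode during
# development with
#   pip install -e .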
| 21.363636 | 76 | 0.685106 |
c7226ff1219f925df17003fe42d233729469035d | 4,187 | py | Python | tests/test_models/test_backbones/test_sr_backbones/test_edvr_net.py | wangruohui/mmediting | 6577d307caf9edfb34c6e46547994e6314fffc37 | [
"Apache-2.0"
] | 45 | 2022-03-05T06:54:34.000Z | 2022-03-30T02:15:42.000Z | tests/test_models/test_backbones/test_sr_backbones/test_edvr_net.py | wangruohui/mmediting | 6577d307caf9edfb34c6e46547994e6314fffc37 | [
"Apache-2.0"
] | 1 | 2022-03-25T14:04:39.000Z | 2022-03-31T04:48:38.000Z | tests/test_models/test_backbones/test_sr_backbones/test_edvr_net.py | wangruohui/mmediting | 6577d307caf9edfb34c6e46547994e6314fffc37 | [
"Apache-2.0"
] | 1 | 2022-03-10T01:00:24.000Z | 2022-03-10T01:00:24.000Z | # Copyright (c) OpenMMLab. All rights reserved.
import pytest
import torch
from mmedit.models.backbones.sr_backbones.edvr_net import (EDVRNet,
PCDAlignment,
TSAFusion)
def test_pcd_alignment():
"""Test PCDAlignment."""
# cpu
pcd_alignment = PCDAlignment(mid_channels=4, deform_groups=2)
input_list = []
for i in range(3, 0, -1):
input_list.append(torch.rand(1, 4, 2**i, 2**i))
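    # i = 3, 2, 1 gives three pyramid levels of size 8x8, 4x4 and 2x2,
    # assumed to be the fine-to-coarse (L1..L3) features PCDAlignment expects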
    # CPU path: the module and tensors are used directly, with no .cuda() transfer
output = pcd_alignment(input_list, input_list)
assert output.shape == (1, 4, 8, 8)
with pytest.raises(AssertionError):
pcd_alignment(input_list[0:2], input_list)
# gpu
if torch.cuda.is_available():
pcd_alignment = PCDAlignment(mid_channels=4, deform_groups=2)
input_list = []
for i in range(3, 0, -1):
input_list.append(torch.rand(1, 4, 2**i, 2**i))
pcd_alignment = pcd_alignment.cuda()
input_list = [v.cuda() for v in input_list]
output = pcd_alignment(input_list, input_list)
assert output.shape == (1, 4, 8, 8)
with pytest.raises(AssertionError):
pcd_alignment(input_list[0:2], input_list)
def test_tsa_fusion():
"""Test TSAFusion."""
# cpu
tsa_fusion = TSAFusion(mid_channels=4, num_frames=5, center_frame_idx=2)
input_tensor = torch.rand(1, 5, 4, 8, 8)
output = tsa_fusion(input_tensor)
assert output.shape == (1, 4, 8, 8)
# gpu
if torch.cuda.is_available():
tsa_fusion = tsa_fusion.cuda()
input_tensor = input_tensor.cuda()
output = tsa_fusion(input_tensor)
assert output.shape == (1, 4, 8, 8)
def test_edvrnet():
"""Test EDVRNet."""
# cpu
# with tsa
edvrnet = EDVRNet(
3,
3,
mid_channels=8,
num_frames=5,
deform_groups=2,
num_blocks_extraction=1,
num_blocks_reconstruction=1,
center_frame_idx=2,
with_tsa=True)
input_tensor = torch.rand(1, 5, 3, 8, 8)
edvrnet.init_weights(pretrained=None)
output = edvrnet(input_tensor)
assert output.shape == (1, 3, 32, 32)
# without tsa
edvrnet = EDVRNet(
3,
3,
mid_channels=8,
num_frames=5,
deform_groups=2,
num_blocks_extraction=1,
num_blocks_reconstruction=1,
center_frame_idx=2,
with_tsa=False)
output = edvrnet(input_tensor)
assert output.shape == (1, 3, 32, 32)
with pytest.raises(AssertionError):
# The height and width of inputs should be a multiple of 4
input_tensor = torch.rand(1, 5, 3, 3, 3)
edvrnet(input_tensor)
with pytest.raises(TypeError):
# pretrained should be str or None
edvrnet.init_weights(pretrained=[1])
# gpu
if torch.cuda.is_available():
# with tsa
edvrnet = EDVRNet(
3,
3,
mid_channels=8,
num_frames=5,
deform_groups=2,
num_blocks_extraction=1,
num_blocks_reconstruction=1,
center_frame_idx=2,
with_tsa=True).cuda()
input_tensor = torch.rand(1, 5, 3, 8, 8).cuda()
edvrnet.init_weights(pretrained=None)
output = edvrnet(input_tensor)
assert output.shape == (1, 3, 32, 32)
# without tsa
edvrnet = EDVRNet(
3,
3,
mid_channels=8,
num_frames=5,
deform_groups=2,
num_blocks_extraction=1,
num_blocks_reconstruction=1,
center_frame_idx=2,
with_tsa=False).cuda()
output = edvrnet(input_tensor)
assert output.shape == (1, 3, 32, 32)
with pytest.raises(AssertionError):
# The height and width of inputs should be a multiple of 4
input_tensor = torch.rand(1, 5, 3, 3, 3).cuda()
edvrnet(input_tensor)
with pytest.raises(TypeError):
# pretrained should be str or None
edvrnet.init_weights(pretrained=[1])
| 28.482993 | 76 | 0.578696 |
c72294488588ee770a6039927fb6209367d51df5 | 225 | py | Python | mat2py/core/datastoreio.py | mat2py/mat2py | 2776fbe9ca4ad2aaa3eac6aa79d17747a9ec24a8 | [
"MIT"
] | null | null | null | mat2py/core/datastoreio.py | mat2py/mat2py | 2776fbe9ca4ad2aaa3eac6aa79d17747a9ec24a8 | [
"MIT"
] | 37 | 2021-12-23T03:22:20.000Z | 2022-02-16T15:40:47.000Z | mat2py/core/datastoreio.py | mat2py/mat2py | 2776fbe9ca4ad2aaa3eac6aa79d17747a9ec24a8 | [
"MIT"
] | 2 | 2022-01-23T07:59:10.000Z | 2022-02-03T09:15:54.000Z | # type: ignore
__all__ = [
"readDatastoreImage",
"datastore",
]
| 15 | 51 | 0.711111 |
c7235d9e02846d039085054a4375d4bc687a9231 | 12,229 | py | Python | enjoliver-api/tests/test_generate_groups.py | netturpin/enjoliver | 9700470939da40ff84304af6e8c7210a5fd693a4 | [
"MIT"
] | 11 | 2017-11-06T08:42:55.000Z | 2021-01-08T11:01:02.000Z | enjoliver-api/tests/test_generate_groups.py | netturpin/enjoliver | 9700470939da40ff84304af6e8c7210a5fd693a4 | [
"MIT"
] | 7 | 2017-12-28T12:05:50.000Z | 2021-04-02T15:04:46.000Z | enjoliver-api/tests/test_generate_groups.py | netturpin/enjoliver | 9700470939da40ff84304af6e8c7210a5fd693a4 | [
"MIT"
] | 4 | 2017-11-08T10:03:31.000Z | 2018-06-03T17:59:43.000Z | import os
from shutil import rmtree
from tempfile import mkdtemp
from unittest import TestCase
from enjoliver import generator
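# The TestCase subclasses were stripped from this record; a minimal sketch
# (assumption) of the fixture pattern the imports suggest: generate matchbox
# groups into a temporary directory and remove it afterwards. The real
# assertions against `generator` were not included in this record.
class TestGenerateGroups(TestCase):
    def setUp(self):
        self.test_matchbox_path = mkdtemp()

    def tearDown(self):
        rmtree(self.test_matchbox_path, ignore_errors=True)

    def test_generator_module_is_importable(self):
        self.assertTrue(hasattr(generator, "__name__"))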
| 35.446377 | 93 | 0.568485 |
c72423d0c9647d3f45e1ae401dca8a26496518f2 | 265 | py | Python | HackerRank/Calendar Module/solution.py | nikku1234/Code-Practise | 94eb6680ea36efd10856c377000219285f77e5a4 | [
"Apache-2.0"
] | 9 | 2020-07-02T06:06:17.000Z | 2022-02-26T11:08:09.000Z | HackerRank/Calendar Module/solution.py | nikku1234/Code-Practise | 94eb6680ea36efd10856c377000219285f77e5a4 | [
"Apache-2.0"
] | 1 | 2021-11-04T17:26:36.000Z | 2021-11-04T17:26:36.000Z | HackerRank/Calendar Module/solution.py | nikku1234/Code-Practise | 94eb6680ea36efd10856c377000219285f77e5a4 | [
"Apache-2.0"
] | 8 | 2021-01-31T10:31:12.000Z | 2022-03-13T09:15:55.000Z | # Enter your code here. Read input from STDIN. Print output to STDOUT
import calendar
mm, dd, yyyy = map(int, input().split())
day = ["MONDAY", "TUESDAY", "WEDNESDAY", "THURSDAY", "FRIDAY", "SATURDAY", "SUNDAY"]
val = calendar.weekday(yyyy, mm, dd)
print(day[val])
| 22.083333 | 78 | 0.698113 |
c7245a8913ae3a1c31f00b1392df9f4dd3d991e9 | 7,560 | py | Python | scale/trigger/models.py | stevevarner/scale | 9623b261db4ddcf770f00df16afc91176142bb7c | [
"Apache-2.0"
] | null | null | null | scale/trigger/models.py | stevevarner/scale | 9623b261db4ddcf770f00df16afc91176142bb7c | [
"Apache-2.0"
] | null | null | null | scale/trigger/models.py | stevevarner/scale | 9623b261db4ddcf770f00df16afc91176142bb7c | [
"Apache-2.0"
] | null | null | null | """Defines the models for trigger rules and events"""
from __future__ import unicode_literals
import django.contrib.postgres.fields
from django.db import models, transaction
from django.utils.timezone import now
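# The model classes were stripped from this record; a minimal sketch
# (assumption) of the two models the docstring names. Field names are
# illustrative, not the actual scale schema.
class TriggerRule(models.Model):
    """A rule that fires trigger events when its condition is met."""
    type = models.CharField(max_length=50, db_index=True)
    name = models.CharField(max_length=50, blank=True)
    configuration = django.contrib.postgres.fields.JSONField(default=dict)
    is_active = models.BooleanField(default=True, db_index=True)
    created = models.DateTimeField(auto_now_add=True)


class TriggerEvent(models.Model):
    """An event produced by a trigger rule."""
    type = models.CharField(max_length=50, db_index=True)
    rule = models.ForeignKey(TriggerRule, blank=True, null=True,
                             on_delete=models.PROTECT)
    description = django.contrib.postgres.fields.JSONField(default=dict)
    occurred = models.DateTimeField(db_index=True)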
| 38.769231 | 120 | 0.693783 |
c724bce6559444b809161c07169a0eaf827f8a70 | 1,125 | py | Python | leetcode/0506_relative_ranks.py | chaosWsF/Python-Practice | ff617675b6bcd125933024bb4c246b63a272314d | [
"BSD-2-Clause"
] | null | null | null | leetcode/0506_relative_ranks.py | chaosWsF/Python-Practice | ff617675b6bcd125933024bb4c246b63a272314d | [
"BSD-2-Clause"
] | null | null | null | leetcode/0506_relative_ranks.py | chaosWsF/Python-Practice | ff617675b6bcd125933024bb4c246b63a272314d | [
"BSD-2-Clause"
] | null | null | null | """
Given scores of N athletes, find their relative ranks and the people with the top
three highest scores, who will be awarded medals: "Gold Medal", "Silver Medal" and
"Bronze Medal".
Example 1:
Input: [5, 4, 3, 2, 1]
Output: ["Gold Medal", "Silver Medal", "Bronze Medal", "4", "5"]
Explanation: The first three athletes got the top three highest scores, so they
got "Gold Medal", "Silver Medal" and "Bronze Medal". For the left two athletes,
you just need to output their relative ranks according to their scores.
Note:
N is a positive integer and won't exceed 10,000.
All the scores of athletes are guaranteed to be unique.
"""
| 32.142857 | 84 | 0.593778 |
c724c503b44eb473d695fa13f0446956650e0c2b | 987 | py | Python | barriers/models/history/assessments/economic_impact.py | felix781/market-access-python-frontend | 3b0e49feb4fdf0224816326938a46002aa4a2b1c | [
"MIT"
] | 1 | 2021-12-15T04:14:03.000Z | 2021-12-15T04:14:03.000Z | barriers/models/history/assessments/economic_impact.py | felix781/market-access-python-frontend | 3b0e49feb4fdf0224816326938a46002aa4a2b1c | [
"MIT"
] | 19 | 2019-12-11T11:32:47.000Z | 2022-03-29T15:40:57.000Z | barriers/models/history/assessments/economic_impact.py | felix781/market-access-python-frontend | 3b0e49feb4fdf0224816326938a46002aa4a2b1c | [
"MIT"
] | 2 | 2021-02-09T09:38:45.000Z | 2021-03-29T19:07:09.000Z | from ..base import BaseHistoryItem, GenericHistoryItem
from ..utils import PolymorphicBase
| 24.675 | 59 | 0.68997 |
c72537aa56e0fec5c2e19ae544ffe17dd652b46b | 727 | py | Python | link_prob_show.py | Rheinwalt/spatial-effects-networks | 7b77a22b45341b024a57e1759b7b61cd91d90849 | [
"MIT"
] | 3 | 2018-12-21T20:19:18.000Z | 2021-01-02T12:58:56.000Z | link_prob_show.py | rick-foo/spatial-effects-networks | 7b77a22b45341b024a57e1759b7b61cd91d90849 | [
"MIT"
] | null | null | null | link_prob_show.py | rick-foo/spatial-effects-networks | 7b77a22b45341b024a57e1759b7b61cd91d90849 | [
"MIT"
] | 2 | 2020-09-03T14:18:37.000Z | 2021-10-01T18:06:42.000Z | import sys
import numpy as np
from sern import *
ids, lon, lat = np.loadtxt('nodes', unpack = True)
links = np.loadtxt('links', dtype = 'int')
A, b = AdjacencyMatrix(ids, links)
lon, lat = lon[b], lat[b]
n = A.shape[0]
# LinkProbability expects A as triu
A = A[np.triu_indices(n, 1)]
# play around with the scale, maybe you don't need log binning?
D, x = IntegerDistances(lat, lon, scale = 50)
p = LinkProbability(A, D)
from matplotlib import pyplot as pl
pl.plot(p, 'bo')
pl.ylabel('Link probability given distance')
pl.xlabel('Bin number')
pl.savefig('link_prob_bin.png')
pl.close('all')
pl.semilogx(x, p, 'bo')
pl.ylabel('Link probability given distance')
pl.xlabel('Distance [km]')
pl.savefig('link_prob_distance.png')
| 25.964286 | 63 | 0.707015 |
c7268aa939534725180b033986da1a690622e70b | 3,899 | py | Python | controller/components/app.py | isabella232/flight-lab | bd666b1d2bcec6f928a2e8da9f13fd5dae21319f | [
"Apache-2.0"
] | 15 | 2018-10-18T07:50:46.000Z | 2021-10-21T03:40:55.000Z | controller/components/app.py | google/flight-lab | bd666b1d2bcec6f928a2e8da9f13fd5dae21319f | [
"Apache-2.0"
] | 9 | 2018-09-17T23:00:02.000Z | 2019-01-22T21:08:04.000Z | controller/components/app.py | isabella232/flight-lab | bd666b1d2bcec6f928a2e8da9f13fd5dae21319f | [
"Apache-2.0"
] | 12 | 2019-01-07T12:43:37.000Z | 2021-10-21T03:40:44.000Z | # Copyright 2018 Flight Lab authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Library for components related to running apps."""
import subprocess
import threading
from components import base
from protos import controller_pb2
from utils import app
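# The component classes were stripped from this record; a minimal sketch
# (assumption) of what the imports suggest: a base.Component subclass that
# runs an external program in a subprocess. base.Component's interface and
# the `executable` argument are illustrative, not the actual Flight Lab API.
class AppComponentSketch(base.Component):
  """Hypothetical sketch -- not the real implementation."""

  def start(self, executable):
    self._process = subprocess.Popen([executable])

  def stop(self):
    if getattr(self, '_process', None):
      self._process.terminate()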
| 33.612069 | 77 | 0.691459 |
c727467c9c5f9cbcf49804ff4103bf27f2140c3f | 1,504 | py | Python | botorch/acquisition/__init__.py | jmren168/botorch | 6c067185f56d3a244c4093393b8a97388fb1c0b3 | [
"MIT"
] | 1 | 2020-03-29T20:06:45.000Z | 2020-03-29T20:06:45.000Z | botorch/acquisition/__init__.py | jmren168/botorch | 6c067185f56d3a244c4093393b8a97388fb1c0b3 | [
"MIT"
] | null | null | null | botorch/acquisition/__init__.py | jmren168/botorch | 6c067185f56d3a244c4093393b8a97388fb1c0b3 | [
"MIT"
] | 1 | 2020-03-29T20:06:48.000Z | 2020-03-29T20:06:48.000Z | #!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved
from .acquisition import AcquisitionFunction
from .analytic import (
AnalyticAcquisitionFunction,
ConstrainedExpectedImprovement,
ExpectedImprovement,
NoisyExpectedImprovement,
PosteriorMean,
ProbabilityOfImprovement,
UpperConfidenceBound,
)
from .fixed_feature import FixedFeatureAcquisitionFunction
from .monte_carlo import (
MCAcquisitionFunction,
qExpectedImprovement,
qNoisyExpectedImprovement,
qProbabilityOfImprovement,
qSimpleRegret,
qUpperConfidenceBound,
)
from .objective import (
ConstrainedMCObjective,
GenericMCObjective,
IdentityMCObjective,
LinearMCObjective,
MCAcquisitionObjective,
ScalarizedObjective,
)
from .utils import get_acquisition_function
__all__ = [
"AcquisitionFunction",
"AnalyticAcquisitionFunction",
"ConstrainedExpectedImprovement",
"ExpectedImprovement",
"FixedFeatureAcquisitionFunction",
"NoisyExpectedImprovement",
"PosteriorMean",
"ProbabilityOfImprovement",
"UpperConfidenceBound",
"qExpectedImprovement",
"qNoisyExpectedImprovement",
"qProbabilityOfImprovement",
"qSimpleRegret",
"qUpperConfidenceBound",
"ConstrainedMCObjective",
"GenericMCObjective",
"IdentityMCObjective",
"LinearMCObjective",
"MCAcquisitionFunction",
"MCAcquisitionObjective",
"ScalarizedObjective",
"get_acquisition_function",
]
| 25.491525 | 70 | 0.757979 |
c72c87715b18d844a4d1e6b4c82ec44a40f2bde2 | 2,810 | py | Python | examples/pybullet/gym/pybullet_envs/minitaur/envs/env_randomizers/minitaur_alternating_legs_env_randomizer.py | felipeek/bullet3 | 6a59241074720e9df119f2f86bc01765917feb1e | [
"Zlib"
] | 9,136 | 2015-01-02T00:41:45.000Z | 2022-03-31T15:30:02.000Z | examples/pybullet/gym/pybullet_envs/minitaur/envs/env_randomizers/minitaur_alternating_legs_env_randomizer.py | felipeek/bullet3 | 6a59241074720e9df119f2f86bc01765917feb1e | [
"Zlib"
] | 2,424 | 2015-01-05T08:55:58.000Z | 2022-03-30T19:34:55.000Z | examples/pybullet/gym/pybullet_envs/minitaur/envs/env_randomizers/minitaur_alternating_legs_env_randomizer.py | felipeek/bullet3 | 6a59241074720e9df119f2f86bc01765917feb1e | [
"Zlib"
] | 2,921 | 2015-01-02T10:19:30.000Z | 2022-03-31T02:48:42.000Z | """Randomize the minitaur_gym_alternating_leg_env when reset() is called.
The randomization include swing_offset, extension_offset of all legs that mimics
bent legs, desired_pitch from user input, battery voltage and motor damping.
"""
import os, inspect
currentdir = os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe())))
parentdir = os.path.dirname(os.path.dirname(currentdir))
parentdir = os.path.dirname(os.path.dirname(parentdir))
os.sys.path.insert(0, parentdir)
import numpy as np
import tensorflow.compat.v1 as tf
from pybullet_envs.minitaur.envs import env_randomizer_base
# Absolute range.
NUM_LEGS = 4
BATTERY_VOLTAGE_RANGE = (14.8, 16.8)
MOTOR_VISCOUS_DAMPING_RANGE = (0, 0.01)
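# The randomizer class itself was stripped from this record; a minimal sketch
# (assumption) of the usual env_randomizer_base pattern. SetBatteryVoltage /
# SetMotorViscousDamping are assumed minitaur setters, not confirmed here.
class MinitaurAlternatingLegsEnvRandomizerSketch(
    env_randomizer_base.EnvRandomizerBase):
  """Sketch: re-randomize battery voltage and motor damping on env reset."""

  def randomize_env(self, env):
    env.minitaur.SetBatteryVoltage(np.random.uniform(*BATTERY_VOLTAGE_RANGE))
    env.minitaur.SetMotorViscousDamping(
        np.random.uniform(*MOTOR_VISCOUS_DAMPING_RANGE))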
| 45.322581 | 86 | 0.70605 |
c72ca1c8b4319d09d601fa708b5ddc14cb8e0859 | 14,704 | py | Python | pygsti/modelmembers/states/tensorprodstate.py | pyGSTi-Developers/pyGSTi | bfedc1de4d604f14b0f958615776fb80ddb59e33 | [
"Apache-2.0"
] | 73 | 2016-01-28T05:02:05.000Z | 2022-03-30T07:46:33.000Z | pygsti/modelmembers/states/tensorprodstate.py | pyGSTi-Developers/pyGSTi | bfedc1de4d604f14b0f958615776fb80ddb59e33 | [
"Apache-2.0"
] | 113 | 2016-02-25T15:32:18.000Z | 2022-03-31T13:18:13.000Z | pygsti/modelmembers/states/tensorprodstate.py | pyGSTi-Developers/pyGSTi | bfedc1de4d604f14b0f958615776fb80ddb59e33 | [
"Apache-2.0"
] | 41 | 2016-03-15T19:32:07.000Z | 2022-02-16T10:22:05.000Z | """
The TensorProductState class and supporting functionality.
"""
#***************************************************************************************************
# Copyright 2015, 2019 National Technology & Engineering Solutions of Sandia, LLC (NTESS).
# Under the terms of Contract DE-NA0003525 with NTESS, the U.S. Government retains certain rights
# in this software.
# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
# in compliance with the License. You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0 or in the LICENSE file in the root pyGSTi directory.
#***************************************************************************************************
import functools as _functools
import itertools as _itertools
import numpy as _np
from pygsti.modelmembers.states.state import State as _State
from pygsti.modelmembers import modelmember as _modelmember, term as _term
from pygsti.baseobjs import statespace as _statespace
from pygsti.tools import listtools as _lt
from pygsti.tools import matrixtools as _mt
| 42.994152 | 118 | 0.609698 |
c72d167470fc1e484c9ed6ee92db56b541a26d0c | 3,216 | py | Python | edivorce/apps/core/views/graphql.py | gerritvdm/eDivorce | e3c0a4037a7141769250b96df6cc4eb4ea5ef3af | [
"Apache-2.0"
] | 6 | 2017-03-24T18:20:33.000Z | 2021-01-29T03:25:07.000Z | edivorce/apps/core/views/graphql.py | gerritvdm/eDivorce | e3c0a4037a7141769250b96df6cc4eb4ea5ef3af | [
"Apache-2.0"
] | 13 | 2018-10-12T17:20:37.000Z | 2021-11-05T23:13:21.000Z | edivorce/apps/core/views/graphql.py | gerritvdm/eDivorce | e3c0a4037a7141769250b96df6cc4eb4ea5ef3af | [
"Apache-2.0"
] | 11 | 2017-03-15T12:36:39.000Z | 2021-03-05T14:35:59.000Z | import graphene
import graphene_django
from django.http import HttpResponseForbidden
from graphene_django.views import GraphQLView
from graphql import GraphQLError
from edivorce.apps.core.models import Document
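# Query and Mutations were stripped from this record; a minimal sketch
# (assumption) so the Schema below has concrete types to reference. The field
# names and the auth check are illustrative, not the actual eDivorce API.
class DocumentType(graphene_django.DjangoObjectType):
    class Meta:
        model = Document


class Query(graphene.ObjectType):
    documents = graphene.List(DocumentType)

    def resolve_documents(self, info):
        if not info.context.user.is_authenticated:
            raise GraphQLError('Authentication required')
        return Document.objects.all()


class Mutations(graphene.ObjectType):
    # Placeholder field so the schema builds; the real mutations were not
    # included in this record.
    noop = graphene.Boolean()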
graphql_schema = graphene.Schema(query=Query, mutation=Mutations)
| 36.545455 | 135 | 0.668221 |
c72e729bd791fda04d1f1bf87cc60496068da071 | 5,862 | py | Python | amazing/maze.py | danieloconell/maze-solver | f60e476d827d59bfa17cd2148787332707846882 | [
"MIT"
] | null | null | null | amazing/maze.py | danieloconell/maze-solver | f60e476d827d59bfa17cd2148787332707846882 | [
"MIT"
] | 2 | 2021-06-08T19:35:19.000Z | 2021-09-08T00:44:59.000Z | amazing/maze.py | danieloconell/amazing | f60e476d827d59bfa17cd2148787332707846882 | [
"MIT"
] | null | null | null | from .exceptions import MazeNotSolved, AlgorithmNotFound
from .dijkstra import Dijkstra
from .astar import Astar
from functools import wraps
import warnings
from daedalus import Maze as _maze
from PIL import Image
warnings.simplefilter("once", UserWarning)
| 32.932584 | 114 | 0.525589 |
c72eaa2b73efe739c3a50690c7c96660b59023bd | 4,215 | py | Python | config.py | FarbodFarhangfar/midi_player_python | 924cd164b7867d294c761a70d06ab330fa1b8373 | [
"MIT"
] | null | null | null | config.py | FarbodFarhangfar/midi_player_python | 924cd164b7867d294c761a70d06ab330fa1b8373 | [
"MIT"
] | null | null | null | config.py | FarbodFarhangfar/midi_player_python | 924cd164b7867d294c761a70d06ab330fa1b8373 | [
"MIT"
] | null | null | null | import os
| 38.669725 | 106 | 0.474496 |
c72f4c5b309a87813b09f64b422ca7519b3e740b | 2,182 | py | Python | roles/openshift_health_checker/library/ocutil.py | shgriffi/openshift-ansible | 6313f519307cf50055589c3876d8bec398bbc4d4 | [
"Apache-2.0"
] | 164 | 2015-07-29T17:35:04.000Z | 2021-12-16T16:38:04.000Z | roles/openshift_health_checker/library/ocutil.py | shgriffi/openshift-ansible | 6313f519307cf50055589c3876d8bec398bbc4d4 | [
"Apache-2.0"
] | 3,634 | 2015-06-09T13:49:15.000Z | 2022-03-23T20:55:44.000Z | roles/openshift_health_checker/library/ocutil.py | shgriffi/openshift-ansible | 6313f519307cf50055589c3876d8bec398bbc4d4 | [
"Apache-2.0"
] | 250 | 2015-06-08T19:53:11.000Z | 2022-03-01T04:51:23.000Z | #!/usr/bin/python
"""Interface to OpenShift oc command"""
import os
import shlex
import shutil
import subprocess
from ansible.module_utils.basic import AnsibleModule
ADDITIONAL_PATH_LOOKUPS = ['/usr/local/bin', os.path.expanduser('~/bin')]
def locate_oc_binary():
"""Find and return oc binary file"""
# https://github.com/openshift/openshift-ansible/issues/3410
# oc can be in /usr/local/bin in some cases, but that may not
# be in $PATH due to ansible/sudo
paths = os.environ.get("PATH", os.defpath).split(os.pathsep) + ADDITIONAL_PATH_LOOKUPS
oc_binary = 'oc'
# Use shutil.which if it is available, otherwise fallback to a naive path search
try:
which_result = shutil.which(oc_binary, path=os.pathsep.join(paths))
if which_result is not None:
oc_binary = which_result
except AttributeError:
for path in paths:
if os.path.exists(os.path.join(path, oc_binary)):
oc_binary = os.path.join(path, oc_binary)
break
return oc_binary
def main():
"""Module that executes commands on a remote OpenShift cluster"""
module = AnsibleModule(
argument_spec=dict(
namespace=dict(type="str", required=False),
config_file=dict(type="str", required=True),
cmd=dict(type="str", required=True),
extra_args=dict(type="list", default=[]),
),
)
cmd = [locate_oc_binary(), '--config', module.params["config_file"]]
if module.params["namespace"]:
cmd += ['-n', module.params["namespace"]]
cmd += shlex.split(module.params["cmd"]) + module.params["extra_args"]
failed = True
try:
cmd_result = subprocess.check_output(list(cmd), stderr=subprocess.STDOUT)
failed = False
except subprocess.CalledProcessError as exc:
cmd_result = '[rc {}] {}\n{}'.format(exc.returncode, ' '.join(exc.cmd), exc.output)
except OSError as exc:
# we get this when 'oc' is not there
cmd_result = str(exc)
module.exit_json(
changed=False,
failed=failed,
result=cmd_result,
)
if __name__ == '__main__':
main()
| 29.486486 | 91 | 0.636114 |
c7300e0d4920ea9bf3233fb48ec01feb851a08ad | 4,125 | py | Python | code/network/__init__.py | michalochman/complex-networks | 49337376e32fac253d8de9919d5acd00a9b566bb | [
"MIT"
] | null | null | null | code/network/__init__.py | michalochman/complex-networks | 49337376e32fac253d8de9919d5acd00a9b566bb | [
"MIT"
] | null | null | null | code/network/__init__.py | michalochman/complex-networks | 49337376e32fac253d8de9919d5acd00a9b566bb | [
"MIT"
] | null | null | null | import fractions
| 42.96875 | 109 | 0.615758 |
c730483de9837a25bc1e629091819a776f0b1ff3 | 3,055 | py | Python | invoke_ansible.py | samvarankashyap/ansible_api_usage | d03c67b4606d2e101ef7341bd31161b4db39cd5b | [
"Apache-2.0"
] | null | null | null | invoke_ansible.py | samvarankashyap/ansible_api_usage | d03c67b4606d2e101ef7341bd31161b4db39cd5b | [
"Apache-2.0"
] | null | null | null | invoke_ansible.py | samvarankashyap/ansible_api_usage | d03c67b4606d2e101ef7341bd31161b4db39cd5b | [
"Apache-2.0"
] | null | null | null | import ansible
import pprint
from ansible import utils
from jinja2 import Environment, PackageLoader
from collections import namedtuple
from ansible.parsing.dataloader import DataLoader
from ansible.vars import VariableManager
from ansible.inventory import Inventory
from ansible.executor.playbook_executor import PlaybookExecutor
from ansible.plugins.callback import CallbackBase
from callbacks import PlaybookCallback
def invoke_ansible_playbook(module_path, e_vars, playbook_path="site.yml", console=True):
""" Invokes playbook """
loader = DataLoader()
variable_manager = VariableManager()
variable_manager.extra_vars = e_vars
inventory = Inventory(loader=loader,
variable_manager=variable_manager,
host_list=['localhost'])
passwords = {}
utils.VERBOSITY = 4
Options = namedtuple('Options', ['listtags',
'listtasks',
'listhosts',
'syntax',
'connection',
'module_path',
'forks',
'remote_user',
'private_key_file',
'ssh_common_args',
'ssh_extra_args',
'sftp_extra_args',
'scp_extra_args',
'become',
'become_method',
'become_user',
'verbosity',
'check'])
options = Options(listtags=False,
listtasks=False,
listhosts=False,
syntax=False,
connection='ssh',
module_path=module_path,
forks=100,
remote_user='root',
private_key_file=None,
ssh_common_args=None,
ssh_extra_args=None,
sftp_extra_args=None,
scp_extra_args=None,
become=False,
become_method=None,
become_user='root',
verbosity=utils.VERBOSITY,
check=False)
pbex = PlaybookExecutor(playbooks=[playbook_path],
inventory=inventory,
variable_manager=variable_manager,
loader=loader,
options=options,
passwords=passwords)
if not console:
cb = PlaybookCallback()
pbex._tqm._stdout_callback = cb
return_code = pbex.run()
results = cb.results
else:
results = pbex.run()
return results
| 40.197368 | 89 | 0.466776 |
c733c87e85c1c4f5626af759efe7bb3290f415c6 | 2,336 | py | Python | bin/python/csv2es.py | reid-wagner/proteomics-pipelines | 2214c2ad4c14fabcb50a3c0800e9d383ce73df3d | [
"MIT"
] | 2 | 2018-09-06T14:05:59.000Z | 2022-02-18T10:09:06.000Z | bin/python/csv2es.py | reid-wagner/proteomics-pipelines | 2214c2ad4c14fabcb50a3c0800e9d383ce73df3d | [
"MIT"
] | 7 | 2018-09-30T00:49:04.000Z | 2022-01-27T07:55:26.000Z | bin/python/csv2es.py | reid-wagner/proteomics-pipelines | 2214c2ad4c14fabcb50a3c0800e9d383ce73df3d | [
"MIT"
] | 3 | 2019-10-29T12:20:45.000Z | 2021-10-06T14:38:43.000Z | #!/usr/bin/env python3
import itertools
import string
from elasticsearch import Elasticsearch,helpers
import sys
import os
from glob import glob
import pandas as pd
import json
host = sys.argv[1]
port = int(sys.argv[2])
alias = sys.argv[3]
print(host)
print(port)
print(alias)
es = Elasticsearch([{'host': host, 'port': port}])
# create our test index
# Get all csv files in /root/data
files = [y for x in os.walk('/root/data') for y in glob(os.path.join(x[0], '*.csv'))]
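# clean_field was stripped from this record; a minimal sketch (assumption):
# keep letters and digits, replace anything else with '_', so names are safe
# as Elasticsearch index/field names (this also explains `import string`).
def clean_field(name):
    valid = set(string.ascii_letters + string.digits)
    return ''.join(c if c in valid else '_' for c in str(name))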
count = 0
es.indices.delete(index=alias + '*', ignore=[400, 404])
indices = []
for file in files:
data = pd.read_csv(file, sep=None, engine='python')
index = alias + '_'.join(file.split('/'))
index = clean_field(index).lower().split('_csv')[0]
indices.append(index)
es.indices.create(index)
for col in data.columns:
if col.startswith('Unnamed'):
del data[col]
else:
data.rename(columns= { col : clean_field(col) },inplace=True )
data = data.reset_index() # Make sure there is no duplicate indexing
data.rename(columns={'index':'row'},inplace =True)
data['File'] = file
data['_id'] = data['File'] + '.{}.'.format(str(count)) + data.reset_index()['index'].apply(str)
data['_type'] = "document"
data['_index'] = index
records = data.to_json(orient='records')
records = json.loads(records)
helpers.bulk(es, records, chunk_size=100)
count += 1
print(es.count(index=index))
# Create an index table in elasticsearch to locate the files
indices_table = pd.DataFrame()
indices_table['Index'] = pd.Series(indices)
indices_table['File'] = pd.Series(files)
indices_table['Alias'] = alias
indices_table['_id'] = indices_table['Alias'] + '.' + indices_table['File']
indices_table['_type'] = "document"
indices_table['_index'] = alias + '_indices'
es.indices.create(alias + '_indices')
records = indices_table.to_json(orient='records')
records = json.loads(records)
helpers.bulk(es, records, chunk_size=100)
print(es.count(index=alias + '_indices'))
| 28.144578 | 99 | 0.644264 |
c7345842917a4fbe78846b66040cbcd50b2fa112 | 45 | py | Python | main/src/preparation/parsers/tree-sitter-python/examples/crlf-line-endings.py | jason424217/Artificial-Code-Gen | a6e2c097c5ffe8cb0929e6703035b526f477e514 | [
"MIT"
] | null | null | null | main/src/preparation/parsers/tree-sitter-python/examples/crlf-line-endings.py | jason424217/Artificial-Code-Gen | a6e2c097c5ffe8cb0929e6703035b526f477e514 | [
"MIT"
] | null | null | null | main/src/preparation/parsers/tree-sitter-python/examples/crlf-line-endings.py | jason424217/Artificial-Code-Gen | a6e2c097c5ffe8cb0929e6703035b526f477e514 | [
"MIT"
] | null | null | null | print a
if b:
if c:
d
e
| 6.428571 | 9 | 0.311111 |
c7349ec685ce1af0110178abaaf2eb1878a5bd71 | 106 | py | Python | Src/main.py | DukeA/DAT02X-19-03-MachineLearning-Starcraft2 | ade31deb4cf6cacd0c411c39310aeb1300561936 | [
"MIT"
] | null | null | null | Src/main.py | DukeA/DAT02X-19-03-MachineLearning-Starcraft2 | ade31deb4cf6cacd0c411c39310aeb1300561936 | [
"MIT"
] | null | null | null | Src/main.py | DukeA/DAT02X-19-03-MachineLearning-Starcraft2 | ade31deb4cf6cacd0c411c39310aeb1300561936 | [
"MIT"
] | null | null | null |
from absl import app
from mainLoop import main
if __name__ == '__main__':
app.run(main)
| 13.25 | 27 | 0.632075 |
c735745b02553eb9e477617ad9c63df5e4730b1c | 3,793 | py | Python | bos_sarcat_scraper/__main__.py | hysds/bos_sarcat_scraper | 1bf3612e7d8fad80c8704a909087be19cc3e1db2 | [
"Apache-2.0"
] | 1 | 2020-06-24T00:25:30.000Z | 2020-06-24T00:25:30.000Z | bos_sarcat_scraper/__main__.py | aria-jpl/bos_sarcat_scraper | 1bf3612e7d8fad80c8704a909087be19cc3e1db2 | [
"Apache-2.0"
] | null | null | null | bos_sarcat_scraper/__main__.py | aria-jpl/bos_sarcat_scraper | 1bf3612e7d8fad80c8704a909087be19cc3e1db2 | [
"Apache-2.0"
] | 1 | 2019-05-08T17:15:00.000Z | 2019-05-08T17:15:00.000Z | from __future__ import absolute_import
from builtins import str
from builtins import input
import sys
import argparse
from . import bosart_scrape
import datetime
import json
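# main() was stripped from this record; a minimal sketch (assumption): parse
# the CLI arguments and hand them to the scraper module. The argument names
# and the bosart_scrape.scrape() helper are hypothetical, not confirmed here.
def main():
    argparser = argparse.ArgumentParser(description="Scrape the BOS SarCat catalog")
    argparser.add_argument("--fromBeginning", action="store_true",
                           help="scrape the full catalog instead of recent entries")
    args = argparser.parse_args()
    results = bosart_scrape.scrape(from_beginning=args.fromBeginning)
    print(json.dumps(results, default=str))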
if __name__ == '__main__':
main()
| 39.926316 | 240 | 0.675718 |
c73803a506dad8312572b3d3624ec1ddd2985a19 | 23,181 | py | Python | vgm2electron.py | simondotm/vgm2electron | 38e340d2baeaa3e5722ac982c82e58fb9858f9d9 | [
"MIT"
] | 2 | 2021-03-08T13:55:02.000Z | 2021-05-02T12:50:38.000Z | vgm2electron.py | simondotm/vgm2electron | 38e340d2baeaa3e5722ac982c82e58fb9858f9d9 | [
"MIT"
] | null | null | null | vgm2electron.py | simondotm/vgm2electron | 38e340d2baeaa3e5722ac982c82e58fb9858f9d9 | [
"MIT"
] | null | null | null | #!/usr/bin/env python
# vgm2electron.py
# Tool for converting SN76489-based PSG VGM data to Acorn Electron
# By Simon Morris (https://github.com/simondotm/)
# See https://github.com/simondotm/vgm-packer
#
# Copyright (c) 2019 Simon Morris. All rights reserved.
#
# "MIT License":
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the Software
# is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,
# INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
# PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
import functools
import itertools
import struct
import sys
import time
import binascii
import math
import operator
import os
from modules.vgmparser import VgmStream
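# The VgmElectron class was stripped from this record; a minimal stub
# (assumption) declaring only the attributes and entry point the CLI code
# below touches, so the argument handling can be read end-to-end. The real
# conversion logic is not reproduced here.
class VgmElectron:
	VERBOSE = False
	ATTENTUATION_THRESHOLD1 = 4
	ATTENTUATION_THRESHOLD2 = 4
	ATTENTUATION_THRESHOLD3 = 4
	TRANSPOSE_OCTAVES1 = 0
	TRANSPOSE_OCTAVES2 = 0
	TRANSPOSE_OCTAVES3 = 0
	ENABLE_CHANNEL1 = True
	ENABLE_CHANNEL2 = True
	ENABLE_CHANNEL3 = True
	USE_TECHNIQUE = 2

	def process(self, src, dst):
		# Placeholder: parse src with VgmStream and emit the Electron VGM.
		raise NotImplementedError("conversion logic was not included in this record")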
#------------------------------------------------------------------------
# Main()
#------------------------------------------------------------------------
import argparse
# Determine if running as a script
if __name__ == '__main__':
print("Vgm2Electron.py : VGM music converter for Acorn Electron")
print("Written in 2019 by Simon Morris, https://github.com/simondotm/vgm-packer")
print("")
epilog_string = ""
parser = argparse.ArgumentParser(
formatter_class=argparse.RawDescriptionHelpFormatter,
epilog=epilog_string)
parser.add_argument("input", help="VGM source file (must be single SN76489 PSG format) [input]")
parser.add_argument("-o", "--output", metavar="<output>", help="write VGC file <output> (default is '[input].vgc')")
parser.add_argument("-v", "--verbose", help="Enable verbose mode", action="store_true")
parser.add_argument("-a", "--attenuation", default="444", metavar="<nnn>", help="Set attenuation threshold for each channel, 3 character string where each character is 0-F and 0 is loudest, 4 is 50%, F is quietest, default: 444")
parser.add_argument("-t", "--transpose", default="000", metavar="<nnn>", help="Set octaves to transpose for each channel, where 1 is +1 octave and F is -1 octave.")
parser.add_argument("-c", "--channels", default="123", metavar="[1][2][3]", help="Set which channels will be included in the conversion, default 123, which means all 3 channels")
parser.add_argument("-q", "--technique", default=2, metavar="<n>", help="Set which downmix technique to use 1 or 2.")
args = parser.parse_args()
src = args.input
dst = args.output
if dst == None:
dst = os.path.splitext(src)[0] + ".electron.vgm"
# attenuation options
attenuation = args.attenuation
if (len(attenuation) != 3):
print("ERROR: attenuation must be 3 values eg. '444'")
sys.exit()
#print("attenuation=" + attenuation)
VgmElectron.ATTENTUATION_THRESHOLD1 = int(attenuation[0],16)
VgmElectron.ATTENTUATION_THRESHOLD2 = int(attenuation[1],16)
VgmElectron.ATTENTUATION_THRESHOLD3 = int(attenuation[2],16)
# transpose options
transpose = args.transpose
if (len(transpose) != 3):
print("ERROR: transpose must be 3 values eg. '000'")
sys.exit()
#print("transpose=" + transpose)
# 0 1 2 3 4 5 6 7 8 9 a b c d e f
ttable = [0,1,2,3,4,5,6,7,-8,-7,-6,-5,-4,-3,-2,-1]
VgmElectron.TRANSPOSE_OCTAVES1 = ttable[ int(transpose[0],16) ]
VgmElectron.TRANSPOSE_OCTAVES2 = ttable[ int(transpose[1],16) ]
VgmElectron.TRANSPOSE_OCTAVES3 = ttable[ int(transpose[2],16) ]
# channel options
print(args.channels)
VgmElectron.ENABLE_CHANNEL1 = args.channels.find("1") >= 0
VgmElectron.ENABLE_CHANNEL2 = args.channels.find("2") >= 0
VgmElectron.ENABLE_CHANNEL3 = args.channels.find("3") >= 0
print("Channel 1: Enabled=" + str(VgmElectron.ENABLE_CHANNEL1) + ", Transpose=" + str(VgmElectron.TRANSPOSE_OCTAVES1) + ", Attenuation="+str(VgmElectron.ATTENTUATION_THRESHOLD1))
print("Channel 2: Enabled=" + str(VgmElectron.ENABLE_CHANNEL2) + ", Transpose=" + str(VgmElectron.TRANSPOSE_OCTAVES2) + ", Attenuation="+str(VgmElectron.ATTENTUATION_THRESHOLD2))
print("Channel 3: Enabled=" + str(VgmElectron.ENABLE_CHANNEL3) + ", Transpose=" + str(VgmElectron.TRANSPOSE_OCTAVES3) + ", Attenuation="+str(VgmElectron.ATTENTUATION_THRESHOLD3))
# technique
VgmElectron.USE_TECHNIQUE = int(args.technique)
print("Using technique " + str(VgmElectron.USE_TECHNIQUE))
# check for missing files
if not os.path.isfile(src):
print("ERROR: File '" + src + "' not found")
sys.exit()
packer = VgmElectron()
packer.VERBOSE = args.verbose
packer.process(src, dst)
| 31.798354 | 230 | 0.60981 |
c739f9c426d2980ab50d3acc428d5d636d5dd280 | 14,198 | py | Python | twitter_sent.py | rthorst/TwitterSentiment | b719feffbfed1dfe9028db0900b3158d19322284 | [
"MIT"
] | 6 | 2020-02-21T15:50:34.000Z | 2021-11-09T19:45:50.000Z | twitter_sent.py | rthorst/TwitterSentiment | b719feffbfed1dfe9028db0900b3158d19322284 | [
"MIT"
] | null | null | null | twitter_sent.py | rthorst/TwitterSentiment | b719feffbfed1dfe9028db0900b3158d19322284 | [
"MIT"
] | null | null | null | import webapp2
import tweepy
import json
import csv
import os
import statistics
import bokeh
from bokeh.io import show, output_file
from bokeh.plotting import figure
from bokeh.models import HoverTool, ColumnDataSource
from bokeh.embed import components, json_item
from bokeh.resources import INLINE
from bokeh.models.glyphs import Line, Text
import numpy as np
import random
import operator
from collections import Counter
from vaderSentiment.vaderSentiment import SentimentIntensityAnalyzer
"""
---AUTHOR: ---
Robert Thorstad
thorstadrs@gmail.com
---LICENSE: ---
MIT License.
---ABOUT: ---
Application to get the sentiment of recent tweets based on a keyword.
Example:
keyword -> "taco bell"
retrieve 300 recent tweets mentioning taco bell.
get average sentiment.
plot distribution of tweets and sentiment.
plot most informative words for this application.
This script runs based on google app server.
Expects Python 2.7
Depenencies need to be included in the lib/ directory (pip install -t lib [PACKAGE_NAME])
The main work is done by the MainPage class. The get() method runs the main pipeline of code and returns HTML as a
string.
Working online version: https://twittersentiment-247018.appspot.com/
"""
def get_tweets(keyword, max_tweets=200):
"""
Given a keyword as a string (e.g. "data science"), get recent tweets matching that string up to # max_tweets.
Return a list of tweets, represented as strings.
"""
# API keys.
consumer_key = "kNOG1klRMMUYbsjMuY5TKl4lE"
consumer_secret = "ieghv6WI1qseYly43A0Ra1MPksEw1i5Onma0txfEu5aHantD2v"
access_key = "3291622062-15ssVc0qpJXf2SFXbA7vgfl1Sooz4Ueo2DGPQVz"
access_secret = "9XJuzgGSVLnx93tq6NfRzMT07S6o2lzjmHfjt3VRlkqXn"
# Initialize tweepy API object and authorize using API key.
auth = tweepy.OAuthHandler(consumer_key, consumer_secret)
auth.set_access_token(access_key, access_secret)
api = tweepy.API(auth)
""" Get tweets."""
alltweets = []
for status in tweepy.Cursor(
api.search,
q=keyword + " -RT", # the -RT flag excludes retweets.
count=1000,
result_type="recent",
include_entities=True,
monitor_rate_limit=True,
wait_on_rate_limit=True,
lang="en",
).items():
# get text of the tweet, encoding as utf-8.
text = str(status.text.encode("utf-8"))
# add to the data structure, alltweets, holding the tweets.
alltweets.append(text)
# if we've reached max_tweets, break.
if len(alltweets) >= max_tweets:
break
return alltweets
def plot_tweets(tweets, sentiment_scores):
"""
Create a histogram-style barplot of tweets and their sentiment.
Return a bokeh plot object, expressed as a tuple of (resources, script, div).
Where :
resources: some CSS, etc. that goes in the head of the webpage for styling the plot.
script: javascript for the plot to function. expressed as string.
div: html div container for the plot. expressed as string.
"""
# Sort tweets from negative to positive.
# This step is not strictly necessary, but makes it easier to see the overall shape of the data.
sorted_indices = np.argsort(sentiment_scores)
sentiment_scores = np.array(sentiment_scores)[sorted_indices]
tweets = np.array(tweets)[sorted_indices]
# Express the data as a bokeh data source object.
source = ColumnDataSource(data={
"text": tweets,
"sentiment": sentiment_scores,
"x": np.arange(len(tweets)),
})
"""
Create plot.
"""
# Create plot object.
width = 0.9
p = figure(x_axis_label="Tweet", y_axis_label="Sentiment (0 = Neutral)")
p.vbar(source=source, x="x", top="sentiment", width=width)
# Add hover tool, allowing mouseover to view text and sentiment.
hover = HoverTool(
tooltips=[
("text", "@text"),
("sentiment", "@sentiment")
],
formatters={
"text": "printf",
"sentiment": "printf"
},
mode="vline"
)
p.add_tools(hover)
"""
Format plot.
"""
# axis font size
p.xaxis.axis_label_text_font_size = "15pt"
p.yaxis.axis_label_text_font_size = "15pt"
# remove tick marks from axes
p.xaxis.major_tick_line_color = None
p.xaxis.minor_tick_line_color = None
p.yaxis.major_tick_line_color = None
p.yaxis.minor_tick_line_color = None
# adjust plot width, height
scale = 1.5
p.plot_height = int(250 * scale)
p.plot_width = int(450 * scale)
# remove toolbar (e.g. move, resize, etc) from right of plot.
p.toolbar.logo = None
p.toolbar_location = None
# remove gridlines
p.xgrid.visible = False
p.ygrid.visible = False
# remove x axis tick labels (done by setting label fontsize to 0 pt)
p.xaxis.major_label_text_font_size = '0pt'
"""
Export plot
"""
# Create resources string, which is CSS, etc. that goes in the head of
resources = INLINE.render()
# Get javascript (script) and HTML div (div) for the plot.
script, div = components(p)
return (resources, script, div)
def plot_reason(tweets, sentiment_scores):
"""
Plot the top words that lead us to the classification as positive or negative.
Return:
script : javascript for the plot, expressed as string.
div : html container for the plot, expressed as string.
NOTE: requires the shared resources attribute from plot_tweets() in the HTML header.
"""
"""
Calculate the sentiment of each individual token in the tweets.
"""
# list tokens, keeping only unique tokens (e.g. remove repeated words).
all_toks = []
for tweet in tweets:
toks = tweet.lower().split()
all_toks.extend(toks)
all_toks = [tok for tok in set(all_toks)] # remove duplicates.
# calculate sentiment of each token.
sm = VaderSentimentModel()
toks_sentiment = [sm.classify_sentiment(tok) for tok in all_toks]
"""
sort tokens by sentiment.
if overall valence is negative, sort negative to postitive.
if overall valence is positive, sort positive to negative.
thus, in any case, the earliest elements in the list are the most informative words.
"""
nwords = 20
# negative? sort neg -> positive.
if np.mean(sentiment_scores) < 0:
sorted_indices = np.argsort(toks_sentiment)
# else (positive)? sort positive -> negative
else:
sorted_indices = np.argsort(toks_sentiment)[::-1]
# toks_to_plot: shape (nwords, ) list of informative tokens.
# sentiment_to_plot: shape (nwords, ) list of sentiment of these tokens.
toks_to_plot = np.array(all_toks)[sorted_indices][:nwords]
sentiment_to_plot = np.array(toks_sentiment)[sorted_indices][:nwords]
# convert all sentiment scores to positive values.
# this is for DISPLAY only, to make all plots go from left to right.
# we still retain the correct tokens and sorting order.
sentiment_to_plot = np.array([abs(v) for v in sentiment_to_plot])
"""
Set up plot.
- create data source object.
- define formatting variables.
"""
text_offset = 0.1
source = ColumnDataSource(data={
"token": toks_to_plot,
"sentiment": sentiment_to_plot,
"x": np.arange(len(toks_to_plot))[::-1],
"label_x": sentiment_to_plot + text_offset
})
"""
Make plot.
"""
# Create initial plot.
width = 0.9
xrange = [0, max(sentiment_to_plot) + 1]
p2 = figure(x_axis_label="Sentiment", y_axis_label="Word", x_range=xrange)
p2.hbar(source=source, y="x", right="sentiment", height=width)
"""
Format plot.
"""
# Annotate each bar with the word being represented.
glyph = Text(x="label_x", y="x", text="token")
p2.add_glyph(source, glyph)
# Axis labels.
p2.xaxis.axis_label_text_font_size = "15pt"
p2.yaxis.axis_label_text_font_size = "15pt"
# Remove ticks.
p2.xaxis.major_tick_line_color = None
p2.xaxis.minor_tick_line_color = None
p2.yaxis.major_tick_line_color = None
p2.yaxis.minor_tick_line_color = None
# Remove y axis tick labels.
p2.yaxis.major_label_text_font_size = '0pt'
# Plot width, height.
scale = 1.5
p2.plot_height = int(250 * scale)
p2.plot_width = int(250 * scale)
# remove toolbar (e.g. move, resize, etc) from right of plot.
p2.toolbar.logo = None
p2.toolbar_location = None
# remove gridlines
p2.xgrid.visible = False
p2.ygrid.visible = False
# remove x axis tick labels (set font to 0pt)
p2.xaxis.major_label_text_font_size = '0pt'
# get bokeh component for plot 2.
script2, div2 = components(p2)
return (script2, div2)
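# The MainPage handler was stripped from this record; a minimal sketch
# (assumption) of the pipeline the module docstring describes: fetch tweets
# for a keyword, score them, render both plots, and return the page HTML.
class MainPage(webapp2.RequestHandler):

    def get(self):
        keyword = self.request.get("keyword", "taco bell")
        tweets = get_tweets(keyword)
        sm = VaderSentimentModel()
        sentiment_scores = [sm.classify_sentiment(t) for t in tweets]
        resources, script, div = plot_tweets(tweets, sentiment_scores)
        script2, div2 = plot_reason(tweets, sentiment_scores)
        html = "<html><head>%s</head><body>%s%s%s%s</body></html>" % (
            resources, div, script, div2, script2)
        self.response.write(html)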
# Run application.
routes = [('/', MainPage)]
my_app = webapp2.WSGIApplication(routes, debug=True) | 33.885442 | 120 | 0.623257 |
c73a657eabaaa5580cd95fd8f430b160b1e8e216 | 8,956 | py | Python | tests/testcgatools.py | ereide/pyga-camcal | fd25748ddb11c5b05ef24a2deca2689e0d899875 | [
"MIT"
] | 5 | 2018-05-22T09:11:31.000Z | 2022-03-11T02:32:01.000Z | tests/testcgatools.py | ereide/pyga-camcal | fd25748ddb11c5b05ef24a2deca2689e0d899875 | [
"MIT"
] | null | null | null | tests/testcgatools.py | ereide/pyga-camcal | fd25748ddb11c5b05ef24a2deca2689e0d899875 | [
"MIT"
] | null | null | null | import unittest
import clifford as cl
from clifford import g3c
from numpy import pi, e
import numpy as np
from scipy.sparse.linalg.matfuncs import _sinch as sinch
from clifford import MultiVector
from pygacal.common.cgatools import ( Sandwich, Dilator, Translator, Reflector,
inversion, Rotor, Transversor, I3, I5,
VectorEquality, Distance, ga_log, ga_exp, MVEqual, Meet,
extractBivectorParameters_complicated, ga_exp_complicated, one)
from pygacal.geometry import createRandomBivector, createRandomVector, createRandomPoints
from pygacal.geometry.lines import createLine
from pygacal.geometry.planes import createPlane
layout = g3c.layout
locals().update(g3c.blades)
ep, en, up, down, homo, E0, ninf, no = (g3c.stuff["ep"], g3c.stuff["en"],
g3c.stuff["up"], g3c.stuff["down"], g3c.stuff["homo"],
g3c.stuff["E0"], g3c.stuff["einf"], -g3c.stuff["eo"])
np.random.seed(2512)
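# The unittest.TestCase classes were stripped from this record; a minimal
# sketch (assumption) so that unittest.main() below has something to run.
# VectorEquality is assumed to compare two multivectors for (approximate)
# equality -- its exact signature was not included in this record.
class TestConformalRoundTrip(unittest.TestCase):
    def test_up_down_round_trip(self):
        v = createRandomVector()
        self.assertTrue(VectorEquality(down(up(v)), v))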
if __name__ == "__main__":
unittest.main()
| 33.17037 | 125 | 0.546226 |
c73c3d02ecdfac6eb2c791e1853c9f4bcf52f552 | 6,909 | py | Python | router/posts.py | DiegoLing33/prestij.xyz-api | 69a11a2c93dd98975f9becbc4b8f596e4941a05f | [
"MIT"
] | null | null | null | router/posts.py | DiegoLing33/prestij.xyz-api | 69a11a2c93dd98975f9becbc4b8f596e4941a05f | [
"MIT"
] | null | null | null | router/posts.py | DiegoLing33/prestij.xyz-api | 69a11a2c93dd98975f9becbc4b8f596e4941a05f | [
"MIT"
] | null | null | null | #
#
#
#
#
#
#
# Developed by Yakov V. Panov (C) Ling Black 2020
# @site http://ling.black
from typing import List
from fastapi import APIRouter, Depends, HTTPException
from pydantic import BaseModel
from core.response import RequestLimit
from database import get_db, DatabaseUtils
from database.wow.models import PostModel, PostCommentsModel
from wow.interface.entity import PostCategory, Post, PostCategoryCreate, PostCreate, PostLikeCreate, PostCommentCreate
from wow.utils.posts import PostsUtils
from wow.utils.users import BlizzardUsersUtils
router = APIRouter()
# -----------------------------------
# CATEGORIES
# -----------------------------------
# -----------------------------------
# POSTS
# -----------------------------------
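# The route handlers were stripped from this record; one minimal sketch
# (assumption) of the pattern the imports above suggest. The path, the
# SQLAlchemy-style query and the response-model wiring are illustrative,
# not the actual prestij.xyz API.
@router.get('/list', response_model=List[Post])
def posts_list(db=Depends(get_db)):
    """Hypothetical handler: return every post (Post must allow ORM mode)."""
    return db.query(PostModel).all()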
| 25.876404 | 118 | 0.568823 |
c73c5c8e9b60dd28827b865f9cd0c2682cc0cd16 | 3,216 | py | Python | toontown/catalog/CatalogChatBalloon.py | CrankySupertoon01/Toontown-2 | 60893d104528a8e7eb4aced5d0015f22e203466d | [
"MIT"
] | 1 | 2021-02-13T22:40:50.000Z | 2021-02-13T22:40:50.000Z | toontown/catalog/CatalogChatBalloon.py | CrankySupertoonArchive/Toontown-2 | 60893d104528a8e7eb4aced5d0015f22e203466d | [
"MIT"
] | 1 | 2018-07-28T20:07:04.000Z | 2018-07-30T18:28:34.000Z | toontown/catalog/CatalogChatBalloon.py | CrankySupertoonArchive/Toontown-2 | 60893d104528a8e7eb4aced5d0015f22e203466d | [
"MIT"
] | 2 | 2019-12-02T01:39:10.000Z | 2021-02-13T22:41:00.000Z | from pandac.PandaModules import *
| 34.212766 | 81 | 0.589552 |
c73c9cd86a4a585bb09b4cbd3f15cf16c3ddc42d | 831 | py | Python | TTS/vocoder/tf/utils/io.py | mightmay/Mien-TTS | 8a22ff0a79558b3cf4981ce1b63f4d1485ea6338 | [
"MIT"
] | null | null | null | TTS/vocoder/tf/utils/io.py | mightmay/Mien-TTS | 8a22ff0a79558b3cf4981ce1b63f4d1485ea6338 | [
"MIT"
] | null | null | null | TTS/vocoder/tf/utils/io.py | mightmay/Mien-TTS | 8a22ff0a79558b3cf4981ce1b63f4d1485ea6338 | [
"MIT"
] | 1 | 2021-04-28T17:30:03.000Z | 2021-04-28T17:30:03.000Z | import datetime
import pickle
import tensorflow as tf
def save_checkpoint(model, current_step, epoch, output_path, **kwargs):
""" Save TF Vocoder model """
state = {
'model': model.weights,
'step': current_step,
'epoch': epoch,
'date': datetime.date.today().strftime("%B %d, %Y"),
}
state.update(kwargs)
pickle.dump(state, open(output_path, 'wb'))
def load_checkpoint(model, checkpoint_path):
""" Load TF Vocoder model """
checkpoint = pickle.load(open(checkpoint_path, 'rb'))
chkp_var_dict = {var.name: var.numpy() for var in checkpoint['model']}
tf_vars = model.weights
for tf_var in tf_vars:
layer_name = tf_var.name
chkp_var_value = chkp_var_dict[layer_name]
tf.keras.backend.set_value(tf_var, chkp_var_value)
return model
| 29.678571 | 74 | 0.65704 |
c73caaa0e2719e60ad785aecaaee84cf63518c02 | 1,497 | py | Python | tests/test_path_choice.py | jataware/flee | 67c00c4572e71dd2bbfb390d7d7ede13ffb9594e | [
"BSD-3-Clause"
] | 3 | 2021-05-24T14:07:48.000Z | 2022-01-10T03:20:36.000Z | tests/test_path_choice.py | jataware/flee | 67c00c4572e71dd2bbfb390d7d7ede13ffb9594e | [
"BSD-3-Clause"
] | 15 | 2020-06-05T11:42:23.000Z | 2022-03-09T20:17:29.000Z | tests/test_path_choice.py | jataware/flee | 67c00c4572e71dd2bbfb390d7d7ede13ffb9594e | [
"BSD-3-Clause"
] | 3 | 2020-05-29T15:10:28.000Z | 2022-03-09T19:51:41.000Z | from flee import flee
"""
Generation 1 code. Incorporates only distance, travel always takes one day.
"""
if __name__ == "__main__":
test_path_choice()
| 33.266667 | 75 | 0.663327 |
c73dae2399d233b79b4e4ba84ebee8f7d71a6c22 | 10,463 | py | Python | archive/old_plots/plot_supplemental_divergence_correlations.py | garudlab/mother_infant | 98a27c83bf5ece9497d5a030c6c9396a8c514781 | [
"BSD-2-Clause"
] | 2 | 2020-08-09T06:19:11.000Z | 2021-08-18T17:12:23.000Z | archive/old_plots/plot_supplemental_divergence_correlations.py | garudlab/mother_infant | 98a27c83bf5ece9497d5a030c6c9396a8c514781 | [
"BSD-2-Clause"
] | null | null | null | archive/old_plots/plot_supplemental_divergence_correlations.py | garudlab/mother_infant | 98a27c83bf5ece9497d5a030c6c9396a8c514781 | [
"BSD-2-Clause"
] | 8 | 2019-02-20T22:21:55.000Z | 2021-02-13T00:55:40.000Z | import matplotlib
matplotlib.use('Agg')
import config
import parse_midas_data
import parse_HMP_data
import os.path
import pylab
import sys
import numpy
import diversity_utils
import gene_diversity_utils
import calculate_substitution_rates
import stats_utils
import matplotlib.colors as colors
import matplotlib.cm as cmx
from math import log10,ceil
import matplotlib as mpl
import matplotlib.pyplot as plt
import matplotlib.gridspec as gridspec
from numpy.random import randint
from scipy.cluster.hierarchy import dendrogram, linkage
from scipy.cluster.hierarchy import cophenet
from scipy.cluster.hierarchy import fcluster
from scipy.stats import gaussian_kde
mpl.rcParams['font.size'] = 6
mpl.rcParams['lines.linewidth'] = 0.5
mpl.rcParams['legend.frameon'] = False
mpl.rcParams['legend.fontsize'] = 'small'
################################################################################
#
# Standard header to read in argument information
#
################################################################################
import argparse
parser = argparse.ArgumentParser()
parser.add_argument("--debug", help="Loads only a subset of SNPs for speed", action="store_true")
parser.add_argument("--chunk-size", type=int, help="max number of records to load", default=1000000000)
args = parser.parse_args()
debug = args.debug
chunk_size = args.chunk_size
################################################################################
good_species_list = ['Bacteroides_vulgatus_57955', 'Bacteroides_uniformis_57318', 'Alistipes_putredinis_61533']
####################################################
#
# Set up Figure (3 panels, arranged in 1x3 grid)
#
####################################################
pylab.figure(1,figsize=(7,1.5))
fig = pylab.gcf()
# make three panels panels
outer_grid = gridspec.GridSpec(1,3,width_ratios=[1,1,1],wspace=0.1)
#######
#
# SNP divergence vs Gene divergence in B. vulgatus
#
#######
gene_axis = plt.Subplot(fig, outer_grid[0])
fig.add_subplot(gene_axis)
gene_axis.set_ylabel('SNP divergence\n %s' % (good_species_list[0]))
gene_axis.set_xlabel('Gene divergence\n %s' % (good_species_list[0]))
gene_axis.set_ylim([1e-06,1e-01])
#gene_axis.set_xlim([1e-02,1])
gene_axis.spines['top'].set_visible(False)
gene_axis.spines['right'].set_visible(False)
gene_axis.get_xaxis().tick_bottom()
gene_axis.get_yaxis().tick_left()
#######
#
# SNP divergence (B vulgatus) vs SNP divergence (A putredinis)
#
#######
species_axis_1 = plt.Subplot(fig, outer_grid[1])
fig.add_subplot(species_axis_1)
species_axis_1.set_xlabel('SNP divergence\n %s' % (good_species_list[1]))
species_axis_1.set_ylim([1e-06,1e-01])
species_axis_1.set_xlim([1e-06,1e-01])
species_axis_1.spines['top'].set_visible(False)
species_axis_1.spines['right'].set_visible(False)
species_axis_1.get_xaxis().tick_bottom()
species_axis_1.get_yaxis().tick_left()
#######
#
# SNP divergence (B vulgatus) vs SNP divergence (A putredinis)
#
#######
species_axis_2 = plt.Subplot(fig, outer_grid[2])
fig.add_subplot(species_axis_2)
species_axis_2.set_xlabel('SNP divergence\n %s' % (good_species_list[2]))
species_axis_2.set_ylim([1e-06,1e-01])
species_axis_2.set_xlim([1e-06,1e-01])
species_axis_2.spines['top'].set_visible(False)
species_axis_2.spines['right'].set_visible(False)
species_axis_2.get_xaxis().tick_bottom()
species_axis_2.get_yaxis().tick_left()
########
#
# Now do calculation and plot figures
#
########
sys.stderr.write("Loading sample metadata...\n")
subject_sample_map = parse_HMP_data.parse_subject_sample_map()
sample_order_map = parse_HMP_data.parse_sample_order_map()
sys.stderr.write("Done!\n")
snp_divergence_map = {species_name: {} for species_name in good_species_list}
gene_divergence_map = {species_name: {} for species_name in good_species_list}
for species_name in good_species_list:
sys.stderr.write("Loading haploid samples...\n")
snp_samples = diversity_utils.calculate_haploid_samples(species_name, debug=debug)
sys.stderr.write("Calculating unique samples...\n")
# Only consider one sample per person
snp_samples = snp_samples[parse_midas_data.calculate_unique_samples(subject_sample_map, sample_list=snp_samples)]
sys.stderr.write("Loading pre-computed substitution rates for %s...\n" % species_name)
substitution_rate_map = calculate_substitution_rates.load_substitution_rate_map(species_name)
sys.stderr.write("Calculating snp matrix...\n")
dummy_samples, snp_difference_matrix, snp_opportunity_matrix = calculate_substitution_rates.calculate_matrices_from_substitution_rate_map(substitution_rate_map, 'core', allowed_samples=snp_samples)
snp_samples = dummy_samples
sys.stderr.write("Done!\n")
sys.stderr.write("Calculating gene matrix...\n")
gene_samples, gene_difference_matrix, gene_opportunity_matrix = calculate_substitution_rates.calculate_matrices_from_substitution_rate_map(substitution_rate_map, 'genes', allowed_samples=snp_samples)
snp_samples = gene_samples
sys.stderr.write("Done!\n")
# Focus on the subset of samples that have sufficient gene depth and snp depth
desired_samples = gene_samples
# Figure out which pairs of indices in desired_samples belong to diff subjects
desired_same_sample_idxs, desired_same_subject_idxs, desired_diff_subject_idxs = parse_midas_data.calculate_subject_pairs( subject_sample_map, desired_samples)
# Turn these into indices for snp and gene matrices
snp_sample_idx_map = parse_midas_data.calculate_sample_idx_map(desired_samples, snp_samples)
gene_sample_idx_map = parse_midas_data.calculate_sample_idx_map(desired_samples, gene_samples)
same_subject_snp_idxs = parse_midas_data.apply_sample_index_map_to_indices(snp_sample_idx_map, desired_same_subject_idxs)
same_subject_gene_idxs = parse_midas_data.apply_sample_index_map_to_indices(gene_sample_idx_map, desired_same_subject_idxs)
diff_subject_snp_idxs = parse_midas_data.apply_sample_index_map_to_indices(snp_sample_idx_map, desired_diff_subject_idxs)
diff_subject_gene_idxs = parse_midas_data.apply_sample_index_map_to_indices(gene_sample_idx_map, desired_diff_subject_idxs)
for sample_pair_idx in xrange(0,len(diff_subject_snp_idxs[0])):
snp_i = diff_subject_snp_idxs[0][sample_pair_idx]
snp_j = diff_subject_snp_idxs[1][sample_pair_idx]
gene_i = diff_subject_gene_idxs[0][sample_pair_idx]
gene_j = diff_subject_gene_idxs[1][sample_pair_idx]
sample_i = desired_samples[gene_i]
sample_j = desired_samples[gene_j]
# This will serve as a key in snp_divergence_map
sample_pair = frozenset([sample_i,sample_j])
# Focus on pairs of samples with sufficient coverage
if snp_opportunity_matrix[snp_i,snp_j]>0:
snp_d = snp_difference_matrix[snp_i,snp_j]*1.0/snp_opportunity_matrix[snp_i,snp_j]
snp_divergence_map[species_name][sample_pair] = snp_d
if gene_opportunity_matrix[gene_i, gene_j]>0:
gene_d = gene_difference_matrix[gene_i, gene_j]*1.0/gene_opportunity_matrix[gene_i, gene_j]
gene_divergence_map[species_name][sample_pair] = gene_d
#################
#
# Plot figures!
#
#################
# First calculate SNP vs gene divergence in B. vulgatus
species_name = good_species_list[0]
snp_divergences = []
gene_divergences = []
# Loop over sample pairs that are in both snp_divergence_map and gene_divergence_map
for sample_pair in (set(snp_divergence_map[species_name].keys()) & set(gene_divergence_map[species_name].keys()) ):
snp_divergences.append( snp_divergence_map[species_name][sample_pair] )
gene_divergences.append( gene_divergence_map[species_name][sample_pair] )
snp_divergences = numpy.array(snp_divergences)
gene_divergences = numpy.array(gene_divergences)
# Null expectation (medians line up)
median_ratio = numpy.median(snp_divergences)/numpy.median(gene_divergences)
gene_axis.loglog([1e-02,1],[1e-02*median_ratio,1*median_ratio],'k-',linewidth=0.25)
gene_axis.loglog(gene_divergences, snp_divergences, 'r.', markersize=2,alpha=0.5,markeredgewidth=0, rasterized=True)
# Then SNP divergence between two species
species_1 = good_species_list[0]
species_2 = good_species_list[1]
snp_divergences_1 = []
snp_divergences_2 = []
# Loop over sample pairs that are in both snp_divergence_map and gene_divergence_map
for sample_pair in (set(snp_divergence_map[species_1].keys()) & set(snp_divergence_map[species_2].keys()) ):
snp_divergences_1.append( snp_divergence_map[species_1][sample_pair] )
snp_divergences_2.append( snp_divergence_map[species_2][sample_pair] )
snp_divergences_1 = numpy.array(snp_divergences_1)
snp_divergences_2 = numpy.array(snp_divergences_2)
# Null expectation (medians line up)
median_ratio = numpy.median(snp_divergences_1)/numpy.median(snp_divergences_2)
species_axis_1.loglog([1e-06,1e-01],[1e-06*median_ratio,1e-01*median_ratio],'k-',linewidth=0.25)
# Observed values
species_axis_1.loglog(snp_divergences_2, snp_divergences_1, 'r.', markersize=2,alpha=0.5,markeredgewidth=0, rasterized=True)
# Then SNP divergence between other two species
species_1 = good_species_list[0]
species_2 = good_species_list[2]
snp_divergences_1 = []
snp_divergences_2 = []
# Loop over sample pairs that are in both snp_divergence_map and gene_divergence_map
for sample_pair in (set(snp_divergence_map[species_1].keys()) & set(snp_divergence_map[species_2].keys()) ):
snp_divergences_1.append( snp_divergence_map[species_1][sample_pair] )
snp_divergences_2.append( snp_divergence_map[species_2][sample_pair] )
snp_divergences_1 = numpy.array(snp_divergences_1)
snp_divergences_2 = numpy.array(snp_divergences_2)
# Null expectation (medians line up)
median_ratio = numpy.median(snp_divergences_1)/numpy.median(snp_divergences_2)
species_axis_2.loglog([1e-06,1e-01],[1e-06*median_ratio,1e-01*median_ratio],'k-',linewidth=0.25)
species_axis_2.loglog(snp_divergences_2, snp_divergences_1, 'r.', markersize=2,alpha=0.5,markeredgewidth=0,rasterized=True)
# Since y-axes are shared, do not duplicate ticklables
species_axis_1.set_yticklabels([])
species_axis_2.set_yticklabels([])
sys.stderr.write("Saving figure...\t")
fig.savefig('%s/supplemental_divergence_correlations.pdf' % (parse_midas_data.analysis_directory),bbox_inches='tight',dpi=600)
sys.stderr.write("Done!\n")
| 38.047273 | 203 | 0.750454 |
c73e6e9b07e0e5afa67a521f170e1521081ec4b3 | 34,246 | py | Python | multivis/plotFeatures.py | brettChapman/cimcb_vis | b373ed426b24ece1dcc20febd7c8023921b024d6 | ["MIT"] | 1 | 2021-06-27T23:52:40.000Z | 2021-06-27T23:52:40.000Z | multivis/plotFeatures.py | brettChapman/cimcb_vis | b373ed426b24ece1dcc20febd7c8023921b024d6 | ["MIT"] | null | null | null | multivis/plotFeatures.py | brettChapman/cimcb_vis | b373ed426b24ece1dcc20febd7c8023921b024d6 | ["MIT"] | 2 | 2021-06-27T23:53:03.000Z | 2021-07-12T12:59:23.000Z |
import sys
import copy
import matplotlib
import matplotlib.pyplot as plt
import seaborn as sns
from collections import Counter
from .utils import *
import numpy as np
import pandas as pd
 | 52.605223 | 586 | 0.5464 |
c73eca01ba5620a706110aaabb7ea66ae754f7f0 | 1,183 | py | Python | core/data/DataWriter.py | berendkleinhaneveld/Registrationshop | 0d6f3ee5324865cdcb419369139f37c39dfe9a1c | ["MIT"] | 25 | 2015-11-08T16:36:54.000Z | 2022-01-20T16:03:28.000Z | core/data/DataWriter.py | berendkleinhaneveld/Registrationshop | 0d6f3ee5324865cdcb419369139f37c39dfe9a1c | ["MIT"] | 2 | 2016-12-01T23:13:08.000Z | 2017-07-25T02:40:49.000Z | core/data/DataWriter.py | berendkleinhaneveld/Registrationshop | 0d6f3ee5324865cdcb419369139f37c39dfe9a1c | ["MIT"] | 10 | 2016-07-05T14:39:16.000Z | 2022-01-01T02:05:55.000Z |
"""
DataWriter.py
"""
from DataController import DataController
from DataReader import DataReader
from vtk import vtkMetaImageWriter
from vtk import vtkXMLImageDataWriter
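
# The DataWriter class body itself is truncated in this dump. As a hedged
# illustration only (not Registrationshop's actual implementation), writing an
# image dataset with the two imported VTK writers typically looks like this:
def _write_image_data_sketch(image_data, file_name):
    """Write `image_data` to .mhd or .vti depending on the file extension."""
    if file_name.endswith(".mhd"):
        writer = vtkMetaImageWriter()
    else:
        writer = vtkXMLImageDataWriter()
    writer.SetFileName(file_name)
    writer.SetInputData(image_data)
    writer.Write()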
| 27.511628 | 76 | 0.752325 |
c73ff4534e3b71c1974b4bf7835f8ec9472d9d62 | 7,483 | py | Python | parkings/models/permit.py | klemmari1/parkkihubi | 93218c6046c0910e8a4c723dc7128c6eec085b8c | ["MIT"] | 12 | 2016-11-29T15:13:10.000Z | 2021-06-12T06:45:38.000Z | parkings/models/permit.py | niuzhipeng123/parkkihubi | 93218c6046c0910e8a4c723dc7128c6eec085b8c | ["MIT"] | 154 | 2016-11-30T09:07:58.000Z | 2022-02-12T08:29:36.000Z | parkings/models/permit.py | niuzhipeng123/parkkihubi | 93218c6046c0910e8a4c723dc7128c6eec085b8c | ["MIT"] | 15 | 2016-11-29T19:32:48.000Z | 2022-01-05T11:31:39.000Z |
from itertools import chain
from django.conf import settings
from django.contrib.gis.db import models as gis_models
from django.db import models, router, transaction
from django.utils import timezone
from django.utils.translation import gettext_lazy as _
from ..fields import CleaningJsonField
from ..validators import DictListValidator, TextField, TimestampField
from .constants import GK25FIN_SRID
from .enforcement_domain import EnforcementDomain
from .mixins import TimestampedModelMixin
from .parking import Parking
| 37.415 | 103 | 0.667379 |
c744286930e6918cebec7544521adbaf000c03cc | 4,265 | py | Python | poi_mining/biz/LSA/logEntropy.py | yummydeli/machine_learning | 54471182ac21ef0eee26557a7bd6f3a3dc3a09bd | ["MIT"] | 1 | 2019-09-29T13:36:29.000Z | 2019-09-29T13:36:29.000Z | poi_mining/biz/LSA/logEntropy.py | yummydeli/machine_learning | 54471182ac21ef0eee26557a7bd6f3a3dc3a09bd | ["MIT"] | null | null | null | poi_mining/biz/LSA/logEntropy.py | yummydeli/machine_learning | 54471182ac21ef0eee26557a7bd6f3a3dc3a09bd | ["MIT"] | null | null | null |
#!/usr/bin/env python
# encoding:utf-8
# ##############################################################################
# The MIT License (MIT)
#
# Copyright (c) [2015] [baidu.com]
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
# ##############################################################################
"""
LogEntropy
"""
import glob
import collections
import pandas
from sklearn.feature_extraction.text import CountVectorizer
import math
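# The LogEntropy class used below is truncated in this dump. For reference,
# log-entropy term weighting is conventionally local * global, with local
# weight log(1 + tf) and global weight 1 + sum_j(p_ij * log p_ij) / log(n).
# A hedged, minimal sketch (not the original class; assumes n_docs > 1):
import numpy
def log_entropy_weight_sketch(counts):
    """counts: (n_docs, n_terms) raw term-frequency matrix."""
    n_docs = counts.shape[0]
    p = counts / numpy.maximum(counts.sum(axis=0), 1e-12)
    with numpy.errstate(divide='ignore', invalid='ignore'):
        plogp = numpy.where(p > 0, p * numpy.log(p), 0.0)
    entropy = 1.0 + plogp.sum(axis=0) / math.log(n_docs)
    return numpy.log(counts + 1.0) * entropy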
if __name__ == '__main__':
    lsaEntropy = LogEntropy()
    lsaEntropy.logEntropyWeighting()
| 35.541667 | 100 | 0.557562 |
c7465ff1ea985cda2b457c6697cd774f312adad2 | 40 | py | Python | Python/swap_numbers.py | saurabhcommand/Hello-world | 647bad9da901a52d455f05ecc37c6823c22dc77e | ["MIT"] | 1,428 | 2018-10-03T15:15:17.000Z | 2019-03-31T18:38:36.000Z | Python/swap_numbers.py | saurabhcommand/Hello-world | 647bad9da901a52d455f05ecc37c6823c22dc77e | ["MIT"] | 1,162 | 2018-10-03T15:05:49.000Z | 2018-10-18T14:17:52.000Z | Python/swap_numbers.py | saurabhcommand/Hello-world | 647bad9da901a52d455f05ecc37c6823c22dc77e | ["MIT"] | 3,909 | 2018-10-03T15:07:19.000Z | 2019-03-31T18:39:08.000Z |
a = 5
b = 7
a,b = b,a
print(a)  # print() function; the original used Python 2 print statements
print(b)
| 5 | 9 | 0.5 |
c746b2ee9cd86b479c95bc6e51b1c40a08b1d7da | 2,162 | py | Python | algorithms/tests/test_unionfind.py | tommyod/PythonAlgorithms | f0a0f67be069fc9e9fa3027ed83942d6401223fe | ["MIT"] | 1 | 2021-08-23T17:15:06.000Z | 2021-08-23T17:15:06.000Z | algorithms/tests/test_unionfind.py | tommyod/PythonAlgorithms | f0a0f67be069fc9e9fa3027ed83942d6401223fe | ["MIT"] | 1 | 2018-05-02T17:29:42.000Z | 2018-05-02T17:31:18.000Z | algorithms/tests/test_unionfind.py | tommyod/PythonAlgorithms | f0a0f67be069fc9e9fa3027ed83942d6401223fe | ["MIT"] | 1 | 2018-05-02T12:31:52.000Z | 2018-05-02T12:31:52.000Z |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Tests for the union find data structure.
"""
try:
    from ..unionfind import UnionFind
except ValueError:
    pass
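
# When the relative import above fails, the tests would otherwise error on a
# missing name. The hedged sketch below mirrors only the API these tests
# exercise (union by rank with path compression); it is an illustrative
# fallback, not the package's real implementation.
if 'UnionFind' not in globals():
    class UnionFind:
        def __init__(self, elements=()):
            self._parent = {}
            self._rank = {}
            self.add(elements)

        def add(self, elements):
            for element in elements:
                if element not in self._parent:
                    self._parent[element] = element
                    self._rank[element] = 0

        def __contains__(self, element):
            return element in self._parent

        def get_root(self, element):
            root = element
            while self._parent[root] != root:
                root = self._parent[root]
            while self._parent[element] != root:  # path compression
                self._parent[element], element = root, self._parent[element]
            return root

        def _union_pair(self, first, second):
            root_a, root_b = self.get_root(first), self.get_root(second)
            if root_a == root_b:
                return
            if self._rank[root_a] < self._rank[root_b]:
                root_a, root_b = root_b, root_a
            self._parent[root_b] = root_a  # union by rank
            if self._rank[root_a] == self._rank[root_b]:
                self._rank[root_a] += 1

        def union(self, first, second=None):
            if second is None:  # union of an iterable of elements
                items = list(first)
                for a, b in zip(items, items[1:]):
                    self._union_pair(a, b)
            else:
                self._union_pair(first, second)

        def in_same_set(self, first, second):
            return self.get_root(first) == self.get_root(second)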
def test_unionfind_basics():
    """
    Test the basic properties of unionfind.
    """
    u = UnionFind([1, 2, 3])
    assert u.in_same_set(1, 2) is False
    assert u.in_same_set(2, 3) is False
    u.union(1, 3)
    assert u.in_same_set(1, 2) is False
    assert u.in_same_set(3, 1)
    assert u.get_root(1) == u.get_root(3)


def test_unionfind_adding_elements():
    """
    Test adding operations, mostly syntactic sugar.
    """
    u = UnionFind([1, 2])
    u.add(['a', 'b'])
    assert 1 in u
    assert 'a' in u
def test_unionfind_example():
    """
    Test on a slightly more involved example.
    """
    u = UnionFind([1, 2, 3, 4, 5])
    u.union(1, 3)
    u.union(2, 4)
    assert u.in_same_set(1, 3)
    assert u.in_same_set(4, 2)
    assert not u.in_same_set(2, 5)
    assert not u.in_same_set(2, 1)
    assert not u.in_same_set(1, 4)
    u.union(5, 1)
    assert u.in_same_set(3, 5)
def test_unionfind_several():
    """
    Test that we can take union of more than two elements.
    """
    u = UnionFind([1, 2, 3, 4, 5, 6, 7, 8])
    u.union([1, 2, 3])
    u.union([4, 5, 6])
    u.union([7, 8])
    assert u.in_same_set(1, 3)
    assert u.in_same_set(6, 4)
    assert u.in_same_set(7, 8)
    assert not u.in_same_set(2, 5)
    assert not u.in_same_set(4, 8)


def test_unionfind_compression():
    """
    Test path compression and the union by rank.
    """
    # Test the ranking
    elements = list(range(100))
    u = UnionFind(elements)
    for i in range(len(elements) - 1):
        u.union(elements[i], elements[i + 1])
    assert max(u._rank.values()) == 1

    # Test path compression
    parent_nodes = list(u._parent.values())
    assert all(parent == parent_nodes[0] for parent in parent_nodes)
if __name__ == "__main__":
    import pytest
    # --durations=10 <- May be used to show potentially slow tests
    pytest.main(args=['.', '--doctest-modules', '-v'])
 | 21.62 | 68 | 0.584181 |
c746ec91b306e818609b2388a6f07e590b53157d | 10,961 | py | Python | a3/ga.py | mishless/LearningSystems | 635d9af9d00ae0360d7ca8571bf47f782fdcdfe9 | ["MIT"] | 1 | 2021-08-01T03:30:49.000Z | 2021-08-01T03:30:49.000Z | a3/ga.py | mishless/LearningSystems | 635d9af9d00ae0360d7ca8571bf47f782fdcdfe9 | ["MIT"] | null | null | null | a3/ga.py | mishless/LearningSystems | 635d9af9d00ae0360d7ca8571bf47f782fdcdfe9 | ["MIT"] | null | null | null |
# Genetic Algorithm for solving the Traveling Salesman problem
# Authors: Mihaela Stoycheva, Vukan Turkulov
# Includes
import configparser
import math
import matplotlib.pyplot as plt
import numpy
import random
import sys
from operator import itemgetter
#Global variables(yay!)
# Configuration variables(read from config.txt)
mutation_rate = 0;
population_size = 0;
elitism_rate = 0;
tournament_rate = 0;
max_iterations = 0;
input_file_name = "";
parent_rate = 0;
# General global variables
cities = {};
number_of_cities = 0;
parent_number = 0;
tournament_size = 0;
elite_number = 0;
crossover_number = 0;
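# The GA operators themselves (init, selection, crossover, mutation, the main
# loop) are defined further down in the original file but are cut off in this
# dump. As an illustration of how the tournament_size global above is
# typically used, here is a hedged sketch of tournament selection; the
# (individual, tour_length) representation is an assumption, not necessarily
# the authors' data structure.
def tournament_select_sketch(population, tour_lengths):
    contenders = random.sample(range(len(population)), tournament_size)
    best = min(contenders, key=lambda i: tour_lengths[i])  # shorter TSP tour wins
    return population[best]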
def test_stuff():
    """
    p1 = "abcdefg";
    p2 = "1234567";
    for i in range(0,10):
        print(create_child(p1,p2));
    ind = [1,2,3,4,5,6];
    print("Before", ind);
    mutate_individual(ind);
    print("After", ind);
    exit();"""

#main
init();
do_what_needs_to_be_done()
| 26.159905 | 77 | 0.624487 |
c7477304b232543e959b4e41d7f4db3d8d55814b | 334 | py | Python | products/migrations/0010_remove_product_updated_at.py | UB-ES-2021-A1/wannasell-backend | 84360b2985fc28971867601373697f39303e396b | ["Unlicense"] | null | null | null | products/migrations/0010_remove_product_updated_at.py | UB-ES-2021-A1/wannasell-backend | 84360b2985fc28971867601373697f39303e396b | ["Unlicense"] | 62 | 2021-11-22T21:52:44.000Z | 2021-12-17T15:07:02.000Z | products/migrations/0010_remove_product_updated_at.py | UB-ES-2021-A1/wannasell-backend | 84360b2985fc28971867601373697f39303e396b | ["Unlicense"] | null | null | null |
# Generated by Django 3.2.8 on 2021-11-25 17:50
from django.db import migrations
| 18.555556 | 48 | 0.598802 |
c74852ff0006431dcf627c07119eece06aae36cb | 160 | py | Python | ResumeAnalyser/apps.py | samyakj2307/recruitai_resume_backend | 52f8eda63d479b28fc19fe2d7149ab9ee9be122f | ["MIT"] | null | null | null | ResumeAnalyser/apps.py | samyakj2307/recruitai_resume_backend | 52f8eda63d479b28fc19fe2d7149ab9ee9be122f | ["MIT"] | null | null | null | ResumeAnalyser/apps.py | samyakj2307/recruitai_resume_backend | 52f8eda63d479b28fc19fe2d7149ab9ee9be122f | ["MIT"] | 1 | 2021-06-03T13:56:53.000Z | 2021-06-03T13:56:53.000Z |
from django.apps import AppConfig
| 22.857143 | 56 | 0.78125 |
c748ba40f4f42a2340be17f0209db3df304f6bd7 | 196 | py | Python | plugins/core/player_manager_plugin/__init__.py | StarryPy/StarryPy-Historic | b9dbd552b8c4631a5a8e9dda98b7ba447eca59da | ["WTFPL"] | 38 | 2015-02-12T11:57:59.000Z | 2018-11-15T16:03:45.000Z | plugins/core/player_manager_plugin/__init__.py | StarryPy/StarryPy-Historic | b9dbd552b8c4631a5a8e9dda98b7ba447eca59da | ["WTFPL"] | 68 | 2015-02-05T23:29:47.000Z | 2017-12-27T08:26:25.000Z | plugins/core/player_manager_plugin/__init__.py | StarryPy/StarryPy-Historic | b9dbd552b8c4631a5a8e9dda98b7ba447eca59da | ["WTFPL"] | 21 | 2015-02-06T18:58:21.000Z | 2017-12-24T20:08:59.000Z |
from plugins.core.player_manager_plugin.plugin import PlayerManagerPlugin
from plugins.core.player_manager_plugin.manager import (
    Banned,
    UserLevels,
    permissions,
    PlayerManager
)
| 24.5 | 73 | 0.795918 |
c74916514901ff1d3dbfb832b264c70329520805 | 3,063 | py | Python | src/config/svc-monitor/svc_monitor/services/loadbalancer/drivers/ha_proxy/custom_attributes/haproxy_validator.py | jnpr-pranav/contrail-controller | 428eee37c28c31830fd764315794e1a6e52720c1 | ["Apache-2.0"] | 37 | 2020-09-21T10:42:26.000Z | 2022-01-09T10:16:40.000Z | src/config/svc-monitor/svc_monitor/services/loadbalancer/drivers/ha_proxy/custom_attributes/haproxy_validator.py | jnpr-pranav/contrail-controller | 428eee37c28c31830fd764315794e1a6e52720c1 | ["Apache-2.0"] | null | null | null | src/config/svc-monitor/svc_monitor/services/loadbalancer/drivers/ha_proxy/custom_attributes/haproxy_validator.py | jnpr-pranav/contrail-controller | 428eee37c28c31830fd764315794e1a6e52720c1 | ["Apache-2.0"] | 21 | 2020-08-25T12:48:42.000Z | 2022-03-22T04:32:18.000Z |
from builtins import str
from builtins import range
from builtins import object
import logging
import inspect
import os
def validate_custom_attributes(custom_attributes_dict, section,
                               custom_attributes):
    section_dict = {}
    if custom_attributes and section in custom_attributes_dict:
        for key, value in list(custom_attributes.items()):
            if key in custom_attributes_dict[section]:
                # Sanitize the value
                try:
                    type_attr = custom_attributes_dict[section][key]['type']
                    limits = custom_attributes_dict[section][key]['limits']
                    if type_attr == 'int':
                        value = int(value)
                        if value in range(limits[0], limits[1]):
                            section_dict.update({key: value})
                        else:
                            logging.info("Skipping key: %s, value: %s due to "
                                         "validation failure" % (key, value))
                    elif type_attr == 'str':
                        if len(value) in range(limits[0], limits[1]):
                            section_dict.update({key: value})
                        else:
                            logging.info("Skipping key: %s, value: %s due to "
                                         "validation failure" % (key, value))
                    elif type_attr == 'bool':
                        if value in limits:
                            if value == 'True':
                                value = ''
                            elif value == 'False':
                                value = 'no '
                            section_dict.update({key: value})
                        else:
                            logging.info("Skipping key: %s, value: %s due to "
                                         "validation failure" % (key, value))
                    elif inspect.isclass(eval(type_attr)):
                        new_custom_attr = eval(type_attr)(key, value)
                        if new_custom_attr.validate():
                            value = new_custom_attr.post_validation()
                            section_dict.update({key: value})
                        else:
                            logging.info("Skipping key: %s, value: %s due to "
                                         "validation failure" % (key, value))
                except Exception as e:
                    logging.error(str(e))
                    continue
    return section_dict
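
# Hedged usage sketch -- the schema below is illustrative, not the driver's
# real custom-attribute registry:
#   schema = {'frontend': {'max_conn': {'type': 'int', 'limits': [1, 65536]}}}
#   validate_custom_attributes(schema, 'frontend', {'max_conn': '1024'})
#   -> {'max_conn': 1024}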
| 39.269231 | 79 | 0.479595 |
c74949362f59fa0673a80dd80fbdd7f5a0af70d8 | 1,405 | py | Python | python/janitor/typecache.py | monkeyman79/janitor | a41187c1b58b736a5de2b0b30eb51d85a65b17c3 | ["MIT"] | 2 | 2018-11-06T13:02:27.000Z | 2021-02-22T19:07:22.000Z | python/janitor/typecache.py | monkeyman79/janitor | a41187c1b58b736a5de2b0b30eb51d85a65b17c3 | ["MIT"] | 1 | 2016-09-28T12:24:43.000Z | 2016-09-28T13:47:35.000Z | python/janitor/typecache.py | monkeyman79/janitor | a41187c1b58b736a5de2b0b30eb51d85a65b17c3 | ["MIT"] | null | null | null |
import gdb
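# The TypeCache class body is truncated in this dump; a hedged sketch of a
# typical gdb type cache (not the project's actual code):
class TypeCache:
    """Memoize gdb.lookup_type() results, which are relatively expensive."""
    def __init__(self):
        self._types = {}

    def get(self, name):
        if name not in self._types:
            self._types[name] = gdb.lookup_type(name)
        return self._types[name]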
cache = TypeCache()
| 26.509434 | 59 | 0.540925 |
c74a04a139575fe8c546ea452d0215d058b4fa6f | 805 | py | Python | key_phrase.py | Santara/autoSLR | 8c524b8a0023d1434cb7be4e110103605d0d2cab | ["MIT"] | 1 | 2020-08-12T23:17:38.000Z | 2020-08-12T23:17:38.000Z | key_phrase.py | Santara/autoSLR | 8c524b8a0023d1434cb7be4e110103605d0d2cab | ["MIT"] | null | null | null | key_phrase.py | Santara/autoSLR | 8c524b8a0023d1434cb7be4e110103605d0d2cab | ["MIT"] | 1 | 2019-08-29T09:36:46.000Z | 2019-08-29T09:36:46.000Z |
import os
import sys
directory = sys.argv[1]
outfile = open("key_phrases.csv","w")
files = {}
for filename in os.listdir(directory):
    text = []
    with open(os.path.join(directory, filename)) as f:
        text = [l.strip() for l in f if len(l.strip()) > 2]
    data = ''
    for t in text:
        if len(t.split()) > 1:
            data = data + '. ' + t.strip()
# NOTE: the original whitelist string omitted the letter 'z'; restored here.
whitelist = set('abcdefghijklmnopqrstuvwxyz ABCDEFGHIJKLMNOPQRSTUVWXYZ')
answer = ''.join(filter(whitelist.__contains__, data))
answer = ' '.join(answer.split())
import rake
import operator
rake_object = rake.Rake("/home/ashutosh/Sudeshna/RAKE-tutorial/data/stoplists/SmartStoplist.txt", 3,3,1)
import pprint
pp = pprint.PrettyPrinter()
keywords = rake_object.run(answer)
for entry in keywords:
outfile.write("%s, %s\n" % (entry[0], str(entry[1])) )
outfile.close()
| 25.15625 | 105 | 0.695652 |
c74ab0b0f80631d9cb06c8040217e1f860dd10c2 | 1,127 | py | Python | tests/test_utils.py | aced-differentiate/dft-input-gen | 14bee323517714c433682bad2dcb897b223dd5ec | ["Apache-2.0"] | 1 | 2021-04-15T09:54:52.000Z | 2021-04-15T09:54:52.000Z | tests/test_utils.py | CitrineInformatics/dft-input-gen | 14bee323517714c433682bad2dcb897b223dd5ec | ["Apache-2.0"] | 1 | 2021-01-28T22:12:07.000Z | 2021-01-28T22:12:07.000Z | tests/test_utils.py | aced-differentiate/dft-input-gen | 14bee323517714c433682bad2dcb897b223dd5ec | ["Apache-2.0"] | 2 | 2020-12-08T18:14:13.000Z | 2020-12-18T19:01:11.000Z |
"""Unit tests for helper utilities in :mod:`dftinputgen.utils`."""
import os
import pytest
from ase import io as ase_io
from dftinputgen.utils import get_elem_symbol
from dftinputgen.utils import read_crystal_structure
from dftinputgen.utils import get_kpoint_grid_from_spacing
from dftinputgen.utils import DftInputGeneratorUtilsError
test_base_dir = os.path.dirname(__file__)
feo_conv_file = os.path.join(test_base_dir, "qe", "files", "feo_conv.vasp")
feo_conv = ase_io.read(feo_conv_file)
| 28.897436 | 75 | 0.754215 |
c74b3631946b737bd9c4684c29b89101e0d8c544 | 6,044 | py | Python | core/models.py | nforesperance/Django-Channels-ChatApp | b244954206214f7dc1b8793291d957a5bf80f0e2 | ["MIT"] | 2 | 2020-07-18T05:19:36.000Z | 2020-07-18T05:19:38.000Z | core/models.py | nforesperance/Django-Channels-ChatApp | b244954206214f7dc1b8793291d957a5bf80f0e2 | ["MIT"] | 4 | 2021-03-19T02:37:45.000Z | 2021-06-04T23:02:41.000Z | core/models.py | nforesperance/Django-Channels-ChatApp | b244954206214f7dc1b8793291d957a5bf80f0e2 | ["MIT"] | null | null | null |
from django.contrib.auth.models import User
from django.db.models import (Model, TextField, DateTimeField, ForeignKey,
                              CASCADE)
from asgiref.sync import async_to_sync
from channels.layers import get_channel_layer
from django.db import models
import json
| 32.67027 | 93 | 0.603077 |
c74bed1c84a21dce43450d469d8869b0372e61e0 | 15,798 | py | Python | backup/model.py | jsikyoon/ASNP-RMR | ddd3e586b01ba3a7f8b3721582aca7403649400e | ["MIT"] | 8 | 2020-07-21T02:49:54.000Z | 2021-09-28T02:22:37.000Z | backup/model.py | jsikyoon/ASNP-RMR | ddd3e586b01ba3a7f8b3721582aca7403649400e | ["MIT"] | null | null | null | backup/model.py | jsikyoon/ASNP-RMR | ddd3e586b01ba3a7f8b3721582aca7403649400e | ["MIT"] | 1 | 2020-09-02T06:39:49.000Z | 2020-09-02T06:39:49.000Z |
import tensorflow as tf
import numpy as np
# utility methods
def batch_mlp(input, output_sizes, variable_scope):
    """Apply MLP to the final axis of a 3D tensor (reusing already defined MLPs).

    Args:
      input: input tensor of shape [B,n,d_in].
      output_sizes: An iterable containing the output sizes of the MLP as defined
        in `basic.Linear`.
      variable_scope: String giving the name of the variable scope. If this is set
        to be the same as a previously defined MLP, then the weights are reused.

    Returns:
      tensor of shape [B,n,d_out] where d_out=output_sizes[-1]
    """
    # Get the shapes of the input and reshape to parallelise across observations
    batch_size, _, filter_size = input.shape.as_list()
    output = tf.reshape(input, (-1, filter_size))
    output.set_shape((None, filter_size))

    # Pass through MLP
    with tf.variable_scope(variable_scope, reuse=tf.AUTO_REUSE):
        for i, size in enumerate(output_sizes[:-1]):
            output = tf.nn.relu(
                tf.layers.dense(output, size, name="layer_{}".format(i)))
        # Last layer without a ReLu
        output = tf.layers.dense(
            output, output_sizes[-1], name="layer_{}".format(i + 1))

    # Bring back into original shape
    output = tf.reshape(output, (batch_size, -1, output_sizes[-1]))
    return output
def uniform_attention(q, v):
    """Uniform attention. Equivalent to np.

    Args:
      q: queries. tensor of shape [B,m,d_k].
      v: values. tensor of shape [B,n,d_v].

    Returns:
      tensor of shape [B,m,d_v].
    """
    total_points = tf.shape(q)[1]
    rep = tf.reduce_mean(v, axis=1, keepdims=True)  # [B,1,d_v]
    rep = tf.tile(rep, [1, total_points, 1])
    return rep
def laplace_attention(q, k, v, scale, normalise):
    """Computes laplace exponential attention.

    Args:
      q: queries. tensor of shape [B,m,d_k].
      k: keys. tensor of shape [B,n,d_k].
      v: values. tensor of shape [B,n,d_v].
      scale: float that scales the L1 distance.
      normalise: Boolean that determines whether weights sum to 1.

    Returns:
      tensor of shape [B,m,d_v].
    """
    k = tf.expand_dims(k, axis=1)  # [B,1,n,d_k]
    q = tf.expand_dims(q, axis=2)  # [B,m,1,d_k]
    unnorm_weights = - tf.abs((k - q) / scale)  # [B,m,n,d_k]
    unnorm_weights = tf.reduce_sum(unnorm_weights, axis=-1)  # [B,m,n]
    if normalise:
        weight_fn = tf.nn.softmax
    else:
        weight_fn = lambda x: 1 + tf.tanh(x)
    weights = weight_fn(unnorm_weights)  # [B,m,n]
    rep = tf.einsum('bik,bkj->bij', weights, v)  # [B,m,d_v]
    return rep
def dot_product_attention(q, k, v, normalise):
    """Computes dot product attention.

    Args:
      q: queries. tensor of shape [B,m,d_k].
      k: keys. tensor of shape [B,n,d_k].
      v: values. tensor of shape [B,n,d_v].
      normalise: Boolean that determines whether weights sum to 1.

    Returns:
      tensor of shape [B,m,d_v].
    """
    d_k = tf.shape(q)[-1]
    scale = tf.sqrt(tf.cast(d_k, tf.float32))
    unnorm_weights = tf.einsum('bjk,bik->bij', k, q) / scale  # [B,m,n]
    if normalise:
        weight_fn = tf.nn.softmax
    else:
        weight_fn = tf.sigmoid
    weights = weight_fn(unnorm_weights)  # [B,m,n]
    rep = tf.einsum('bik,bkj->bij', weights, v)  # [B,m,d_v]
    return rep
def multihead_attention(q, k, v, num_heads=8):
    """Computes multi-head attention.

    Args:
      q: queries. tensor of shape [B,m,d_k].
      k: keys. tensor of shape [B,n,d_k].
      v: values. tensor of shape [B,n,d_v].
      num_heads: number of heads. Should divide d_v.

    Returns:
      tensor of shape [B,m,d_v].
    """
    d_k = q.get_shape().as_list()[-1]
    d_v = v.get_shape().as_list()[-1]
    head_size = d_v // num_heads  # integer division: Conv1D needs an int filter count
    key_initializer = tf.random_normal_initializer(stddev=d_k**-0.5)
    value_initializer = tf.random_normal_initializer(stddev=d_v**-0.5)
    rep = tf.constant(0.0)
    for h in range(num_heads):
        o = dot_product_attention(
            tf.layers.Conv1D(head_size, 1, kernel_initializer=key_initializer,
                             name='wq%d' % h, use_bias=False, padding='VALID')(q),
            tf.layers.Conv1D(head_size, 1, kernel_initializer=key_initializer,
                             name='wk%d' % h, use_bias=False, padding='VALID')(k),
            tf.layers.Conv1D(head_size, 1, kernel_initializer=key_initializer,
                             name='wv%d' % h, use_bias=False, padding='VALID')(v),
            normalise=True)
        rep += tf.layers.Conv1D(d_v, 1, kernel_initializer=value_initializer,
                                name='wo%d' % h, use_bias=False, padding='VALID')(o)
    return rep
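
# Hedged usage sketch (TF1-style; shapes are illustrative): with q of shape
# [B, m, d_k] and k, v of shape [B, n, d_k] / [B, n, d_v], each attention
# above returns a [B, m, d_v] summary of v, e.g.:
#   q = tf.random_normal([4, 10, 64]); k = tf.random_normal([4, 20, 64])
#   v = tf.random_normal([4, 20, 64])
#   rep = multihead_attention(q, k, v, num_heads=8)   # -> [4, 10, 64]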
| 36.068493 | 81 | 0.660653 |
c74e4682a52e8afc4e35ad4f69f1a64dccbd1416 | 3,520 | py | Python | minotaur/_minotaur.py | giannitedesco/minotaur | 1a043818775e14054cc3467ba6d1c07cbf128c6b | ["Apache-2.0"] | 172 | 2020-08-24T14:34:00.000Z | 2021-12-29T21:56:33.000Z | minotaur/_minotaur.py | giannitedesco/minotaur | 1a043818775e14054cc3467ba6d1c07cbf128c6b | ["Apache-2.0"] | 3 | 2020-08-25T13:46:30.000Z | 2021-02-27T01:25:38.000Z | minotaur/_minotaur.py | giannitedesco/minotaur | 1a043818775e14054cc3467ba6d1c07cbf128c6b | ["Apache-2.0"] | 4 | 2020-08-24T17:21:18.000Z | 2021-12-29T21:57:42.000Z |
from typing import Dict, Tuple, Optional
from pathlib import Path
import asyncio
from ._mask import Mask
from ._event import Event
from ._base import InotifyBase
__all__ = ('Minotaur',)
| 26.268657 | 75 | 0.559659 |
c7508c28b649dccba896625618759517bbe0fd13 | 161 | py | Python | pyclustering/container/examples/__init__.py | JosephChataignon/pyclustering | bf4f51a472622292627ec8c294eb205585e50f52 | ["BSD-3-Clause"] | 1,013 | 2015-01-26T19:50:14.000Z | 2022-03-31T07:38:48.000Z | pyclustering/container/examples/__init__.py | peterlau0626/pyclustering | bf4f51a472622292627ec8c294eb205585e50f52 | ["BSD-3-Clause"] | 542 | 2015-01-20T16:44:32.000Z | 2022-01-29T14:57:20.000Z | pyclustering/container/examples/__init__.py | peterlau0626/pyclustering | bf4f51a472622292627ec8c294eb205585e50f52 | ["BSD-3-Clause"] | 262 | 2015-03-19T07:28:12.000Z | 2022-03-30T07:28:24.000Z |
"""!
@brief Collection of examples devoted to containers.
@authors Andrei Novikov (pyclustering@yandex.ru)
@date 2014-2020
@copyright BSD-3-Clause
""" | 17.888889 | 53 | 0.714286 |
c751066d68d4e91afb71f1ee11d13e9bcbb998a8 | 8,802 | py | Python | novelty-detection/train_wood_vgg19.py | matherm/python-data-science | bdb49b18c5ef6044f8a9e6f95c81d5f7bb1d511f | ["MIT"] | 1 | 2020-03-24T09:22:04.000Z | 2020-03-24T09:22:04.000Z | novelty-detection/train_wood_vgg19.py | matherm/python-data-science | bdb49b18c5ef6044f8a9e6f95c81d5f7bb1d511f | ["MIT"] | 1 | 2020-06-16T14:42:29.000Z | 2020-06-16T14:42:29.000Z | novelty-detection/train_wood_vgg19.py | matherm/python-data-science | bdb49b18c5ef6044f8a9e6f95c81d5f7bb1d511f | ["MIT"] | null | null | null |
import argparse
import sys
import torch
import numpy as np
import torch.nn as nn
from torch.utils.data import DataLoader
from torchvision.datasets import MNIST
from torchvision.datasets import CIFAR10
import torchvision.transforms as transforms
import matplotlib.pyplot as plt
parser = argparse.ArgumentParser(description='PyTorch Novelty Detection')
# TRAINING PARAMS
parser.add_argument('--epochs', type=int, default=100, metavar='',
                    help='Amount of epochs for training (default: 100)')
parser.add_argument('--batch_size', type=int, default=1000, metavar='',
                    help='Batch size for SGD (default: 1000)')
parser.add_argument('--lrate', type=float, default=0.0001, metavar="",
                    help="Learning rate (default: 0.0001)")
parser.add_argument('--with_cuda', action='store_true', dest='use_cuda',
                    help="Shall cuda be used (default: False)")
parser.add_argument('--model', type=int, default=0,
                    help="Which model to train (0=KLminimizer, 1=Euclidean-Minimizer) (default: 0)")
parser.add_argument('--plots', action='store_true', dest='plots',
                    help="Shall matplotlib be used (default: False)")
parser.add_argument('--grid', action='store_true', dest='grid',
                    help="Grid search (default: False)")
argv = parser.parse_args()
sys.argv = [sys.argv[0]]
from ummon import *
from negvarbound import *
from model import *
from helpers import Evaluator
import helpers
torch.manual_seed(4)
if __name__ == '__main__':
# WOOD
transform = transforms.Compose([transforms.ToTensor(), transforms.Normalize(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225]), VGG19Features("pool4"), helpers.flatten_transform])
wood_data = ImagePatches("/ext/data/Wood-0035.png", mode='rgb', train=True, stride_y=14, stride_x=14, window_size=28, transform=transform)
wood_data_test = AnomalyImagePatches("/ext/data/Wood-0035.png", mode='rgb', train=True, stride_y=14, stride_x=14, window_size=28, transform=transform, propability=1.0, anomaly=SquareAnomaly(size=8, color=255))
wood_data = [wood_data[i][0].data for i in range(len(wood_data))]
wood_data = torch.stack(wood_data).numpy() / 10
wood_data_test = [wood_data_test[i][0].data for i in range(len(wood_data_test))]
wood_data_test = torch.stack(wood_data_test).numpy() / 10
# Novelty
data_novelty = wood_data_test
# Train
data_train = wood_data
# Val
data_val = data_train
######################################################
# NORMAL DISTRIBUTION
######################################################
# Model
model = ModelNormal(input_features = data_train.shape[1], hidden_layer=20, latent_features=20)
torch.manual_seed(4)
# LOSS
criterion = KLLoss(model=model, size_average=False)
# INSTANTIATE OPTIMIZER
optimizer = torch.optim.SGD(model.parameters(), lr=argv.lrate, weight_decay=1)
#Evaluator
evaluator = Evaluator(model, data_train, data_val, data_novelty)
# Activate matplotlib
argv.plots = True
with Logger(loglevel=10, log_batch_interval=601) as lg:
# CREATE A TRAINER
my_trainer = UnsupervisedTrainer(lg,
model,
criterion,
optimizer,
trainingstate = Trainingstate(),
model_filename="KL_MIN",
use_cuda= argv.use_cuda,
profile = False,
convergence_eps = 1e-5)
# START TRAINING
my_trainer.fit(dataloader_training=(wood_data, 20),
epochs=200)
evaluator.evaluate_model(argv)
######################################################
# LOGNORMAL
######################################################
# Model
model = ModelLogNormal(input_features = data_train.shape[1], hidden_layer=20, latent_features=20)
torch.manual_seed(4)
# LOSS
criterion = KLLoss_lognormal(model=model, size_average=False)
# INSTANTIATE OPTIMIZER
optimizer = torch.optim.SGD(model.parameters(), lr=argv.lrate, weight_decay=1)
#Evaluator
evaluator = Evaluator(model, data_train, data_val, data_novelty)
# Activate matplotlib
argv.plots = True
with Logger(loglevel=10, log_batch_interval=601) as lg:
# CREATE A TRAINER
my_trainer = UnsupervisedTrainer(lg,
model,
criterion,
optimizer,
trainingstate = Trainingstate(),
model_filename="KL_MIN",
use_cuda= argv.use_cuda,
profile = False,
convergence_eps = 1e-5)
# START TRAINING
my_trainer.fit(dataloader_training=(data_train, 20),
epochs=argv.epochs)
evaluator.evaluate_model(argv)
######################################################
# LAPLACE
######################################################
# Model
model = ModelLaplace(input_features = data_train.shape[1], hidden_layer=20, latent_features=20)
torch.manual_seed(4)
# LOSS
criterion = KLLoss_laplace(model=model, size_average=False, mean=2, scale=0.5)
# INSTANTIATE OPTIMIZER
optimizer = torch.optim.SGD(model.parameters(), lr=0.000001, weight_decay=1)
#Evaluator
evaluator = Evaluator(model, data_train, data_val, data_novelty)
# Activate matplotlib
argv.plots = True
with Logger(loglevel=10, log_batch_interval=601) as lg:
# CREATE A TRAINER
my_trainer = UnsupervisedTrainer(lg,
model,
criterion,
optimizer,
trainingstate = Trainingstate(),
model_filename="KL_MIN",
use_cuda= argv.use_cuda,
profile = False,
convergence_eps = 1e-1)
# START TRAINING
my_trainer.fit(dataloader_training=(data_train, 20),
epochs=300)
evaluator.evaluate_model(argv)
# {'AUROC LAT (TRAIN)': 0.8743801652892562,
# 'AUROC LAT (VAL)': 0.8661157024793389,
# 'AUROC REC (TRAIN)': 0.86900826446281,
# 'AUROC REC (VAL)': 0.8528925619834712}
######################################################
# LAPLACE WITH R-SHIFT
######################################################
# Model
model = ModelLaplace(input_features = data_train.shape[1], hidden_layer=20, latent_features=20)
torch.manual_seed(4)
# LOSS
criterion = CombinedLoss(model)
# INSTANTIATE OPTIMIZER
optimizer = torch.optim.SGD(model.parameters(), lr=argv.lrate, weight_decay=1)
#Evaluator
evaluator = Evaluator(model, data_train, data_val, data_novelty)
# Activate matplotlib
argv.plots = True
with Logger(loglevel=10, log_batch_interval=601) as lg:
# CREATE A TRAINER
my_trainer = UnsupervisedTrainer(lg,
model,
criterion,
optimizer,
trainingstate = Trainingstate(),
model_filename="KL_MIN",
use_cuda= argv.use_cuda,
profile = False,
convergence_eps = 1e-3)
# START TRAINING
my_trainer.fit(dataloader_training=(data_train, 20),
epochs=200)
evaluator.evaluate_model(argv)
# {'AUROC LAT (TRAIN)': 0.8590909090909091,
# 'AUROC LAT (VAL)': 0.8752066115702479,
# 'AUROC REC (TRAIN)': 0.8677685950413224,
# 'AUROC REC (VAL)': 0.8619834710743801}
| 34.249027 | 213 | 0.546353 |
c7511256bf0b0f8d7c0f1ccc084e2e9144ad8ab3 | 2,948 | py | Python | sample_architectures/cnn.py | hvarS/PyTorch-Refer | 020445e3ae1f3627f39e1ab957cdff44a2127289 | ["MIT"] | null | null | null | sample_architectures/cnn.py | hvarS/PyTorch-Refer | 020445e3ae1f3627f39e1ab957cdff44a2127289 | ["MIT"] | null | null | null | sample_architectures/cnn.py | hvarS/PyTorch-Refer | 020445e3ae1f3627f39e1ab957cdff44a2127289 | ["MIT"] | 1 | 2021-07-31T09:44:00.000Z | 2021-07-31T09:44:00.000Z |
# -*- coding: utf-8 -*-
"""CNN.ipynb
Automatically generated by Colaboratory.
Original file is located at
https://colab.research.google.com/drive/1Tq6HUya2PrC0SmyOIFo2c_eVtguRED2q
"""
import torch
import torch.nn as nn
import torch.optim as optim
import torch.nn.functional as F
from torch.utils.data import DataLoader
import torchvision.datasets as datasets
import torchvision.transforms as transforms
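# The notebook's CNN class definition is cut off in this dump. A minimal
# sketch consistent with the usage below (in_channels=1, num_classes=10,
# 28x28 MNIST inputs -> (batch, 10) logits); the layer sizes here are
# assumptions, not necessarily the notebook's original architecture:
class CNN(nn.Module):
    def __init__(self, in_channels, num_classes):
        super(CNN, self).__init__()
        self.conv1 = nn.Conv2d(in_channels, 8, kernel_size=3, padding=1)
        self.conv2 = nn.Conv2d(8, 16, kernel_size=3, padding=1)
        self.pool = nn.MaxPool2d(kernel_size=2, stride=2)
        self.fc1 = nn.Linear(16 * 7 * 7, num_classes)

    def forward(self, x):
        x = self.pool(F.relu(self.conv1(x)))   # 28x28 -> 14x14
        x = self.pool(F.relu(self.conv2(x)))   # 14x14 -> 7x7
        x = x.reshape(x.shape[0], -1)
        return self.fc1(x)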
model = CNN(1,10)
x = torch.randn((64,1,28,28))
print(model(x).shape)
device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
device
in_channels = 1
num_classes = 10
learning_rate = 0.001
batch_size = 64
num_epochs = 4
train_dataset = datasets.MNIST(root = "dataset/",train = True,transform = transforms.ToTensor(),download = True)
train_loader = DataLoader(dataset=train_dataset,batch_size=64,shuffle=True)
test_dataset = train_dataset = datasets.MNIST(root = "dataset/",train = False,transform = transforms.ToTensor(),download = True)
test_loader = DataLoader(dataset = test_dataset,batch_size = batch_size,shuffle = True)
model = CNN(1,10).to(device = device)
criterion = nn.CrossEntropyLoss()
optimizer = optim.Adam(model.parameters(),lr = learning_rate)
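# check_accuracy is called after training but its definition is cut off in
# this dump; a standard sketch of what such a helper usually looks like:
def check_accuracy(loader, model):
    num_correct, num_samples = 0, 0
    model.eval()
    with torch.no_grad():
        for x, y in loader:
            x, y = x.to(device), y.to(device)
            _, predictions = model(x).max(1)
            num_correct += (predictions == y).sum()
            num_samples += predictions.size(0)
    model.train()
    print(f"Got {num_correct}/{num_samples} correct: {float(num_correct)/num_samples*100:.2f}%")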
for epoch in range(num_epochs):
    for batch_idx, (data, targets) in enumerate(train_loader):
        # get data to cuda if possible
        data = data.cuda()
        targets = targets.cuda()
        scores = model(data)
        loss = criterion(scores, targets)
        # backward
        optimizer.zero_grad()
        loss.backward()
        # gradient descent or adam step
        optimizer.step()
# Check the accuracy for the training step
check_accuracy(train_loader,model)
check_accuracy(test_loader,model)
| 28.07619 | 128 | 0.700136 |
c7551a216f55773fcf2668fcef4ad367660f3169 | 21,599 | py | Python | aispace/layers/callbacks/qa_evaluators.py | SmileGoat/AiSpace | 35fc120667e4263c99b300815e0bf018f5064a40 | ["Apache-2.0"] | 32 | 2020-01-16T07:59:03.000Z | 2022-03-31T09:24:00.000Z | aispace/layers/callbacks/qa_evaluators.py | SmileGoat/AiSpace | 35fc120667e4263c99b300815e0bf018f5064a40 | ["Apache-2.0"] | 9 | 2020-06-05T03:27:06.000Z | 2022-03-12T01:00:17.000Z | aispace/layers/callbacks/qa_evaluators.py | SmileGoat/AiSpace | 35fc120667e4263c99b300815e0bf018f5064a40 | ["Apache-2.0"] | 3 | 2020-06-09T02:22:50.000Z | 2021-07-19T06:07:07.000Z |
# -*- coding: utf-8 -*-
# @Time : 2020-07-30 15:06
# @Author : yingyuankai
# @Email : yingyuankai@aliyun.com
# @File : qa_evaluators.py
import os
import logging
import numpy as np
import tensorflow as tf
import json
from pprint import pprint
from collections import defaultdict
from aispace.utils.eval_utils import calc_em_score, calc_f1_score
from aispace.utils.io_utils import save_json
from aispace.utils.print_utils import print_boxed
from aispace.utils.metrics_utils import ConfusionMatrix
__all__ = [
    'EvaluatorForQaSimple',
    'EvaluatorForQaWithImpossible'
]
logger = logging.getLogger(__name__)
| 44.997917 | 146 | 0.570582 |
c75685d19bc8be9c76eb30777f9bd2a54b73db11 | 682 | py | Python | tests/conftest.py | junjunjunk/torchgpipe | 3db11e1da0fc432eb3f3807ddcb22967973c8b28 | ["Apache-2.0"] | 532 | 2019-05-27T09:23:04.000Z | 2022-03-31T04:07:55.000Z | tests/conftest.py | junjunjunk/torchgpipe | 3db11e1da0fc432eb3f3807ddcb22967973c8b28 | ["Apache-2.0"] | 29 | 2019-07-01T19:49:54.000Z | 2021-11-28T00:51:00.000Z | tests/conftest.py | junjunjunk/torchgpipe | 3db11e1da0fc432eb3f3807ddcb22967973c8b28 | ["Apache-2.0"] | 68 | 2019-05-27T09:27:32.000Z | 2022-03-27T13:52:18.000Z |
import pytest
import torch
def pytest_report_header():
    return f'torch: {torch.__version__}'
| 22 | 62 | 0.696481 |
c756e2f724651746fcaf020b50f3e0f2bdeb6442 | 54,090 | py | Python | lib/python/treadmill/scheduler/__init__.py | drienyov/treadmill | ce21537cd9a2fdb0567ac2aa3de1afcb2f6861de | ["Apache-2.0"] | null | null | null | lib/python/treadmill/scheduler/__init__.py | drienyov/treadmill | ce21537cd9a2fdb0567ac2aa3de1afcb2f6861de | ["Apache-2.0"] | null | null | null | lib/python/treadmill/scheduler/__init__.py | drienyov/treadmill | ce21537cd9a2fdb0567ac2aa3de1afcb2f6861de | ["Apache-2.0"] | null | null | null |
"""Treadmill hierarchical scheduler.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import abc
import collections
import datetime
import heapq
import itertools
import logging
import operator
import sys
import time
import enum
import numpy as np
import six
_LOGGER = logging.getLogger(__name__)
MAX_PRIORITY = 100
DEFAULT_RANK = 100
_UNPLACED_RANK = sys.maxsize
DIMENSION_COUNT = None
_MAX_UTILIZATION = float('inf')
_GLOBAL_ORDER_BASE = time.mktime((2014, 1, 1, 0, 0, 0, 0, 0, 0))
# 21 day
DEFAULT_SERVER_UPTIME = 21 * 24 * 60 * 60
# 1 day
MIN_SERVER_UPTIME = 1 * 24 * 60 * 60
# 7 days
DEFAULT_MAX_APP_LEASE = 7 * 24 * 60 * 60
# Default partition threshold
DEFAULT_THRESHOLD = 0.9
# pylint: disable=C0302,too-many-lines
def _bit_count(value):
    """Returns number of bits set.
    """
    count = 0
    while value:
        value &= value - 1
        count += 1
    return count
def zero_capacity():
"""Returns zero capacity vector.
"""
assert DIMENSION_COUNT is not None, 'Dimension count not set.'
return np.zeros(DIMENSION_COUNT)
def eps_capacity():
"""Returns eps capacity vector.
"""
assert DIMENSION_COUNT is not None, 'Dimension count not set.'
return np.array(
[np.finfo(float).eps for _x in range(0, DIMENSION_COUNT)]
)
def _global_order():
"""Use timestamp in nanoseconds, from Jan 1st 2014, to break tie in
scheduling conflicts for apps of the same priority, in a FIFO fashion.
"""
# Take the current EPOCH in nanosec
global_order = int(time.time() * 1000000) - _GLOBAL_ORDER_BASE
return global_order
def utilization(demand, allocated, available):
    """Calculates utilization score.
    """
    return np.max(np.subtract(demand, allocated) / available)
def _all(oper, left, right):
"""Short circuit all for ndarray.
"""
return all(
oper(ai, bi)
for ai, bi in six.moves.zip(left, right)
)
def _any(oper, left, right):
"""Short circuit any for ndarray.
"""
return any(
oper(ai, bi)
for ai, bi in six.moves.zip(left, right)
)
def _any_eq(left, right):
"""Short circuit any eq for ndarray.
"""
return _any(operator.eq, left, right)
def _any_isclose(left, right):
"""Short circuit any isclose for ndarray.
"""
return _any(np.isclose, left, right)
def _any_lt(left, right):
"""Short circuit any lt for ndarray.
"""
return _any(operator.lt, left, right)
def _any_le(left, right):
"""Short circuit any le for ndarray.
"""
return _any(operator.le, left, right)
def _any_gt(left, right):
"""Short circuit any gt for ndarray.
"""
return _any(operator.gt, left, right)
def _any_ge(left, right):
"""Short circuit any ge for ndarray.
"""
return _any(operator.ge, left, right)
def _all_eq(left, right):
"""Short circuit all eq for ndarray.
"""
return _all(operator.eq, left, right)
def _all_isclose(left, right):
"""Short circuit all isclose for ndarray.
"""
return _all(np.isclose, left, right)
def _all_lt(left, right):
"""Short circuit all lt for ndarray.
"""
return _all(operator.lt, left, right)
def _all_le(left, right):
"""Short circuit all le for ndarray.
"""
return _all(operator.le, left, right)
def _all_gt(left, right):
"""Short circuit all gt for ndarray.
"""
return _all(operator.gt, left, right)
def _all_ge(left, right):
"""Short circuit all ge for ndarray.
"""
return _all(operator.ge, left, right)
def adjust_valid_until(self, child_valid_until):
"""Recursively adjust valid until time.
"""
if child_valid_until:
self.valid_until = max(self.valid_until, child_valid_until)
else:
if self.empty():
self.valid_until = 0
else:
self.valid_until = max([node.valid_until
for node in self.children_iter()])
if self.parent:
self.parent.adjust_valid_until(child_valid_until)
def remove_child_traits(self, node_name):
"""Recursively remove child traits up.
"""
self.traits.remove(node_name)
if self.parent:
self.parent.remove_child_traits(self.name)
self.parent.add_child_traits(self)
def reset_children(self):
"""Reset children to empty list.
"""
for child in self.children_iter():
child.parent = None
self.children = list()
self.children_by_name = dict()
def add_node(self, node):
"""Add child node, set the traits and propagate traits up.
"""
assert node.parent is None
assert node.name not in self.children_by_name
node.parent = self
self.children.append(node)
self.children_by_name[node.name] = node
self.add_child_traits(node)
self.increment_affinity(node.affinity_counters)
self.add_labels(node.labels)
self.adjust_valid_until(node.valid_until)
def add_labels(self, labels):
"""Recursively add labels to self and parents.
"""
self.labels.update(labels)
if self.parent:
self.parent.add_labels(self.labels)
def remove_node(self, node):
"""Remove child node and adjust the traits.
"""
assert node.name in self.children_by_name
del self.children_by_name[node.name]
for idx in six.moves.xrange(0, len(self.children)):
if self.children[idx] == node:
self.children[idx] = None
self.remove_child_traits(node.name)
self.decrement_affinity(node.affinity_counters)
self.adjust_valid_until(None)
node.parent = None
return node
def remove_node_by_name(self, nodename):
"""Removes node by name.
"""
assert nodename in self.children_by_name
return self.remove_node(self.children_by_name[nodename])
def check_app_constraints(self, app):
"""Find app placement on the node.
"""
if app.allocation is not None:
if app.allocation.label not in self.labels:
_LOGGER.info('Missing label: %s on %s', app.allocation.label,
self.name)
return False
if app.traits != 0 and not self.traits.has(app.traits):
_LOGGER.info('Missing traits: %s on %s', app.traits, self.name)
return False
if not self.check_app_affinity_limit(app):
return False
if _any_gt(app.demand, self.free_capacity):
_LOGGER.info('Not enough free capacity: %s', self.free_capacity)
return False
return True
def check_app_affinity_limit(self, app):
"""Check app affinity limits
"""
count = self.affinity_counters[app.affinity.name]
limit = app.affinity.limits[self.level]
return count < limit
def put(self, _app):
"""Abstract method, should never be called.
"""
raise Exception('Not implemented.')
def size(self, label):
"""Returns total capacity of the children.
"""
if self.empty() or label not in self.labels:
return eps_capacity()
return np.sum([
n.size(label) for n in self.children_iter()], 0)
def members(self):
"""Return set of all leaf node names.
"""
names = dict()
for node in self.children_iter():
names.update(node.members())
return names
def increment_affinity(self, counters):
"""Increment affinity counters recursively.
"""
self.affinity_counters.update(counters)
if self.parent:
self.parent.increment_affinity(counters)
def decrement_affinity(self, counters):
"""Decrement affinity counters recursively.
"""
self.affinity_counters.subtract(counters)
if self.parent:
self.parent.decrement_affinity(counters)
class Bucket(Node):
"""Collection of nodes/buckets.
"""
__slots__ = (
'affinity_strategies',
'traits',
)
_default_strategy_t = SpreadStrategy
def set_affinity_strategy(self, affinity, strategy_t):
"""Initilaizes placement strategy for given affinity.
"""
self.affinity_strategies[affinity] = strategy_t(self)
def get_affinity_strategy(self, affinity):
"""Returns placement strategy for the affinity, defaults to spread.
"""
if affinity not in self.affinity_strategies:
self.set_affinity_strategy(affinity, Bucket._default_strategy_t)
return self.affinity_strategies[affinity]
def adjust_capacity_up(self, new_capacity):
"""Node can only increase capacity.
"""
self.free_capacity = np.maximum(self.free_capacity, new_capacity)
if self.parent:
self.parent.adjust_capacity_up(self.free_capacity)
def adjust_capacity_down(self, prev_capacity=None):
"""Called when capacity is decreased.
"""
if self.empty():
self.free_capacity = zero_capacity()
if self.parent:
self.parent.adjust_capacity_down()
else:
if prev_capacity is not None and _all_lt(prev_capacity,
self.free_capacity):
return
free_capacity = zero_capacity()
for child_node in self.children_iter():
if child_node.state is not State.up:
continue
free_capacity = np.maximum(free_capacity,
child_node.free_capacity)
# If resulting free_capacity is less the previous, we need to
# adjust the parent, otherwise, nothing needs to be done.
prev_capacity = self.free_capacity.copy()
if _any_lt(free_capacity, self.free_capacity):
self.free_capacity = free_capacity
if self.parent:
self.parent.adjust_capacity_down(prev_capacity)
def add_node(self, node):
"""Adds node to the bucket.
"""
super(Bucket, self).add_node(node)
self.adjust_capacity_up(node.free_capacity)
def remove_node(self, node):
"""Removes node from the bucket.
"""
super(Bucket, self).remove_node(node)
# if _any_isclose(self.free_capacity, node.free_capacity):
self.adjust_capacity_down(node.free_capacity)
return node
def put(self, app):
"""Try to put app on one of the nodes that belong to the bucket.
"""
# Check if it is feasible to put app on some node low in the
# hierarchy
_LOGGER.debug('bucket.put: %s => %s', app.name, self.name)
if not self.check_app_constraints(app):
return False
strategy = self.get_affinity_strategy(app.affinity.name)
node = strategy.suggested_node()
if node is None:
_LOGGER.debug('All nodes in the bucket deleted.')
return False
nodename0 = node.name
first = True
while True:
# End of iteration.
if not first and node.name == nodename0:
_LOGGER.debug('Finished iterating on: %s.', self.name)
break
first = False
_LOGGER.debug('Trying node: %s:', node.name)
if node.state is not State.up:
_LOGGER.debug('Node not up: %s, %s', node.name, node.state)
else:
if node.put(app):
return True
node = strategy.next_node()
return False
class Server(Node):
"""Server object, final app placement.
"""
__slots__ = (
'init_capacity',
'apps',
'up_since',
'presence_id',
)
def is_same(self, other):
"""Compares capacity and traits against another server.
valid_until is ignored, as server comes up after reboot will have
different valid_until value.
"""
return (self.labels == other.labels and
_all_eq(self.init_capacity, other.init_capacity) and
self.traits.is_same(other.traits))
def put(self, app):
"""Tries to put the app on the server.
"""
assert app.name not in self.apps
_LOGGER.debug('server.put: %s => %s', app.name, self.name)
if not self.check_app_lifetime(app):
return False
if not self.check_app_constraints(app):
return False
prev_capacity = self.free_capacity.copy()
self.free_capacity -= app.demand
self.apps[app.name] = app
self.increment_affinity([app.affinity.name])
app.server = self.name
if self.parent:
self.parent.adjust_capacity_down(prev_capacity)
if app.placement_expiry is None:
app.placement_expiry = time.time() + app.lease
return True
def restore(self, app, placement_expiry=None):
"""Put app back on the server, ignore app lifetime.
"""
_LOGGER.debug('server.restore: %s => %s (%s)',
app.name, self.name, placement_expiry)
lease = app.lease
# If not explicit
if placement_expiry is None:
placement_expiry = app.placement_expiry
app.lease = 0
rc = self.put(app)
app.lease = lease
app.placement_expiry = placement_expiry
return rc
def renew(self, app):
"""Try to extend the placement for app lease.
"""
can_renew = self.check_app_lifetime(app)
if can_renew:
app.placement_expiry = time.time() + app.lease
return can_renew
def check_app_lifetime(self, app):
"""Check if the app lease fits until server is rebooted.
"""
# app with 0 lease can be placed anywhere (ignore potentially
# expired servers)
if not app.lease:
return True
return time.time() + app.lease < self.valid_until
def remove(self, app_name):
"""Removes app from the server.
"""
assert app_name in self.apps
app = self.apps[app_name]
del self.apps[app_name]
app.server = None
app.evicted = True
app.unschedule = False
app.placement_expiry = None
self.free_capacity += app.demand
self.decrement_affinity([app.affinity.name])
if self.parent:
self.parent.adjust_capacity_up(self.free_capacity)
def remove_all(self):
"""Remove all apps.
"""
# iterate over copy of the keys, as we are removing them in the loop.
for appname in list(self.apps):
self.remove(appname)
def size(self, label):
"""Return server capacity.
"""
if label not in self.labels:
return eps_capacity()
return self.init_capacity
def members(self):
"""Return set of all leaf node names.
"""
return {self.name: self}
def set_state(self, state, since):
"""Change host state.
"""
if self.state is state:
return
super(Server, self).set_state(state, since)
if state == State.up:
if self.parent:
self.parent.adjust_capacity_up(self.free_capacity)
elif state in (State.down, State.frozen):
if self.parent:
self.parent.adjust_capacity_down(self.free_capacity)
else:
raise Exception('Invalid state: ' % state)
class Allocation:
"""Allocation manages queue of apps sharing same reserved capacity.
In reality allocation is tied to grn via application proid.
Applications within the allocation are organized by application priority.
Allocations are ranked, and the rank is used to globally order applications
from different allocations into global queue.
Default allocation has rank 100. Defining allocation with lower rank will
result in all its applications being evaluated first regardless of
utilization. This is used to model "system" applications that should be
always present regardless of utilization.
Allocation queue can be capped with max_utilization parameter. If set, it
will specify the max_utilization which will be considered for scheduling.
"""
__slots__ = (
'reserved',
'rank',
'rank_adjustment',
'traits',
'label',
'max_utilization',
'apps',
'sub_allocations',
'path',
'constraints',
)
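# Illustration of the ordering described in the docstring (numbers made up):
# utilization is (demand - allocated) / available per dimension, so an
# allocation still inside its reservation sorts ahead of one past it:
#   utilization([1.0], [2.0], [4.0]) -> -0.25   (spare reserved capacity)
#   utilization([3.0], [2.0], [4.0]) ->  0.25   (over its reservation)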
def set_reserved(self, reserved):
"""Update reserved capacity.
"""
if reserved is None:
self.reserved = zero_capacity()
elif isinstance(reserved, int):
assert reserved == 0
self.reserved = zero_capacity()
elif isinstance(reserved, float):
assert reserved == 0.0
self.reserved = zero_capacity()
elif isinstance(reserved, list):
assert len(reserved) == DIMENSION_COUNT
self.reserved = np.array(reserved, dtype=float)
elif isinstance(reserved, np.ndarray):
self.reserved = reserved
else:
assert 'Unsupported type: %r' % type(reserved)
def update(self, reserved, rank, rank_adjustment, max_utilization=None):
"""Updates allocation.
"""
if rank is not None:
self.rank = rank
else:
self.rank = DEFAULT_RANK
if rank_adjustment is not None:
self.rank_adjustment = rank_adjustment
self.set_reserved(reserved)
self.set_max_utilization(max_utilization)
def set_max_utilization(self, max_utilization):
"""Sets max_utilization, accounting for default None value.
"""
if max_utilization is not None:
self.max_utilization = max_utilization
else:
self.max_utilization = _MAX_UTILIZATION
def set_traits(self, traits):
"""Set traits, account for default None value.
"""
if not traits:
self.traits = 0
else:
self.traits = traits
def add(self, app):
"""Add application to the allocation queue.
Once added, the scheduler will make an attempt to place the app on one
of the cell nodes.
"""
# Check that there are no duplicate app names.
if app.name in self.apps:
_LOGGER.warning(
'Duplicate app on alllocation queue: %s', app.name
)
return
app.allocation = self
self.apps[app.name] = app
def remove(self, name):
"""Remove application from the allocation queue.
"""
if name in self.apps:
self.apps[name].allocation = None
del self.apps[name]
def total_reserved(self):
"""Total reserved capacity including sub-allocs.
"""
return six.moves.reduce(
lambda acc, alloc: acc + alloc.total_reserved(),
six.itervalues(self.sub_allocations),
self.reserved
)
def add_sub_alloc(self, name, alloc):
"""Add child allocation.
"""
self.sub_allocations[name] = alloc
assert not alloc.path
alloc.path = self.path + [name]
alloc.label = self.label
def remove_sub_alloc(self, name):
"""Remove chlid allocation.
"""
if name in self.sub_allocations:
del self.sub_allocations[name]
def get_sub_alloc(self, name):
"""Return sub allocation, create empty if it does not exist.
"""
if name not in self.sub_allocations:
self.add_sub_alloc(name, Allocation())
return self.sub_allocations[name]
def all_apps(self):
"""Return all apps in allocation and sub-allocations."""
all_apps = list(six.itervalues(self.apps))
for alloc in six.itervalues(self.sub_allocations):
all_apps.extend(alloc.all_apps())
return all_apps
class Partition:
"""Cell partition.
"""
__slots__ = (
'allocation',
'max_server_uptime',
'max_lease',
'threshold',
'label',
'_reboot_buckets',
'_reboot_dates',
'_reboot_last',
)
def _find_bucket(self, timestamp):
    """Try to find bucket with given timestamp.
    """
    for bucket in self._reboot_buckets:
        if bucket.timestamp == timestamp:
            return bucket
    return None
def add(self, server, timestamp=None):
"""Add server.
"""
bucket = None
if timestamp:
bucket = self._find_bucket(timestamp)
# servers with larger than max lifetime should be rebooted at
# the next opportunity
if (self._reboot_buckets[0].timestamp >
server.up_since + DEFAULT_SERVER_UPTIME):
bucket = self._reboot_buckets[0]
if not bucket:
bucket = min(reversed(self._reboot_buckets),
key=lambda b: b.cost(server))
bucket.add(server)
def remove(self, server):
"""Remove server.
"""
for bucket in self._reboot_buckets:
bucket.remove(server)
def tick(self, now):
    """Do per-tick bookkeeping.
    """
    while self._reboot_last <= now + DEFAULT_SERVER_UPTIME:
        bucket = RebootBucket(next(self._reboot_dates))
        self._reboot_buckets.append(bucket)
        self._reboot_last = bucket.timestamp
    while self._reboot_buckets[0].timestamp < now:
        self._reboot_buckets.pop(0)
# pylint: disable=invalid-name
def reboot_dates(schedule, start_date=None):
    """Generate valid reboot dates (a generator, not a list).
    """
    date = datetime.date.today()
    if start_date:
        date = start_date
    while True:
        weekday = date.weekday()
        if weekday in schedule:
            h, m, s = schedule[weekday]
            yield time.mktime((date.year, date.month, date.day,
                               h, m, s, 0, 0, 0))
        date += datetime.timedelta(days=1)
def dumps(cell):
"""Serializes cell to string.
"""
del cell
return ''
def loads(data):
"""Loads scheduler from string.
"""
del data
assert False, 'not implemented.'
| 30.016648 | 79 | 0.578203 |
c758c753c3644ae1a4c381597cfe0cc82c7e378b | 1,260 | py | Python | banners/bannerRan.py | gothyyy/AIDungeon | c198371c34d914e9d996559ef850c87a76f572c4 | ["MIT"] | 1 | 2019-12-30T21:45:06.000Z | 2019-12-30T21:45:06.000Z | banners/bannerRan.py | gothyyy/AIDungeon | c198371c34d914e9d996559ef850c87a76f572c4 | ["MIT"] | null | null | null | banners/bannerRan.py | gothyyy/AIDungeon | c198371c34d914e9d996559ef850c87a76f572c4 | ["MIT"] | null | null | null |
import random
import sys
import time
import json
import os
import warnings
import numpy as np
import glob  # os is already imported above
stat_mini = 1
stat_max = 0
listBanners = []
#HOW TO USE IT:
#1 copy the opening.txt
#2 remove the graphic (but do keep top logo for consistency)
#3 add ASCII art that is 78 or less characters in width
#4 save txt file under a complete new name
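# The banner-selection class itself is truncated in this dump. A hedged
# sketch of the usual pattern behind the globals above (file layout assumed):
def random_banner_sketch(path="banners"):
    banners = glob.glob(os.path.join(path, "*.txt"))
    return open(random.choice(banners)).read() if banners else ""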
| 14.823529 | 86 | 0.640476 |
c758e049e83a8786ae62f5c9ab2545ec4624de3e | 511 | py | Python | BondMarket/app/theme_lib.py | Meith0717/BondMarket | 83d99bd5930758e73b4fe74a92e706c7bc0eadb6 | ["Apache-2.0"] | null | null | null | BondMarket/app/theme_lib.py | Meith0717/BondMarket | 83d99bd5930758e73b4fe74a92e706c7bc0eadb6 | ["Apache-2.0"] | null | null | null | BondMarket/app/theme_lib.py | Meith0717/BondMarket | 83d99bd5930758e73b4fe74a92e706c7bc0eadb6 | ["Apache-2.0"] | null | null | null |
from dataclasses import dataclass
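# The `theme` dataclass used below is truncated in this dump; a sketch that
# matches the fields actually passed in:
@dataclass
class theme:
    name: str
    bg_color: object   # a color string, or None for the widget default
    fg_color: str
    lb_color: str
    ttk_theme: str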
LIGHT = theme(
name='LIGHT',
bg_color=None,
fg_color='black',
lb_color='#f0f0f0',
ttk_theme='xpnative'
)
DARK = theme(
name='DARK',
bg_color='#424242',
fg_color='white',
lb_color='#424242',
ttk_theme='black'
)
| 19.653846 | 35 | 0.485323 |
c7592054e40573b08b4d8a7a1efd9326b5695f4f | 3,877 | py | Python | run.py | rimijoker/CA-MTL | 068e25e0860a8ec81462018126eace4c004bacd4 | ["MIT"] | 1 | 2021-08-03T03:54:02.000Z | 2021-08-03T03:54:02.000Z | run.py | rimijoker/CA-MTL | 068e25e0860a8ec81462018126eace4c004bacd4 | ["MIT"] | null | null | null | run.py | rimijoker/CA-MTL | 068e25e0860a8ec81462018126eace4c004bacd4 | ["MIT"] | 1 | 2021-07-31T09:44:00.000Z | 2021-07-31T09:44:00.000Z |
import os
import sys
import re
import json
import logging
import torch
from transformers import (
    HfArgumentParser,
    set_seed,
    AutoTokenizer,
    AutoConfig,
    EvalPrediction,
)
from src.model.ca_mtl import CaMtl, CaMtlArguments
from src.utils.misc import MultiTaskDataArguments, Split
from src.mtl_trainer import MultiTaskTrainer, MultiTaskTrainingArguments
from src.data.mtl_dataset import MultiTaskDataset
from src.data.task_dataset import TaskDataset
logger = logging.getLogger(__name__)
if __name__ == "__main__":
main()
| 26.923611 | 98 | 0.660562 |
c75af988694e7b9961b260a9f014fab177797bfa | 1,033 | py | Python | examples/readWebsocket.py | uadlq/PhyPiDAQ-PiOS11 | fc6060551be2cc0143a157081341bf3c338d9fbd | ["BSD-2-Clause"] | null | null | null | examples/readWebsocket.py | uadlq/PhyPiDAQ-PiOS11 | fc6060551be2cc0143a157081341bf3c338d9fbd | ["BSD-2-Clause"] | null | null | null | examples/readWebsocket.py | uadlq/PhyPiDAQ-PiOS11 | fc6060551be2cc0143a157081341bf3c338d9fbd | ["BSD-2-Clause"] | null | null | null |
#!/usr/bin/env python3
"""Read data in CSV format from websocket
"""
import sys
import asyncio
import websockets
# read url from command line
if len(sys.argv) >= 2:
    uri = sys.argv[1]
else:
    # host url and port
    uri = "ws://localhost:8314"
print("*==* ", sys.argv[0], " Reading data from url ", uri)
# run web client
asyncio.get_event_loop().run_until_complete(read_ws())
| 25.195122 | 72 | 0.580833 |
c75b6da97a2671884ced55ad3cbef590baf2e5c6 | 2,187 | py | Python | settings/__init__.py | arcana261/python-grpc-boilerplate | dd20767ad5540a49e1db802ce578c7b8e416ccbb | ["Unlicense"] | null | null | null | settings/__init__.py | arcana261/python-grpc-boilerplate | dd20767ad5540a49e1db802ce578c7b8e416ccbb | ["Unlicense"] | null | null | null | settings/__init__.py | arcana261/python-grpc-boilerplate | dd20767ad5540a49e1db802ce578c7b8e416ccbb | ["Unlicense"] | null | null | null |
import os
import sys
import itertools
import json
_NONE = object()
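# The SettingManager class body is truncated in this dump. The assignment
# below swaps this module object in sys.modules for a class instance, a
# common trick that routes `settings.FOO` attribute access through the class.
# A hedged sketch of such a class (the lookup rules are assumptions):
class SettingManager:
    def __getattr__(self, name):
        value = os.environ.get(name.upper(), _NONE)
        if value is _NONE:
            raise AttributeError(name)
        try:
            return json.loads(value)  # decode numbers/bools when possible
        except ValueError:
            return value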
sys.modules[__name__] = SettingManager()
| 27.683544 | 99 | 0.577503 |
c75c60f75fce7285b991ad22486e1b1b13a02fed | 1,990 | py | Python | roblox/partials/partialgroup.py | speer-kinjo/ro.py | 2d5b80aec8fd143b11101fbbfdf3b557f798a27f | ["MIT"] | 28 | 2021-11-04T11:13:38.000Z | 2022-03-11T05:00:16.000Z | roblox/partials/partialgroup.py | speer-kinjo/ro.py | 2d5b80aec8fd143b11101fbbfdf3b557f798a27f | ["MIT"] | 12 | 2021-11-24T06:25:24.000Z | 2022-03-18T14:37:01.000Z | roblox/partials/partialgroup.py | speer-kinjo/ro.py | 2d5b80aec8fd143b11101fbbfdf3b557f798a27f | ["MIT"] | 21 | 2021-10-20T16:36:55.000Z | 2022-03-27T21:43:53.000Z |
"""
This file contains partial objects related to Roblox groups.
"""
from __future__ import annotations
from typing import TYPE_CHECKING
from ..bases.basegroup import BaseGroup
from ..bases.baseuser import BaseUser
if TYPE_CHECKING:
from ..client import Client
| 28.028169 | 91 | 0.628643 |