hexsha stringlengths 40 40 | size int64 5 2.06M | ext stringclasses 11 values | lang stringclasses 1 value | max_stars_repo_path stringlengths 3 251 | max_stars_repo_name stringlengths 4 130 | max_stars_repo_head_hexsha stringlengths 40 78 | max_stars_repo_licenses listlengths 1 10 | max_stars_count int64 1 191k ⌀ | max_stars_repo_stars_event_min_datetime stringlengths 24 24 ⌀ | max_stars_repo_stars_event_max_datetime stringlengths 24 24 ⌀ | max_issues_repo_path stringlengths 3 251 | max_issues_repo_name stringlengths 4 130 | max_issues_repo_head_hexsha stringlengths 40 78 | max_issues_repo_licenses listlengths 1 10 | max_issues_count int64 1 116k ⌀ | max_issues_repo_issues_event_min_datetime stringlengths 24 24 ⌀ | max_issues_repo_issues_event_max_datetime stringlengths 24 24 ⌀ | max_forks_repo_path stringlengths 3 251 | max_forks_repo_name stringlengths 4 130 | max_forks_repo_head_hexsha stringlengths 40 78 | max_forks_repo_licenses listlengths 1 10 | max_forks_count int64 1 105k ⌀ | max_forks_repo_forks_event_min_datetime stringlengths 24 24 ⌀ | max_forks_repo_forks_event_max_datetime stringlengths 24 24 ⌀ | content stringlengths 1 1.05M | avg_line_length float64 1 1.02M | max_line_length int64 3 1.04M | alphanum_fraction float64 0 1 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
5ef0d5fcfc264e4c868fb459e7c8ec1ae720744a | 6,136 | py | Python | warmmail/subscribe/tasks_send.py | sahilsakhuja/warmmail | 8a1f80d26c7a24c9aa054d869266cebd4540d7f2 | [
"MIT"
] | null | null | null | warmmail/subscribe/tasks_send.py | sahilsakhuja/warmmail | 8a1f80d26c7a24c9aa054d869266cebd4540d7f2 | [
"MIT"
] | null | null | null | warmmail/subscribe/tasks_send.py | sahilsakhuja/warmmail | 8a1f80d26c7a24c9aa054d869266cebd4540d7f2 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
import os
import urllib.parse
from datetime import date, datetime
from functools import partial
from urllib.parse import quote_plus
import pandas as pd
import plotly.express as px
import pytz
from csci_utils.luigi.requires import Requirement, Requires
from csci_utils.luigi.target import TargetOutput
from django.template.loader import render_to_string
from luigi import (
DateParameter,
ExternalTask,
ListParameter,
LocalTarget,
Parameter,
Target,
Task,
)
from plotly.io import to_image
from sendgrid import SendGridAPIClient
from sendgrid.helpers.mail import Mail
from .models import Subscription
from .tasks_fetch import ConvertAQIFileToParquet
| 32.638298 | 116 | 0.64309 |
5ef260b5bf84eb695b2bd8138b23ebab7ec1405b | 4,779 | py | Python | cno/chrutils.py | CherokeeLanguage/cherokee-audio-data | a10b7b38c0c1b56338561c917cef18a078ca573c | [
"CC0-1.0",
"MIT"
] | 2 | 2021-09-15T19:41:01.000Z | 2022-01-12T17:57:08.000Z | cno/chrutils.py | CherokeeLanguage/cherokee-audio-data | a10b7b38c0c1b56338561c917cef18a078ca573c | [
"CC0-1.0",
"MIT"
] | 1 | 2021-10-08T18:06:29.000Z | 2021-10-08T18:48:44.000Z | cno/chrutils.py | CherokeeLanguage/cherokee-audio-data | a10b7b38c0c1b56338561c917cef18a078ca573c | [
"CC0-1.0",
"MIT"
] | null | null | null | #!/usr/bin/env python3
# Converts MCO annotation into pseudo English phonetics for use by the aeneas alignment package
# lines prefixed with '#' are returned with the '#' removed, but otherwise unchanged.
if __name__ == "__main__":
test()
| 38.232 | 138 | 0.586943 |
5ef27b5395234b7acc5798e9c4c4dad901d9aba3 | 2,585 | py | Python | molo/usermetadata/tests/test_tags.py | praekelt/molo.usermetadata | 90cc0dffe55db8ece208d13d37d76956daadfa5a | [
"BSD-2-Clause"
] | null | null | null | molo/usermetadata/tests/test_tags.py | praekelt/molo.usermetadata | 90cc0dffe55db8ece208d13d37d76956daadfa5a | [
"BSD-2-Clause"
] | 14 | 2016-04-21T17:19:08.000Z | 2018-06-18T12:49:58.000Z | molo/usermetadata/tests/test_tags.py | praekeltfoundation/molo.usermetadata | 90cc0dffe55db8ece208d13d37d76956daadfa5a | [
"BSD-2-Clause"
] | null | null | null | import pytest
from django.test import TestCase, Client
from django.core.urlresolvers import reverse
from molo.core.tests.base import MoloTestCaseMixin
from molo.core.models import Main, SiteLanguageRelation, Languages
from molo.usermetadata.models import PersonaIndexPage, PersonaPage
from wagtail.wagtailcore.models import Site
from wagtail.contrib.settings.context_processors import SettingsProxy
| 34.013158 | 75 | 0.659574 |
5ef2f309d751c48873dcfc34c92ab93f2ef03256 | 1,793 | py | Python | app/db_con.py | bmugenya/Zup | 1677c1e4e263409f9f5fcaac7411dd403e32650e | [
"MIT"
] | null | null | null | app/db_con.py | bmugenya/Zup | 1677c1e4e263409f9f5fcaac7411dd403e32650e | [
"MIT"
] | 1 | 2020-03-06T17:32:15.000Z | 2020-03-06T17:32:15.000Z | app/db_con.py | bmugenya/Zup | 1677c1e4e263409f9f5fcaac7411dd403e32650e | [
"MIT"
] | null | null | null | import psycopg2
url = "dbname='da43n1slakcjkc' user='msqgxzgmcskvst' host='ec2-54-80-184-43.compute-1.amazonaws.com' port=5432 password='9281f925b1e2298e8d62812d9d4e430c1054db62e918c282d7039fa85b1759fa'"
| 34.480769 | 187 | 0.605131 |
5ef2f8f0dbedcc720d930427f98c729897cff0e0 | 780 | py | Python | server/dao/messageDao.py | ZibingZhang/Level-Up | e936eef7fc4f17e8bb392f98c7dff37dfad9d47b | [
"MIT"
] | null | null | null | server/dao/messageDao.py | ZibingZhang/Level-Up | e936eef7fc4f17e8bb392f98c7dff37dfad9d47b | [
"MIT"
] | 1 | 2020-01-23T19:22:06.000Z | 2020-01-23T19:23:47.000Z | server/dao/messageDao.py | ZibingZhang/Level-Up | e936eef7fc4f17e8bb392f98c7dff37dfad9d47b | [
"MIT"
] | null | null | null | from constants import cursor
| 21.081081 | 83 | 0.603846 |
5ef3a63fa138240896cecf671d1c8882815b58b3 | 3,248 | py | Python | skeletrack/bbox.py | mpeven/skeletal-tracker | ddb6e7d59899c0f3f0470805006e5c5c4bcabe33 | [
"MIT"
] | null | null | null | skeletrack/bbox.py | mpeven/skeletal-tracker | ddb6e7d59899c0f3f0470805006e5c5c4bcabe33 | [
"MIT"
] | null | null | null | skeletrack/bbox.py | mpeven/skeletal-tracker | ddb6e7d59899c0f3f0470805006e5c5c4bcabe33 | [
"MIT"
] | null | null | null | import numpy as np
import shapely.geometry as geom
| 44.493151 | 120 | 0.594828 |
5ef50480947622fa6c85f38cc28d083417268f20 | 351 | py | Python | apps/snippet/admin.py | AniPython/ani | 2536ac9ddae2b8396b634f982fb1083339b4a389 | [
"MIT"
] | null | null | null | apps/snippet/admin.py | AniPython/ani | 2536ac9ddae2b8396b634f982fb1083339b4a389 | [
"MIT"
] | null | null | null | apps/snippet/admin.py | AniPython/ani | 2536ac9ddae2b8396b634f982fb1083339b4a389 | [
"MIT"
] | null | null | null | from django.contrib import admin
from .models import Tag, Article
| 19.5 | 52 | 0.709402 |
5ef67226c4fddb4ea740eed126e252d451b1063d | 1,326 | py | Python | test/functional/test_framework/script_util.py | TopoX84/newlux | 555b9f7f9e4be4ef879f20083d8cf80ed8f7777e | [
"MIT"
] | 1,389 | 2017-06-28T02:35:01.000Z | 2022-03-25T20:09:01.000Z | test/functional/test_framework/script_util.py | TopoX84/newlux | 555b9f7f9e4be4ef879f20083d8cf80ed8f7777e | [
"MIT"
] | 1,039 | 2015-03-25T23:58:32.000Z | 2022-03-30T00:41:16.000Z | test/functional/test_framework/script_util.py | TopoX84/newlux | 555b9f7f9e4be4ef879f20083d8cf80ed8f7777e | [
"MIT"
] | 564 | 2017-06-28T03:55:03.000Z | 2022-03-30T14:57:40.000Z | #!/usr/bin/env python3
# Copyright (c) 2019 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Useful Script constants and utils."""
from test_framework.script import CScript
# To prevent a "tx-size-small" policy rule error, a transaction has to have a
# non-witness size of at least 82 bytes (MIN_STANDARD_TX_NONWITNESS_SIZE in
# src/policy/policy.h). Considering a Tx with the smallest possible single
# input (blank, empty scriptSig), and with an output omitting the scriptPubKey,
# we get to a minimum size of 60 bytes:
#
# Tx Skeleton: 4 [Version] + 1 [InCount] + 1 [OutCount] + 4 [LockTime] = 10 bytes
# Blank Input: 32 [PrevTxHash] + 4 [Index] + 1 [scriptSigLen] + 4 [SeqNo] = 41 bytes
# Output: 8 [Amount] + 1 [scriptPubKeyLen] = 9 bytes
#
# Hence, the scriptPubKey of the single output has to have a size of at
# least 22 bytes, which corresponds to the size of a P2WPKH scriptPubKey.
# The following script constant consists of a single push of 21 bytes of 'a':
# <PUSH_21> <21-bytes of 'a'>
# resulting in a 22-byte size. It should be used whenever (small) fake
# scriptPubKeys are needed, to guarantee that the minimum transaction size is
# met.
DUMMY_P2WPKH_SCRIPT = CScript([b'a' * 21])
| 51 | 84 | 0.737557 |
5efb1967191c3b432f3eb4d402361c056b7541a9 | 4,085 | py | Python | linux-distro/package/nuxleus/Source/Vendor/Microsoft/IronPython-2.0.1/Lib/Kamaelia/Protocol/Torrent/TorrentIPC.py | mdavid/nuxleus | 653f1310d8bf08eaa5a7e3326c2349e56a6abdc2 | [
"BSD-3-Clause"
] | 1 | 2017-03-28T06:41:51.000Z | 2017-03-28T06:41:51.000Z | linux-distro/package/nuxleus/Source/Vendor/Microsoft/IronPython-2.0.1/Lib/Kamaelia/Protocol/Torrent/TorrentIPC.py | mdavid/nuxleus | 653f1310d8bf08eaa5a7e3326c2349e56a6abdc2 | [
"BSD-3-Clause"
] | null | null | null | linux-distro/package/nuxleus/Source/Vendor/Microsoft/IronPython-2.0.1/Lib/Kamaelia/Protocol/Torrent/TorrentIPC.py | mdavid/nuxleus | 653f1310d8bf08eaa5a7e3326c2349e56a6abdc2 | [
"BSD-3-Clause"
] | 1 | 2016-12-13T21:08:58.000Z | 2016-12-13T21:08:58.000Z | #!/usr/bin/env python
#
# Copyright (C) 2006 British Broadcasting Corporation and Kamaelia Contributors(1)
# All Rights Reserved.
#
# You may only modify and redistribute this under the terms of any of the
# following licenses(2): Mozilla Public License, V1.1, GNU General
# Public License, V2.0, GNU Lesser General Public License, V2.1
#
# (1) Kamaelia Contributors are listed in the AUTHORS file and at
# http://kamaelia.sourceforge.net/AUTHORS - please extend this file,
# not this notice.
# (2) Reproduced in the COPYING file, and at:
# http://kamaelia.sourceforge.net/COPYING
# Under section 3.5 of the MPL, we are using this text since we deem the MPL
# notice inappropriate for this file. As per MPL/GPL/LGPL removal of this
# notice is prohibited.
#
# Please contact us via: kamaelia-list-owner@lists.sourceforge.net
# to discuss alternative licensing.
# -------------------------------------------------------------------------
# Licensed to the BBC under a Contributor Agreement: RJL
"""(Bit)Torrent IPC messages"""
from Kamaelia.BaseIPC import IPC
# ====================== Messages to send to TorrentMaker =======================
# ========= Messages for TorrentPatron to send to TorrentService ================
# a message for TorrentClient (i.e. to be passed on by TorrentService)
# request to add a TorrentPatron to a TorrentService's list of clients
# request to remove a TorrentPatron from a TorrentService's list of clients
# ==================== Messages for TorrentClient to produce ====================
# a new torrent has been added with id torrentid
# the torrent you requested me to download is already being downloaded as torrentid
# for some reason the torrent could not be started
# message containing the current status of a particular torrent
# ====================== Messages to send to TorrentClient ======================
# create a new torrent (a new download session) from a .torrent file's binary contents
# close a running torrent
| 40.04902 | 124 | 0.682742 |
5efb27ff2e3645c70f7c8e38f1cd5d5485dc77ac | 12,418 | py | Python | srcf/database/schema.py | danielchriscarter/srcf-python | a7143afd5340338094131a51f560efcd874457d2 | [
"MIT"
] | null | null | null | srcf/database/schema.py | danielchriscarter/srcf-python | a7143afd5340338094131a51f560efcd874457d2 | [
"MIT"
] | 2 | 2020-08-23T17:23:28.000Z | 2021-04-01T18:32:11.000Z | srcf/database/schema.py | danielchriscarter/srcf-python | a7143afd5340338094131a51f560efcd874457d2 | [
"MIT"
] | 3 | 2021-01-12T00:06:39.000Z | 2021-09-26T23:31:15.000Z | from __future__ import print_function, unicode_literals
from binascii import unhexlify
from enum import Enum
import os
import pwd
import six
from sqlalchemy import Column, Integer, String, Boolean, DateTime, Text, Enum as SQLAEnum, Numeric
from sqlalchemy import event
from sqlalchemy.dialects.postgresql import HSTORE
from sqlalchemy.schema import Table, FetchedValue, CheckConstraint, ForeignKey, DDL
from sqlalchemy.orm import relationship, backref
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.ext.hybrid import hybrid_property
from sqlalchemy.ext.mutable import MutableDict
from .compat import MemberCompat, SocietyCompat, AdminsSetCompat
__all__ = ["Member", "Society", "PendingAdmin",
"POSTGRES_USER", "RESTRICTED"]
# Should we make the notes & danger flags, and pending-admins
# tables available?
# These postgres roles have special permissions / are mentioned
# in the schema. Everyone else should connect as 'nobody'
schema_users = ("root", "srcf-admin", "hades")
# When connecting over a unix socket, postgres uses `getpeereid`
# for authentication; this is the number that matters:
euid_name = pwd.getpwuid(os.geteuid()).pw_name
if euid_name in schema_users or euid_name.endswith("-adm"):
POSTGRES_USER = euid_name
else:
POSTGRES_USER = "nobody"
is_root = POSTGRES_USER == "root" or POSTGRES_USER.endswith("-adm")
is_webapp = POSTGRES_USER == "srcf-admin"
is_hades = POSTGRES_USER == "hades"
RESTRICTED = not is_root
CRSID_TYPE = String(7)
SOCIETY_TYPE = String(16)
Base = declarative_base()
society_admins = Table(
'society_admins', Base.metadata,
Column('crsid', CRSID_TYPE,
ForeignKey('members.crsid'), primary_key=True),
Column('society', SOCIETY_TYPE,
ForeignKey('societies.society'), primary_key=True),
)
if is_root or is_webapp:
JobState = SQLAEnum('unapproved', 'queued', 'running', 'done', 'failed', 'withdrawn',
name='job_state')
LogType = SQLAEnum('created', 'started', 'progress', 'output', 'done', 'failed', 'note',
name='log_type')
LogLevel = SQLAEnum('debug', 'info', 'warning', 'error', 'critical',
name='log_level')
event.listen(
Base.metadata,
"before_create",
DDL("CREATE EXTENSION hstore")
)
else:
PendingAdmin = None
LogLevel = None
Domain = None
HTTPSCert = None
JobState = None
Job = None
JobLog = None
if __name__ == "__main__":
dump_schema()
| 33.836512 | 102 | 0.607988 |
5efcf7db618c88e80670f2e44849d8f110aeefaf | 15,226 | py | Python | tests/test_grid.py | ascillitoe/pyvista | b0eb948042f208a03b9feb5784854ebb8507dae8 | [
"MIT"
] | null | null | null | tests/test_grid.py | ascillitoe/pyvista | b0eb948042f208a03b9feb5784854ebb8507dae8 | [
"MIT"
] | null | null | null | tests/test_grid.py | ascillitoe/pyvista | b0eb948042f208a03b9feb5784854ebb8507dae8 | [
"MIT"
] | 1 | 2020-03-23T15:46:56.000Z | 2020-03-23T15:46:56.000Z | import os
import numpy as np
import pytest
import vtk
import pyvista
from pyvista import examples
from pyvista.plotting import system_supports_plotting
beam = pyvista.UnstructuredGrid(examples.hexbeamfile)
# create structured grid
x = np.arange(-10, 10, 2)
y = np.arange(-10, 10, 2)
z = np.arange(-10, 10, 2)
x, y, z = np.meshgrid(x, y, z)
sgrid = pyvista.StructuredGrid(x, y, z)
try:
test_path = os.path.dirname(os.path.abspath(__file__))
test_data_path = os.path.join(test_path, 'test_data')
except:
test_path = '/home/alex/afrl/python/source/pyvista/tests'
def test_grid_points():
"""Test the points methods on UniformGrid and RectilinearGrid"""
points = np.array([[0, 0, 0],
[1, 0, 0],
[1, 1, 0],
[0, 1, 0],
[0, 0, 1],
[1, 0, 1],
[1, 1, 1],
[0, 1, 1]])
grid = pyvista.UniformGrid()
grid.points = points
assert grid.dimensions == [2, 2, 2]
assert grid.spacing == [1, 1, 1]
assert grid.origin == [0., 0., 0.]
assert np.allclose(np.unique(grid.points, axis=0), np.unique(points, axis=0))
opts = np.c_[grid.x, grid.y, grid.z]
assert np.allclose(np.unique(opts, axis=0), np.unique(points, axis=0))
# Now test rectilinear grid
del grid
grid = pyvista.RectilinearGrid()
grid.points = points
assert grid.dimensions == [2, 2, 2]
assert np.allclose(np.unique(grid.points, axis=0), np.unique(points, axis=0))
def test_grid_extract_selection_points():
grid = pyvista.UnstructuredGrid(sgrid)
sub_grid = grid.extract_selection_points([0])
assert sub_grid.n_cells == 1
sub_grid = grid.extract_selection_points(range(100))
assert sub_grid.n_cells > 1
def test_gaussian_smooth():
uniform = examples.load_uniform()
active = uniform.active_scalars_name
values = uniform.active_scalars
uniform = uniform.gaussian_smooth(scalars=active)
assert uniform.active_scalars_name == active
assert uniform.active_scalars.shape == values.shape
assert not np.all(uniform.active_scalars == values)
values = uniform.active_scalars
uniform = uniform.gaussian_smooth(radius_factor=5, std_dev=1.3)
assert uniform.active_scalars_name == active
assert uniform.active_scalars.shape == values.shape
assert not np.all(uniform.active_scalars == values)
| 32.67382 | 97 | 0.64843 |
5efda15abd13bae316a30c8f74303450a7d645eb | 5,767 | py | Python | Server/src/quadradiusr_server/server.py | kjarosh/QuadradiusR | 2e55188bf9c9cd980ec6d11fce51830d0b4749d7 | [
"MIT"
] | null | null | null | Server/src/quadradiusr_server/server.py | kjarosh/QuadradiusR | 2e55188bf9c9cd980ec6d11fce51830d0b4749d7 | [
"MIT"
] | null | null | null | Server/src/quadradiusr_server/server.py | kjarosh/QuadradiusR | 2e55188bf9c9cd980ec6d11fce51830d0b4749d7 | [
"MIT"
] | null | null | null | import asyncio
import logging
from collections import defaultdict
from typing import Optional, List, Dict
from aiohttp import web
from aiohttp.web_runner import AppRunner, TCPSite
from quadradiusr_server.auth import Auth
from quadradiusr_server.config import ServerConfig
from quadradiusr_server.cron import Cron, SetupService
from quadradiusr_server.db.base import Game, Lobby
from quadradiusr_server.db.database_engine import DatabaseEngine
from quadradiusr_server.db.repository import Repository
from quadradiusr_server.game import GameInProgress
from quadradiusr_server.lobby import LiveLobby
from quadradiusr_server.notification import NotificationService
from quadradiusr_server.utils import import_submodules
routes = web.RouteTableDef()
def get_url(self, protocol: str = 'http') -> str:
# TCPSite.name is not implemented properly
self._ensure_started()
addr = self.address
scheme = self._get_scheme(protocol)
return f'{scheme}://{addr[0]}:{addr[1]}'
def get_href(self, protocol: str = 'http') -> str:
if self.config.href:
return f'{self._get_scheme(protocol)}://{self.config.href}'
else:
return self.get_url(protocol)
def run(self) -> int:
loop = asyncio.new_event_loop()
try:
loop.run_until_complete(self._run_async())
return 0
except KeyboardInterrupt:
logging.info('Interrupted')
loop.run_until_complete(self.shutdown())
return -1
finally:
loop.close()
def register_gateway(self, gateway):
user_id = gateway.user_id
self.gateway_connections[user_id].append(gateway)
def unregister_gateway(self, gateway):
user_id = gateway.user_id
self.gateway_connections[user_id].remove(gateway)
def start_lobby(self, lobby: Lobby) -> LiveLobby:
if lobby.id_ not in self.lobbies.keys():
self.lobbies[lobby.id_] = LiveLobby(
lobby.id_, self.repository,
self.notification_service)
return self.lobbies[lobby.id_]
def start_game(self, game: Game) -> GameInProgress:
if game.id_ not in self.games.keys():
self.games[game.id_] = GameInProgress(
game, self.repository, self.config.game)
return self.games[game.id_]
# importing submodules automatically registers endpoints
import quadradiusr_server.rest
import_submodules(quadradiusr_server.rest)
| 32.767045 | 81 | 0.650945 |
5eff513cdc7ff514a20abc942fb429679a31b4d7 | 95 | py | Python | 12_find the output/03_In Python/01_GeeksForGeeks/05_Set Five/problem_4.py | Magdyedwar1996/python-level-one-codes | 066086672f43488bc8b32c620b5e2f94cedfe3da | [
"MIT"
] | 1 | 2021-11-16T14:14:38.000Z | 2021-11-16T14:14:38.000Z | 12_find the output/03_In Python/01_GeeksForGeeks/05_Set Five/problem_4.py | Magdyedwar1996/python-level-one-codes | 066086672f43488bc8b32c620b5e2f94cedfe3da | [
"MIT"
] | null | null | null | 12_find the output/03_In Python/01_GeeksForGeeks/05_Set Five/problem_4.py | Magdyedwar1996/python-level-one-codes | 066086672f43488bc8b32c620b5e2f94cedfe3da | [
"MIT"
] | null | null | null |
gfg(2)
gfg(3,[3,2,1])
gfg(3)
| 10.555556 | 19 | 0.526316 |
5effb0c993d722db84398b9fa87c2c824fbd66c6 | 2,638 | py | Python | duck/utils/cal_ints.py | galaxycomputationalchemistry/duck | a57337afd523c99ebe4babf74c1868578c6cf1e0 | [
"Apache-2.0"
] | 1 | 2020-06-20T23:27:46.000Z | 2020-06-20T23:27:46.000Z | duck/utils/cal_ints.py | galaxycomputationalchemistry/duck | a57337afd523c99ebe4babf74c1868578c6cf1e0 | [
"Apache-2.0"
] | 4 | 2018-07-17T12:48:59.000Z | 2020-04-01T11:00:42.000Z | duck/utils/cal_ints.py | xchem/duck | b98bb78284e9c92837ac1e69fc2f06306ab1e28c | [
"Apache-2.0"
] | 3 | 2019-06-15T16:04:47.000Z | 2020-04-01T07:54:53.000Z | import json, pickle, sys, os
from parmed.geometry import distance2
from parmed.topologyobjects import Atom
import operator
import parmed
import math
if __name__ == "__main__":
# Define the input
res_atom = sys.argv[1]
prot_file = sys.argv[2]
find_interaction(res_atom, prot_file)
| 33.392405 | 88 | 0.681956 |
6f011e9d1e6d5fe45f9c159871d9be7ae9ea35b9 | 1,111 | py | Python | snakes/help_info.py | japinol7/snakes | bb501736027897bacab498ad7bbbe622cf4b9755 | [
"MIT"
] | 12 | 2019-04-15T07:20:31.000Z | 2019-05-18T22:03:35.000Z | snakes/help_info.py | japinol7/snakes | bb501736027897bacab498ad7bbbe622cf4b9755 | [
"MIT"
] | null | null | null | snakes/help_info.py | japinol7/snakes | bb501736027897bacab498ad7bbbe622cf4b9755 | [
"MIT"
] | null | null | null | """Module help_info."""
__author__ = 'Joan A. Pinol (japinol)'
| 41.148148 | 73 | 0.417642 |
6f0325adcc4e209cb06df2012d7cf8d2933313bf | 3,983 | py | Python | run_minprop_PD.py | kztakemoto/network_propagation | 7e66aca7f179cfe982b388b20b240745b4927bf9 | [
"MIT"
] | 3 | 2021-04-24T10:58:33.000Z | 2022-03-22T10:02:33.000Z | run_minprop_PD.py | kztakemoto/network_propagation | 7e66aca7f179cfe982b388b20b240745b4927bf9 | [
"MIT"
] | null | null | null | run_minprop_PD.py | kztakemoto/network_propagation | 7e66aca7f179cfe982b388b20b240745b4927bf9 | [
"MIT"
] | 1 | 2019-11-25T06:32:13.000Z | 2019-11-25T06:32:13.000Z | import warnings
warnings.simplefilter('ignore')
import argparse
import pickle
import numpy as np
import pandas as pd
import networkx as nx
import scipy.sparse as sp
from network_propagation_methods import minprop_2
from sklearn.metrics import roc_auc_score, auc
import matplotlib.pyplot as plt
#### Parameters #############
parser = argparse.ArgumentParser(description='Runs MINProp')
parser.add_argument('--alphaP', type=float, default=0.25, help='diffusion parameter for the protein-protein interaction network')
parser.add_argument('--alphaD', type=float, default=0.25, help='diffusion parameter for the disease similarity network')
parser.add_argument('--max_iter', type=int, default=1000, help='maximum number of iterations')
parser.add_argument('--eps', type=float, default=1.0e-6, help='convergence threshold')
parser.add_argument('--dir_data', type=str, default='./data/', help='directory of pickled network data')
args = parser.parse_args()
#### load data ############
### protein-protein interaction network
with open(args.dir_data + 'norm_adj_networkP.pickle', mode='rb') as f:
norm_adj_networkP = pickle.load(f)
nb_proteins = norm_adj_networkP.shape[0]
### disease similarity network
with open(args.dir_data + 'adj_networkD.pickle', mode='rb') as f:
adj_networkD = pickle.load(f)
nb_diseases = adj_networkD.shape[0]
# normalized adjacency matrix
deg_networkD = np.sum(adj_networkD, axis=0)
norm_adj_networkD = sp.csr_matrix(adj_networkD / np.sqrt(np.dot(deg_networkD.T, deg_networkD)), dtype=np.float64)
del(adj_networkD)
del(deg_networkD)
### protein-disease network (data used in PRINCE study)
with open(args.dir_data + 'biadj_networkPD.pickle', mode='rb') as f:
biadj_networkPD = pickle.load(f)
# get the list of protein-disease pairs
PD_pairs = biadj_networkPD.nonzero()
# number of protein-disease pairs
nb_PD_pairs = len(PD_pairs[0])
#### Network propagation MINProp ###########################
roc_value_set = np.array([], dtype=np.float64)
rankings = np.array([], dtype=np.int64)
for i in range(nb_PD_pairs):
# leave-one-out validation
# remove a protein-disease association
idx_P = PD_pairs[0][i]
idx_D = PD_pairs[1][i]
biadj_networkPD[idx_P, idx_D] = 0.0
biadj_networkPD.eliminate_zeros()
# normalized biadjacency matrix (ToDo: faster implementation)
degP = np.sum(biadj_networkPD, axis=1)
degD = np.sum(biadj_networkPD, axis=0)
norm_biadj_networkPD = sp.csr_matrix(biadj_networkPD / np.sqrt(np.dot(degP, degD)), dtype=np.float64)
norm_biadj_networkPD.data[np.isnan(norm_biadj_networkPD.data)] = 0.0
norm_biadj_networkPD.eliminate_zeros()
# set initial label
yP = np.zeros(nb_proteins, dtype=np.float64)
yD = np.zeros(nb_diseases, dtype=np.float64)
yD[idx_D] = 1.0
# propagation
fP, fD, convergent = minprop_2(norm_adj_networkP, norm_adj_networkD, norm_biadj_networkPD, yP, yD, args.alphaP, args.alphaD, args.eps, args.max_iter)
# ranking
labels_real = np.zeros(nb_proteins)
labels_real[idx_P] = 1
rank = int(np.where(labels_real[np.argsort(-fP)]==1)[0]) + 1
rankings = np.append(rankings, rank)
# get AUC value
roc_value = roc_auc_score(labels_real, fP)
print(i, "AUC:", roc_value, convergent)
roc_value_set = np.append(roc_value_set, roc_value)
# reassign the protein-disease association
biadj_networkPD[idx_P, idx_D] = 1.0
print("Average AUC", np.mean(roc_value_set))
# compute sensitivity and top rate (ROC-like curve)
# ToDo: faster implementation
sen_set = np.array([], dtype=np.float64)
top_rate_set = np.array([], dtype=np.float64)
for k in range(nb_proteins):
    # sensitivity
sen = (rankings <= (k+1)).sum() / nb_PD_pairs
# top rate
top_rate = (k + 1) / nb_proteins
sen_set = np.append(sen_set, sen)
top_rate_set = np.append(top_rate_set, top_rate)
# get AUC value
print("Summarized AUC", auc(top_rate_set, sen_set))
# plot ROC-like curve
plt.scatter(top_rate_set, sen_set)
plt.show()
| 38.298077 | 153 | 0.726839 |
6f03742065f7d2c3fc2369fb406d4426cdddbeab | 459 | py | Python | Exercicios em Python/ex080.py | Raphael-Azevedo/Exercicios_Python | dece138f38edd02b0731aed78e44acccb021b3cb | [
"MIT"
] | null | null | null | Exercicios em Python/ex080.py | Raphael-Azevedo/Exercicios_Python | dece138f38edd02b0731aed78e44acccb021b3cb | [
"MIT"
] | null | null | null | Exercicios em Python/ex080.py | Raphael-Azevedo/Exercicios_Python | dece138f38edd02b0731aed78e44acccb021b3cb | [
"MIT"
] | null | null | null | n = []
i = 0
for c in range(0, 5):
n1 = int(input('Digite um valor: '))
if c == 0 or n1 > n[-1]:
n.append(n1)
        print(f'Adicionado na posição {c} da lista...')
else:
pos = 0
while pos < len(n):
if n1 <= n[pos]:
n.insert(pos, n1)
                print(f'Adicionado na posição {pos} da lista...')
break
pos += 1
print(f'Os valores digitados em ordem foram {n}')
| 25.5 | 65 | 0.461874 |
6f03aa2ab2aaee70b468bb66183fe442925a1018 | 13,132 | py | Python | rawal_stuff/src/demo.py | rawalkhirodkar/traffic_light_detection | 0e1e99962477bcf271b22d5205b1e7afab8635ba | [
"MIT"
] | null | null | null | rawal_stuff/src/demo.py | rawalkhirodkar/traffic_light_detection | 0e1e99962477bcf271b22d5205b1e7afab8635ba | [
"MIT"
] | null | null | null | rawal_stuff/src/demo.py | rawalkhirodkar/traffic_light_detection | 0e1e99962477bcf271b22d5205b1e7afab8635ba | [
"MIT"
] | null | null | null | import cv2
import numpy as np
import random
import copy
import dlib
from keras.models import Sequential
from keras.optimizers import SGD
from keras.datasets import cifar10
from keras.preprocessing.image import ImageDataGenerator
from keras.models import Sequential
from keras.layers import Dense, Dropout, Activation, Flatten
from keras.layers import Convolution2D, MaxPooling2D
from keras.utils import np_utils
from keras.models import load_model
from convnetskeras.convnets import preprocess_image_batch, convnet
from convnetskeras.imagenet_tool import synset_to_dfs_ids
np.set_printoptions(threshold=np.inf)
#----------------------------Globals------------------------------------------------------------
MIN_AREA = 20
MAX_AREA = 500
MIN_RED_DENSITY = 0.4
MIN_BLACk_DENSITY_BELOW = 0
MIN_POLYAPPROX = 3
WIDTH_HEIGHT_RATIO = [0.333, 1.5] #range
#------------------------------------------------------------------------------------------------
tracker_list = []
TRACK_FRAME = 10
VOTE_FRAME = 3
frame0_detections = []
frame1_detections = []
frame2_detections = []
frame_detections = []
RADIAL_DIST = 10
#------------------------------------------------------------------------------------------------
#------------------------------------------------------------------------------------------------
BOUNDING_BOX = [0,0,0,0] #x1, y1, x2, y2
#------------------------------------------------------------------------------------------------
#------------------------------------------------------------------------------------------------
#------------------------------------------------------------------------------------------------
#------------------------------------------------------------------------------------------------
#------------------------------------------------------------------------------------------------
#------------------------------------------------------------------------------------------------
#------------------------------------------------------------------------------------------------
#------------------------------------------------------------------------------------------------
print "Loading model"
model = create_model()
model.load_weights("../model/traffic_light_weights.h5")
#------------------------------------------------------------------------------------------------
sgd = SGD(lr=0.1, decay=1e-6, momentum=0.9, nesterov=True)
model_heatmap = convnet('vgg_19',weights_path="../model/weights/vgg19_weights.h5", heatmap=True)
model_heatmap.compile(optimizer=sgd, loss='mse')
traffic_light_synset = "n06874185"
ids = synset_to_dfs_ids(traffic_light_synset)
#------------------------------------------------------------------------------------------------
#------------------------------------------------------------------------------------------------
clipnum = raw_input("Enter Clip number:\n")
f=open('../../dayTrain/dayClip'+str(clipnum)+'/frameAnnotationsBULB.csv','r')
inputs=f.read()
f.close();
inputs=inputs.split()
inputs=[i.split(";") for i in inputs]
for i in range(21):
inputs.pop(0)
# fourcc = cv2.VideoWriter_fourcc(*'XVID')
fourcc = cv2.cv.CV_FOURCC(*'XVID')
out = cv2.VideoWriter('output'+str(clipnum)+'.avi',fourcc, 20.0, (1280,960))
#------------------------------------------------------------------------------------------------
frame_num = -1
VIOLATION = -1
for i in inputs:
if i[1]=="stop":
filename="../../dayTrain/dayClip"+str(clipnum)+"/frames/"+i[0][12:len(i[0])]
original_img=cv2.imread(filename)
img=copy.copy(original_img)
height, width, channels = img.shape
if(frame_num == -1):
center_x = width/2
center_y = height/2
BB_width = width/4
BB_height = height/4
BOUNDING_BOX = [center_x-BB_width,center_y-BB_height,center_x + BB_width, center_y + BB_height ]
frame_num += 1
#------------------detection begins--------------------------------------------------------
if(frame_num % TRACK_FRAME < VOTE_FRAME): #VOTE_FRAME = 3, then 0,1,2 allowed
#------------------reset------------------------
if(frame_num % TRACK_FRAME == 0):
tracker_list = []
frame0_detections = []
frame1_detections = []
frame2_detections = []
#------------------reset------------------------
#-----------preprocess------------------------------------
img = cv2.medianBlur(img,3) # Median Blur to Remove Noise
img = cv2.cvtColor(img, cv2.COLOR_BGR2HSV)
b,g,r = cv2.split(img)
clahe = cv2.createCLAHE(clipLimit=7.0, tileGridSize=(8,8)) # Adaptive histogram equilization
clahe = clahe.apply(r)
img = cv2.merge((b,g,clahe))
#----------------------------------------------------------
#----------red threshold the HSV image--------------------
img1 = cv2.inRange(img, np.array([0, 100, 100]), np.array([10,255,255])) #lower red hue
img2 = cv2.inRange(img, np.array([160, 100, 100]), np.array([179,255,255])) #upper red hue
img3 = cv2.inRange(img, np.array([160, 40, 60]), np.array([180,70,80]))
img4 = cv2.inRange(img, np.array([0, 150, 40]), np.array([20,190,75]))
img5 = cv2.inRange(img, np.array([145, 35, 65]), np.array([170,65,90]))
img = cv2.bitwise_or(img1,img3)
img = cv2.bitwise_or(img,img2)
img = cv2.bitwise_or(img,img4)
img = cv2.bitwise_or(img,img5)
cv2.medianBlur(img,7)
ret,thresh = cv2.threshold(img,127,255,0)
#----------------------------------------------------------
#--------------------Heatmap------------------------------------
im_heatmap = preprocess_image_batch([filename], color_mode="bgr")
out_heatmap = model_heatmap.predict(im_heatmap)
heatmap = out_heatmap[0,ids].sum(axis=0)
my_range = np.max(heatmap) - np.min(heatmap)
heatmap = heatmap / my_range
heatmap = heatmap * 255
heatmap = cv2.resize(heatmap,(width,height))
cv2.imwrite("heatmap.png",heatmap)
cv2.imwrite("image.png",original_img)
heatmap[heatmap < 128] = 0 # Black
heatmap[heatmap >= 128] = 255 # White
heatmap = np.asarray(heatmap,dtype=np.uint8)
#----------------------------------------------------------
thresh = cv2.bitwise_and(thresh,heatmap)
#----------------------------------------------------------
contours, hierarchy = cv2.findContours(thresh,cv2.RETR_EXTERNAL,cv2.CHAIN_APPROX_NONE)
for cnt in contours:
area = cv2.contourArea(cnt)
x,y,w,h = cv2.boundingRect(cnt)
red_density = (area*1.0)/(w*h)
width_height_ratio = (w*1.0)/h
perimeter = cv2.arcLength(cnt, True)
approx = cv2.approxPolyDP(cnt, 0.04 * perimeter, True)
temp=cv2.cvtColor(original_img[y+h:y+2*h,x:x+w], cv2.COLOR_RGB2GRAY)
(thresh, temp) = cv2.threshold(temp, 128, 255, cv2.THRESH_BINARY | cv2.THRESH_OTSU)
black_density_below = ((w*h - cv2.countNonZero(temp))*1.0)/(w*h)
if area>MIN_AREA and area<MAX_AREA and len(approx) > MIN_POLYAPPROX and red_density > MIN_RED_DENSITY and width_height_ratio < WIDTH_HEIGHT_RATIO[1] and width_height_ratio > WIDTH_HEIGHT_RATIO[0] and black_density_below > MIN_BLACk_DENSITY_BELOW:
try:
r_x1=x-50
r_y1=y-50
r_x2=x+w+50
r_y2=y+h+50
temp=original_img[r_y1:r_y2,r_x1:r_x2]
xx=cv2.resize(temp,(128,128))
xx=np.asarray(xx)
xx=np.transpose(xx,(2,0,1))
xx=np.reshape(xx,(1,3,128,128))
if model.predict_classes(xx,verbose=0)==[1]:
cv2.rectangle(original_img, (x,y), (x+w,y+h),(0,255,0), 2)
#append detections
if frame_num % TRACK_FRAME == 0:
frame0_detections.append((x,y,w,h))
elif frame_num%TRACK_FRAME == 1:
frame1_detections.append((x,y,w,h))
elif frame_num%TRACK_FRAME == 2:
frame2_detections.append((x,y,w,h))
else:
cv2.rectangle(original_img, (x,y), (x+w,y+h),(255,0,0), 1)
except Exception as e:
cv2.rectangle(original_img, (x,y), (x+w,y+h),(0,255,0), 2) #edges are allowed
print e
pass
#--------------------Violation in Detect Phase------------------------------
frame_detections = []
if(frame_num % TRACK_FRAME == 0):
frame_detections = frame0_detections
if(frame_num % TRACK_FRAME == 1):
frame_detections = frame1_detections
if(frame_num % TRACK_FRAME == 2):
frame_detections = frame2_detections
#--------------------Violation in Detect Phase------------------------------
#compute and start tracking
if frame_num % TRACK_FRAME == 2:
all_detections = frame0_detections + frame1_detections + frame2_detections
final_detections = prune_detection(all_detections)
for (x,y,w,h) in final_detections:
tracker = dlib.correlation_tracker()
tracker.start_track(original_img, dlib.rectangle(x,y,(x+w),(y+h)))
tracker_list.append(tracker)
#------------------detection end----------------------------------------------------
#------------------tracking begins----------------------------------------------------
else:
frame_detections = []
for tracker in tracker_list:
tracker.update(original_img)
rect = tracker.get_position()
pt1 = (int(rect.left()), int(rect.top()))
pt2 = (int(rect.right()), int(rect.bottom()))
cv2.rectangle(original_img, pt1, pt2, (255, 255, 255), 2)
frame_detections.append((pt1[0], pt1[1], pt2[0]-pt1[0], pt2[1]-pt1[1]))
#------------------ tracking end----------------------------------------------------
if(is_violation(frame_detections) == True):
cv2.rectangle(original_img, (BOUNDING_BOX[0],BOUNDING_BOX[1]), (BOUNDING_BOX[2],BOUNDING_BOX[3]),(0, 0, 255), 2)
else:
cv2.rectangle(original_img, (BOUNDING_BOX[0],BOUNDING_BOX[1]), (BOUNDING_BOX[2],BOUNDING_BOX[3]),(60, 255, 255), 2)
cv2.imshow("Annotated",original_img)
out.write(original_img)
ch = 0xFF & cv2.waitKey(1)
if ch == 27:
break
cv2.destroyAllWindows()
#------------------------------------------------------------------------------------------------
| 43.919732 | 262 | 0.456823 |
6f043f48e4529a5b4d4237cf80295c09f14302ee | 3,720 | py | Python | kaivy/geometry/line2d.py | team-kaivy/kaivy | e27b53e8e9eedc48abc99151f3adbb76f0a9b331 | [
"MIT"
] | null | null | null | kaivy/geometry/line2d.py | team-kaivy/kaivy | e27b53e8e9eedc48abc99151f3adbb76f0a9b331 | [
"MIT"
] | null | null | null | kaivy/geometry/line2d.py | team-kaivy/kaivy | e27b53e8e9eedc48abc99151f3adbb76f0a9b331 | [
"MIT"
] | null | null | null | ########################################################################################################################
# #
# This file is part of kAIvy #
# #
# Copyright (c) 2019-2021 by the kAIvy team and contributors #
# #
########################################################################################################################
import numpy as np
from kaivy.geometry.geometry2d import Geometry2D
from kaivy.geometry.transformation2d import Transformation2D
from kivy.graphics import Line, SmoothLine, Color
| 42.758621 | 136 | 0.491129 |
6f050e8b2c15f5d5adcf74276ee71e811d247441 | 5,813 | py | Python | data_loader/MSVD_dataset.py | dendisuhubdy/collaborative-experts | e6db63837537c054723ce00b73264101acc29d39 | [
"MIT"
] | null | null | null | data_loader/MSVD_dataset.py | dendisuhubdy/collaborative-experts | e6db63837537c054723ce00b73264101acc29d39 | [
"MIT"
] | null | null | null | data_loader/MSVD_dataset.py | dendisuhubdy/collaborative-experts | e6db63837537c054723ce00b73264101acc29d39 | [
"MIT"
] | null | null | null | import copy
from pathlib import Path
from typing import Dict, Union, List
from collections import defaultdict
import numpy as np
from typeguard import typechecked
from zsvision.zs_utils import memcache, concat_features
from utils.util import memory_summary
from base.base_dataset import BaseDataset
| 43.059259 | 90 | 0.584724 |
6f067497faf1ec468f96a34eb789dd94adfffc2e | 2,381 | py | Python | wagtail/wagtailsearch/forms.py | balkantechnologies/BalkanCMS_core | 68625199028fc96abb175e410a4a7a92c02cb261 | [
"BSD-3-Clause"
] | 1 | 2021-09-21T00:06:52.000Z | 2021-09-21T00:06:52.000Z | wagtail/wagtailsearch/forms.py | balkantechnologies/BalkanCMS_core | 68625199028fc96abb175e410a4a7a92c02cb261 | [
"BSD-3-Clause"
] | 1 | 2021-02-24T08:25:30.000Z | 2021-02-24T08:25:30.000Z | wagtail/wagtailsearch/forms.py | balkantechnologies/BalkanCMS_core | 68625199028fc96abb175e410a4a7a92c02cb261 | [
"BSD-3-Clause"
] | 1 | 2020-11-24T10:21:24.000Z | 2020-11-24T10:21:24.000Z | from django import forms
from django.forms.models import inlineformset_factory
from django.utils.translation import ugettext_lazy as _
from wagtail.wagtailadmin.widgets import AdminPageChooser
from wagtail.wagtailsearch import models
EditorsPickFormSetBase = inlineformset_factory(models.Query, models.EditorsPick, form=EditorsPickForm, can_order=True, can_delete=True, extra=0)
| 36.075758 | 144 | 0.673667 |
6f069669d5a2624249034f4c529c35293422204b | 6,994 | py | Python | app/utils/docs_utils.py | BoostryJP/ibet-Prime | 924e7f8da4f8feea0a572e8b5532e09bcdf2dc99 | [
"Apache-2.0"
] | 2 | 2021-08-19T12:35:25.000Z | 2022-02-16T04:13:38.000Z | app/utils/docs_utils.py | BoostryJP/ibet-Prime | 924e7f8da4f8feea0a572e8b5532e09bcdf2dc99 | [
"Apache-2.0"
] | 46 | 2021-09-02T03:22:05.000Z | 2022-03-31T09:20:00.000Z | app/utils/docs_utils.py | BoostryJP/ibet-Prime | 924e7f8da4f8feea0a572e8b5532e09bcdf2dc99 | [
"Apache-2.0"
] | 1 | 2021-11-17T23:18:27.000Z | 2021-11-17T23:18:27.000Z | """
Copyright BOOSTRY Co., Ltd.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
SPDX-License-Identifier: Apache-2.0
"""
from typing import (
List,
Dict,
Any
)
from pydantic import BaseModel
from fastapi.openapi.utils import get_openapi
from fastapi.exceptions import RequestValidationError
from app.exceptions import (
InvalidParameterError,
SendTransactionError,
AuthorizationError,
ServiceUnavailableError
)
DEFAULT_RESPONSE = {
400: {
"description": "Invalid Parameter Error / Send Transaction Error",
"model": Error400Model
},
401: {
"description": "Authorization Error",
"model": Error401Model
},
404: {
"description": "Not Found Error",
"model": Error404Model
},
405: {
"description": "Method Not Allowed",
"model": Error405Model
},
422: {
"description": "Validation Error",
"model": Error422Model
},
503: {
"description": "Service Unavailable Error",
"model": Error503Model
}
}
| 29.635593 | 120 | 0.601373 |
6f06e78625c74321a938329732209995e4f8e1f0 | 2,282 | py | Python | scripts/models/arcii.py | mogumogu2333/MatchZoo | 1182b076bf571eba4af89141b93a51598afc252c | [
"Apache-2.0"
] | null | null | null | scripts/models/arcii.py | mogumogu2333/MatchZoo | 1182b076bf571eba4af89141b93a51598afc252c | [
"Apache-2.0"
] | null | null | null | scripts/models/arcii.py | mogumogu2333/MatchZoo | 1182b076bf571eba4af89141b93a51598afc252c | [
"Apache-2.0"
] | null | null | null | import os
import sys
sys.path.insert(0, "../../")
import matchzoo as mz
import typing
import pandas as pd
import matchzoo
from matchzoo.preprocessors.units.tokenize import Tokenize, WordPieceTokenize
from matchzoo.engine.base_preprocessor import load_preprocessor
import pickle
import utils
os.environ["CUDA_VISIBLE_DEVICES"] = "6"
input_dir = "../../data/"
model_dir = "../../models/arcii"
num_epochs = 10
utils.ensure_dir(model_dir)
with open(os.path.join(input_dir, "train.pkl"), 'rb') as f:
train_pack_processed = pickle.load(f)
print(train_pack_processed.frame().head())
with open(os.path.join(input_dir, "test.pkl"), 'rb') as f:
test_pack_processed = pickle.load(f)
print(test_pack_processed.frame().head())
preprocessor = load_preprocessor(dirpath=os.path.join(input_dir))
print(preprocessor._context)
glove_embedding = mz.datasets.embeddings.load_glove_embedding(dimension=100)
ranking_task = mz.tasks.Classification()
ranking_task.metrics = ['accuracy']
print("`ranking_task` initialized with metrics", ranking_task.metrics)
model = mz.models.ArcII()
model.params.update(preprocessor.context)
model.params['task'] = ranking_task
model.params['embedding_output_dim'] = 100
model.params['embedding_trainable'] = True
model.params['num_blocks'] = 2
model.params['kernel_1d_count'] = 32
model.params['kernel_1d_size'] = 3
model.params['kernel_2d_count'] = [64, 64]
model.params['kernel_2d_size'] = [3, 3]
model.params['pool_2d_size'] = [[3, 3], [3, 3]]
model.params['optimizer'] = 'adam'
model.build()
model.compile()
model.backend.summary()
embedding_matrix = glove_embedding.build_matrix(preprocessor.context['vocab_unit'].state['term_index'])
model.load_embedding_matrix(embedding_matrix)
test_x, test_y = test_pack_processed.unpack()
evaluate = mz.callbacks.EvaluateAllMetrics(model, x=test_x, y=test_y, batch_size=128)
dump_prediction = mz.callbacks.DumpPrediction(model, x=test_x, y=test_y, batch_size=128,
model_save_path=model_dir)
train_generator = mz.DataGenerator(
train_pack_processed,
num_dup=2,
num_neg=1,
batch_size=128,
)
print('num batches:', len(train_generator))
history = model.fit_generator(train_generator, epochs=num_epochs,
callbacks=[evaluate, dump_prediction],
workers=4, use_multiprocessing=True)
| 30.837838 | 103 | 0.765995 |
6f073d830bc26d55a9b16a99438ab898d40254be | 3,418 | py | Python | mcpyrate/markers.py | Technologicat/mcpyrate | 8182a8d246554b152e281d0f6c912e35ea58c316 | [
"MIT"
] | 34 | 2020-10-13T19:22:36.000Z | 2022-01-28T00:53:55.000Z | mcpyrate/markers.py | Technologicat/mcpyrate | 8182a8d246554b152e281d0f6c912e35ea58c316 | [
"MIT"
] | 32 | 2020-10-16T16:29:54.000Z | 2022-01-27T15:45:51.000Z | mcpyrate/markers.py | Technologicat/mcpyrate | 8182a8d246554b152e281d0f6c912e35ea58c316 | [
"MIT"
] | 2 | 2020-10-17T19:07:26.000Z | 2021-02-20T01:43:50.000Z | # -*- coding: utf-8; -*-
"""AST markers for internal communication.
*Internal* here means they are to be never passed to Python's `compile`;
macros may use them to work together.
"""
__all__ = ["ASTMarker", "get_markers", "delete_markers", "check_no_markers_remaining"]
import ast
from . import core, utils, walkers
def get_markers(tree, cls=ASTMarker):
"""Return a `list` of any `cls` instances found in `tree`. For output validation."""
w = ASTMarkerCollector()
w.visit(tree)
return w.collected
def delete_markers(tree, cls=ASTMarker):
"""Delete any `cls` ASTMarker instances found in `tree`.
The deletion takes place by replacing each marker node with
the actual AST node stored in its `body` attribute.
"""
return ASTMarkerDeleter().visit(tree)
def check_no_markers_remaining(tree, *, filename, cls=None):
"""Check that `tree` has no AST markers remaining.
If a class `cls` is provided, only check for markers that `isinstance(cls)`.
If there are any, raise `MacroExpansionError`.
No return value.
`filename` is the full path to the `.py` file, for error reporting.
Convenience function.
"""
cls = cls or ASTMarker
remaining_markers = get_markers(tree, cls)
if remaining_markers:
codes = [utils.format_context(node, n=5) for node in remaining_markers]
locations = [utils.format_location(filename, node, code) for node, code in zip(remaining_markers, codes)]
report = "\n\n".join(locations)
raise core.MacroExpansionError(f"{filename}: AST markers remaining after expansion:\n{report}")
| 37.977778 | 113 | 0.693681 |
6f0871e5f1835b667efee97ba793562fead702a2 | 1,960 | py | Python | lambda.py | deepanshu-yadav/NSFW-Classifier | ec6a98eb982ec30c2a21ca11dc92d580cc8a8981 | [
"MIT"
] | 13 | 2019-09-18T18:32:17.000Z | 2022-03-01T08:01:18.000Z | lambda.py | deepanshu-yadav/NSFW-Classifier | ec6a98eb982ec30c2a21ca11dc92d580cc8a8981 | [
"MIT"
] | null | null | null | lambda.py | deepanshu-yadav/NSFW-Classifier | ec6a98eb982ec30c2a21ca11dc92d580cc8a8981 | [
"MIT"
] | 4 | 2020-03-27T10:00:52.000Z | 2021-04-23T03:30:43.000Z | import boto3
import json
import numpy as np
import base64, os, boto3, ast, json
endpoint = 'myprojectcapstone'
| 32.131148 | 147 | 0.514286 |
6f08e7a44962b3d4ce1d67b7f28da022e46eb7fe | 4,097 | py | Python | src/bindings/python/tests/test_ngraph/test_eye.py | si-eun-kim/openvino | 1db4446e2a6ead55d066e0b4e718fa37f509353a | [
"Apache-2.0"
] | 2 | 2021-12-14T15:27:46.000Z | 2021-12-14T15:34:16.000Z | src/bindings/python/tests/test_ngraph/test_eye.py | si-eun-kim/openvino | 1db4446e2a6ead55d066e0b4e718fa37f509353a | [
"Apache-2.0"
] | 33 | 2021-09-23T04:14:30.000Z | 2022-01-24T13:21:32.000Z | src/bindings/python/tests/test_ngraph/test_eye.py | si-eun-kim/openvino | 1db4446e2a6ead55d066e0b4e718fa37f509353a | [
"Apache-2.0"
] | 11 | 2021-11-09T00:51:40.000Z | 2021-11-10T12:04:16.000Z | # Copyright (C) 2018-2022 Intel Corporation
# SPDX-License-Identifier: Apache-2.0
import openvino.runtime.opset9 as ov
import numpy as np
import pytest
from tests.runtime import get_runtime
from openvino.runtime.utils.types import get_element_type_str
from openvino.runtime.utils.types import get_element_type
| 39.776699 | 90 | 0.686112 |
6f09c66c2c39712c9d1518ff1035780b17e4b03c | 2,371 | py | Python | tests/error/test_format_error.py | GDGSNF/graphql-core | 35aa9b261c850aa5f0c335c2405956fd41ed5ca2 | [
"MIT"
] | 590 | 2015-10-06T18:22:49.000Z | 2022-03-22T16:32:17.000Z | tests/error/test_format_error.py | vpetrovykh/graphql-core | 7af97e22afb27861fc1b7d7ca0292095f8427ecb | [
"MIT"
] | 300 | 2015-10-06T18:58:11.000Z | 2022-03-22T14:01:44.000Z | tests/error/test_format_error.py | vpetrovykh/graphql-core | 7af97e22afb27861fc1b7d7ca0292095f8427ecb | [
"MIT"
] | 270 | 2015-10-08T19:47:38.000Z | 2022-03-10T04:17:51.000Z | from typing import List, Union
from pytest import raises
from graphql.error import GraphQLError, format_error
from graphql.language import Node, Source
from graphql.pyutils import Undefined
| 31.197368 | 80 | 0.554197 |
6f0a8a484c64fa9bfcfccccb0a0f15f2d119765a | 6,708 | py | Python | pymonad/test/test_Maybe.py | bjd2385/pymonad | baec7a540d9195b2da029d1a101edd7c385f94bb | [
"BSD-3-Clause"
] | null | null | null | pymonad/test/test_Maybe.py | bjd2385/pymonad | baec7a540d9195b2da029d1a101edd7c385f94bb | [
"BSD-3-Clause"
] | null | null | null | pymonad/test/test_Maybe.py | bjd2385/pymonad | baec7a540d9195b2da029d1a101edd7c385f94bb | [
"BSD-3-Clause"
] | null | null | null | # --------------------------------------------------------
# (c) Copyright 2014 by Jason DeLaat.
# Licensed under BSD 3-clause licence.
# --------------------------------------------------------
import unittest
from pymonad.Maybe import Maybe, Just, First, Last, _Nothing, Nothing
from pymonad.Reader import curry
from pymonad.test.MonadTester import *
from pymonad.test.MonoidTester import *
if __name__ == "__main__":
unittest.main()
| 33.373134 | 73 | 0.690966 |
6f0b8327462eef4971df182fcdc4e7e99669fd00 | 210 | py | Python | sborl/__init__.py | canonical/sborl | f821ecfcbf977d0605def66dca19ea5e8e39b5a3 | [
"Apache-2.0"
] | null | null | null | sborl/__init__.py | canonical/sborl | f821ecfcbf977d0605def66dca19ea5e8e39b5a3 | [
"Apache-2.0"
] | null | null | null | sborl/__init__.py | canonical/sborl | f821ecfcbf977d0605def66dca19ea5e8e39b5a3 | [
"Apache-2.0"
] | null | null | null | # Copyright 2022 Canonical Ltd.
# See LICENSE file for licensing details.
__version__ = "0.0.8"
# flake8: noqa: F401,F402
from . import errors, events, relation, testing
from .relation import EndpointWrapper
| 23.333333 | 47 | 0.761905 |
6f0bb8acf71ebb128d83c12c5909aa37ad5afe8a | 940 | py | Python | sizer.py | riffcc/librarian | f3cf8f4cc9f9a717e5f807a1d8558eb8c4e4d528 | [
"MIT"
] | null | null | null | sizer.py | riffcc/librarian | f3cf8f4cc9f9a717e5f807a1d8558eb8c4e4d528 | [
"MIT"
] | null | null | null | sizer.py | riffcc/librarian | f3cf8f4cc9f9a717e5f807a1d8558eb8c4e4d528 | [
"MIT"
] | null | null | null | #!/usr/bin/python3
# Fetch torrent sizes
# TODO: Report number of files before we go etc
import os
from torrentool.api import Torrent
from fnmatch import fnmatch
root = '/opt/radio/collections'
pattern = "*.torrent"
alltorrentsize = 0
print("Thanks for using The Librarian.")
for path, subdirs, files in os.walk(root):
for name in files:
if fnmatch(name, pattern):
torrentstats = Torrent.from_file(os.path.join(path, name))
alltorrentsize += torrentstats.total_size
print('Torrent size ' + str(torrentstats.total_size) + ' for a total so far of ' + str(alltorrentsize))
print('DEBUG' + os.path.join(path, name))
# Reading filesize
my_torrent = Torrent.from_file('/opt/radio/collections/arienscompanymanuals/archive.org/download/collection_01_ariens_manuals/collection_01_ariens_manuals_archive.torrent')
size = my_torrent.total_size # Total files size in bytes.
print(size) | 34.814815 | 172 | 0.726596 |
6f0bf095397f81c3ceab712d5eed93ca0139a752 | 1,319 | py | Python | i_vis/core/login.py | piechottam/i-vis-core | 0b90300d1ae8b96d28a80802c1300dd861ad6f4e | [
"MIT"
] | null | null | null | i_vis/core/login.py | piechottam/i-vis-core | 0b90300d1ae8b96d28a80802c1300dd861ad6f4e | [
"MIT"
] | null | null | null | i_vis/core/login.py | piechottam/i-vis-core | 0b90300d1ae8b96d28a80802c1300dd861ad6f4e | [
"MIT"
] | null | null | null | """ Flask LoginManager plugin.
Import and execute ``login.init_app(app)`` in a factory function to use.
"""
from typing import Any, Callable, TYPE_CHECKING
from functools import wraps
from flask import redirect, request, url_for, current_app
from flask_login import current_user
from flask_login.login_manager import LoginManager
from .errors import IllegalAccessError
if TYPE_CHECKING:
from werkzeug.wrappers import Response
login = LoginManager()
def admin_required(func: Callable) -> Callable:
"""Make view only accessible to admins.
Args:
func: Callabe to wrap.
Returns:
Wrapped callable - only callable when user is an admin.
"""
return decorated_view
| 25.862745 | 72 | 0.686884 |
6f0cc8d81107fd93a3ad95d929b3e7cadc42e6cc | 10,078 | py | Python | code/App.py | KasinSparks/Arduino_RGB_Lights | 9c924ef3c7df2c7725c2178b42eb0f784168160c | [
"MIT"
] | null | null | null | code/App.py | KasinSparks/Arduino_RGB_Lights | 9c924ef3c7df2c7725c2178b42eb0f784168160c | [
"MIT"
] | null | null | null | code/App.py | KasinSparks/Arduino_RGB_Lights | 9c924ef3c7df2c7725c2178b42eb0f784168160c | [
"MIT"
] | null | null | null | from tkinter import *
from ModeEnum import Mode
import SerialHelper
import Views.StaticView
import Views.CustomWidgets.Silder
from ColorEnum import Color
from functools import partial
from Views.CommandPanel import CommandPanel
from Views.ListItem import ListItem
from ProcessControl import ProcessManager, ProcessCommandEnum
import os, signal
menuBackgroundColor = "#262e30"
menuForegroundColor = "#e5e4c5"
menuActiveForegroundColor = menuForegroundColor
menuActiveBackgroundColor = "#464743"
mainBackgroundColor = "#1b2122"
#from SerialHelper import getSerialPorts
#for sp in getSerialPorts():
# print(sp)
# Start the app up!
app = App()
app.master.title("RGB Lights 3000")
app.master.config(menu=app.my_menu, background=mainBackgroundColor)
#subprocess.call(["./controller.py", "/dev/ttyUSB0"])
# Start up the app and the process manager
pid = os.fork()
if pid:
# parent
app.mainloop()
os.kill(pid, signal.SIGTERM)
else:
# child
exec(open("./code/ProcessControl/ProcessManager.py").read())
#os.execlp("python3", "python3", "./ProcessControl/ProcessManager.py")
#os.system("controller.py")
#app.mainloop()
#print("here") | 32.509677 | 209 | 0.594066 |
6f0d7bbee7a9caaa60cc0549c015512769c48c45 | 4,944 | py | Python | tests/io/product/test_sidd_writing.py | ngageoint/SarPy | a21ebfe136833e3d25cac4e5ebfd534f28538db4 | [
"MIT"
] | null | null | null | tests/io/product/test_sidd_writing.py | ngageoint/SarPy | a21ebfe136833e3d25cac4e5ebfd534f28538db4 | [
"MIT"
] | null | null | null | tests/io/product/test_sidd_writing.py | ngageoint/SarPy | a21ebfe136833e3d25cac4e5ebfd534f28538db4 | [
"MIT"
] | null | null | null | import os
import json
import tempfile
import shutil
import unittest
from sarpy.io.complex.sicd import SICDReader
from sarpy.io.product.sidd import SIDDReader
from sarpy.io.product.sidd_schema import get_schema_path
from sarpy.processing.sidd.sidd_product_creation import create_detected_image_sidd, create_dynamic_image_sidd, create_csi_sidd
from sarpy.processing.ortho_rectify import NearestNeighborMethod
from tests import parse_file_entry
try:
from lxml import etree
except ImportError:
etree = None
product_file_types = {}
this_loc = os.path.abspath(__file__)
file_reference = os.path.join(os.path.split(this_loc)[0], 'product_file_types.json') # specifies file locations
if os.path.isfile(file_reference):
with open(file_reference, 'r') as fi:
the_files = json.load(fi)
for the_type in the_files:
valid_entries = []
for entry in the_files[the_type]:
the_file = parse_file_entry(entry)
if the_file is not None:
valid_entries.append(the_file)
product_file_types[the_type] = valid_entries
sicd_files = product_file_types.get('SICD', [])
| 44.142857 | 126 | 0.619539 |
6f0f9bbc343ebc2f491e5e0fa189894eb08c5ad7 | 28,213 | py | Python | src/westpa/tools/wipi.py | burntyellow/adelman_ci | cca251a51b34843faed0275cce01d7a307829993 | [
"MIT"
] | null | null | null | src/westpa/tools/wipi.py | burntyellow/adelman_ci | cca251a51b34843faed0275cce01d7a307829993 | [
"MIT"
] | null | null | null | src/westpa/tools/wipi.py | burntyellow/adelman_ci | cca251a51b34843faed0275cce01d7a307829993 | [
"MIT"
] | null | null | null | import numpy as np
import scipy.sparse as sp
from westpa.tools import Plotter
# A useful dataclass used as a wrapper for w_ipa to facilitate
# ease-of-use in ipython/jupyter notebooks/sessions.
# It basically just wraps up numpy arrays and dicts.
# Similar to the above, but slightly expanded to contain information from analysis files.
# This handles the 'schemes', and all assorted data.
| 43.073282 | 222 | 0.589267 |
6f0fd9711f448e832198d3798ba9ecf322599507 | 680 | py | Python | src/M5_random_module.py | posguy99/comp660-fall2020 | 0fbf5b660fe8863bf9754b5227fe47dd03dc2291 | [
"MIT"
] | null | null | null | src/M5_random_module.py | posguy99/comp660-fall2020 | 0fbf5b660fe8863bf9754b5227fe47dd03dc2291 | [
"MIT"
] | null | null | null | src/M5_random_module.py | posguy99/comp660-fall2020 | 0fbf5b660fe8863bf9754b5227fe47dd03dc2291 | [
"MIT"
] | null | null | null | import random
# use of the random module
print(random.random()) # a float value >= 0.0 and < 1.0
print(random.random()*100) # a float value >= 0.0 and < 100.0
# use of the randint method
print(random.randint(1, 100)) # an int from 1 to 100
print(random.randint(101, 200)) # an int from 101 to 200
print(random.randint(0, 7))    # an int from 0 to 7
die1 = random.randint(1, 6)
die2 = random.randint(1, 6)
print("Your roll: ", die1, die2)
print(random.randrange(1, 100)) # an int from 1 to 99
print(random.randrange(100, 200, 2)) # an even int from 100 to 198
print(random.randrange(11, 250, 2)) # an odd int from 11 to 249
| 35.789474 | 73 | 0.627941 |
6f0fe7aa9178367d1e8da95885ff8667f686cebb | 1,385 | py | Python | lnt/graphics/styles.py | flotwig/lnt | 2f4ab3d051508801b521f5da39f0cf522c54a96e | [
"MIT"
] | 7 | 2020-02-21T23:43:10.000Z | 2021-07-06T11:16:37.000Z | lnt/graphics/styles.py | arshbot/lntools | 9c6f344452323ff93b7a6a3763697d2ad81b4961 | [
"MIT"
] | 19 | 2019-08-07T18:00:13.000Z | 2020-12-03T17:21:01.000Z | lnt/graphics/styles.py | arshbot/lntools | 9c6f344452323ff93b7a6a3763697d2ad81b4961 | [
"MIT"
] | 1 | 2019-11-05T21:38:29.000Z | 2019-11-05T21:38:29.000Z | from PyInquirer import style_from_dict, Token, prompt, Separator
from lnt.graphics.utils import vars_to_string
# Mark styles
prompt_style = style_from_dict({
Token.Separator: '#6C6C6C',
Token.QuestionMark: '#FF9D00 bold',
#Token.Selected: '', # default
Token.Selected: '#5F819D',
Token.Pointer: '#FF9D00 bold',
Token.Instruction: '', # default
Token.Answer: '#5F819D bold',
Token.Question: '',
})
# Mark prompt configurations
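# The prompt configurations themselves are not shown in this listing. A small,
# illustrative question list using the style above might look like the sketch
# below; the question name, message and choices are assumptions, not the
# actual lnt prompts.
example_questions = [
    {
        "type": "list",
        "name": "channel",
        "message": "Select a channel",
        "choices": [Separator("-- open channels --"), "channel-1", "channel-2"],
    }
]
# answers = prompt(example_questions, style=prompt_style)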
| 30.108696 | 134 | 0.639711 |
6f10007c40e440e0d8097efa2d2333808b818d8f | 25,327 | py | Python | dvrip.py | jackkum/python-dvr | c004606ff8a37a213715fbc835cef77add0b3014 | [
"MIT"
] | 149 | 2018-04-04T18:46:43.000Z | 2022-03-07T18:27:52.000Z | dvrip.py | jackkum/python-dvr | c004606ff8a37a213715fbc835cef77add0b3014 | [
"MIT"
] | 20 | 2018-09-05T13:10:29.000Z | 2022-03-28T12:56:36.000Z | dvrip.py | jackkum/python-dvr | c004606ff8a37a213715fbc835cef77add0b3014 | [
"MIT"
] | 51 | 2018-05-29T02:10:04.000Z | 2022-02-23T14:24:11.000Z | import os
import struct
import json
from time import sleep
import hashlib
import threading
from socket import socket, AF_INET, SOCK_STREAM, SOCK_DGRAM
from datetime import *
from re import compile
import time
import logging
| 31.5798 | 232 | 0.48774 |
6f1051aadde1f5582ce2b30a763b8cd2ec505a2e | 1,373 | py | Python | tests/test_renderer.py | 0xflotus/maildown | fa17ce6a29458da549a145741db8e5092def2176 | [
"MIT"
] | 626 | 2019-05-08T22:34:45.000Z | 2022-03-31T07:29:35.000Z | tests/test_renderer.py | pythonthings/maildown | 4e0caf297bdf264ab5ead537eb45d20f187971a1 | [
"MIT"
] | 12 | 2019-04-30T20:47:17.000Z | 2019-06-27T11:19:46.000Z | tests/test_renderer.py | pythonthings/maildown | 4e0caf297bdf264ab5ead537eb45d20f187971a1 | [
"MIT"
] | 36 | 2019-05-08T23:50:41.000Z | 2021-07-30T17:46:24.000Z | import mock
from maildown import renderer
import mistune
import pygments
from pygments import lexers
from pygments.formatters import html
import premailer
import jinja2
| 33.487805 | 72 | 0.758194 |
6f105f0927ad589737ae9605008d8f670158e4d5 | 1,423 | py | Python | practice/practice_4/main.py | Norbert2808/programming | 3dbab86718c1cee5efe3b4b92e4492f984c75ea2 | [
"Unlicense"
] | null | null | null | practice/practice_4/main.py | Norbert2808/programming | 3dbab86718c1cee5efe3b4b92e4492f984c75ea2 | [
"Unlicense"
] | null | null | null | practice/practice_4/main.py | Norbert2808/programming | 3dbab86718c1cee5efe3b4b92e4492f984c75ea2 | [
"Unlicense"
] | null | null | null | from generator import *
from iterator import *
if __name__ == "__main__":
while True:
print("Enter 1, if you want to generate prime Lucas Number.")
print("Enter 2, if you want to iterate prime Lucas Number.")
print("Or 0, if you want to get out: ")
count = intInput("")
if count == 1:
n = nInput()
print("First " + str(n) + " prime Lucas Number:")
gen = generator(n)
printGenerator(gen)
elif count == 2:
n = nInput()
print("First " + str(n) + " prime Lucas Number:")
iter = IteratorLucasNumbers()
printIterator(iter)
elif count == 0:
break
else: print("Enter 1, or 2, or 0!") | 26.351852 | 69 | 0.51019 |
6f11a287519a38fcf82e8d66f617304a1a4f570b | 688 | py | Python | setup.py | wgnet/grail | 1d8d22bebda758800cb9aa9027486053d568bc14 | [
"Apache-2.0"
] | 37 | 2015-01-12T07:34:34.000Z | 2020-12-29T09:46:28.000Z | setup.py | wgnet/grail | 1d8d22bebda758800cb9aa9027486053d568bc14 | [
"Apache-2.0"
] | 7 | 2015-04-10T14:55:34.000Z | 2021-04-28T10:00:47.000Z | setup.py | wgnet/grail | 1d8d22bebda758800cb9aa9027486053d568bc14 | [
"Apache-2.0"
] | 17 | 2015-01-06T20:09:02.000Z | 2019-06-28T08:57:36.000Z | from setuptools import setup
version = '1.0.10'
setup(
name='grail',
version=version,
classifiers=[
'Intended Audience :: Developers',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
],
packages=[
'grail',
],
description='Grail is a library which allows test script creation based on steps. '
'It helps to structure your tests and get rid of additional test documentation for your code.',
include_package_data=True,
author='Wargaming.NET',
author_email='web_qa_auto@wargaming.net',
url='https://github.com/wgnet/grail'
)
| 28.666667 | 111 | 0.640988 |
6f123e344c537798141dc193f3e6368ab0209301 | 964 | py | Python | tests/test_free.py | qingyunha/boltdb | 2ea341336f02210f751cd49ea7724d890511db38 | [
"MIT"
] | 7 | 2020-11-18T10:06:47.000Z | 2021-09-06T16:31:13.000Z | tests/test_free.py | qingyunha/boltdb | 2ea341336f02210f751cd49ea7724d890511db38 | [
"MIT"
] | 1 | 2021-02-20T19:32:11.000Z | 2021-02-20T19:32:11.000Z | tests/test_free.py | qingyunha/boltdb | 2ea341336f02210f751cd49ea7724d890511db38 | [
"MIT"
] | 2 | 2020-11-25T15:21:20.000Z | 2021-02-20T19:28:14.000Z | import os
import unittest
import tempfile
from boltdb import BoltDB
| 24.717949 | 61 | 0.551867 |
6f149a0dd9e45b60d9d630858342198ce7d83ebf | 1,709 | py | Python | xen/xen-4.2.2/tools/xm-test/tests/xapi/01_xapi-vm_basic.py | zhiming-shen/Xen-Blanket-NG | 47e59d9bb92e8fdc60942df526790ddb983a5496 | [
"Apache-2.0"
] | 1 | 2018-02-02T00:15:26.000Z | 2018-02-02T00:15:26.000Z | xen/xen-4.2.2/tools/xm-test/tests/xapi/01_xapi-vm_basic.py | zhiming-shen/Xen-Blanket-NG | 47e59d9bb92e8fdc60942df526790ddb983a5496 | [
"Apache-2.0"
] | null | null | null | xen/xen-4.2.2/tools/xm-test/tests/xapi/01_xapi-vm_basic.py | zhiming-shen/Xen-Blanket-NG | 47e59d9bb92e8fdc60942df526790ddb983a5496 | [
"Apache-2.0"
] | 1 | 2019-05-27T09:47:18.000Z | 2019-05-27T09:47:18.000Z | #!/usr/bin/python
# Copyright (C) International Business Machines Corp., 2006
# Author: Stefan Berger <stefanb@us.ibm.com>
# Basic VM creation test
from XmTestLib import xapi
from XmTestLib.XenAPIDomain import XmTestAPIDomain
from XmTestLib import *
from xen.xend import XendAPIConstants
import commands
import os
try:
# XmTestAPIDomain tries to establish a connection to XenD
domain = XmTestAPIDomain()
except Exception, e:
SKIP("Skipping test. Error: %s" % str(e))
vm_uuid = domain.get_uuid()
session = xapi.connect()
domain.start(startpaused=True)
res = session.xenapi.VM.get_power_state(vm_uuid)
if res != XendAPIConstants.XEN_API_VM_POWER_STATE[XendAPIConstants.XEN_API_VM_POWER_STATE_PAUSED]:
FAIL("VM was not started in 'paused' state")
res = session.xenapi.VM.unpause(vm_uuid)
res = session.xenapi.VM.get_power_state(vm_uuid)
if res != XendAPIConstants.XEN_API_VM_POWER_STATE[XendAPIConstants.XEN_API_VM_POWER_STATE_RUNNING]:
FAIL("VM could not be put into 'running' state")
console = domain.getConsole()
try:
run = console.runCmd("cat /proc/interrupts")
except ConsoleError, e:
saveLog(console.getHistory())
FAIL("Could not access proc-filesystem")
res = session.xenapi.VM.pause(vm_uuid)
res = session.xenapi.VM.get_power_state(vm_uuid)
if res != XendAPIConstants.XEN_API_VM_POWER_STATE[XendAPIConstants.XEN_API_VM_POWER_STATE_PAUSED]:
FAIL("VM could not be put into 'paused' state")
res = session.xenapi.VM.unpause(vm_uuid)
res = session.xenapi.VM.get_power_state(vm_uuid)
if res != XendAPIConstants.XEN_API_VM_POWER_STATE[XendAPIConstants.XEN_API_VM_POWER_STATE_RUNNING]:
FAIL("VM could not be 'unpaused'")
domain.stop()
domain.destroy()
| 27.564516 | 99 | 0.774137 |
6f162b9d147aaaf9aa9b58f1a839359e4e0bcd22 | 9,024 | py | Python | nlp_fourier.py | neitzke/stokes-numerics | 8845aef7598ca245d095cca690bf48568758a8c9 | [
"MIT"
] | 1 | 2020-08-03T16:24:06.000Z | 2020-08-03T16:24:06.000Z | nlp_fourier.py | neitzke/stokes-numerics | 8845aef7598ca245d095cca690bf48568758a8c9 | [
"MIT"
] | null | null | null | nlp_fourier.py | neitzke/stokes-numerics | 8845aef7598ca245d095cca690bf48568758a8c9 | [
"MIT"
] | null | null | null | """Fourier transform non-linear Poisson solver"""
# This module is concerned with solving the "non-linear Poisson"
# equation
# Delta(u) = f(u,z)
# on a uniform rectangular mesh, with u = u0 on the boundary.
#
# We solve the equation by an iterative method, solving an
# approximation to the linearized equation at u_i to get u_{i+1} and
# terminating when u_{i+1} - u_i is small enough.
#
# The key feature of this solve is that we use a very coarse
# approximation of the linearization---chosen specifically so that it
# can be solved by Fourier transform methods. The coarse
# approximation means that each iteration makes little progress
# toward the final solution, and many iterations are necessary.
# However, the availability of efficient FFT routines means that each
# iteration is very fast, and so in many cases there is a net gain
# compared to a direct method.
#
# The exact linearized equation for v = u-u0 is
# Delta(vdot) - d1F(v,z) vdot = F(v,z) - Delta(vdot) (*)
# where
# F(v,z) = f(u0+v,z) - Delta(u0)
# We rewrite (*) as
# (Delta - A)vdot = RHS
# This is exactly solvable by Fourier methods if A is a constant
# function.
#
# To approximate a solution, we replace A = d1F(v,z) by a constant
# that is in some way representative of its values on the grid points.
# We follow the suggestion of [1] to use the "minimax" value
#
# A = (max(d1F) + min(d1F)) / 2
#
# where max and min are taken over the grid.
#
# References
#
# [1] Concus, P. and Golub, G. H. 1973. Use of fast direct methods for
# the efficient numerical solution of nonseparable elliptic
# equations. SIAM J. Numer. Anal., 10: 1103-1103.
#
# KNOWN ISSUES:
#
# * The initialization code assumes that u_0 is harmonic in a
# neighborhood of the boundary of the mesh. This is not a
# fundamental requirement of the method, but because u_0 cannot be
# easily extended to a doubly-periodic function its Laplacian is
# computed by a finite difference scheme rather than by FFT methods.
# Being harmonic at the boundary allows us to simply zero out the
# Laplacian at the edges and ignore this issue.
#
# (Note that this assumption is satisfied for the applications to
# the self-duality equations for which this solver was developed).
from __future__ import absolute_import
import numpy as np
import scipy.signal
from dst2 import dst2, idst2, dst2freq
from solverexception import SolverException
import time
import logging
logger = logging.getLogger(__name__)
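# A compact, illustrative sketch of the linearized step described in the header
# comment above. It uses scipy's sine transforms directly rather than this
# module's dst2 helpers, and the function/argument names are assumptions, not
# the module's API.
def _example_linearized_step(rhs, d1F, dx, dy):
    """Solve (Delta - A) vdot = rhs with A the 'minimax' constant of [1]."""
    import numpy as np
    from scipy.fft import dstn, idstn

    A = 0.5 * (d1F.max() + d1F.min())   # minimax constant over the grid
    ny, nx = rhs.shape
    # Sine-mode wavenumbers for zero Dirichlet data on the interior grid.
    kx = np.pi * np.arange(1, nx + 1) / (dx * (nx + 1))
    ky = np.pi * np.arange(1, ny + 1) / (dy * (ny + 1))
    lam = -(kx[np.newaxis, :] ** 2 + ky[:, np.newaxis] ** 2)  # Laplacian eigenvalues
    # Diagonal solve in the sine basis: (lam - A) * vdot_hat = rhs_hat.
    vdot = idstn(dstn(rhs, type=1) / (lam - A), type=1)
    # The caller would update v <- v + vdot and iterate until vdot is small.
    return vdot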
| 38.729614 | 176 | 0.623559 |
6f177aacdeb67b4df7640983b24e1411fe279553 | 2,853 | py | Python | app/models/fragment.py | saury2013/Memento | dbb2031a5aff3064f40bcb5afe631de8724a547e | [
"MIT"
] | null | null | null | app/models/fragment.py | saury2013/Memento | dbb2031a5aff3064f40bcb5afe631de8724a547e | [
"MIT"
] | null | null | null | app/models/fragment.py | saury2013/Memento | dbb2031a5aff3064f40bcb5afe631de8724a547e | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
from datetime import datetime
from sqlalchemy.dialects.mysql import LONGTEXT
from sqlalchemy.orm import load_only
from sqlalchemy import func
from flask import abort
from markdown import Markdown,markdown
from app.models import db,fragment_tags_table
from app.models.tag import Tag
from app.whoosh import search_helper
| 38.04 | 95 | 0.660007 |
6f18b3824b6daec3cd5fa315168eff3f33823b3f | 24,236 | py | Python | qatrack/qa/migrations/0001_initial.py | crcrewso/qatrackplus | b9da3bc542d9e3eca8b7291bb631d1c7255d528e | [
"MIT"
] | 20 | 2021-03-11T18:37:32.000Z | 2022-03-23T19:38:07.000Z | qatrack/qa/migrations/0001_initial.py | crcrewso/qatrackplus | b9da3bc542d9e3eca8b7291bb631d1c7255d528e | [
"MIT"
] | 75 | 2021-02-12T02:37:33.000Z | 2022-03-29T20:56:16.000Z | qatrack/qa/migrations/0001_initial.py | crcrewso/qatrackplus | b9da3bc542d9e3eca8b7291bb631d1c7255d528e | [
"MIT"
] | 5 | 2021-04-07T15:46:53.000Z | 2021-09-18T16:55:00.000Z | # -*- coding: utf-8 -*-
from django.db import migrations, models
import django.utils.timezone
import django.db.models.deletion
from django.conf import settings
| 74.572308 | 469 | 0.637523 |
6f1b13d2e45c97356d3de371d486f2f4c6321a9d | 746 | py | Python | cloudygram_api_server/models/telethon_model.py | Maverick1983/cloudygram-api-server | acb0b0ed173ebfff8b1a2b69efef3abe943e735e | [
"Unlicense"
] | 2 | 2021-05-25T15:24:03.000Z | 2021-05-27T09:35:56.000Z | cloudygram_api_server/models/telethon_model.py | skurob/cgas | 7660064882c5d5e56dbc4aa7e5be99754ffdcfd6 | [
"Unlicense"
] | 1 | 2021-05-27T08:32:55.000Z | 2021-05-27T10:02:35.000Z | cloudygram_api_server/models/telethon_model.py | skurob/cgas | 7660064882c5d5e56dbc4aa7e5be99754ffdcfd6 | [
"Unlicense"
] | 1 | 2021-06-03T10:06:49.000Z | 2021-06-03T10:06:49.000Z | from .constants import SUCCESS_KEY, MESSAGE_KEY, DATA_KEY
from cloudygram_api_server.scripts import CGMessage
from typing import List
| 25.724138 | 60 | 0.608579 |
6f1b8a527ec012630d1bead41b940dac1320a132 | 4,617 | py | Python | source1/bsp/entities/portal2_entity_handlers.py | tltneon/SourceIO | 418224918c2b062a4c78a41d4d65329ba2decb22 | [
"MIT"
] | 199 | 2019-04-02T02:30:58.000Z | 2022-03-30T21:29:49.000Z | source1/bsp/entities/portal2_entity_handlers.py | syborg64/SourceIO | e4ba86d801f518e192260af08ef533759c2e1cc3 | [
"MIT"
] | 113 | 2019-03-03T19:36:25.000Z | 2022-03-31T19:44:05.000Z | source1/bsp/entities/portal2_entity_handlers.py | syborg64/SourceIO | e4ba86d801f518e192260af08ef533759c2e1cc3 | [
"MIT"
] | 38 | 2019-05-15T16:49:30.000Z | 2022-03-22T03:40:43.000Z | import math
from mathutils import Euler
import bpy
from .portal2_entity_classes import *
from .portal_entity_handlers import PortalEntityHandler
local_entity_lookup_table = PortalEntityHandler.entity_lookup_table.copy()
local_entity_lookup_table.update(entity_class_handle)
| 53.068966 | 109 | 0.753736 |
6f1ed343bbac27b5996271e2bb652c962f6512bc | 3,935 | py | Python | michelanglo_api/ss_parser.py | matteoferla/MichelaNGLo-api | c00749d4b9385785f777bd6613ea8327381a3f38 | [
"MIT"
] | 1 | 2020-05-23T07:42:24.000Z | 2020-05-23T07:42:24.000Z | michelanglo_api/ss_parser.py | matteoferla/MichelaNGLo-api | c00749d4b9385785f777bd6613ea8327381a3f38 | [
"MIT"
] | null | null | null | michelanglo_api/ss_parser.py | matteoferla/MichelaNGLo-api | c00749d4b9385785f777bd6613ea8327381a3f38 | [
"MIT"
] | null | null | null | from collections import namedtuple
| 36.775701 | 99 | 0.516645 |
6f1f6d17456ac645513cd747a8b58ba607f3346f | 748 | py | Python | Net640/apps/user_posts/mixin.py | 86Ilya/net640kb | 6724f3da3b678b637e0e776ee0d4953753ee2e05 | [
"MIT"
] | 1 | 2019-06-18T09:50:29.000Z | 2019-06-18T09:50:29.000Z | Net640/apps/user_posts/mixin.py | 86Ilya/net640kb | 6724f3da3b678b637e0e776ee0d4953753ee2e05 | [
"MIT"
] | 10 | 2019-12-24T07:05:29.000Z | 2022-02-10T07:42:44.000Z | Net640/apps/user_posts/mixin.py | 86Ilya/net640kb | 6724f3da3b678b637e0e776ee0d4953753ee2e05 | [
"MIT"
] | null | null | null | from django.urls import reverse
from Net640.settings import FRONTEND_DATE_FORMAT
| 37.4 | 94 | 0.605615 |
6f1f734997fb69804fc6859e112a7faf8e27b40b | 16,030 | py | Python | squids/tfrecords/maker.py | mmgalushka/squids | 2d6e1bbeb89721a2ff232a7031997111c600abb6 | [
"MIT"
] | null | null | null | squids/tfrecords/maker.py | mmgalushka/squids | 2d6e1bbeb89721a2ff232a7031997111c600abb6 | [
"MIT"
] | 37 | 2022-01-15T21:42:23.000Z | 2022-02-23T23:43:31.000Z | squids/tfrecords/maker.py | mmgalushka/squids | 2d6e1bbeb89721a2ff232a7031997111c600abb6 | [
"MIT"
] | null | null | null | """A module for converting a data source to TFRecords."""
import os
import json
import copy
import csv
from pathlib import Path
from shutil import rmtree
import PIL.Image as Image
import tensorflow as tf
from tqdm import tqdm
from .feature import items_to_features
from .errors import DirNotFoundError, InvalidDatasetFormat
from ..config import IMAGE_WIDTH, IMAGE_HEIGHT, DATASET_DIR, TFRECORDS_SIZE
# ------------------------------------------------------------------------------
# CSV/COCO Dataset Detectors
# ------------------------------------------------------------------------------
def is_csv_input(input_dir: Path) -> bool:
"""
Tests if the input directory represents CSV dataset format.
Args:
input_dir (Path):
The input directory to test.
Returns:
status (bool):
Returns `True` if the input directory represents CSV dataset
format and `False` otherwise.
"""
return set(os.listdir(input_dir)) == set(
[
"images",
"instances_train.csv",
"instances_test.csv",
"instances_val.csv",
"categories.json",
]
)
def is_coco_input(input_dir: Path) -> bool:
"""
Tests if the input directory represents COCO dataset format.
Args:
input_dir (Path):
The input directory to test.
Returns:
status (bool):
Returns `True` if the input directory represents COCO dataset
format and `False` otherwise.
"""
root_artifacts = os.listdir(input_dir)
if "annotations" in root_artifacts:
annotations_artifacts = os.listdir(input_dir / "annotations")
stems_artifacts = [
Path(artifact).stem for artifact in annotations_artifacts
]
return set(stems_artifacts).issubset(set(root_artifacts))
return False
# ------------------------------------------------------------------------------
# CSV/COCO Dataset Iterators
# ------------------------------------------------------------------------------
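# The iterator classes that belong in this section (e.g. CsvIterator and
# CocoIterator used by create_tfrecords below) are not shown in this listing.
# A minimal sketch of the interface assumed by instances_to_tfrecords --
# len() plus iteration over per-image records -- could look like this
# (illustrative only, not the original implementation):
class _ListIterator:
    """Wraps an in-memory list of annotation records."""

    def __init__(self, records):
        self._records = list(records)

    def __len__(self):
        return len(self._records)

    def __iter__(self):
        return iter(self._records)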
# ------------------------------------------------------------------------------
# Dataset to TFRecords Transformer
# ------------------------------------------------------------------------------
def instances_to_tfrecords(
instance_file: Path,
output_dir: Path,
items: DatasetIterator,
size: int,
image_width: int,
image_height: int,
verbose: bool,
):
"""
    Converts instances to TFRecords.
Args:
instance_file (Path):
The path to the instance file to read data from.
output_dir (Path):
The path to the output directory to save generated TFRecords.
items (DatasetIterator):
The CSV or COCO dataset iterator.
size (int):
            The number of images per partition.
image_width (int):
The TFRecords image width resize to.
image_height (int):
The TFRecords image height resize to.
verbose (bool):
The flag to set verbose mode.
"""
tfrecords_dir = output_dir / instance_file.stem
tfrecords_dir.mkdir(exist_ok=True)
# The TFRecords writer.
writer = None
# The index for the next TFRecords partition.
part_index = -1
    # The count of records stored in the current TFRecords file. It is
    # initialized to the maximum capacity (as a trick) so that the "if"
    # condition in the loop is True on the first pass and partition 0 is started.
part_count = size
    # Initializes the progress bar if verbose mode is on.
if verbose:
pbar = tqdm(total=len(items))
for item in items:
if item:
if part_count >= size:
                # The current partition has reached its maximum capacity,
# so we need to start a new one.
if writer is not None:
# Closes the existing TFRecords writer.
writer.close()
part_index += 1
writer = tf.io.TFRecordWriter(
str(tfrecords_dir / f"part-{part_index}.tfrecord")
)
part_count = 0
example = get_example(item)
if example:
writer.write(example.SerializeToString())
part_count += 1
        # Updates the progress bar if verbose mode is on.
if verbose:
pbar.update(1)
# Closes the existing TFRecords writer after the last row.
writer.close()
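# `get_example` is referenced above but its definition is not shown in this
# listing. A hedged sketch of what such a helper typically does -- turn the
# per-image feature dict (e.g. from items_to_features) into a tf.train.Example
# -- is given below; the real module's field handling may differ.
def _example_from_features(features: dict) -> tf.train.Example:
    return tf.train.Example(features=tf.train.Features(feature=features))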
def create_tfrecords(
dataset_dir: str = DATASET_DIR,
tfrecords_dir: str = None,
size: int = TFRECORDS_SIZE,
image_width: int = IMAGE_WIDTH,
image_height: int = IMAGE_HEIGHT,
selected_categories: list = [],
verbose: bool = False,
):
"""
This function transforms CSV or COCO dataset to TFRecords.
Args:
dataset_dir (str):
The path to the data set directory to transform.
tfrecords_dir (str):
The path to the output directory to save generated TFRecords.
size (int):
            The number of images per partition.
image_width (int):
The TFRecords image width resize to.
image_height (int):
The TFRecords image height resize to.
selected_categories (list):
The list of selected category IDs.
verbose (bool):
The flag to set verbose mode.
Raises:
DirNotFoundError:
If input or output directories do not exist.
InvalidDatasetFormat:
If the input dataset has invalid CSV or COCO format.
"""
input_dir = Path(dataset_dir)
if not input_dir.exists():
raise DirNotFoundError("input dataset", input_dir)
if tfrecords_dir is None:
output_dir = input_dir.parent / (input_dir.name + "-tfrecords")
else:
output_dir = Path(tfrecords_dir)
if not output_dir.parent.exists():
raise DirNotFoundError("parent (to output)", output_dir.parent)
if output_dir.exists():
rmtree(output_dir)
output_dir.mkdir(exist_ok=True)
if is_csv_input(input_dir):
for instance_file in input_dir.rglob("*.csv"):
instances_to_tfrecords(
instance_file,
output_dir,
CsvIterator(instance_file, selected_categories),
size,
image_width,
image_height,
verbose,
)
elif is_coco_input(input_dir):
for instance_file in (input_dir / "annotations").rglob("*.json"):
instances_to_tfrecords(
instance_file,
output_dir,
CocoIterator(instance_file, selected_categories),
size,
image_width,
image_height,
verbose,
)
else:
raise InvalidDatasetFormat()
| 33.818565 | 80 | 0.564255 |
6f1f9754bb7f6d41b30e4a4c10cead5e654ca04e | 2,743 | py | Python | edexOsgi/com.raytheon.edex.plugin.gfe/utility/cave_static/user/GFETEST/gfe/userPython/smartTools/ExUtil1.py | srcarter3/awips2 | 37f31f5e88516b9fd576eaa49d43bfb762e1d174 | [
"Apache-2.0"
] | null | null | null | edexOsgi/com.raytheon.edex.plugin.gfe/utility/cave_static/user/GFETEST/gfe/userPython/smartTools/ExUtil1.py | srcarter3/awips2 | 37f31f5e88516b9fd576eaa49d43bfb762e1d174 | [
"Apache-2.0"
] | null | null | null | edexOsgi/com.raytheon.edex.plugin.gfe/utility/cave_static/user/GFETEST/gfe/userPython/smartTools/ExUtil1.py | srcarter3/awips2 | 37f31f5e88516b9fd576eaa49d43bfb762e1d174 | [
"Apache-2.0"
] | 1 | 2021-10-30T00:03:05.000Z | 2021-10-30T00:03:05.000Z | ##
# This software was developed and / or modified by Raytheon Company,
# pursuant to Contract DG133W-05-CQ-1067 with the US Government.
#
# U.S. EXPORT CONTROLLED TECHNICAL DATA
# This software product contains export-restricted data whose
# export/transfer/disclosure is restricted by U.S. law. Dissemination
# to non-U.S. persons whether in the United States or abroad requires
# an export license or other authorization.
#
# Contractor Name: Raytheon Company
# Contractor Address: 6825 Pine Street, Suite 340
# Mail Stop B8
# Omaha, NE 68106
# 402.291.0100
#
# See the AWIPS II Master Rights File ("Master Rights File.pdf") for
# further licensing information.
##
# ----------------------------------------------------------------------------
# This software is in the public domain, furnished "as is", without technical
# support, and with no warranty, express or implied, as to its usefulness for
# any purpose.
#
# ExUtil1
#
# Author:
# ----------------------------------------------------------------------------
ToolType = "numeric"
WeatherElementEdited = "T"
from numpy import *
import SmartScript
import Common
VariableList = [("Model:" , "", "D2D_model")]
| 34.2875 | 96 | 0.596792 |
6f1fef78694338432a72024d0e2abb835ff193fd | 5,335 | py | Python | venv/KryptoSkattScript/mining_income.py | odgaard/KryptoSkatt | 60338f25af2300b165738ceac033aae72969f7c5 | [
"MIT"
] | null | null | null | venv/KryptoSkattScript/mining_income.py | odgaard/KryptoSkatt | 60338f25af2300b165738ceac033aae72969f7c5 | [
"MIT"
] | null | null | null | venv/KryptoSkattScript/mining_income.py | odgaard/KryptoSkatt | 60338f25af2300b165738ceac033aae72969f7c5 | [
"MIT"
] | null | null | null | import pathlib
import datetime
path = 'c:/Users/Jacob/PycharmProjects/KryptoSkatt/Data/'
trans_in = list()
trans_out = list()
bitcoin_dict = dict()
ethereum_dict = dict()
USD_NOK_dict = dict()
main() | 37.307692 | 120 | 0.65567 |
6f21c952ba1d6ad55821e054cf4f9e1bcc0cbef5 | 1,222 | py | Python | SymBOP_Analysis/ql_global.py | duttm/Octahedra_Nanoparticle_Project | aebee2859e104071a1a6f5f46b42ddc9bd2fa5ad | [
"MIT"
] | null | null | null | SymBOP_Analysis/ql_global.py | duttm/Octahedra_Nanoparticle_Project | aebee2859e104071a1a6f5f46b42ddc9bd2fa5ad | [
"MIT"
] | null | null | null | SymBOP_Analysis/ql_global.py | duttm/Octahedra_Nanoparticle_Project | aebee2859e104071a1a6f5f46b42ddc9bd2fa5ad | [
"MIT"
] | null | null | null | import numpy as np
import scipy.special as ss
import pathlib
from Particle import Particle
| 20.366667 | 140 | 0.672668 |
6f22dd259e43cf8dd03f6e436b63e23ee3c3c16a | 133 | py | Python | mycelium/__init__.py | suet-lee/mycelium | db83cd3ab00697f28b2def2cebcdef52698fdd92 | [
"Apache-2.0"
] | 6 | 2021-05-23T17:36:02.000Z | 2022-01-21T20:34:17.000Z | mycelium/__init__.py | suet-lee/mycelium | db83cd3ab00697f28b2def2cebcdef52698fdd92 | [
"Apache-2.0"
] | null | null | null | mycelium/__init__.py | suet-lee/mycelium | db83cd3ab00697f28b2def2cebcdef52698fdd92 | [
"Apache-2.0"
] | 1 | 2021-06-17T20:35:10.000Z | 2021-06-17T20:35:10.000Z | from .switch import EKFSwitch, RelaySwitch, InitialModeSwitch
from .camera_t265 import CameraT265
from .camera_d435 import CameraD435 | 44.333333 | 61 | 0.864662 |
6f24922c982451aa56d071ba87202ae9a17e9ae3 | 1,030 | py | Python | arsenal/sleep/openfaas/sleep-py/handler.py | nropatas/faasbenchmark | 99f08c70a0ddaa8e9dcadb092b2c395318a6e215 | [
"Apache-2.0"
] | null | null | null | arsenal/sleep/openfaas/sleep-py/handler.py | nropatas/faasbenchmark | 99f08c70a0ddaa8e9dcadb092b2c395318a6e215 | [
"Apache-2.0"
] | null | null | null | arsenal/sleep/openfaas/sleep-py/handler.py | nropatas/faasbenchmark | 99f08c70a0ddaa8e9dcadb092b2c395318a6e215 | [
"Apache-2.0"
] | null | null | null | import os
import time
import datetime
| 23.953488 | 99 | 0.615534 |
6f24c0d9627e8e593e0f3f03a5c6df58f6f65c2e | 2,922 | py | Python | lib/vapi_cli/users.py | nogayama/vision-tools | f3041b519f30037d5b6390bce36a7f5efd3ed6ae | [
"Apache-2.0"
] | 15 | 2020-03-22T18:25:27.000Z | 2021-12-03T05:49:32.000Z | lib/vapi_cli/users.py | nogayama/vision-tools | f3041b519f30037d5b6390bce36a7f5efd3ed6ae | [
"Apache-2.0"
] | 8 | 2020-04-04T18:11:56.000Z | 2021-07-27T18:06:47.000Z | lib/vapi_cli/users.py | nogayama/vision-tools | f3041b519f30037d5b6390bce36a7f5efd3ed6ae | [
"Apache-2.0"
] | 19 | 2020-03-20T23:36:32.000Z | 2022-01-10T20:38:48.000Z | #!/usr/bin/env python3
# IBM_PROLOG_BEGIN_TAG
#
# Copyright 2019,2020 IBM International Business Machines Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# IBM_PROLOG_END_TAG
import logging as logger
import sys
import vapi
import vapi_cli.cli_utils as cli_utils
from vapi_cli.cli_utils import reportSuccess, reportApiError, translate_flags
# All of Vision Tools requires python 3.6 due to format string
# Make the check in a common location
if sys.hexversion < 0x03060000:
sys.exit("Python 3.6 or newer is required to run this program.")
token_usage = """
Usage:
users token --user=<user-name> --password=<password>
Where:
--user Required parameter containing the user login name
--password Required parameter containing the user's password
Gets an authentication token for the given user"""
server = None
# --- Token Operation ----------------------------------------------
def token(params):
""" Handles getting an authentication token for a specific user"""
user = params.get("--user", None)
pw = params.get("--password", None)
rsp = server.users.get_token(user, pw)
if rsp is None or rsp.get("result", "fail") == "fail":
reportApiError(server, f"Failed to get token for user '{user}'")
else:
reportSuccess(server, rsp["token"])
cmd_usage = f"""
Usage: users {cli_utils.common_cmd_flags} <operation> [<args>...]
Where:
{cli_utils.common_cmd_flag_descriptions}
<operation> is required and must be one of:
token -- gets an authentication token for the given user
Use 'users <operation> --help' for more information on a specific command."""
usage_stmt = {
"usage": cmd_usage,
"token": token_usage
}
operation_map = {
"token": token
}
if __name__ == "__main__":
main(None)
| 29.816327 | 97 | 0.687543 |
6f25add3846c5ac4302faa8959401e3328e32572 | 2,223 | py | Python | smile_recognition.py | audreymychan/djsmile | 8dc5d6337f1b32db8bf3dfbf13315ec25049ebb5 | [
"MIT"
] | 5 | 2019-05-30T20:15:34.000Z | 2020-04-16T08:21:16.000Z | smile_recognition.py | audreymychan/djsmile | 8dc5d6337f1b32db8bf3dfbf13315ec25049ebb5 | [
"MIT"
] | 5 | 2021-08-25T14:43:34.000Z | 2022-02-10T00:14:09.000Z | smile_recognition.py | audreymychan/djsmile | 8dc5d6337f1b32db8bf3dfbf13315ec25049ebb5 | [
"MIT"
] | null | null | null | # This script loads the pre-trained scaler and models and contains the
# predict_smile() function to take in an image and return smile predictions
import joblib
from tensorflow.keras.models import load_model
from tensorflow.keras.preprocessing.image import img_to_array, array_to_img
from PIL import Image
import numpy as np
# Set new frame size dimensions
img_width, img_height = (100, 100)
# Scaler and model imports
scaler = joblib.load('./models/scaler.save')
model = load_model('./models/my_model.h5')
model.compile(loss='binary_crossentropy', optimizer='adam',
metrics=['accuracy'])
def predict_smile(gray_img, box, count):
"""Make prediction on a new image whether a person is smiling or not.
Parameters
----------
gray_img : numpy.ndarray of dtype int
Grayscale image in numpy.ndarray of current frame.
box : tuple
(left, top, right, bottom) locating face bounding box in pixel locations.
count : int
Number of faces detected in current frame.
Returns
-------
numpy.ndarray of dtype float
Probabilities of no smile (second number) and smile (first number).
i.e. array([[0.972528 , 0.02747207]], dtype=float32)
"""
# Save a copy of current frame
gray_img = gray_img.reshape(gray_img.shape+(1,)) # (height, width, 1)
array_to_img(gray_img).save(f'./images/temp/current_frame_{count}.jpg')
# Load image
gray_img = Image.open(f'./images/temp/current_frame_{count}.jpg')
# Crop face, resize to 100x100 pixels, and save a copy
face_crop = gray_img.resize((img_width, img_height), box=box)
face_crop.save(f'./images/temp/face_crop_current_frame_{count}.jpg')
# Load image and convert to np.array
face_crop = Image.open(f'./images/temp/face_crop_current_frame_{count}.jpg')
new_face_array = np.array(img_to_array(face_crop)) # (100, 100, 1)
# Reshape
new_face_array = new_face_array.reshape(1, img_width*img_height) # (1, 10_000)
# Transform with pre-trained scaler
new_face_array = scaler.transform(new_face_array)
new_face_array = new_face_array.reshape(1, img_width, img_height, 1) # (1, 100, 100, 1)
return model.predict(new_face_array)
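# Illustrative usage (not part of the original script): run the prediction on a
# grayscale frame with a face bounding box, e.g. supplied by a separate face
# detector. The frame array and box values below are placeholders.
if __name__ == '__main__':
    frame = np.zeros((480, 640), dtype='uint8')   # stand-in for a camera frame
    face_box = (200, 120, 400, 320)                # (left, top, right, bottom)
    probabilities = predict_smile(frame, face_box, count=0)
    print(probabilities)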
| 35.285714 | 92 | 0.706253 |
6f25c6dda6bae99b736764ebd22f5be07aae919e | 1,054 | py | Python | comicstreamerlib/gui_qt.py | rlugojr/ComicStreamer | 62eb914652695ea41a5e1f0cfbd044cbc6854e84 | [
"Apache-2.0"
] | 169 | 2015-01-08T03:23:37.000Z | 2022-02-27T22:09:25.000Z | comicstreamerlib/gui_qt.py | gwhittey23/ComicStreamer | 3e0fe2011984cee54197985cb313f5b6864f6f8c | [
"Apache-2.0"
] | 46 | 2015-01-10T23:47:51.000Z | 2020-05-31T01:04:28.000Z | comicstreamerlib/gui_qt.py | gwhittey23/ComicStreamer | 3e0fe2011984cee54197985cb313f5b6864f6f8c | [
"Apache-2.0"
] | 94 | 2015-01-26T01:57:52.000Z | 2022-01-25T17:11:31.000Z | import sys
import webbrowser
import os
from comicstreamerlib.folders import AppFolders
from PyQt4 import QtGui,QtCore
if __name__ == '__main__':
QtGui().run()
| 23.422222 | 65 | 0.624288 |
6f276dd2fdcae04762736c35013f0dd614ff7db4 | 3,892 | py | Python | laserchicken/io/las_handler.py | eEcoLiDAR/eEcoLiDAR | f5c4e772e4893f7242ed0b10aa17ac7e693a55a0 | [
"Apache-2.0"
] | null | null | null | laserchicken/io/las_handler.py | eEcoLiDAR/eEcoLiDAR | f5c4e772e4893f7242ed0b10aa17ac7e693a55a0 | [
"Apache-2.0"
] | 104 | 2017-09-07T08:06:49.000Z | 2018-04-16T09:17:18.000Z | laserchicken/io/las_handler.py | eEcoLiDAR/eEcoLiDAR | f5c4e772e4893f7242ed0b10aa17ac7e693a55a0 | [
"Apache-2.0"
] | 2 | 2017-11-17T17:23:04.000Z | 2017-12-15T07:13:20.000Z | """ IO Handler for LAS (and compressed LAZ) file format """
import laspy
import numpy as np
from laserchicken import keys
from laserchicken.io.base_io_handler import IOHandler
from laserchicken.io.utils import convert_to_short_type, select_valid_attributes
DEFAULT_LAS_ATTRIBUTES = {
'x',
'y',
'z',
'intensity',
'gps_time',
'raw_classification',
}
| 37.423077 | 98 | 0.610226 |
6f29a0478e6fdf417f21eeca439c92961dbbacca | 1,206 | py | Python | prob.py | Y1fanHE/po_with_moead-levy | d0531c9685ea1a09dd074960b51756d8f19a9719 | [
"MIT"
] | 7 | 2020-09-02T12:40:58.000Z | 2021-09-17T09:39:09.000Z | prob.py | Y1fanHE/po_with_moead-levy | d0531c9685ea1a09dd074960b51756d8f19a9719 | [
"MIT"
] | null | null | null | prob.py | Y1fanHE/po_with_moead-levy | d0531c9685ea1a09dd074960b51756d8f19a9719 | [
"MIT"
] | null | null | null | import numpy as np
import pandas as pd
| 29.414634 | 78 | 0.529022 |
6f2a6d704873d5624524e8309be808576dfeefc1 | 277 | py | Python | nmr/testing_PyBMRB.py | jameshtwose/han_jms_collabs | ee5cdb73b3e14e7f1f1e225dbc6a7d7d2b1b5b73 | [
"CC-BY-4.0"
] | null | null | null | nmr/testing_PyBMRB.py | jameshtwose/han_jms_collabs | ee5cdb73b3e14e7f1f1e225dbc6a7d7d2b1b5b73 | [
"CC-BY-4.0"
] | null | null | null | nmr/testing_PyBMRB.py | jameshtwose/han_jms_collabs | ee5cdb73b3e14e7f1f1e225dbc6a7d7d2b1b5b73 | [
"CC-BY-4.0"
] | null | null | null | from pybmrb import Spectra, Histogram
import plotly.io as pio
pio.renderers.default = "browser"
peak_list=Spectra.n15hsqc(bmrb_ids=15060, legend='residue')
peak_list=Spectra.c13hsqc(bmrb_ids=15060, legend='residue')
peak_list=Spectra.tocsy(bmrb_ids=15060, legend='residue') | 27.7 | 59 | 0.801444 |
6f2c2c62e843e5ddae5061bd51b492b090cca398 | 10,511 | py | Python | parser.py | sberczuk/powerschool-reporter | 2393d9f63ffe643499f6cbf2bf406f3c4d311129 | [
"MIT"
] | 1 | 2021-03-04T20:11:08.000Z | 2021-03-04T20:11:08.000Z | parser.py | sberczuk/powerschool-reporter | 2393d9f63ffe643499f6cbf2bf406f3c4d311129 | [
"MIT"
] | null | null | null | parser.py | sberczuk/powerschool-reporter | 2393d9f63ffe643499f6cbf2bf406f3c4d311129 | [
"MIT"
] | 1 | 2021-03-04T20:11:13.000Z | 2021-03-04T20:11:13.000Z | #!/usr/bin/env python3
import io
import xml.etree.ElementTree as ET
import argparse
ns = {'ns1': 'http://www.sifinfo.org/infrastructure/2.x',
'ns2': 'http://stumo.transcriptcenter.com'}
def process_course(course, year):
title = course.find(".//ns1:CourseTitle", ns).text
course_code = course.find(".//ns1:CourseCode", ns).text
mark_data = course.find(".//ns1:MarkData", ns)
grade_level = course.find(".//ns1:GradeLevelWhenTaken/ns1:Code", ns).text
letter_grade = mark_data.find("ns1:Letter", ns).text
number_grade = mark_data.find("ns1:Percentage", ns).text
comments = mark_data.find("ns1:Narrative", ns).text
# get extended info
extended_info = course.find("ns1:SIF_ExtendedElements", ns)
term = extended_info.find("ns1:SIF_ExtendedElement[@Name='StoreCode']", ns).text
teacher_fn = extended_info.find("ns1:SIF_ExtendedElement[@Name='InstructorFirstName']", ns).text
teacher_ln = extended_info.find("ns1:SIF_ExtendedElement[@Name='InstructorLastName']", ns).text
school_name = extended_info.find("ns1:SIF_ExtendedElement[@Name='SchoolName']", ns).text
return Grade(year, grade_level, term, course_code, title, letter_grade, number_grade, comments, teacher_fn,
teacher_ln, school_name)
# Placeholder for markdown format for a list of grades
# Take the list and sort it with appropriate headers.
# TBD whether metadata needs to be passed in, derived automatically, or assumed.
# concat all of the XML lines in the file, then return it
# Skip all up to the start of the XML
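# The helpers used below (extractValidXML, process_data, organize_grades,
# generate_year_report, generate_html_file) are not shown in this listing.
# A hedged sketch of what the two comments above describe for extractValidXML
# -- skip any leading non-XML lines and return the rest as one string -- is:
def _extract_valid_xml_sketch(path):
    with io.open(path, encoding='utf-8') as fh:
        lines = fh.readlines()
    for index, line in enumerate(lines):
        if line.lstrip().startswith('<'):
            return ''.join(lines[index:])
    return ''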
if __name__ == "__main__":
import sys
parser = argparse.ArgumentParser(description='Report Card Generator.')
parser.add_argument('--output_basename', action='store',
default='report_card',
help='Output file to report results to (default: standard out)')
# First arg is the data file
parser.add_argument('data_file')
args = parser.parse_args()
basename = args.output_basename
print("output = ", basename)
print("parsing ", args.data_file)
valid_xml = extractValidXML(args.data_file)
(student_info, grades, years) = process_data(args.data_file)
years.sort()
for year in years:
(grades_by_course, grades_by_period, headers_by_course) = organize_grades(
[a for a in grades if (a.year == year)])
print("*******************", year, "***************")
schools = [g.school for g in grades if (g.year == year)]
terms = [g.term for g in grades if (g.year == year)]
report_text = generate_year_report(student_info, year, grades_by_course, set(schools), set(terms))
file_name = f"{basename}-{year}.html"
generate_html_file(file_name, report_text)
| 38.083333 | 271 | 0.641328 |
6f2de6790116bc6ef41091db2832890bbce2457a | 2,623 | py | Python | eunite/eunite_data.py | jiasudemotuohe/deep_learning | 44eb14d91b6b9ca2092361918a1bcaa73786f78e | [
"MIT"
] | null | null | null | eunite/eunite_data.py | jiasudemotuohe/deep_learning | 44eb14d91b6b9ca2092361918a1bcaa73786f78e | [
"MIT"
] | null | null | null | eunite/eunite_data.py | jiasudemotuohe/deep_learning | 44eb14d91b6b9ca2092361918a1bcaa73786f78e | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
# @Time : 2020-04-11 12:34
# @Author : speeding_moto
import numpy as np
import pandas as pd
from matplotlib import pyplot as plt
EUNITE_PATH = "dataset/eunite.xlsx"
PARSE_TABLE_NAME = "mainData"
def load_eunite_data():
"""
    Return the generated load data, including all the features we handle.
"""
data = open_file()
X, Y = generate_features(data)
return X.values, Y.values
def generate_features(df):
"""
    Parse the data; we need to transfer the class numbers to one-hot encoding for later calculations.
"""
months = df["Month"]
days = df["Day"]
one_hot_months = cast_to_one_hot(months, n_classes=12)
days = cast_to_one_hot(days, n_classes=31)
one_hot_months = pd.DataFrame(one_hot_months)
days = pd.DataFrame(days)
df = pd.merge(left=df, right=one_hot_months, left_index=True, right_index=True)
df = pd.merge(left=df, right=days, left_index=True, right_index=True)
y = df['Max Load']
    # note: we may need to normalize the temperature data
temperature = normalization(df['Temp'].values)
temperature = pd.DataFrame(temperature)
df = pd.merge(left=df, right=temperature, left_index=True, right_index=True)
drop_columns = ["ID", "Month", "Day", "Year", "Max Load", "Temp"]
df.drop(drop_columns, axis=1, inplace=True)
print(df[0:10], "\n", y[0])
return df, y
def cast_to_one_hot(data, n_classes):
"""
cast the classifier data to one hot
"""
one_hot_months = np.eye(N=n_classes)[[data - 1]]
return one_hot_months
def open_file():
"""
open the eunite load excel file to return
"""
xlsx_file = pd.ExcelFile(EUNITE_PATH)
return xlsx_file.parse(PARSE_TABLE_NAME)
if __name__ == '__main__':
df = open_file()
show_month_temperature_load_image(df)
x, y = load_eunite_data()
print(x.shape)
| 22.808696 | 91 | 0.661456 |
6f2ef602fc37c19ef3635c7ccba25fb1c352192a | 4,828 | py | Python | tests/test_fibsem.py | DeMarcoLab/piescope | ea7acf5b198b91e4923097711d55ca038763eba2 | [
"MIT"
] | 4 | 2019-06-07T07:28:48.000Z | 2022-02-23T23:02:08.000Z | tests/test_fibsem.py | DeMarcoLab/PIEScope | ea7acf5b198b91e4923097711d55ca038763eba2 | [
"MIT"
] | 44 | 2019-06-09T14:32:16.000Z | 2022-03-25T06:04:20.000Z | tests/test_fibsem.py | DeMarcoLab/piescope | ea7acf5b198b91e4923097711d55ca038763eba2 | [
"MIT"
] | 3 | 2019-06-07T07:31:09.000Z | 2021-03-01T10:47:24.000Z | import numpy as np
import pytest
from piescope.data.mocktypes import MockAdornedImage
import piescope.fibsem
autoscript = pytest.importorskip(
"autoscript_sdb_microscope_client", reason="Autoscript is not available."
)
try:
from autoscript_sdb_microscope_client import SdbMicroscopeClient
microscope = SdbMicroscopeClient()
microscope.connect("localhost")
except Exception as e:
pytest.skip("AutoScript cannot connect to localhost, skipping all AutoScript tests.",
allow_module_level=True)
def test_initialize():
"""Test connecting to the microscope offline with localhost."""
microscope = piescope.fibsem.initialize("localhost")
def test_move_to_light_microscope(microscope):
original_position = microscope.specimen.stage.current_position
final_position = piescope.fibsem.move_to_light_microscope(microscope)
assert np.isclose(final_position.x, original_position.x + 50e-3, atol=1e-7)
assert np.isclose(final_position.y, original_position.y + 0.)
assert np.isclose(final_position.z, original_position.z)
assert np.isclose(final_position.r, original_position.r)
assert np.isclose(final_position.t, original_position.t)
def test_move_to_electron_microscope(microscope):
original_position = microscope.specimen.stage.current_position
final_position = piescope.fibsem.move_to_electron_microscope(microscope)
assert np.isclose(final_position.x, original_position.x - 50e-3, atol=1e-7)
assert np.isclose(final_position.y, original_position.y - 0.)
assert np.isclose(final_position.z, original_position.z)
assert np.isclose(final_position.r, original_position.r)
assert np.isclose(final_position.t, original_position.t)
def test_new_ion_image(microscope):
result = piescope.fibsem.new_ion_image(microscope)
assert microscope.imaging.get_active_view() == 2
assert result.data.shape == (884, 1024)
def test_new_electron_image(microscope):
result = piescope.fibsem.new_electron_image(microscope)
assert microscope.imaging.get_active_view() == 1
assert result.data.shape == (884, 1024)
def test_last_ion_image(microscope):
result = piescope.fibsem.last_ion_image(microscope)
assert microscope.imaging.get_active_view() == 2
assert result.data.shape == (884, 1024)
def test_last_electron_image(microscope):
result = piescope.fibsem.last_electron_image(microscope)
assert microscope.imaging.get_active_view() == 1
assert result.data.shape == (884, 1024)
def test_create_rectangular_pattern(microscope, image):
x0 = 2
x1 = 8
y0 = 3
y1 = 7
depth = 1e-6
output = piescope.fibsem.create_rectangular_pattern(
microscope, image, x0, x1, y0, y1, depth)
expected_center_x = 0
expected_center_y = 0
expected_width = 6e-6
expected_height = 4e-6
assert np.isclose(output.center_x, expected_center_x)
assert np.isclose(output.center_y, expected_center_y)
assert np.isclose(output.width, expected_width)
assert np.isclose(output.height, expected_height)
assert np.isclose(output.depth, depth) # depth is unchanged
    assert np.isclose(output.rotation, 0)  # no rotation by default
def test_empty_rectangular_pattern(microscope, image):
x0 = None
x1 = None
y0 = 3
y1 = 7
depth = 1e-6
output = piescope.fibsem.create_rectangular_pattern(
microscope, image, x0, x1, y0, y1, depth)
assert output is None
| 31.763158 | 89 | 0.719553 |
6f2f4a5690de443a3e4f39e964bc36f35fd2bc86 | 8,206 | py | Python | newnew.py | jennycs005/Skyscraper-App | 53d69e005bec17a033be6ea1274e8f7372ed8b28 | [
"MIT"
] | null | null | null | newnew.py | jennycs005/Skyscraper-App | 53d69e005bec17a033be6ea1274e8f7372ed8b28 | [
"MIT"
] | null | null | null | newnew.py | jennycs005/Skyscraper-App | 53d69e005bec17a033be6ea1274e8f7372ed8b28 | [
"MIT"
] | null | null | null | import streamlit as st
import pandas as pd
import matplotlib.pyplot as plt
import csv
import numpy as np
import pydeck as pdk
from PIL import Image
#rank_map whole_mao(),rank_map()
#rank_map(),rank
#
main()
| 40.029268 | 115 | 0.598343 |
6f2f4c53b7a08acbd2a5aec32456145e78be64d9 | 4,746 | py | Python | cifar_train.py | usumfabricae/sagemaker-multi-model-endpoint-tensorflow-computer-vision | 74a97ecc2fa9bf76c5543dfe23373a6c69c61647 | [
"MIT-0"
] | 4 | 2021-05-30T22:15:34.000Z | 2022-03-12T23:01:36.000Z | cifar_train.py | usumfabricae/sagemaker-multi-model-endpoint-tensorflow-computer-vision | 74a97ecc2fa9bf76c5543dfe23373a6c69c61647 | [
"MIT-0"
] | null | null | null | cifar_train.py | usumfabricae/sagemaker-multi-model-endpoint-tensorflow-computer-vision | 74a97ecc2fa9bf76c5543dfe23373a6c69c61647 | [
"MIT-0"
] | 3 | 2021-06-08T12:04:43.000Z | 2021-06-12T13:44:48.000Z | from tensorflow.keras.layers import Conv2D, MaxPooling2D, Flatten, Dense, Dropout, BatchNormalization
from tensorflow.keras.preprocessing.image import ImageDataGenerator
from tensorflow.keras.callbacks import ModelCheckpoint
from tensorflow.keras.models import Sequential
from tensorflow.keras.models import load_model
from tensorflow.keras import utils
import tensorflow as tf
import numpy as np
import argparse
import logging
import os
# Set Log Level
os.environ['TF_CPP_MIN_LOG_LEVEL'] = '3'
# Seed for Reproducibility
SEED = 123
np.random.seed(SEED)
tf.random.set_seed(SEED)
# Setup Logger
logger = logging.getLogger('sagemaker')
logger.setLevel(logging.INFO)
logger.addHandler(logging.StreamHandler())
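# The helper functions used below (parse_args, get_train_data, get_validation_data,
# get_test_data) are not shown in this listing. A hedged sketch of the usual
# SageMaker-style pattern they suggest -- argparse over SM_CHANNEL_* directories
# and loading numpy arrays from them -- follows; the file names and defaults are
# assumptions, not the original implementation.
def _sketch_parse_args():
    parser = argparse.ArgumentParser()
    parser.add_argument('--epochs', type=int, default=10)
    parser.add_argument('--train', type=str, default=os.environ.get('SM_CHANNEL_TRAIN'))
    parser.add_argument('--val', type=str, default=os.environ.get('SM_CHANNEL_VAL'))
    parser.add_argument('--test', type=str, default=os.environ.get('SM_CHANNEL_TEST'))
    parser.add_argument('--model_dir', type=str, default=os.environ.get('SM_MODEL_DIR', '.'))
    return parser.parse_known_args()

def _sketch_load_channel(channel_dir, x_name='x.npy', y_name='y.npy'):
    x = np.load(os.path.join(channel_dir, x_name))
    y = np.load(os.path.join(channel_dir, y_name))
    return x, y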
if __name__ == '__main__':
logger.info(f'[Using TensorFlow version: {tf.__version__}]')
DEVICE = '/cpu:0'
args, _ = parse_args()
epochs = args.epochs
# Load train, validation and test sets from S3
X_train, y_train = get_train_data(args.train)
X_validation, y_validation = get_validation_data(args.val)
X_test, y_test = get_test_data(args.test)
with tf.device(DEVICE):
# Data Augmentation
TRAIN_BATCH_SIZE = 32
data_generator = ImageDataGenerator(width_shift_range=0.1, height_shift_range=0.1, horizontal_flip=True)
train_iterator = data_generator.flow(X_train, y_train, batch_size=TRAIN_BATCH_SIZE)
# Define Model Architecture
model = Sequential()
# CONVOLUTIONAL LAYER 1
model.add(Conv2D(filters=16, kernel_size=2, padding='same', activation='relu', input_shape=(32, 32, 3)))
model.add(BatchNormalization())
model.add(MaxPooling2D(pool_size=2))
# CONVOLUTIONAL LAYER 1
model.add(Conv2D(filters=32, kernel_size=2, padding='same', activation='relu'))
model.add(BatchNormalization())
model.add(MaxPooling2D(pool_size=2))
# CONVOLUTIONAL LAYER 3
model.add(Conv2D(filters=64, kernel_size=2, padding='same', activation='relu'))
model.add(BatchNormalization())
model.add(MaxPooling2D(pool_size=2))
model.add(Dropout(0.3))
# FULLY CONNECTED LAYER
model.add(Flatten())
model.add(Dense(500, activation='relu'))
model.add(Dropout(0.4))
model.add(Dense(10, activation='softmax'))
model.summary()
# Compile Model
model.compile(loss='categorical_crossentropy', optimizer='adam', metrics=['accuracy'])
# Train Model
BATCH_SIZE = 32
STEPS_PER_EPOCH = int(X_train.shape[0]/TRAIN_BATCH_SIZE)
model.fit(train_iterator,
steps_per_epoch=STEPS_PER_EPOCH,
batch_size=BATCH_SIZE,
epochs=epochs,
validation_data=(X_validation, y_validation),
callbacks=[],
verbose=2,
shuffle=True)
# Evaluate on Test Set
result = model.evaluate(X_test, y_test, verbose=1)
print(f'Test Accuracy: {result[1]}')
# Save Model
model.save(f'{args.model_dir}/1')
| 37.370079 | 112 | 0.676991 |
6f2fda5d1a7f7912eef13fc0ff8b8f413ac5c9a7 | 1,373 | py | Python | corehq/form_processor/migrations/0049_case_attachment_props.py | kkrampa/commcare-hq | d64d7cad98b240325ad669ccc7effb07721b4d44 | [
"BSD-3-Clause"
] | 1 | 2020-05-05T13:10:01.000Z | 2020-05-05T13:10:01.000Z | corehq/form_processor/migrations/0049_case_attachment_props.py | kkrampa/commcare-hq | d64d7cad98b240325ad669ccc7effb07721b4d44 | [
"BSD-3-Clause"
] | 1 | 2019-12-09T14:00:14.000Z | 2019-12-09T14:00:14.000Z | corehq/form_processor/migrations/0049_case_attachment_props.py | MaciejChoromanski/commcare-hq | fd7f65362d56d73b75a2c20d2afeabbc70876867 | [
"BSD-3-Clause"
] | 5 | 2015-11-30T13:12:45.000Z | 2019-07-01T19:27:07.000Z | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from __future__ import absolute_import
from django.db import models, migrations
import jsonfield.fields
| 29.212766 | 69 | 0.600874 |
6f30daadb871f9a5d1c444d73777bde40a45df2e | 8,658 | py | Python | src/utils/es_async.py | karawallace/mygene | 35bf066eb50bc929b4bb4e2423d47b4c98797526 | [
"Apache-2.0"
] | null | null | null | src/utils/es_async.py | karawallace/mygene | 35bf066eb50bc929b4bb4e2423d47b4c98797526 | [
"Apache-2.0"
] | null | null | null | src/utils/es_async.py | karawallace/mygene | 35bf066eb50bc929b4bb4e2423d47b4c98797526 | [
"Apache-2.0"
] | 1 | 2018-11-17T08:53:06.000Z | 2018-11-17T08:53:06.000Z | import re
import json
import tornado.web
import tornado.httpclient
tornado.httpclient.AsyncHTTPClient.configure("tornado.curl_httpclient.CurlAsyncHTTPClient")
import tornadoes
from utils.es import (ESQuery, ESQueryBuilder,
MGQueryError, ElasticSearchException,
ES_INDEX_NAME_ALL)
from utils.dotfield import parse_dot_fields
from config import ES_HOST
| 38.825112 | 136 | 0.524371 |
6f31322afdaea5a169b7473328dfc029ea716e21 | 10,203 | py | Python | processviz/test.py | jurgendn/processviz | 82808a92662962f04c48673c9cf159d7bc904ff7 | [
"BSD-3-Clause"
] | null | null | null | processviz/test.py | jurgendn/processviz | 82808a92662962f04c48673c9cf159d7bc904ff7 | [
"BSD-3-Clause"
] | null | null | null | processviz/test.py | jurgendn/processviz | 82808a92662962f04c48673c9cf159d7bc904ff7 | [
"BSD-3-Clause"
] | 2 | 2020-03-19T11:14:13.000Z | 2021-08-14T14:24:08.000Z | """
This library was written for the course `Stochastic Models and Applications`.
It uses the `networkx, pandas, numpy, matplotlib` libraries.
"""
import networkx as nx
import numpy as np
import matplotlib.pyplot as plt
from matplotlib.image import imread
import pandas as pd
| 32.287975 | 101 | 0.503283 |
6f319a2e3b23a21c6ff1ef69178d3b4bc2931b78 | 3,322 | py | Python | src/check_results.py | jagwar/Sentiment-Analysis | 312186c066c360ed4b3ebc9e999dba419f10e93c | [
"MIT"
] | null | null | null | src/check_results.py | jagwar/Sentiment-Analysis | 312186c066c360ed4b3ebc9e999dba419f10e93c | [
"MIT"
] | null | null | null | src/check_results.py | jagwar/Sentiment-Analysis | 312186c066c360ed4b3ebc9e999dba419f10e93c | [
"MIT"
] | null | null | null | import os
import json
import numpy as np
import torch
from torch.utils.data import DataLoader, RandomSampler, TensorDataset, SequentialSampler
from transformers import CamembertTokenizer, CamembertForSequenceClassification
import pandas as pd
from tqdm import tqdm, trange
# tokenizer = CamembertTokenizer.from_pretrained('/home/crannou/workspace/sentiment-eai/data/36e8f471-821d-4270-be56-febb1be36c26')
# model = CamembertForSequenceClassification.from_pretrained('/home/crannou/workspace/sentiment-eai/data/36e8f471-821d-4270-be56-febb1be36c26')
# tokenizer = CamembertTokenizer.from_pretrained('/home/crannou/workspace/sentiment-eai/7a37b1e5-8e7b-45d1-9e87-7314e8e66c0c/')
# model = CamembertForSequenceClassification.from_pretrained('/home/crannou/workspace/sentiment-eai/7a37b1e5-8e7b-45d1-9e87-7314e8e66c0c/')
tokenizer = CamembertTokenizer.from_pretrained('/home/crannou/workspace/serving-preset-images/sentiment-analysis-fr/app/model_sources')
model = CamembertForSequenceClassification.from_pretrained('/home/crannou/workspace/serving-preset-images/sentiment-analysis-fr/app/model_sources')
if __name__ == '__main__':
eval_model() | 40.024096 | 147 | 0.705298 |
6f31bdd4727dd7111ae865267e15057fbd15d9fb | 29 | py | Python | Pacotes/ex022.py | TonyRio/Python-Exercicios | 8a72d1b12418c6485794dae184425df0daf098bb | [
"MIT"
] | null | null | null | Pacotes/ex022.py | TonyRio/Python-Exercicios | 8a72d1b12418c6485794dae184425df0daf098bb | [
"MIT"
] | null | null | null | Pacotes/ex022.py | TonyRio/Python-Exercicios | 8a72d1b12418c6485794dae184425df0daf098bb | [
"MIT"
] | null | null | null | print (19 // 2 )
print( 19%2) | 14.5 | 16 | 0.551724 |
6f32849e7bc2a9a3bdff91b0ea97b373245c40e0 | 934 | py | Python | uscampgrounds/models.py | adamfast/geodjango-uscampgrounds | 0ddcdfee44dd2cb3525bbf852e93a58e5429d0d8 | [
"BSD-3-Clause"
] | 1 | 2020-06-26T22:32:25.000Z | 2020-06-26T22:32:25.000Z | uscampgrounds/models.py | adamfast/geodjango-uscampgrounds | 0ddcdfee44dd2cb3525bbf852e93a58e5429d0d8 | [
"BSD-3-Clause"
] | null | null | null | uscampgrounds/models.py | adamfast/geodjango-uscampgrounds | 0ddcdfee44dd2cb3525bbf852e93a58e5429d0d8 | [
"BSD-3-Clause"
] | null | null | null | from django.conf import settings
from django.contrib.gis.db import models
# integrate with the django-locator app for easy geo lookups if it's installed
if 'locator.objects' in settings.INSTALLED_APPS:
from locator.objects.models import create_locator_object
models.signals.post_save.connect(create_locator_object, sender=Campground)
| 35.923077 | 78 | 0.755889 |
6f34182931d744d711a9eaa391580c23eb3546c2 | 383 | py | Python | blog/users/urls.py | simpleOnly1/blog | 34343068318a64bd537e5862181e037fc4636247 | [
"MIT"
] | null | null | null | blog/users/urls.py | simpleOnly1/blog | 34343068318a64bd537e5862181e037fc4636247 | [
"MIT"
] | null | null | null | blog/users/urls.py | simpleOnly1/blog | 34343068318a64bd537e5862181e037fc4636247 | [
"MIT"
] | null | null | null | #users
from django.urls import path
from users.views import RegisterView, ImageCodeView,SmsCodeView
urlpatterns = [
#path
#path
path('register/', RegisterView.as_view(),name='register'),
#
path('imagecode/',ImageCodeView.as_view(),name='imagecode'),
#
path('smscode/',SmsCodeView.as_view(),name='smscode'),
] | 25.533333 | 64 | 0.715405 |
6f3478c403c5a4607452ef969c0985f21a247166 | 11,861 | py | Python | src/config/fabric-ansible/ansible-playbooks/filter_plugins/import_lldp_info.py | EWERK-DIGITAL/tf-controller | 311ea863b03d425a67d04d27c1f1b9cf1e20c926 | [
"Apache-2.0"
] | null | null | null | src/config/fabric-ansible/ansible-playbooks/filter_plugins/import_lldp_info.py | EWERK-DIGITAL/tf-controller | 311ea863b03d425a67d04d27c1f1b9cf1e20c926 | [
"Apache-2.0"
] | null | null | null | src/config/fabric-ansible/ansible-playbooks/filter_plugins/import_lldp_info.py | EWERK-DIGITAL/tf-controller | 311ea863b03d425a67d04d27c1f1b9cf1e20c926 | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/python
from builtins import object
from builtins import str
import sys
import traceback
sys.path.append("/opt/contrail/fabric_ansible_playbooks/module_utils") # noqa
from filter_utils import _task_done, _task_error_log, _task_log, FilterLog
from job_manager.job_utils import JobVncApi
| 39.802013 | 79 | 0.517073 |
6f355a92c02e0c6216729df9bbfec7b8bd8e4145 | 527 | py | Python | src/shared/_menu.py | MarcSkovMadsen/awesome-panel-starter | b76854882a041c7b955a59785d08e167ffef07af | [
"Apache-2.0"
] | 5 | 2021-01-04T16:39:09.000Z | 2021-08-03T15:26:49.000Z | src/shared/_menu.py | MarcSkovMadsen/awesome-panel-starter | b76854882a041c7b955a59785d08e167ffef07af | [
"Apache-2.0"
] | 6 | 2020-12-28T03:28:25.000Z | 2021-09-11T13:07:51.000Z | src/shared/_menu.py | MarcSkovMadsen/awesome-panel-starter | b76854882a041c7b955a59785d08e167ffef07af | [
"Apache-2.0"
] | 1 | 2021-09-15T20:08:44.000Z | 2021-09-15T20:08:44.000Z | """Provides the MENU html string which is appended to all templates
Please note that the MENU only works in [Fast](https://www.fast.design/) based templates.
If you need some sort of custom MENU html string feel free to customize this code.
"""
from awesome_panel_extensions.frameworks.fast.fast_menu import to_menu
from src.shared import config
if config.applications:
MENU = to_menu(
config.applications.values(), accent_color=config.color_primary, expand=["Main"]
).replace("\n", "")
else:
MENU = ""
| 31 | 89 | 0.73814 |
6f35c3c4af214e988cae123b40970464d22b95ab | 1,909 | py | Python | prediction-api/app.py | BrokenImage/raptor-api | 2cafc7fedf883a730d22dc0e2898f531d20fedf2 | [
"MIT",
"Unlicense"
] | null | null | null | prediction-api/app.py | BrokenImage/raptor-api | 2cafc7fedf883a730d22dc0e2898f531d20fedf2 | [
"MIT",
"Unlicense"
] | null | null | null | prediction-api/app.py | BrokenImage/raptor-api | 2cafc7fedf883a730d22dc0e2898f531d20fedf2 | [
"MIT",
"Unlicense"
] | null | null | null | import os
import boto3
import numpy as np
import tensorflow as tf
from flask import Flask
from dotenv import load_dotenv
from pymongo import MongoClient
from keras.models import load_model
from sklearn.preprocessing import LabelEncoder
from werkzeug.datastructures import FileStorage
from werkzeug.middleware.proxy_fix import ProxyFix
from flask_restplus import Api, Resource
from utils.Model import ModelManager
load_dotenv()
# Mongodb connection
client = MongoClient(os.environ['MONGO_CLIENT_URL'])
db = client.registry
# AWS S3 connection
session = boto3.Session(
aws_access_key_id=os.environ['AWS_ACCESS_KEY_ID'],
aws_secret_access_key=os.environ['AWS_SECRET_KEY']
)
s3 = session.resource('s3')
# App and API setup
app = Flask(__name__)
app.wsgi_app = ProxyFix(app.wsgi_app)
api = Api(app, version="1.0", title="Anomaly Detection", description="")
ns = api.namespace('api')
single_parser = api.parser()
single_parser.add_argument("files", location="files", type=FileStorage, action='append', required=True)
graph = tf.get_default_graph()
backup_model = load_model("./models/backup/model.h5")
backup_label_encoder = LabelEncoder()
backup_label_encoder.classes_ = np.load("./models/backup/classes.npy")
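# Hypothetical helper (an assumption, not part of the original file): decode the
# backup model's class probabilities back into label strings using the encoder
# loaded above.
def decode_predictions(probabilities):
    """Map a (batch, n_classes) probability array to human-readable labels."""
    indices = np.argmax(probabilities, axis=1)
    return backup_label_encoder.inverse_transform(indices).tolist()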
if __name__ == "__main__":
app.run(debug=True, host="0.0.0.0")
| 30.790323 | 103 | 0.736511 |
6f35ce7e4cec8e809fb6bd6d1db0395eade06403 | 633 | py | Python | misc/fill_blanks.py | netotz/codecamp | ff6b5ce1af1d99bbb00f7e095ca6beac92020b1c | [
"Unlicense"
] | null | null | null | misc/fill_blanks.py | netotz/codecamp | ff6b5ce1af1d99bbb00f7e095ca6beac92020b1c | [
"Unlicense"
] | null | null | null | misc/fill_blanks.py | netotz/codecamp | ff6b5ce1af1d99bbb00f7e095ca6beac92020b1c | [
"Unlicense"
] | 1 | 2020-04-05T06:22:18.000Z | 2020-04-05T06:22:18.000Z | # Given an array containing None values fill in the None values with most recent
# non None value in the array
from random import random
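# The original fill1/fill2 definitions are not shown above; the two functions
# below are illustrative sketches of the described behaviour (carry the most
# recent non-None value forward), not the author's original code.
def fill1(arr):
    """Forward-fill into a new list."""
    last = None
    result = []
    for value in arr:
        if value is not None:
            last = value
        result.append(last)
    return result


def fill2(arr):
    """Forward-fill in place, reusing the previous element."""
    for i in range(1, len(arr)):
        if arr[i] is None:
            arr[i] = arr[i - 1]
    return arr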
test = list(map(int, input().split()))
print(fill1(test))
print(fill2(test))
| 22.607143 | 81 | 0.593997 |
6f37404f1493e37478a90fbc8c755991983fccf9 | 3,836 | py | Python | beast/tests/helpers.py | marthaboyer/beast | 1ca71fb64ab60827e4e4e1937b64f319a98166c3 | [
"BSD-3-Clause"
] | null | null | null | beast/tests/helpers.py | marthaboyer/beast | 1ca71fb64ab60827e4e4e1937b64f319a98166c3 | [
"BSD-3-Clause"
] | null | null | null | beast/tests/helpers.py | marthaboyer/beast | 1ca71fb64ab60827e4e4e1937b64f319a98166c3 | [
"BSD-3-Clause"
] | null | null | null | # useful functions for BEAST tests
# put here instead of having in every tests
import os.path
import numpy as np
import h5py
from astropy.io import fits
from astropy.utils.data import download_file
__all__ = ['download_rename', 'compare_tables', 'compare_fits',
'compare_hdf5']
def download_rename(filename):
"""Download a file and rename it to have the right extension.
Otherwise, downloaded file will not have an extension at all and an
extension is needed for the BEAST.
Parameters
----------
filename : str
name of file to download
"""
url_loc = 'http://www.stsci.edu/~kgordon/beast/'
fname_dld = download_file('%s%s' % (url_loc, filename))
extension = filename.split('.')[-1]
fname = '%s.%s' % (fname_dld, extension)
os.rename(fname_dld, fname)
return fname
def compare_tables(table_cache, table_new):
"""
Compare two tables using astropy tables routines.
Parameters
----------
table_cache : astropy table
table_new : astropy table
data for comparision.
"""
assert len(table_new) == len(table_cache)
for tcolname in table_new.colnames:
# test numerical types for closeness
# and other types for equality
if table_new[tcolname].data.dtype.kind in ['f', 'i']:
np.testing.assert_allclose(table_new[tcolname],
table_cache[tcolname],
err_msg=('%s columns not equal'
% tcolname))
else:
np.testing.assert_equal(table_new[tcolname],
table_cache[tcolname],
err_msg=('%s columns not equal'
% tcolname))
def compare_fits(fname_cache, fname_new):
"""
Compare two FITS files.
Parameters
----------
fname_cache : str
    fname_new : str
names to FITS files
"""
fits_cache = fits.open(fname_cache)
fits_new = fits.open(fname_new)
assert len(fits_new) == len(fits_cache)
for k in range(1, len(fits_new)):
qname = fits_new[k].header['EXTNAME']
np.testing.assert_allclose(fits_new[k].data,
fits_cache[qname].data,
err_msg=('%s FITS extension not equal'
% qname))
def compare_hdf5(fname_cache, fname_new, ctype=None):
"""
Compare two hdf files.
Parameters
----------
fname_cache : str
    fname_new : str
names to hdf5 files
ctype : str
if set, string to identify the type of data being tested
"""
hdf_cache = h5py.File(fname_cache, 'r')
hdf_new = h5py.File(fname_new, 'r')
# go through the file and check if it is exactly the same
for sname in hdf_cache.keys():
if isinstance(hdf_cache[sname], h5py.Dataset):
cvalue = hdf_cache[sname]
cvalue_new = hdf_new[sname]
if ctype is not None:
osname = '%s/%s' % (ctype, sname)
else:
osname = sname
if cvalue.dtype.fields is None:
np.testing.assert_allclose(cvalue.value, cvalue_new.value,
err_msg='testing %s' % (osname),
rtol=1e-6)
else:
for ckey in cvalue.dtype.fields.keys():
err_msg = 'testing %s/%s' % (osname, ckey)
np.testing.assert_allclose(cvalue.value[ckey],
cvalue_new.value[ckey],
err_msg=err_msg,
rtol=1e-5)
| 31.966667 | 75 | 0.533889 |
6f38b06f669b537017b964e2c9d9bddd9b904d47 | 78,772 | py | Python | sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/operations/_page_blob_operations.py | jalauzon-msft/azure-sdk-for-python | 15967f5c6d3376f2334a382486ba86339786e028 | [
"MIT"
] | 1 | 2022-02-01T18:50:12.000Z | 2022-02-01T18:50:12.000Z | sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/operations/_page_blob_operations.py | ellhe-blaster/azure-sdk-for-python | 82193ba5e81cc5e5e5a5239bba58abe62e86f469 | [
"MIT"
] | null | null | null | sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/operations/_page_blob_operations.py | ellhe-blaster/azure-sdk-for-python | 82193ba5e81cc5e5e5a5239bba58abe62e86f469 | [
"MIT"
] | null | null | null | # pylint: disable=too-many-lines
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
import datetime
from typing import Any, Callable, Dict, IO, Optional, TypeVar, Union
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse
from azure.core.rest import HttpRequest
from azure.core.tracing.decorator_async import distributed_trace_async
from azure.core.utils import case_insensitive_dict
from ... import models as _models
from ..._vendor import _convert_request
from ...operations._page_blob_operations import build_clear_pages_request, build_copy_incremental_request, build_create_request, build_get_page_ranges_diff_request, build_get_page_ranges_request, build_resize_request, build_update_sequence_number_request, build_upload_pages_from_url_request, build_upload_pages_request
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
| 54.551247 | 319 | 0.692441 |
6f38ecb37fdc239d1019da968ae8c9a2467372bc | 4,494 | py | Python | Win_Source/ESP_Autostart.py | maschhoff/ESP32-433Mhz-Receiver-and-Tools | a7cb8c0740054650d38444781d2b7b6c18779a29 | [
"MIT"
] | 3 | 2020-11-29T18:38:48.000Z | 2022-02-23T15:13:56.000Z | Win_Source/ESP_Autostart.py | maschhoff/ESP32-433Mhz-Receiver-and-Tools | a7cb8c0740054650d38444781d2b7b6c18779a29 | [
"MIT"
] | null | null | null | Win_Source/ESP_Autostart.py | maschhoff/ESP32-433Mhz-Receiver-and-Tools | a7cb8c0740054650d38444781d2b7b6c18779a29 | [
"MIT"
] | 2 | 2021-07-25T18:03:12.000Z | 2021-07-26T11:50:14.000Z | # Detlev Aschhoff info@vmais.de
# The MIT License (MIT)
#
# Copyright (c) 2020
#
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
import time
from tkinter import *
from tkinter import ttk
from tkinter.messagebox import *
import serial
root=Tk()
root.title("ESP Autostart Changer")
err=""
#----------------------------------------------------------------------------------
#---------- Load widgets
frameButton = Frame(root)
frameButton.pack(fill='both')
button2=Button(frameButton, text="Autostart ON ", command=autoon)
button2.pack(side="right",padx="5",pady="2")
button1=Button(frameButton, text="Autostart OFF ", command=autooff)
button1.pack(side="right",padx="5")
hinweis = Label(root, fg = "lightgreen",bg = "gray", font = "Verdana 10 bold" )
hinweis.pack(fill='both',padx="5",pady="2")
hinweistxt="Change Autostart "
hinweis.config(text=hinweistxt)
status = Label(root)
status.pack(fill='both',padx="5",pady="2")
statustxt=" "
status.config(text=statustxt)
#------------------------------------------------------------------------------------
start()
root.mainloop()
| 29.372549 | 86 | 0.574099 |
6f392ad202bb9d010a7064f5991bc4aec4981e22 | 212 | py | Python | app/core/apps.py | KarimTayie/djangoadmin-test | 7218866dbf72ae580e605d2f32601557efe1baca | [
"MIT"
] | null | null | null | app/core/apps.py | KarimTayie/djangoadmin-test | 7218866dbf72ae580e605d2f32601557efe1baca | [
"MIT"
] | 1 | 2019-12-18T16:01:44.000Z | 2019-12-18T16:01:44.000Z | app/core/apps.py | KarimTayie/djangoadmin-test | 7218866dbf72ae580e605d2f32601557efe1baca | [
"MIT"
] | null | null | null | from django.apps import AppConfig
from django.contrib.admin.apps import AdminConfig
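# The concrete config classes are not shown above; these are minimal, assumed
# sketches of the usual pattern for a core app plus a custom admin site
# (class names and the default_site path are guesses, not the original code).
class CoreConfig(AppConfig):
    """Application configuration for the project's core app."""
    name = 'core'


class CoreAdminConfig(AdminConfig):
    """Point Django's admin app at a custom AdminSite implementation."""
    default_site = 'core.admin.CoreAdminSite'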
| 17.666667 | 49 | 0.768868 |
6f3bc48d07d6db347089edf80b48b6fd74fd6c76 | 2,108 | py | Python | download_cifar100_teacher.py | valeoai/QuEST | 02a23d2d8e0d059b4a30433f92eec5db146467f4 | [
"Apache-2.0"
] | 3 | 2021-06-03T22:45:47.000Z | 2022-03-27T18:50:06.000Z | download_cifar100_teacher.py | valeoai/QuEST | 02a23d2d8e0d059b4a30433f92eec5db146467f4 | [
"Apache-2.0"
] | null | null | null | download_cifar100_teacher.py | valeoai/QuEST | 02a23d2d8e0d059b4a30433f92eec5db146467f4 | [
"Apache-2.0"
] | 1 | 2021-08-20T15:39:40.000Z | 2021-08-20T15:39:40.000Z | import os
import urllib.request
os.makedirs('saved_models', exist_ok=True)
model_path = 'http://shape2prog.csail.mit.edu/repo/wrn_40_2_vanilla/ckpt_epoch_240.pth'
model_dir = 'saved_models/wrn_40_2_vanilla'
os.makedirs(model_dir, exist_ok=True)
urllib.request.urlretrieve(model_path, os.path.join(model_dir, model_path.split('/')[-1]))
print(f"Downloaded {model_path.split('repo/')[-1]} to saved_models/")
model_path = 'http://shape2prog.csail.mit.edu/repo/resnet56_vanilla/ckpt_epoch_240.pth'
model_dir = 'saved_models/resnet56_vanilla'
os.makedirs(model_dir, exist_ok=True)
urllib.request.urlretrieve(model_path, os.path.join(model_dir, model_path.split('/')[-1]))
print(f"Downloaded {model_path.split('repo/')[-1]} to saved_models/")
model_path = 'http://shape2prog.csail.mit.edu/repo/resnet110_vanilla/ckpt_epoch_240.pth'
model_dir = 'saved_models/resnet110_vanilla'
os.makedirs(model_dir, exist_ok=True)
urllib.request.urlretrieve(model_path, os.path.join(model_dir, model_path.split('/')[-1]))
print(f"Downloaded {model_path.split('repo/')[-1]} to saved_models/")
model_path = 'http://shape2prog.csail.mit.edu/repo/resnet32x4_vanilla/ckpt_epoch_240.pth'
model_dir = 'saved_models/resnet32x4_vanilla'
os.makedirs(model_dir, exist_ok=True)
urllib.request.urlretrieve(model_path, os.path.join(model_dir, model_path.split('/')[-1]))
print(f"Downloaded {model_path.split('repo/')[-1]} to saved_models/")
model_path = 'http://shape2prog.csail.mit.edu/repo/vgg13_vanilla/ckpt_epoch_240.pth'
model_dir = 'saved_models/vgg13_vanilla'
os.makedirs(model_dir, exist_ok=True)
urllib.request.urlretrieve(model_path, os.path.join(model_dir, model_path.split('/')[-1]))
print(f"Downloaded {model_path.split('repo/')[-1]} to saved_models/")
model_path = 'http://shape2prog.csail.mit.edu/repo/ResNet50_vanilla/ckpt_epoch_240.pth'
model_dir = 'saved_models/ResNet50_vanilla'
os.makedirs(model_dir, exist_ok=True)
urllib.request.urlretrieve(model_path, os.path.join(model_dir, model_path.split('/')[-1]))
print(f"Downloaded {model_path.split('repo/')[-1]} to saved_models/")
| 51.414634 | 91 | 0.766129 |
6f3bd5a39dfdffc25d3e3bcdbc5be1926e9811b6 | 48 | py | Python | aliyunsdkcore/__init__.py | gikoluo/aliyun-python-sdk-core | 5c4e79ad5f7668af048ae1a18d424c4919131a9c | [
"MIT"
] | null | null | null | aliyunsdkcore/__init__.py | gikoluo/aliyun-python-sdk-core | 5c4e79ad5f7668af048ae1a18d424c4919131a9c | [
"MIT"
] | null | null | null | aliyunsdkcore/__init__.py | gikoluo/aliyun-python-sdk-core | 5c4e79ad5f7668af048ae1a18d424c4919131a9c | [
"MIT"
] | 4 | 2017-07-27T11:27:01.000Z | 2020-09-01T07:49:21.000Z | __author__ = 'alex jiang'
__version__ = '2.3.3'
| 16 | 25 | 0.6875 |
6f3cd19601af3a6ec8e27fb00bfee8d9af472214 | 95,791 | py | Python | leaderboard/scenarios/background_activity.py | casper-auto/leaderboard | 111a48f9099c08a2f1068ee8aea2ad56ce52ef9d | [
"MIT"
] | 68 | 2020-03-25T10:04:21.000Z | 2022-03-21T01:03:39.000Z | leaderboard/scenarios/background_activity.py | casper-auto/leaderboard | 111a48f9099c08a2f1068ee8aea2ad56ce52ef9d | [
"MIT"
] | 32 | 2020-06-16T22:11:05.000Z | 2022-03-24T09:35:48.000Z | leaderboard/scenarios/background_activity.py | casper-auto/leaderboard | 111a48f9099c08a2f1068ee8aea2ad56ce52ef9d | [
"MIT"
] | 40 | 2020-03-21T23:43:39.000Z | 2022-01-03T14:04:31.000Z | #!/usr/bin/env python
#
# This work is licensed under the terms of the MIT license.
# For a copy, see <https://opensource.org/licenses/MIT>.
"""
Scenario spawning elements to make the town dynamic and interesting
"""
import math
from collections import OrderedDict
import py_trees
import numpy as np
import carla
from srunner.scenariomanager.carla_data_provider import CarlaDataProvider
from srunner.scenariomanager.scenarioatomics.atomic_behaviors import AtomicBehavior
from srunner.scenarios.basic_scenario import BasicScenario
DEBUG_COLORS = {
'road': carla.Color(0, 0, 255), # Blue
'opposite': carla.Color(255, 0, 0), # Red
'junction': carla.Color(0, 0, 0), # Black
'entry': carla.Color(255, 255, 0), # Yellow
'exit': carla.Color(0, 255, 255), # Teal
'connect': carla.Color(0, 255, 0), # Green
}
DEBUG_TYPE = {
'small': [0.8, 0.1],
'medium': [0.5, 0.15],
'large': [0.2, 0.2],
}
def draw_string(world, location, string='', debug_type='road', persistent=False):
"""Utility function to draw debugging strings"""
v_shift, _ = DEBUG_TYPE.get('small')
l_shift = carla.Location(z=v_shift)
    color = DEBUG_COLORS.get(debug_type, DEBUG_COLORS['road'])
life_time = 0.07 if not persistent else 100000
world.debug.draw_string(location + l_shift, string, False, color, life_time)
def draw_point(world, location, point_type='small', debug_type='road', persistent=False):
"""Utility function to draw debugging points"""
    v_shift, size = DEBUG_TYPE.get(point_type, DEBUG_TYPE['small'])
l_shift = carla.Location(z=v_shift)
    color = DEBUG_COLORS.get(debug_type, DEBUG_COLORS['road'])
life_time = 0.07 if not persistent else 100000
world.debug.draw_point(location + l_shift, size, color, life_time)
def get_same_dir_lanes(waypoint):
"""Gets all the lanes with the same direction of the road of a wp"""
same_dir_wps = [waypoint]
# Check roads on the right
right_wp = waypoint
while True:
possible_right_wp = right_wp.get_right_lane()
if possible_right_wp is None or possible_right_wp.lane_type != carla.LaneType.Driving:
break
right_wp = possible_right_wp
same_dir_wps.append(right_wp)
# Check roads on the left
left_wp = waypoint
while True:
possible_left_wp = left_wp.get_left_lane()
if possible_left_wp is None or possible_left_wp.lane_type != carla.LaneType.Driving:
break
if possible_left_wp.lane_id * left_wp.lane_id < 0:
break
left_wp = possible_left_wp
same_dir_wps.append(left_wp)
return same_dir_wps
def get_opposite_dir_lanes(waypoint):
"""Gets all the lanes with opposite direction of the road of a wp"""
other_dir_wps = []
other_dir_wp = None
# Get the first lane of the opposite direction
left_wp = waypoint
while True:
possible_left_wp = left_wp.get_left_lane()
if possible_left_wp is None:
break
if possible_left_wp.lane_id * left_wp.lane_id < 0:
other_dir_wp = possible_left_wp
break
left_wp = possible_left_wp
if not other_dir_wp:
return other_dir_wps
# Check roads on the right
right_wp = other_dir_wp
while True:
if right_wp.lane_type == carla.LaneType.Driving:
other_dir_wps.append(right_wp)
possible_right_wp = right_wp.get_right_lane()
if possible_right_wp is None:
break
right_wp = possible_right_wp
return other_dir_wps
def get_lane_key(waypoint):
"""Returns a key corresponding to the waypoint lane. Equivalent to a 'Lane'
object and used to compare waypoint lanes"""
return '' if waypoint is None else get_road_key(waypoint) + '*' + str(waypoint.lane_id)
def get_road_key(waypoint):
"""Returns a key corresponding to the waypoint road. Equivalent to a 'Road'
object and used to compare waypoint roads"""
return '' if waypoint is None else str(waypoint.road_id)
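# Illustrative convenience wrapper (an assumption, not part of the original
# scenario code): pair the same-direction lane search with comparison keys.
def get_same_dir_lane_keys(waypoint):
    """Return the lane keys of every lane sharing the waypoint's direction."""
    return [get_lane_key(wp) for wp in get_same_dir_lanes(waypoint)]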
| 44.741242 | 120 | 0.608784 |
6f3d6d699afb7966b9d1c11324477310b224dc24 | 502 | py | Python | Python/Interfacing_C_C++_Fortran/F2py/comp_pi_f2py.py | Gjacquenot/training-material | 16b29962bf5683f97a1072d961dd9f31e7468b8d | [
"CC-BY-4.0"
] | 115 | 2015-03-23T13:34:42.000Z | 2022-03-21T00:27:21.000Z | Python/Interfacing_C_C++_Fortran/F2py/comp_pi_f2py.py | Gjacquenot/training-material | 16b29962bf5683f97a1072d961dd9f31e7468b8d | [
"CC-BY-4.0"
] | 56 | 2015-02-25T15:04:26.000Z | 2022-01-03T07:42:48.000Z | Python/Interfacing_C_C++_Fortran/F2py/comp_pi_f2py.py | Gjacquenot/training-material | 16b29962bf5683f97a1072d961dd9f31e7468b8d | [
"CC-BY-4.0"
] | 59 | 2015-11-26T11:44:51.000Z | 2022-03-21T00:27:22.000Z | #!/usr/bin/env python
from argparse import ArgumentParser
import sys
from comp_pi import compute_pi
if __name__ == '__main__':
status = main()
sys.exit(status)
| 25.1 | 71 | 0.62749 |
6f3d81cff53a00e04f111ddf20aa94a2c2b57bda | 3,885 | py | Python | test/lazy/test_cat_lazy_tensor.py | Mehdishishehbor/gpytorch | 432e537b3f6679ea4ab3acf33b14626b7e161c92 | [
"MIT"
] | null | null | null | test/lazy/test_cat_lazy_tensor.py | Mehdishishehbor/gpytorch | 432e537b3f6679ea4ab3acf33b14626b7e161c92 | [
"MIT"
] | null | null | null | test/lazy/test_cat_lazy_tensor.py | Mehdishishehbor/gpytorch | 432e537b3f6679ea4ab3acf33b14626b7e161c92 | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
import unittest
import torch
from Lgpytorch.lazy import CatLazyTensor, NonLazyTensor
from Lgpytorch.test.lazy_tensor_test_case import LazyTensorTestCase
if __name__ == "__main__":
unittest.main()
| 31.844262 | 73 | 0.667954 |
6f3d9e5be4e02104620356819d1fd22753eef212 | 3,349 | py | Python | dbSchema.py | zikasak/ReadOnlyBot | 912403a5d6386c1ce691bbe22dad660af49b26e8 | [
"MIT"
] | 1 | 2020-12-17T20:50:29.000Z | 2020-12-17T20:50:29.000Z | dbSchema.py | zikasak/ReadOnlyBot | 912403a5d6386c1ce691bbe22dad660af49b26e8 | [
"MIT"
] | null | null | null | dbSchema.py | zikasak/ReadOnlyBot | 912403a5d6386c1ce691bbe22dad660af49b26e8 | [
"MIT"
] | null | null | null | import datetime
from sqlalchemy import Column, Integer, Boolean, ForeignKey, String, DateTime, UniqueConstraint, ForeignKeyConstraint
from sqlalchemy.orm import relationship
from dbConfig import Base, engine
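# The bot's actual ORM models are not shown above; the class below is only an
# assumed illustration of how the imported SQLAlchemy pieces fit together
# (table and column names are guesses, not the real schema).
class Chat(Base):
    __tablename__ = 'chats'

    id = Column(Integer, primary_key=True)
    title = Column(String, nullable=False)
    read_only = Column(Boolean, default=False, nullable=False)
    created_at = Column(DateTime, default=datetime.datetime.utcnow)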
Base.metadata.create_all(engine)
| 40.349398 | 121 | 0.701702 |
6f3e4697377cf878d0a79c14a88b2faa221afbab | 2,224 | py | Python | dqn/dqn_noisy_networks/model.py | AgentMaker/Paddle-RLBooks | 2e879f7ec3befa2058f0181e205b790d47770a85 | [
"Apache-2.0"
] | 127 | 2021-03-22T07:34:43.000Z | 2022-02-04T13:33:15.000Z | dqn/dqn_noisy_networks/model.py | WhiteFireFox/Paddle-RLBooks | 1a6add1d01b1bab08bb9d246fcd6ab852a43c18c | [
"Apache-2.0"
] | 1 | 2021-05-16T09:51:07.000Z | 2021-05-16T09:51:07.000Z | dqn/dqn_noisy_networks/model.py | WhiteFireFox/Paddle-RLBooks | 1a6add1d01b1bab08bb9d246fcd6ab852a43c18c | [
"Apache-2.0"
] | 16 | 2021-04-03T05:31:30.000Z | 2022-03-26T07:53:49.000Z | import paddle
import paddle.nn as nn
import paddle.nn.functional as F
from paddle.nn.initializer import Assign
import math
| 38.344828 | 131 | 0.619155 |
6f3fea7c8f1bfc40279f3c4ea0ed4489009162a1 | 50 | py | Python | players/__init__.py | lejbron/arkenstone | d5341c27ba81eaf116e5ee5983b4fa422437d294 | [
"MIT"
] | null | null | null | players/__init__.py | lejbron/arkenstone | d5341c27ba81eaf116e5ee5983b4fa422437d294 | [
"MIT"
] | 4 | 2021-03-17T19:46:35.000Z | 2021-04-09T11:37:53.000Z | players/__init__.py | lejbron/arkenstone | d5341c27ba81eaf116e5ee5983b4fa422437d294 | [
"MIT"
] | 1 | 2021-04-11T07:50:56.000Z | 2021-04-11T07:50:56.000Z | default_app_config = 'players.apps.PlayersConfig'
| 25 | 49 | 0.84 |
6f405d7dc1023a5440b606895121fbd0e2262df7 | 1,631 | py | Python | forte/utils/utils_io.py | swapnull7/forte | 737a72afd440d40c3826c3a7c5e4e44235c0f701 | [
"Apache-2.0"
] | 2 | 2021-01-01T12:07:27.000Z | 2021-09-10T03:57:18.000Z | forte/utils/utils_io.py | swapnull7/forte | 737a72afd440d40c3826c3a7c5e4e44235c0f701 | [
"Apache-2.0"
] | null | null | null | forte/utils/utils_io.py | swapnull7/forte | 737a72afd440d40c3826c3a7c5e4e44235c0f701 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 The Forte Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Utility functions related to input/output.
"""
import os
__all__ = [
"maybe_create_dir",
"ensure_dir",
"get_resource"
]
import sys
def maybe_create_dir(dirname: str) -> bool:
r"""Creates directory if it does not exist.
Args:
dirname (str): Path to the directory.
Returns:
bool: Whether a new directory is created.
"""
if not os.path.isdir(dirname):
os.makedirs(dirname)
return True
return False
def ensure_dir(filename: str):
"""
Args:
filename:
Returns:
"""
d = os.path.dirname(filename)
if d:
maybe_create_dir(d)
| 24.343284 | 77 | 0.660331 |
6f4276bb292fddfa79fdb894416964ab4cf57b3a | 4,834 | py | Python | src/AFN.py | mbampi/LinguagensRegulares | 1fc7fbcc21053577bbeb1f71e742aee3a48f2188 | [
"MIT"
] | null | null | null | src/AFN.py | mbampi/LinguagensRegulares | 1fc7fbcc21053577bbeb1f71e742aee3a48f2188 | [
"MIT"
] | null | null | null | src/AFN.py | mbampi/LinguagensRegulares | 1fc7fbcc21053577bbeb1f71e742aee3a48f2188 | [
"MIT"
] | null | null | null |
import re
from AFD import AFD
| 37.184615 | 156 | 0.528962 |
6f428bd943ae35a4fd79dc7877617c8e0b05143f | 11,348 | py | Python | cs_tools/tools/_searchable-dependencies/app.py | thoughtspot/cs_tools | 7b516476be94adf7f121645b7c3fc7206fdae4ca | [
"MIT"
] | 1 | 2022-03-14T19:04:53.000Z | 2022-03-14T19:04:53.000Z | cs_tools/tools/_searchable-dependencies/app.py | thoughtspot/cs_tools | 7b516476be94adf7f121645b7c3fc7206fdae4ca | [
"MIT"
] | 10 | 2021-06-01T14:34:52.000Z | 2022-03-24T00:47:47.000Z | cs_tools/tools/_searchable-dependencies/app.py | thoughtspot/cs_tools | 7b516476be94adf7f121645b7c3fc7206fdae4ca | [
"MIT"
] | null | null | null | from typing import List, Dict
import pathlib
import shutil
import enum
from typer import Option as O_
import typer
from cs_tools.helpers.cli_ux import console, frontend, CSToolsGroup, CSToolsCommand
from cs_tools.util.datetime import to_datetime
from cs_tools.tools.common import run_tql_command, run_tql_script, tsload
from cs_tools.util.algo import chunks
from cs_tools.settings import TSConfig
from cs_tools.const import FMT_TSLOAD_DATETIME
from cs_tools.thoughtspot import ThoughtSpot
from cs_tools.tools import common
from .util import FileQueue
HERE = pathlib.Path(__file__).parent
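# SystemType is referenced below but its definition is not shown here; this is
# an assumed, minimal stand-in mapping ThoughtSpot metadata type strings to
# friendlier names (the members shown are illustrative, not exhaustive).
class SystemType(enum.Enum):
    QUESTION_ANSWER_BOOK = 'answer'
    PINBOARD_ANSWER_BOOK = 'pinboard'
    LOGICAL_TABLE = 'worksheet or table'

    @classmethod
    def to_friendly(cls, type_name: str) -> str:
        """Translate an API metadata type string into a human-readable name."""
        try:
            return cls[type_name].value
        except KeyError:
            return type_name.lower()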
def _format_metadata_objects(queue, metadata: List[Dict]):
"""
Standardize data in an expected format.
This is a simple transformation layer, we are fitting our data to be
record-based and in the format that's expected for an eventual
tsload command.
"""
for parent in metadata:
queue.put({
'guid_': parent['id'],
'name': parent['name'],
'description': parent.get('description'),
'author_guid': parent['author'],
'author_name': parent['authorName'],
'author_display_name': parent['authorDisplayName'],
'created': to_datetime(parent['created'], unit='ms').strftime(FMT_TSLOAD_DATETIME),
'modified': to_datetime(parent['modified'], unit='ms').strftime(FMT_TSLOAD_DATETIME),
# 'modified_by': parent['modifiedBy'] # user.guid
'type': SystemType.to_friendly(parent['type']) if parent.get('type') else 'column',
'context': parent.get('owner')
})
def _format_dependency(queue, parent_guid, dependencies: Dict[str, Dict]):
"""
Standardize data in an expected format.
This is a simple transformation layer, we are fitting our data to be
record-based and in the format that's expected for an eventual
tsload command.
"""
for dependency in dependencies:
queue.put({
'guid_': dependency['id'],
'parent_guid': parent_guid,
'name': dependency['name'],
'description': dependency.get('description'),
'author_guid': dependency['author'],
'author_name': dependency['authorName'],
'author_display_name': dependency['authorDisplayName'],
'created': to_datetime(dependency['created'], unit='ms').strftime(FMT_TSLOAD_DATETIME),
'modified': to_datetime(dependency['modified'], unit='ms').strftime(FMT_TSLOAD_DATETIME),
# 'modified_by': dependency['modifiedBy'] # user.guid
'type': SystemType.to_friendly(dependency['type'])
})
app = typer.Typer(
help="""
Make Dependencies searchable in your platform.
[b][yellow]USE AT YOUR OWN RISK![/b] This tool uses private API calls which
could change on any version update and break the tool.[/]
Dependencies can be collected for various types of metadata. For example,
many tables are used within a worksheet, while many worksheets will have
answers and pinboards built on top of them.
\b
Metadata Object Metadata Dependent
- guid - guid
- name - parent guid
- description - name
- author guid - description
- author name - author guid
- author display name - author name
- created - author display name
- modified - created
- object type - modified
- context - object type
\f
Also available, but not developed for..
Tag / Stickers -> TAG
Embrace Connections -> DATA_SOURCE
""",
cls=CSToolsGroup,
options_metavar='[--version, --help]'
)
| 36.371795 | 139 | 0.607684 |
6f454cefd9a2976b1fecad345694dd6dc38f8205 | 6,098 | py | Python | bots/philBots.py | phyxl/GameOfPureStrategy | 95ec7b1cb0c85dbdd4da315dac02d12d5d9c1a6a | [
"MIT"
] | null | null | null | bots/philBots.py | phyxl/GameOfPureStrategy | 95ec7b1cb0c85dbdd4da315dac02d12d5d9c1a6a | [
"MIT"
] | null | null | null | bots/philBots.py | phyxl/GameOfPureStrategy | 95ec7b1cb0c85dbdd4da315dac02d12d5d9c1a6a | [
"MIT"
] | null | null | null | #!/usr/bin/python
import math
import random
from utils.log import log
from bots.simpleBots import BasicBot
| 38.594937 | 130 | 0.735323 |
6f459b6385eeaec430778e2b8c2a198dc774b06f | 1,280 | py | Python | tests/ws/TestWebsocketRegisterAgent.py | sinri/nehushtan | 6fda496e16a8d443a86c617173d35f31c392beb6 | [
"MIT"
] | null | null | null | tests/ws/TestWebsocketRegisterAgent.py | sinri/nehushtan | 6fda496e16a8d443a86c617173d35f31c392beb6 | [
"MIT"
] | 1 | 2020-11-20T03:10:23.000Z | 2020-11-20T09:30:34.000Z | tests/ws/TestWebsocketRegisterAgent.py | sinri/nehushtan | 6fda496e16a8d443a86c617173d35f31c392beb6 | [
"MIT"
] | 1 | 2021-10-13T10:16:58.000Z | 2021-10-13T10:16:58.000Z | import uuid
from typing import Dict, List
from nehushtan.ws.NehushtanWebsocketConnectionEntity import NehushtanWebsocketConnectionEntity
| 36.571429 | 99 | 0.682813 |
6f47b4b418f600c91349bca3f946db81bd280d01 | 470 | py | Python | decorator_pattern/starbuzz/condiment.py | garyeechung/design-pattern-practice | 00ca66b79773de06c2d043c33caf37cb5f40a507 | [
"MIT"
] | 2 | 2021-02-25T06:04:34.000Z | 2021-02-25T06:13:48.000Z | decorator_pattern/starbuzz/condiment.py | garyeechung/design-pattern-practice | 00ca66b79773de06c2d043c33caf37cb5f40a507 | [
"MIT"
] | 1 | 2021-02-17T16:45:58.000Z | 2021-02-23T12:54:39.000Z | decorator_pattern/starbuzz/condiment.py | garyeechung/design-pattern-practice | 00ca66b79773de06c2d043c33caf37cb5f40a507 | [
"MIT"
] | null | null | null | from .interface import Beverage, CondimentDecorator
| 20.434783 | 51 | 0.67234 |
6f480b5d92cd89679ad9577e9f8230981a8ae4ea | 1,641 | py | Python | src/geo_testing/test_scripts/psgs_big.py | hpgl/hpgl | 72d8c4113c242295de740513093f5779c94ba84a | [
"BSD-3-Clause"
] | 70 | 2015-01-21T12:24:50.000Z | 2022-03-16T02:10:45.000Z | src/geo_testing/test_scripts/psgs_big.py | hpgl/hpgl | 72d8c4113c242295de740513093f5779c94ba84a | [
"BSD-3-Clause"
] | 8 | 2015-04-22T13:14:30.000Z | 2021-11-23T12:16:32.000Z | src/geo_testing/test_scripts/psgs_big.py | hpgl/hpgl | 72d8c4113c242295de740513093f5779c94ba84a | [
"BSD-3-Clause"
] | 18 | 2015-02-15T18:04:31.000Z | 2021-01-16T08:54:32.000Z | #
#
# Copyright 2009 HPGL Team
#
# This file is part of HPGL (High Perfomance Geostatistics Library).
#
# HPGL is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, version 2 of the License.
#
# HPGL is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along with HPGL. If not, see http://www.gnu.org/licenses/.
#
from geo import *
from sys import *
import os
import time
if not os.path.exists("results/"):
os.mkdir("results/")
if not os.path.exists("results/medium/"):
os.mkdir("results/medium/")
#grid = SugarboxGrid(166, 141, 225)
#prop = load_cont_property("test_data/BIG_HARD_DATA.INC", -99)
grid = SugarboxGrid(166, 141, 20)
prop = load_cont_property("test_data/BIG_SOFT_DATA_CON_160_141_20.INC",-99)
sgs_params = {
"prop": prop,
"grid": grid,
"seed": 3439275,
"kriging_type": "sk",
"radiuses": (20, 20, 20),
"max_neighbours": 12,
"covariance_type": covariance.exponential,
"ranges": (10, 10, 10),
"sill": 0.4
}
for x in xrange(1):
time1 = time.time()
psgs_result = sgs_simulation(workers_count = x+2, use_new_psgs = True, **sgs_params)
time2 = time.time()
print "Workers: %s" % (x+2)
print "Time: %s" % (time2 - time1)
write_property(psgs_result, "results/medium/PSGS_workers_1.inc", "PSIS_MEDIUM_workers_1", -99)
| 33.489796 | 229 | 0.702011 |
6f484367a2e17cf732eb810bd88c47b5caccd1c1 | 166 | py | Python | app/src/constants.py | hubacekjirka/dailyPhotoTwitterBot | abd490b73603883d4e71bfa6076e9925a055fcb7 | [
"MIT"
] | 1 | 2020-03-16T10:51:07.000Z | 2020-03-16T10:51:07.000Z | app/src/constants.py | hubacekjirka/dailyPhotoTwitterBot | abd490b73603883d4e71bfa6076e9925a055fcb7 | [
"MIT"
] | 6 | 2019-08-11T10:00:36.000Z | 2021-06-02T00:18:58.000Z | app/src/constants.py | hubacekjirka/dailyPhotoTwitterBot | abd490b73603883d4e71bfa6076e9925a055fcb7 | [
"MIT"
] | 2 | 2019-09-30T18:45:47.000Z | 2021-01-09T10:38:14.000Z | friendly_camera_mapping = {
"GM1913": "Oneplus 7 Pro",
"FC3170": "Mavic Air 2",
# An analogue scanner in FilmNeverDie
"SP500": "Canon AE-1 Program"
}
| 23.714286 | 41 | 0.638554 |
6f486f62f9567ab5d28e26f5db6697fa139744ec | 1,622 | py | Python | refined/refinement_types.py | espetro/refined | c2f38418268e8d89634ede1265d869d8d54dc9d4 | [
"MIT"
] | 4 | 2021-10-04T19:53:04.000Z | 2021-12-17T07:08:42.000Z | refined/refinement_types.py | espetro/refined | c2f38418268e8d89634ede1265d869d8d54dc9d4 | [
"MIT"
] | null | null | null | refined/refinement_types.py | espetro/refined | c2f38418268e8d89634ede1265d869d8d54dc9d4 | [
"MIT"
] | null | null | null | from typing_extensions import Annotated, TypeGuard
from typing import TypeVar, List, Set, Dict
from refined.predicates import (
PositivePredicate,
NegativePredicate,
ValidIntPredicate,
ValidFloatPredicate,
EmptyPredicate,
NonEmptyPredicate,
TrimmedPredicate,
IPv4Predicate,
IPv6Predicate,
XmlPredicate,
CsvPredicate
)
__all__ = [
# numeric types
'Positive',
'Negative',
# string types
'TrimmedString',
'ValidIntString',
'ValidFloatString',
'XmlString',
'CsvString',
'IPv4String',
'IPv6String',
# generic collection types
'Empty',
'NonEmpty',
# concrete collection types
'NonEmptyString',
'NonEmptyList',
'NonEmptySet',
'NonEmptyDict',
]
_T1 = TypeVar("_T1")
_T2 = TypeVar("_T2")
Positive = Annotated[_T1, PositivePredicate[_T1]]
Negative = Annotated[_T1, NegativePredicate[_T1]]
TrimmedString = Annotated[str, TrimmedPredicate[str]]
ValidIntString = Annotated[str, ValidIntPredicate[str]]
ValidFloatString = Annotated[str, ValidFloatPredicate[str]]
XmlString = Annotated[str, XmlPredicate[str]]
CsvString = Annotated[str, CsvPredicate[str]]
IPv4String = Annotated[str, IPv4Predicate[str]]
IPv6String = Annotated[str, IPv6Predicate[str]]
Empty = Annotated[_T1, EmptyPredicate[_T1]]
NonEmpty = Annotated[_T1, NonEmptyPredicate[_T1]]
NonEmptyString = Annotated[str, NonEmptyPredicate[str]]
NonEmptyList = Annotated[List[_T1], NonEmptyPredicate[List[_T1]]]
NonEmptySet = Annotated[Set[_T1], NonEmptyPredicate[Set[_T1]]]
NonEmptyDict = Annotated[Dict[_T1, _T2], NonEmptyPredicate[Dict[_T1, _T2]]]
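# Illustrative usage sketch (an assumption, not part of the original module):
# refined aliases are plain Annotated types, so they drop into normal annotations.
def first_word(text: NonEmptyString) -> TrimmedString:
    """Return the first whitespace-separated token of a non-empty string."""
    return text.split()[0]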
| 25.34375 | 75 | 0.727497 |
6f4befaddb9a5f3e1b6a96cd0450bb3e135fa72a | 1,151 | py | Python | setup.py | evamvid/SpotPRIS2 | 4def72c626ac4184fbfb5741ae1f5616f9c34245 | [
"MIT"
] | null | null | null | setup.py | evamvid/SpotPRIS2 | 4def72c626ac4184fbfb5741ae1f5616f9c34245 | [
"MIT"
] | null | null | null | setup.py | evamvid/SpotPRIS2 | 4def72c626ac4184fbfb5741ae1f5616f9c34245 | [
"MIT"
] | null | null | null | from setuptools import setup
with open("README.md", "r") as f:
long_description = f.read()
setup(name="SpotPRIS2",
version='0.3.1',
author="Adrian Freund",
author_email="adrian@freund.io",
url="https://github.com/freundTech/SpotPRIS2",
description="MPRIS2 interface for Spotify Connect",
long_description=long_description,
packages=['spotpris2'],
package_dir={'spotpris2': "spotpris2"},
package_data={'spotpris2': ['mpris/*.xml', 'html/*.html']},
install_requires=[
"PyGObject",
"pydbus",
"spotipy>=2.8",
"appdirs",
],
entry_points={
'console_scripts': ["spotpris2=spotpris2.__main__:main"]
},
classifiers=[
"Development Status :: 3 - Alpha",
"Environment :: No Input/Output (Daemon)",
"Intended Audience :: End Users/Desktop",
"License :: OSI Approved :: MIT License",
"Operating System :: POSIX :: Linux",
"Programming Language :: Python :: 3 :: Only",
"Topic :: Multimedia :: Sound/Audio",
],
python_requires='>=3.6',
)
| 31.972222 | 66 | 0.569939 |
6f4c1702195066e993129a8eb57596bee6bd8234 | 2,371 | py | Python | partycipe/migrations/0001_initial.py | spexxsoldier51/PartyCipe | 5b8038db408fca1e1d568d6520daaf04889ccef0 | [
"CC0-1.0"
] | null | null | null | partycipe/migrations/0001_initial.py | spexxsoldier51/PartyCipe | 5b8038db408fca1e1d568d6520daaf04889ccef0 | [
"CC0-1.0"
] | null | null | null | partycipe/migrations/0001_initial.py | spexxsoldier51/PartyCipe | 5b8038db408fca1e1d568d6520daaf04889ccef0 | [
"CC0-1.0"
] | null | null | null | # Generated by Django 4.0.3 on 2022-04-02 17:32
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
| 43.907407 | 126 | 0.578237 |