hexsha
stringlengths 40
40
| size
int64 5
2.06M
| ext
stringclasses 11
values | lang
stringclasses 1
value | max_stars_repo_path
stringlengths 3
251
| max_stars_repo_name
stringlengths 4
130
| max_stars_repo_head_hexsha
stringlengths 40
78
| max_stars_repo_licenses
listlengths 1
10
| max_stars_count
int64 1
191k
⌀ | max_stars_repo_stars_event_min_datetime
stringlengths 24
24
⌀ | max_stars_repo_stars_event_max_datetime
stringlengths 24
24
⌀ | max_issues_repo_path
stringlengths 3
251
| max_issues_repo_name
stringlengths 4
130
| max_issues_repo_head_hexsha
stringlengths 40
78
| max_issues_repo_licenses
listlengths 1
10
| max_issues_count
int64 1
116k
⌀ | max_issues_repo_issues_event_min_datetime
stringlengths 24
24
⌀ | max_issues_repo_issues_event_max_datetime
stringlengths 24
24
⌀ | max_forks_repo_path
stringlengths 3
251
| max_forks_repo_name
stringlengths 4
130
| max_forks_repo_head_hexsha
stringlengths 40
78
| max_forks_repo_licenses
listlengths 1
10
| max_forks_count
int64 1
105k
⌀ | max_forks_repo_forks_event_min_datetime
stringlengths 24
24
⌀ | max_forks_repo_forks_event_max_datetime
stringlengths 24
24
⌀ | content
stringlengths 1
1.05M
| avg_line_length
float64 1
1.02M
| max_line_length
int64 3
1.04M
| alphanum_fraction
float64 0
1
|
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
ab1a1e11ddf7bd7dae943e8668ed1a5ba0c14a72
| 3,848
|
py
|
Python
|
applications/cli/commands/model/tests/test_export.py
|
nparkstar/nauta
|
1bda575a01f782d1dc2cd5221122651f184f7167
|
[
"Apache-2.0"
] | 390
|
2019-01-23T09:07:00.000Z
|
2022-02-20T04:03:34.000Z
|
applications/cli/commands/model/tests/test_export.py
|
nparkstar/nauta
|
1bda575a01f782d1dc2cd5221122651f184f7167
|
[
"Apache-2.0"
] | 52
|
2019-01-31T12:17:30.000Z
|
2022-02-10T00:01:39.000Z
|
applications/cli/commands/model/tests/test_export.py
|
nparkstar/nauta
|
1bda575a01f782d1dc2cd5221122651f184f7167
|
[
"Apache-2.0"
] | 66
|
2019-01-23T18:59:39.000Z
|
2020-10-18T15:24:00.000Z
|
#
# Copyright (c) 2019 Intel Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from click.testing import CliRunner
from cli_text_consts import ModelExportCmdTexts as Texts
from commands.model.common import workflow_description
from commands.model.export import export
from platform_resources.workflow import ArgoWorkflow, QUEUED_PHASE
FEM_NAME = "EXPORT_1"
SEM_NAME = "EXPORT_2"
FEM_PARAMETERS = "PARAMS_1"
SEM_PARAMETERS = "PARAMS_2"
FEM_START_DATE = '2000-01-01'
FEM_NAMESPACE = 'test-namespace'
TEST_AGROWORKFLOW = ArgoWorkflow(name=FEM_NAME, started_at=FEM_START_DATE, finished_at=None,
namespace=FEM_NAMESPACE, phase=None)
TWO_MODEL_OUTPUT = [workflow_description(name=FEM_NAME, parameters=FEM_PARAMETERS),
workflow_description(name=SEM_NAME, parameters=SEM_PARAMETERS)]
| 35.62963
| 109
| 0.747401
|
ab1a86e3a749c305907e0a449b620a088db1db5e
| 4,070
|
py
|
Python
|
var/spack/repos/builtin/packages/py-mdanalysis/package.py
|
LiamBindle/spack
|
e90d5ad6cfff2ba3de7b537d6511adccd9d5fcf1
|
[
"ECL-2.0",
"Apache-2.0",
"MIT-0",
"MIT"
] | 2,360
|
2017-11-06T08:47:01.000Z
|
2022-03-31T14:45:33.000Z
|
var/spack/repos/builtin/packages/py-mdanalysis/package.py
|
LiamBindle/spack
|
e90d5ad6cfff2ba3de7b537d6511adccd9d5fcf1
|
[
"ECL-2.0",
"Apache-2.0",
"MIT-0",
"MIT"
] | 13,838
|
2017-11-04T07:49:45.000Z
|
2022-03-31T23:38:39.000Z
|
var/spack/repos/builtin/packages/py-mdanalysis/package.py
|
LiamBindle/spack
|
e90d5ad6cfff2ba3de7b537d6511adccd9d5fcf1
|
[
"ECL-2.0",
"Apache-2.0",
"MIT-0",
"MIT"
] | 1,793
|
2017-11-04T07:45:50.000Z
|
2022-03-30T14:31:53.000Z
|
# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
| 54.266667
| 96
| 0.653317
|
ab1a9a9be99684f3bafd7d5cd35569aa18f68f49
| 786
|
py
|
Python
|
lesley-byte/graphpressure.py
|
lesley-byte/enviroplus-python
|
df08c238c8b550c9041ff06a0b6bef6b330af611
|
[
"MIT"
] | null | null | null |
lesley-byte/graphpressure.py
|
lesley-byte/enviroplus-python
|
df08c238c8b550c9041ff06a0b6bef6b330af611
|
[
"MIT"
] | null | null | null |
lesley-byte/graphpressure.py
|
lesley-byte/enviroplus-python
|
df08c238c8b550c9041ff06a0b6bef6b330af611
|
[
"MIT"
] | null | null | null |
from requests import get
import matplotlib.pyplot as plt
import matplotlib.animation as animation
import datetime as dt
from bme280 import BME280
try:
from smbus2 import SMBus
except ImportError:
from smbus import SMBus
fig = plt.figure()
ax = fig.add_subplot(1, 1, 1)
xs = []
ys =[]
bus = SMBus(1)
bme280 = BME280(i2c_dev=bus)
ani = animation.FuncAnimation(fig, animate, fargs=(xs, ys), interval=60000)
plt.show()
| 20.153846
| 75
| 0.670483
|
ab1ab02c6fe0df3ffafd8d3c0b4bb24aea453027
| 5,912
|
py
|
Python
|
bootstrapvz/plugins/ova/tasks.py
|
brett-smith/bootstrap-vz
|
2eaa98db684b85186f3ecd6e5d1304aaceca6b73
|
[
"Apache-2.0"
] | null | null | null |
bootstrapvz/plugins/ova/tasks.py
|
brett-smith/bootstrap-vz
|
2eaa98db684b85186f3ecd6e5d1304aaceca6b73
|
[
"Apache-2.0"
] | null | null | null |
bootstrapvz/plugins/ova/tasks.py
|
brett-smith/bootstrap-vz
|
2eaa98db684b85186f3ecd6e5d1304aaceca6b73
|
[
"Apache-2.0"
] | null | null | null |
from bootstrapvz.base import Task
from bootstrapvz.common import phases
from bootstrapvz.common.tasks import workspace
import os
import shutil
assets = os.path.normpath(os.path.join(os.path.dirname(__file__), 'assets'))
| 39.152318
| 113
| 0.692321
|
ab1ab3780950be34d6065669fa02273afffb05ab
| 3,498
|
py
|
Python
|
docs/conf.py
|
PhilippJunk/homelette
|
d6e585a215d7eef75ef6c837d1faf2d0ad8025c1
|
[
"MIT"
] | null | null | null |
docs/conf.py
|
PhilippJunk/homelette
|
d6e585a215d7eef75ef6c837d1faf2d0ad8025c1
|
[
"MIT"
] | null | null | null |
docs/conf.py
|
PhilippJunk/homelette
|
d6e585a215d7eef75ef6c837d1faf2d0ad8025c1
|
[
"MIT"
] | null | null | null |
# Configuration file for the Sphinx documentation builder.
#
# This file only contains a selection of the most common options. For a full
# list see the documentation:
# https://www.sphinx-doc.org/en/master/usage/configuration.html
# -- Path setup --------------------------------------------------------------
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
import os
import shutil
import sys
sys.path.insert(0, os.path.abspath('..'))
# -- Project information -----------------------------------------------------
project = 'homelette'
copyright = '2021, Philipp Junk, Christina Kiel'
author = 'Philipp Junk, Christina Kiel'
# -- General configuration ---------------------------------------------------
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.napoleon',
'nbsphinx',
'sphinx_rtd_theme',
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This pattern also affects html_static_path and html_extra_path.
exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
# -- Options for HTML output -------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = 'sphinx_rtd_theme'
html_logo = 'logo.png'
html_theme_options = {
'logo_only': False,
'style_nav_header_background': '#000000',
}
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
# html_static_path = ['_static']
# -- Options for LaTex output ------------------------------------------------
latex_elements = {
'preamble': r'''
\setcounter{tocdepth}{1}
\renewcommand{\hyperref}[2][]{#2}
'''
}
# -- Extension configuration: autodoc ----------------------------------------
autodoc_default_options = {
'member-order': 'bysource',
}
autoclass_content = 'class'
autodoc_mock_imports = ['altmod', 'modeller', 'ost', 'promod3', 'qmean',
'pandas']
# -- Extension configuration: napoleon ---------------------------------------
napoleon_use_ivar = True
# -- Copy notebooks to include in the documentation --------------------------
notebooks = [
'../examples/Tutorial1_Basics.ipynb',
'../examples/Tutorial2_Modelling.ipynb',
'../examples/Tutorial3_Evaluation.ipynb',
'../examples/Tutorial4_ExtendingHomelette.ipynb',
'../examples/Tutorial5_Parallelization.ipynb',
'../examples/Tutorial6_ComplexModelling.ipynb',
'../examples/Tutorial7_AssemblingPipelines.ipynb',
'../examples/Tutorial8_AlignmentGeneration.ipynb',
]
for notebook in notebooks:
if os.path.exists(notebook):
shutil.copy(notebook, '.')
# -- Copy logo ---------------------------------------------------------------
if os.path.exists('../logo/logo.png'):
shutil.copy('../logo/logo.png', '.')
| 33.314286
| 79
| 0.616638
|
ab1bca899ba1f440651e92b900de789b742e59bc
| 7,742
|
py
|
Python
|
bytecode2ast/parsers/bases.py
|
Cologler/bytecode2ast-python
|
407b261a493e018bc86388040ddfb6fb0e4b96d9
|
[
"MIT"
] | null | null | null |
bytecode2ast/parsers/bases.py
|
Cologler/bytecode2ast-python
|
407b261a493e018bc86388040ddfb6fb0e4b96d9
|
[
"MIT"
] | null | null | null |
bytecode2ast/parsers/bases.py
|
Cologler/bytecode2ast-python
|
407b261a493e018bc86388040ddfb6fb0e4b96d9
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
#
# Copyright (c) 2019~2999 - Cologler <skyoflw@gmail.com>
# ----------
# some object for parser
# ----------
from typing import List
import enum
import dis
from collections import defaultdict
# instrs
def add_instr(self, instr: dis.Instruction):
    ''' Record *instr* as handled by appending it to this state's instruction log. '''
    self._instrs.append(instr)
def get_instrs(self, key=None) -> List[dis.Instruction]:
    ''' Return the handled instructions of this state.

    With `key=None` a shallow copy of the full log is returned; otherwise
    only instructions whose opcode or opname equals `key` are kept.
    '''
    if key is None:
        return list(self._instrs)
    return [ins for ins in self._instrs if key in (ins.opcode, ins.opname)]
def copy(self):
    ''' Return a new `CodeState` with shallow copies of both stacks.

    The load stack and ast stack lists are copied, so pushes/pops on the
    copy do not affect this state; the contained nodes are shared.
    '''
    state = CodeState()
    state._load_stack = self._load_stack.copy()
    state._ast_stack = self._ast_stack.copy()
    return state
def copy_with_load(self, load_count):
    ''' copy a `CodeState` with empty ast stack.

    Only the top `load_count` entries of the load stack carry over.
    '''
    # NOTE(review): when load_count == 0 the slice [-0:] copies the WHOLE
    # load stack rather than none of it -- confirm callers never pass 0.
    state = CodeState()
    state._load_stack = self._load_stack[-load_count:]
    return state
def push(self, node):
    ''' Push *node* onto the load (expression) stack. '''
    self._load_stack.append(node)
def pop(self):
    ''' Remove and return the top node of the load stack. '''
    return self._load_stack.pop()
def pop_seq(self, count: int) -> list:
    ''' Remove and return the top `count` nodes of the load stack.

    The returned list preserves stack order (deepest first); `count`
    must be non-negative, and 0 yields an empty list.
    '''
    assert count >= 0
    if count == 0:
        return []
    # split once, then rebind the remainder as the new stack
    remaining, popped = self._load_stack[:-count], self._load_stack[-count:]
    self._load_stack = remaining
    return popped
def dup_top(self):
    ''' Duplicate the top of the load stack (a second reference, not a copy). '''
    top = self._load_stack[-1]
    self._load_stack.append(top)
def store(self, node):
    ''' Store *node*: currently just records it as a final ast node. '''
    self.add_node(node)
def add_node(self, node):
    ''' Append *node* to the current (last) stmts block of the ast tree. '''
    self._blocks[-1].append(node)
def get_value(self) -> list:
    ''' Return the stmts of the single block.

    All pending state and load-stack entries must have been consumed,
    and exactly one block may exist.
    '''
    # every instruction and stack entry must already be handled
    assert not self._state, self._state
    assert not self._load_stack, self._load_stack
    # a plain value read is only valid with a single block
    assert len(self._blocks) == 1, self._blocks
    return self._blocks[0]
def new_block(self):
    ''' Open a fresh, empty stmts block; later nodes are appended to it. '''
    self._blocks.append([])
def get_blocks(self) -> list:
    ''' Return every stmts block.

    Pending state and load-stack entries must have been fully consumed.
    '''
    for leftover in (self._state, self._load_stack):
        # anything left over means an instruction was not handled
        assert not leftover, leftover
    return self._blocks
def get_block_count(self) -> int:
    ''' Return how many stmts blocks currently exist. '''
    return len(self._blocks)
class CodeReaderIter:
    # Iterates instructions from a reader and feeds them through the
    # registered opcode handlers.
    # NOTE(review): iteration (`for instr in self`) relies on an __iter__
    # defined outside this view -- confirm against the full source.
    __slots__ = ('_reader', '_condition')

    def fill_state(self, state: CodeState):
        ''' iter self into the `CodeState` and return it. '''
        for instr in self:
            # dispatch each instruction to its registered opcode handler
            handler = get_instr_handler(instr)
            handler(self._reader, state, instr)
            # record the instruction as handled on the state
            state.add_instr(instr)
        return state

    def get_state(self, *, scope=Scope.NONE):
        ''' iter self into a new `CodeState`, return the `CodeState` '''
        state = CodeState(scope=scope)
        return self.fill_state(state)

    def get_value(self, *, scope=Scope.NONE):
        ''' iter self into a new `CodeState`, return value from `CodeState`. '''
        return self.get_state(scope=scope).get_value()

    def get_blocks(self, *, scope=Scope.NONE):
        ''' iter self into a new `CodeState`, return blocks from `CodeState`. '''
        return self.get_state(scope=scope).get_blocks()
_OPCODE_MAP = {}
def get_instr_handler(instr):
'''
the return function `(reader, state, instr) -> None`
'''
k = (instr.opname, instr.opcode)
try:
return _OPCODE_MAP[k]
except KeyError:
raise NotImplementedError(k, instr)
| 28.050725
| 82
| 0.597778
|
ab1c5aded9a853b37a00d0b031cb2cff207d2b22
| 15,055
|
py
|
Python
|
netbox/extras/forms.py
|
orphanedgamboa/netbox
|
5cdc38ec3adb5278480b267a6c8e674e9d3fca39
|
[
"Apache-2.0"
] | 1
|
2021-05-01T18:16:37.000Z
|
2021-05-01T18:16:37.000Z
|
netbox/extras/forms.py
|
orphanedgamboa/netbox
|
5cdc38ec3adb5278480b267a6c8e674e9d3fca39
|
[
"Apache-2.0"
] | null | null | null |
netbox/extras/forms.py
|
orphanedgamboa/netbox
|
5cdc38ec3adb5278480b267a6c8e674e9d3fca39
|
[
"Apache-2.0"
] | null | null | null |
from django import forms
from django.contrib.auth.models import User
from django.contrib.contenttypes.models import ContentType
from django.utils.safestring import mark_safe
from django.utils.translation import gettext as _
from dcim.models import DeviceRole, DeviceType, Platform, Region, Site, SiteGroup
from tenancy.models import Tenant, TenantGroup
from utilities.forms import (
add_blank_choice, APISelectMultiple, BootstrapMixin, BulkEditForm, BulkEditNullBooleanSelect, ColorSelect,
CommentField, ContentTypeMultipleChoiceField, CSVModelForm, DateTimePicker, DynamicModelMultipleChoiceField,
JSONField, SlugField, StaticSelect2, BOOLEAN_WITH_BLANK_CHOICES,
)
from virtualization.models import Cluster, ClusterGroup
from .choices import *
from .models import ConfigContext, CustomField, ImageAttachment, JournalEntry, ObjectChange, Tag
from .utils import FeatureQuery
#
# Custom fields
#
#
# Tags
#
#
# Config contexts
#
#
# Filter form for local config context data
#
#
# Image attachments
#
#
# Journal entries
#
#
# Change logging
#
#
# Scripts
#
| 28.40566
| 117
| 0.643374
|
ab1c9d3f78e7e9ff6cc93d1c78aab266fbaf43fb
| 3,122
|
py
|
Python
|
unwarp_models.py
|
zgjslc/Film-Recovery-master1
|
4497a9930398c9e826ac364056a79e5bcbf6c953
|
[
"Apache-2.0"
] | null | null | null |
unwarp_models.py
|
zgjslc/Film-Recovery-master1
|
4497a9930398c9e826ac364056a79e5bcbf6c953
|
[
"Apache-2.0"
] | null | null | null |
unwarp_models.py
|
zgjslc/Film-Recovery-master1
|
4497a9930398c9e826ac364056a79e5bcbf6c953
|
[
"Apache-2.0"
] | null | null | null |
import torch
import torch.nn as nn
import torch.nn.functional as F
from models.misc import modules
# Mapping of (source, target) modality pairs used by the unwarping models.
# NOTE(review): the meaning of the 3-tuple values (True, True, '') is not
# visible here -- presumably two enable flags plus an extra option string;
# confirm against models.misc.modules before relying on this.
constrain_path = {
    ('threeD', 'normal'): (True, True, ''),
    ('threeD', 'depth'): (True, True, ''),
    ('normal', 'depth'): (True, True, ''),
    ('depth', 'normal'): (True, True, ''),
}
| 51.180328
| 105
| 0.686739
|
ab1d101c4bcfc97cfc157f818f4f8698285ba31c
| 12,768
|
py
|
Python
|
endpoint/test_endpoint/update.py
|
pansila/Auto-Test-System
|
bfe51a277466939a32daa08f27a89cf3c1900def
|
[
"MIT"
] | 14
|
2019-02-19T01:31:08.000Z
|
2021-12-12T12:56:08.000Z
|
endpoint/test_endpoint/update.py
|
pansila/Auto-Test-System
|
bfe51a277466939a32daa08f27a89cf3c1900def
|
[
"MIT"
] | 2
|
2020-03-10T12:12:10.000Z
|
2020-03-10T12:12:10.000Z
|
endpoint/test_endpoint/update.py
|
pansila/Auto-Test-System
|
bfe51a277466939a32daa08f27a89cf3c1900def
|
[
"MIT"
] | 4
|
2019-07-09T02:00:13.000Z
|
2020-08-18T14:04:24.000Z
|
import configparser
import os
import hashlib
import json
import shutil
import sys
import tempfile
import subprocess
import tarfile
import re
import stat
from functools import cmp_to_key
from contextlib import closing
from gzip import GzipFile
from pathlib import Path
from urllib.error import HTTPError
from urllib.request import Request
from urllib.request import urlopen
WINDOWS = sys.platform == "win32"
BOOTSTRAP = """\
import os, sys
import re
import subprocess
def _which_python():
allowed_executables = ["python3", "python"]
if sys.platform == 'win32':
# in favor of 32 bit python to be compatible with the 32bit dlls of test libraries
allowed_executables[:0] = ["py.exe -3-32", "py.exe -2-32", "py.exe -3-64", "py.exe -2-64"]
# \d in regex ensures we can convert to int later
version_matcher = re.compile(r"^Python (?P<major>\d+)\.(?P<minor>\d+)\..+$")
fallback = None
for executable in allowed_executables:
try:
raw_version = subprocess.check_output(
executable + " --version", stderr=subprocess.STDOUT, shell=True
).decode("utf-8")
except subprocess.CalledProcessError:
continue
match = version_matcher.match(raw_version.strip())
if match and tuple(map(int, match.groups())) >= (3, 0):
# favor the first py3 executable we can find.
return executable
if fallback is None:
# keep this one as the fallback; it was the first valid executable we found.
fallback = executable
if fallback is None:
# Avoid breaking existing scripts
fallback = "python"
return fallback
if __name__ == '__main__':
py_executable = _which_python()
subprocess.run(py_executable + r' {collie_bin} ' + ' '.join(sys.argv[1:]), shell=True)
"""
BIN = """#!/usr/bin/env python
# -*- coding: utf-8 -*-
import sys
import os
import argparse
lib = os.path.normpath(os.path.join(os.path.realpath(__file__), "..", "..", "lib", "collie"))
sys.path.insert(0, lib)
from test_endpoint.app import main
if __name__ == "__main__":
sys.exit(main())
"""
BAT = '@echo off\r\n{python_executable} "{collie_bootstrap}" %*\r\n'
SH = '#!/bin/sh\npython3 "{collie_bootstrap}" $*\n'
def expanduser(path):
    """
    Expand ~ and ~user constructions in *path*.

    Includes a workaround for http://bugs.python.org/issue14768:
    expanding "~/..." can produce a doubled leading slash on some
    platforms, which is collapsed back to a single slash.
    """
    result = os.path.expanduser(path)
    if result.startswith("//") and path.startswith("~/"):
        result = result[1:]
    return result
def _which_python(self):
    """
    Decides which python executable we'll embed in the launcher script.

    Probes each candidate with `<exe> --version` and returns the first
    one reporting Python 3.x; otherwise falls back to the first candidate
    that ran at all, or the literal "python".
    NOTE(review): `self` is never used -- this looks like a method that
    was lifted to module level; confirm callers before changing it.
    """
    allowed_executables = ["python", "python3"]
    if WINDOWS:
        # the py launcher can locate interpreters not on PATH
        allowed_executables += ["py.exe -3", "py.exe -2"]
    # \d in regex ensures we can convert to int later
    version_matcher = re.compile(r"^Python (?P<major>\d+)\.(?P<minor>\d+)\..+$")
    fallback = None
    for executable in allowed_executables:
        try:
            raw_version = subprocess.check_output(
                executable + " --version", stderr=subprocess.STDOUT, shell=True
            ).decode("utf-8")
        except subprocess.CalledProcessError:
            # candidate missing or failed to run; try the next one
            continue
        match = version_matcher.match(raw_version.strip())
        if match and tuple(map(int, match.groups())) >= (3, 0):
            # favor the first py3 executable we can find.
            return executable
        if fallback is None:
            # keep this one as the fallback; it was the first valid executable we found.
            fallback = executable
    if fallback is None:
        # Avoid breaking existing scripts
        fallback = "python"
    return fallback
| 31.60396
| 98
| 0.56344
|
ab1d930ad268269a2d4b9569657fc14b57b495e4
| 690
|
py
|
Python
|
lib/jbgp/jbgpneighbor.py
|
routedo/junos-pyez-example
|
b89df2d40ca0a233529e4a26b42dd605c00aae46
|
[
"Apache-2.0"
] | null | null | null |
lib/jbgp/jbgpneighbor.py
|
routedo/junos-pyez-example
|
b89df2d40ca0a233529e4a26b42dd605c00aae46
|
[
"Apache-2.0"
] | null | null | null |
lib/jbgp/jbgpneighbor.py
|
routedo/junos-pyez-example
|
b89df2d40ca0a233529e4a26b42dd605c00aae46
|
[
"Apache-2.0"
] | 1
|
2020-06-17T12:17:18.000Z
|
2020-06-17T12:17:18.000Z
|
"""
Query BGP neighbor table on a Juniper network device.
"""
import sys
from jnpr.junos import Device
from jnpr.junos.factory import loadyaml
def juniper_bgp_state(dev, bgp_neighbor):
    """
    Query the BGP neighbor table on a Juniper network device.

    dev = open Junos PyEZ Device connection
    bgp_neighbor = IP address of the BGP neighbor to look up
    return = table entry for that neighbor (state readable from it)

    On any failure the error is printed, the device session is closed
    and the process exits with status 1.
    """
    try:
        # loadyaml() builds the table/view classes (incl. bgp_neighbor_info)
        # from the YAML definition and injects them into this namespace.
        globals().update(loadyaml('yaml/bgp_neighbor.yml'))
        return bgp_neighbor_info(dev).get(neighbor_address=bgp_neighbor)
    except Exception as err:
        # best-effort cleanup: report the error, close the session, abort
        print(err)
        dev.close()
        sys.exit(1)
    # (removed two unreachable duplicate `return` statements that followed
    # the try/except -- every path above already returns or exits)
| 23
| 77
| 0.676812
|
ab1fe51ebbcd4a1dc4363d8ff7260094c438deca
| 2,170
|
py
|
Python
|
lib/cherrypy/cherrypy/test/test_sessionauthenticate.py
|
MiCHiLU/google_appengine_sdk
|
3da9f20d7e65e26c4938d2c4054bc4f39cbc5522
|
[
"Apache-2.0"
] | 790
|
2015-01-03T02:13:39.000Z
|
2020-05-10T19:53:57.000Z
|
AppServer/lib/cherrypy/cherrypy/test/test_sessionauthenticate.py
|
nlake44/appscale
|
6944af660ca4cb772c9b6c2332ab28e5ef4d849f
|
[
"Apache-2.0"
] | 1,361
|
2015-01-08T23:09:40.000Z
|
2020-04-14T00:03:04.000Z
|
AppServer/lib/cherrypy/cherrypy/test/test_sessionauthenticate.py
|
nlake44/appscale
|
6944af660ca4cb772c9b6c2332ab28e5ef4d849f
|
[
"Apache-2.0"
] | 162
|
2015-01-01T00:21:16.000Z
|
2022-02-23T02:36:04.000Z
|
import cherrypy
from cherrypy.test import helper
| 34.444444
| 82
| 0.566359
|
ab202528012b6880e43938d0db79af54bf805f9b
| 1,145
|
py
|
Python
|
2021/day-12/solve.py
|
amochtar/adventofcode
|
292e7f00a1e19d2149d00246b0a77fedfcd3bd08
|
[
"MIT"
] | 1
|
2019-12-27T22:36:30.000Z
|
2019-12-27T22:36:30.000Z
|
2021/day-12/solve.py
|
amochtar/adventofcode
|
292e7f00a1e19d2149d00246b0a77fedfcd3bd08
|
[
"MIT"
] | null | null | null |
2021/day-12/solve.py
|
amochtar/adventofcode
|
292e7f00a1e19d2149d00246b0a77fedfcd3bd08
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
from typing import List
import aoc
from collections import defaultdict
with open('test2.txt', 'r') as f:
inp = f.read()
print("Part 1:", solve(inp))
print("Part 2:", solve(inp, True))
with open('input.txt', 'r') as f:
inp = f.read()
print("Part 1:", solve(inp))
print("Part 2:", solve(inp, True))
| 23.367347
| 73
| 0.541485
|
ab207da0020d38ce47419c0053bab12a37bcf81b
| 11,387
|
py
|
Python
|
PaddleCV/tracking/ltr/data/processing.py
|
suytingwan/models
|
ccdbfe77d071cc19b55fb9f4b738912e35d982ef
|
[
"Apache-2.0"
] | 5
|
2021-09-28T13:28:01.000Z
|
2021-12-21T07:25:44.000Z
|
PaddleCV/tracking/ltr/data/processing.py
|
suytingwan/models
|
ccdbfe77d071cc19b55fb9f4b738912e35d982ef
|
[
"Apache-2.0"
] | 1
|
2020-07-02T03:05:00.000Z
|
2020-07-02T03:05:00.000Z
|
PaddleCV/tracking/ltr/data/processing.py
|
suytingwan/models
|
ccdbfe77d071cc19b55fb9f4b738912e35d982ef
|
[
"Apache-2.0"
] | 3
|
2021-09-28T15:33:45.000Z
|
2021-09-29T01:44:32.000Z
|
import numpy as np
from ltr.data import transforms
import ltr.data.processing_utils as prutils
from pytracking.libs import TensorDict
| 43.296578
| 121
| 0.558971
|
ab21105c56263980d75d2b1bb1e9d7beba919be5
| 884
|
py
|
Python
|
tqcli/config/config.py
|
Tranquant/tqcli
|
0cc12e0d80129a14cec8117cd73e2ca69fb25214
|
[
"Apache-2.0"
] | null | null | null |
tqcli/config/config.py
|
Tranquant/tqcli
|
0cc12e0d80129a14cec8117cd73e2ca69fb25214
|
[
"Apache-2.0"
] | null | null | null |
tqcli/config/config.py
|
Tranquant/tqcli
|
0cc12e0d80129a14cec8117cd73e2ca69fb25214
|
[
"Apache-2.0"
] | 1
|
2016-08-16T03:43:36.000Z
|
2016-08-16T03:43:36.000Z
|
import logging
from os.path import expanduser
#TQ_API_ROOT_URL = 'http://127.0.1.1:8090/dataset'
# Production dataset endpoint behind the AWS ELB.
TQ_API_ROOT_URL = 'http://elb-tranquant-ecs-cluster-tqapi-1919110681.us-west-2.elb.amazonaws.com/dataset'
# Log file lives in the user's home directory.
LOG_PATH = expanduser('~/tqcli.log')
# the chunk size must be at least 5MB for multipart upload
DEFAULT_CHUNK_SIZE = 1024 * 1024 * 5  # 5MB
# Root logger: full DEBUG detail goes to the log file (truncated each run).
logging.basicConfig(
    level=logging.DEBUG,
    format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
    filename=LOG_PATH,
    filemode='w'
)
# define a Handler which writes INFO messages or higher to the sys.stderr
console = logging.StreamHandler()
console.setLevel(logging.INFO)
# set a format which is simpler for console use
formatter = logging.Formatter('%(message)s')
# tell the handler to use this format
console.setFormatter(formatter)
# add the handler to the root logger
logging.getLogger('').addHandler(console)
| 30.482759
| 105
| 0.745475
|
ab21d266138fcacadbe38aeb0f70a2986ce949b2
| 8,564
|
py
|
Python
|
fqf_iqn_qrdqn/agent/base_agent.py
|
rainwangphy/fqf-iqn-qrdqn.pytorch
|
351e9c4722c8b1ed411cd8c1bbf46c93c07f0893
|
[
"MIT"
] | null | null | null |
fqf_iqn_qrdqn/agent/base_agent.py
|
rainwangphy/fqf-iqn-qrdqn.pytorch
|
351e9c4722c8b1ed411cd8c1bbf46c93c07f0893
|
[
"MIT"
] | null | null | null |
fqf_iqn_qrdqn/agent/base_agent.py
|
rainwangphy/fqf-iqn-qrdqn.pytorch
|
351e9c4722c8b1ed411cd8c1bbf46c93c07f0893
|
[
"MIT"
] | 1
|
2022-02-14T02:55:01.000Z
|
2022-02-14T02:55:01.000Z
|
from abc import ABC, abstractmethod
import os
import numpy as np
import torch
from torch.utils.tensorboard import SummaryWriter
from fqf_iqn_qrdqn.memory import LazyMultiStepMemory, \
LazyPrioritizedMultiStepMemory
from fqf_iqn_qrdqn.utils import RunningMeanStats, LinearAnneaer
| 34.119522
| 79
| 0.599603
|
ab224c0b7dd96b0783239d1ab27b2b04825a3e94
| 4,122
|
py
|
Python
|
Python/libraries/recognizers-date-time/recognizers_date_time/date_time/italian/timeperiod_extractor_config.py
|
felaray/Recognizers-Text
|
f514fd61c8d472ed92565261162712409f655312
|
[
"MIT"
] | null | null | null |
Python/libraries/recognizers-date-time/recognizers_date_time/date_time/italian/timeperiod_extractor_config.py
|
felaray/Recognizers-Text
|
f514fd61c8d472ed92565261162712409f655312
|
[
"MIT"
] | 6
|
2021-12-20T17:13:35.000Z
|
2022-03-29T08:54:11.000Z
|
Python/libraries/recognizers-date-time/recognizers_date_time/date_time/italian/timeperiod_extractor_config.py
|
felaray/Recognizers-Text
|
f514fd61c8d472ed92565261162712409f655312
|
[
"MIT"
] | null | null | null |
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License.
from typing import List, Pattern
from recognizers_text.utilities import RegExpUtility
from recognizers_text.extractor import Extractor
from recognizers_number.number.italian.extractors import ItalianIntegerExtractor
from ...resources.italian_date_time import ItalianDateTime
from ..extractors import DateTimeExtractor
from ..base_timeperiod import TimePeriodExtractorConfiguration, MatchedIndex
from ..base_time import BaseTimeExtractor
from ..base_timezone import BaseTimeZoneExtractor
from .time_extractor_config import ItalianTimeExtractorConfiguration
from .base_configs import ItalianDateTimeUtilityConfiguration
from .timezone_extractor_config import ItalianTimeZoneExtractorConfiguration
| 37.816514
| 96
| 0.743328
|
ab2324a100ecb32532716cd76301eba78659a0c1
| 3,012
|
py
|
Python
|
quartet_condor.py
|
BotanyHunter/QuartetAnalysis
|
c9b21aac267718be5ea8a8a76632fc0a3feb8403
|
[
"MIT"
] | null | null | null |
quartet_condor.py
|
BotanyHunter/QuartetAnalysis
|
c9b21aac267718be5ea8a8a76632fc0a3feb8403
|
[
"MIT"
] | null | null | null |
quartet_condor.py
|
BotanyHunter/QuartetAnalysis
|
c9b21aac267718be5ea8a8a76632fc0a3feb8403
|
[
"MIT"
] | null | null | null |
#quartet_condor.py
#version 2.0.2
import random, sys
def addToDict(d):
    '''
    Ensure the quartet dictionary carries all three concordance factors (CFs).

    Each of the three quartet-topology keys missing from `d` is added with
    a CF of 0.0; entries already present are left untouched.

    Input: d -- dictionary mapping topology strings to CF values
    '''
    for topology in ("{1,2|3,4}", "{1,3|2,4}", "{1,4|2,3}"):
        if topology not in d:
            d[topology] = 0.0
| 33.466667
| 98
| 0.537849
|
ab24554d8fbfa3bde2693fe79ec073ac02c9f577
| 177
|
py
|
Python
|
src/profiles/forms.py
|
rahulroshan96/CloudVisual
|
aa33709d88442bcdbe3229234b4eb4f9abb4481e
|
[
"MIT",
"Unlicense"
] | null | null | null |
src/profiles/forms.py
|
rahulroshan96/CloudVisual
|
aa33709d88442bcdbe3229234b4eb4f9abb4481e
|
[
"MIT",
"Unlicense"
] | 4
|
2020-06-05T19:43:52.000Z
|
2021-06-01T23:15:44.000Z
|
src/profiles/forms.py
|
rahulroshan96/CloudVisual
|
aa33709d88442bcdbe3229234b4eb4f9abb4481e
|
[
"MIT",
"Unlicense"
] | null | null | null |
from django import forms
from models import UserInputModel
| 22.125
| 37
| 0.717514
|
ab253b2fa27d701106a981880d15472309de60c1
| 2,379
|
py
|
Python
|
tests_oval_graph/test_arf_xml_parser/test_arf_xml_parser.py
|
Honny1/oval-graph
|
96472a9d2b08c2afce620c54f229ce95ad019d1f
|
[
"Apache-2.0"
] | 21
|
2019-08-01T09:09:25.000Z
|
2020-09-27T10:00:09.000Z
|
tests_oval_graph/test_arf_xml_parser/test_arf_xml_parser.py
|
Honny1/oval-graph
|
96472a9d2b08c2afce620c54f229ce95ad019d1f
|
[
"Apache-2.0"
] | 129
|
2019-08-04T19:06:24.000Z
|
2020-10-03T10:02:26.000Z
|
tests_oval_graph/test_arf_xml_parser/test_arf_xml_parser.py
|
Honny1/oval-graph
|
96472a9d2b08c2afce620c54f229ce95ad019d1f
|
[
"Apache-2.0"
] | 11
|
2019-08-07T08:53:54.000Z
|
2020-10-02T22:02:38.000Z
|
from pathlib import Path
import pytest
from oval_graph.arf_xml_parser.arf_xml_parser import ARFXMLParser
| 30.896104
| 93
| 0.721732
|
ab2601bcecd2c5b5f36345a106f14a3b9c2ff88d
| 5,668
|
py
|
Python
|
main.py
|
scottjr632/trump-twitter-bot
|
484b1324d752395338b0a9e5850acf294089b26f
|
[
"MIT"
] | null | null | null |
main.py
|
scottjr632/trump-twitter-bot
|
484b1324d752395338b0a9e5850acf294089b26f
|
[
"MIT"
] | null | null | null |
main.py
|
scottjr632/trump-twitter-bot
|
484b1324d752395338b0a9e5850acf294089b26f
|
[
"MIT"
] | null | null | null |
import os
import logging
import argparse
import sys
import signal
import subprocess
from functools import wraps
from dotenv import load_dotenv
load_dotenv(verbose=True)
from app.config import configure_app
from app.bot import TrumpBotScheduler
from app.sentimentbot import SentimentBot
parser = argparse.ArgumentParser(description=r"""
""")
ROOT = os.getcwd()
PID_FILE_PATH = os.path.join(ROOT, 'var/run-dev.pid')
CMDS = []
FNCS = []
try:
os.setpgrp()
if not os.path.exists(os.path.dirname(PID_FILE_PATH)):
os.makedirs(os.path.dirname(PID_FILE_PATH))
with open(PID_FILE_PATH, 'w+') as file:
file.write(str(os.getpgrp()) + '\n')
except Exception as e:
logging.error(e)
def _start_flask_server(*args, **kwargs):
from app import app
logging.info('Starting the flask server...')
level = os.environ.get('CONFIG_LEVEL')
configure_app(app, status='production' if level is None else level)
port = app.config.get('PORT')
app.run(host='0.0.0.0', port=port)
def _start_dev_server(*args, **kwargs):
_start_client_server()
FNCS.append(_start_flask_server)
def _start_prod_server(*args, **kwargs):
_start_trump_bot(*args, **kwargs)
_start_flask_server(*args, **kwargs)
def _start_trump_bot(send_posts=True, start_sentiment_bot=False, *args, **kwargs):
logging.info('Starting the trump bot...')
# requests_path = os.environ.get('REQUESTS_FILE_PATH', 'requests/request.json')
# auth_path = os.environ.get('AUTH_FILE_PATH', 'requests/auth.json')
# _file_path_sanity_check(requests_path, auth_path)
bot = _initialize_trump_bot(send_posts=send_posts)
if not start_sentiment_bot:
_start_sentiment_bot(trump_bot=bot, send_posts=send_posts)
bot.start()
ACTIONS = {
"initialize": _initialize_trump_bot,
"client": _start_client_server,
"trumpbot": _start_trump_bot,
"flask": _start_flask_server,
"dev": _start_dev_server,
"prod": _start_prod_server,
}
parser.add_argument('action',
help='start the Flask app',
type=str,
choices=[key for key, v in ACTIONS.items()])
parser.add_argument('-np', '--no-post',
dest='send_posts',
action='store_true',
help='Do not send post requests')
parser.add_argument('-nsb', '--no-sentiment-bot',
dest='start_sentiment_bot',
action='store_true',
help='Do not to start the sentiment bot')
if __name__ == "__main__":
logging.basicConfig(level=logging.INFO)
main()
| 27.64878
| 106
| 0.657728
|
ab27a7af29a41d40eec1afd58d05fca20bfc3c8b
| 691
|
py
|
Python
|
010-summation-of-primes.py
|
dendi239/euler
|
71fcdca4a80f9e586aab05eb8acadf1a296dda90
|
[
"MIT"
] | null | null | null |
010-summation-of-primes.py
|
dendi239/euler
|
71fcdca4a80f9e586aab05eb8acadf1a296dda90
|
[
"MIT"
] | null | null | null |
010-summation-of-primes.py
|
dendi239/euler
|
71fcdca4a80f9e586aab05eb8acadf1a296dda90
|
[
"MIT"
] | null | null | null |
#! /usr/bin/env python3
import itertools
import typing as tp
if __name__ == '__main__':
main()
| 16.452381
| 62
| 0.51809
|
ab27ed4a158779f6beba16216ad31870fa98bf95
| 11,368
|
py
|
Python
|
setup.py
|
letmaik/lensfunpy
|
ddadb6bfd5f3acde5640210aa9f575501e5c0914
|
[
"MIT"
] | 94
|
2016-08-24T21:52:40.000Z
|
2022-03-05T07:17:21.000Z
|
setup.py
|
letmaik/lensfunpy
|
ddadb6bfd5f3acde5640210aa9f575501e5c0914
|
[
"MIT"
] | 22
|
2016-10-21T07:15:21.000Z
|
2021-09-20T13:41:02.000Z
|
setup.py
|
letmaik/lensfunpy
|
ddadb6bfd5f3acde5640210aa9f575501e5c0914
|
[
"MIT"
] | 11
|
2016-12-12T03:14:07.000Z
|
2021-05-06T17:47:30.000Z
|
from setuptools import setup, Extension, find_packages
import subprocess
import errno
import re
import os
import shutil
import sys
import zipfile
from urllib.request import urlretrieve
import numpy
from Cython.Build import cythonize
isWindows = os.name == 'nt'
isMac = sys.platform == 'darwin'
is64Bit = sys.maxsize > 2**32
# adapted from cffi's setup.py
# the following may be overridden if pkg-config exists
libraries = ['lensfun']
include_dirs = []
library_dirs = []
extra_compile_args = []
extra_link_args = []
if isWindows or isMac:
cmake_build = os.path.abspath('external/lensfun/build')
install_dir = os.path.join(cmake_build, 'install')
include_dirs += [os.path.join(install_dir, 'include', 'lensfun')]
library_dirs += [os.path.join(install_dir, 'lib')]
else:
use_pkg_config()
# this must be after use_pkg_config()!
include_dirs += [numpy.get_include()]
# for version_helper.h
include_dirs += [os.path.abspath('lensfunpy')]
package_data = {'lensfunpy': []}
# evil hack, check cmd line for relevant commands
# custom cmdclasses didn't work out in this case
cmdline = ''.join(sys.argv[1:])
needsCompile = any(s in cmdline for s in ['install', 'bdist', 'build_ext', 'wheel', 'nosetests'])
if isWindows and needsCompile:
windows_lensfun_compile()
package_data['lensfunpy'].append('*.dll')
elif isMac and needsCompile:
mac_lensfun_compile()
if any(s in cmdline for s in ['clean', 'sdist']):
# When running sdist after a previous run of bdist or build_ext
# then even with the 'clean' command the .egg-info folder stays.
# This folder contains SOURCES.txt which in turn is used by sdist
# to include package data files, but we don't want .dll's and .xml
# files in our source distribution. Therefore, to prevent accidents,
# we help a little...
egg_info = 'lensfunpy.egg-info'
print('removing', egg_info)
shutil.rmtree(egg_info, ignore_errors=True)
if 'sdist' not in cmdline:
# This assumes that the lensfun version from external/lensfun was used.
# If that's not the case, the bundled files may fail to load, for example,
# if lensfunpy was linked against an older lensfun version already on
# the system (Linux mostly) and the database format changed in an incompatible way.
# In that case, loading of bundled files can still be disabled
# with Database(load_bundled=False).
package_data['lensfunpy'].append('db_files/*.xml')
bundle_db_files()
# Support for optional Cython line tracing
# run the following to generate a test coverage report:
# $ export LINETRACE=1
# $ python setup.py build_ext --inplace
# $ nosetests --with-coverage --cover-html --cover-package=lensfunpy
compdirectives = {}
macros = []
if (os.environ.get('LINETRACE', False)):
compdirectives['linetrace'] = True
macros.append(('CYTHON_TRACE', '1'))
extensions = cythonize([Extension("lensfunpy._lensfun",
include_dirs=include_dirs,
sources=[os.path.join('lensfunpy', '_lensfun.pyx')],
libraries=libraries,
library_dirs=library_dirs,
extra_compile_args=extra_compile_args,
extra_link_args=extra_link_args,
define_macros=macros
)],
compiler_directives=compdirectives)
# make __version__ available (https://stackoverflow.com/a/16084844)
exec(open('lensfunpy/_version.py').read())
setup(
name = 'lensfunpy',
version = __version__,
description = 'Lens distortion correction for Python, a wrapper for lensfun',
long_description = open('README.rst').read(),
author = 'Maik Riechert',
author_email = 'maik.riechert@arcor.de',
url = 'https://github.com/letmaik/lensfunpy',
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Natural Language :: English',
'License :: OSI Approved :: MIT License',
'Programming Language :: Cython',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Operating System :: MacOS',
'Operating System :: Microsoft :: Windows',
'Operating System :: POSIX',
'Operating System :: Unix',
'Topic :: Multimedia :: Graphics',
'Topic :: Software Development :: Libraries',
],
packages = find_packages(),
ext_modules = extensions,
package_data = package_data,
install_requires=['numpy']
)
| 37.518152
| 125
| 0.616555
|
ab2a38bd32faf647f78849a772f13ad447eb6e18
| 2,144
|
py
|
Python
|
chapter_13/mailtools/__init__.py
|
bimri/programming_python
|
ba52ccd18b9b4e6c5387bf4032f381ae816b5e77
|
[
"MIT"
] | null | null | null |
chapter_13/mailtools/__init__.py
|
bimri/programming_python
|
ba52ccd18b9b4e6c5387bf4032f381ae816b5e77
|
[
"MIT"
] | null | null | null |
chapter_13/mailtools/__init__.py
|
bimri/programming_python
|
ba52ccd18b9b4e6c5387bf4032f381ae816b5e77
|
[
"MIT"
] | null | null | null |
"The mailtools Utility Package"
'Initialization File'
"""
##################################################################################
mailtools package: interface to mail server transfers, used by pymail2, PyMailGUI,
and PyMailCGI; does loads, sends, parsing, composing, and deleting, with part
attachments, encodings (of both the email and Unicdode kind), etc.; the parser,
fetcher, and sender classes here are designed to be mixed-in to subclasses which
use their methods, or used as embedded or standalone objects;
this package also includes convenience subclasses for silent mode, and more;
loads all mail text if pop server doesn't do top; doesn't handle threads or UI
here, and allows askPassword to differ per subclass; progress callback funcs get
status; all calls raise exceptions on error--client must handle in GUI/other;
this changed from file to package: nested modules imported here for bw compat;
4E: need to use package-relative import syntax throughout, because in Py 3.X
package dir in no longer on module import search path if package is imported
elsewhere (from another directory which uses this package); also performs
Unicode decoding on mail text when fetched (see mailFetcher), as well as for
some text part payloads which might have been email-encoded (see mailParser);
TBD: in saveparts, should file be opened in text mode for text/ contypes?
TBD: in walkNamedParts, should we skip oddballs like message/delivery-status?
TBD: Unicode support has not been tested exhaustively: see Chapter 13 for more
on the Py3.1 email package and its limitations, and the policies used here;
##################################################################################
"""
# collect contents of all modules here, when package dir imported directly
from .mailFetcher import *
from .mailSender import * # 4E: package-relative
from .mailParser import *
# export nested modules here, when from mailtools import *
__all__ = 'mailFetcher', 'mailSender', 'mailParser'
# self-test code is in selftest.py to allow mailconfig's path
# to be set before running thr nested module imports above
| 51.047619
| 83
| 0.718284
|
ab2add18b201d727e235b13fba3fa52b34c35680
| 404
|
py
|
Python
|
TreeModelLib/BelowgroundCompetition/__init__.py
|
jvollhueter/pyMANGA-1
|
414204a394d44405225b4b8224b19464c1006f1d
|
[
"MIT"
] | null | null | null |
TreeModelLib/BelowgroundCompetition/__init__.py
|
jvollhueter/pyMANGA-1
|
414204a394d44405225b4b8224b19464c1006f1d
|
[
"MIT"
] | null | null | null |
TreeModelLib/BelowgroundCompetition/__init__.py
|
jvollhueter/pyMANGA-1
|
414204a394d44405225b4b8224b19464c1006f1d
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Thu Nov 8 15:25:03 2018
@author: bathmann
"""
from .BelowgroundCompetition import BelowgroundCompetition
from .SimpleTest import SimpleTest
from .FON import FON
from .OGSWithoutFeedback import OGSWithoutFeedback
from .OGSLargeScale3D import OGSLargeScale3D
from .OGS.helpers import CellInformation
from .FixedSalinity import FixedSalinity
| 25.25
| 58
| 0.799505
|
ab2b3845336cbc9c2cd653a367ec0d03b0cfffa6
| 223
|
py
|
Python
|
server.py
|
SDelhey/websocket-chat
|
c7b83583007a723baee25acedbceddd55c12ffec
|
[
"MIT"
] | null | null | null |
server.py
|
SDelhey/websocket-chat
|
c7b83583007a723baee25acedbceddd55c12ffec
|
[
"MIT"
] | null | null | null |
server.py
|
SDelhey/websocket-chat
|
c7b83583007a723baee25acedbceddd55c12ffec
|
[
"MIT"
] | null | null | null |
from flask import Flask, render_template
from flask_socketio import SocketIO, send, emit
app = Flask(__name__)
app.config['SECRET_KEY'] = 'secret!'
socketio = SocketIO(app)
if __name__ == '__main__':
socketio.run(app)
| 24.777778
| 47
| 0.748879
|
ab2c89bde44269f1533806cfa45910e25d77ed66
| 2,771
|
py
|
Python
|
services/postprocess/src/postprocess.py
|
hadarohana/myCosmos
|
6e4682a2af822eb828180658aaa6d3e304cc85bf
|
[
"Apache-2.0"
] | null | null | null |
services/postprocess/src/postprocess.py
|
hadarohana/myCosmos
|
6e4682a2af822eb828180658aaa6d3e304cc85bf
|
[
"Apache-2.0"
] | 5
|
2020-01-28T23:13:10.000Z
|
2022-02-10T00:28:15.000Z
|
services/postprocess/src/postprocess.py
|
hadarohana/myCosmos
|
6e4682a2af822eb828180658aaa6d3e304cc85bf
|
[
"Apache-2.0"
] | 1
|
2021-03-10T19:25:44.000Z
|
2021-03-10T19:25:44.000Z
|
"""
Post processing on detected objects
"""
import pymongo
from pymongo import MongoClient
import time
import logging
logging.basicConfig(format='%(levelname)s :: %(asctime)s :: %(message)s', level=logging.DEBUG)
from joblib import Parallel, delayed
import click
from xgboost_model.inference import run_inference, PostprocessException
import os
if __name__ == '__main__':
click_wrapper()
| 37.445946
| 111
| 0.616745
|
ab2d00e90fa00656e5b245ed372443c5a0686b39
| 2,619
|
py
|
Python
|
model-optimizer/mo/front/common/partial_infer/multi_box_prior_test.py
|
calvinfeng/openvino
|
11f591c16852637506b1b40d083b450e56d0c8ac
|
[
"Apache-2.0"
] | null | null | null |
model-optimizer/mo/front/common/partial_infer/multi_box_prior_test.py
|
calvinfeng/openvino
|
11f591c16852637506b1b40d083b450e56d0c8ac
|
[
"Apache-2.0"
] | 19
|
2021-03-26T08:11:00.000Z
|
2022-02-21T13:06:26.000Z
|
model-optimizer/mo/front/common/partial_infer/multi_box_prior_test.py
|
calvinfeng/openvino
|
11f591c16852637506b1b40d083b450e56d0c8ac
|
[
"Apache-2.0"
] | 1
|
2021-07-28T17:30:46.000Z
|
2021-07-28T17:30:46.000Z
|
"""
Copyright (C) 2018-2021 Intel Corporation
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import unittest
import numpy as np
from mo.front.common.partial_infer.multi_box_prior import multi_box_prior_infer_mxnet
from mo.graph.graph import Node
from mo.utils.unittest.graph import build_graph
nodes_attributes = {'node_1': {'value': None, 'kind': 'data'},
'node_2': {'value': None, 'kind': 'data'},
'prior_box_1': {'type': 'PriorBox', 'kind': 'op'},
'node_3': {'type': 'Identity', 'value': None, 'kind': 'data'}
}
| 44.389831
| 112
| 0.583047
|
ab2d830b247e5d1c87b1cc476939c72b7371cdbc
| 10,997
|
py
|
Python
|
bin/mem_monitor.py
|
Samahu/ros-system-monitor
|
5376eba046ac38cfe8fe9ff8b385fa2637015eda
|
[
"BSD-3-Clause"
] | 68
|
2016-02-07T00:35:25.000Z
|
2022-03-22T11:14:16.000Z
|
bin/mem_monitor.py
|
Samahu/ros-system-monitor
|
5376eba046ac38cfe8fe9ff8b385fa2637015eda
|
[
"BSD-3-Clause"
] | 5
|
2016-04-12T14:29:51.000Z
|
2021-08-04T12:55:59.000Z
|
bin/mem_monitor.py
|
Samahu/ros-system-monitor
|
5376eba046ac38cfe8fe9ff8b385fa2637015eda
|
[
"BSD-3-Clause"
] | 62
|
2015-08-09T23:17:16.000Z
|
2022-02-11T18:24:30.000Z
|
#!/usr/bin/env python
############################################################################
# Copyright (C) 2009, Willow Garage, Inc. #
# Copyright (C) 2013 by Ralf Kaestner #
# ralf.kaestner@gmail.com #
# Copyright (C) 2013 by Jerome Maye #
# jerome.maye@mavt.ethz.ch #
# #
# All rights reserved. #
# #
# Redistribution and use in source and binary forms, with or without #
# modification, are permitted provided that the following conditions #
# are met: #
# #
# 1. Redistributions of source code must retain the above copyright #
# notice, this list of conditions and the following disclaimer. #
# #
# 2. Redistributions in binary form must reproduce the above copyright #
# notice, this list of conditions and the following disclaimer in #
# the documentation and/or other materials provided with the #
# distribution. #
# #
# 3. The name of the copyright holders may be used to endorse or #
# promote products derived from this software without specific #
# prior written permission. #
# #
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS #
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT #
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS #
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE #
# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, #
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, #
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; #
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER #
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT #
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN #
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE #
# POSSIBILITY OF SUCH DAMAGE. #
############################################################################
from __future__ import with_statement
import rospy
import traceback
import threading
from threading import Timer
import sys, os, time
from time import sleep
import subprocess
import string
import socket
from diagnostic_msgs.msg import DiagnosticArray, DiagnosticStatus, KeyValue
mem_level_warn = 0.95
mem_level_error = 0.99
stat_dict = { 0: 'OK', 1: 'Warning', 2: 'Error' }
if __name__ == '__main__':
hostname = socket.gethostname()
hostname = hostname.replace('-', '_')
import optparse
parser = optparse.OptionParser(usage="usage: mem_monitor.py [--diag-hostname=cX]")
parser.add_option("--diag-hostname", dest="diag_hostname",
help="Computer name in diagnostics output (ex: 'c1')",
metavar="DIAG_HOSTNAME",
action="store", default = hostname)
options, args = parser.parse_args(rospy.myargv())
try:
rospy.init_node('mem_monitor_%s' % hostname)
except rospy.exceptions.ROSInitException:
print >> sys.stderr, 'Memory monitor is unable to initialize node. Master may not be running.'
sys.exit(0)
mem_node = MemMonitor(hostname, options.diag_hostname)
rate = rospy.Rate(1.0)
try:
while not rospy.is_shutdown():
rate.sleep()
mem_node.publish_stats()
except KeyboardInterrupt:
pass
except Exception, e:
traceback.print_exc()
rospy.logerr(traceback.format_exc())
mem_node.cancel_timers()
sys.exit(0)
| 41.973282
| 102
| 0.555606
|
ab2dd4e23245d0ab9d1e255dfa3fc732936ba5f1
| 4,557
|
py
|
Python
|
cmake/utils/gen-ninja-deps.py
|
stamhe/bitcoin-abc
|
a1ba303c6b4f164ae94612e83b824e564405a96e
|
[
"MIT"
] | 1,266
|
2017-05-02T07:02:29.000Z
|
2022-03-31T17:15:44.000Z
|
cmake/utils/gen-ninja-deps.py
|
EGYVOICE/bitcoin-abc-avalanche
|
e0f1fe857e1fc85f01903f1c323c2d5c54aecc1c
|
[
"MIT"
] | 426
|
2017-05-07T12:40:52.000Z
|
2022-03-29T18:12:01.000Z
|
cmake/utils/gen-ninja-deps.py
|
EGYVOICE/bitcoin-abc-avalanche
|
e0f1fe857e1fc85f01903f1c323c2d5c54aecc1c
|
[
"MIT"
] | 721
|
2017-05-07T10:36:11.000Z
|
2022-03-15T09:07:48.000Z
|
#!/usr/bin/env python3
import argparse
import os
import subprocess
parser = argparse.ArgumentParser(description='Produce a dep file from ninja.')
parser.add_argument(
'--build-dir',
help='The build directory.',
required=True)
parser.add_argument(
'--base-dir',
help='The directory for which dependencies are rewriten.',
required=True)
parser.add_argument('--ninja', help='The ninja executable to use.')
parser.add_argument(
'base_target',
help="The target from the base's perspective.")
parser.add_argument(
'targets', nargs='+',
help='The target for which dependencies are extracted.')
parser.add_argument(
'--extra-deps', nargs='+',
help='Extra dependencies.')
args = parser.parse_args()
build_dir = os.path.abspath(args.build_dir)
base_dir = os.path.abspath(args.base_dir)
ninja = args.ninja
base_target = args.base_target
targets = args.targets
extra_deps = args.extra_deps
# Make sure we operate in the right folder.
os.chdir(build_dir)
if ninja is None:
ninja = subprocess.check_output(['command', '-v', 'ninja'])[:-1]
# Construct the set of all targets
all_targets = set()
doto_targets = set()
for t in subprocess.check_output([ninja, '-t', 'targets', 'all']).splitlines():
t, r = t.split(b':')
all_targets.add(t)
if r[:13] == b' C_COMPILER__' or r[:15] == b' CXX_COMPILER__':
doto_targets.add(t)
base_dir = base_dir.encode()
deps = extract_deps(set(targets))
deps = rebase_deps(deps)
# Collapse everything under the base target.
basedeps = set() if extra_deps is None else set(d.encode() for d in extra_deps)
for d in deps.values():
basedeps.update(d)
base_target = base_target.encode()
basedeps.discard(base_target)
dump({base_target: basedeps})
| 25.038462
| 79
| 0.577354
|
ab30352abcf50690534a3f85202149cd132e631c
| 46
|
py
|
Python
|
src/webpy1/src/manage/checkPic.py
|
ptphp/PyLib
|
07ac99cf2deb725475f5771b123b9ea1375f5e65
|
[
"Apache-2.0"
] | 1
|
2020-02-17T08:18:29.000Z
|
2020-02-17T08:18:29.000Z
|
src/webpy1/src/manage/checkPic.py
|
ptphp/PyLib
|
07ac99cf2deb725475f5771b123b9ea1375f5e65
|
[
"Apache-2.0"
] | null | null | null |
src/webpy1/src/manage/checkPic.py
|
ptphp/PyLib
|
07ac99cf2deb725475f5771b123b9ea1375f5e65
|
[
"Apache-2.0"
] | null | null | null |
'''
Created on 2011-6-22
@author: dholer
'''
| 7.666667
| 20
| 0.608696
|
ab30973f8a964fee614a5ec7df1f83c6a91d145f
| 122
|
py
|
Python
|
tests/__init__.py
|
coleb/sendoff
|
fc1b38ba7571254a88ca457f6f618ae4572f30b6
|
[
"MIT"
] | 2
|
2021-09-28T09:53:53.000Z
|
2021-10-01T17:45:29.000Z
|
tests/__init__.py
|
coleb/sendoff
|
fc1b38ba7571254a88ca457f6f618ae4572f30b6
|
[
"MIT"
] | 10
|
2021-09-17T22:14:37.000Z
|
2022-03-21T16:25:39.000Z
|
tests/__init__.py
|
coleb/sendoff
|
fc1b38ba7571254a88ca457f6f618ae4572f30b6
|
[
"MIT"
] | 1
|
2021-09-27T15:55:40.000Z
|
2021-09-27T15:55:40.000Z
|
"""Tests for the `sendoff` library."""
"""
The `sendoff` library tests validate the expected function of the library.
"""
| 24.4
| 74
| 0.704918
|
ab30b98300e549b0e8401f690d6ee36c03180fdb
| 2,493
|
py
|
Python
|
sysinv/sysinv/sysinv/sysinv/helm/garbd.py
|
Wind-River/starlingx-config
|
96b92e5179d54dde10cb84c943eb239adf26b958
|
[
"Apache-2.0"
] | null | null | null |
sysinv/sysinv/sysinv/sysinv/helm/garbd.py
|
Wind-River/starlingx-config
|
96b92e5179d54dde10cb84c943eb239adf26b958
|
[
"Apache-2.0"
] | null | null | null |
sysinv/sysinv/sysinv/sysinv/helm/garbd.py
|
Wind-River/starlingx-config
|
96b92e5179d54dde10cb84c943eb239adf26b958
|
[
"Apache-2.0"
] | null | null | null |
#
# Copyright (c) 2018-2019 Wind River Systems, Inc.
#
# SPDX-License-Identifier: Apache-2.0
#
from sysinv.common import constants
from sysinv.common import exception
from sysinv.common import utils
from sysinv.helm import common
from sysinv.helm import base
| 37.208955
| 80
| 0.649017
|
ab313db7c6b7e6135aaa8212f15c08dfe29e2372
| 1,280
|
py
|
Python
|
dataloader/frame_counter/frame_counter.py
|
aaron-zou/pretraining-twostream
|
5aa2f4bafb731e61f8f671e2500a6dfa8436be57
|
[
"MIT"
] | null | null | null |
dataloader/frame_counter/frame_counter.py
|
aaron-zou/pretraining-twostream
|
5aa2f4bafb731e61f8f671e2500a6dfa8436be57
|
[
"MIT"
] | null | null | null |
dataloader/frame_counter/frame_counter.py
|
aaron-zou/pretraining-twostream
|
5aa2f4bafb731e61f8f671e2500a6dfa8436be57
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
"""Generate frame counts dict for a dataset.
Usage:
frame_counter.py [options]
Options:
-h, --help Print help message
--root=<str> Path to root of dataset (should contain video folders that contain images)
[default: /vision/vision_users/azou/data/hmdb51_flow/u/]
--output=<str> Output filename [default: hmdb_frame_count.pickle]
"""
from __future__ import print_function
from docopt import docopt
import os
import sys
import pickle
if __name__ == '__main__':
args = docopt(__doc__)
print(args)
# Final counts
counts = {}
min_count = sys.maxint
# Generate list of video folders
for root, dirs, files in os.walk(args['--root']):
# Skip the root directory
if len(dirs) != 0:
continue
# Process a directory and frame count into a dictionary entry
name = os.path.basename(os.path.normpath(root))
print('{}: {} frames'.format(name, len(files)))
counts[name] = len(files)
# Track minimum count
if len(files) < min_count:
min_count = len(files)
with open(args['--output'], 'wb') as ofile:
pickle.dump(counts, ofile)
print('Minimum frame count = {}'.format(min_count))
| 27.826087
| 102
| 0.625
|
ab32101612714ab2b6b04c378a7a5646daa96906
| 155
|
py
|
Python
|
Problem_30/main.py
|
jdalzatec/EulerProject
|
2f2f4d9c009be7fd63bb229bb437ea75db77d891
|
[
"MIT"
] | 1
|
2022-03-28T05:32:58.000Z
|
2022-03-28T05:32:58.000Z
|
Problem_30/main.py
|
jdalzatec/EulerProject
|
2f2f4d9c009be7fd63bb229bb437ea75db77d891
|
[
"MIT"
] | null | null | null |
Problem_30/main.py
|
jdalzatec/EulerProject
|
2f2f4d9c009be7fd63bb229bb437ea75db77d891
|
[
"MIT"
] | null | null | null |
total = 0
for n in range(1000, 1000000):
suma = 0
for i in str(n):
suma += int(i)**5
if (n == suma):
total += n
print(total)
| 14.090909
| 30
| 0.483871
|
ab33de96dbc34b33ac4aed99648c2c63749addef
| 8,913
|
py
|
Python
|
armi/physics/fuelCycle/settings.py
|
celikten/armi
|
4e100dd514a59caa9c502bd5a0967fd77fdaf00e
|
[
"Apache-2.0"
] | 1
|
2021-05-29T16:02:31.000Z
|
2021-05-29T16:02:31.000Z
|
armi/physics/fuelCycle/settings.py
|
celikten/armi
|
4e100dd514a59caa9c502bd5a0967fd77fdaf00e
|
[
"Apache-2.0"
] | null | null | null |
armi/physics/fuelCycle/settings.py
|
celikten/armi
|
4e100dd514a59caa9c502bd5a0967fd77fdaf00e
|
[
"Apache-2.0"
] | null | null | null |
"""Settings for generic fuel cycle code."""
import re
import os
from armi.settings import setting
from armi.operators import settingsValidation
CONF_ASSEMBLY_ROTATION_ALG = "assemblyRotationAlgorithm"
CONF_ASSEM_ROTATION_STATIONARY = "assemblyRotationStationary"
CONF_CIRCULAR_RING_MODE = "circularRingMode"
CONF_CIRCULAR_RING_ORDER = "circularRingOrder"
CONF_CUSTOM_FUEL_MANAGEMENT_INDEX = "customFuelManagementIndex"
CONF_RUN_LATTICE_BEFORE_SHUFFLING = "runLatticePhysicsBeforeShuffling"
CONF_SHUFFLE_LOGIC = "shuffleLogic"
CONF_PLOT_SHUFFLE_ARROWS = "plotShuffleArrows"
CONF_FUEL_HANDLER_NAME = "fuelHandlerName"
CONF_JUMP_RING_NUM = "jumpRingNum"
CONF_LEVELS_PER_CASCADE = "levelsPerCascade"
def getFuelCycleSettings():
"""Define settings for fuel cycle."""
settings = [
setting.Setting(
CONF_ASSEMBLY_ROTATION_ALG,
default="",
label="Assembly Rotation Algorithm",
description="The algorithm to use to rotate the detail assemblies while shuffling",
options=["", "buReducingAssemblyRotation", "simpleAssemblyRotation"],
enforcedOptions=True,
),
setting.Setting(
CONF_ASSEM_ROTATION_STATIONARY,
default=False,
label="Rotate stationary assems",
description=(
"Whether or not to rotate assemblies that are not shuffled."
"This can only be True if 'rotation' is true."
),
),
setting.Setting(
CONF_CIRCULAR_RING_MODE,
default=False,
description="Toggle between circular ring definitions to hexagonal ring definitions",
label="Use Circular Rings",
),
setting.Setting(
CONF_CIRCULAR_RING_ORDER,
default="angle",
description="Order by which locations are sorted in circular rings for equilibrium shuffling",
label="Eq. circular sort type",
options=["angle", "distance", "distanceSmart"],
),
setting.Setting(
CONF_CUSTOM_FUEL_MANAGEMENT_INDEX,
default=0,
description=(
"An index that determines which of various options is used in management. "
"Useful for optimization sweeps. "
),
label="Custom Shuffling Index",
),
setting.Setting(
CONF_RUN_LATTICE_BEFORE_SHUFFLING,
default=False,
description=(
"Forces the Generation of Cross Sections Prior to Shuffling the Fuel Assemblies. "
"Note: This is recommended when performing equilibrium shuffling branching searches."
),
label="Generate XS Prior to Fuel Shuffling",
),
setting.Setting(
CONF_SHUFFLE_LOGIC,
default="",
label="Shuffle Logic",
description=(
"Python script written to handle the fuel shuffling for this case. "
"This is user-defined per run as a dynamic input."
),
# schema here could check if file exists, but this is a bit constraining in testing.
# For example, some tests have relative paths for this but aren't running in
# the right directory, and IsFile doesn't seem to work well with relative paths.
# This is left here as an FYI about how we could check existence of files if we get
# around these problem.
# schema=vol.All(
# vol.IsFile(), # pylint: disable=no-value-for-parameter
# msg="Shuffle logic input must be an existing file",
# ),
),
setting.Setting(
CONF_FUEL_HANDLER_NAME,
default="",
label="Fuel Handler Name",
description="The name of the FuelHandler class in the shuffle logic module to activate",
),
setting.Setting(
CONF_PLOT_SHUFFLE_ARROWS,
default=False,
description="Make plots with arrows showing each move.",
label="Plot shuffle arrows",
),
setting.Setting(
CONF_JUMP_RING_NUM, default=8, label="Jump Ring Number", description="None"
),
setting.Setting(
CONF_LEVELS_PER_CASCADE,
default=14,
label="Move per cascade",
description="None",
),
]
return settings
| 39.092105
| 106
| 0.574554
|
ab36c71fdbd365804953a57202728144c1db7c55
| 628
|
py
|
Python
|
nl/predictor.py
|
jclosure/donkus
|
b3384447094b2ecbaff5ee9d970818313b6ee8b0
|
[
"MIT"
] | 1
|
2015-01-16T01:04:39.000Z
|
2015-01-16T01:04:39.000Z
|
nl/predictor.py
|
jclosure/donkus
|
b3384447094b2ecbaff5ee9d970818313b6ee8b0
|
[
"MIT"
] | null | null | null |
nl/predictor.py
|
jclosure/donkus
|
b3384447094b2ecbaff5ee9d970818313b6ee8b0
|
[
"MIT"
] | null | null | null |
from nltk.corpus import gutenberg
from nltk import ConditionalFreqDist
from random import choice
#create the distribution object
cfd = ConditionalFreqDist()
## for each token count the current word given the previous word
prev_word = None
for word in gutenberg.words('austen-persuasion.txt'):
cfd[prev_word][word] += 1
prev_word = word
## start predicting at given word, say "therefore"
word = "therefore"
i = 1
## find all words that can follow the given word and choose one at random
while i<20:
print word,
lwords = cfd.get(word).keys()
follower = choice(lwords)
word = follower
i += 1
| 22.428571
| 73
| 0.716561
|
ab36d85f43aaa15a75066ac03e9bf9c7135fde35
| 893
|
py
|
Python
|
eve/workers/pykmip/bin/run_server.py
|
mmg-3/cloudserver
|
9ff6364b2ed4f33a5135d86311a72de4caff51c1
|
[
"Apache-2.0"
] | 762
|
2016-06-08T19:51:51.000Z
|
2018-08-02T09:05:38.000Z
|
eve/workers/pykmip/bin/run_server.py
|
mmg-3/cloudserver
|
9ff6364b2ed4f33a5135d86311a72de4caff51c1
|
[
"Apache-2.0"
] | 1,252
|
2018-08-03T18:27:31.000Z
|
2022-03-31T20:22:55.000Z
|
eve/workers/pykmip/bin/run_server.py
|
mmg-3/cloudserver
|
9ff6364b2ed4f33a5135d86311a72de4caff51c1
|
[
"Apache-2.0"
] | 170
|
2016-06-14T17:09:39.000Z
|
2018-06-30T12:48:19.000Z
|
#!/usr/bin/env python
# Copyright (c) 2016 The Johns Hopkins University/Applied Physics Laboratory
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import logging # noqa: E402
logging.basicConfig(level=logging.DEBUG)
from kmip.services.server import server # noqa: E402
if __name__ == '__main__':
print('Starting PyKMIP server on 0.0.0.0:5696')
server.main()
| 33.074074
| 76
| 0.75252
|
ab37e16ef4016e52fa0dab454a286037abc7c623
| 889
|
py
|
Python
|
tests/test_tempo_event.py
|
yokaze/crest-python
|
c246b16ade6fd706f0e18aae797660064bddd555
|
[
"MIT"
] | null | null | null |
tests/test_tempo_event.py
|
yokaze/crest-python
|
c246b16ade6fd706f0e18aae797660064bddd555
|
[
"MIT"
] | null | null | null |
tests/test_tempo_event.py
|
yokaze/crest-python
|
c246b16ade6fd706f0e18aae797660064bddd555
|
[
"MIT"
] | null | null | null |
#
# test_tempo_event.py
# crest-python
#
# Copyright (C) 2017 Rue Yokaze
# Distributed under the MIT License.
#
import crest_loader
import unittest
from crest.events.meta import TempoEvent
if (__name__ == '__main__'):
unittest.main()
| 24.694444
| 75
| 0.662542
|
ab386b22c54d5affe44957aab20ea4dc3913a866
| 1,159
|
py
|
Python
|
tensorflow_quantum/python/differentiators/__init__.py
|
PyJedi/quantum
|
3f4a3c320e048b8a8faf3a10339975d2d5366fb6
|
[
"Apache-2.0"
] | 1,501
|
2020-03-09T00:40:31.000Z
|
2022-03-28T19:59:57.000Z
|
tensorflow_quantum/python/differentiators/__init__.py
|
tappinganalytics/quantum
|
ef3a34341d997d485a7e43335a8ed61a8e7c6ea6
|
[
"Apache-2.0"
] | 381
|
2020-03-09T18:31:04.000Z
|
2022-03-28T18:47:32.000Z
|
tensorflow_quantum/python/differentiators/__init__.py
|
tappinganalytics/quantum
|
ef3a34341d997d485a7e43335a8ed61a8e7c6ea6
|
[
"Apache-2.0"
] | 410
|
2020-03-09T03:05:48.000Z
|
2022-03-31T12:08:14.000Z
|
# Copyright 2020 The TensorFlow Quantum Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Module functions for tfq.differentiators.*"""
from tensorflow_quantum.python.differentiators.adjoint import (
Adjoint,)
from tensorflow_quantum.python.differentiators.linear_combination import (
ForwardDifference,
CentralDifference,
LinearCombination,
)
from tensorflow_quantum.python.differentiators.parameter_shift import (
ParameterShift,)
from tensorflow_quantum.python.differentiators.differentiator import (
Differentiator,)
| 37.387097
| 80
| 0.729077
|
ab38ae2d8c17a7a5df07314f47034bda8a636085
| 3,845
|
py
|
Python
|
tests/test_color_background.py
|
erykoff/redmapper
|
23fb66c7369de784c67ce6c41ada2f1f51a84acb
|
[
"Apache-2.0"
] | 17
|
2016-03-06T07:51:02.000Z
|
2022-02-03T15:17:26.000Z
|
tests/test_color_background.py
|
erykoff/redmapper
|
23fb66c7369de784c67ce6c41ada2f1f51a84acb
|
[
"Apache-2.0"
] | 42
|
2016-07-27T20:48:20.000Z
|
2022-01-31T20:47:51.000Z
|
tests/test_color_background.py
|
erykoff/redmapper
|
23fb66c7369de784c67ce6c41ada2f1f51a84acb
|
[
"Apache-2.0"
] | 8
|
2017-01-26T01:38:41.000Z
|
2020-11-14T07:41:53.000Z
|
import unittest
import numpy.testing as testing
import numpy as np
import fitsio
import tempfile
import os
from redmapper import ColorBackground
from redmapper import ColorBackgroundGenerator
from redmapper import Configuration
if __name__=='__main__':
unittest.main()
| 34.954545
| 98
| 0.635371
|
ab392fd8e80c256d42ff5f34b47b1e8775e1c4cb
| 33,538
|
py
|
Python
|
src/metpy/calc/basic.py
|
Exi666/MetPy
|
c3cf8b9855e0ce7c14347e9d000fc3d531a18e1c
|
[
"BSD-3-Clause"
] | null | null | null |
src/metpy/calc/basic.py
|
Exi666/MetPy
|
c3cf8b9855e0ce7c14347e9d000fc3d531a18e1c
|
[
"BSD-3-Clause"
] | null | null | null |
src/metpy/calc/basic.py
|
Exi666/MetPy
|
c3cf8b9855e0ce7c14347e9d000fc3d531a18e1c
|
[
"BSD-3-Clause"
] | null | null | null |
# Copyright (c) 2008,2015,2016,2017,2018,2019 MetPy Developers.
# Distributed under the terms of the BSD 3-Clause License.
# SPDX-License-Identifier: BSD-3-Clause
"""Contains a collection of basic calculations.
These include:
* wind components
* heat index
* windchill
"""
import warnings
import numpy as np
from scipy.ndimage import gaussian_filter
from .. import constants as mpconsts
from ..package_tools import Exporter
from ..units import atleast_1d, check_units, masked_array, units
from ..xarray import preprocess_xarray
exporter = Exporter(globals())
# The following variables are constants for a standard atmosphere
t0 = 288. * units.kelvin
p0 = 1013.25 * units.hPa
def _check_radians(value, max_radians=2 * np.pi):
"""Input validation of values that could be in degrees instead of radians.
Parameters
----------
value : `pint.Quantity`
The input value to check.
max_radians : float
Maximum absolute value of radians before warning.
Returns
-------
`pint.Quantity`
The input value
"""
try:
value = value.to('radians').m
except AttributeError:
pass
if np.greater(np.nanmax(np.abs(value)), max_radians):
warnings.warn('Input over {} radians. '
'Ensure proper units are given.'.format(max_radians))
return value
| 33.945344
| 94
| 0.653706
|
ab3a2dd3958eeed0683e138275086ab9243b7a2e
| 1,525
|
py
|
Python
|
burl/core/api/views.py
|
wryfi/burl
|
664878ce9a31695456be89c8e10e8bb612074ef6
|
[
"MIT"
] | 1
|
2021-02-07T21:48:59.000Z
|
2021-02-07T21:48:59.000Z
|
burl/core/api/views.py
|
wryfi/burl
|
664878ce9a31695456be89c8e10e8bb612074ef6
|
[
"MIT"
] | 16
|
2020-03-24T16:53:30.000Z
|
2022-03-15T17:46:59.000Z
|
burl/core/api/views.py
|
wryfi/burl
|
664878ce9a31695456be89c8e10e8bb612074ef6
|
[
"MIT"
] | null | null | null |
from rest_framework.response import Response
from rest_framework.decorators import api_view
from rest_framework.reverse import reverse
from rest_framework_simplejwt.tokens import RefreshToken
| 29.901961
| 92
| 0.688525
|
ab3b3b45e085df872dae2584e5093ac10afc0c16
| 1,296
|
py
|
Python
|
ITmeetups_back/api/serializers.py
|
RomulusGwelt/AngularProject
|
acc7083f30b1edf002da8d156be023d2432a05e4
|
[
"MIT"
] | 3
|
2019-02-28T15:20:24.000Z
|
2019-03-02T18:33:23.000Z
|
ITmeetups_back/api/serializers.py
|
RomulusGwelt/AngularProject
|
acc7083f30b1edf002da8d156be023d2432a05e4
|
[
"MIT"
] | 7
|
2020-07-17T01:18:53.000Z
|
2022-02-17T22:49:16.000Z
|
ITmeetups_back/api/serializers.py
|
RomulusGwelt/AngularProject
|
acc7083f30b1edf002da8d156be023d2432a05e4
|
[
"MIT"
] | 1
|
2019-03-03T02:45:55.000Z
|
2019-03-03T02:45:55.000Z
|
from rest_framework import serializers
from .models import Post, Comment, Like
from django.contrib.auth.models import User
| 24
| 62
| 0.655864
|
ab3e250f158b4ed0173fe7715ee2559fe186d522
| 1,879
|
py
|
Python
|
qurator/sbb_ned/embeddings/bert.py
|
qurator-spk/sbb_ned
|
d4cfe249f72e48913f254a58fbe0dbe6e47bd168
|
[
"Apache-2.0"
] | 6
|
2020-09-05T16:08:59.000Z
|
2022-03-05T00:54:47.000Z
|
qurator/sbb_ned/embeddings/bert.py
|
qurator-spk/sbb_ned
|
d4cfe249f72e48913f254a58fbe0dbe6e47bd168
|
[
"Apache-2.0"
] | 6
|
2020-09-23T17:58:37.000Z
|
2022-03-10T14:02:09.000Z
|
qurator/sbb_ned/embeddings/bert.py
|
qurator-spk/sbb_ned
|
d4cfe249f72e48913f254a58fbe0dbe6e47bd168
|
[
"Apache-2.0"
] | 2
|
2021-03-22T00:12:51.000Z
|
2022-01-31T10:04:08.000Z
|
from ..embeddings.base import Embeddings
from flair.data import Sentence
| 29.359375
| 122
| 0.585418
|
ab3e8587e5794a9e3b079ba7743bbea191efe88d
| 66,791
|
py
|
Python
|
Arbitrage_Future/Arbitrage_Future/test.py
|
ronaldzgithub/CryptoArbitrage
|
b4b7a12b7b11f3dcf950f9d2039dad4f1388530b
|
[
"MIT"
] | 1
|
2021-11-03T06:16:16.000Z
|
2021-11-03T06:16:16.000Z
|
Arbitrage_Future/Arbitrage_Future/test.py
|
benno0810/CryptoArbitrage
|
b4b7a12b7b11f3dcf950f9d2039dad4f1388530b
|
[
"MIT"
] | null | null | null |
Arbitrage_Future/Arbitrage_Future/test.py
|
benno0810/CryptoArbitrage
|
b4b7a12b7b11f3dcf950f9d2039dad4f1388530b
|
[
"MIT"
] | 2
|
2021-05-07T09:11:54.000Z
|
2021-11-27T16:29:10.000Z
|
# !/usr/local/bin/python
# -*- coding:utf-8 -*-
import YunBi
import CNBTC
import json
import threading
import Queue
import time
import logging
import numpy
import message
import random
open_platform = [True,True,True,True]
numpy.set_printoptions(suppress=True)
# logging.basicConfig(level=logging.DEBUG,
# format="[%(asctime)20s] [%(levelname)8s] %(filename)10s:%(lineno)-5s --- %(message)s",
# datefmt="%Y-%m-%d %H:%M:%S",
# filename="log/%s.log"%time.strftime('%Y-%m-%d %H:%M:%S',time.localtime(time.time())),
# filemode='w')
# console = logging.StreamHandler()
# console.setLevel(logging.INFO)
# formatter = logging.Formatter("[%(asctime)20s] [%(levelname)8s] %(filename)10s:%(lineno)-5s --- %(message)s", "%Y-%m-%d %H:%M:%S")
# console.setFormatter(formatter)
# logging.getLogger('').addHandler(console)
coin_status = [-1,-1,-1,-1]
money_status = [-1,-1,-1,-1]
history = open("log/historyPrice_%s.txt"%time.strftime('%Y_%m_%d_%H_%M_%S', time.localtime(time.time())),"a")
# output = open("journalist.txt",'a')
balance = open("log/balance%s.txt"%time.strftime('%Y_%m_%d %H_%M_%S', time.localtime(time.time())),'a')
ybQue1 = Queue.Queue()
ybQue2 = Queue.Queue()
hbQue1 = Queue.Queue()
hbQue2 = Queue.Queue()
okcQue1 = Queue.Queue()
okcQue2 = Queue.Queue()
cnbtcQue1 = Queue.Queue()
cnbtcQue2 = Queue.Queue()
ybTradeQue1 = Queue.Queue()
ybTradeQue2 = Queue.Queue()
cnbtcTradeQue1 = Queue.Queue()
cnbtcTradeQue2 = Queue.Queue()
hbTradeQue1 = Queue.Queue()
hbTradeQue2 = Queue.Queue()
okcTradeQue1 = Queue.Queue()
okcTradeQue2 = Queue.Queue()
ybAccountQue1 = Queue.Queue()
ybAccountQue2 = Queue.Queue()
cnbtcAccountQue1 = Queue.Queue()
cnbtcAccountQue2 = Queue.Queue()
hbAccountQue1 = Queue.Queue()
hbAccountQue2 = Queue.Queue()
okcAccountQue1 = Queue.Queue()
okcAccountQue2 = Queue.Queue()
alertQue = Queue.Queue()
total_trade_coin = 0
delay_time = 0.2
config = json.load(open("config.json","r"))
#####max coin # in each trade
maxTradeLimitation = float(config["MaxCoinTradeLimitation"])
tel_list = config["tel"]
# maxTradeLimitation_yb_buy_cnbtc_sell = float(config["MaxCoinTradeLimitation_yb_buy_cnbtc_sell"])
# maxTradeLimitation_yb_buy_hb_sell = float(config["MaxCoinTradeLimitation_yb_buy_hb_sell"])
# maxTradeLimitation_yb_sell_hb_buy = float(config["MaxCoinTradeLimitation_yb_sell_hb_buy"])
# maxTradeLimitation_hb_buy_cnbtc_sell = float(config["MaxCoinTradeLimitation_hb_buy_cnbtc_sell"])
# maxTradeLimitation_hb_sell_cnbtc_buy = float(config["MaxCoinTradeLimitation_hb_sell_cnbtc_buy"])
#####max coin # for each account
maxCoin = float(config["MaxCoinLimitation"])
#####if spread over this threshold, we trade
max_thres_limitation = float(config["max_thres_limitation"])
spread_threshold_yb_sell_cnbtc_buy = float(config["spread_threshold_yb_sell_cnbtc_buy"])
spread_threshold_yb_buy_cnbtc_sell = float(config["spread_threshold_yb_buy_cnbtc_sell"])
spread_threshold_yb_buy_hb_sell = float(config["spread_threshold_yb_buy_hb_sell"])
spread_threshold_yb_sell_hb_buy = float(config["spread_threshold_yb_sell_hb_buy"])
spread_threshold_hb_buy_cnbtc_sell = float(config["spread_threshold_hb_buy_cnbtc_sell"])
spread_threshold_hb_sell_cnbtc_buy = float(config["spread_threshold_hb_sell_cnbtc_buy"])
random_range = float(config["RandomRange"])
spread_threshold_yb_sell_okc_buy = float(config["spread_threshold_yb_sell_okc_buy"])
spread_threshold_yb_buy_okc_sell = float(config["spread_threshold_yb_buy_okc_sell"])
spread_threshold_okc_buy_hb_sell = float(config["spread_threshold_okc_buy_hb_sell"])
spread_threshold_okc_sell_hb_buy = float(config["spread_threshold_okc_sell_hb_buy"])
spread_threshold_okc_buy_cnbtc_sell = float(config["spread_threshold_okc_buy_cnbtc_sell"])
spread_threshold_okc_sell_cnbtc_buy = float(config["spread_threshold_okc_sell_cnbtc_buy"])
max_diff_thres = float(config["max_diff_thres"])
#######if coin # is lower than alert thres, it will increase the thres
alert_thres_coin = float(config["alert_thres_coin"])
alert_thres_money = float(config["alert_thres_money"])
thres_coin = float(config["thres_coin"])
thres_money = float(config["thres_money"])
#######max thres increase is slop*alert_thres
slope = float(config["alert_slope"])
# print max_diff_thres,alert_thres,slope
# spread_threshold = float(config["spread_threshold"])
# spread_threshold_minor = float(config["spread_threshold_minor"])
#####if we start a trade, we will accept all trade until spread reach lowest spread threshold, after that, we cancel all trade
lowest_spread_threshold = float(config["lowest_spread_threshold"])
trade_multiplier_ratio = float(config["TradeMultiplyRatio"])
# lowest_spread_threshold_minor = float(config["lowest_spread_threshold_minor"])
#####the trade price is max trade limitation*trade ratio behind the min/max price of ask/bid
trade_ratio = float(config["TradeAdvanceRatio"])
# trade_ratio_minor = float(config["TradeAdvanceRatio_minor"])
#####slippage
slippage = float(config["slippage"])
tmpThres = maxTradeLimitation*trade_ratio
# tmpThres_minor = maxTradeLimitation_minor*trade_ratio
offset_player = int(config["offset_player"])
# offset_player_minor = int(config["offset_player_minor"])
offset_coin = float(config["offset_coin"])
# offset_coin_minor = float(config["offset_coin_minor"])
########return 0 accumulate amount
########return 1 price
########return 2 list
#######tradeque1[0]:obj
#######tradeque1[1]:buy or sell
#######tradeque1[2]:amount
#######tradeque1[3]:price
#######tradeque1[4]:limit_price
import sys
import numpy.matlib
import HuoBi
import OKCoin
open_okc = open_platform[3]
open_yb = open_platform[1]
open_cnbtc = open_platform[0]
open_hb = open_platform[2]
if open_yb:
yb = YunBi.Yunbi(config,"LiChen")
print yb.get_account()
else:
yb = None
# import gzip
# from StringIO import StringIO
#
# buf = StringIO(acc["name"])
# f = gzip.GzipFile(fileobj=buf)
# print f.read()
# sss = acc["name"].encode("raw_unicode_escape").decode()
# print ss
# logging.info("YB Account "+json.dumps(yb.get_account(),ensure_ascii=False))
if open_cnbtc:
cnbtc = CNBTC.CNBTC(config)
print("cnbtc Account "+str(cnbtc.get_account()))
else:
cnbtc = None
if open_hb:
hb = HuoBi.HuoBi(config)
print("HB Account "+str(hb.get_account()))
else:
hb = None
if open_okc:
okc = OKCoin.OKCoin(config)
print("OKCoin Account "+str(okc.get_account()))
okc_thread = threading.Thread(target=okcRun)
okc_thread.setDaemon(True)
okc_thread.start()
else:
okc = None
if open_yb:
yb_thread = threading.Thread(target=ybRun)
yb_thread.setDaemon(True)
yb_thread.start()
if open_cnbtc:
cnbtc_thread = threading.Thread(target=cnbtcRun)
cnbtc_thread.setDaemon(True)
cnbtc_thread.start()
if open_hb:
hb_thread = threading.Thread(target=hbRun)
hb_thread.setDaemon(True)
hb_thread.start()
if open_okc:
okc_trade_thread = threading.Thread(target=okcTradeRun)
okc_trade_thread.setDaemon(True)
okc_trade_thread.start()
if open_yb:
yb_trade_thread = threading.Thread(target=ybTradeRun)
yb_trade_thread.setDaemon(True)
yb_trade_thread.start()
if open_cnbtc:
cnbtc_trade_thread = threading.Thread(target = cnbtcTradeRun)
cnbtc_trade_thread.setDaemon(True)
cnbtc_trade_thread.start()
if open_hb:
hb_trade_thread = threading.Thread(target=hbTradeRun)
hb_trade_thread.setDaemon(True)
hb_trade_thread.start()
if open_okc:
okc_account_thread = threading.Thread(target=okcAccountRun)
okc_account_thread.setDaemon(True)
okc_account_thread.start()
if open_yb:
yb_account_thread = threading.Thread(target=ybAccountRun)
yb_account_thread.setDaemon(True)
yb_account_thread.start()
if open_cnbtc:
cnbtc_account_thread = threading.Thread(target = cnbtcAccountRun)
cnbtc_account_thread.setDaemon(True)
cnbtc_account_thread.start()
if open_hb:
hb_account_thread = threading.Thread(target=hbAccountRun)
hb_account_thread.setDaemon(True)
hb_account_thread.start()
alertThread = threading.Thread(target=alert)
alertThread.setDaemon(True)
alertThread.start()
total_coin = 0
total_money = 0
tick = 0
last_total_eth = 0
last_total_cny = 0
first_total_eth = 0
first_total_cny = 0
first = True
platform_number = 4
name_list = ["CNBTC","YunBi","HuoBi","OKCoin"]
obj_list = [cnbtc,yb,hb,okc]
que1_list = [cnbtcQue1,ybQue1,hbQue1,okcQue1]
que2_list = [cnbtcQue2,ybQue2,hbQue2,okcQue2]
trade_que1_list = [cnbtcTradeQue1,ybTradeQue1,hbTradeQue1,okcTradeQue1]
trade_que2_list = [cnbtcTradeQue2,ybTradeQue2,hbTradeQue2,okcTradeQue2]
thres_list = numpy.array([[999999,spread_threshold_yb_buy_cnbtc_sell,spread_threshold_hb_buy_cnbtc_sell,spread_threshold_okc_buy_cnbtc_sell],
[spread_threshold_yb_sell_cnbtc_buy,999999,spread_threshold_yb_sell_hb_buy,spread_threshold_yb_sell_okc_buy],
[spread_threshold_hb_sell_cnbtc_buy,spread_threshold_yb_buy_hb_sell,9999999,spread_threshold_okc_buy_hb_sell],
[spread_threshold_okc_sell_cnbtc_buy,spread_threshold_yb_buy_okc_sell,spread_threshold_okc_sell_hb_buy,999999]])
thres_list_origin = thres_list.copy()
has_ts = [True,True,True,False]
platform_list = []
for i in range(platform_number):
platform_list.append(
{
"name":name_list[i],
"obj":obj_list[i],
"que1":que1_list[i],
"que2":que2_list[i],
"trade_que1":trade_que1_list[i],
"trade_que2":trade_que2_list[i],
"depth_buy":None,
"depth_sell":None,
"has_ts":has_ts[i]
}
)
brokerage_fee = numpy.asarray([0.0004,0.001,0.002,0.001])
cash_fee = numpy.asarray([0.001,0.001,0.002,0.002])
while True:
print 'tick',tick
for platform in platform_list:
if platform["obj"]!=None:
platform["que1"].put(platform["obj"])
if open_yb:
ybAccountQue1.put(yb)
if open_okc:
okcAccountQue1.put(okc)
if open_cnbtc:
cnbtcAccountQue1.put(cnbtc)
if open_hb:
hbAccountQue1.put(hb)
for platform in platform_list:
if platform["obj"]!=None:
platform["depth_sell"] = platform["que2"].get()
platform["depth_buy"] = platform["que2"].get()
###depth[0] is amount
###depth[1] is price
###depth[2] is list platform_list["depth_buy"] = platform["que2"].get()
max_diff = -1000
trade_info = dict()
average_price = 0
open_num = 0
for i in range(platform_number):
if platform_list[i]["obj"]!=None:
open_num+=1
average_price+=platform_list[i]["depth_buy"][0][1]+platform_list[i]["depth_sell"][0][1]
average_price /= open_num*2.0/1.01
print 'average_price %f'%average_price
brokerage_trade = numpy.add.outer(brokerage_fee,brokerage_fee)*average_price
cash_trade = numpy.add.outer(cash_fee,numpy.zeros(cash_fee.shape[0]))*average_price
tick+=1
if tick % 1 == 0:
total_cny = 0
total_eth = 0
yb_cny = 0
yb_eth = 0
cnbtc_cny = 0
cnbtc_eth = 0
hb_cny = 0
hb_eth = 0
okc_cny = 0
okc_eth = 0
if open_yb:
yb_cny,yb_eth = ybAccountQue2.get()
print "yb_balance:%f %f"%(yb_eth,yb_cny)
if open_okc:
okc_cny,okc_eth = okcAccountQue2.get()
print "okc_balance:%f %f"%(okc_eth,okc_cny)
if open_hb:
hb_cny,hb_eth = hbAccountQue2.get()
print "hb balance:%f %f"%(hb_eth,hb_cny)
if open_cnbtc:
cnbtc_cny,cnbtc_eth = cnbtcAccountQue2.get()
print "cnbtc balance:%f %f"%(cnbtc_eth,cnbtc_cny)
total_cny = yb_cny+hb_cny+cnbtc_cny+okc_cny
total_eth = yb_eth+hb_eth+cnbtc_eth+okc_eth
balance.write("%s %f %f %f %f %f %f %f %f %f %f\n"%(time.strftime('%Y-%m-%d %H:%M:%S',time.localtime(time.time())),
cnbtc_eth,cnbtc_cny,yb_eth,yb_cny,hb_eth,hb_cny,okc_eth,okc_cny,total_eth,total_cny))
history.write("%s "%time.strftime('%Y-%m-%d %H:%M:%S',time.localtime(time.time())))
for i in range(platform_number):
if platform_list[i]["obj"]!=None:
history.write("%f %f "%(platform_list[i]["depth_buy"][0][1],platform_list[i]["depth_sell"][0][1]))
else:
history.write('0 0 ')
history.write('\n')
cny_list = numpy.asarray([cnbtc_cny,yb_cny,hb_cny,okc_cny])
eth_list = numpy.asarray([cnbtc_eth,yb_eth,hb_eth,okc_eth])
last_total_eth = total_eth
last_total_cny = total_cny
if first:
first_total_cny = total_cny
first_total_eth = total_eth
first = False
# history.write("%s %f %f %f %f %f %f\n" % (time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(time.time())),
# yb_depth[0][1], cnbtc_depth[0][1], yb_depth[0][1] - cnbtc_depth[0][1],
# yb_depth_minor[0][1], cnbtc_depth_minor[0][1],
# cnbtc_depth_minor[0][1] - yb_depth_minor[0][1]))
balance.flush()
history.flush()
if tick%1 == 0:
thres_list,trade_multiplier = setThreshold(cny_list,eth_list,brokerage_fee,cash_fee,thres_list,thres_list_origin,platform_number,average_price,maxTradeLimitation,name_list)
# print thres_list
i1 = None
j1 = None
for i in range(platform_number):
for j in range(platform_number):
if i!=j and platform_list[i]["obj"]!=None and platform_list[j]["obj"]!=None:
# if platform_list[i]["has_ts"] and platform_list[j]["has_ts"]:
# print i,j,int(platform_list[i]["depth_sell"][1]),int(platform_list[j]["depth_buy"][1])
# if (int(platform_list[i]["depth_sell"][1])-int(platform_list[j]["depth_buy"][1]))>5:
# continue
# print platform_list[i],platform_list[j]
if platform_list[i]["depth_sell"][0][1] - platform_list[j]["depth_buy"][0][1]>thres_list[i,j] and platform_list[i]["depth_sell"][0][1] - platform_list[j]["depth_buy"][0][1]-thres_list[i,j]>max_diff:
max_diff = platform_list[i]["depth_sell"][0][1]-platform_list[j]["depth_buy"][0][1]-thres_list[i,j]
trade_info["sell_depth"] = platform_list[i]["depth_sell"]
trade_info["buy_depth"] = platform_list[j]["depth_buy"]
trade_info["sell_name"] = platform_list[i]["name"]
trade_info["buy_name"] = platform_list[j]["name"]
trade_info["sell_que1"] = platform_list[i]["trade_que1"]
trade_info["sell_que2"] = platform_list[i]["trade_que2"]
trade_info["buy_que1"] = platform_list[j]["trade_que1"]
trade_info["buy_que2"] = platform_list[j]["trade_que2"]
trade_info["sell_obj"] = platform_list[i]["obj"]
trade_info["buy_obj"]=platform_list[j]["obj"]
i1 = i
j1 = j
if max_diff>0:
print "max_diff %f"%max_diff
buy_depth = trade_info["buy_depth"]
sell_depth = trade_info["sell_depth"]
# print("BuySide:%s timestamp:%s amount:\t%f price:\t%f"%(trade_info["buy_name"],buy_depth[1],buy_depth[0][0],buy_depth[0][1],str(buy_depth[0][2])))
# print('SellSide:%s timestamp:%s amount:\t%f price:\t%f'%(trade_info["sell_name"],sell_depth[1],sell_depth[0][0],sell_depth[0][1],str(sell_depth[0][2])))
# print 'BuySide:%s timestamp:%s amount:\t%f price:\t%f asks:%s'%(trade_info["buy_name"],buy_depth[1],buy_depth[0][0],buy_depth[0][1],str(buy_depth[0][2]))
# print 'SellSide:%s timestamp:%s amount:\t%f price:\t%f bids:%s'%(trade_info["sell_name"],sell_depth[1],sell_depth[0][0],sell_depth[0][1],str(sell_depth[0][2]))
amount = int(min(buy_depth[0][0],sell_depth[0][0])*1.0/trade_ratio*trade_multiplier[i1,j1]*100)/100.0
amount +=int((random.random()-0.5)*2*(random_range+0.01)*100)/100.0
if amount<0:
amount = 0
amount_buy=amount
amount_sell=amount_buy
limit = (buy_depth[0][1]+sell_depth[0][1])*1.0/2.0
if total_coin>0.0001:
amount_buy = max(amount_buy-total_coin,0)
elif total_coin<-0.0001:
amount_sell = max(amount_sell+total_coin,0)
print "%s buy %f coins at %f and limit %f" %(trade_info["buy_name"],amount_buy,buy_depth[0][1],limit-lowest_spread_threshold/2.0)
trade_info["buy_que1"].put((trade_info["buy_obj"],"buy",amount_buy,buy_depth[0][1],limit-lowest_spread_threshold/2.0))
print "%s sell %f coins at %f and limit %f" %(trade_info["sell_name"],amount_sell,sell_depth[0][1],limit+lowest_spread_threshold/2.0)
trade_info["sell_que1"].put((trade_info["sell_obj"],"sell",amount_sell,sell_depth[0][1],limit+lowest_spread_threshold/2.0))
sell_remain = trade_info["sell_que2"].get()
buy_remain = trade_info["buy_que2"].get()
# output.write('%f, %f, %f, %f\n'%(sell_remain[0]-amount_sell,amount_buy-buy_remain[0],buy_remain[1],sell_remain[1]))
# output.flush()
total_coin+=sell_remain[0]-amount_sell-buy_remain[0]+amount_buy
total_money+=sell_remain[1]+buy_remain[1]
print "%s_remain:%f\t %s_remain:%f,total_remain:%f"%(trade_info["buy_name"],buy_remain[0],trade_info["sell_name"],sell_remain[0],maxCoin)
print"coin:%f,money:%f"%(total_coin,total_money)
maxCoin-=max(sell_remain[0],buy_remain[0])
# if maxCoin<0:
# hbQue1.put(None)
# cnbtcQue1.put(None)
# hbTradeQue1.put(None)
# cnbtcTradeQue1.put(None)
# break
else:
# average_price = 0
for i in range(platform_number):
for j in range(platform_number):
if i!=j and platform_list[i]["obj"]!=None and platform_list[j]["obj"]!=None:
print "no trade %s sell:%f %s buy:%f diff:%15f thres:%20f diff_brokerage:%20f"%(platform_list[i]["name"],platform_list[i]["depth_sell"][0][1],platform_list[j]["name"],platform_list[j]["depth_buy"][0][1],
platform_list[i]["depth_sell"][0][1]-platform_list[j]["depth_buy"][0][1],thres_list[i,j],platform_list[i]["depth_sell"][0][1]-platform_list[j]["depth_buy"][0][1]-thres_list[i,j])
# average_price+=platform_list[i]["depth_buy"][0][1]+platform_list[i]["depth_sell"][0][1]
# average_price/=2.0*platform_number
print average_price
# print "no trade yb sell:%f cnbtc buy:%f diff:%f"%(yb_depth_sell[0][1],cnbtc_depth_buy[0][1],yb_depth_sell[0][1]-cnbtc_depth_buy[0][1])
# print "no trade hb sell:%f cnbtc buy:%f diff:%f"%(hb_depth_sell[0][1],cnbtc_depth_buy[0][1],hb_depth_sell[0][1]-cnbtc_depth_buy[0][1])
# print "no trade yb buy:%f cnbtc sell:%f diff:%f"%(yb_depth_buy[0][1],cnbtc_depth_sell[0][1],cnbtc_depth_sell[0][1]-yb_depth_buy[0][1])
# print "no trade hb buy:%f cnbtc sell:%f diff:%f"%(hb_depth_buy[0][1],cnbtc_depth_sell[0][1],cnbtc_depth_sell[0][1]-hb_depth_buy[0][1])
# print "no trade yb buy:%f hb sell:%f diff:%f"%(yb_depth_buy[0][1],hb_depth_sell[0][1],hb_depth_sell[0][1]-yb_depth_buy[0][1])
# print "no trade hb buy:%f yb sell:%f diff:%f"%(hb_depth_buy[0][1],yb_depth_sell[0][1],yb_depth_sell[0][1]-hb_depth_buy[0][1])
print "balance %f %f diff: %f %f %f first:%f %f"%(total_eth,total_cny, total_eth - last_total_eth,total_cny - last_total_cny,(total_eth - last_total_eth)*2000.0,
total_eth - first_total_eth,total_cny - first_total_cny)
print '\n'
#
# if hb_depth_sell[0][1]-cnbtc_depth_buy[0][1]>spread_threshold_hb_sell_cnbtc_buy and abs(int(cnbtc_depth_buy[1])-int(hb_depth_sell[1])<=3) and hb_depth_sell[0][1]-cnbtc_depth_buy[0][1]>max_diff:
# if cnbtc_depth_sell[0][1]-hb_depth_buy[0][1]>spread_threshold_hb_buy_cnbtc_sell and abs(int(hb_depth_buy[1])-int(cnbtc_depth_sell[1])<=3) and cnbtc_depth_sell[0][1]-hb_depth_buy[0][1]>max_diff:
# max_diff = cnbtc_depth_sell[0][1]-hb_depth_buy[0][1]
# trade_info["sell_depth"] = cnbtc_depth_sell
# trade_info["buy_depth"] = hb_depth_buy
# trade_info["sell_name"] = "CNBTC"
# trade_info["buy_name"] = "HuoBi"
# trade_info["sell_que1"] = cnbtcTradeQue1
# trade_info["sell_que2"] = cnbtcTradeQue2
# trade_info["buy_que1"] = hbTradeQue1
# trade_info["buy_que2"] = hbTradeQue2
# trade_info["buy_obj"] = hb
# trade_info["sell_obj"]=cnbtc
# if hb_depth_sell[0][1]-yb_depth_buy[0][1]>spread_threshold_yb_buy_hb_sell and abs(int(yb_depth_buy[1])-int(hb_depth_sell[1])<=3) and hb_depth_sell[0][1]-yb_depth_buy[0][1]>max_diff:
# max_diff = hb_depth_sell[0][1]-yb_depth_buy[0][1]
# trade_info["sell_depth"] = hb_depth_sell
# trade_info["buy_depth"] = yb_depth_buy
# trade_info["sell_name"] = "HuoBi"
# trade_info["buy_name"] = "YunBi"
# trade_info["sell_que1"] = hbTradeQue1
# trade_info["sell_que2"] = hbTradeQue2
# trade_info["buy_que1"] = ybTradeQue1
# trade_info["buy_que2"] = ybTradeQue2
# trade_info["sell_obj"] = hb
# trade_info["buy_obj"]=yb
# if yb_depth_sell[0][1]-hb_depth_buy[0][1]>spread_threshold_yb_sell_hb_buy and abs(int(hb_depth_buy[1])-int(yb_depth_sell[1])<=3) and yb_depth_sell[0][1]-hb_depth_buy[0][1]>max_diff:
# max_diff = yb_depth_sell[0][1]-hb_depth_buy[0][1]
# trade_info["sell_depth"] = yb_depth_sell
# trade_info["buy_depth"] = hb_depth_buy
# trade_info["sell_name"] = "YunBi"
# trade_info["buy_name"] = "HuoBi"
# trade_info["sell_que1"] = ybTradeQue1
# trade_info["sell_que2"] = ybTradeQue2
# trade_info["buy_que1"] = hbTradeQue1
# trade_info["buy_que2"] = hbTradeQue2
# trade_info["sell_obj"] = yb
# trade_info["buy_obj"]=hb
# if yb_depth_sell[0][1]-cnbtc_depth_buy[0][1]>spread_threshold_yb_sell_cnbtc_buy and abs(int(cnbtc_depth_buy[1])-int(yb_depth_sell[1])<=3) and yb_depth_sell[0][1]-cnbtc_depth_buy[0][1]>max_diff:
# max_diff = yb_depth_sell[0][1]-cnbtc_depth_buy[0][1]
# trade_info["sell_depth"] = yb_depth_sell
# trade_info["buy_depth"] = cnbtc_depth_buy
# trade_info["sell_name"] = "YunBi"
# trade_info["buy_name"] = "CNBTC"
# trade_info["sell_que1"] = ybTradeQue1
# trade_info["sell_que2"] = ybTradeQue2
# trade_info["buy_que1"] = cnbtcTradeQue1
# trade_info["buy_que2"] = cnbtcTradeQue2
# trade_info["sell_obj"] = yb
# trade_info["buy_obj"]=cnbtc
# if cnbtc_depth_sell[0][1]-yb_depth_buy[0][1]>spread_threshold_yb_sell_cnbtc_buy and abs(int(cnbtc_depth_sell[1])-int(yb_depth_buy[1])<=3) and cnbtc_depth_sell[0][1]-yb_depth_buy[0][1]>max_diff:
# max_diff = cnbtc_depth_sell[0][1]-yb_depth_buy[0][1]
# trade_info["sell_depth"] = cnbtc_depth_sell
# trade_info["buy_depth"] = yb_depth_buy
# trade_info["sell_name"] = "CNBTC"
# trade_info["buy_name"] = "YunBi"
# trade_info["sell_que1"] = cnbtcTradeQue1
# trade_info["sell_que2"] = cnbtcTradeQue2
# trade_info["buy_que1"] = ybTradeQue1
# trade_info["buy_que2"] = ybTradeQue2
# trade_info["sell_obj"] = cnbtc
# trade_info["buy_obj"]=yb
# if open_okc:
# if okc_depth_sell[0][1]-cnbtc_depth_buy[0][1]>spread_threshold_okc_sell_cnbtc_buy and okc_depth_sell[0][1]-cnbtc_depth_buy[0][1]>max_diff:
# max_diff = okc_depth_sell[0][1]-cnbtc_depth_buy[0][1]
# trade_info["sell_depth"] = okc_depth_sell
# trade_info["buy_depth"] = cnbtc_depth_buy
# trade_info["sell_name"] = "OKCoin"
# trade_info["buy_name"] = "CNBTC"
# trade_info["sell_que1"] = okcTradeQue1
# trade_info["sell_que2"] = okcTradeQue2
# trade_info["buy_que1"] = cnbtcTradeQue1
# trade_info["buy_que2"] = cnbtcTradeQue2
# trade_info["sell_obj"] = okc
# trade_info["buy_obj"]=cnbtc
# if cnbtc_depth_sell[0][1]-okc_depth_buy[0][1]>spread_threshold_okc_buy_cnbtc_sell and cnbtc_depth_sell[0][1]-okc_depth_buy[0][1]>max_diff:
# max_diff = cnbtc_depth_sell[0][1]-okc_depth_buy[0][1]
# trade_info["sell_depth"] = cnbtc_depth_sell
# trade_info["buy_depth"] = okc_depth_buy
# trade_info["sell_name"] = "CNBTC"
# trade_info["buy_name"] = "OKCoin"
# trade_info["sell_que1"] = cnbtcTradeQue1
# trade_info["sell_que2"] = cnbtcTradeQue2
# trade_info["buy_que1"] = okcTradeQue1
# trade_info["buy_que2"] = okcTradeQue2
# trade_info["buy_obj"] = okc
# trade_info["sell_obj"]=cnbtc
# if hb_depth_sell[0][1]-okc_depth_buy[0][1]>spread_threshold_okc_buy_hb_sell and hb_depth_sell[0][1]-okc_depth_buy[0][1]>max_diff:
# max_diff = hb_depth_sell[0][1]-okc_depth_buy[0][1]
# trade_info["sell_depth"] = hb_depth_sell
# trade_info["buy_depth"] = okc_depth_buy
# trade_info["sell_name"] = "HuoBi"
# trade_info["buy_name"] = "OKCoin"
# trade_info["sell_que1"] = hbTradeQue1
# trade_info["sell_que2"] = hbTradeQue2
# trade_info["buy_que1"] = okcTradeQue1
# trade_info["buy_que2"] = okcTradeQue2
# trade_info["sell_obj"] = hb
# trade_info["buy_obj"]=okc
# if okc_depth_sell[0][1]-hb_depth_buy[0][1]>spread_threshold_okc_sell_hb_buy and okc_depth_sell[0][1]-hb_depth_buy[0][1]>max_diff:
# max_diff = okc_depth_sell[0][1]-hb_depth_buy[0][1]
# trade_info["sell_depth"] = okc_depth_sell
# trade_info["buy_depth"] = hb_depth_buy
# trade_info["sell_name"] = "OKCoin"
# trade_info["buy_name"] = "HuoBi"
# trade_info["sell_que1"] = okcTradeQue1
# trade_info["sell_que2"] = okcTradeQue2
# trade_info["buy_que1"] = hbTradeQue1
# trade_info["buy_que2"] = hbTradeQue2
# trade_info["sell_obj"] = okc
# trade_info["buy_obj"]=hb
# if yb_depth_sell[0][1]-okc_buy[0][1]>spread_threshold_yb_sell_cnbtc_buy and yb_depth_sell[0][1]-cnbtc_depth_buy[0][1]>max_diff:
# max_diff = yb_depth_sell[0][1]-cnbtc_depth_buy[0][1]
# trade_info["sell_depth"] = yb_depth_sell
# trade_info["buy_depth"] = cnbtc_depth_buy
# trade_info["sell_name"] = "YunBi"
# trade_info["buy_name"] = "CNBTC"
# trade_info["sell_que1"] = ybTradeQue1
# trade_info["sell_que2"] = ybTradeQue2
# trade_info["buy_que1"] = cnbtcTradeQue1
# trade_info["buy_que2"] = cnbtcTradeQue2
# trade_info["sell_obj"] = yb
# trade_info["buy_obj"]=cnbtc
# if cnbtc_depth_sell[0][1]-yb_depth_buy[0][1]>spread_threshold_yb_sell_cnbtc_buy and cnbtc_depth_sell[0][1]-yb_depth_buy[0][1]>max_diff:
# max_diff = cnbtc_depth_sell[0][1]-yb_depth_buy[0][1]
# trade_info["sell_depth"] = cnbtc_depth_sell
# trade_info["buy_depth"] = yb_depth_buy
# trade_info["sell_name"] = "CNBTC"
# trade_info["buy_name"] = "YunBi"
# trade_info["sell_que1"] = cnbtcTradeQue1
# trade_info["sell_que2"] = cnbtcTradeQue2
# trade_info["buy_que1"] = ybTradeQue1
# trade_info["buy_que2"] = ybTradeQue2
# trade_info["sell_obj"] = cnbtc
# trade_info["buy_obj"]=yb
# if hb_depth_sell[0][1]-cnbtc_depth_buy[0][1]>spread_threshold_hb_sell_cnbtc_buy and abs(int(cnbtc_depth_buy[1])-int(hb_depth_sell[1])<=3) and hb_depth_sell[0][1]-cnbtc_depth_buy[0][1]>max_diff:
# print "start trade major"
#
# elif yb_depth_sell[0][1]-cnbtc_depth_buy[0][1]>spread_threshold_yb_sell_cnbtc_buy and abs(int(cnbtc_depth_buy[1])-int(yb_depth_sell[1])<=3):
# print 'CNBTC: timestamp:%s amount:\t%f price:\t%f asks:%s'%(cnbtc_depth_buy[1],cnbtc_depth_buy[0][0],cnbtc_depth_buy[0][1],str(cnbtc_depth_buy[0][2]))
# print 'YUNBI: timestamp:%s amount:\t%f price:\t%f bids:%s'%(yb_depth_sell[1],yb_depth_sell[0][0],yb_depth_sell[0][1],str(yb_depth_sell[0][2]))
# print "start trade major"
# amount = min(cnbtc_depth_buy[0][0],yb_depth_sell[0][0])*1.0/trade_ratio
# amount_buy=amount
# amount_sell=amount_buy
# limit = (cnbtc_depth_buy[0][1]+yb_depth_sell[0][1])*1.0/2.0
# if total_coin>0.0001:
# amount_buy = max(amount_buy-total_coin,0)
# elif total_coin<-0.0001:
# amount_sell = max(amount_sell+total_coin,0)
# print "cnbtc buy %f coins at %f and limit %f" %(amount_buy,cnbtc_depth_buy[0][1],limit-lowest_spread_threshold/2.0)
# cnbtcTradeQue1.put((cnbtc,"buy",amount_buy,cnbtc_depth_buy[0][1],limit-lowest_spread_threshold/2.0))
# print "yb sell %f coins at %f and limit %f" %(amount_sell,yb_depth_sell[0][1],limit+lowest_spread_threshold/2.0)
# ybTradeQue1.put((yb,"sell",amount_sell,yb_depth_sell[0][1],limit+lowest_spread_threshold/2.0))
# cnbtc_remain = cnbtcTradeQue2.get()
# yb_remain = ybTradeQue2.get()
# output.write('%f, %f, %f, %f\n'%(yb_remain[0]-amount_sell,amount_buy-cnbtc_remain[0],yb_remain[1],cnbtc_remain[1]))
# output.flush()
# total_coin+=yb_remain[0]-amount_sell-cnbtc_remain[0]+amount_buy
# total_money+=yb_remain[1]+cnbtc_remain[1]
# print "cnbtc_remain:%f\t yb_remain:%f,total_remain:%f"%(cnbtc_remain[0],yb_remain[0],maxCoin)
# print"coin:%f,money:%f"%(total_coin,total_money)
# maxCoin-=max(yb_remain[0],cnbtc_remain[0])
# if maxCoin<0:
# ybQue1.put(None)
# cnbtcQue1.put(None)
# ybTradeQue1.put(None)
# cnbtcTradeQue1.put(None)
# break
#
# # elif False:
# elif cnbtc_depth_sell[0][1]-yb_depth_buy[0][1]>spread_threshold_yb_buy_cnbtc_sell and abs(int(cnbtc_depth_sell[1])-int(yb_depth_buy[1])<=3):
# print 'CNBTC: timestamp:%s amount:\t%f price:\t%f bids:%s'%(cnbtc_depth_sell[1],cnbtc_depth_sell[0][0],cnbtc_depth_sell[0][1],str(cnbtc_depth_sell[0][2]))
# print 'YUNBI: timestamp:%s amount:\t%f price:\t%f asks:%s'%(yb_depth_buy[1],yb_depth_buy[0][0],yb_depth_buy[0][1],str(yb_depth_buy[0][2]))
# print "start trade minor"
# amount = min(cnbtc_depth_sell[0][0], yb_depth_buy[0][0]) * 1.0 / trade_ratio
# amount_buy = amount
# amount_sell = amount_buy
# limit = (cnbtc_depth_sell[0][1] + yb_depth_buy[0][1]) * 1.0 / 2.0
# if total_coin > 0.01:
# amount_buy = max(amount_buy - total_coin, 0)
# elif total_coin < -0.01:
# amount_sell = max(amount_sell + total_coin, 0)
# print "cnbtc sell %f coins at %f and limit %f" % (amount_sell, cnbtc_depth_sell[0][1], limit + lowest_spread_threshold/ 2.0)
# cnbtcTradeQue1.put((cnbtc, "sell", amount_sell, cnbtc_depth_sell[0][1], limit + lowest_spread_threshold / 2.0))
# print "yb buy %f coins at %f and limit %f" % (amount_buy, yb_depth_buy[0][1], limit - lowest_spread_threshold / 2.0)
# ybTradeQue1.put(
# (yb, "buy", amount_buy, yb_depth_buy[0][1], limit - lowest_spread_threshold / 2.0))
# cnbtc_remain = cnbtcTradeQue2.get()
# yb_remain = ybTradeQue2.get()
# output.write('%f, %f, %f, %f\n' % (
# amount_buy - yb_remain[0], cnbtc_remain[0] - amount_sell, yb_remain[1], cnbtc_remain[1]))
# total_coin += -yb_remain[0] - amount_sell + cnbtc_remain[0] + amount_buy
# total_money += yb_remain[1] + cnbtc_remain[1]
# print "cnbtc_remain:%f\t yb_remain:%f,total_remain:%f" % (cnbtc_remain[0], yb_remain[0], maxCoin)
# print"coin:%f,money:%f" % (total_coin, total_money)
# maxCoin -= max(yb_remain[0], cnbtc_remain[0])
# if maxCoin < 0:
# ybQue1.put(None)
# cnbtcQue1.put(None)
# ybTradeQue1.put(None)
# cnbtcTradeQue1.put(None)
# break
# # elif False:
# elif cnbtc_depth_sell[0][1]-hb_depth_buy[0][1]>spread_threshold_hb_buy_cnbtc_sell and abs(int(cnbtc_depth_sell[1])-int(hb_depth_buy[1])<=3):
# print 'CNBTC: timestamp:%s amount:\t%f price:\t%f bids:%s'%(cnbtc_depth_sell[1],cnbtc_depth_sell[0][0],cnbtc_depth_sell[0][1],str(cnbtc_depth_sell[0][2]))
# print 'HuoBI: timestamp:%s amount:\t%f price:\t%f asks:%s'%(hb_depth_buy[1],hb_depth_buy[0][0],hb_depth_buy[0][1],str(hb_depth_buy[0][2]))
# print "start trade minor"
# amount = min(cnbtc_depth_sell[0][0], hb_depth_buy[0][0]) * 1.0 / trade_ratio
# amount_buy = amount
# amount_sell = amount_buy
# limit = (cnbtc_depth_sell[0][1] + hb_depth_buy[0][1]) * 1.0 / 2.0
# if total_coin > 0.01:
# amount_buy = max(amount_buy - total_coin, 0)
# elif total_coin < -0.01:
# amount_sell = max(amount_sell + total_coin, 0)
# print "cnbtc sell %f coins at %f and limit %f" % (amount_sell, cnbtc_depth_sell[0][1], limit + lowest_spread_threshold/ 2.0)
# cnbtcTradeQue1.put((cnbtc, "sell", amount_sell, cnbtc_depth_sell[0][1], limit + lowest_spread_threshold / 2.0))
# print "hb buy %f coins at %f and limit %f" % (amount_buy, hb_depth_buy[0][1], limit - lowest_spread_threshold / 2.0)
# hbTradeQue1.put(
# (hb, "buy", amount_buy, hb_depth_buy[0][1], limit - lowest_spread_threshold / 2.0))
# cnbtc_remain = cnbtcTradeQue2.get()
# hb_remain = hbTradeQue2.get()
# output.write('%f, %f, %f, %f\n' % (
# amount_buy - hb_remain[0], cnbtc_remain[0] - amount_sell, hb_remain[1], cnbtc_remain[1]))
# total_coin += -hb_remain[0] - amount_sell + cnbtc_remain[0] + amount_buy
# total_money += hb_remain[1] + cnbtc_remain[1]
# print "cnbtc_remain:%f\t hb_remain:%f,total_remain:%f" % (cnbtc_remain[0], hb_remain[0], maxCoin)
# print"coin:%f,money:%f" % (total_coin, total_money)
# maxCoin -= max(hb_remain[0], cnbtc_remain[0])
# if maxCoin < 0:
# hbQue1.put(None)
# cnbtcQue1.put(None)
# hbTradeQue1.put(None)
# cnbtcTradeQue1.put(None)
# break
# else:
# # print "total coin: %f total_cny %f"%(total_eth,total_cny)
# # print "yunbi ",str(yb.get_account())
# # print "cnbtc ",str(cnbtc.get_account())
# print cnbtc.get_account()
# cnbtc.getDepth()
# print cnbtc.buy(volume=0.01,price=1461)
# print cnbtc.get_account()
# hft = HaiFengTeng.HaiFengTeng(config)
# hft.login()
# yb = YunBi.Yunbi(config,"YunBi2")
# yb.get_account()
# yb.buy(volume=0.001,price=9999.0)
# yb.getOrder()
# print yb.getDepth()
| 45.74726
| 245
| 0.561558
|
ab3ea1f161bcea5311f9766c4b23a51c645e6437
| 1,174
|
py
|
Python
|
startuptweet.py
|
cudmore/startupnotify
|
76b61b295ae7049e597fa05457a6696e624c4955
|
[
"MIT"
] | null | null | null |
startuptweet.py
|
cudmore/startupnotify
|
76b61b295ae7049e597fa05457a6696e624c4955
|
[
"MIT"
] | null | null | null |
startuptweet.py
|
cudmore/startupnotify
|
76b61b295ae7049e597fa05457a6696e624c4955
|
[
"MIT"
] | null | null | null |
#!/usr/bin/python3
"""
Author: Robert Cudmore
Date: 20181013
Purpose: Send a Tweet with IP and MAC address of a Raspberry Pi
Install:
pip3 install tweepy
Usage:
python3 startuptweet.py 'this is my tweet'
"""
import tweepy
import sys
import socket
import subprocess
from uuid import getnode as get_mac
from datetime import datetime
# Create variables for each key, secret, token
from my_config import hash_tag
from my_config import consumer_key
from my_config import consumer_secret
from my_config import access_token
from my_config import access_token_secret
# Optional tweet text is taken from the first command-line argument.
message = sys.argv[1] if len(sys.argv) > 1 else ''

# Authenticate against the Twitter API with the credentials from my_config.
auth = tweepy.OAuthHandler(consumer_key, consumer_secret)
auth.set_access_token(access_token, access_token_secret)
api = tweepy.API(auth)

# Gather host identification: timestamp, IP address list, hostname, MAC.
thetime = datetime.now().strftime('%Y%m%d %H:%M:%S')
ip = subprocess.check_output(['hostname', '--all-ip-addresses']).decode('utf-8').strip()
hostname = socket.gethostname()
mac = hex(get_mac())

# Assemble the status text and post it.
tweet = ' '.join([thetime, hostname, ip, mac, message, hash_tag])
print('tweeting:', tweet)
api.update_status(status=tweet)
| 22.576923
| 88
| 0.736797
|
ab3f43a17d20a3de43fa762a19edf3462b4252f3
| 1,307
|
py
|
Python
|
distributed/db.py
|
VW-Stephen/pySpiderScrape
|
861d7289743d5b65916310448526a58b381fde8d
|
[
"WTFPL"
] | null | null | null |
distributed/db.py
|
VW-Stephen/pySpiderScrape
|
861d7289743d5b65916310448526a58b381fde8d
|
[
"WTFPL"
] | null | null | null |
distributed/db.py
|
VW-Stephen/pySpiderScrape
|
861d7289743d5b65916310448526a58b381fde8d
|
[
"WTFPL"
] | null | null | null |
#!/usr/bin/python
from bs4 import BeautifulSoup
import sqlite3
| 35.324324
| 397
| 0.582249
|
ab4054d837b64d6cdc4bc55d34e29e751e8dc8d5
| 4,427
|
py
|
Python
|
private/scripts/recheck-invalid-handles.py
|
bansal-shubham/stopstalk-deployment
|
6392eace490311be103292fdaff9ae215e4db7e6
|
[
"MIT"
] | null | null | null |
private/scripts/recheck-invalid-handles.py
|
bansal-shubham/stopstalk-deployment
|
6392eace490311be103292fdaff9ae215e4db7e6
|
[
"MIT"
] | null | null | null |
private/scripts/recheck-invalid-handles.py
|
bansal-shubham/stopstalk-deployment
|
6392eace490311be103292fdaff9ae215e4db7e6
|
[
"MIT"
] | null | null | null |
"""
Copyright (c) 2015-2019 Raj Patel(raj454raj@gmail.com), StopStalk
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""
import requests, bs4
import sites
# Constants to be used in case of request failures
SERVER_FAILURE = "SERVER_FAILURE"
NOT_FOUND = "NOT_FOUND"
OTHER_FAILURE = "OTHER_FAILURE"
REQUEST_FAILURES = (SERVER_FAILURE, NOT_FOUND, OTHER_FAILURE)
# NOTE(review): Python 2 script (print statements, dict.has_key) executed in a
# web2py environment -- `db`, `current`, `get_invalid_handle_method` and
# `populate_handle_to_row` are injected by the framework/runner and are not
# defined in this file's visible scope.
if __name__ == "__main__":
    # Shorthand aliases for the DAL tables used below.
    ihtable = db.invalid_handle
    atable = db.auth_user
    cftable = db.custom_friend
    stable = db.submission
    nrtable = db.next_retrieval
    # site -> predicate answering "is this handle invalid on that site?"
    mapping = {}
    # site -> handle -> list of user rows registered with that handle
    handle_to_row = {}
    for site in current.SITES:
        mapping[site] = get_invalid_handle_method(site)
        handle_to_row[site] = {}
    # Sanity check: a handle that cannot possibly exist must be reported
    # invalid by every site-specific checker before we trust them.
    impossiblehandle = "thisreallycantbeahandle308"
    assert(all(map(lambda site: get_invalid_handle_method(site)(impossiblehandle), current.SITES.keys())))
    populate_handle_to_row(atable)
    populate_handle_to_row(cftable)
    # for site in current.SITES:
    #     print site
    #     for site_handle in handle_to_row[site]:
    #         print "\t", site_handle
    #         for row in handle_to_row[site][site_handle]:
    #             print "\t\t", row.first_name, row.last_name, row.stopstalk_handle
    # Field values applied to every user row whose handle turned out to be
    # valid after all: reset ratings/stats so the pipeline rebuilds them.
    update_dict = {"stopstalk_rating": 0,
                   "stopstalk_prev_rating": 0,
                   "per_day": 0.0,
                   "per_day_change": "0.0",
                   "authentic": False}
    final_delete_query = False
    cnt = 0
    for row in db(ihtable).iterselect():
        # If not an invalid handle anymore
        if handle_to_row[row.site].has_key(row.handle) and mapping[row.site](row.handle) is False:
            cnt += 1
            print row.site, row.handle, "deleted"
            for row_obj in handle_to_row[row.site][row.handle]:
                print "\t", row_obj.stopstalk_handle, "updated"
                # Reset the per-site last-retrieved date so submissions are
                # fetched again from scratch for this user.
                update_dict[row.site.lower() + "_lr"] = current.INITIAL_DATE
                row_obj.update_record(**update_dict)
                if "user_id" in row_obj:
                    # Custom user
                    db(nrtable.custom_user_id == row_obj.id).update(**{row.site.lower() + "_delay": 0})
                else:
                    db(nrtable.user_id == row_obj.id).update(**{row.site.lower() + "_delay": 0})
                # Accumulate one big OR-ed delete query covering this user's
                # submissions on the affected site.
                final_delete_query |= ((stable.site == row.site) & \
                                       (stable.stopstalk_handle == row_obj.stopstalk_handle))
            del update_dict[row.site.lower() + "_lr"]
            row.delete_record()
            # Flush accumulated deletions in batches so the query stays bounded.
            if cnt >= 10:
                if final_delete_query:
                    db(final_delete_query).delete()
                cnt = 0
                final_delete_query = False
    # Flush whatever deletions are still pending after the loop.
    if final_delete_query:
        db(final_delete_query).delete()
| 41.373832
| 106
| 0.639259
|
ab41670d17acae57b54990c3a25815a2ee40eb19
| 9,225
|
py
|
Python
|
onnxmltools/convert/keras/_parse.py
|
gpminsuk/onnxmltools
|
4e88929a79a1018183f58e2d5e032dd639839dd2
|
[
"MIT"
] | 1
|
2018-04-10T02:30:47.000Z
|
2018-04-10T02:30:47.000Z
|
onnxmltools/convert/keras/_parse.py
|
gpminsuk/onnxmltools
|
4e88929a79a1018183f58e2d5e032dd639839dd2
|
[
"MIT"
] | null | null | null |
onnxmltools/convert/keras/_parse.py
|
gpminsuk/onnxmltools
|
4e88929a79a1018183f58e2d5e032dd639839dd2
|
[
"MIT"
] | 1
|
2018-06-27T18:16:20.000Z
|
2018-06-27T18:16:20.000Z
|
# -------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
# --------------------------------------------------------------------------
import tensorflow as tf
from keras.models import Model
from keras.layers import Layer, InputLayer
from ...proto import onnx
from ..common._container import KerasModelContainer
from ..common._topology import Topology
from ..common.data_types import *
def parse_keras(model, initial_types=None, targeted_onnx=onnx.__version__):
    '''
    The main parsing function of Keras Model and Sequential objects.

    :param model: A Keras Model or Sequential object
    :param initial_types: A list providing some types for some root variables. Each element is a tuple of a variable
    name and a type defined in data_types.py.
    :param targeted_onnx: a version string such as `1.1.2` or `1.2.1` for specifying the ONNX version used to produce
    the output model.
    :return: a Topology object. It's an intermediate representation of the input Keras model
    '''
    raw_model_container = KerasModelContainer(model)

    # The whole topology is built with a fixed batch size of 1; the per-tensor
    # types declared below are derived with that batch size.
    topology = Topology(raw_model_container, default_batch_size=1, initial_types=initial_types,
                        targeted_onnx=targeted_onnx)
    scope = topology.declare_scope('__root__')

    # Each inbound node defines an evaluation of the underlining model (if the model is called multiple times, it may
    # contain several inbound nodes). According to the tensors specified in those inbound nodes, we declare the roots
    # and leaves of the computational graph described by the Keras input model.
    for node in _extract_inbound_nodes(model):
        input_shapes, output_shapes = extract_model_input_and_output_shapes(model, topology.default_batch_size)

        # Declare inputs for a specific model execution
        for tensor, shape in zip(node.input_tensors, input_shapes):
            raw_model_container.add_input_name(tensor.name)
            tensor_type = determine_tensor_type(tensor, topology.default_batch_size, list(shape))
            scope.get_local_variable_or_declare_one(tensor.name, tensor_type)

        # Declare outputs for a specific model execution
        for tensor, shape in zip(node.output_tensors, output_shapes):
            raw_model_container.add_output_name(tensor.name)
            tensor_type = determine_tensor_type(tensor, topology.default_batch_size, list(shape))
            scope.get_local_variable_or_declare_one(tensor.name, tensor_type)

    # For each model execution, we call a parsing function to create a computational (sub-)graph because ONNX has no
    # model/layer sharing.
    for node in _extract_inbound_nodes(model):
        _parse_keras(topology, scope, model, node)

    topology.root_names = [variable.onnx_name for variable in scope.variables.values()]
    return topology
| 51.825843
| 125
| 0.714363
|
ab41d11daca6d1b31e59637bf18b9f99a383f86f
| 24,575
|
py
|
Python
|
src/scenic/core/regions.py
|
cahartsell/Scenic
|
2e7979011aef426108687947668d9ba6f5439136
|
[
"BSD-3-Clause"
] | null | null | null |
src/scenic/core/regions.py
|
cahartsell/Scenic
|
2e7979011aef426108687947668d9ba6f5439136
|
[
"BSD-3-Clause"
] | null | null | null |
src/scenic/core/regions.py
|
cahartsell/Scenic
|
2e7979011aef426108687947668d9ba6f5439136
|
[
"BSD-3-Clause"
] | null | null | null |
"""Objects representing regions in space."""
import math
import random
import itertools
import numpy
import scipy.spatial
import shapely.geometry
import shapely.ops
from scenic.core.distributions import Samplable, RejectionException, needsSampling
from scenic.core.lazy_eval import valueInContext
from scenic.core.vectors import Vector, OrientedVector, VectorDistribution
from scenic.core.geometry import RotatedRectangle
from scenic.core.geometry import sin, cos, hypot, findMinMax, pointIsInCone, averageVectors
from scenic.core.geometry import headingOfSegment, triangulatePolygon, plotPolygon, polygonUnion
from scenic.core.type_support import toVector
from scenic.core.utils import cached, areEquivalent
def regionFromShapelyObject(obj, orientation=None):
    """Wrap a Shapely geometry in the matching Region type.

    Empty geometries map to the ``nowhere`` region; (multi)polygons become
    PolygonalRegions and (multi)linestrings become PolylineRegions. Any other
    geometry type is rejected with a RuntimeError.
    """
    assert obj.is_valid, obj
    if obj.is_empty:
        return nowhere
    if isinstance(obj, (shapely.geometry.Polygon, shapely.geometry.MultiPolygon)):
        return PolygonalRegion(polygon=obj, orientation=orientation)
    if isinstance(obj, (shapely.geometry.LineString, shapely.geometry.MultiLineString)):
        return PolylineRegion(polyline=obj, orientation=orientation)
    raise RuntimeError(f'unhandled type of Shapely geometry: {obj}')
everywhere = AllRegion('everywhere')
nowhere = EmptyRegion('nowhere')
| 33.896552
| 96
| 0.722238
|
ab42204ebfa5ee7790165df748eb621656c602f4
| 6,525
|
py
|
Python
|
orangery/cli/cutfill.py
|
mrahnis/orangery
|
69afe0057bd61163eb8e026e58d648dfa1e73b94
|
[
"BSD-3-Clause"
] | 2
|
2015-11-30T02:46:28.000Z
|
2021-06-26T15:01:45.000Z
|
orangery/cli/cutfill.py
|
mrahnis/orangery
|
69afe0057bd61163eb8e026e58d648dfa1e73b94
|
[
"BSD-3-Clause"
] | 18
|
2017-06-18T03:23:05.000Z
|
2022-03-18T00:14:05.000Z
|
orangery/cli/cutfill.py
|
mrahnis/orangery
|
69afe0057bd61163eb8e026e58d648dfa1e73b94
|
[
"BSD-3-Clause"
] | null | null | null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import sys
import logging
import time
import json
import click
import matplotlib.pyplot as plt
import orangery as o
from orangery.cli import defaults, util
from orangery.tools.plotting import get_scale_factor
| 45
| 225
| 0.66728
|
ab42719d063612a8629ae16074131965d4bb9222
| 1,397
|
py
|
Python
|
src/ice_g2p/dictionaries.py
|
cadia-lvl/ice-g2p
|
5a6cc55f45282e8a656ea0742e2f373189c9a912
|
[
"Apache-2.0"
] | null | null | null |
src/ice_g2p/dictionaries.py
|
cadia-lvl/ice-g2p
|
5a6cc55f45282e8a656ea0742e2f373189c9a912
|
[
"Apache-2.0"
] | null | null | null |
src/ice_g2p/dictionaries.py
|
cadia-lvl/ice-g2p
|
5a6cc55f45282e8a656ea0742e2f373189c9a912
|
[
"Apache-2.0"
] | null | null | null |
import os, sys
# Paths to data files installed with the package, resolved relative to
# sys.prefix (the active environment's installation root).
DICTIONARY_FILE = os.path.join(sys.prefix, 'dictionaries/ice_pron_dict_standard_clear.csv')
# Head/modifier maps -- presumably used for compound-word analysis; verify
# against the modules that load them.
HEAD_FILE = os.path.join(sys.prefix, 'data/head_map.csv')
MODIFIER_FILE = os.path.join(sys.prefix, 'data/modifier_map.csv')
# SAMPA phone inventories: vowels and permitted consonant clusters.
VOWELS_FILE = os.path.join(sys.prefix, 'data/vowels_sampa.txt')
CONS_CLUSTERS_FILE = os.path.join(sys.prefix, 'data/cons_clusters_sampa.txt')
| 24.086207
| 91
| 0.670007
|
ab42c6179a77692e03a58e9d6335af55ec3cb46d
| 385
|
py
|
Python
|
tests/test_annotations_notebook.py
|
jeromedockes/pylabelbuddy
|
26be00db679e94117968387aa7010dab2739b517
|
[
"BSD-3-Clause"
] | null | null | null |
tests/test_annotations_notebook.py
|
jeromedockes/pylabelbuddy
|
26be00db679e94117968387aa7010dab2739b517
|
[
"BSD-3-Clause"
] | null | null | null |
tests/test_annotations_notebook.py
|
jeromedockes/pylabelbuddy
|
26be00db679e94117968387aa7010dab2739b517
|
[
"BSD-3-Clause"
] | null | null | null |
from pylabelbuddy import _annotations_notebook
| 32.083333
| 68
| 0.750649
|
ab4374fa18ea29af4960ad145950b9d2672ecb83
| 1,257
|
py
|
Python
|
middleware/run.py
|
natedogg484/react-flask-authentication
|
5000685d35471b03f72e0b07dfbdbf6d5fc296d2
|
[
"MIT"
] | null | null | null |
middleware/run.py
|
natedogg484/react-flask-authentication
|
5000685d35471b03f72e0b07dfbdbf6d5fc296d2
|
[
"MIT"
] | 4
|
2021-03-09T21:12:06.000Z
|
2022-02-26T19:17:31.000Z
|
middleware/run.py
|
natedogg484/vue-authentication
|
ab087e238d98606ffb73167cb9a16648812ac3e5
|
[
"MIT"
] | null | null | null |
from flask import Flask
from flask_cors import CORS
from flask_restful import Api
from flask_sqlalchemy import SQLAlchemy
from flask_jwt_extended import JWTManager
# Application wiring: Flask REST API with CORS enabled on all routes.
app = Flask(__name__)
CORS(app)
api = Api(app)
# SQLite database file in the working directory; modification tracking is
# disabled to silence SQLAlchemy's overhead warning.
app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///app.db'
app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False
# NOTE(review): hard-coded secrets -- acceptable for a demo, but these should
# come from the environment in any real deployment.
app.config['SECRET_KEY'] = 'some-secret-string'
app.config['JWT_SECRET_KEY'] = 'jwt-secret-string'
# Blacklist both access and refresh tokens so logout can revoke them.
app.config['JWT_BLACKLIST_ENABLED'] = True
app.config['JWT_BLACKLIST_TOKEN_CHECKS'] = ['access', 'refresh']
db = SQLAlchemy(app)
jwt = JWTManager(app)
# Imported only after `app`/`db` exist -- presumably models/resources import
# them back from this module (circular-import workaround); verify in those files.
import models, resources, views
# Route registration for the authentication API.
api.add_resource(resources.UserRegistration, '/registration')
api.add_resource(resources.UserLogin, '/login')
api.add_resource(resources.UserLogoutAccess, '/logout/access')
api.add_resource(resources.UserLogoutRefresh, '/logout/refresh')
api.add_resource(resources.TokenRefresh, '/token/refresh')
api.add_resource(resources.AllUsers, '/users')
api.add_resource(resources.SecretResource, '/secret')
| 27.933333
| 64
| 0.791567
|
ab43951bf5dc988e3a4948ac307ca905f1536445
| 223
|
py
|
Python
|
Programas do Curso/Desafio 2.py
|
carvalhopedro22/Programas-em-python-cursos-e-geral-
|
970e1ebe6cdd1e31f52dfd60328c2203d4de3ef1
|
[
"MIT"
] | null | null | null |
Programas do Curso/Desafio 2.py
|
carvalhopedro22/Programas-em-python-cursos-e-geral-
|
970e1ebe6cdd1e31f52dfd60328c2203d4de3ef1
|
[
"MIT"
] | null | null | null |
Programas do Curso/Desafio 2.py
|
carvalhopedro22/Programas-em-python-cursos-e-geral-
|
970e1ebe6cdd1e31f52dfd60328c2203d4de3ef1
|
[
"MIT"
] | null | null | null |
# Ask the user for name and birth date parts, then echo them back.
# Fix: the Portuguese prompts had lost their accented characters to an
# encoding mishap ("ms" -> "mês", "voc" -> "você").
nome = input('Qual o seu nome? ')
dia = input('Que dia do mês você nasceu? ')
mes = input('Qual o mês em que você nasceu? ')
ano = input('Qual o ano em que você nasceu? ')
print(nome, 'nasceu em', dia, 'de', mes, 'do ano', ano)
| 44.6
| 51
| 0.650224
|
ab450e026b0907e8b838f6f9a3e2ba1d4218dd25
| 5,065
|
py
|
Python
|
cmibs/cisco_vlan_membership_mib.py
|
prorevizor/noc
|
37e44b8afc64318b10699c06a1138eee9e7d6a4e
|
[
"BSD-3-Clause"
] | 84
|
2017-10-22T11:01:39.000Z
|
2022-02-27T03:43:48.000Z
|
cmibs/cisco_vlan_membership_mib.py
|
prorevizor/noc
|
37e44b8afc64318b10699c06a1138eee9e7d6a4e
|
[
"BSD-3-Clause"
] | 22
|
2017-12-11T07:21:56.000Z
|
2021-09-23T02:53:50.000Z
|
cmibs/cisco_vlan_membership_mib.py
|
prorevizor/noc
|
37e44b8afc64318b10699c06a1138eee9e7d6a4e
|
[
"BSD-3-Clause"
] | 23
|
2017-12-06T06:59:52.000Z
|
2022-02-24T00:02:25.000Z
|
# ----------------------------------------------------------------------
# CISCO-VLAN-MEMBERSHIP-MIB
# Compiled MIB
# Do not modify this file directly
# Run ./noc mib make-cmib instead
# ----------------------------------------------------------------------
# Copyright (C) 2007-2020 The NOC Project
# See LICENSE for details
# ----------------------------------------------------------------------
# MIB Name
NAME = "CISCO-VLAN-MEMBERSHIP-MIB"
# Metadata
LAST_UPDATED = "2007-12-14"
COMPILED = "2020-01-19"
# MIB Data: name -> oid
MIB = {
"CISCO-VLAN-MEMBERSHIP-MIB::ciscoVlanMembershipMIB": "1.3.6.1.4.1.9.9.68",
"CISCO-VLAN-MEMBERSHIP-MIB::ciscoVlanMembershipMIBObjects": "1.3.6.1.4.1.9.9.68.1",
"CISCO-VLAN-MEMBERSHIP-MIB::vmVmps": "1.3.6.1.4.1.9.9.68.1.1",
"CISCO-VLAN-MEMBERSHIP-MIB::vmVmpsVQPVersion": "1.3.6.1.4.1.9.9.68.1.1.1",
"CISCO-VLAN-MEMBERSHIP-MIB::vmVmpsRetries": "1.3.6.1.4.1.9.9.68.1.1.2",
"CISCO-VLAN-MEMBERSHIP-MIB::vmVmpsReconfirmInterval": "1.3.6.1.4.1.9.9.68.1.1.3",
"CISCO-VLAN-MEMBERSHIP-MIB::vmVmpsReconfirm": "1.3.6.1.4.1.9.9.68.1.1.4",
"CISCO-VLAN-MEMBERSHIP-MIB::vmVmpsReconfirmResult": "1.3.6.1.4.1.9.9.68.1.1.5",
"CISCO-VLAN-MEMBERSHIP-MIB::vmVmpsCurrent": "1.3.6.1.4.1.9.9.68.1.1.6",
"CISCO-VLAN-MEMBERSHIP-MIB::vmVmpsTable": "1.3.6.1.4.1.9.9.68.1.1.7",
"CISCO-VLAN-MEMBERSHIP-MIB::vmVmpsEntry": "1.3.6.1.4.1.9.9.68.1.1.7.1",
"CISCO-VLAN-MEMBERSHIP-MIB::vmVmpsIpAddress": "1.3.6.1.4.1.9.9.68.1.1.7.1.1",
"CISCO-VLAN-MEMBERSHIP-MIB::vmVmpsPrimary": "1.3.6.1.4.1.9.9.68.1.1.7.1.2",
"CISCO-VLAN-MEMBERSHIP-MIB::vmVmpsRowStatus": "1.3.6.1.4.1.9.9.68.1.1.7.1.3",
"CISCO-VLAN-MEMBERSHIP-MIB::vmMembership": "1.3.6.1.4.1.9.9.68.1.2",
"CISCO-VLAN-MEMBERSHIP-MIB::vmMembershipSummaryTable": "1.3.6.1.4.1.9.9.68.1.2.1",
"CISCO-VLAN-MEMBERSHIP-MIB::vmMembershipSummaryEntry": "1.3.6.1.4.1.9.9.68.1.2.1.1",
"CISCO-VLAN-MEMBERSHIP-MIB::vmMembershipSummaryVlanIndex": "1.3.6.1.4.1.9.9.68.1.2.1.1.1",
"CISCO-VLAN-MEMBERSHIP-MIB::vmMembershipSummaryMemberPorts": "1.3.6.1.4.1.9.9.68.1.2.1.1.2",
"CISCO-VLAN-MEMBERSHIP-MIB::vmMembershipSummaryMember2kPorts": "1.3.6.1.4.1.9.9.68.1.2.1.1.3",
"CISCO-VLAN-MEMBERSHIP-MIB::vmMembershipTable": "1.3.6.1.4.1.9.9.68.1.2.2",
"CISCO-VLAN-MEMBERSHIP-MIB::vmMembershipEntry": "1.3.6.1.4.1.9.9.68.1.2.2.1",
"CISCO-VLAN-MEMBERSHIP-MIB::vmVlanType": "1.3.6.1.4.1.9.9.68.1.2.2.1.1",
"CISCO-VLAN-MEMBERSHIP-MIB::vmVlan": "1.3.6.1.4.1.9.9.68.1.2.2.1.2",
"CISCO-VLAN-MEMBERSHIP-MIB::vmPortStatus": "1.3.6.1.4.1.9.9.68.1.2.2.1.3",
"CISCO-VLAN-MEMBERSHIP-MIB::vmVlans": "1.3.6.1.4.1.9.9.68.1.2.2.1.4",
"CISCO-VLAN-MEMBERSHIP-MIB::vmVlans2k": "1.3.6.1.4.1.9.9.68.1.2.2.1.5",
"CISCO-VLAN-MEMBERSHIP-MIB::vmVlans3k": "1.3.6.1.4.1.9.9.68.1.2.2.1.6",
"CISCO-VLAN-MEMBERSHIP-MIB::vmVlans4k": "1.3.6.1.4.1.9.9.68.1.2.2.1.7",
"CISCO-VLAN-MEMBERSHIP-MIB::vmMembershipSummaryExtTable": "1.3.6.1.4.1.9.9.68.1.2.3",
"CISCO-VLAN-MEMBERSHIP-MIB::vmMembershipSummaryExtEntry": "1.3.6.1.4.1.9.9.68.1.2.3.1",
"CISCO-VLAN-MEMBERSHIP-MIB::vmMembershipPortRangeIndex": "1.3.6.1.4.1.9.9.68.1.2.3.1.1",
"CISCO-VLAN-MEMBERSHIP-MIB::vmMembershipSummaryExtPorts": "1.3.6.1.4.1.9.9.68.1.2.3.1.2",
"CISCO-VLAN-MEMBERSHIP-MIB::vmVlanCreationMode": "1.3.6.1.4.1.9.9.68.1.2.4",
"CISCO-VLAN-MEMBERSHIP-MIB::vmStatistics": "1.3.6.1.4.1.9.9.68.1.3",
"CISCO-VLAN-MEMBERSHIP-MIB::vmVQPQueries": "1.3.6.1.4.1.9.9.68.1.3.1",
"CISCO-VLAN-MEMBERSHIP-MIB::vmVQPResponses": "1.3.6.1.4.1.9.9.68.1.3.2",
"CISCO-VLAN-MEMBERSHIP-MIB::vmVmpsChanges": "1.3.6.1.4.1.9.9.68.1.3.3",
"CISCO-VLAN-MEMBERSHIP-MIB::vmVQPShutdown": "1.3.6.1.4.1.9.9.68.1.3.4",
"CISCO-VLAN-MEMBERSHIP-MIB::vmVQPDenied": "1.3.6.1.4.1.9.9.68.1.3.5",
"CISCO-VLAN-MEMBERSHIP-MIB::vmVQPWrongDomain": "1.3.6.1.4.1.9.9.68.1.3.6",
"CISCO-VLAN-MEMBERSHIP-MIB::vmVQPWrongVersion": "1.3.6.1.4.1.9.9.68.1.3.7",
"CISCO-VLAN-MEMBERSHIP-MIB::vmInsufficientResources": "1.3.6.1.4.1.9.9.68.1.3.8",
"CISCO-VLAN-MEMBERSHIP-MIB::vmStatus": "1.3.6.1.4.1.9.9.68.1.4",
"CISCO-VLAN-MEMBERSHIP-MIB::vmNotificationsEnabled": "1.3.6.1.4.1.9.9.68.1.4.1",
"CISCO-VLAN-MEMBERSHIP-MIB::vmVoiceVlan": "1.3.6.1.4.1.9.9.68.1.5",
"CISCO-VLAN-MEMBERSHIP-MIB::vmVoiceVlanTable": "1.3.6.1.4.1.9.9.68.1.5.1",
"CISCO-VLAN-MEMBERSHIP-MIB::vmVoiceVlanEntry": "1.3.6.1.4.1.9.9.68.1.5.1.1",
"CISCO-VLAN-MEMBERSHIP-MIB::vmVoiceVlanId": "1.3.6.1.4.1.9.9.68.1.5.1.1.1",
"CISCO-VLAN-MEMBERSHIP-MIB::vmVoiceVlanCdpVerifyEnable": "1.3.6.1.4.1.9.9.68.1.5.1.1.2",
"CISCO-VLAN-MEMBERSHIP-MIB::vmNotifications": "1.3.6.1.4.1.9.9.68.2",
"CISCO-VLAN-MEMBERSHIP-MIB::vmNotificationsPrefix": "1.3.6.1.4.1.9.9.68.2.0",
"CISCO-VLAN-MEMBERSHIP-MIB::vmVmpsChange": "1.3.6.1.4.1.9.9.68.2.0.1",
"CISCO-VLAN-MEMBERSHIP-MIB::vmMIBConformance": "1.3.6.1.4.1.9.9.68.3",
"CISCO-VLAN-MEMBERSHIP-MIB::vmMIBCompliances": "1.3.6.1.4.1.9.9.68.3.1",
"CISCO-VLAN-MEMBERSHIP-MIB::vmMIBGroups": "1.3.6.1.4.1.9.9.68.3.2",
}
DISPLAY_HINTS = {}
| 64.113924
| 98
| 0.62231
|
ab453c7b64fdd47b4cf51bb569f233871fe2b337
| 4,118
|
py
|
Python
|
harbor/tests/test_unit.py
|
tdimnet/integrations-core
|
a78133a3b71a1b8377fa214d121a98647031ab06
|
[
"BSD-3-Clause"
] | 663
|
2016-08-23T05:23:45.000Z
|
2022-03-29T00:37:23.000Z
|
harbor/tests/test_unit.py
|
tdimnet/integrations-core
|
a78133a3b71a1b8377fa214d121a98647031ab06
|
[
"BSD-3-Clause"
] | 6,642
|
2016-06-09T16:29:20.000Z
|
2022-03-31T22:24:09.000Z
|
harbor/tests/test_unit.py
|
tdimnet/integrations-core
|
a78133a3b71a1b8377fa214d121a98647031ab06
|
[
"BSD-3-Clause"
] | 1,222
|
2017-01-27T15:51:38.000Z
|
2022-03-31T18:17:51.000Z
|
# (C) Datadog, Inc. 2019-present
# All rights reserved
# Licensed under a 3-clause BSD style license (see LICENSE)
import pytest
from mock import MagicMock
from requests import HTTPError
from datadog_checks.base import AgentCheck
from datadog_checks.dev.http import MockResponse
from .common import HARBOR_COMPONENTS, HARBOR_VERSION, VERSION_1_5, VERSION_1_6, VERSION_1_8
def test_api__make_get_request(harbor_api):
    """_make_get_request returns the response JSON and raises on HTTP errors."""
    # Happy path: the JSON payload of a successful response is returned as-is.
    harbor_api.http = MagicMock()
    harbor_api.http.get = MagicMock(return_value=MockResponse(json_data={'json': True}))
    assert harbor_api._make_get_request('{base_url}/api/path') == {"json": True}

    # A 5xx response must surface as an HTTPError rather than being swallowed.
    harbor_api.http.get = MagicMock(return_value=MockResponse(status_code=500))
    with pytest.raises(HTTPError):
        harbor_api._make_get_request('{base_url}/api/path')
def test_api__make_paginated_get_request(harbor_api):
    """_make_paginated_get_request follows 'link' headers and concatenates pages."""
    # 20 items split into 10 pages of 2 items each.
    expected_result = [{'item': i} for i in range(20)]
    paginated_result = [[expected_result[i], expected_result[i + 1]] for i in range(0, len(expected_result) - 1, 2)]
    values = []
    for r in paginated_result:
        # Every page advertises a next page via the 'link' response header.
        values.append(MockResponse(json_data=r, headers={'link': 'Link: <unused_url>; rel=next; type="text/plain"'}))
    # The last page carries no 'link' header, which terminates pagination.
    values[-1].headers.pop('link')
    harbor_api.http = MagicMock()
    harbor_api.http.get = MagicMock(side_effect=values)

    assert harbor_api._make_paginated_get_request('{base_url}/api/path') == expected_result
def test_api__make_post_request(harbor_api):
    """_make_post_request mirrors the GET helper: JSON on success, raise on error."""
    # Happy path: the JSON payload of a successful response is returned as-is.
    harbor_api.http = MagicMock()
    harbor_api.http.post = MagicMock(return_value=MockResponse(json_data={'json': True}))
    assert harbor_api._make_post_request('{base_url}/api/path') == {"json": True}

    # A 5xx response must surface as an HTTPError rather than being swallowed.
    harbor_api.http.post = MagicMock(return_value=MockResponse(status_code=500))
    with pytest.raises(HTTPError):
        harbor_api._make_post_request('{base_url}/api/path')
| 43.347368
| 120
| 0.753035
|
ab47866bdd7f779d52254d019f551b3dccc349a3
| 2,649
|
py
|
Python
|
M-SPRING/template/adapter.py
|
CN-UPB/SPRING
|
1cb74919689e832987cb2c9b490eec7f09a64f52
|
[
"Apache-2.0"
] | 3
|
2019-09-27T08:07:11.000Z
|
2021-11-19T11:27:39.000Z
|
M-SPRING/template/adapter.py
|
CN-UPB/SPRING
|
1cb74919689e832987cb2c9b490eec7f09a64f52
|
[
"Apache-2.0"
] | null | null | null |
M-SPRING/template/adapter.py
|
CN-UPB/SPRING
|
1cb74919689e832987cb2c9b490eec7f09a64f52
|
[
"Apache-2.0"
] | null | null | null |
# module for adapting templates on the fly if components are reused
# check that all reused components are defined consistently -> else: exception
# check and return number of reuses
# return adapted templates with adapted reused components and exactly one arc per port (allows proportional output)
| 36.791667
| 116
| 0.690449
|
ab4824ab4c800c1d309f147567a8700135e66f6b
| 1,483
|
py
|
Python
|
source/vsm-dashboard/vsm_dashboard/test/test_data/swift_data.py
|
ramkrsna/virtual-storage-manager
|
78125bfb4dd4d78ff96bc3274c8919003769c545
|
[
"Apache-2.0"
] | 172
|
2015-01-07T08:40:17.000Z
|
2019-02-18T07:01:11.000Z
|
source/vsm-dashboard/vsm_dashboard/test/test_data/swift_data.py
|
ramkrsna/virtual-storage-manager
|
78125bfb4dd4d78ff96bc3274c8919003769c545
|
[
"Apache-2.0"
] | 83
|
2015-03-06T07:47:03.000Z
|
2018-07-05T15:10:19.000Z
|
source/vsm-dashboard/vsm_dashboard/test/test_data/swift_data.py
|
ramkrsna/virtual-storage-manager
|
78125bfb4dd4d78ff96bc3274c8919003769c545
|
[
"Apache-2.0"
] | 125
|
2015-01-05T12:22:15.000Z
|
2019-02-18T07:01:39.000Z
|
# Copyright 2012 Nebula, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from vsm_dashboard.api import swift
from .utils import TestDataContainer
| 38.025641
| 78
| 0.645988
|
ab48693089ba2d51e690249090e8808f1456a30c
| 17,971
|
py
|
Python
|
cinder/backup/driver.py
|
liangintel/stx-cinder
|
f4c43797a3f8c0caebfd8fb67244c084d26d9741
|
[
"Apache-2.0"
] | null | null | null |
cinder/backup/driver.py
|
liangintel/stx-cinder
|
f4c43797a3f8c0caebfd8fb67244c084d26d9741
|
[
"Apache-2.0"
] | 2
|
2018-10-25T13:04:01.000Z
|
2019-08-17T13:15:24.000Z
|
cinder/backup/driver.py
|
liangintel/stx-cinder
|
f4c43797a3f8c0caebfd8fb67244c084d26d9741
|
[
"Apache-2.0"
] | 2
|
2018-10-17T13:32:50.000Z
|
2018-11-08T08:39:39.000Z
|
# Copyright (C) 2013 Deutsche Telekom AG
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Base class for all backup drivers."""
import abc
from oslo_config import cfg
from oslo_log import log as logging
from oslo_serialization import jsonutils
import six
from cinder.db import base
from cinder import exception
from cinder.i18n import _
from cinder import keymgr as key_manager
# Tunables for the backup service; registered on the global CONF object below.
service_opts = [
    cfg.IntOpt('backup_metadata_version', default=2,
               help='Backup metadata version to be used when backing up '
                    'volume metadata. If this number is bumped, make sure the '
                    'service doing the restore supports the new version.'),
    cfg.IntOpt('backup_object_number_per_notification',
               default=10,
               help='The number of chunks or objects, for which one '
                    'Ceilometer notification will be sent'),
    cfg.IntOpt('backup_timer_interval',
               default=120,
               help='Interval, in seconds, between two progress notifications '
                    'reporting the backup status'),
]

CONF = cfg.CONF
CONF.register_opts(service_opts)

# Module-level logger named after this module, per oslo.log convention.
LOG = logging.getLogger(__name__)
| 40.566591
| 79
| 0.615492
|
ab48971329305ae9b948d764b897a02b20e65a2f
| 67
|
py
|
Python
|
__init__.py
|
ENDERZOMBI102/chained
|
d01f04d1eb9a913f64cea9da52e61d91300315ff
|
[
"MIT"
] | null | null | null |
__init__.py
|
ENDERZOMBI102/chained
|
d01f04d1eb9a913f64cea9da52e61d91300315ff
|
[
"MIT"
] | null | null | null |
__init__.py
|
ENDERZOMBI102/chained
|
d01f04d1eb9a913f64cea9da52e61d91300315ff
|
[
"MIT"
] | null | null | null |
from .chainOpen import chainOpen
__all__ = [
'chainOpen'
]
| 13.4
| 33
| 0.656716
|
ab4a651d98707257763d7fecd97ef8404192f74c
| 1,146
|
py
|
Python
|
code/reasoningtool/tests/QuerySciGraphTests.py
|
andrewsu/RTX
|
dd1de262d0817f7e6d2f64e5bec7d5009a3a2740
|
[
"MIT"
] | 31
|
2018-03-05T20:01:10.000Z
|
2022-02-01T03:31:22.000Z
|
code/reasoningtool/tests/QuerySciGraphTests.py
|
andrewsu/RTX
|
dd1de262d0817f7e6d2f64e5bec7d5009a3a2740
|
[
"MIT"
] | 1,774
|
2018-03-06T01:55:03.000Z
|
2022-03-31T03:09:04.000Z
|
code/reasoningtool/tests/QuerySciGraphTests.py
|
andrewsu/RTX
|
dd1de262d0817f7e6d2f64e5bec7d5009a3a2740
|
[
"MIT"
] | 19
|
2018-05-10T00:43:19.000Z
|
2022-03-08T19:26:16.000Z
|
import unittest
from QuerySciGraph import QuerySciGraph
if __name__ == '__main__':
unittest.main()
| 44.076923
| 101
| 0.620419
|
ab4c81650d8bacd66796cfc5a7b9384015825cae
| 1,342
|
py
|
Python
|
ledis/cli.py
|
gianghta/Ledis
|
a6b31617621746344408ee411cf510ef3cfb2e7b
|
[
"MIT"
] | null | null | null |
ledis/cli.py
|
gianghta/Ledis
|
a6b31617621746344408ee411cf510ef3cfb2e7b
|
[
"MIT"
] | null | null | null |
ledis/cli.py
|
gianghta/Ledis
|
a6b31617621746344408ee411cf510ef3cfb2e7b
|
[
"MIT"
] | null | null | null |
from typing import Any
from ledis import Ledis
from ledis.exceptions import InvalidUsage
| 29.173913
| 78
| 0.520119
|
ab4cd7dafdbec4a4f671b37357e68833614883fc
| 866
|
py
|
Python
|
ClosedLoopTF.py
|
nazhanshaberi/miniature-octo-barnacle
|
eb1a8b5366003bf2d0f7e89af9d9dea120965f4f
|
[
"MIT"
] | null | null | null |
ClosedLoopTF.py
|
nazhanshaberi/miniature-octo-barnacle
|
eb1a8b5366003bf2d0f7e89af9d9dea120965f4f
|
[
"MIT"
] | null | null | null |
ClosedLoopTF.py
|
nazhanshaberi/miniature-octo-barnacle
|
eb1a8b5366003bf2d0f7e89af9d9dea120965f4f
|
[
"MIT"
] | null | null | null |
#group 1: Question 1(b)
# A control system for positioning the head of a laser printer has the closed loop transfer function:
# !pip install control
import matplotlib.pyplot as plt
import control

# Plant parameters for this question.
a=10 #Value for a
b=50 #value for b

# 3rd-order closed-loop system: T1(s) = 20b / (s^3 + (20+a)s^2 + (b+20a)s + 20b).
sys1 = control.tf(20*b,[1,20+a,b+20*a,20*b])
print('3rd order system transfer function T1(s)=',sys1)
# 2nd-order approximation: T2(s) = b / (s^2 + a*s + b).
sys2=control.tf(b,[1,a,b])
print('2nd order system transfer funtion T2(s)',sys2)

# Poles of the full 3rd-order system, rounded to 2 decimals for display.
value = sys1.pole()
list_of_poles = [pole.round(2) for pole in value]
print('poles',list_of_poles)

# Step responses of both models; step_response returns (time, output) pairs.
y1=control.step_response(sys1)
y2=control.step_response(sys2)
plt.plot(y1[0],y1[1],'r--', label='3rd order actual system')
plt.plot(y2[0],y2[1],'g', label='2nd order approximation system')
plt.legend()
plt.grid()
plt.xlabel('time (s)')
plt.ylabel('step response y(t)')
plt.title('step response comparison of 3rd and 2nd order system')
plt.show()
| 29.862069
| 101
| 0.725173
|
ab4d5adf0a4cf40d756ef93b4de1fbf8fed57093
| 1,953
|
py
|
Python
|
example_project/test_messages/bbcode_tags.py
|
bastiedotorg/django-precise-bbcode
|
567a8a7f104fb7f2c9d59f304791e53d2d8f4dea
|
[
"BSD-3-Clause"
] | 30
|
2015-01-02T13:43:56.000Z
|
2021-02-08T18:43:09.000Z
|
example_project/test_messages/bbcode_tags.py
|
bastiedotorg/django-precise-bbcode
|
567a8a7f104fb7f2c9d59f304791e53d2d8f4dea
|
[
"BSD-3-Clause"
] | 31
|
2015-01-16T00:25:19.000Z
|
2021-12-11T16:40:03.000Z
|
example_project/test_messages/bbcode_tags.py
|
bastiedotorg/django-precise-bbcode
|
567a8a7f104fb7f2c9d59f304791e53d2d8f4dea
|
[
"BSD-3-Clause"
] | 13
|
2015-07-16T23:25:10.000Z
|
2020-08-23T20:12:24.000Z
|
import re
from precise_bbcode.bbcode.tag import BBCodeTag
from precise_bbcode.tag_pool import tag_pool
# Accepts a lowercase color name or a 3-6 hex-digit color code (e.g. "#fff").
color_re = re.compile(r'^([a-z]+|#[0-9abcdefABCDEF]{3,6})$')

# Register the example tags with the global tag pool so the bbcode parser
# picks them up. NOTE(review): the tag classes are not defined in the visible
# portion of this module — confirm they are declared above these calls.
tag_pool.register_tag(SubTag)
tag_pool.register_tag(PreTag)
tag_pool.register_tag(SizeTag)
tag_pool.register_tag(FruitTag)
tag_pool.register_tag(PhoneLinkTag)
tag_pool.register_tag(StartsWithATag)
tag_pool.register_tag(RoundedBBCodeTag)
| 97
| 0.673835
|
ab4de19d0181da877a10411de0bdd3a02265b4f5
| 1,567
|
py
|
Python
|
tests/test_vmtkScripts/test_vmtksurfacescaling.py
|
ramtingh/vmtk
|
4d6f58ce65d73628353ba2b110cbc29a2e7aa7b3
|
[
"Apache-2.0"
] | null | null | null |
tests/test_vmtkScripts/test_vmtksurfacescaling.py
|
ramtingh/vmtk
|
4d6f58ce65d73628353ba2b110cbc29a2e7aa7b3
|
[
"Apache-2.0"
] | null | null | null |
tests/test_vmtkScripts/test_vmtksurfacescaling.py
|
ramtingh/vmtk
|
4d6f58ce65d73628353ba2b110cbc29a2e7aa7b3
|
[
"Apache-2.0"
] | 1
|
2019-06-18T23:41:11.000Z
|
2019-06-18T23:41:11.000Z
|
## Program: VMTK
## Language: Python
## Date: January 10, 2018
## Version: 1.4
## Copyright (c) Richard Izzo, Luca Antiga, All rights reserved.
## See LICENSE file for details.
## This software is distributed WITHOUT ANY WARRANTY; without even
## the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
## PURPOSE. See the above copyright notices for more information.
## Note: this code was contributed by
## Richard Izzo (Github @rlizzo)
## University at Buffalo
import pytest
import vmtk.vmtksurfacescaling as scaling
| 31.34
| 75
| 0.678366
|
ab4e0820c560508db4fbda99cab044003f105f34
| 155,374
|
py
|
Python
|
tencentcloud/vpc/v20170312/models.py
|
yangyimincn/tencentcloud-sdk-python
|
1d4f1bd83bb57a91bb6d2631131a339bc1f9b91d
|
[
"Apache-2.0"
] | null | null | null |
tencentcloud/vpc/v20170312/models.py
|
yangyimincn/tencentcloud-sdk-python
|
1d4f1bd83bb57a91bb6d2631131a339bc1f9b91d
|
[
"Apache-2.0"
] | null | null | null |
tencentcloud/vpc/v20170312/models.py
|
yangyimincn/tencentcloud-sdk-python
|
1d4f1bd83bb57a91bb6d2631131a339bc1f9b91d
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf8 -*-
# Copyright 1999-2017 Tencent Ltd.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from tencentcloud.common.abstract_model import AbstractModel
| 29.914132
| 366
| 0.643827
|
ab4e7cdf2bacce34a3021f862bd5b5457c0c010e
| 3,677
|
py
|
Python
|
mlcsim/dist.py
|
nobodywasishere/MLCSim
|
a3eb3d39b6970a4e706e292c6a283531fb44350c
|
[
"MIT"
] | null | null | null |
mlcsim/dist.py
|
nobodywasishere/MLCSim
|
a3eb3d39b6970a4e706e292c6a283531fb44350c
|
[
"MIT"
] | null | null | null |
mlcsim/dist.py
|
nobodywasishere/MLCSim
|
a3eb3d39b6970a4e706e292c6a283531fb44350c
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
"""Distribution functions
This module provides functions for dealing with normal distributions
and generating error maps.
When called directly as main, it allows for converting a threshold map
into an error map.
```
$ python -m mlcsim.dist --help
usage: dist.py [-h] [-b {1,2,3,4}] -f F [-o O]
options:
-h, --help show this help message and exit
-b {1,2,3,4} bits per cell
-f F Threshold map json to convert
-o O output to file
```
"""
import argparse
import json
from pprint import pprint
from typing import Dict, List
import numpy as np
from scipy import stats as ss # type: ignore
# https://stackoverflow.com/a/32574638/9047818
# https://stackoverflow.com/a/13072714/9047818
def normalMidpoint(mean_a: float, mean_b: float, std_a: float, std_b: float) -> float:
    """Find the midpoint between two normal distributions

    Equates the two log-PDFs and solves the resulting quadratic, keeping the
    root that lies between the two means (where the curves cross).

    Args:
        mean_a (float): Mean of first distribution
        mean_b (float): Mean of second distribution
        std_a (float): Std dev of first distribution
        std_b (float): Std dev of second distribution

    Returns:
        float: Midpoint between distributions
    """
    a = 1 / (2 * std_a**2) - 1 / (2 * std_b**2)
    b = mean_b / (std_b**2) - mean_a / (std_a**2)
    c = (
        mean_a**2 / (2 * std_a**2)
        - mean_b**2 / (2 * std_b**2)
        - np.log(std_b / std_a)
    )
    roots = np.roots([a, b, c])
    # Discard any root outside [mean_a, mean_b].
    masked = np.ma.masked_outside(roots, mean_a, mean_b)
    # Bug fix: `masked[~masked.mask][0][0]` only worked when the mask collapsed
    # to nomask (boolean-scalar indexing added an axis); with a shaped mask the
    # second [0] indexed a scalar and raised IndexError. compressed() returns
    # the unmasked roots as a plain 1-D array in every case.
    return float(masked.compressed()[0])
# https://www.askpython.com/python/normal-distribution
def normalChance(mean: float, stdev: float, thr: float) -> float:
    """Find the chance of a normal distribution above/below a given value

    Args:
        mean (float): Mean of the distribution
        stdev (float): Std dev of the distribution
        thr (float): Threshold to check above/below

    Returns:
        float: Chance for threshold to end up above/below the given point in the distribution
    """
    below = float(ss.norm(loc=mean, scale=stdev).cdf(thr))
    if mean > thr:
        # Distribution sits above the threshold: report the tail below it.
        return below
    # Otherwise report the tail above it.
    return 1.0 - below
def genErrorMap(thr_maps: Dict[str, List[List[float]]], bpc: int) -> List[List[float]]:
    """Generate an error map from a threshold map

    Args:
        thr_maps (dict): Threshold map, keyed by str(bits-per-cell)
        bpc (int): Bits per cell

    Raises:
        ValueError: if the given bpc is not in the threshold map

    Returns:
        list: Error map from the threshold map
    """
    key = str(bpc)
    if key not in thr_maps:
        raise ValueError(f"Threshold map does not have values for {bpc} levels")

    levels = thr_maps[key]
    # Outermost levels can never err outward, hence the 0.0 sentinels.
    err_map: List[List[float]] = [[0.0]]
    for lower, upper in zip(levels, levels[1:]):
        boundary = normalMidpoint(lower[0], upper[0], lower[1], upper[1])
        # Chance the lower level reads above the boundary, and vice versa.
        err_map[-1].append(normalChance(lower[0], lower[1], boundary))
        err_map.append([normalChance(upper[0], upper[1], boundary)])
    err_map[-1].append(0.0)
    return err_map
if __name__ == "__main__":
    # CLI entry point: converts a threshold map JSON into an error map.
    # NOTE(review): _main is not defined in the visible portion of this
    # module — confirm it exists above, otherwise this raises NameError.
    _main()
| 27.856061
| 93
| 0.627142
|
ab4ecc2d3d04743c00cc721399cf77a91c741662
| 2,063
|
py
|
Python
|
Pr-Lab5/lab5.py
|
JackShen1/pr-labs
|
c84df379d8f7b26ccff30248dfb23ae38e0ce7c2
|
[
"MIT"
] | 2
|
2021-02-25T11:42:06.000Z
|
2021-03-08T20:43:44.000Z
|
Pr-Lab5/lab5.py
|
JackShen1/pr-labs
|
c84df379d8f7b26ccff30248dfb23ae38e0ce7c2
|
[
"MIT"
] | null | null | null |
Pr-Lab5/lab5.py
|
JackShen1/pr-labs
|
c84df379d8f7b26ccff30248dfb23ae38e0ce7c2
|
[
"MIT"
] | null | null | null |
# Nested mapping: continent -> {country: (capital, area in km^2, population)}.
earth = {
    "Asia":
        {'Japan': ("Tokyo", 377975, 125620000)},
    "Europe":
        {'Austria': ("Vienna", 83800, 8404000),
         'Germany': ("Berlin", 357000, 81751000),
         'Great Britain': ("London", 244800, 62700000),
         'Iceland': ("Reykjavk", 103000, 317630),
         'Italy': ("Rome", 301400, 60605000),
         'Spain': ("Madrid", 506000, 46162000),
         'Ukraine': ("Kyiv", 603700, 45562000)}
}

# Look up either a whole continent or a single country by name.
# NOTE(review): the Earth class is not defined in the visible part of this
# module — confirm it is declared above, otherwise this raises NameError.
input_str = input("Enter the name of the continent or country: ")
if input_str.title() in earth.keys():
    Earth(input_str).print_continent()
else:
    print(Earth(continent=None).print_country(input_str))
| 38.203704
| 125
| 0.49491
|
ab4efdf65ad735b02d6de387aa7fd48994cd82b2
| 1,507
|
bzl
|
Python
|
vue/repositories.bzl
|
ubiquitoustech/rules_vue
|
759786eae1b6caf647b1c6018e16030a66e486e2
|
[
"Apache-2.0"
] | null | null | null |
vue/repositories.bzl
|
ubiquitoustech/rules_vue
|
759786eae1b6caf647b1c6018e16030a66e486e2
|
[
"Apache-2.0"
] | null | null | null |
vue/repositories.bzl
|
ubiquitoustech/rules_vue
|
759786eae1b6caf647b1c6018e16030a66e486e2
|
[
"Apache-2.0"
] | null | null | null |
"""Declare runtime dependencies
These are needed for local dev, and users must install them as well.
See https://docs.bazel.build/versions/main/skylark/deploying.html#dependencies
"""
load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive")
load("@bazel_tools//tools/build_defs/repo:utils.bzl", "maybe")
# WARNING: any changes in this function may be BREAKING CHANGES for users
# because we'll fetch a dependency which may be different from one that
# they were previously fetching later in their WORKSPACE setup, and now
# ours took precedence. Such breakages are challenging for users, so any
# changes in this function should be marked as BREAKING in the commit message
# and released only in semver majors.
| 45.666667
| 126
| 0.7286
|
ab4f57f26ec8e3a5f4f9c3add8fa33115729abc3
| 3,956
|
py
|
Python
|
endpoints/api/test/test_tag.py
|
kwestpharedhat/quay
|
a0df895005bcd3e53847046f69f6a7add87c88fd
|
[
"Apache-2.0"
] | null | null | null |
endpoints/api/test/test_tag.py
|
kwestpharedhat/quay
|
a0df895005bcd3e53847046f69f6a7add87c88fd
|
[
"Apache-2.0"
] | null | null | null |
endpoints/api/test/test_tag.py
|
kwestpharedhat/quay
|
a0df895005bcd3e53847046f69f6a7add87c88fd
|
[
"Apache-2.0"
] | null | null | null |
import pytest
from playhouse.test_utils import assert_query_count
from data.registry_model import registry_model
from data.database import Manifest
from endpoints.api.test.shared import conduct_api_call
from endpoints.test.shared import client_with_identity
from endpoints.api.tag import RepositoryTag, RestoreTag, ListRepositoryTags
from test.fixtures import *
| 34.4
| 146
| 0.654954
|
ab4f80a6a89a2ba8ed869ff8442a2bb12f645322
| 8,541
|
py
|
Python
|
inventory.py
|
Jongerr/vendor_receiving
|
f69f09a5b41d38b45e9ea0bf82590bb27ce913f6
|
[
"MIT"
] | null | null | null |
inventory.py
|
Jongerr/vendor_receiving
|
f69f09a5b41d38b45e9ea0bf82590bb27ce913f6
|
[
"MIT"
] | null | null | null |
inventory.py
|
Jongerr/vendor_receiving
|
f69f09a5b41d38b45e9ea0bf82590bb27ce913f6
|
[
"MIT"
] | null | null | null |
import json
import os
import random
import requests
from passlib.hash import pbkdf2_sha256 as pbk
from PyQt5.QtSql import QSqlDatabase, QSqlQuery
from pprint import pprint
ENCODING = 'utf-8'
DB_PATH = os.path.join(os.path.curdir, 'inventory.db')
def scrambleWord(word):
    """Randomize the letters in word and return the resulting string."""
    letters = list(word)
    random.shuffle(letters)
    return ''.join(letters)
def generateItems():
    """Generate a dictionary of retail products and store the data in items.json.

    Pulls a list of items and artificially doubles it with scrambled item names.
    Each item is given a random PLU, UPC, and department number.
    Each dictionary key is the item's PLU.
    """
    response = requests.get('https://www.randomlists.com/data/things.json')
    items = response.json()['RandL']['items']
    # Double the sample size by appending a scrambled copy of every name.
    items = items + [scrambleWord(item) for item in items]

    data = {}
    for item in items:
        # Seed per item so the same name always yields the same codes.
        random.seed(item)
        upc = random.randint(100000000000, 999999999999)
        plu = random.randint(1000, 9999999)
        department = (plu % 7) + 1
        print('UPC:{0} | PLU:{1} | Item:{2} | D{3}'.format(upc, plu, item, department))
        if plu in data:
            print('Duplicate found: {}'.format(plu))
            continue
        data[plu] = {'upc':upc, 'department':department, 'model':item}

    with open('items.json', 'w') as f:
        json.dump(data, f)
def generatePO():
    """Create dummy Purchase Orders and store them in pos.json.

    Each PO is assigned one random vendor and department number,
    along with a random length list of items belonging to said department.

    Returns: True if items.json successfully opens, False otherwise.
    """
    try:
        with open('items.json', 'r') as f:
            items_dict = json.load(f)
    except FileNotFoundError:
        return False
    vendors = ['Dyson', 'Ingrammicro', 'LKG', 'Inland', 'Sandisk', 'Seagate', 'Hasbro', 'Mattel',\
               'Gear Head', 'Logitech', 'NTE', 'Dell', 'Microsoft', 'Right Stuff', 'Alliance', 'Energizer']
    po_dict = {}
    # Draw up to 50 PO numbers; duplicate draws are skipped, so fewer may result.
    for i in range(50):
        po_num = 24000000 + random.randint(1, 999999)
        if po_num in po_dict:
            continue
        # Department is derived from the PO number, so it is stable per PO.
        po_dict[po_num] = {'department': (po_num % 7) + 1, 'items': {}, 'vendor': random.choice(vendors)}
    # Assign every item to a PO of its own department by rejection sampling.
    for key in items_dict:
        match_found = False
        loops = 0
        while not match_found:
            loops += 1
            # NOTE(review): after 200 failed draws the item is silently dropped
            # from every PO — confirm that is acceptable for this mock data.
            if loops > 200:
                print('\n\nToo many loops.\n\n')
                break
            po, department = random.choice(list(po_dict.items()))
            department = department['department']
            print('PO department: {}'.format(department))
            print('item plu: {} department: {}'.format(key, items_dict[key]['department']))
            if items_dict[key]['department'] == department:
                max_count = random.randint(1, 20)
                po_dict[po]['items'][key] = max_count
                match_found = True
    with open('pos.json', 'w') as f:
        json.dump(po_dict, f)
    return True
def fillDB():
    """Create a database and populate two tables(named items and purchase_order).

    The 'items' and 'purchase_order' tables are populated with the data from items.json
    and pos.json respectively.

    Returns:
        False if the SQLite database could not be opened; None otherwise.
    """
    with open('items.json') as f:
        data = json.load(f)
    db = QSqlDatabase.addDatabase('QSQLITE')
    db.setDatabaseName(DB_PATH)
    if not db.open():
        print('DB could not be opened')
        error = QSqlDatabase.lastError()
        print(error.text())
        return False
    query = QSqlQuery()
    # Drop and recreate the items table so each run starts from a clean slate.
    if query.exec_("drop table items"):
        print('successfully dropped table')
    else:
        print('unsuccessfully dropped table')
        print(query.lastError().text())
    if query.exec_("create table items(plu int primary key, upc varchar(12) unique, "
                   "model varchar(20), department int)"):
        print('success')
    else:
        print('failure')
        print(query.lastError().text())
    # NOTE(review): SQL is assembled with str.format. Tolerable for this locally
    # generated mock data, but switch to bound values (QSqlQuery.addBindValue)
    # before any untrusted input flows through this path.
    for key in data:
        if query.exec_("insert into items values({}, '{}', '{}', {})".format(key, data[key]['upc'],
                       data[key]['model'], data[key]['department'])):
            print("values({}, {}, {}, {}) successfully inserted.".format(key, data[key]['upc'], data[key]['model'], data[key]['department']))
        else:
            print("values({}, {}, {}, {}) unsuccessfully inserted.".format(key, data[key]['upc'], data[key]['model'], data[key]['department']))
            print(query.lastError().text())
    with open('pos.json') as f:
        po_dict = json.load(f)
    # Same drop-and-recreate cycle for the purchase_order table.
    if query.exec_("drop table purchase_order"):
        print('successfully dropped table')
    else:
        print('unsuccessfully dropped table')
        print(query.lastError().text())
    if query.exec_("create table purchase_order(po int primary key, vendor varchar(30), "
                   "department int, items blob)"):
        print('success')
    else:
        print('failure')
        print(query.lastError().text())
    for key in po_dict:
        # The PO's item counts are stored as a JSON string in the blob column.
        item_string = json.dumps(po_dict[key]['items'])
        if query.exec_("insert into purchase_order values({}, '{}', {}, '{}')"\
                       .format(key, po_dict[key]['vendor'], po_dict[key]['department'], item_string)):
            print("values({}, {}, {}, {}) successfully inserted."\
                  .format(key, po_dict[key]['vendor'], po_dict[key]['department'], item_string))
        else:
            # Bug fix: this message previously printed item_blob (a bytes object),
            # yielding a b'...' repr inconsistent with the success message; the
            # unused encode step was removed along with it.
            print("values({}, {}, {}, {}) unsuccessfully inserted."\
                  .format(key, po_dict[key]['vendor'], po_dict[key]['department'], item_string))
            print(query.lastError().text())
if __name__ == '__main__':
    # Regenerate all mock data, then rebuild the SQLite database from it.
    generateItems()
    generatePO()
    fillDB()
    # NOTE(review): createEmployeeTable and testHashVerification are not defined
    # in the visible portion of this module — confirm they exist elsewhere,
    # otherwise these calls raise NameError.
    createEmployeeTable()
    testHashVerification('Terry')
| 37.296943
| 143
| 0.583772
|
ab4fc96f582ec2e7dbdc6b88ad13480fe26a5ca3
| 1,749
|
py
|
Python
|
lnbits/core/views/lnurl.py
|
frennkie/lnbits
|
5fe64d324dc7ac05d1d0fc25eb5ad6a5a414ea8a
|
[
"MIT"
] | null | null | null |
lnbits/core/views/lnurl.py
|
frennkie/lnbits
|
5fe64d324dc7ac05d1d0fc25eb5ad6a5a414ea8a
|
[
"MIT"
] | null | null | null |
lnbits/core/views/lnurl.py
|
frennkie/lnbits
|
5fe64d324dc7ac05d1d0fc25eb5ad6a5a414ea8a
|
[
"MIT"
] | null | null | null |
import requests
from flask import abort, redirect, request, url_for
from lnurl import LnurlWithdrawResponse, handle as handle_lnurl
from lnurl.exceptions import LnurlException
from time import sleep
from lnbits.core import core_app
from lnbits.helpers import Status
from lnbits.settings import WALLET
from ..crud import create_account, get_user, create_wallet, create_payment
| 30.155172
| 108
| 0.70669
|
ab5224dab7764f41af318140ad4ebc3291d1cf50
| 1,507
|
py
|
Python
|
driver_training/driver_training.py
|
munishm/MLOpsPython
|
e3ee31f6a0cac645a2b3ad945b8263e07d3085e4
|
[
"MIT"
] | null | null | null |
driver_training/driver_training.py
|
munishm/MLOpsPython
|
e3ee31f6a0cac645a2b3ad945b8263e07d3085e4
|
[
"MIT"
] | null | null | null |
driver_training/driver_training.py
|
munishm/MLOpsPython
|
e3ee31f6a0cac645a2b3ad945b8263e07d3085e4
|
[
"MIT"
] | null | null | null |
# Import libraries
import argparse
from azureml.core import Run
import joblib
import json
import os
import pandas as pd
import shutil
# Import functions from train.py
from train import split_data, train_model, get_model_metrics
# Get the output folder for the model from the '--output_folder' parameter
parser = argparse.ArgumentParser()
parser.add_argument('--output_folder', type=str, dest='output_folder', default="outputs")
args = parser.parse_args()
print(args)
output_folder = args.output_folder
# Get the experiment run context
run = Run.get_context()
# load the safe driver prediction dataset
# NOTE(review): the CSV is expected in the working directory — presumably
# mounted/downloaded by the AzureML job; confirm against the pipeline setup.
train_df = pd.read_csv('porto_seguro_safe_driver_prediction_input.csv')
# Load the parameters for training the model from the file
with open("parameters.json") as f:
    pars = json.load(f)
    parameters = pars["training"]
# Log each of the parameters to the run
for param_name, param_value in parameters.items():
    run.log(param_name, param_value)
# Call the functions defined in this file
train_data, valid_data = split_data(train_df)
data = [train_data, valid_data]
model = train_model(data, parameters)
# Print the resulting metrics for the model, and log each one to the run
model_metrics = get_model_metrics(model, data)
print(model_metrics)
for k, v in model_metrics.items():
    run.log(k, v)
# Save the trained model to the output folder
os.makedirs(output_folder, exist_ok=True)
output_path = output_folder + "/porto_seguro_safe_driver_model.pkl"
joblib.dump(value=model, filename=output_path)
run.complete()
| 27.4
| 89
| 0.780358
|
ab52d48313f3351abea75bf8f470d2befb8d9c79
| 62
|
py
|
Python
|
tests/__init__.py
|
madman-bob/python-lua-imports
|
76d3765b03a0478544214022118e5a4a13f6e36a
|
[
"MIT"
] | 3
|
2020-04-15T00:00:42.000Z
|
2021-03-07T19:58:44.000Z
|
tests/__init__.py
|
madman-bob/python-lua-imports
|
76d3765b03a0478544214022118e5a4a13f6e36a
|
[
"MIT"
] | null | null | null |
tests/__init__.py
|
madman-bob/python-lua-imports
|
76d3765b03a0478544214022118e5a4a13f6e36a
|
[
"MIT"
] | 1
|
2020-10-05T16:21:53.000Z
|
2020-10-05T16:21:53.000Z
|
from lua_imports import lua_importer
lua_importer.register()
| 15.5
| 36
| 0.854839
|
ab5634ebc042f640696c7b2e6eb9efb69e3e48b5
| 258
|
py
|
Python
|
app/models/product.py
|
VyachAp/SalesFABackend
|
dcbe1b5106c030ee07535795dfd7b97613a1203d
|
[
"MIT"
] | 80
|
2020-10-06T00:35:57.000Z
|
2022-03-31T19:56:24.000Z
|
app/models/product.py
|
VyachAp/SalesFABackend
|
dcbe1b5106c030ee07535795dfd7b97613a1203d
|
[
"MIT"
] | 1
|
2021-03-28T06:17:41.000Z
|
2021-04-03T06:41:09.000Z
|
app/models/product.py
|
VyachAp/SalesFABackend
|
dcbe1b5106c030ee07535795dfd7b97613a1203d
|
[
"MIT"
] | 24
|
2020-11-14T03:04:43.000Z
|
2022-03-11T15:44:44.000Z
|
from sqlalchemy import Column, Integer, String, Float
from app.database.base_class import Base
| 25.8
| 54
| 0.744186
|
ab57b93b87294fbdfc94236ad38a6b407c2435a8
| 7,463
|
py
|
Python
|
katana/utils/directory_traversal_utils.py
|
warriorframework/Katanaframework
|
9dc78df9d0c8f19ef5eaaa8690fbfa1ad885b323
|
[
"Apache-2.0"
] | 1
|
2020-09-30T11:14:14.000Z
|
2020-09-30T11:14:14.000Z
|
katana/utils/directory_traversal_utils.py
|
warriorframework/Katanaframework
|
9dc78df9d0c8f19ef5eaaa8690fbfa1ad885b323
|
[
"Apache-2.0"
] | 4
|
2020-06-06T01:55:04.000Z
|
2021-06-10T22:57:50.000Z
|
katana/utils/directory_traversal_utils.py
|
warriorframework/Katanaframework
|
9dc78df9d0c8f19ef5eaaa8690fbfa1ad885b323
|
[
"Apache-2.0"
] | 1
|
2020-09-17T08:20:09.000Z
|
2020-09-17T08:20:09.000Z
|
import glob
import os
import re
import errno
import shutil
def get_sub_dirs_and_files(path, abs_path=False):
    """
    Gets the direct child sub-files and sub-folders of the given directory

    Args:
        path: Absolute path to the directory
        abs_path: If set to True, it returns a list of absolute paths to the sub-directories and
                  sub-files instead of directory names only

    Returns:
        dict: {"folders": [list of (if abs_path is True, then path to) sub-folders],
               "files": [list of (if abs_path is True, then path to) sub-files]}
    """
    return {
        "folders": get_sub_folders(path, abs_path=abs_path),
        "files": get_sub_files(path, abs_path=abs_path),
    }
def get_sub_folders(path, abs_path=False):
    """
    Gets the direct child sub-folders of the given directory

    Args:
        path: Absolute path to the directory
        abs_path: If set to True, it returns a list of absolute paths to the sub-directories
                  instead of directory names only

    Returns:
        only_folders: [list of sub-folders]
    """
    matches = [
        entry for entry in glob.glob(path + os.sep + "*")
        if os.path.isdir(entry) and not entry.endswith('__pycache__')
    ]
    # Normalize Windows separators so callers always see forward slashes.
    normalized = [entry.replace("\\", '/') for entry in matches]
    if abs_path:
        return normalized
    return [entry.rpartition('/')[2] for entry in normalized]
def get_sub_files(path, abs_path=False):
    """
    Gets the direct child sub-files of the given directory

    Args:
        path: Absolute path to the directory
        abs_path: If set to True, it returns a list of absolute paths to the sub-files instead of
                  file names only

    Returns:
        only_files: [list of sub-files]
    """
    # "*.*" deliberately matches only entries containing a dot (extension).
    normalized = [entry.replace("\\", '/') for entry in glob.glob(path + os.sep + "*.*")]
    if abs_path:
        return normalized
    return [entry.rpartition('/')[2] for entry in normalized]
def get_abs_path(relative_path, base_path=None, silence_error=False):
    """
    Gets the absolute path from the given relative_path and base_path

    Args:
        relative_path: relative path to the file/directory
        base_path: absolute path from where the relative path should be traced. If not provided, the
                   current working directory path will be used.
        silence_error: Setting this to True would not verify if the directory exists

    Returns:
        path: absolute path derived from relative_path and base_path
    """
    base = os.getcwd() if base_path is None else base_path
    joined = os.path.join(base.strip(), relative_path.strip())
    if silence_error or os.path.exists(joined):
        return joined
    # Existence check failed and the caller asked for verification.
    print("An Error Occurred: {0} does not exist".format(joined))
    return None
def get_parent_directory(directory_path, level=1):
    """
    Gets the parent directory

    Args:
        directory_path: Absolute path to the file/dir who's parent needs to be returned
        level: Indicates how many levels up to go to find the parent
               eg: default of 1 goes one level up (to the parent directory)
               level=2 would get the grandparent directory

    Returns:
        The ancestor path `level` steps above directory_path.
    """
    current = directory_path
    # A trailing separator would make the first dirname() a no-op; strip it.
    if current.endswith(os.sep):
        current = current[:-1]
    for _ in range(level):
        current = os.path.dirname(current)
    return current
def get_paths_of_subfiles(parent_dir, extension=re.compile(r"\..*")):
    """
    This function returns a list of all the sub-files inside the given directory

    Args:
        parent_dir: Absolute path to the directory
        extension: Regular Expression tha would match a file extension. If not provided, file paths
                   of all extension will be returned

    Returns:
        file_path: Returns a list of paths to sub-files inside the parent_dir
    """
    # Fix: the pattern is now a raw string; "\..*" contains the invalid escape
    # sequence "\." which raises SyntaxWarning on Python 3.12+ (same pattern value).
    file_paths = []
    sub_files_and_folders = get_sub_dirs_and_files(parent_dir, abs_path=True)
    for sub_file in sub_files_and_folders["files"]:
        if extension.match(os.path.splitext(sub_file)[1]):
            file_paths.append(sub_file)
    # Recurse into each sub-folder, accumulating matches depth-first.
    for sub_folder in sub_files_and_folders["folders"]:
        file_paths.extend(get_paths_of_subfiles(sub_folder, extension=extension))
    return file_paths
def get_dir_from_path(path):
    """
    Return the final component of the given path.

    Thin wrapper around os.path.basename.

    Args:
        path: a file path [Eg: /home/user/Documents/GitHub/warriorframework]

    Returns:
        The base directory name: [Eg: warriorframework]
    """
    return os.path.basename(path)
def get_parent_dir_path(path):
    """
    Return the parent directory of the given path.

    Thin wrapper around os.path.dirname(os.path.normpath(<path>)), so a
    trailing separator is ignored.

    Args:
        path: a file path [Eg: /home/user/Documents/GitHub/warriorframework]

    Returns:
        The parent directory path: [Eg: /home/user/Documents/GitHub]
    """
    normalized = os.path.normpath(path)
    return os.path.dirname(normalized)
def join_path(path, *paths):
    """
    Join one or more path components (thin wrapper around os.path.join).

    Args:
        path: a file path
        *paths: paths to be joined to the file path above

    Returns:
        Joined path
    """
    return os.path.join(path, *paths)
def get_relative_path(path, start_directory):
    """
    This is a wrapper function for the os.path.relpath

    Args:
        path: Absolute path to the file/dir to which the relatove path needs to be calculated.
        start_directory: The absolute path to the starting directory

    Returns:
        rel_path: A relative path from start_directory
    """
    if start_directory == "":
        # Fall back to the unmodified path when no starting point was given.
        print("-- Error -- start_directory is empty.")
        return path
    try:
        relpath = os.path.relpath(path, start_directory)
    except Exception as e:
        print("-- Error -- {0}".format(e))
        return None
    # Prefix a separator unless the result is already relative-dotted or rooted.
    if not relpath.startswith(".") and not relpath.startswith(os.sep):
        relpath = os.sep + relpath
    return relpath
def get_direct_sub_files(path, abs_path=False, extension=re.compile(r"\..*")):
    """
    Gets the direct child sub-files of the given directory

    Args:
        path: Absolute path to the directory
        abs_path: If set to True, it returns a list of absolute paths to the sub-files instead of
                  file names only
        extension: Compiled regex matched against each file's extension; the
                   default matches any extension.

    Returns:
        final_files: [list of sub-files]
    """
    # Fix: the default pattern is now a raw string; "\..*" contains the invalid
    # escape "\." which raises SyntaxWarning on Python 3.12+ (same pattern value).
    files = glob.glob(path + os.sep + "*.*")
    only_files = [f.replace("\\", '/') for f in files]
    if not abs_path:
        only_files = [f.rpartition('/')[2] for f in only_files]
    # Keep only the entries whose extension matches the provided regex.
    final_files = []
    for sub_file in only_files:
        if extension.match(os.path.splitext(sub_file)[1]):
            final_files.append(sub_file)
    return final_files
| 29.498024
| 100
| 0.643307
|
ab58cf0f15e7f5253d551ed9c21fd93da300dfec
| 8,381
|
py
|
Python
|
alignment.py
|
LucaOnline/theanine-synthetase
|
75a9d1f6d853409e12bf9f3b6e5948b594a03217
|
[
"MIT"
] | null | null | null |
alignment.py
|
LucaOnline/theanine-synthetase
|
75a9d1f6d853409e12bf9f3b6e5948b594a03217
|
[
"MIT"
] | 1
|
2021-04-28T21:34:45.000Z
|
2021-05-11T23:29:59.000Z
|
alignment.py
|
LucaOnline/theanine-synthetase
|
75a9d1f6d853409e12bf9f3b6e5948b594a03217
|
[
"MIT"
] | null | null | null |
"""The `alignment` module provides an implementation of the Needleman-Wunsch alignment algorithm."""
from typing import Tuple, Literal, List
from math import floor
import numpy as np
from stats import variance
MOVE_DIAGONAL = 0
MOVE_RIGHT = 1
MOVE_DOWN = 2

EditMove = Literal[MOVE_DIAGONAL, MOVE_RIGHT, MOVE_DOWN]

# Purines pair with purines and pyrimidines with pyrimidines; used to detect
# transversions when scoring nucleotide alignments.
CHEMICAL_CLASS = {
    "A": "Purine",
    "G": "Purine",
    "T": "Pyrimidine",
    "C": "Pyrimidine",
}


def backtrack(quad: np.ndarray) -> EditMove:
    """Trace one step back through an edit matrix."""
    # Degenerate slices at the matrix edges force a straight move.
    if quad.shape == (0, 2):
        return MOVE_DOWN
    if quad.shape == (2, 0):
        return MOVE_RIGHT

    # Pick the best predecessor, preferring the diagonal on ties
    # (a plain argmax would not honor that priority).
    best = (0, 0)
    for candidate in ((0, 1), (1, 0)):
        if quad[candidate] > quad[best]:
            best = candidate

    if best == (0, 0):
        return MOVE_DIAGONAL
    if best == (0, 1):
        return MOVE_RIGHT
    return MOVE_DOWN
def score_cell(
    quad: np.ndarray,
    top_char: str,
    left_char: str,
    nucleotides: bool,
    chemical_classes: dict,
) -> int:
    """Calculate the Needleman-Wunsch score for a cell.

    Fix: the return annotation was `np.int`, a deprecated alias of the builtin
    int that was removed in NumPy 1.24, making the annotation fail at function
    definition time on current NumPy; the builtin is equivalent.

    Args:
        quad: 2x2 slice of the score matrix ending at this cell.
        top_char: Character from the top sequence for this column.
        left_char: Character from the left sequence for this row.
        nucleotides: When True, apply the extra transversion penalty.
        chemical_classes: Maps each base to its chemical class (purine/pyrimidine).
    """
    # Gap penalty of -1 for an indel from above or from the left.
    down_score = quad[0, 1] - 1
    right_score = quad[1, 0] - 1
    # Penalize transversions more heavily
    if nucleotides and chemical_classes[top_char] != chemical_classes[left_char]:
        down_score -= 1
        right_score -= 1
    # Diagonal: -1 mismatch penalty, +2 when the characters match (net +1).
    diag_score = quad[0, 0] - 1
    if top_char == left_char:
        diag_score += 2
    return max([down_score, right_score, diag_score])
def align_sequences(
    top_seq: str, left_seq: str, nucleotides: bool = True
) -> AlignmentResult:
    """
    Align the two provided sequences using Needleman-Wunsch alignment.

    Scoring scheme: gap penalty -1, match bonus +1, mismatch penalty -1.
    If the two sequences are `nucleotides`, an additional -1 penalty is
    applied to transversions.

    Parameters
    ----------
    top_seq, left_seq:
        The two sequences to align.
    nucleotides:
        Whether to treat the sequences as DNA (enables the transversion
        penalty). Defaults to True.

    Returns
    -------
    AlignmentResult
        The two gapped sequences, with "-" marking indels.
    """
    size1 = len(top_seq) + 1
    size2 = len(left_seq) + 1

    # Copy into the local scope so it can be accessed more quickly in
    # the inner loop.
    chemical_classes = CHEMICAL_CLASS

    # Build search matrix. `np.int` (a removed NumPy 1.24 alias of the
    # builtin int) previously appeared here; `int` is the direct
    # replacement with identical behavior.
    search = np.zeros((size2, size1), dtype=int)
    # Border cells hold the cumulative gap penalties 0, -1, -2, ...
    search[0] = np.arange(0, -size1, -1)
    search[:, 0] = np.arange(0, -size2, -1)

    # Do scoring: each cell is derived from its 2x2 neighbourhood
    # (diagonal, above, left).
    for x in range(1, size2):
        for y in range(1, size1):
            search[x, y] = score_cell(
                search[x - 1 : x + 1, y - 1 : y + 1],
                top_seq[y - 1],
                left_seq[x - 1],
                nucleotides,
                chemical_classes,
            )
    search = search.T

    # Unwind the result by backtracking from the bottom-right corner.
    # Characters are collected back-to-front into lists and joined once,
    # avoiding quadratic string prepending.
    top_parts = []
    left_parts = []
    bt_x, bt_y = (size1 - 1, size2 - 1)
    while bt_x != 0 or bt_y != 0:
        next_move = backtrack(search[bt_x - 1 : bt_x + 1, bt_y - 1 : bt_y + 1])
        if next_move == MOVE_DIAGONAL:
            top_parts.append(top_seq[bt_x - 1])
            left_parts.append(left_seq[bt_y - 1])
            bt_x -= 1
            bt_y -= 1
        elif next_move == MOVE_DOWN:
            top_parts.append("-")
            left_parts.append(left_seq[bt_y - 1])
            bt_y -= 1
        elif next_move == MOVE_RIGHT:
            top_parts.append(top_seq[bt_x - 1])
            left_parts.append("-")
            bt_x -= 1
    final_top = "".join(reversed(top_parts))
    final_left = "".join(reversed(left_parts))
    return AlignmentResult(final_top, final_left)
| 32.111111
| 106
| 0.595752
|
ab58dbf2a732c20f8c5b6f7ff7869c6f7c00ca41
| 2,348
|
py
|
Python
|
examples/the-feeling-of-success/mock_grasp_object_op.py
|
yujialuo/erdos
|
7a631b55895f1a473b0f4d38a0d6053851e65b5d
|
[
"Apache-2.0"
] | null | null | null |
examples/the-feeling-of-success/mock_grasp_object_op.py
|
yujialuo/erdos
|
7a631b55895f1a473b0f4d38a0d6053851e65b5d
|
[
"Apache-2.0"
] | null | null | null |
examples/the-feeling-of-success/mock_grasp_object_op.py
|
yujialuo/erdos
|
7a631b55895f1a473b0f4d38a0d6053851e65b5d
|
[
"Apache-2.0"
] | null | null | null |
from mock_gripper_op import MockGripType
from std_msgs.msg import Bool
from erdos.op import Op
from erdos.data_stream import DataStream
from erdos.message import Message
| 34.028986
| 79
| 0.663969
|
ab58e931b2be617a8f028a51f43ae40e92333614
| 3,683
|
py
|
Python
|
src/pyfsa/lib/fsa.py
|
taliamax/pyfsa
|
d92faa96c1e17e4016df7b367c7d405a07f1253b
|
[
"Apache-2.0"
] | 1
|
2021-01-21T21:48:26.000Z
|
2021-01-21T21:48:26.000Z
|
src/pyfsa/lib/fsa.py
|
taliamax/pyfsa
|
d92faa96c1e17e4016df7b367c7d405a07f1253b
|
[
"Apache-2.0"
] | null | null | null |
src/pyfsa/lib/fsa.py
|
taliamax/pyfsa
|
d92faa96c1e17e4016df7b367c7d405a07f1253b
|
[
"Apache-2.0"
] | 4
|
2021-01-22T04:04:22.000Z
|
2021-11-01T14:43:09.000Z
|
# -*- coding: utf-8 -*-
import pygraphviz as gv # type: ignore
import itertools as it
from typing import (
List,
Optional,
)
from pyfsa.lib.types import TransitionsTable
def get_state_graph(
    transitions: TransitionsTable,
    start: Optional[str] = None,
    end: Optional[str] = None,
    nodes: Optional[List[str]] = None,
    name: str = 'output.png',
    draw: bool = True,
    engine: str = 'circo',
) -> gv.AGraph:
    '''
    Build a pygraphviz graph of every state in the transition table and
    the labelled edges between them. The start node (if given) is filled
    blue and the end node (if given) is filled green. When `draw` is
    true, the graph is laid out with `engine` and rendered to `name`.
    Returns the resulting graph.
    '''
    graph = gv.AGraph(directed=True, strict=False, ranksep='1')
    edge_ids = it.count()

    graph.add_nodes_from(transitions.keys() if nodes is None else nodes)

    for source, transition_row in transitions.items():
        for edge_label, destinations in transition_row.items():
            for destination in destinations:
                graph.add_edge(
                    source,
                    destination,
                    key=f'{next(edge_ids)}',
                    label=edge_label,
                    weight=1,
                )

    if start:
        start_node: gv.Node = graph.get_node(start)
        start_node.attr['color'] = '#0000FF'
        start_node.attr['style'] = 'filled'

    if end:
        end_node = graph.get_node(end)
        end_node.attr['color'] = '#00FF00'
        end_node.attr['style'] = 'filled'

    if draw:
        graph.layout(prog=engine)
        graph.draw(name)

    return graph
def verify_string(
    string: str,
    starting_state: str,
    final_state: str,
    transitions: TransitionsTable,
) -> bool:
    '''
    Run the finite state machine described by `transitions` over
    `string`, beginning at `starting_state`, and report whether the
    machine halts in `final_state`. Only the first target listed for
    each (state, symbol) pair is followed.
    '''
    state = starting_state
    for symbol in string:
        state = transitions[state][symbol][0]
    return state == final_state
def render_string_graph(
    string: str,
    start: str,
    end: str,
    transitions: TransitionsTable,
    name: str = 'output.png',
    draw: bool = True,
    engine: str = 'circo'
) -> gv.AGraph:
    '''
    Trace `string` through the state machine defined by `transitions`,
    starting from `start`, and build a graph of the visited states with
    one edge per consumed letter. The initial node is filled blue; the
    final node is filled green only if the machine ends in `end`. By
    default the result is rendered to a png file, but that can be
    suppressed.
    '''
    graph = gv.AGraph(directed=True)
    graph.graph_attr['label'] = f'Evaluating {string}'
    node_ids = it.count()

    state = start
    current_id = next(node_ids)
    graph.add_node(current_id)
    node = gv.Node(graph, current_id)
    node.attr['label'] = state
    node.attr['fillcolor'] = '#0000FF'
    node.attr['style'] = 'filled'

    for letter in string:
        successor_id = next(node_ids)
        graph.add_node(successor_id)
        successor = gv.Node(graph, successor_id)
        # TODO: Only the first listed target state is followed, which
        # may not produce a correct answer for nondeterministic tables.
        state = transitions[state][letter][0]
        successor.attr['label'] = state
        graph.add_edge(node, successor, label=letter)
        node = successor

    if state == end:
        node.attr['style'] = 'filled'
        node.attr['fillcolor'] = '#00FF00'

    if draw:
        graph.layout(prog=engine)
        graph.draw(name)

    return graph
| 27.485075
| 65
| 0.620961
|
ab59b426727f7713efb93b6855597da219efc0be
| 1,695
|
py
|
Python
|
examples/multimedia/mmimdb_MFM.py
|
kapikantzari/MultiBench
|
44ab6ea028682040a0c04de68239ce5cdf15123f
|
[
"MIT"
] | 148
|
2021-03-06T06:54:13.000Z
|
2022-03-29T19:27:21.000Z
|
examples/multimedia/mmimdb_MFM.py
|
kapikantzari/MultiBench
|
44ab6ea028682040a0c04de68239ce5cdf15123f
|
[
"MIT"
] | 10
|
2021-07-19T22:57:49.000Z
|
2022-02-04T03:12:29.000Z
|
examples/multimedia/mmimdb_MFM.py
|
kapikantzari/MultiBench
|
44ab6ea028682040a0c04de68239ce5cdf15123f
|
[
"MIT"
] | 18
|
2021-07-22T07:17:27.000Z
|
2022-03-27T16:11:40.000Z
|
import torch
import sys
import os
sys.path.append(os.getcwd())
from utils.helper_modules import Sequential2
from unimodals.common_models import Linear, MLP, MaxOut_MLP
from datasets.imdb.get_data import get_dataloader
from fusions.common_fusions import Concat
from objective_functions.objectives_for_supervised_learning import MFM_objective
from objective_functions.recon import sigmloss1d
from training_structures.Supervised_Learning import train, test
# Flat training script: build the MFM model for the MM-IMDb multilabel
# task, train it, then reload the best checkpoint and evaluate.
# Requires CUDA and the MM-IMDb data files at the relative paths below.

# Checkpoint path written by train() (via save=) and reloaded for testing.
filename = "best_mfm.pt"

traindata, validdata, testdata = get_dataloader(
    "../video/multimodal_imdb.hdf5", "../video/mmimdb", vgg=True, batch_size=128)

classes = 23      # number of output labels (multilabel head size)
n_latent = 512    # latent dimension produced by each modality encoder

# Fusion: concatenate both latents, then project 2*n_latent -> n_latent//2.
fuse = Sequential2(Concat(), MLP(2*n_latent, n_latent, n_latent//2)).cuda()

# One MaxOut-MLP encoder per modality, both mapping into the n_latent space.
encoders = [MaxOut_MLP(512, 512, 300, n_latent, False).cuda(
), MaxOut_MLP(512, 1024, 4096, n_latent, False).cuda()]

head = Linear(n_latent//2, classes).cuda()  # classifier over the fused vector

# Decoders reconstruct each modality input from its latent; intermediates
# map each latent down to the fused size (n_latent//2) for the objective.
decoders = [MLP(n_latent, 600, 300).cuda(), MLP(n_latent, 2048, 4096).cuda()]
intermediates = [MLP(n_latent, n_latent//2, n_latent//2).cuda(),
                 MLP(n_latent, n_latent//2, n_latent//2).cuda()]

# MFM objective with per-modality sigmloss1d reconstruction terms and a
# BCE-with-logits criterion; 2.0 presumably weights the reconstruction vs
# classification trade-off — confirm against MFM_objective's signature.
recon_loss = MFM_objective(2.0, [sigmloss1d, sigmloss1d], [
    1.0, 1.0], criterion=torch.nn.BCEWithLogitsLoss())

train(encoders, fuse, head, traindata, validdata, 1000, decoders+intermediates, early_stop=True, task="multilabel",
      objective_args_dict={"decoders": decoders, "intermediates": intermediates}, save=filename, optimtype=torch.optim.AdamW, lr=5e-3, weight_decay=0.01, objective=recon_loss)

print("Testing:")

# NOTE(review): torch.load unpickles an arbitrary object; safe only for
# trusted checkpoints, such as the one produced by train() above.
model = torch.load(filename).cuda()
test(model, testdata, method_name="MFM", dataset="imdb",
     criterion=torch.nn.BCEWithLogitsLoss(), task="multilabel")
| 42.375
| 175
| 0.746313
|
ab5a9e198509b5652d8bbadee3e63897c02a6e94
| 1,461
|
py
|
Python
|
subeana/migrations/0001_initial.py
|
izumin2000/izuminapp
|
3464cebe1d98c85c2cd95c6fac779ec1f42ef930
|
[
"MIT"
] | null | null | null |
subeana/migrations/0001_initial.py
|
izumin2000/izuminapp
|
3464cebe1d98c85c2cd95c6fac779ec1f42ef930
|
[
"MIT"
] | null | null | null |
subeana/migrations/0001_initial.py
|
izumin2000/izuminapp
|
3464cebe1d98c85c2cd95c6fac779ec1f42ef930
|
[
"MIT"
] | null | null | null |
# Generated by Django 4.0.2 on 2022-06-01 04:43
from django.db import migrations, models
import django.db.models.deletion
| 40.583333
| 169
| 0.600274
|
ab5b1e3328548b4d29719e9eabea190e03a2dd78
| 78,836
|
py
|
Python
|
bifacialvf/vf.py
|
shirubana/bifacialvf
|
7cd1c4c658bb7a68f0815b2bd1a6d5c492ca7300
|
[
"BSD-3-Clause"
] | 1
|
2020-05-20T06:19:40.000Z
|
2020-05-20T06:19:40.000Z
|
bifacialvf/vf.py
|
shirubana/bifacialvf
|
7cd1c4c658bb7a68f0815b2bd1a6d5c492ca7300
|
[
"BSD-3-Clause"
] | null | null | null |
bifacialvf/vf.py
|
shirubana/bifacialvf
|
7cd1c4c658bb7a68f0815b2bd1a6d5c492ca7300
|
[
"BSD-3-Clause"
] | 1
|
2020-12-30T08:05:49.000Z
|
2020-12-30T08:05:49.000Z
|
# -*- coding: utf-8 -*-
"""
ViewFactor module - VF calculation helper files for bifacial-viewfactor
@author Bill Marion
@translated to python by sayala 06/09/17
"""
# ensure python3 compatible division and printing
from __future__ import division, print_function, absolute_import
import math
import numpy as np
from sun import solarPos, sunIncident, perezComp, aOIcorrection
import logging
# TODO: set level or add formatters if more advanced logging required
# Module logger; only used to raise errors
LOGGER = logging.getLogger(__name__)

DTOR = math.pi / 180.0  # Factor for converting from degrees to radians
def getBackSurfaceIrradiances(rowType, maxShadow, PVbackSurface, beta, sazm,
                              dni, dhi, C, D, albedo, zen, azm, cellRows,
                              pvBackSH, rearGroundGHI, frontGroundGHI,
                              frontReflected, offset=0):
    """
    This method calculates the AOI corrected irradiance on the back of the PV
    module/panel. 11/19/2015

    Added rowType and other changes to distinguish between types of rows.
    4/19/2016

    Added input of offset of reference cell from PV module back (in PV panel
    slope lengths) for modeling Sara's reference cell measurements, should be
    set to zero for PV module cell irradiances.

    Added while loop so projected Xs aren't too negative causing array index
    problems (<0) 12/13/2016::

        while (projectedX1 < -100.0 or projectedX2 < -100.0):
            # Offset so array indexes are >= -100.0 12/13/2016
            projectedX1 += 100.0;
            projectedX2 += 100.0;

    Parameters
    ----------
    rowType : str
        Type of row: "first", "interior", "last", or "single"
    maxShadow
        Maximum shadow length projected to the front(-) or rear (+) from the
        front of the module
    PVbackSurface
        PV module back surface material type, either "glass" or "ARglass"
    beta
        Tilt from horizontal of the PV modules/panels (deg) (for front surface)
    sazm
        Surface azimuth of PV panels (deg) (for front surface)
    dni
        Direct normal irradiance (W/m2)
    dhi
        Diffuse horizontal irradiance (W/m2)
    C
        Ground clearance of PV panel (in PV panel slope lengths)
    D
        Horizontal distance between rows of PV panels (in PV panel slope
        lengths)
    albedo
        Ground albedo
    zen
        Sun zenith (in radians)
    azm
        Sun azimuth (in radians)
    cellRows
        Number of cell rows per module; one back irradiance is computed for
        each
    pvBackSH
        Decimal fraction of the back surface of the PV panel that is shaded,
        0.0 to 1.0
    rearGroundGHI : array of size [100]
        Global horizontal irradiance for each of 100 ground segments (W/m2)
    frontGroundGHI : array of size [100]
        Global horizontal irradiance for each of 100 ground segments (W/m2)
    frontReflected : array of size [cellRows]
        Irradiance reflected from the front of the PV module/panel (W/m2) in
        the row behind the one of interest
    offset
        Offset of reference cell from PV module back (in PV panel slope
        lengths), set to zero for PV module cell irradiances

    Returns
    -------
    backGTI : array of size [cellRows]
        AOI corrected irradiance on back side of PV module/panel, one for each
        cell row (W/m2)
    aveGroundGHI : numeric
        Average GHI on ground under PV array

    Notes
    -----
    1-degree hemispherical segment AOI correction factor for glass (index=0)
    and ARglass (index=1)
    """
    backGTI = []

    # 1-degree hemispherical segment AOI correction factors; first row is
    # glass (index 0), second row is AR-coated glass (index 1).
    SegAOIcor = [
        [0.057563, 0.128570, 0.199651, 0.265024, 0.324661, 0.378968, 0.428391, 0.473670, 0.514788, 0.552454,
         0.586857, 0.618484, 0.647076, 0.673762, 0.698029, 0.720118, 0.740726, 0.759671, 0.776946, 0.792833,
         0.807374, 0.821010, 0.833534, 0.845241, 0.855524, 0.865562, 0.874567, 0.882831, 0.890769, 0.897939,
         0.904373, 0.910646, 0.916297, 0.921589, 0.926512, 0.930906, 0.935179, 0.939074, 0.942627, 0.946009,
         0.949096, 0.952030, 0.954555, 0.957157, 0.959669, 0.961500, 0.963481, 0.965353, 0.967387, 0.968580,
         0.970311, 0.971567, 0.972948, 0.974114, 0.975264, 0.976287, 0.977213, 0.978142, 0.979057, 0.979662,
         0.980460, 0.981100, 0.981771, 0.982459, 0.982837, 0.983199, 0.983956, 0.984156, 0.984682, 0.985026,
         0.985364, 0.985645, 0.985954, 0.986241, 0.986484, 0.986686, 0.986895, 0.987043, 0.987287, 0.987388,
         0.987541, 0.987669, 0.987755, 0.987877, 0.987903, 0.987996, 0.988022, 0.988091, 0.988104, 0.988114,
         0.988114, 0.988104, 0.988091, 0.988022, 0.987996, 0.987903, 0.987877, 0.987755, 0.987669, 0.987541,
         0.987388, 0.987287, 0.987043, 0.986895, 0.986686, 0.986484, 0.986240, 0.985954, 0.985645, 0.985364,
         0.985020, 0.984676, 0.984156, 0.983956, 0.983199, 0.982837, 0.982459, 0.981771, 0.981100, 0.980460,
         0.979662, 0.979057, 0.978142, 0.977213, 0.976287, 0.975264, 0.974114, 0.972947, 0.971567, 0.970311,
         0.968580, 0.967387, 0.965353, 0.963481, 0.961501, 0.959671, 0.957157, 0.954555, 0.952030, 0.949096,
         0.946009, 0.942627, 0.939074, 0.935179, 0.930906, 0.926512, 0.921589, 0.916297, 0.910646, 0.904373,
         0.897939, 0.890769, 0.882831, 0.874567, 0.865562, 0.855524, 0.845241, 0.833534, 0.821010, 0.807374,
         0.792833, 0.776946, 0.759671, 0.740726, 0.720118, 0.698029, 0.673762, 0.647076, 0.618484, 0.586857,
         0.552454, 0.514788, 0.473670, 0.428391, 0.378968, 0.324661, 0.265024, 0.199651, 0.128570, 0.057563],
        [0.062742, 0.139913, 0.216842, 0.287226, 0.351055, 0.408796, 0.460966, 0.508397, 0.551116, 0.589915,
         0.625035, 0.657029, 0.685667, 0.712150, 0.735991, 0.757467, 0.777313, 0.795374, 0.811669, 0.826496,
         0.839932, 0.852416, 0.863766, 0.874277, 0.883399, 0.892242, 0.900084, 0.907216, 0.914023, 0.920103,
         0.925504, 0.930744, 0.935424, 0.939752, 0.943788, 0.947313, 0.950768, 0.953860, 0.956675, 0.959339,
         0.961755, 0.964039, 0.965984, 0.967994, 0.969968, 0.971283, 0.972800, 0.974223, 0.975784, 0.976647,
         0.977953, 0.978887, 0.979922, 0.980773, 0.981637, 0.982386, 0.983068, 0.983759, 0.984436, 0.984855,
         0.985453, 0.985916, 0.986417, 0.986934, 0.987182, 0.987435, 0.988022, 0.988146, 0.988537, 0.988792,
         0.989043, 0.989235, 0.989470, 0.989681, 0.989857, 0.990006, 0.990159, 0.990263, 0.990455, 0.990515,
         0.990636, 0.990731, 0.990787, 0.990884, 0.990900, 0.990971, 0.990986, 0.991042, 0.991048, 0.991057,
         0.991057, 0.991048, 0.991042, 0.990986, 0.990971, 0.990900, 0.990884, 0.990787, 0.990731, 0.990636,
         0.990515, 0.990455, 0.990263, 0.990159, 0.990006, 0.989857, 0.989681, 0.989470, 0.989235, 0.989043,
         0.988787, 0.988532, 0.988146, 0.988022, 0.987435, 0.987182, 0.986934, 0.986417, 0.985916, 0.985453,
         0.984855, 0.984436, 0.983759, 0.983068, 0.982386, 0.981637, 0.980773, 0.979920, 0.978887, 0.977953,
         0.976647, 0.975784, 0.974223, 0.972800, 0.971284, 0.969970, 0.967994, 0.965984, 0.964039, 0.961755,
         0.959339, 0.956675, 0.953860, 0.950768, 0.947313, 0.943788, 0.939752, 0.935424, 0.930744, 0.925504,
         0.920103, 0.914023, 0.907216, 0.900084, 0.892242, 0.883399, 0.874277, 0.863766, 0.852416, 0.839932,
         0.826496, 0.811669, 0.795374, 0.777313, 0.757467, 0.735991, 0.712150, 0.685667, 0.657029, 0.625035,
         0.589915, 0.551116, 0.508397, 0.460966, 0.408796, 0.351055, 0.287226, 0.216842, 0.139913, 0.062742]]

    # Tilt from horizontal of the PV modules/panels, in radians
    beta = beta * DTOR
    sazm = sazm * DTOR  # Surface azimuth of PV module/panels, in radians

    # 1. Calculate and assign various parameters to be used for modeling
    # irradiances

    # For calling PerezComp to break diffuse into components for zero tilt
    # (horizontal)
    iso_dif = 0.0; circ_dif = 0.0; horiz_dif = 0.0; grd_dif = 0.0; beam = 0.0

    # Call to get iso_dif for horizontal surface
    ghi, iso_dif, circ_dif, horiz_dif, grd_dif, beam = perezComp(
        dni, dhi, albedo, zen, 0.0, zen)

    # Isotropic irradiance from sky on horizontal surface, used later for
    # determining isotropic sky component
    iso_sky_dif = iso_dif

    # For calling PerezComp to break diffuse into components for 90 degree
    # tilt (vertical)
    inc, tiltr, sazmr = sunIncident(0, 90.0, 180.0, 45.0, zen, azm)

    # Call to get horiz_dif for vertical surface
    vti, iso_dif, circ_dif, horiz_dif, grd_dif, beam = perezComp(
        dni, dhi, albedo, inc, tiltr, zen)

    # Horizon diffuse irradiance on a vertical surface, used later for
    # determining horizon brightening irradiance component
    F2DHI = horiz_dif

    # Sentinels; overwritten below or an exception is raised.
    index = -99
    n2 = -99.9

    if (PVbackSurface == "glass"):
        # Index to use with 1-degree hemispherical segment AOI correction
        # factor array
        index = 0
        n2 = 1.526  # Index of refraction for glass
    elif (PVbackSurface == "ARglass"):
        # Index to use with 1-degree hemispherical segment AOI correction
        # factor array
        index = 1
        n2 = 1.300  # Index of refraction for ARglass
    else:
        raise Exception(
            "Incorrect text input for PVbackSurface."
            " Must be glass or ARglass.")

    # Reflectance at normal incidence, Duffie and Beckman p217
    # NOTE(review): Ro is computed but never read in this function.
    Ro = math.pow((n2 - 1.0) / (n2 + 1.0), 2.0)

    # Average GHI on ground under PV array for cases when x projection exceed
    # 2*rtr
    aveGroundGHI = 0.0
    for i in range(0,100):
        aveGroundGHI += rearGroundGHI[i] / 100.0

    # Calculate x,y coordinates of bottom and top edges of PV row in back of
    # the desired PV row so that portions of sky and ground viewed by the PV
    # cell may be determined. Origin of the x-y axis is the ground point
    # below the lower front edge of the desired PV row. The row in back of
    # the desired row is in the positive x direction.

    h = math.sin(beta);    # Vertical height of sloped PV panel (in PV panel slope lengths)
    x1 = math.cos(beta);   # Horizontal distance from front of panel to rear of panel (in PV panel slope lengths)
    rtr = D + x1;          # Row-to-row distance (in PV panel slope lengths)
    PbotX = rtr;           # x value for point on bottom edge of PV module/panel of row in back (in PV panel slope lengths)
    PbotY = C;             # y value for point on bottom edge of PV module/panel of row in back (in PV panel slope lengths)
    PtopX = rtr + x1;      # x value for point on top edge of PV module/panel of row in back (in PV panel slope lengths)
    PtopY = h + C;         # y value for point on top edge of PV module/panel of row in back (in PV panel slope lengths)

    # 2. Calculate diffuse and direct component irradiances for each cell row
    for i in range (0, cellRows):
        # Calculate diffuse irradiances and reflected amounts for each cell
        # row over its field of view of 180 degrees, beginning with the angle
        # providing the upper most view of the sky (j=0)
        PcellX = x1 * (i + 0.5) / (cellRows) + offset * math.sin(beta);    # x value for location of PV cell with OFFSET FOR SARA REFERENCE CELLS 4/26/2016
        PcellY = C + h * (i + 0.5) / (cellRows) - offset * math.cos(beta); # y value for location of PV cell with OFFSET FOR SARA REFERENCE CELLS 4/26/2016
        elvUP = math.atan((PtopY - PcellY) / (PtopX - PcellX));    # Elevation angle up from PV cell to top of PV module/panel, radians
        elvDOWN = math.atan((PcellY - PbotY) / (PbotX - PcellX));  # Elevation angle down from PV cell to bottom of PV module/panel, radians
        if (rowType == "last" or rowType == "single"):    # 4/19/16 No array to the rear for these cases
            elvUP = 0.0;
            elvDOWN = 0.0;

        iStopIso = int(round((beta - elvUP) / DTOR));           # Last whole degree in arc range that sees sky, first is 0
        iHorBright = int(round(max(0.0, 6.0 - elvUP / DTOR)));  # Number of whole degrees for which horizon brightening occurs
        iStartGrd = int(round((beta + elvDOWN) / DTOR));        # First whole degree in arc range that sees ground, last is 180

        backGTI.append(0.0)    # Initialize this cell row's back GTI accumulator

        for j in range (0, iStopIso):    # Add sky diffuse component and horizon brightening if present
            backGTI[i] += 0.5 * (math.cos(j * DTOR) - math.cos((j + 1) * DTOR)) * SegAOIcor[index][j] * iso_sky_dif;    # Sky radiation
            if ((iStopIso - j) <= iHorBright):    # Add horizon brightening term if seen
                backGTI[i] += 0.5 * (math.cos(j * DTOR) - math.cos((j + 1) * DTOR)) * SegAOIcor[index][j] * F2DHI / 0.052264;    # 0.052246 = 0.5 * [cos(84) - cos(90)]

        if (rowType == "interior" or rowType == "first"):    # 4/19/16 Only add reflections from PV modules for these cases
            for j in range (iStopIso, iStartGrd):    # Add reflections from PV module front surfaces
                L = (PbotX - PcellX) / math.cos(elvDOWN);    # Diagonal distance from cell to bottom of module in row behind
                startAlpha = -(j - iStopIso) * DTOR + elvUP + elvDOWN;
                stopAlpha = -(j + 1 - iStopIso) * DTOR + elvUP + elvDOWN;
                m = L * math.sin(startAlpha);
                theta = math.pi - elvDOWN - (math.pi / 2.0 - startAlpha) - beta;
                projectedX2 = m / math.cos(theta);    # Projected distance on sloped PV module
                m = L * math.sin(stopAlpha);
                theta = math.pi - elvDOWN - (math.pi / 2.0 - stopAlpha) - beta;
                projectedX1 = m / math.cos(theta);    # Projected distance on sloped PV module
                projectedX1 = max(0.0, projectedX1);

                PVreflectedIrr = 0.0;        # Irradiance from PV module front cover reflections
                deltaCell = 1.0 / cellRows;  # Length of cell in sloped direction in module/panel units (dimensionless)
                for k in range (0, cellRows):    # Determine which cells in behind row are seen, and their reflected irradiance
                    cellBot = k * deltaCell;        # Position of bottom of cell along PV module/panel
                    cellTop = (k + 1) * deltaCell;  # Position of top of cell along PV module/panel
                    cellLengthSeen = 0.0;           # Length of cell seen for this row, start with zero
                    if (cellBot >= projectedX1 and cellTop <= projectedX2):
                        cellLengthSeen = cellTop - cellBot;         # Sees the whole cell
                    elif (cellBot <= projectedX1 and cellTop >= projectedX2):
                        cellLengthSeen = projectedX2 - projectedX1; # Sees portion in the middle of cell
                    elif (cellBot >= projectedX1 and projectedX2 > cellBot and cellTop >= projectedX2):
                        cellLengthSeen = projectedX2 - cellBot;     # Sees bottom of cell
                    elif (cellBot <= projectedX1 and projectedX1 < cellTop and cellTop <= projectedX2):
                        cellLengthSeen = cellTop - projectedX1;     # Sees top of cell
                    PVreflectedIrr += cellLengthSeen * frontReflected[k];    # Add reflected radiation for this PV cell, if seen, weight by cell length seen
                PVreflectedIrr /= projectedX2 - projectedX1;    # Reflected irradiance from PV modules (W/m2)
                backGTI[i] += 0.5 * (math.cos(j * DTOR) - math.cos((j + 1) * DTOR)) * SegAOIcor[index][j] * PVreflectedIrr;    # Radiation reflected from PV module surfaces onto back surface of module
            # End of adding reflections from PV module surfaces

        for j in range (iStartGrd, 180):    # Add ground reflected component
            startElvDown = (j - iStartGrd) * DTOR + elvDOWN;    # Start and ending down elevations for this j loop
            stopElvDown = (j + 1 - iStartGrd) * DTOR + elvDOWN;
            projectedX2 = PcellX + np.float64(PcellY) / math.tan(startElvDown);    # Projection of ElvDown to ground in +x direction (X1 and X2 opposite nomenclature for front irradiance method)
            projectedX1 = PcellX + PcellY / math.tan(stopElvDown);
            actualGroundGHI = 0.0;    # Actual ground GHI from summing array values

            if (abs(projectedX1 - projectedX2) > 0.99 * rtr):
                if (rowType == "last" or rowType == "single"):    # 4/19/16 No array to rear for these cases
                    actualGroundGHI = ghi;    # Use total value if projection approximates the rtr
                else:
                    actualGroundGHI = aveGroundGHI;    # Use average value if projection approximates the rtr
            else:
                projectedX1 = 100.0 * projectedX1 / rtr;    # Normalize projections and multiply by 100
                projectedX2 = 100.0 * projectedX2 / rtr;
                if ((rowType == "last" or rowType == "single") and (abs(projectedX1) > 99.0 or abs(projectedX2) > 99.0)): #4/19/2016
                    actualGroundGHI = ghi;    # Use total value if projection > rtr for "last" or "single"
                else:
                    while (projectedX1 >= 100.0 or projectedX2 >= 100.0):    # Offset so array indexes are less than 100
                        projectedX1 -= 100.0;
                        projectedX2 -= 100.0;
                    while (projectedX1 < -100.0 or projectedX2 < -100.0):    # Offset so array indexes are >= -100.0 12/13/2016
                        projectedX1 += 100.0;
                        projectedX2 += 100.0;
                    index1 = (int)(projectedX1 + 100.0) - 100;    # Determine indexes for use with rearGroundGHI array and frontGroundGHI array (truncates values)
                    index2 = (int)(projectedX2 + 100.0) - 100;    # (int)(1.9) = 1 and (int)(-1.9) = -1; (int)(1.9+100) - 100 = 1 and (int)(-1.9+100) - 100 = -2
                    if (index1 == index2):
                        if (index1 < 0):
                            # Negative index: segment is in front of the row,
                            # so read from frontGroundGHI.
                            actualGroundGHI = frontGroundGHI[index1 + 100];
                        else:
                            actualGroundGHI = rearGroundGHI[index1];    # x projections in same groundGHI element THIS SEEMS TO ADD HICCUP 4/26/2016
                    else:
                        for k in range (index1, index2+1):    # Sum the irradiances on the ground if projections are in different groundGHI elements
                            if (k == index1):
                                # Partial weight for the first element.
                                if (k < 0):
                                    actualGroundGHI += frontGroundGHI[k + 100] * (k + 1.0 - projectedX1);
                                else:
                                    actualGroundGHI += rearGroundGHI[k] * (k + 1.0 - projectedX1);
                            elif (k == index2):
                                # Partial weight for the last element.
                                if (k < 0):
                                    actualGroundGHI += frontGroundGHI[k + 100] * (projectedX2 - k);
                                else:
                                    actualGroundGHI += rearGroundGHI[k] * (projectedX2 - k);
                            else:
                                # Whole elements in between count fully.
                                if (k < 0):
                                    actualGroundGHI += frontGroundGHI[k + 100];
                                else:
                                    actualGroundGHI += rearGroundGHI[k];
                        actualGroundGHI /= projectedX2 - projectedX1;    # Irradiance on ground in the 1 degree field of view
            # End of if looping to determine actualGroundGHI

            backGTI[i] += 0.5 * (math.cos(j * DTOR) - math.cos((j + 1) * DTOR)) * SegAOIcor[index][j] * actualGroundGHI * albedo;    # Add ground reflected component
        # End of j loop for adding ground reflected component

        # Calculate and add direct and circumsolar irradiance components
        inc, tiltr, sazmr = sunIncident(0, 180-beta / DTOR, sazm / DTOR - 180, 45.0, zen, azm)    # For calling PerezComp to break diffuse into components for downward facing tilt
        gtiAllpc, iso_dif, circ_dif, horiz_dif, grd_dif, beam = perezComp(dni, dhi, albedo, inc, tiltr, zen)    # Call to get components for the tilt

        cellShade = pvBackSH * cellRows - i;
        if (cellShade > 1.0):    # Fully shaded if > 1, no shade if < 0, otherwise fractionally shaded
            cellShade = 1.0;
        elif (cellShade < 0.0):
            cellShade = 0.0;

        if (cellShade < 1.0 and inc < math.pi / 2.0):    # Cell not shaded entirely and inc < 90 deg
            cor = aOIcorrection(n2, inc);    # Get AOI correction for beam and circumsolar
            backGTI[i] += (1.0 - cellShade) * (beam + circ_dif) * cor;    # Add beam and circumsolar radiation
    # End of for i = 0; i < cellRows loop
    return backGTI, aveGroundGHI;
    # End of GetBackSurfaceIrradiances
def getFrontSurfaceIrradiances(rowType, maxShadow, PVfrontSurface, beta, sazm,
                               dni, dhi, C, D, albedo, zen, azm, cellRows,
                               pvFrontSH, frontGroundGHI):
    """
    Calculate the AOI-corrected irradiance on the front of the PV
    module/panel and the irradiance reflected from the front of the PV
    module/panel. 11/12/2015
    Added row type and MaxShadow and changed code to accommodate 4/19/2015

    Parameters
    ----------
    rowType : str
        Type of row: "first", "interior", "last", or "single"
    maxShadow
        Maximum shadow length projected to the front (-) or rear (+) from the
        front of the module row (in PV panel slope lengths), only used for
        `rowTypes` other than "interior"
    PVfrontSurface
        PV module front surface material type, either "glass" or "ARglass"
    beta
        Tilt from horizontal of the PV modules/panels (deg)
    sazm
        Surface azimuth of PV panels (deg)
    dni
        Direct normal irradiance (W/m2)
    dhi
        Diffuse horizontal irradiance (W/m2)
    C
        Ground clearance of PV panel (in PV panel slope lengths)
    D
        Horizontal distance between rows of PV panels (in PV panel slope
        lengths)
    albedo
        Ground albedo
    zen
        Sun zenith (in radians)
    azm
        Sun azimuth (in radians)
    cellRows
        Number of cell rows on the PV module/panel; one irradiance value is
        computed per cell row
    pvFrontSH
        Decimal fraction of the front surface of the PV panel that is shaded,
        0.0 to 1.0
    frontGroundGHI : array of size [100]
        Global horizontal irradiance for each of 100 ground segments in front
        of the module row

    Returns
    -------
    aveGroundGHI : numeric
        Average GHI on the ground (includes effects of shading by array) from
        the array frontGroundGHI[100]
    frontGTI : array of size [cellRows]
        AOI corrected irradiance on front side of PV module/panel, one for each
        cell row (W/m2)
    frontReflected : array of size [cellRows]
        Irradiance reflected from the front of the PV module/panel (W/m2)

    Notes
    -----
    Values are returned in the order ``aveGroundGHI, frontGTI,
    frontReflected``.
    `SegAOIcor` is the 1-degree hemispherical segment AOI correction factor
    for glass (index=0) and ARglass (index=1): 180 precomputed factors per
    surface type, one per degree of the hemisphere seen by a cell row.
    """
    frontGTI = []
    frontReflected = []
    # Original (commented-out) allocation of the 2 x 180 correction table:
    #w, h = 2, 180;
    #SegAOIcor = [[0 for x in range(w)] for y in range(h)]
    SegAOIcor = ([[0.057563, 0.128570, 0.199651, 0.265024, 0.324661, 0.378968, 0.428391, 0.473670, 0.514788, 0.552454,
                 0.586857, 0.618484, 0.647076, 0.673762, 0.698029, 0.720118, 0.740726, 0.759671, 0.776946, 0.792833,
                 0.807374, 0.821010, 0.833534, 0.845241, 0.855524, 0.865562, 0.874567, 0.882831, 0.890769, 0.897939,
                 0.904373, 0.910646, 0.916297, 0.921589, 0.926512, 0.930906, 0.935179, 0.939074, 0.942627, 0.946009,
                 0.949096, 0.952030, 0.954555, 0.957157, 0.959669, 0.961500, 0.963481, 0.965353, 0.967387, 0.968580,
                 0.970311, 0.971567, 0.972948, 0.974114, 0.975264, 0.976287, 0.977213, 0.978142, 0.979057, 0.979662,
                 0.980460, 0.981100, 0.981771, 0.982459, 0.982837, 0.983199, 0.983956, 0.984156, 0.984682, 0.985026,
                 0.985364, 0.985645, 0.985954, 0.986241, 0.986484, 0.986686, 0.986895, 0.987043, 0.987287, 0.987388,
                 0.987541, 0.987669, 0.987755, 0.987877, 0.987903, 0.987996, 0.988022, 0.988091, 0.988104, 0.988114,
                 0.988114, 0.988104, 0.988091, 0.988022, 0.987996, 0.987903, 0.987877, 0.987755, 0.987669, 0.987541,
                 0.987388, 0.987287, 0.987043, 0.986895, 0.986686, 0.986484, 0.986240, 0.985954, 0.985645, 0.985364,
                 0.985020, 0.984676, 0.984156, 0.983956, 0.983199, 0.982837, 0.982459, 0.981771, 0.981100, 0.980460,
                 0.979662, 0.979057, 0.978142, 0.977213, 0.976287, 0.975264, 0.974114, 0.972947, 0.971567, 0.970311,
                 0.968580, 0.967387, 0.965353, 0.963481, 0.961501, 0.959671, 0.957157, 0.954555, 0.952030, 0.949096,
                 0.946009, 0.942627, 0.939074, 0.935179, 0.930906, 0.926512, 0.921589, 0.916297, 0.910646, 0.904373,
                 0.897939, 0.890769, 0.882831, 0.874567, 0.865562, 0.855524, 0.845241, 0.833534, 0.821010, 0.807374,
                 0.792833, 0.776946, 0.759671, 0.740726, 0.720118, 0.698029, 0.673762, 0.647076, 0.618484, 0.586857,
                 0.552454, 0.514788, 0.473670, 0.428391, 0.378968, 0.324661, 0.265024, 0.199651, 0.128570, 0.057563],
                [0.062742, 0.139913, 0.216842, 0.287226, 0.351055, 0.408796, 0.460966, 0.508397, 0.551116, 0.589915,
                 0.625035, 0.657029, 0.685667, 0.712150, 0.735991, 0.757467, 0.777313, 0.795374, 0.811669, 0.826496,
                 0.839932, 0.852416, 0.863766, 0.874277, 0.883399, 0.892242, 0.900084, 0.907216, 0.914023, 0.920103,
                 0.925504, 0.930744, 0.935424, 0.939752, 0.943788, 0.947313, 0.950768, 0.953860, 0.956675, 0.959339,
                 0.961755, 0.964039, 0.965984, 0.967994, 0.969968, 0.971283, 0.972800, 0.974223, 0.975784, 0.976647,
                 0.977953, 0.978887, 0.979922, 0.980773, 0.981637, 0.982386, 0.983068, 0.983759, 0.984436, 0.984855,
                 0.985453, 0.985916, 0.986417, 0.986934, 0.987182, 0.987435, 0.988022, 0.988146, 0.988537, 0.988792,
                 0.989043, 0.989235, 0.989470, 0.989681, 0.989857, 0.990006, 0.990159, 0.990263, 0.990455, 0.990515,
                 0.990636, 0.990731, 0.990787, 0.990884, 0.990900, 0.990971, 0.990986, 0.991042, 0.991048, 0.991057,
                 0.991057, 0.991048, 0.991042, 0.990986, 0.990971, 0.990900, 0.990884, 0.990787, 0.990731, 0.990636,
                 0.990515, 0.990455, 0.990263, 0.990159, 0.990006, 0.989857, 0.989681, 0.989470, 0.989235, 0.989043,
                 0.988787, 0.988532, 0.988146, 0.988022, 0.987435, 0.987182, 0.986934, 0.986417, 0.985916, 0.985453,
                 0.984855, 0.984436, 0.983759, 0.983068, 0.982386, 0.981637, 0.980773, 0.979920, 0.978887, 0.977953,
                 0.976647, 0.975784, 0.974223, 0.972800, 0.971284, 0.969970, 0.967994, 0.965984, 0.964039, 0.961755,
                 0.959339, 0.956675, 0.953860, 0.950768, 0.947313, 0.943788, 0.939752, 0.935424, 0.930744, 0.925504,
                 0.920103, 0.914023, 0.907216, 0.900084, 0.892242, 0.883399, 0.874277, 0.863766, 0.852416, 0.839932,
                 0.826496, 0.811669, 0.795374, 0.777313, 0.757467, 0.735991, 0.712150, 0.685667, 0.657029, 0.625035,
                 0.589915, 0.551116, 0.508397, 0.460966, 0.408796, 0.351055, 0.287226, 0.216842, 0.139913, 0.062742]]);
    # DTOR is the module-level degrees-to-radians conversion factor (pi/180).
    beta = beta * DTOR # Tilt from horizontal of the PV modules/panels, in radians
    sazm = sazm * DTOR # Surface azimuth of PV module/panels, in radians
    # 1. Calculate and assign various parameters to be used for modeling irradiances
    iso_dif = 0.0; circ_dif = 0.0; horiz_dif = 0.0; grd_dif = 0.0; beam = 0.0; # For calling PerezComp to break diffuse into components for zero tilt (horizontal)
    ghi, iso_dif, circ_dif, horiz_dif, grd_dif, beam = perezComp(dni, dhi, albedo, zen, 0.0, zen) # Call to get iso_dif for horizontal surface
    # Debug output (disabled):
    # print "PEREZCOMP1 = "
    # print "ghi = ", ghi
    # print "iso_dif = ", iso_dif
    # print "circ_dif = ", circ_dif
    # print "horiz_dif = ", horiz_dif
    # print "grd_dif = ", grd_dif
    # print "beam = ", beam
    iso_sky_dif = iso_dif; # Isotropic irradiance from sky on horizontal surface, used later for determining isotropic sky component
    inc, tiltr, sazmr = sunIncident(0, 90.0, 180.0, 45.0, zen, azm) # For calling PerezComp to break diffuse into components for 90 degree tilt (vertical)
    # print "sunIncident 1."
    # print "inc = ", inc
    # print "tiltr = ", tiltr
    # print "sazmr = ", sazmr
    vti, iso_dif, circ_dif, horiz_dif, grd_dif, beam = perezComp(dni, dhi, albedo, inc, tiltr, zen) # Call to get horiz_dif for vertical surface
    # print "PEREZCOMP1 = "
    # print "vti = ", vti
    # print "iso_dif = ", iso_dif
    # print "circ_dif = ", circ_dif
    # print "horiz_dif = ", horiz_dif
    # print "grd_dif = ", grd_dif
    # print "beam = ", beam
    F2DHI = horiz_dif; # Horizon diffuse irradiance on a vertical surface, used later for determining horizon brightening irradiance component
    # Select AOI-correction table row and index of refraction by front-surface material.
    index = -99;
    n2 = -99.9;
    if (PVfrontSurface == "glass"):
        index = 0; # Index to use with 1-degree hemispherical segment AOI correction factor array
        n2 = 1.526; # Index of refraction for glass
    elif (PVfrontSurface == "ARglass"):
        index = 1; # Index to use with 1-degree hemispherical segment AOI correction factor array
        n2 = 1.300; # Index of refraction for ARglass
    else:
        raise Exception("Incorrect text input for PVfrontSurface. Must be glass or ARglass.")
    Ro = math.pow((n2 - 1.0) / (n2 + 1.0), 2.0); # Reflectance at normal incidence, Duffie and Beckman p217
    aveGroundGHI = 0.0; # Average GHI on ground under PV array for cases when x projection exceed 2*rtr
    for i in range (0,100):
        aveGroundGHI += frontGroundGHI[i] / 100.0;
    # Calculate x,y coordinates of bottom and top edges of PV row in front of desired PV row so that portions of sky and ground viewed by the
    # PV cell may be determined. Origin of x-y axis is the ground point below the lower front edge of the desired PV row. The row in front of
    # the desired row is in the negative x direction.
    h = math.sin(beta); # Vertical height of sloped PV panel (in PV panel slope lengths)
    x1 = math.cos(beta); # Horizontal distance from front of panel to rear of panel (in PV panel slope lengths)
    rtr = D + x1; # Row-to-row distance (in PV panel slope lengths)
    PbotX = -rtr; # x value for point on bottom edge of PV module/panel of row in front of (in PV panel slope lengths)
    PbotY = C; # y value for point on bottom edge of PV module/panel of row in front of (in PV panel slope lengths)
    PtopX = -D; # x value for point on top edge of PV module/panel of row in front of (in PV panel slope lengths)
    PtopY = h + C; # y value for point on top edge of PV module/panel of row in front of (in PV panel slope lengths)
    # 2. Calculate diffuse and direct component irradiances for each cell row
    for i in range (0, cellRows):
        # Calculate diffuse irradiances and reflected amounts for each cell row over it's field of view of 180 degrees,
        # beginning with the angle providing the upper most view of the sky (j=0)
        PcellX = x1 * (i + 0.5) / (cellRows); # x value for location of PV cell
        PcellY = C + h * (i + 0.5) / (cellRows); # y value for location of PV cell
        elvUP = math.atan((PtopY - PcellY) / (PcellX - PtopX)); # Elevation angle up from PV cell to top of PV module/panel, radians
        elvDOWN = math.atan((PcellY - PbotY) / (PcellX - PbotX)); # Elevation angle down from PV cell to bottom of PV module/panel, radians
        if (rowType == "first" or rowType == "single"): # 4/19/16 No array in front for these cases
            elvUP = 0.0;
            elvDOWN = 0.0;
        #Console.WriteLine("ElvUp = 0", elvUP / DTOR);
        #if (i == 0)
        #    Console.WriteLine("ElvDown = 0", elvDOWN / DTOR);

        # Diagnostics for NaN inputs (left from debugging sessions).
        if math.isnan(beta):
            print( "Beta is Nan")
        if math.isnan(elvUP):
            print( "elvUP is Nan")
        if math.isnan((math.pi - beta - elvUP) / DTOR):
            print( "division is Nan")

        # Partition the cell row's 180-degree field of view into sky, horizon-brightening, and ground arcs.
        iStopIso = int(round(np.float64((math.pi - beta - elvUP)) / DTOR)) # Last whole degree in arc range that sees sky, first is 0
        #Console.WriteLine("iStopIso = 0", iStopIso);
        iHorBright = int(round(max(0.0, 6.0 - elvUP / DTOR))); # Number of whole degrees for which horizon brightening occurs
        iStartGrd = int(round((math.pi - beta + elvDOWN) / DTOR)); # First whole degree in arc range that sees ground, last is 180
        # print "iStopIso = ", iStopIso
        # print "iHorBright = ", iHorBright
        # print "iStartGrd = ", iStartGrd
        frontGTI.append(0.0) # Initialize front GTI
        frontReflected.append(0.0); # Initialize reflected amount from front
        for j in range (0, iStopIso): # Add sky diffuse component and horizon brightening if present
        #for (j = 0; j < iStopIso; j++)
            frontGTI[i] += 0.5 * (math.cos(j * DTOR) - math.cos((j + 1) * DTOR)) * SegAOIcor[index][j] * iso_sky_dif; # Sky radiation
            frontReflected[i] += 0.5 * (math.cos(j * DTOR) - math.cos((j + 1) * DTOR)) * iso_sky_dif * (1.0 - SegAOIcor[index][j] * (1.0 - Ro)); # Reflected radiation from module
            if ((iStopIso - j) <= iHorBright): # Add horizon brightening term if seen
                frontGTI[i] += 0.5 * (math.cos(j * DTOR) - math.cos((j + 1) * DTOR)) * SegAOIcor[index][j] * F2DHI / 0.052264; # 0.052264 = 0.5 * [cos(84) - cos(90)]
                frontReflected[i] += 0.5 * (math.cos(j * DTOR) - math.cos((j + 1) * DTOR)) * (F2DHI / 0.052264) * (1.0 - SegAOIcor[index][j] * (1.0 - Ro)); # Reflected radiation from module
        #if (i == 0)
        #    Console.WriteLine("iStartGrd = 0", iStartGrd);
        for j in range (iStartGrd, 180): # Add ground reflected component
        #(j = iStartGrd; j < 180; j++)
            startElvDown = (j - iStartGrd) * DTOR + elvDOWN; # Start and ending down elevations for this j loop
            stopElvDown = (j + 1 - iStartGrd) * DTOR + elvDOWN;
            projectedX1 = PcellX - np.float64(PcellY) / math.tan(startElvDown); # Projection of ElvDown to ground in -x direction
            projectedX2 = PcellX - PcellY / math.tan(stopElvDown);
            actualGroundGHI = 0.0; # Actual ground GHI from summing array values
            #if (i == 0)
            #    Console.WriteLine("j= 0 projected X1 = 1,6:0.0", j, 100 * projectedX1 / rtr);
            if (abs(projectedX1 - projectedX2) > 0.99 * rtr):
                if (rowType == "first" or rowType == "single"): # 4/19/16 No array in front for these cases
                    actualGroundGHI = ghi; # Use total value if projection approximates the rtr
                else:
                    actualGroundGHI = aveGroundGHI; # Use average value if projection approximates the rtr
            else:
                projectedX1 = 100.0 * projectedX1 / rtr; # Normalize projections and multiply by 100
                projectedX2 = 100.0 * projectedX2 / rtr;
                if ((rowType == "first" or rowType == "single") and (abs(projectedX1) > rtr or abs(projectedX2) > rtr)): #4/19/2016
                    actualGroundGHI = ghi; # Use total value if projection > rtr for "first" or "single"
                else:
                    while (projectedX1 < 0.0 or projectedX2 < 0.0): # Offset so array indexes are positive
                        projectedX1 += 100.0;
                        projectedX2 += 100.0;
                    index1 = int(projectedX1); # Determine indexes for use with groundGHI array (truncates values)
                    index2 = int(projectedX2);
                    if (index1 == index2):
                        actualGroundGHI = frontGroundGHI[index1]; # x projections in same groundGHI element
                    else:
                        for k in range (index1, index2+1): # Sum the irradiances on the ground if projections are in different groundGHI elements
                        #for (k = index1; k <= index2; k++)
                            #Console.WriteLine("index1=0 index2=1", index1,index2);
                            # End elements are weighted by the fractional coverage of the projection;
                            # indexes >= 100 wrap around to the start of the 100-element array.
                            if (k == index1):
                                actualGroundGHI += frontGroundGHI[k] * (k + 1.0 - projectedX1);
                            elif (k == index2):
                                if (k < 100):
                                    actualGroundGHI += frontGroundGHI[k] * (projectedX2 - k);
                                else:
                                    actualGroundGHI += frontGroundGHI[k - 100] * (projectedX2 - k);
                            else:
                                if (k < 100):
                                    actualGroundGHI += frontGroundGHI[k];
                                else:
                                    actualGroundGHI += frontGroundGHI[k - 100];
                        actualGroundGHI /= projectedX2 - projectedX1; # Irradiance on ground in the 1 degree field of view
                    #if (i == 0)
                    #    Console.WriteLine("j=0 index1=1 index2=2 projectX1=3,5:0.0 projectX2=4,5:0.0 actualGrdGHI=5,6:0.0", j, index1, index2, projectedX1, projectedX2, actualGroundGHI);
            frontGTI[i] += 0.5 * (math.cos(j * DTOR) - math.cos((j + 1) * DTOR)) * SegAOIcor[index][j] * actualGroundGHI * albedo; # Add ground reflected component
            frontReflected[i] += 0.5 * (math.cos(j * DTOR) - math.cos((j + 1) * DTOR)) * actualGroundGHI * albedo * (1.0 - SegAOIcor[index][j] * (1.0 - Ro)); # Reflected ground radiation from module
            #Console.WriteLine("actualGroundGHI = 0,6:0.0 inputGHI = 1,6:0.0 aveArrayGroundGHI = 2,6:0.0", actualGroundGHI, dhi + dni * math.cos(zen), aveGroundGHI);
        # End of j loop for adding ground reflected component
        # Calculate and add direct and circumsolar irradiance components
        inc, tiltr, sazmr = sunIncident(0, beta / DTOR, sazm / DTOR, 45.0, zen, azm) # For calling PerezComp to break diffuse into components for the module tilt
        # print "sunIncident 2."
        # print "inc = ", inc
        # print "tiltr = ", tiltr
        # print "sazmr = ", sazmr
        # print " INCIDENT REALY NEEDED for AOI ", inc
        gtiAllpc, iso_dif, circ_dif, horiz_dif, grd_dif, beam = perezComp(dni, dhi, albedo, inc, tiltr, zen) # Call to get components for the tilt
        # print "PEREZCOMP 2 = "
        # print "gtiAllpc = ", vti
        # print "iso_dif = ", iso_dif
        # print "circ_dif = ", circ_dif
        # print "horiz_dif = ", horiz_dif
        # print "grd_dif = ", grd_dif
        # print "beam = ", beam
        cellShade = pvFrontSH * cellRows - i;
        if (cellShade > 1.0): # Fully shaded if > 1, no shade if < 0, otherwise fractionally shaded
            cellShade = 1.0;
        elif (cellShade < 0.0):
            cellShade = 0.0;
        if (cellShade < 1.0 and inc < math.pi / 2.0): # Cell not shaded entirely and inc < 90 deg
            cor = aOIcorrection(n2, inc); # Get AOI correction for beam and circumsolar
            frontGTI[i] += (1.0 - cellShade) * (beam + circ_dif) * cor; # Add beam and circumsolar radiation
            #frontReflected[i] += (1.0 - cellShade) * (beam + circ_dif) * (1.0 - cor * (1.0 - Ro)); # Reflected beam and circumsolar radiation from module
    # End of for i = 0; i < cellRows loop
    return aveGroundGHI, frontGTI, frontReflected;
    # End of GetFrontSurfaceIrradiances
def getGroundShadeFactors(rowType, beta, C, D, elv, azm, sazm):
    """
    Determine if the ground is shaded from direct beam radiation
    for points on the ground from the leading edge of one row of PV panels to
    the leading edge of the next row of PV panels behind it. This row-to-row
    dimension is divided into 100 ground segments and a ground shade factor is
    returned for each ground segment, with values of 1 for shaded segments and
    values of 0 for non shaded segments. The fractional amounts of shading of
    the front and back surfaces of the PV panel are also returned. 8/20/2015

    4/18/2016 - Modified to account for different row types. Because the ground
    factors may now be different depending on row, they are calculated for the
    row-to-row dimension to the rear of the leading module edge and to the
    front of the leading edge. Also returned is the maximum shadow length
    projected to the front or rear from the front of the module row

    Parameters
    ----------
    rowType : str
        "first", "interior", "last", or "single"
    beta
        Tilt from horizontal of the PV modules/panels (deg)
    C
        Ground clearance of PV panel (in PV panel slope lengths)
    D
        Horizontal distance between rows of PV panels (in PV panel slope
        lengths)
    elv
        Sun elevation (in radians)
    azm
        Sun azimuth (in radians)
    sazm
        Surface azimuth of PV panels (deg)

    Returns
    -------
    pvFrontSH : numeric
        Decimal fraction of the front surface of the PV panel that is shaded,
        0.0 to 1.0
    pvBackSH : numeric
        Decimal fraction of the back surface of the PV panel that is shaded,
        0.0 to 1.0
    maxShadow : numeric
        Maximum shadow length projected to the front(-) or rear (+) from the
        front of the module row (in PV panel slope lengths), only used later
        for rowTypes other than "interior"
    rearGroundSH : array of size [100]
        Ground shade factors for ground segments to the rear, 0 = not shaded,
        1 = shaded
    frontGroundSH : array of size [100]
        Ground shade factors for ground segments to the front, 0 = not shaded,
        1 = shaded

    Notes
    -----
    Values are returned in the order ``pvFrontSH, pvBackSH, maxShadow,
    rearGroundSH, frontGroundSH``.
    """
    rearGroundSH = []
    frontGroundSH = []
    # DTOR is the module-level degrees-to-radians conversion factor (pi/180).
    beta = beta * DTOR # Tilt from horizontal of the PV modules/panels, in radians
    sazm = sazm * DTOR # Surface azimuth of PV module/panels, in radians
    h = math.sin(beta); # Vertical height of sloped PV panel (in PV panel slope lengths)
    x1 = math.cos(beta); # Horizontal distance from front of panel to rear of panel (in PV panel slope lengths)
    rtr = D + x1; # Row-to-row distance (in PV panel slope lengths)
    # Divide the row-to-row spacing into 100 intervals for calculating ground shade factors
    delta = rtr / 100.0;
    x = -delta / 2.0; # Initialize horizontal dimension x to provide midpoint of intervals
    # Horizontal shadow lengths cast by the module edges (sign indicates front/rear):
    Lh = (h / math.tan(elv)) * math.cos(sazm - azm); # Horizontal length of shadow perpendicular to row from top of module to bottom of module
    Lhc = ((h + C) / math.tan(elv)) * math.cos(sazm - azm); # Horizontal length of shadow perpendicular to row from top of module to ground level
    Lc = (C / math.tan(elv)) * math.cos(sazm - azm); # Horizontal length of shadow perpendicular to row from bottom of module to ground level
    ss1 = 0.0; se1 = 0.0; ss2 = 0.0; se2 = 0.0; # Initialize shading start (s) and end (e) to zeros for two potential shading segments
    pvFrontSH = 0.0;
    pvBackSH = 0.0;
    if (rowType == "interior"):
        if (Lh > D): # Front side of PV module partially shaded, back completely shaded, ground completely shaded
            pvFrontSH = (Lh - D) / (Lh + x1);
            pvBackSH = 1.0;
            ss1 = 0.0; # Ground shaded from 0.0 to rtr
            se1 = rtr;
        elif (Lh < -(rtr + x1)): # Back side of PV module partially shaded, front completely shaded, ground completely shaded
            pvFrontSH = 1.0;
            pvBackSH = (Lh + rtr + x1) / (Lh + x1);
            ss1 = 0.0; # Ground shaded from 0.0 to rtr
            se1 = rtr;
        else: # Ground is partially shaded (I assume)
            if (Lhc >= 0.0): # Shadow to rear of row, module front unshaded, back shaded
                pvFrontSH = 0.0;
                pvBackSH = 1.0;
                Ss = Lc; # Shadow starts at Lc
                Se = Lhc + x1; # Shadow ends here
                while (Ss > rtr):
                    Ss -= rtr; # Put shadow in correct rtr space if needed
                    Se -= rtr;
                ss1 = Ss;
                se1 = Se;
                if (se1 > rtr): # then need to use two shade areas
                    se1 = rtr;
                    ss2 = 0.0;
                    se2 = Se - rtr;
                    if (se2 > ss1):
                        # This would mean ground completely shaded, does this occur?
                        ss1 = 0.0; # Ground shaded from 0.0 to rtr
                        se1 = rtr;
            else: # Shadow to front of row, either front or back might be shaded, depending on tilt and other factors
                Ss = 0.0; # Shadow starts at Lc, initialize
                Se = 0.0; # Shadow ends here, initialize
                if (Lc < Lhc + x1):
                    pvFrontSH = 0.0;
                    pvBackSH = 1.0;
                    Ss = Lc; # Shadow starts at Lc
                    Se = Lhc + x1; # Shadow ends here
                else:
                    pvFrontSH = 1.0;
                    pvBackSH = 0.0;
                    Ss = Lhc + x1; # Shadow starts at Lhc + x1
                    Se = Lc; # Shadow ends here
                while (Ss < 0.0):
                    Ss += rtr; # Put shadow in correct rtr space if needed
                    Se += rtr;
                ss1 = Ss;
                se1 = Se;
                if (se1 > rtr): # then need to use two shade areas
                    se1 = rtr;
                    ss2 = 0.0;
                    se2 = Se - rtr;
                    if (se2 > ss1):
                        # This would mean ground completely shaded, does this occur?
                        ss1 = 0.0; # Ground shaded from 0.0 to rtr
                        se1 = rtr;
        # End of if (Lh > D) else branching

        delta = rtr / 100.0;
        x = -delta / 2.0; # Initialize horizontal dimension x to provide midpoint of intervals
        #for (i = 0; i <= 99; i++)
        for i in range(0,100):
            x += delta;
            #if ((x >= ss1 && x < se1) || (x >= ss2 && x < se2)):
            if ((x >= ss1 and x < se1) or (x >= ss2 and x < se2)):
                rearGroundSH.append(1); # x within a shaded interval, set groundSH to 1 to indicate shaded
                frontGroundSH.append(1); # same for both front and rear
            else:
                rearGroundSH.append(0); # x not within a shaded interval, set groundSH to 0 to indicate not shaded, i.e. sunny
                frontGroundSH.append(0); # same for both front and rear
            #Console.WriteLine("x = 0,6:0.0000 groundSH = 1", x, groundSH[i]);
    # End of if row type == "interior"
    elif (rowType == "first"):
        if (Lh > 0.0): # Sun is on front side of PV module
            pvFrontSH = 0.0;
            pvBackSH = 1.0;
            ss1 = Lc; # Ground shaded from shadow of lower edge
            se1 = x1 + Lhc; # to shadow of upper edge
        # End of if sun on front side of PV module
        elif (Lh < -(rtr + x1)): # Back side of PV module partially shaded from row to rear, front completely shaded, ground completely shaded
            pvFrontSH = 1.0;
            pvBackSH = (Lh + rtr + x1) / (Lh + x1);
            ss1 = -rtr; # Ground shaded from -rtr to rtr
            se1 = rtr;
        # End of if back side of PV module partially shaded, front completely shaded, ground completely shaded
        else: # Shadow to frontside of row, either front or back might be shaded, depending on tilt and other factors
            if (Lc < Lhc + x1):
                pvFrontSH = 0.0;
                pvBackSH = 1.0;
                ss1 = Lc; # Shadow starts at Lc
                se1 = Lhc + x1; # Shadow ends here
            else:
                pvFrontSH = 1.0;
                pvBackSH = 0.0;
                ss1 = Lhc + x1; # Shadow starts at Lhc + x1
                se1 = Lc; # Shadow ends here
        # End of shadow to front of row

        # Rear ground segments span [0, rtr]; front segments span [-rtr, 0].
        delta = rtr / 100.0;
        x = -delta / 2.0; # Initialize horizontal dimension x to provide midpoint of intervals
        for i in range(0,100):
            x += delta;
            if (x >= ss1 and x < se1):
                rearGroundSH.append(1) # x within a shaded interval, set groundSH to 1 to indicate shaded
            else:
                rearGroundSH.append(0) # x not within a shaded interval, set groundSH to 0 to indicate not shaded, i.e. sunny
        x = -rtr - delta / 2.0; # Initialize horizontal dimension x to provide midpoint of intervals for front interval
        for i in range(0,100):
            x += delta;
            if (x >= ss1 and x < se1):
                frontGroundSH.append(1) # x within a shaded interval, set groundSH to 1 to indicate shaded
            else:
                frontGroundSH.append(0) # x not within a shaded interval, set groundSH to 0 to indicate not shaded, i.e. sunny
    # End of if row type == "first"
    elif (rowType == "last"):
        if (Lh > D): # Front side of PV module partially shaded, back completely shaded, ground completely shaded
            pvFrontSH = (Lh - D) / (Lh + x1);
            pvBackSH = 1.0;
            ss1 = -rtr; # Ground shaded from -rtr to rtr
            se1 = rtr;
        else: # Shadow to frontside of row, either front or back might be shaded, depending on tilt and other factors
            if (Lc < Lhc + x1):
                pvFrontSH = 0.0;
                pvBackSH = 1.0;
                ss1 = Lc; # Shadow starts at Lc
                se1 = Lhc + x1; # Shadow ends here
            else:
                pvFrontSH = 1.0;
                pvBackSH = 0.0;
                ss1 = Lhc + x1; # Shadow starts at Lhc + x1
                se1 = Lc; # Shadow ends here
        # End of shadow to front of row

        # Rear ground segments span [0, rtr]; front segments span [-rtr, 0].
        delta = rtr / 100.0;
        x = -delta / 2.0; # Initialize horizontal dimension x to provide midpoint of intervals
        for i in range(0,100):
            x += delta;
            if (x >= ss1 and x < se1):
                rearGroundSH.append(1); # x within a shaded interval, set groundSH to 1 to indicate shaded
            else:
                rearGroundSH.append(0); # x not within a shaded interval, set groundSH to 0 to indicate not shaded, i.e. sunny
        x = -rtr - delta / 2.0; # Initialize horizontal dimension x to provide midpoint of intervals for front interval
        for i in range(0,100):
            x += delta;
            if (x >= ss1 and x < se1):
                frontGroundSH.append(1); # x within a shaded interval, set groundSH to 1 to indicate shaded
            else:
                frontGroundSH.append(0); # x not within a shaded interval, set groundSH to 0 to indicate not shaded, i.e. sunny
    # End of if row type == "last"
    elif (rowType == "single"):
        if (Lh > 0.0): # Shadow to the rear
            pvFrontSH = 0.0;
            pvBackSH = 1.0;
            ss1 = Lc; # Ground shaded from shadow of lower edge
            se1 = x1 + Lhc; # to shadow of upper edge
        # End of if sun on front side of PV module
        else: # Shadow to frontside of row, either front or back might be shaded, depending on tilt and other factors
            if (Lc < Lhc + x1):
                pvFrontSH = 0.0;
                pvBackSH = 1.0;
                ss1 = Lc; # Shadow starts at Lc
                se1 = Lhc + x1; # Shadow ends here
            else:
                pvFrontSH = 1.0;
                pvBackSH = 0.0;
                ss1 = Lhc + x1; # Shadow starts at Lhc + x1
                se1 = Lc; # Shadow ends here
        # End of shadow to front of row

        # Rear ground segments span [0, rtr]; front segments span [-rtr, 0].
        delta = rtr / 100.0;
        x = -delta / 2.0; # Initialize horizontal dimension x to provide midpoint of intervals
        for i in range(0,100):
            x += delta;
            if (x >= ss1 and x < se1):
                rearGroundSH.append(1); # x within a shaded interval, set groundSH to 1 to indicate shaded
            else:
                rearGroundSH.append(0); # x not within a shaded interval, set groundSH to 0 to indicate not shaded, i.e. sunny
        x = -rtr - delta / 2.0; # Initialize horizontal dimension x to provide midpoint of intervals for front interval
        for i in range(0,100):
            x += delta;
            if (x >= ss1 and x < se1):
                frontGroundSH.append(1); # x within a shaded interval, set groundSH to 1 to indicate shaded
            else:
                frontGroundSH.append(0); # x not within a shaded interval, set groundSH to 0 to indicate not shaded, i.e. sunny
    # End of if row type == "single"
    else:
        print ("ERROR: Incorrect row type not passed to function GetGroundShadedFactors ");

    if (abs(ss1) > abs(se1)): # Maximum shadow length projected from the front of the PV module row
        maxShadow = ss1;
    else:
        maxShadow = se1;

    #Console.WriteLine("elv = 0,6:0.00 azm = 1,6:0.00 sazm = 2,6:0.00", elv * 180.0 / math.pi, azm * 180.0 / math.pi, sazm * 180.0 / math.pi);
    #Console.WriteLine("ss1 = 0,6:0.0000 se1 = 1,6:0.0000 ss2 = 2,6:0.0000 se2 = 3,6:0.0000 rtr = 4,6:0.000", ss1, se1, ss2, se2, rtr);
    #Console.WriteLine("pvFrontSH = 0,6:0.00 pvBackSH = 1,6:0.00", pvFrontSH, pvBackSH);
    # End of GetGroundShadedFactors
    #print "rearGroundSH", rearGroundSH[0]
    return pvFrontSH, pvBackSH, maxShadow, rearGroundSH, frontGroundSH;
    # End of getGroundShadeFactors
def getSkyConfigurationFactors(rowType, beta, C, D):
"""
This method determines the sky configuration factors for points on the
ground from the leading edge of one row of PV panels to the leading edge of
the next row of PV panels behind it. This row-to-row dimension is divided
into 100 ground segments and a sky configuration factor is returned for
each ground segment. The sky configuration factor represents the fraction
of the isotropic diffuse sky radiation (unobstructed) that is present on
the ground when partially obstructed by the rows of PV panels. The
equations follow that on pages in the notebook dated 8/12/2015. 8/20/2015
4/15/2016 Modifed for calculations other than just the interior rows. Row
type is identified with the string `rowType`, with the possilbe values:
* first = first row of the array
* interior = interior row of array
* last = last row of the array
* single = a single row array
Because the sky configuration factors may now be different depending on
row, they are calculated for the row-to-row dimension to the rear of the
leading module edge and to the front of the leading edge.
Parameters
----------
rowType : str
"first", "interior", "last", or "single"
beta : float
Tilt from horizontal of the PV modules/panels (deg)
C : float
Ground clearance of PV panel (in PV module/panel slope lengths)
D : float
Horizontal distance between rows of PV panels (in PV module/panel slope
lengths)
Returns
-------
rearSkyConfigFactors : array of size [100]
Sky configuration factors to rear of leading PVmodule edge (decimal
fraction)
frontSkyConfigFactors : array of size [100]
Sky configuration factors to rear of leading PVmodule edge (decimal
fraction)
Notes
-----
The horizontal distance between rows, `D`, is from the back edge of one row
to the front edge of the next, and it is not the row-to-row spacing.
"""
rearSkyConfigFactors = []
frontSkyConfigFactors = []
# Tilt from horizontal of the PV modules/panels, in radians
beta = beta * DTOR
# Vertical height of sloped PV panel (in PV panel slope lengths)
h = math.sin(beta)
# Horizontal distance from front of panel to rear of panel (in PV panel
# slope lengths)
x1 = math.cos(beta)
rtr = D + x1 # Row-to-row distance (in PV panel slope lengths)
# Forced fix for case of C = 0
# FIXME: for some reason the Config Factors go from 1 to 2 and not 0 to 1.
# TODO: investigate why this is happening in the code.
if C==0:
C=0.0000000001
if C < 0:
LOGGER.error(
"Height is below ground level. Function GetSkyConfigurationFactors"
" will continue but results might be unreliable")
# Divide the row-to-row spacing into 100 intervals and calculate
# configuration factors
delta = rtr / 100.0
if (rowType == "interior"):
# Initialize horizontal dimension x to provide midpoint of intervals
x = -delta / 2.0
for i in range(0,100):
x += delta
# <--rtr=x1+D--><--rtr=x1+D--><--rtr=x1+D-->
# |\ |\ |\ |\
# | \ ` | \ | \ /| \
# h \ ` h \ h \ / h \
# | \ ` | \ | \ / | \
# |_x1_\____D__`|_x1_\____D___|_x1_\_/_D____|_x1_\_
# | ` <------x-----/|
# C ` /
# | angA ` / angB
# *------------------------`-/---------------------
# x
# use ATAN2: 4-quadrant tangent instead of ATAN
# check 2 rows away
angA = math.atan2(h + C, (2.0 * rtr + x1 - x))
angB = math.atan2(C, (2.0 * rtr - x))
beta1 = max(angA, angB)
# check 1 rows away
angA = math.atan2(h + C, (rtr + x1 - x))
angB = math.atan2(C, (rtr - x))
beta2 = min(angA, angB)
# check 0 rows away
beta3 = max(angA, angB)
beta4 = math.atan2(h + C, (x1 - x))
beta5 = math.atan2(C, (-x))
beta6 = math.atan2(h + C, (-D - x))
sky1 =0; sky2 =0; sky3 =0
if (beta2 > beta1):
sky1 = 0.5 * (math.cos(beta1) - math.cos(beta2))
if (beta4 > beta3):
sky2 = 0.5 * (math.cos(beta3) - math.cos(beta4))
if (beta6 > beta5):
sky3 = 0.5 * (math.cos(beta5) - math.cos(beta6))
skyAll = sky1 + sky2 + sky3
# Save as arrays of values, same for both to the rear and front
rearSkyConfigFactors.append(skyAll)
frontSkyConfigFactors.append(skyAll)
# End of if "interior"
elif (rowType == "first"):
# RearSkyConfigFactors don't have a row in front, calculation of sky3
# changed, beta6 = 180 degrees
x = -delta / 2.0; # Initialize horizontal dimension x to provide midpoint of intervals
for i in range(0,100):
x += delta;
angA = math.atan((h + C) / (2.0 * rtr + x1 - x));
if (angA < 0.0):
angA += math.pi;
angB = math.atan(C / (2.0 * rtr - x));
if (angB < 0.0):
angB += math.pi;
beta1 = max(angA, angB);
angA = math.atan((h + C) / (rtr + x1 - x));
if (angA < 0.0):
angA += math.pi;
angB = math.atan(C / (rtr - x));
if (angB < 0.0):
angB += math.pi;
beta2 = min(angA, angB);
beta3 = max(angA, angB);
beta4 = math.atan((h + C) / (x1 - x));
if (beta4 < 0.0):
beta4 += math.pi;
beta5 = math.atan(C / (-x));
if (beta5 < 0.0):
beta5 += math.pi;
beta6 = math.pi;
sky1 = 0.0; sky2 = 0.0; sky3 = 0.0;
if (beta2 > beta1):
sky1 = 0.5 * (math.cos(beta1) - math.cos(beta2));
if (beta4 > beta3):
sky2 = 0.5 * (math.cos(beta3) - math.cos(beta4));
if (beta6 > beta5):
sky3 = 0.5 * (math.cos(beta5) - math.cos(beta6));
skyAll = sky1 + sky2 + sky3;
rearSkyConfigFactors.append(skyAll); # Save as arrays of values
#Console.WriteLine("0,5:0.000,1,5:0.000,2,5:0.000,3,5:0.000,4,5:0.000", x, sky1, sky2, sky3, skyAll);
#sw.WriteLine("0,5:0.000,1,5:0.000,2,5:0.000,3,5:0.000,4,5:0.000", x, sky1, sky2, sky3, skyAll);
# frontSkyConfigFactors don't have a row in front, calculation of sky3 included as part of revised sky2,
# beta 4 set to 180 degrees
x = -delta / 2.0; # Initialize horizontal dimension x to provide midpoint of intervals
for i in range(0,100):
x += delta;
angA = math.atan((h + C) / (2.0 * rtr + x1 - x));
if (angA < 0.0):
angA += math.pi;
angB = math.atan(C / (2.0 * rtr - x));
if (angB < 0.0):
angB += math.pi;
beta1 = max(angA, angB);
angA = math.atan((h + C) / (rtr + x1 - x));
if (angA < 0.0):
angA += math.pi;
angB = math.atan(C / (rtr - x));
if (angB < 0.0):
angB += math.pi;
beta2 = min(angA, angB);
beta3 = max(angA, angB);
beta4 = math.pi;
sky1 = 0.0; sky2 = 0.0;
if (beta2 > beta1):
sky1 = 0.5 * (math.cos(beta1) - math.cos(beta2));
if (beta4 > beta3):
sky2 = 0.5 * (math.cos(beta3) - math.cos(beta4));
skyAll = sky1 + sky2;
frontSkyConfigFactors.append(skyAll); # Save as arrays of values
#Console.WriteLine("0,5:0.000,1,5:0.000,2,5:0.000,3,5:0.000,4,5:0.000", x, sky1, sky2, sky3, skyAll);
#sw.WriteLine("0,5:0.000,1,5:0.000,2,5:0.000,3,5:0.000,4,5:0.000", x, sky1, sky2, sky3, skyAll);
# End of if "first"
elif (rowType == "last"):
# RearSkyConfigFactors don't have a row to the rear, combine sky1 into sky 2, set beta 3 = 0.0
x = -delta / 2.0; # Initialize horizontal dimension x to provide midpoint of intervals
for i in range(0,100):
x += delta;
beta3 = 0.0;
beta4 = math.atan((h + C) / (x1 - x));
if (beta4 < 0.0):
beta4 += math.pi;
beta5 = math.atan(C / (-x));
if (beta5 < 0.0):
beta5 += math.pi;
beta6 = math.atan((h + C) / (-D - x));
if (beta6 < 0.0):
beta6 += math.pi;
sky2 = 0.0; sky3 = 0.0;
if (beta4 > beta3):
sky2 = 0.5 * (math.cos(beta3) - math.cos(beta4));
if (beta6 > beta5):
sky3 = 0.5 * (math.cos(beta5) - math.cos(beta6));
skyAll = sky2 + sky3;
rearSkyConfigFactors.append(skyAll); # Save as arrays of values
#Console.WriteLine("0,5:0.000,1,5:0.000,2,5:0.000,3,5:0.000,4,5:0.000", x, sky1, sky2, sky3, skyAll);
#sw.WriteLine("0,5:0.000,1,5:0.000,2,5:0.000,3,5:0.000,4,5:0.000", x, sky1, sky2, sky3, skyAll);
# FrontSkyConfigFactors have beta1 = 0.0
x = -delta / 2.0; # Initialize horizontal dimension x to provide midpoint of intervals
for i in range(0,100):
x += delta;
angA = math.atan((h + C) / (2.0 * rtr + x1 - x));
if (angA < 0.0):
angA += math.pi;
angB = math.atan(C / (2.0 * rtr - x));
if (angB < 0.0):
angB += math.pi;
beta1 = max(angA, angB);
beta1 = 0.0;
angA = math.atan((h + C) / (rtr + x1 - x));
if (angA < 0.0):
angA += math.pi;
angB = math.atan(C / (rtr - x));
if (angB < 0.0):
angB += math.pi;
beta2 = min(angA, angB);
beta3 = max(angA, angB);
beta4 = math.atan((h + C) / (x1 - x));
if (beta4 < 0.0):
beta4 += math.pi;
beta5 = math.atan(C / (-x));
if (beta5 < 0.0):
beta5 += math.pi;
beta6 = math.atan((h + C) / (-D - x));
if (beta6 < 0.0):
beta6 += math.pi;
sky1 = 0.0; sky2 = 0.0; sky3 = 0.0;
if (beta2 > beta1):
sky1 = 0.5 * (math.cos(beta1) - math.cos(beta2));
if (beta4 > beta3):
sky2 = 0.5 * (math.cos(beta3) - math.cos(beta4));
if (beta6 > beta5):
sky3 = 0.5 * (math.cos(beta5) - math.cos(beta6));
skyAll = sky1 + sky2 + sky3;
frontSkyConfigFactors.append(skyAll); # Save as arrays of values,
#Console.WriteLine("0,5:0.000,1,5:0.000,2,5:0.000,3,5:0.000,4,5:0.000", x, sky1, sky2, sky3, skyAll);
#sw.WriteLine("0,5:0.000,1,5:0.000,2,5:0.000,3,5:0.000,4,5:0.000", x, sky1, sky2, sky3, skyAll);
# End of if "last" row
elif (rowType == "single"):
# RearSkyConfigFactors don't have a row to the rear ir front, combine sky1 into sky 2, set beta 3 = 0.0,
# for sky3, beta6 = 180.0.
x = -delta / 2.0; # Initialize horizontal dimension x to provide midpoint of intervals
for i in range(0,100):
x += delta;
beta3 = 0.0;
beta4 = math.atan((h + C) / (x1 - x));
if (beta4 < 0.0):
beta4 += math.pi;
beta5 = math.atan(C / (-x));
if (beta5 < 0.0):
beta5 += math.pi;
beta6 = math.pi;
sky2 = 0.0; sky3 = 0.0;
if (beta4 > beta3):
sky2 = 0.5 * (math.cos(beta3) - math.cos(beta4));
if (beta6 > beta5):
sky3 = 0.5 * (math.cos(beta5) - math.cos(beta6));
skyAll = sky2 + sky3;
rearSkyConfigFactors.append(skyAll); # Save as arrays of values
#Console.WriteLine("0,5:0.000,1,5:0.000,2,5:0.000,3,5:0.000,4,5:0.000", x, sky1, sky2, sky3, skyAll);
#sw.WriteLine("0,5:0.000,1,5:0.000,2,5:0.000,3,5:0.000,4,5:0.000", x, sky1, sky2, sky3, skyAll);
# FrontSkyConfigFactors have only a row to the rear, combine sky3 into sky2, set beta1 = 0, beta4 = 180
x = -delta / 2.0; # Initialize horizontal dimension x to provide midpoint of intervals
for i in range(0,100):
x += delta;
angA = math.atan((h + C) / (2.0 * rtr + x1 - x));
if (angA < 0.0):
angA += math.pi;
angB = math.atan(C / (2.0 * rtr - x));
if (angB < 0.0):
angB += math.pi;
beta1 = max(angA, angB);
beta1 = 0.0;
angA = math.atan((h + C) / (rtr + x1 - x));
if (angA < 0.0):
angA += math.pi;
angB = math.atan(C / (rtr - x));
if (angB < 0.0):
angB += math.pi;
beta2 = min(angA, angB);
beta3 = max(angA, angB);
beta4 = math.pi;
sky1 = 0.0; sky2 = 0.0;
if (beta2 > beta1):
sky1 = 0.5 * (math.cos(beta1) - math.cos(beta2));
if (beta4 > beta3):
sky2 = 0.5 * (math.cos(beta3) - math.cos(beta4));
skyAll = sky1 + sky2;
frontSkyConfigFactors.append(skyAll); # Save as arrays of values
#Console.WriteLine("0,5:0.000,1,5:0.000,2,5:0.000,3,5:0.000,4,5:0.000", x, sky1, sky2, sky3, skyAll);
#sw.WriteLine("0,5:0.000,1,5:0.000,2,5:0.000,3,5:0.000,4,5:0.000", x, sky1, sky2, sky3, skyAll);
# End of if "single"
else:
print("ERROR: Incorrect row type not passed to function GetSkyConfigurationFactors ");
return rearSkyConfigFactors, frontSkyConfigFactors;
# End of GetSkyConfigurationFactors
def rowSpacing(beta, sazm, lat, lng, tz, hour, minute):
    """
    This method determines the horizontal distance D between rows of PV panels
    (in PV module/panel slope lengths) for no shading on December 21 (north
    hemisphere) / June 21 (south hemisphere) for a module tilt angle beta and
    surface azimuth sazm, and a given latitude, longitude, and time zone and
    for the time passed to the method (typically 9 am).

    (Ref: the row-to-row spacing is then ``D + cos(beta)``)
    8/21/2015

    Parameters
    ----------
    beta : double
        Tilt from horizontal of the PV modules/panels (deg)
    sazm : double
        Surface azimuth of the PV modules/panels (deg)
    lat : double
        Site latitude (deg)
    lng : double
        Site longitude (deg)
    tz : double
        Time zone (hrs)
    hour : int
        hour for no shading criteria
    minute: double
        minute for no shading

    Returns
    -------
    D : numeric
        Horizontal distance between rows of PV panels (in PV panel slope
        lengths)
    """
    beta = beta * DTOR   # Tilt from horizontal of the PV modules/panels, in radians
    sazm = sazm * DTOR   # Surface azimuth of PV modules/panels, in radians
    # First solarPos call: obtain the true solar time (tst) corresponding to the
    # requested clock time on the winter solstice of the relevant hemisphere.
    if lat >= 0:
        [azm, zen, elv, dec, sunrise, sunset, Eo, tst] = solarPos (2014, 12, 21, hour, minute, lat, lng, tz)
    else:
        [azm, zen, elv, dec, sunrise, sunset, Eo, tst] = solarPos (2014, 6, 21, hour, minute, lat, lng, tz)
    # NOTE(review): tst is immediately overridden with a hard-coded value
    # ("DLL Forced value"), discarding the tst computed above -- presumably to
    # reproduce a reference DLL implementation's output. Confirm this is
    # intentional for all sites/times before relying on this function generally.
    tst = 8.877 ##DLL Forced value
    minute -= 60.0 * (tst - hour);  # Adjust minute so sun position is calculated for a tst equal to the
                                    # time passed to the function
    # Second solarPos call: recompute the sun position at the adjusted minute.
    if lat >= 0:
        [azm, zen, elv, dec, sunrise, sunset, Eo, tst] = solarPos(2014, 12, 21, hour, minute, lat, lng, tz)
    else:
        [azm, zen, elv, dec, sunrise, sunset, Eo, tst] = solarPos(2014, 6, 21, hour, minute, lat, lng, tz)
    # Console.WriteLine("tst = {0} azm = {1} elv = {2}", tst, azm * 180.0 / Math.PI, elv * 180.0 / Math.PI);
    # Shadow length on the ground in the row-normal direction, in panel slope
    # lengths: cos(sazm - azm) projects the shadow onto the row normal,
    # sin(beta) is the panel's vertical extent, tan(elv) converts that height
    # into a horizontal shadow length.
    D = math.cos(sazm - azm) * math.sin(beta) / math.tan(elv)
    return D
# End of RowSpacing
def trackingBFvaluescalculator(beta, hub_height, r2r):
    """1-axis tracking helper: geometry of a tilted panel around its hub.

    Parameters
    ----------
    beta : float
        Tilt from horizontal of the PV modules/panels (deg).
    hub_height : float
        Tracker hub height (in PV panel slope lengths).
    r2r : float
        Row-to-row distance (in PV panel slope lengths).

    Returns
    -------
    C : float
        Ground clearance of the PV panel (in PV panel slope lengths):
        hub height minus the vertical rise from panel middle to panel top.
    D : float
        Horizontal gap between the rear of one panel and the front of the
        next (in PV panel slope lengths): row-to-row distance minus the
        panel's horizontal footprint.
    """
    # Created on Tue Jun 13 08:01:56 2017
    # @author: sayala
    tilt_rad = beta * DTOR                    # tilt converted to radians
    footprint = math.cos(tilt_rad)            # horizontal extent of the tilted panel
    half_rise = 0.5 * math.sin(tilt_rad)      # vertical rise from panel middle to top
    gap = r2r - footprint                     # D: inter-row ground gap
    clearance = hub_height - half_rise        # C: clearance under the lowest edge path
    return clearance, gap
| 50.374441
| 203
| 0.534692
|
ab5be15bbc59ecc18cf93a6170b0dd272f33cfd6
| 833
|
py
|
Python
|
tests/test_slison.py
|
Habidatum/slisonner
|
488be30a199a5d29271e24377c37a7ad83d52e3e
|
[
"MIT"
] | 2
|
2017-02-06T17:15:11.000Z
|
2017-04-17T13:18:18.000Z
|
tests/test_slison.py
|
Habidatum/slisonner
|
488be30a199a5d29271e24377c37a7ad83d52e3e
|
[
"MIT"
] | null | null | null |
tests/test_slison.py
|
Habidatum/slisonner
|
488be30a199a5d29271e24377c37a7ad83d52e3e
|
[
"MIT"
] | null | null | null |
from slisonner import decoder, encoder
from tests import mocker
from tempfile import mkdtemp
from shutil import rmtree
| 29.75
| 75
| 0.716687
|
ab5da7ec6af05a22fe8ff59c25c22325724698fc
| 403
|
py
|
Python
|
cartrade/cartrade/doctype/category/category.py
|
vignesharumainayagam/cartrade
|
81349bc3cd9dbd441491304734077aab10dca56f
|
[
"MIT"
] | null | null | null |
cartrade/cartrade/doctype/category/category.py
|
vignesharumainayagam/cartrade
|
81349bc3cd9dbd441491304734077aab10dca56f
|
[
"MIT"
] | null | null | null |
cartrade/cartrade/doctype/category/category.py
|
vignesharumainayagam/cartrade
|
81349bc3cd9dbd441491304734077aab10dca56f
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
# Copyright (c) 2018, Tridots Tech Pvt. Ltd. and contributors
# For license information, please see license.txt
from __future__ import unicode_literals
import frappe
from frappe.website.website_generator import WebsiteGenerator
| 28.785714
| 61
| 0.751861
|
ab5e4dfefa5bc8fdcbff9af5c74dd0475612065f
| 372
|
py
|
Python
|
exercises/pyfiles/ex812_polarsincos.py
|
TUDelft-AE-Python/ae1205-exercises
|
342d1d567b64d3ccb3371ce9826c02a87a155fa8
|
[
"MIT"
] | 1
|
2021-10-05T04:49:54.000Z
|
2021-10-05T04:49:54.000Z
|
exercises/pyfiles/ex812_polarsincos.py
|
TUDelft-AE1205/ae1205-exercises
|
342d1d567b64d3ccb3371ce9826c02a87a155fa8
|
[
"MIT"
] | null | null | null |
exercises/pyfiles/ex812_polarsincos.py
|
TUDelft-AE1205/ae1205-exercises
|
342d1d567b64d3ccb3371ce9826c02a87a155fa8
|
[
"MIT"
] | null | null | null |
import matplotlib.pyplot as plt
import math

# Sample the polar curve r = 4*cos(2*phi) at phi = 0.00, 0.01, ..., 6.27
# and convert each (r, phi) pair to Cartesian coordinates for plotting.
phis = [i / 100.0 for i in range(0, 628)]
radii = [4 * math.cos(2 * phi) for phi in phis]
xtab = [r * math.cos(phi) for r, phi in zip(radii, phis)]
ytab = [r * math.sin(phi) for r, phi in zip(radii, phis)]

plt.plot(xtab, ytab)
plt.show()
| 19.578947
| 55
| 0.620968
|
ab5ff68a9733a875c0aeb19f8b19c6f3ac7260b4
| 3,108
|
py
|
Python
|
vendor/packages/logilab-astng/__pkginfo__.py
|
jgmize/kitsune
|
8f23727a9c7fcdd05afc86886f0134fb08d9a2f0
|
[
"BSD-3-Clause"
] | 2
|
2019-08-19T17:08:47.000Z
|
2019-10-05T11:37:02.000Z
|
vendor/packages/logilab-astng/__pkginfo__.py
|
jgmize/kitsune
|
8f23727a9c7fcdd05afc86886f0134fb08d9a2f0
|
[
"BSD-3-Clause"
] | null | null | null |
vendor/packages/logilab-astng/__pkginfo__.py
|
jgmize/kitsune
|
8f23727a9c7fcdd05afc86886f0134fb08d9a2f0
|
[
"BSD-3-Clause"
] | null | null | null |
# Copyright (c) 2003-2010 LOGILAB S.A. (Paris, FRANCE).
# http://www.logilab.fr/ -- mailto:contact@logilab.fr
#
# This program is free software; you can redistribute it and/or modify it under
# the terms of the GNU Lesser General Public License as published by the Free Software
# Foundation; either version 2 of the License, or (at your option) any later
# version.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License along with
# this program; if not, write to the Free Software Foundation, Inc.,
# 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
# copyright 2003-2010 Sylvain Thenault, all rights reserved.
# contact mailto:thenault@gmail.com
#
# This file is part of logilab-astng.
#
# logilab-astng is free software: you can redistribute it and/or modify it
# under the terms of the GNU Lesser General Public License as published by the
# Free Software Foundation, either version 2.1 of the License, or (at your
# option) any later version.
#
# logilab-astng is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
# for more details.
#
# You should have received a copy of the GNU Lesser General Public License along
# with logilab-astng. If not, see <http://www.gnu.org/licenses/>.
"""
logilab.astng packaging information
"""
distname = 'logilab-astng'
modname = 'astng'
subpackage_of = 'logilab'
numversion = (0, 20, 1)
version = '.'.join([str(num) for num in numversion])
install_requires = ['logilab-common >= 0.49.0']
pyversions = ["2.3", "2.4", "2.5", '2.6']
license = 'LGPL'
author = 'Logilab'
author_email = 'python-projects@lists.logilab.org'
mailinglist = "mailto://%s" % author_email
web = "http://www.logilab.org/project/%s" % distname
ftp = "ftp://ftp.logilab.org/pub/%s" % modname
short_desc = "rebuild a new abstract syntax tree from Python's ast"
long_desc = """The aim of this module is to provide a common base \
representation of python source code for projects such as pychecker, pyreverse,
pylint... Well, actually the development of this library is essentially
governed by pylint's needs.
It rebuilds the tree generated by the compiler.ast [1] module (python <= 2.4)
or by the builtin _ast module (python >= 2.5) by recursively walking down the
AST and building an extended ast (let's call it astng ;). The new node classes
have additional methods and attributes for different usages.
Furthermore, astng builds partial trees by inspecting living objects."""
from os.path import join
include_dirs = [join('test', 'regrtest_data'),
join('test', 'data'), join('test', 'data2')]
| 40.363636
| 87
| 0.740991
|
ab60f9944d5fde6a7550fbbfc9b1e8fd43e10b50
| 1,518
|
py
|
Python
|
W-DCGAN/model.py
|
lmyybh/pytorch-networks
|
8da055f5042c3803b275734afc89d33d239d7585
|
[
"MulanPSL-1.0"
] | null | null | null |
W-DCGAN/model.py
|
lmyybh/pytorch-networks
|
8da055f5042c3803b275734afc89d33d239d7585
|
[
"MulanPSL-1.0"
] | null | null | null |
W-DCGAN/model.py
|
lmyybh/pytorch-networks
|
8da055f5042c3803b275734afc89d33d239d7585
|
[
"MulanPSL-1.0"
] | null | null | null |
import torch
import torch.nn as nn
| 33
| 87
| 0.563241
|
ab617d4c442405b9219d3fa02f66e3a525d82e42
| 4,339
|
py
|
Python
|
bioinformatics/analysis/rnaseq/prepare/split_gtf_by_type.py
|
bioShaun/omsCabinet
|
741179a06cbd5200662cd03bc2e0115f4ad06917
|
[
"MIT"
] | null | null | null |
bioinformatics/analysis/rnaseq/prepare/split_gtf_by_type.py
|
bioShaun/omsCabinet
|
741179a06cbd5200662cd03bc2e0115f4ad06917
|
[
"MIT"
] | null | null | null |
bioinformatics/analysis/rnaseq/prepare/split_gtf_by_type.py
|
bioShaun/omsCabinet
|
741179a06cbd5200662cd03bc2e0115f4ad06917
|
[
"MIT"
] | null | null | null |
import fire
import gtfparse
from pathlib import Path
GENCODE_CATEGORY_MAP = {
'IG_C_gene': 'protein_coding',
'IG_D_gene': 'protein_coding',
'IG_J_gene': 'protein_coding',
'IG_V_gene': 'protein_coding',
'IG_LV_gene': 'protein_coding',
'TR_C_gene': 'protein_coding',
'TR_J_gene': 'protein_coding',
'TR_V_gene': 'protein_coding',
'TR_D_gene': 'protein_coding',
'TEC': 'protein_coding',
'nonsense_mediated_decay': 'protein_coding',
'non_stop_decay': 'protein_coding',
'retained_intron': 'lncRNA',
'protein_coding': 'protein_coding',
'ambiguous_orf': 'lncRNA',
'Mt_rRNA': 'ncRNA',
'Mt_tRNA': 'ncRNA',
'miRNA': 'ncRNA',
'misc_RNA': 'ncRNA',
'rRNA': 'ncRNA',
'snRNA': 'ncRNA',
'snoRNA': 'ncRNA',
'ribozyme': 'ncRNA',
'sRNA': 'ncRNA',
'scaRNA': 'ncRNA',
'scRNA': 'ncRNA',
'non_coding': 'lncRNA',
'known_ncrna': 'ncRNA',
'3prime_overlapping_ncrna': 'lncRNA',
'3prime_overlapping_ncRNA': 'lncRNA',
'vaultRNA': 'ncRNA',
'processed_transcript': 'lncRNA',
'lincRNA': 'lncRNA',
'macro_lncRNA': 'lncRNA',
'sense_intronic': 'lncRNA',
'sense_overlapping': 'lncRNA',
'antisense': 'lncRNA',
'antisense_RNA': 'lncRNA',
'bidirectional_promoter_lncRNA': 'lncRNA',
'IG_pseudogene': 'pseudogene',
'IG_D_pseudogene': 'pseudogene',
'IG_C_pseudogene': 'pseudogene',
'IG_J_pseudogene': 'pseudogene',
'IG_V_pseudogene': 'pseudogene',
'TR_V_pseudogene': 'pseudogene',
'TR_J_pseudogene': 'pseudogene',
'Mt_tRNA_pseudogene': 'pseudogene',
'tRNA_pseudogene': 'pseudogene',
'snoRNA_pseudogene': 'pseudogene',
'snRNA_pseudogene': 'pseudogene',
'scRNA_pseudogene': 'pseudogene',
'rRNA_pseudogene': 'pseudogene',
'misc_RNA_pseudogene': 'pseudogene',
'miRNA_pseudogene': 'pseudogene',
'pseudogene': 'pseudogene',
'processed_pseudogene': 'pseudogene',
'polymorphic_pseudogene': 'pseudogene',
'retrotransposed': 'pseudogene',
'transcribed_processed_pseudogene': 'pseudogene',
'transcribed_unprocessed_pseudogene': 'pseudogene',
'transcribed_unitary_pseudogene': 'pseudogene',
'translated_processed_pseudogene': 'pseudogene',
'translated_unprocessed_pseudogene': 'pseudogene',
'unitary_pseudogene': 'pseudogene',
'unprocessed_pseudogene': 'pseudogene',
'novel_lncRNA': 'lncRNA',
'TUCP': 'TUCP',
'lncRNA': 'lncRNA'
}
if __name__ == '__main__':
fire.Fire(split_gtf)
| 30.77305
| 59
| 0.63563
|
ab61967196abc0b2e677bfd1d2c054cef2f1f32b
| 792
|
py
|
Python
|
rational.py
|
navel0810/chibi
|
d2e9a791492352c3c1b76c841a3ad30df2f444fd
|
[
"MIT"
] | null | null | null |
rational.py
|
navel0810/chibi
|
d2e9a791492352c3c1b76c841a3ad30df2f444fd
|
[
"MIT"
] | null | null | null |
rational.py
|
navel0810/chibi
|
d2e9a791492352c3c1b76c841a3ad30df2f444fd
|
[
"MIT"
] | null | null | null |
import math
q1=Q(1,2)
q2=Q(1,3)
print(q1/q2)
| 17.217391
| 36
| 0.412879
|
ab6209870d287fc20132452f64da2ca39e9ab140
| 1,890
|
py
|
Python
|
cities_light/tests/test_import.py
|
jsandovalc/django-cities-light
|
a1c6af08938b7b01d4e12555bd4cb5040905603d
|
[
"MIT"
] | null | null | null |
cities_light/tests/test_import.py
|
jsandovalc/django-cities-light
|
a1c6af08938b7b01d4e12555bd4cb5040905603d
|
[
"MIT"
] | null | null | null |
cities_light/tests/test_import.py
|
jsandovalc/django-cities-light
|
a1c6af08938b7b01d4e12555bd4cb5040905603d
|
[
"MIT"
] | null | null | null |
from __future__ import unicode_literals
import glob
import os
from dbdiff.fixture import Fixture
from .base import TestImportBase, FixtureDir
from ..settings import DATA_DIR
| 29.53125
| 84
| 0.607937
|
ab6246c7ab7820986e2418ba286e6e0d8c06092b
| 574
|
py
|
Python
|
custom_components/hoymiles/__init__.py
|
Cosik/HAHoymiles
|
e956f8fafc4ae59d4c05755c6e8a5d5d7caa37f9
|
[
"MIT"
] | null | null | null |
custom_components/hoymiles/__init__.py
|
Cosik/HAHoymiles
|
e956f8fafc4ae59d4c05755c6e8a5d5d7caa37f9
|
[
"MIT"
] | null | null | null |
custom_components/hoymiles/__init__.py
|
Cosik/HAHoymiles
|
e956f8fafc4ae59d4c05755c6e8a5d5d7caa37f9
|
[
"MIT"
] | null | null | null |
import datetime
import logging
import voluptuous as vol
from homeassistant.components.sensor import PLATFORM_SCHEMA
from homeassistant.const import CONF_PASSWORD, CONF_USERNAME
import homeassistant.helpers.config_validation as cv
from .const import (
CONF_PLANT_ID,
)
_LOGGER = logging.getLogger(__name__)
MIN_TIME_BETWEEN_UPDATES = datetime.timedelta(seconds=600)
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_USERNAME): cv.string,
vol.Required(CONF_PASSWORD): cv.string,
vol.Required(CONF_PLANT_ID): cv.string,
}
)
| 23.916667
| 60
| 0.775261
|
db3b69e3c4b3003f7bdcfee2a7ee4c426c44a37d
| 5,738
|
py
|
Python
|
views/auth.py
|
bluebibi/flask_rest
|
9b1ee876060bca5d97459bb894c73530f66c4c15
|
[
"MIT"
] | null | null | null |
views/auth.py
|
bluebibi/flask_rest
|
9b1ee876060bca5d97459bb894c73530f66c4c15
|
[
"MIT"
] | 1
|
2022-02-11T03:43:51.000Z
|
2022-02-11T03:43:51.000Z
|
views/auth.py
|
bluebibi/flask_rest
|
9b1ee876060bca5d97459bb894c73530f66c4c15
|
[
"MIT"
] | 2
|
2019-11-19T02:09:03.000Z
|
2020-04-04T06:55:14.000Z
|
from flask import Blueprint, redirect, render_template, request, flash, session
from database import base
from database.base import User
from forms import UserForm, LoginForm, MyPageUserForm
from flask_login import login_required, login_user, logout_user, current_user
import requests
auth_blueprint = Blueprint('auth', __name__)
kakao_oauth = {}
def kakao_me_and_signup():
    """Fetch the authenticated Kakao user's profile and auto-register them.

    POSTs to the Kakao ``v1/user/me`` REST endpoint using the access token
    stored in the module-level ``kakao_oauth`` dict, caches the returned
    profile fields back into ``kakao_oauth``, and creates a local ``User``
    row if no account with that e-mail exists yet.

    Side effects:
      * mutates the module-level ``kakao_oauth`` dict
      * may INSERT and COMMIT a new ``User`` via ``base.db_session``
    """
    url = "https://kapi.kakao.com/v1/user/me"
    headers = {
        # Bearer token obtained earlier in the OAuth flow and stashed in kakao_oauth.
        "Authorization": "Bearer {0}".format(kakao_oauth["access_token"]),
        "Content-Type": "application/x-www-form-urlencoded;charset=utf-8"
    }
    response = requests.post(
        url=url,
        headers=headers
    )
    #print("kakao_me_and_signup", response.json())
    # Cache profile fields from the Kakao response.
    # NOTE(review): response.json() is re-parsed on every access below, and the
    # code assumes a well-formed response containing these exact keys -- confirm
    # that error handling (HTTP errors, missing keys) happens in the caller.
    kakao_oauth["kaccount_email"] = response.json()["kaccount_email"]
    kakao_oauth["id"] = response.json()["id"]
    kakao_oauth["kakao_profile_image"] = response.json()["properties"]["profile_image"]
    kakao_oauth["nickname"] = response.json()["properties"]["nickname"]
    kakao_oauth["kakao_thumbnail_image"] = response.json()["properties"]["thumbnail_image"]
    # First-time sign-in: create a local account keyed by the Kakao e-mail.
    c = base.db_session.query(User).filter(User.email == kakao_oauth["kaccount_email"]).count()
    if c == 0:
        user = User(name=kakao_oauth["nickname"], email=kakao_oauth["kaccount_email"], affiliation=None)
        # NOTE(review): fixed default password "1234" for OAuth-created accounts
        # is a security risk if password login is also enabled -- confirm intent.
        user.set_password("1234")
        base.db_session.add(user)
        base.db_session.commit()
| 32.977011
| 104
| 0.648832
|
db3d773e3532da7f969a86616e27db866a72624c
| 3,500
|
py
|
Python
|
doc/.src/book/exer/cable_sin.py
|
hplgit/fem-book
|
c23099715dc3cb72e7f4d37625e6f9614ee5fc4e
|
[
"MIT"
] | 86
|
2015-12-17T12:57:11.000Z
|
2022-03-26T01:53:47.000Z
|
doc/.src/book/exer/cable_sin.py
|
hplgit/fem-book
|
c23099715dc3cb72e7f4d37625e6f9614ee5fc4e
|
[
"MIT"
] | 9
|
2017-04-16T21:57:29.000Z
|
2021-04-17T08:09:30.000Z
|
doc/.src/book/exer/cable_sin.py
|
hplgit/fem-book
|
c23099715dc3cb72e7f4d37625e6f9614ee5fc4e
|
[
"MIT"
] | 43
|
2016-03-11T19:33:14.000Z
|
2022-03-05T00:21:57.000Z
|
import matplotlib.pyplot as plt
def model():
    """Symbolically solve the two-point boundary-value problem and return u(x).

    Boundary conditions: u(0) = 0 and u'(1) = 0.

    NOTE(review): the original docstring said ``u'' = -1``, but the code
    integrates +1 (``sym.integrate(1, ...)``), i.e. it actually solves
    ``u'' = +1`` -- confirm which sign was intended.
    """
    import sympy as sym
    x, c_0, c_1, = sym.symbols('x c_0 c_1')
    # Integrate twice, introducing one integration constant per integration.
    u_x = sym.integrate(1, (x, 0, x)) + c_0    # u'(x)
    u = sym.integrate(u_x, (x, 0, x)) + c_1    # u(x)
    # Determine c_0, c_1 from the boundary conditions u(0)=0 and u'(1)=0.
    r = sym.solve([u.subs(x,0) - 0,
                   sym.diff(u,x).subs(x, 1) - 0],
                  [c_0, c_1])
    u = u.subs(c_0, r[c_0]).subs(c_1, r[c_1])
    u = sym.simplify(sym.expand(u))
    return u
def midpoint_rule(f, M=100000):
    """Integrate f(x) over [0, 1] with the composite midpoint rule.

    Parameters
    ----------
    f : callable
        Function of one variable; must accept a numpy array (vectorized).
    M : int
        Number of equal-width subintervals.

    Returns
    -------
    float
        Approximation of the integral of f over [0, 1].
    """
    import numpy as np
    width = 1.0 / M                                      # length of each subinterval
    midpoints = np.linspace(width / 2, 1 - width / 2, M)  # one sample per interval
    return width * np.sum(f(midpoints))
if __name__ == '__main__':
import sys
print(model())
print('sine 2*i+1 integral:')
check_integral_b()
print('sine i+1 integral, sympy answer:')
check_integral_d_sympy_answer()
print('sine i+1 integral:')
check_integral_d()
#sys.exit(0)
plot_sine_sum()
plt.figure()
plot_sine_sum_d()
plt.show()
| 28.92562
| 60
| 0.512571
|
db3dd00adedd165108fb972c0f4e5656055ffd1d
| 1,196
|
py
|
Python
|
skgmm.py
|
liuliu663/speaker-recognition-py3
|
8fd0f77ac011e4a11c7cac751dc985b9cd1f2c4d
|
[
"Apache-2.0"
] | null | null | null |
skgmm.py
|
liuliu663/speaker-recognition-py3
|
8fd0f77ac011e4a11c7cac751dc985b9cd1f2c4d
|
[
"Apache-2.0"
] | null | null | null |
skgmm.py
|
liuliu663/speaker-recognition-py3
|
8fd0f77ac011e4a11c7cac751dc985b9cd1f2c4d
|
[
"Apache-2.0"
] | null | null | null |
from sklearn.mixture import GaussianMixture
import operator
import numpy as np
import math
| 27.813953
| 81
| 0.601171
|
db3fb84bca4d1b9ce63dca5f602d76eb7650bd3f
| 106
|
py
|
Python
|
lib/loss/__init__.py
|
kennethwdk/PINet
|
3a0abbd653146c56e39612384891c94c3fb49b35
|
[
"MIT"
] | 10
|
2021-12-22T11:31:53.000Z
|
2022-01-18T11:52:17.000Z
|
lib/loss/__init__.py
|
kennethwdk/PINet
|
3a0abbd653146c56e39612384891c94c3fb49b35
|
[
"MIT"
] | null | null | null |
lib/loss/__init__.py
|
kennethwdk/PINet
|
3a0abbd653146c56e39612384891c94c3fb49b35
|
[
"MIT"
] | null | null | null |
from .heatmaploss import HeatmapLoss
from .offsetloss import OffsetLoss
from .refineloss import RefineLoss
| 35.333333
| 36
| 0.867925
|
db40a9951b31f74580005898f3f6b78a4f2c461b
| 1,080
|
py
|
Python
|
eth2/beacon/types/historical_batch.py
|
AndrewBezold/trinity
|
bc656da4dece431a0c929a99349d45faf75decf8
|
[
"MIT"
] | null | null | null |
eth2/beacon/types/historical_batch.py
|
AndrewBezold/trinity
|
bc656da4dece431a0c929a99349d45faf75decf8
|
[
"MIT"
] | null | null | null |
eth2/beacon/types/historical_batch.py
|
AndrewBezold/trinity
|
bc656da4dece431a0c929a99349d45faf75decf8
|
[
"MIT"
] | null | null | null |
from typing import Sequence
from eth.constants import ZERO_HASH32
from eth_typing import Hash32
import ssz
from ssz.sedes import Vector, bytes32
from eth2.configs import Eth2Config
from .defaults import default_tuple, default_tuple_of_size
| 30
| 87
| 0.649074
|
db42ec903108c18c60147d1092a12cae13582379
| 186
|
py
|
Python
|
app/settings.py
|
nikosk/fastAPI-microservice-example-
|
a1a61ab4e521bc0c48eee5b3a755db134c098546
|
[
"MIT"
] | null | null | null |
app/settings.py
|
nikosk/fastAPI-microservice-example-
|
a1a61ab4e521bc0c48eee5b3a755db134c098546
|
[
"MIT"
] | null | null | null |
app/settings.py
|
nikosk/fastAPI-microservice-example-
|
a1a61ab4e521bc0c48eee5b3a755db134c098546
|
[
"MIT"
] | null | null | null |
import os
from pydantic import BaseSettings
| 15.5
| 51
| 0.688172
|
db434cf2f9b45ff5f3690a75cc12bde4fd9cb6aa
| 1,354
|
py
|
Python
|
ADVECTOR/io_tools/create_bathymetry.py
|
john-science/ADVECTOR
|
5c5ca7595c2c051f1a088b1f0e694936c3da3610
|
[
"MIT"
] | 7
|
2021-09-07T02:32:00.000Z
|
2022-01-15T11:35:02.000Z
|
ADVECTOR/io_tools/create_bathymetry.py
|
TheOceanCleanupAlgorithms/ADVECT
|
e27ce15da6a2fcbccbe363f8c2415b0122696d1f
|
[
"MIT"
] | 1
|
2021-12-24T15:16:26.000Z
|
2021-12-24T15:16:26.000Z
|
ADVECTOR/io_tools/create_bathymetry.py
|
TheOceanCleanupAlgorithms/ADVECT
|
e27ce15da6a2fcbccbe363f8c2415b0122696d1f
|
[
"MIT"
] | 1
|
2021-12-12T15:13:52.000Z
|
2021-12-12T15:13:52.000Z
|
import numpy as np
import xarray as xr
def create_bathymetry_from_land_mask(land_mask: xr.DataArray) -> xr.DataArray:
    """Derive a bathymetry field from a boolean land mask.

    Method: identifies the lower depth bound of the shallowest
    ocean cell (non-null) in each vertical grid column.

    :param land_mask: dimensions {time, depth, lat, lon}, boolean array,
        True where cell is land
    :return: DataArray named "bathymetry" (units m, positive up) giving, per
        horizontal column, the seafloor depth implied by the mask
    """
    assert np.all(land_mask.depth <= 0), "depth coordinate must be positive up"
    assert np.all(
        np.diff(land_mask.depth) > 0
    ), "depth coordinate must be sorted ascending"
    # In the kernel, particles look up data based on the nearest cell-center.
    # Thus cell bounds are the midpoints between each centers.
    # Very top cell bound is surface, and bottom cell bounds are
    # assumed to be symmetric about bottom cell center.
    depth_diff = np.diff(land_mask.depth)
    depth_bnds = np.concatenate(
        [
            land_mask.depth.values[:1] - depth_diff[0] / 2,  # lower bound of deepest cell
            land_mask.depth.values[:-1] + depth_diff / 2,    # interior midpoints
            [0],                                             # sea surface
        ]
    )
    # Relabel each cell with its LOWER bound, then idxmax over ~land_mask picks
    # the depth coordinate of the first ocean (True) cell along the column;
    # columns that are land at the shallowest level fall back to the surface
    # bound via .where().
    # NOTE(review): relies on idxmax returning the first maximal coordinate
    # along "depth" -- confirm against the xarray version in use.
    bathy = (
        (~land_mask)
        .assign_coords({"depth": depth_bnds[:-1]})
        .idxmax(dim="depth")
        .where(~land_mask.isel(depth=-1), depth_bnds[-1])
    )
    bathy = bathy.drop(["time", "depth"])
    bathy.name = "bathymetry"
    bathy.attrs = {"units": "m", "positive": "up"}
    return bathy
| 34.717949
| 99
| 0.637371
|
db446d4b90633693172c23acd329f858502b3ec2
| 712
|
py
|
Python
|
unitconvert/distance.py
|
cr2630git/unitconvert
|
64a530f53b27a9412988877c7ae1b3b34f9ce8a6
|
[
"MIT"
] | null | null | null |
unitconvert/distance.py
|
cr2630git/unitconvert
|
64a530f53b27a9412988877c7ae1b3b34f9ce8a6
|
[
"MIT"
] | null | null | null |
unitconvert/distance.py
|
cr2630git/unitconvert
|
64a530f53b27a9412988877c7ae1b3b34f9ce8a6
|
[
"MIT"
] | 2
|
2017-11-16T15:11:09.000Z
|
2021-08-19T19:34:23.000Z
|
"""
A simple python module for converting kilometers to miles or vice versa.
So simple that it doesn't even have any dependencies.
"""
def kilometers_to_miles(dist_in_km):
    """Convert a distance from kilometers to miles.

    PARAMETERS
    ----------
    dist_in_km: float
        A distance in kilometers.

    RETURNS
    -------
    dist_in_mi: float
        The same distance expressed in miles.
    """
    km_per_mile = 1.609344  # exact: one international mile is 1.609344 km
    return dist_in_km / km_per_mile
def miles_to_kilometers(dist_in_mi):
    """Convert a distance from miles to kilometers.

    PARAMETERS
    ----------
    dist_in_mi: float
        A distance in miles.

    RETURNS
    -------
    dist_in_km: float
        The same distance expressed in kilometers.
    """
    km_per_mile = 1.609344  # exact: one international mile is 1.609344 km
    return dist_in_mi * km_per_mile
| 17.365854
| 72
| 0.703652
|