hexsha stringlengths 40 40 | size int64 5 2.06M | ext stringclasses 11 values | lang stringclasses 1 value | max_stars_repo_path stringlengths 3 251 | max_stars_repo_name stringlengths 4 130 | max_stars_repo_head_hexsha stringlengths 40 78 | max_stars_repo_licenses listlengths 1 10 | max_stars_count int64 1 191k ⌀ | max_stars_repo_stars_event_min_datetime stringlengths 24 24 ⌀ | max_stars_repo_stars_event_max_datetime stringlengths 24 24 ⌀ | max_issues_repo_path stringlengths 3 251 | max_issues_repo_name stringlengths 4 130 | max_issues_repo_head_hexsha stringlengths 40 78 | max_issues_repo_licenses listlengths 1 10 | max_issues_count int64 1 116k ⌀ | max_issues_repo_issues_event_min_datetime stringlengths 24 24 ⌀ | max_issues_repo_issues_event_max_datetime stringlengths 24 24 ⌀ | max_forks_repo_path stringlengths 3 251 | max_forks_repo_name stringlengths 4 130 | max_forks_repo_head_hexsha stringlengths 40 78 | max_forks_repo_licenses listlengths 1 10 | max_forks_count int64 1 105k ⌀ | max_forks_repo_forks_event_min_datetime stringlengths 24 24 ⌀ | max_forks_repo_forks_event_max_datetime stringlengths 24 24 ⌀ | content stringlengths 1 1.05M | avg_line_length float64 1 1.02M | max_line_length int64 3 1.04M | alphanum_fraction float64 0 1 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
1c01520b2864cd9a92e81b37cca59f5c41b908e7 | 44 | py | Python | __init__.py | minjunli/jsonc | a61b72e92729f9177d8d8685deae744244d6be16 | [
"MIT"
] | 2 | 2020-08-30T08:02:10.000Z | 2020-09-06T05:33:55.000Z | __init__.py | minjunli/jsonc | a61b72e92729f9177d8d8685deae744244d6be16 | [
"MIT"
] | null | null | null | __init__.py | minjunli/jsonc | a61b72e92729f9177d8d8685deae744244d6be16 | [
"MIT"
] | null | null | null | from .jsonc import load, loads, dump, dumps
| 22 | 43 | 0.75 |
1c0188f969e63d35026803dbf920b5a12f7d75cf | 12,648 | py | Python | specs/d3d9caps.py | prahal/apitrace | e9426dd61586757d23d7dddc85b3076f477e7f07 | [
"MIT"
] | 1 | 2020-06-19T12:34:44.000Z | 2020-06-19T12:34:44.000Z | specs/d3d9caps.py | prahal/apitrace | e9426dd61586757d23d7dddc85b3076f477e7f07 | [
"MIT"
] | null | null | null | specs/d3d9caps.py | prahal/apitrace | e9426dd61586757d23d7dddc85b3076f477e7f07 | [
"MIT"
] | null | null | null | ##########################################################################
#
# Copyright 2008-2009 VMware, Inc.
# All Rights Reserved.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
##########################################################################/
"""d3d9caps.h"""
from winapi import *
from d3d9types import *
D3DVS20CAPS = Flags(DWORD, [
"D3DVS20CAPS_PREDICATION",
])
D3DVSHADERCAPS2_0 = Struct("D3DVSHADERCAPS2_0", [
(D3DVS20CAPS, "Caps"),
(INT, "DynamicFlowControlDepth"),
(INT, "NumTemps"),
(INT, "StaticFlowControlDepth"),
])
D3DPS20CAPS = Flags(DWORD, [
"D3DPS20CAPS_ARBITRARYSWIZZLE",
"D3DPS20CAPS_GRADIENTINSTRUCTIONS",
"D3DPS20CAPS_PREDICATION",
"D3DPS20CAPS_NODEPENDENTREADLIMIT",
"D3DPS20CAPS_NOTEXINSTRUCTIONLIMIT",
])
D3DPSHADERCAPS2_0 = Struct("D3DPSHADERCAPS2_0", [
(D3DPS20CAPS, "Caps"),
(INT, "DynamicFlowControlDepth"),
(INT, "NumTemps"),
(INT, "StaticFlowControlDepth"),
(INT, "NumInstructionSlots"),
])
D3DCAPS = Flags(DWORD, [
"D3DCAPS_READ_SCANLINE",
])
D3DCAPS2 = Flags(DWORD, [
"D3DCAPS2_FULLSCREENGAMMA",
"D3DCAPS2_CANCALIBRATEGAMMA",
"D3DCAPS2_RESERVED",
"D3DCAPS2_CANMANAGERESOURCE",
"D3DCAPS2_DYNAMICTEXTURES",
"D3DCAPS2_CANAUTOGENMIPMAP",
"D3DCAPS2_CANSHARERESOURCE",
])
D3DCAPS3 = Flags(DWORD, [
"D3DCAPS3_RESERVED",
"D3DCAPS3_ALPHA_FULLSCREEN_FLIP_OR_DISCARD",
"D3DCAPS3_LINEAR_TO_SRGB_PRESENTATION",
"D3DCAPS3_COPY_TO_VIDMEM",
"D3DCAPS3_COPY_TO_SYSTEMMEM",
])
D3DPRESENT_INTERVAL = Flags(DWORD, [
#"D3DPRESENT_INTERVAL_DEFAULT", # 0
"D3DPRESENT_INTERVAL_ONE",
"D3DPRESENT_INTERVAL_TWO",
"D3DPRESENT_INTERVAL_THREE",
"D3DPRESENT_INTERVAL_FOUR",
"D3DPRESENT_INTERVAL_IMMEDIATE",
])
D3DCURSORCAPS = Flags(DWORD, [
"D3DCURSORCAPS_COLOR",
"D3DCURSORCAPS_LOWRES",
])
D3DDEVCAPS = Flags(DWORD, [
"D3DDEVCAPS_EXECUTESYSTEMMEMORY",
"D3DDEVCAPS_EXECUTEVIDEOMEMORY",
"D3DDEVCAPS_TLVERTEXSYSTEMMEMORY",
"D3DDEVCAPS_TLVERTEXVIDEOMEMORY",
"D3DDEVCAPS_TEXTURESYSTEMMEMORY",
"D3DDEVCAPS_TEXTUREVIDEOMEMORY",
"D3DDEVCAPS_DRAWPRIMTLVERTEX",
"D3DDEVCAPS_CANRENDERAFTERFLIP",
"D3DDEVCAPS_TEXTURENONLOCALVIDMEM",
"D3DDEVCAPS_DRAWPRIMITIVES2",
"D3DDEVCAPS_SEPARATETEXTUREMEMORIES",
"D3DDEVCAPS_DRAWPRIMITIVES2EX",
"D3DDEVCAPS_HWTRANSFORMANDLIGHT",
"D3DDEVCAPS_CANBLTSYSTONONLOCAL",
"D3DDEVCAPS_HWRASTERIZATION",
"D3DDEVCAPS_PUREDEVICE",
"D3DDEVCAPS_QUINTICRTPATCHES",
"D3DDEVCAPS_RTPATCHES",
"D3DDEVCAPS_RTPATCHHANDLEZERO",
"D3DDEVCAPS_NPATCHES",
])
D3DPMISCCAPS = Flags(DWORD, [
"D3DPMISCCAPS_MASKZ",
"D3DPMISCCAPS_CULLNONE",
"D3DPMISCCAPS_CULLCW",
"D3DPMISCCAPS_CULLCCW",
"D3DPMISCCAPS_COLORWRITEENABLE",
"D3DPMISCCAPS_CLIPPLANESCALEDPOINTS",
"D3DPMISCCAPS_CLIPTLVERTS",
"D3DPMISCCAPS_TSSARGTEMP",
"D3DPMISCCAPS_BLENDOP",
"D3DPMISCCAPS_NULLREFERENCE",
"D3DPMISCCAPS_INDEPENDENTWRITEMASKS",
"D3DPMISCCAPS_PERSTAGECONSTANT",
"D3DPMISCCAPS_FOGANDSPECULARALPHA",
"D3DPMISCCAPS_SEPARATEALPHABLEND",
"D3DPMISCCAPS_MRTINDEPENDENTBITDEPTHS",
"D3DPMISCCAPS_MRTPOSTPIXELSHADERBLENDING",
"D3DPMISCCAPS_FOGVERTEXCLAMPED",
"D3DPMISCCAPS_POSTBLENDSRGBCONVERT",
])
D3DLINECAPS = Flags(DWORD, [
"D3DLINECAPS_TEXTURE",
"D3DLINECAPS_ZTEST",
"D3DLINECAPS_BLEND",
"D3DLINECAPS_ALPHACMP",
"D3DLINECAPS_FOG",
"D3DLINECAPS_ANTIALIAS",
])
D3DPRASTERCAPS = Flags(DWORD, [
"D3DPRASTERCAPS_DITHER",
"D3DPRASTERCAPS_ZTEST",
"D3DPRASTERCAPS_FOGVERTEX",
"D3DPRASTERCAPS_FOGTABLE",
"D3DPRASTERCAPS_MIPMAPLODBIAS",
"D3DPRASTERCAPS_ZBUFFERLESSHSR",
"D3DPRASTERCAPS_FOGRANGE",
"D3DPRASTERCAPS_ANISOTROPY",
"D3DPRASTERCAPS_WBUFFER",
"D3DPRASTERCAPS_WFOG",
"D3DPRASTERCAPS_ZFOG",
"D3DPRASTERCAPS_COLORPERSPECTIVE",
"D3DPRASTERCAPS_SCISSORTEST",
"D3DPRASTERCAPS_SLOPESCALEDEPTHBIAS",
"D3DPRASTERCAPS_DEPTHBIAS",
"D3DPRASTERCAPS_MULTISAMPLE_TOGGLE",
])
D3DPCMPCAPS = Flags(DWORD, [
"D3DPCMPCAPS_NEVER",
"D3DPCMPCAPS_LESS",
"D3DPCMPCAPS_EQUAL",
"D3DPCMPCAPS_LESSEQUAL",
"D3DPCMPCAPS_GREATER",
"D3DPCMPCAPS_NOTEQUAL",
"D3DPCMPCAPS_GREATEREQUAL",
"D3DPCMPCAPS_ALWAYS",
])
D3DPBLENDCAPS = Flags(DWORD, [
"D3DPBLENDCAPS_ZERO",
"D3DPBLENDCAPS_ONE",
"D3DPBLENDCAPS_SRCCOLOR",
"D3DPBLENDCAPS_INVSRCCOLOR",
"D3DPBLENDCAPS_SRCALPHA",
"D3DPBLENDCAPS_INVSRCALPHA",
"D3DPBLENDCAPS_DESTALPHA",
"D3DPBLENDCAPS_INVDESTALPHA",
"D3DPBLENDCAPS_DESTCOLOR",
"D3DPBLENDCAPS_INVDESTCOLOR",
"D3DPBLENDCAPS_SRCALPHASAT",
"D3DPBLENDCAPS_BOTHSRCALPHA",
"D3DPBLENDCAPS_BOTHINVSRCALPHA",
"D3DPBLENDCAPS_BLENDFACTOR",
"D3DPBLENDCAPS_SRCCOLOR2",
"D3DPBLENDCAPS_INVSRCCOLOR2",
])
D3DPSHADECAPS = Flags(DWORD, [
"D3DPSHADECAPS_COLORGOURAUDRGB",
"D3DPSHADECAPS_SPECULARGOURAUDRGB",
"D3DPSHADECAPS_ALPHAGOURAUDBLEND",
"D3DPSHADECAPS_FOGGOURAUD",
])
D3DPTEXTURECAPS = Flags(DWORD, [
"D3DPTEXTURECAPS_PERSPECTIVE",
"D3DPTEXTURECAPS_POW2",
"D3DPTEXTURECAPS_ALPHA",
"D3DPTEXTURECAPS_SQUAREONLY",
"D3DPTEXTURECAPS_TEXREPEATNOTSCALEDBYSIZE",
"D3DPTEXTURECAPS_ALPHAPALETTE",
"D3DPTEXTURECAPS_NONPOW2CONDITIONAL",
"D3DPTEXTURECAPS_PROJECTED",
"D3DPTEXTURECAPS_CUBEMAP",
"D3DPTEXTURECAPS_VOLUMEMAP",
"D3DPTEXTURECAPS_MIPMAP",
"D3DPTEXTURECAPS_MIPVOLUMEMAP",
"D3DPTEXTURECAPS_MIPCUBEMAP",
"D3DPTEXTURECAPS_CUBEMAP_POW2",
"D3DPTEXTURECAPS_VOLUMEMAP_POW2",
"D3DPTEXTURECAPS_NOPROJECTEDBUMPENV",
])
D3DPTFILTERCAPS = Flags(DWORD, [
"D3DPTFILTERCAPS_MINFPOINT",
"D3DPTFILTERCAPS_MINFLINEAR",
"D3DPTFILTERCAPS_MINFANISOTROPIC",
"D3DPTFILTERCAPS_MINFPYRAMIDALQUAD",
"D3DPTFILTERCAPS_MINFGAUSSIANQUAD",
"D3DPTFILTERCAPS_MIPFPOINT",
"D3DPTFILTERCAPS_MIPFLINEAR",
"D3DPTFILTERCAPS_CONVOLUTIONMONO",
"D3DPTFILTERCAPS_MAGFPOINT",
"D3DPTFILTERCAPS_MAGFLINEAR",
"D3DPTFILTERCAPS_MAGFANISOTROPIC",
"D3DPTFILTERCAPS_MAGFPYRAMIDALQUAD",
"D3DPTFILTERCAPS_MAGFGAUSSIANQUAD",
])
D3DPTADDRESSCAPS = Flags(DWORD, [
"D3DPTADDRESSCAPS_WRAP",
"D3DPTADDRESSCAPS_MIRROR",
"D3DPTADDRESSCAPS_CLAMP",
"D3DPTADDRESSCAPS_BORDER",
"D3DPTADDRESSCAPS_INDEPENDENTUV",
"D3DPTADDRESSCAPS_MIRRORONCE",
])
D3DSTENCILCAPS = Flags(DWORD, [
"D3DSTENCILCAPS_KEEP",
"D3DSTENCILCAPS_ZERO",
"D3DSTENCILCAPS_REPLACE",
"D3DSTENCILCAPS_INCRSAT",
"D3DSTENCILCAPS_DECRSAT",
"D3DSTENCILCAPS_INVERT",
"D3DSTENCILCAPS_INCR",
"D3DSTENCILCAPS_DECR",
"D3DSTENCILCAPS_TWOSIDED",
])
D3DTEXOPCAPS = Flags(DWORD, [
"D3DTEXOPCAPS_DISABLE",
"D3DTEXOPCAPS_SELECTARG1",
"D3DTEXOPCAPS_SELECTARG2",
"D3DTEXOPCAPS_MODULATE",
"D3DTEXOPCAPS_MODULATE2X",
"D3DTEXOPCAPS_MODULATE4X",
"D3DTEXOPCAPS_ADD",
"D3DTEXOPCAPS_ADDSIGNED",
"D3DTEXOPCAPS_ADDSIGNED2X",
"D3DTEXOPCAPS_SUBTRACT",
"D3DTEXOPCAPS_ADDSMOOTH",
"D3DTEXOPCAPS_BLENDDIFFUSEALPHA",
"D3DTEXOPCAPS_BLENDTEXTUREALPHA",
"D3DTEXOPCAPS_BLENDFACTORALPHA",
"D3DTEXOPCAPS_BLENDTEXTUREALPHAPM",
"D3DTEXOPCAPS_BLENDCURRENTALPHA",
"D3DTEXOPCAPS_PREMODULATE",
"D3DTEXOPCAPS_MODULATEALPHA_ADDCOLOR",
"D3DTEXOPCAPS_MODULATECOLOR_ADDALPHA",
"D3DTEXOPCAPS_MODULATEINVALPHA_ADDCOLOR",
"D3DTEXOPCAPS_MODULATEINVCOLOR_ADDALPHA",
"D3DTEXOPCAPS_BUMPENVMAP",
"D3DTEXOPCAPS_BUMPENVMAPLUMINANCE",
"D3DTEXOPCAPS_DOTPRODUCT3",
"D3DTEXOPCAPS_MULTIPLYADD",
"D3DTEXOPCAPS_LERP",
])
D3DFVFCAPS = Flags(DWORD, [
"D3DFVFCAPS_TEXCOORDCOUNTMASK",
"D3DFVFCAPS_DONOTSTRIPELEMENTS",
"D3DFVFCAPS_PSIZE",
])
D3DVTXPCAPS = Flags(DWORD, [
"D3DVTXPCAPS_TEXGEN",
"D3DVTXPCAPS_MATERIALSOURCE7",
"D3DVTXPCAPS_DIRECTIONALLIGHTS",
"D3DVTXPCAPS_POSITIONALLIGHTS",
"D3DVTXPCAPS_LOCALVIEWER",
"D3DVTXPCAPS_TWEENING",
"D3DVTXPCAPS_TEXGEN_SPHEREMAP",
"D3DVTXPCAPS_NO_TEXGEN_NONLOCALVIEWER",
])
D3DDEVCAPS2 = Flags(DWORD, [
"D3DDEVCAPS2_STREAMOFFSET",
"D3DDEVCAPS2_DMAPNPATCH",
"D3DDEVCAPS2_ADAPTIVETESSRTPATCH",
"D3DDEVCAPS2_ADAPTIVETESSNPATCH",
"D3DDEVCAPS2_CAN_STRETCHRECT_FROM_TEXTURES",
"D3DDEVCAPS2_PRESAMPLEDDMAPNPATCH",
"D3DDEVCAPS2_VERTEXELEMENTSCANSHARESTREAMOFFSET",
])
D3DDTCAPS = Flags(DWORD, [
"D3DDTCAPS_UBYTE4",
"D3DDTCAPS_UBYTE4N",
"D3DDTCAPS_SHORT2N",
"D3DDTCAPS_SHORT4N",
"D3DDTCAPS_USHORT2N",
"D3DDTCAPS_USHORT4N",
"D3DDTCAPS_UDEC3",
"D3DDTCAPS_DEC3N",
"D3DDTCAPS_FLOAT16_2",
"D3DDTCAPS_FLOAT16_4",
])
#D3DPS_VERSION = Enum("DWORD", [
# "D3DPS_VERSION(0,0)",
# "D3DPS_VERSION(1,0)",
# "D3DPS_VERSION(1,1)",
# "D3DPS_VERSION(1,2)",
# "D3DPS_VERSION(1,3)",
# "D3DPS_VERSION(1,4)",
# "D3DPS_VERSION(2,0)",
# "D3DPS_VERSION(3,0)",
#])
D3DPS_VERSION = DWORD
#D3DVS_VERSION = Enum("DWORD", [
# "D3DVS_VERSION(0,0)",
# "D3DVS_VERSION(1,0)",
# "D3DVS_VERSION(1,1)",
# "D3DVS_VERSION(2,0)",
# "D3DVS_VERSION(3,0)",
#])
D3DVS_VERSION = DWORD
D3DCAPS9 = Struct("D3DCAPS9", [
(D3DDEVTYPE, "DeviceType"),
(UINT, "AdapterOrdinal"),
(D3DCAPS, "Caps"),
(D3DCAPS2, "Caps2"),
(D3DCAPS3, "Caps3"),
(D3DPRESENT_INTERVAL, "PresentationIntervals"),
(D3DCURSORCAPS, "CursorCaps"),
(D3DDEVCAPS, "DevCaps"),
(D3DPMISCCAPS, "PrimitiveMiscCaps"),
(D3DPRASTERCAPS, "RasterCaps"),
(D3DPCMPCAPS, "ZCmpCaps"),
(D3DPBLENDCAPS, "SrcBlendCaps"),
(D3DPBLENDCAPS, "DestBlendCaps"),
(D3DPCMPCAPS, "AlphaCmpCaps"),
(D3DPSHADECAPS, "ShadeCaps"),
(D3DPTEXTURECAPS, "TextureCaps"),
(D3DPTFILTERCAPS, "TextureFilterCaps"),
(D3DPTFILTERCAPS, "CubeTextureFilterCaps"),
(D3DPTFILTERCAPS, "VolumeTextureFilterCaps"),
(D3DPTADDRESSCAPS, "TextureAddressCaps"),
(D3DPTADDRESSCAPS, "VolumeTextureAddressCaps"),
(D3DLINECAPS, "LineCaps"),
(DWORD, "MaxTextureWidth"),
(DWORD, "MaxTextureHeight"),
(DWORD, "MaxVolumeExtent"),
(DWORD, "MaxTextureRepeat"),
(DWORD, "MaxTextureAspectRatio"),
(DWORD, "MaxAnisotropy"),
(Float, "MaxVertexW"),
(Float, "GuardBandLeft"),
(Float, "GuardBandTop"),
(Float, "GuardBandRight"),
(Float, "GuardBandBottom"),
(Float, "ExtentsAdjust"),
(D3DSTENCILCAPS, "StencilCaps"),
(D3DFVFCAPS, "FVFCaps"),
(D3DTEXOPCAPS, "TextureOpCaps"),
(DWORD, "MaxTextureBlendStages"),
(DWORD, "MaxSimultaneousTextures"),
(D3DVTXPCAPS, "VertexProcessingCaps"),
(DWORD, "MaxActiveLights"),
(DWORD, "MaxUserClipPlanes"),
(DWORD, "MaxVertexBlendMatrices"),
(DWORD, "MaxVertexBlendMatrixIndex"),
(Float, "MaxPointSize"),
(DWORD, "MaxPrimitiveCount"),
(DWORD, "MaxVertexIndex"),
(DWORD, "MaxStreams"),
(DWORD, "MaxStreamStride"),
(D3DVS_VERSION, "VertexShaderVersion"),
(DWORD, "MaxVertexShaderConst"),
(D3DPS_VERSION, "PixelShaderVersion"),
(Float, "PixelShader1xMaxValue"),
(D3DDEVCAPS2, "DevCaps2"),
(Float, "MaxNpatchTessellationLevel"),
(DWORD, "Reserved5"),
(UINT, "MasterAdapterOrdinal"),
(UINT, "AdapterOrdinalInGroup"),
(UINT, "NumberOfAdaptersInGroup"),
(D3DDTCAPS, "DeclTypes"),
(DWORD, "NumSimultaneousRTs"),
(D3DPTFILTERCAPS, "StretchRectFilterCaps"),
(D3DVSHADERCAPS2_0, "VS20Caps"),
(D3DPSHADERCAPS2_0, "PS20Caps"),
(D3DPTFILTERCAPS, "VertexTextureFilterCaps"),
(DWORD, "MaxVShaderInstructionsExecuted"),
(DWORD, "MaxPShaderInstructionsExecuted"),
(DWORD, "MaxVertexShader30InstructionSlots"),
(DWORD, "MaxPixelShader30InstructionSlots"),
])
| 29.971564 | 79 | 0.716872 |
1c020c67e87e2c7154ebd489b70107987ace54ea | 23 | py | Python | miika_nlu/venv/Lib/site-packages/tqdm/_dist_ver.py | NimBuzz01/Project-Miika_SDGP | 08ad1aafafbe9f47c59bd1568b8ac367fefe5503 | [
"MIT"
] | null | null | null | miika_nlu/venv/Lib/site-packages/tqdm/_dist_ver.py | NimBuzz01/Project-Miika_SDGP | 08ad1aafafbe9f47c59bd1568b8ac367fefe5503 | [
"MIT"
] | null | null | null | miika_nlu/venv/Lib/site-packages/tqdm/_dist_ver.py | NimBuzz01/Project-Miika_SDGP | 08ad1aafafbe9f47c59bd1568b8ac367fefe5503 | [
"MIT"
] | null | null | null | __version__ = '4.64.0'
| 11.5 | 22 | 0.652174 |
1c026bf515ed2ee75cb046511de891145466adbe | 10,188 | py | Python | gym_flock/envs/old/mapping.py | katetolstaya/gym-flock | 3236d1dafcb1b9be0cf78b471672e8becb2d37af | [
"MIT"
] | 19 | 2019-07-29T22:19:58.000Z | 2022-01-27T04:38:38.000Z | gym_flock/envs/old/mapping.py | henghenghahei849/gym-flock | b09bdfbbe4a96fe052958d1f9e1e9dd314f58419 | [
"MIT"
] | null | null | null | gym_flock/envs/old/mapping.py | henghenghahei849/gym-flock | b09bdfbbe4a96fe052958d1f9e1e9dd314f58419 | [
"MIT"
] | 5 | 2019-10-03T14:44:49.000Z | 2021-12-09T20:39:39.000Z | import gym
from gym import spaces, error, utils
from gym.utils import seeding
import numpy as np
import configparser
from os import path
import matplotlib.pyplot as plt
from matplotlib.pyplot import gca
font = {'family': 'sans-serif',
'weight': 'bold',
'size': 14}
| 38.014925 | 120 | 0.565077 |
1c0280c3ce6e60aeea9f1bd9542b3a69e75d70e4 | 10,478 | py | Python | tensorflow/python/kernel_tests/lu_op_test.py | PaulWang1905/tensorflow | ebf12d22b4801fb8dab5034cc94562bf7cc33fa0 | [
"Apache-2.0"
] | 36 | 2016-12-17T15:25:25.000Z | 2022-01-29T21:50:53.000Z | tensorflow/python/kernel_tests/lu_op_test.py | PaulWang1905/tensorflow | ebf12d22b4801fb8dab5034cc94562bf7cc33fa0 | [
"Apache-2.0"
] | 59 | 2019-06-17T09:37:49.000Z | 2022-01-19T01:21:34.000Z | tensorflow/python/kernel_tests/lu_op_test.py | PaulWang1905/tensorflow | ebf12d22b4801fb8dab5034cc94562bf7cc33fa0 | [
"Apache-2.0"
] | 36 | 2017-07-27T21:12:40.000Z | 2022-02-03T16:45:56.000Z | # Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for tensorflow.ops.tf.Lu."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from tensorflow.python.client import session
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import errors
from tensorflow.python.framework import ops
from tensorflow.python.framework import test_util
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import linalg_ops
from tensorflow.python.ops import map_fn
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import random_ops
from tensorflow.python.ops import variables
from tensorflow.python.platform import benchmark
from tensorflow.python.platform import test
if __name__ == "__main__":
test.main()
| 36.894366 | 80 | 0.644589 |
1c02cc341eda998a41a468c758008bb1da86efcd | 10,970 | py | Python | aligner/features/processing.py | zhouyangnk/Montreal-Forced-Aligner | 4f8733409e79a50744616921a04fccf115e8af6f | [
"MIT"
] | 1 | 2021-03-09T03:15:14.000Z | 2021-03-09T03:15:14.000Z | aligner/features/processing.py | missaaoo/Montreal-Forced-Aligner | 62a40e2337448752a4b8fc7a4ec9cbf3f159fbff | [
"MIT"
] | null | null | null | aligner/features/processing.py | missaaoo/Montreal-Forced-Aligner | 62a40e2337448752a4b8fc7a4ec9cbf3f159fbff | [
"MIT"
] | 1 | 2021-03-09T03:15:17.000Z | 2021-03-09T03:15:17.000Z | import multiprocessing as mp
import subprocess
import shutil
import os
from ..helper import make_path_safe, thirdparty_binary, filter_scp
from ..exceptions import CorpusError
def mfcc(mfcc_directory, num_jobs, feature_config, frequency_configs):
"""
Multiprocessing function that converts wav files into MFCCs
See http://kaldi-asr.org/doc/feat.html and
http://kaldi-asr.org/doc/compute-mfcc-feats_8cc.html for more details on how
MFCCs are computed.
Also see https://github.com/kaldi-asr/kaldi/blob/master/egs/wsj/s5/steps/make_mfcc.sh
for the bash script this function was based on.
Parameters
----------
mfcc_directory : str
Directory to save MFCC feature matrices
log_directory : str
Directory to store log files
num_jobs : int
The number of processes to use in calculation
mfcc_configs : list of :class:`~aligner.config.MfccConfig`
Configuration object for generating MFCCs
Raises
------
CorpusError
If the files per speaker exceeds the number of files that are
allowed to be open on the computer (for Unix-based systems)
"""
child_env = os.environ.copy()
os.makedirs(os.path.join(mfcc_directory, 'log'), exist_ok=True)
paths = []
for j, p in frequency_configs:
paths.append(feature_config.write(mfcc_directory, j, p))
jobs = [(mfcc_directory, x, paths[x])
for x in range(num_jobs)]
with mp.Pool(processes=num_jobs, initializer=init, initargs=(child_env,)) as pool:
r = False
try:
results = [pool.apply_async(mfcc_func, args=i) for i in jobs]
output = [p.get() for p in results]
except OSError as e:
print(dir(e))
if e.errno == 24:
r = True
else:
raise
if r:
raise (CorpusError(
'There were too many files per speaker to process based on your OS settings. Please try to split your data into more speakers.'))
| 52.740385 | 142 | 0.51887 |
1c03e5b1937b24240e10a556ec3658ca89e78e05 | 19,640 | py | Python | ffai/util/bothelper.py | tysen2k/ffai | 2fa1fd45a8877986fdb21e3fea5e01cbf819d3ec | [
"Apache-2.0"
] | null | null | null | ffai/util/bothelper.py | tysen2k/ffai | 2fa1fd45a8877986fdb21e3fea5e01cbf819d3ec | [
"Apache-2.0"
] | null | null | null | ffai/util/bothelper.py | tysen2k/ffai | 2fa1fd45a8877986fdb21e3fea5e01cbf819d3ec | [
"Apache-2.0"
] | null | null | null | """
A number of static methods for interpretting the state of the fantasy football pitch that aren't required directly by
the client
"""
from ffai.core import Game, Action, ActionType
from ffai.core.procedure import *
from ffai.util.pathfinding import *
from typing import Optional, List, Dict
def blitz_used(game: Game) -> bool:
    """Return True when no START_BLITZ action is available, i.e. the blitz has been spent."""
    return all(a.action_type != ActionType.START_BLITZ for a in game.state.available_actions)
def handoff_used(game: Game) -> bool:
    """Return True when no START_HANDOFF action is available, i.e. the hand-off has been spent."""
    available_types = [a.action_type for a in game.state.available_actions]
    return ActionType.START_HANDOFF not in available_types
def foul_used(game: Game) -> bool:
    """Return True when no START_FOUL action is available, i.e. the foul has been spent."""
    return not any(a.action_type == ActionType.START_FOUL for a in game.state.available_actions)
def pass_used(game: Game) -> bool:
    """Return True when no START_PASS action is available, i.e. the pass has been spent."""
    start_pass = (a for a in game.state.available_actions if a.action_type == ActionType.START_PASS)
    return next(start_pass, None) is None
def get_players(game: Game, team: Team, include_own: bool = True, include_opp: bool = True, include_stunned: bool = True, include_used: bool = True, include_off_pitch: bool = False, only_blockable: bool = False, only_used: bool = False) -> List[Player]:
    """Collect players from either/both teams, filtered by the given flags.

    `include_*` flags widen the pool; `only_*` flags restrict it.  By default
    off-pitch players (no position, or out of bounds) are excluded.
    """
    candidates: List[Player] = []
    for cur_team in game.state.teams:
        own_side = cur_team == team
        if (own_side and include_own) or (not own_side and include_opp):
            candidates.extend(cur_team.players)

    def _keep(p: Player) -> bool:
        if only_blockable and not p.state.up:
            return False
        if only_used and not p.state.used:
            return False
        if not include_stunned and p.state.stunned:
            return False
        if not include_used and p.state.used:
            return False
        if not include_off_pitch and (p.position is None or game.is_out_of_bounds(p.position)):
            return False
        return True

    return [p for p in candidates if _keep(p)]
def caging_squares_north_east(game: Game, protect_square: Square) -> List[Square]:
    """Candidate cage-corner squares in the +x/+y diagonal direction of ``protect_square``.

    Returns an empty list when the square is within 2 of the +x pitch edge.
    When the square is on or next to the +y sideline (y == height-1 / height-2),
    same-row substitutes are returned instead of the missing diagonal squares.
    NOTE(review): "north-east" naming assumes a particular board orientation;
    confirm against the Game coordinate system.
    """
    # * At its simplest, a cage requires 4 players in the North-East, South-East, South-West and North-West
    # * positions, relative to the ball carrier, such that there is no more than 3 squares between the players in
    # * each of those adjacent compass directions.
    # *
    # *  1   3
    # *  xx-xx
    # *  xx-xx
    # *  --o--
    # *  xx-xx
    # *  xx-xx
    # *  3   4
    # *
    # * pitch is 26 long
    # *
    # * Basically we need one player in each of the corners: 1-4, but spaced such that there is no gap of 3 squares.
    # * If the caging player is in 1-4, but next to ball carrier, he ensures this will automatically be met.
    # *
    # * The only exception to this is when the ball carrier is on, or near, the sideline.  Then return the squares
    # * that can otherwise form the cage.
    # *
    caging_squares: List[Square] = []
    x = protect_square.x
    y = protect_square.y
    if x <= game.state.pitch.width - 3:
        if y == game.state.pitch.height - 2:
            # one row off the sideline: fall back to same-row + next-row squares
            caging_squares.append(game.get_square(x + 1, y + 1))
            caging_squares.append(game.get_square(x + 2, y + 1))
            caging_squares.append(game.get_square(x + 1, y))
            caging_squares.append(game.get_square(x + 2, y))
        elif y == game.state.pitch.height - 1:
            # on the sideline: only same-row squares exist
            caging_squares.append(game.get_square(x + 1, y))
            caging_squares.append(game.get_square(x + 2, y))
        else:
            caging_squares.append(game.get_square(x + 1, y + 1))
            caging_squares.append(game.get_square(x + 1, y + 2))
            caging_squares.append(game.get_square(x + 2, y + 1))
            # caging_squares.append(game.state.pitch.get_square(x + 3, y + 3))
    return caging_squares
def caging_squares_north_west(game: Game, protect_square: Square) -> List[Square]:
    """Candidate cage-corner squares in the -x/+y diagonal direction of ``protect_square``.

    Mirror of :func:`caging_squares_north_east`.  Empty when the square is
    within 2 of the -x pitch edge; near the +y sideline same-row substitutes
    are returned.
    """
    caging_squares: List[Square] = []
    x = protect_square.x
    y = protect_square.y
    if x >= 3:
        if y == game.state.pitch.height - 2:
            # one row off the sideline: fall back to same-row + next-row squares
            caging_squares.append(game.get_square(x - 1, y + 1))
            caging_squares.append(game.get_square(x - 2, y + 1))
            caging_squares.append(game.get_square(x - 1, y))
            caging_squares.append(game.get_square(x - 2, y))
        elif y == game.state.pitch.height - 1:
            # on the sideline: only same-row squares exist
            caging_squares.append(game.get_square(x - 1, y))
            caging_squares.append(game.get_square(x - 2, y))
        else:
            caging_squares.append(game.get_square(x - 1, y + 1))
            caging_squares.append(game.get_square(x - 1, y + 2))
            caging_squares.append(game.get_square(x - 2, y + 1))
            # caging_squares.append(game.state.pitch.get_square(x - 3, y + 3))
    return caging_squares
def caging_squares_south_west(game: Game, protect_square: Square) -> List[Square]:
    """Candidate cage-corner squares in the -x/-y diagonal direction of ``protect_square``.

    Mirror of :func:`caging_squares_north_east`.  Empty when the square is
    within 2 of the -x pitch edge; near the y == 1 sideline same-row
    substitutes are returned.
    """
    caging_squares: List[Square] = []
    x = protect_square.x
    y = protect_square.y
    if x >= 3:
        if y == 2:
            # one row off the sideline: fall back to same-row + next-row squares
            caging_squares.append(game.get_square(x - 1, y - 1))
            caging_squares.append(game.get_square(x - 2, y - 1))
            caging_squares.append(game.get_square(x - 1, y))
            caging_squares.append(game.get_square(x - 2, y))
        elif y == 1:
            # on the sideline: only same-row squares exist
            caging_squares.append(game.get_square(x - 1, y))
            caging_squares.append(game.get_square(x - 2, y))
        else:
            caging_squares.append(game.get_square(x - 1, y - 1))
            caging_squares.append(game.get_square(x - 1, y - 2))
            caging_squares.append(game.get_square(x - 2, y - 1))
            # caging_squares.append(game.state.pitch.get_square(x - 3, y - 3))
    return caging_squares
def caging_squares_south_east(game: Game, protect_square: Square) -> List[Square]:
    """Candidate cage-corner squares in the +x/-y diagonal direction of ``protect_square``.

    Mirror of :func:`caging_squares_north_east`.  Empty when the square is
    within 2 of the +x pitch edge; near the y == 1 sideline same-row
    substitutes are returned.
    """
    caging_squares: List[Square] = []
    x = protect_square.x
    y = protect_square.y
    if x <= game.state.pitch.width - 3:
        if y == 2:
            # one row off the sideline: fall back to same-row + next-row squares
            caging_squares.append(game.get_square(x + 1, y - 1))
            caging_squares.append(game.get_square(x + 2, y - 1))
            caging_squares.append(game.get_square(x + 1, y))
            caging_squares.append(game.get_square(x + 2, y))
        elif y == 1:
            # on the sideline: only same-row squares exist
            caging_squares.append(game.get_square(x + 1, y))
            caging_squares.append(game.get_square(x + 2, y))
        else:
            caging_squares.append(game.get_square(x + 1, y - 1))
            caging_squares.append(game.get_square(x + 1, y - 2))
            caging_squares.append(game.get_square(x + 2, y - 1))
            # caging_squares.append(game.get_square(x + 3, y - 3))
    return caging_squares
def is_caging_position(game: Game, player: Player, protect_player: Player) -> bool:
    """True if `player` is within 2 squares of `protect_player` but not on the same row/column (i.e. a cage corner)."""
    close_enough = player.position.distance(protect_player.position) <= 2
    return close_enough and not is_castle_position_of(game, player, protect_player)
def has_player_within_n_squares(game: Game, units: List[Player], square: Square, num_squares: int) -> bool:
    """True if any player in `units` is within `num_squares` of `square`."""
    return any(unit.position.distance(square) <= num_squares for unit in units)
def has_adjacent_player(game: Game, square: Square) -> bool:
    """Return True if at least one player (either team) occupies a square adjacent to `square`.

    Bug fix: the previous implementation returned the *negation* of the
    adjacency list's truthiness, so it answered "has NO adjacent player"
    despite its name.  Callers relying on the inverted result should be
    re-checked.
    """
    return len(game.get_adjacent_players(square)) > 0
def is_castle_position_of(game: Game, player1: Player, player2: Player) -> bool:
    """True if the two players share a column or a row (rook/"castle" relation)."""
    same_column = player1.position.x == player2.position.x
    same_row = player1.position.y == player2.position.y
    return same_column or same_row
def is_bishop_position_of(game: Game, player1: Player, player2: Player) -> bool:
    """True if the two players lie on a common diagonal (bishop relation)."""
    dx = abs(player1.position.x - player2.position.x)
    dy = abs(player1.position.y - player2.position.y)
    return dx == dy
def attacker_would_surf(game: Game, attacker: Player, defender: Player) -> bool:
    """True if `attacker` blocking `defender` could push him off the pitch (a "surf").

    Side Step (unless countered by the attacker's Grab) or Stand Firm prevents
    the surf; the players must be adjacent; and the defender must be directly
    pushable out of bounds (see :func:`direct_surf_squares`).
    """
    if (defender.has_skill(Skill.SIDE_STEP) and not attacker.has_skill(Skill.GRAB)) or defender.has_skill(Skill.STAND_FIRM):
        return False
    if not attacker.position.is_adjacent(defender.position):
        return False
    return direct_surf_squares(game, attacker.position, defender.position)
def direct_surf_squares(game: Game, attack_square: Square, defend_square: Square) -> bool:
    """True if a push from `attack_square` onto `defend_square` points straight off the pitch.

    Holds when the defender sits in a corner (sideline + endzone), or on a
    sideline with the attacker in the same column, or in an endzone with the
    attacker in the same row.
    """
    defender_on_sideline = on_sideline(game, defend_square)
    defender_in_endzone = on_endzone(game, defend_square)
    if defender_on_sideline and defender_in_endzone:
        return True
    if defender_on_sideline and defend_square.x == attack_square.x:
        return True
    return defender_in_endzone and defend_square.y == attack_square.y
def reverse_x_for_right(game: Game, team: Team, x: int) -> int:
    """Return `x` unchanged if `team` owns the side containing Square(13, 3), else mirror it across the pitch width."""
    if game.is_team_side(Square(13, 3), team):
        return x
    return game.state.pitch.width - 1 - x
def reverse_x_for_left(game: Game, team: Team, x: int) -> int:
    """Mirror `x` across the pitch width if `team` owns the side containing Square(13, 3), else return it unchanged."""
    if game.is_team_side(Square(13, 3), team):
        return game.state.pitch.width - 1 - x
    return x
def on_sideline(game: Game, square: Square) -> bool:
    """True if `square` lies on either sideline row of the pitch."""
    return square.y in (1, game.state.pitch.height - 1)
def on_endzone(game: Game, square: Square) -> bool:
    """True if `square` lies in either endzone column of the pitch."""
    return square.x in (1, game.state.pitch.width - 1)
def on_los(game: Game, team: Team, square: Square) -> bool:
    """True if `square` is on `team`'s line of scrimmage (column 13 from the team's perspective, central rows only)."""
    on_los_column = reverse_x_for_right(game, team, square.x) == 13
    return on_los_column and 4 < square.y < 21
def los_squares(game: Game, team: Team) -> List[Square]:
    """Return the seven central line-of-scrimmage squares for `team` (rows 5..11 on its LOS column)."""
    los_x = reverse_x_for_right(game, team, 13)
    return [game.get_square(los_x, y) for y in range(5, 12)]
def distance_to_sideline(game: Game, square: Square) -> int:
    """Return the number of squares from `square` to the nearest sideline row."""
    from_top = square.y - 1
    from_bottom = game.state.pitch.height - square.y - 2
    return min(from_top, from_bottom)
def is_endzone(game, square: Square) -> bool:
    """True if `square` is in either endzone column (duplicate of on_endzone, kept for callers)."""
    return square.x in (1, game.state.pitch.width - 1)
def last_block_proc(game) -> Optional[Block]:
    """Return the most recently pushed Block procedure on the game stack, or None."""
    for proc in reversed(game.state.stack.items):
        if isinstance(proc, Block):
            return proc
    return None
def is_adjacent_ball(game: Game, square: Square) -> bool:
    """True if the ball is on the pitch and adjacent to `square`."""
    ball_square = game.get_ball_position()
    if ball_square is None:
        return False
    return ball_square.is_adjacent(square)
def squares_within(game: Game, square: Square, distance: int) -> List[Square]:
    """Return all on-pitch squares within Chebyshev `distance` of `square`, excluding `square` itself."""
    offsets = range(-distance, distance + 1)
    candidates = (game.get_square(square.x + dx, square.y + dy) for dx in offsets for dy in offsets)
    return [sq for sq in candidates if sq != square and not game.is_out_of_bounds(sq)]
def distance_to_defending_endzone(game: Game, team: Team, position: Square) -> int:
    """Return the number of columns from `position` to `team`'s own (defended) endzone."""
    normalized_x = reverse_x_for_right(game, team, position.x)
    return normalized_x - 1
def distance_to_scoring_endzone(game: Game, team: Team, position: Square) -> int:
    """Return the number of columns from `position` to the endzone `team` scores in."""
    normalized_x = reverse_x_for_left(game, team, position.x)
    return normalized_x - 1
def players_in_scoring_endzone(game: Game, team: Team, include_own: bool = True, include_opp: bool = False) -> List[Player]:
    """Return the players (per include flags) currently standing in `team`'s scoring endzone."""
    candidates = get_players(game, team, include_own=include_own, include_opp=include_opp)
    return [p for p in candidates if in_scoring_endzone(game, team, p.position)]
def in_scoring_endzone(game: Game, team: Team, square: Square) -> bool:
    """True if `square` lies in the endzone `team` scores in."""
    normalized_x = reverse_x_for_left(game, team, square.x)
    return normalized_x == 1
def players_in_scoring_distance(game: Game, team: Team, include_own: bool = True, include_opp: bool = True, include_stunned: bool = False) -> List[Player]:
    """Return the players (per include flags) whose remaining movement can reach the scoring endzone."""
    candidates = get_players(game, team, include_own=include_own, include_opp=include_opp, include_stunned=include_stunned)
    return [p for p in candidates
            if distance_to_scoring_endzone(game, team, p.position) <= p.num_moves_left()]
def distance_to_nearest_player(game: Game, team: Team, square: Square, include_own: bool = True, include_opp: bool = True, only_used: bool = False, include_used: bool = True, include_stunned: bool = True, only_blockable: bool = False) -> int:
    """Return the distance from `square` to the nearest matching player, or 100 if none match."""
    candidates = get_players(game, team, include_own=include_own, include_opp=include_opp, only_used=only_used,
                             include_used=include_used, include_stunned=include_stunned, only_blockable=only_blockable)
    return min((p.position.distance(square) for p in candidates), default=100)
def screening_distance(game: Game, from_square: Square, to_square: Square) -> float:
    """Placeholder for a "screening distance" metric between two squares.

    Not implemented yet: always returns 0.0 (see the commented-out draft formula).
    """
    # Return the "screening distance" between 3 squares. (To complete)
    # float dist = math.sqrt(math.pow(square.x - cur.position.x, 3) + math.pow(square.y - cur.position.y, 3))
    return 0.0
def num_opponents_can_reach(game: Game, team: Team, square: Square) -> int:
    """
    Count opposing players that could plausibly reach `square` this turn.

    A player counts when their Chebyshev distance to `square` is strictly
    below their movement allowance (MA + 2 for go-for-its, minus 3 when the
    player is down and must stand up). Stunned players never count.
    """
    opps: List[Player] = get_players(game, team, include_own=False, include_opp=True)
    num_opps_reach: int = 0
    for cur in opps:
        if cur.state.stunned:
            continue
        # Chebyshev distance. The original omitted abs(), so a negative delta
        # (opponent to the right of / below the square) always passed the
        # "dist < move_allowed" test regardless of actual distance.
        dist = max(abs(square.x - cur.position.x), abs(square.y - cur.position.y))
        move_allowed = cur.get_ma() + 2
        if not cur.state.up:
            move_allowed -= 3
        if dist < move_allowed:
            num_opps_reach += 1
    return num_opps_reach
def num_opponents_on_field(game: Game, team: Team) -> int:
    """Number of opposing players that currently occupy a square on the pitch."""
    opps: List[Player] = get_players(game, team, include_own=False, include_opp=True)
    return sum(1 for cur in opps if cur.position is not None)
def number_opponents_closer_than_to_endzone(game: Game, team: Team, square: Square) -> int:
    """Count opponents that are nearer to `team`'s defending endzone than `square` is."""
    threshold = distance_to_defending_endzone(game, team, square)
    opponents: List[Player] = get_players(game, team, include_own=False, include_opp=True)
    return sum(1 for opp in opponents
               if distance_to_defending_endzone(game, team, opp.position) < threshold)
def in_scoring_range(game: Game, player: Player) -> bool:
    """True if `player` has enough moves left to reach their scoring endzone."""
    needed = distance_to_scoring_endzone(game, player.team, player.position)
    return player.num_moves_left() >= needed
def players_in_scoring_range(game: Game, team: Team, include_own=True, include_opp=True, include_used=True, include_stunned=True) -> List[Player]:
    """All selected players that are currently within scoring range."""
    candidates: List[Player] = get_players(game, team, include_own=include_own, include_opp=include_opp, include_stunned=include_stunned, include_used=include_used)
    return [p for p in candidates if in_scoring_range(game, p)]
def players_in(game: Game, team: Team, squares: List[Square], include_own=True, include_opp=True, include_used=True, include_stunned=True, only_blockable=False) -> List[Player]:
    """Players occupying any of `squares`, restricted to those passing the selection flags."""
    allowed: List[Player] = get_players(game, team, include_own=include_own, include_opp=include_opp, include_used=include_used, include_stunned=include_stunned, only_blockable=only_blockable)
    occupants = (game.get_player_at(square) for square in squares)
    return [p for p in occupants if p is not None and p in allowed]
| 39.676768 | 253 | 0.667668 |
1c04bbf01c459890b136a85f02fecf87b5220fd7 | 601 | py | Python | sb_backend/cli/cli.py | DmitriyGrigoriev/sb-fastapi | 1aef3db6ce26ea054e048e5927552d48c2eccbfb | [
"MIT"
] | null | null | null | sb_backend/cli/cli.py | DmitriyGrigoriev/sb-fastapi | 1aef3db6ce26ea054e048e5927552d48c2eccbfb | [
"MIT"
] | null | null | null | sb_backend/cli/cli.py | DmitriyGrigoriev/sb-fastapi | 1aef3db6ce26ea054e048e5927552d48c2eccbfb | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
"""sb-fastapi CLI root."""
import logging
import click
from sb_backend.cli.commands.serve import serve
cli.add_command(serve)
| 18.78125 | 73 | 0.582363 |
1c04cbdee0c3246cf3dd07ebf05dec05475d975b | 1,379 | py | Python | 1-Chapter/htmlcomponents.py | DSandovalFlavio/Dashboards-Plotly-Dash | 58867c2e813bc9273838dec12e7bd15be25504fa | [
"MIT"
] | null | null | null | 1-Chapter/htmlcomponents.py | DSandovalFlavio/Dashboards-Plotly-Dash | 58867c2e813bc9273838dec12e7bd15be25504fa | [
"MIT"
] | null | null | null | 1-Chapter/htmlcomponents.py | DSandovalFlavio/Dashboards-Plotly-Dash | 58867c2e813bc9273838dec12e7bd15be25504fa | [
"MIT"
] | null | null | null | import dash
from dash import html
app = dash.Dash(__name__)
app.layout = html.Div(children=[html.H1('Data Science',
style = {'textAlign': 'center',
'color': '#0FD08D',
'font-size': '50px'}),
html.H2('La carrera mas sexy del siglo XXI',
style = {'textAlign': 'center',
'color' : '#009A64'}),
html.P('Factores clave:'),
html.Ul(children = [html.Li('Factor 1'),
html.Li('Factor 2'),
html.Li('Factor 3'),
html.Li(['Source: ',
html.A('https://www.excelsior.com.mx/nacional/ciencia-de-datos-la-carrera-mas-sexy-del-xxi-en-la-unam/1323946',
href = 'https://www.excelsior.com.mx/nacional/ciencia-de-datos-la-carrera-mas-sexy-del-xxi-en-la-unam/1323946')
])
])
])
if __name__ == '__main__':
app.run_server(debug=True) | 55.16 | 175 | 0.354605 |
1c051afc13411020a3654ad65fd96dd86b2c6979 | 75,380 | py | Python | baadalinstallation/baadal/modules/vm_helper.py | iitd-plos/baadal2.0 | 0496a8ddb5c0620f3448f018ba48b080b96cbe61 | [
"Apache-2.0"
] | 8 | 2017-01-30T17:40:18.000Z | 2022-02-07T19:37:32.000Z | baadalinstallation/baadal/modules/vm_helper.py | iitd-plos/baadal2.0 | 0496a8ddb5c0620f3448f018ba48b080b96cbe61 | [
"Apache-2.0"
] | null | null | null | baadalinstallation/baadal/modules/vm_helper.py | iitd-plos/baadal2.0 | 0496a8ddb5c0620f3448f018ba48b080b96cbe61 | [
"Apache-2.0"
] | 7 | 2016-11-08T13:38:10.000Z | 2019-11-26T04:33:17.000Z | # -*- coding: utf-8 -*-
###################################################################################
from gluon import current
from helper import get_constant, execute_remote_cmd, config, get_datetime, \
log_exception, is_pingable, get_context_path
from libvirt import * # @UnusedWildImport
from log_handler import logger
from nat_mapper import create_mapping, remove_mapping
import math, shutil, libvirt, os, time, random
import xml.etree.ElementTree as etree
def _choose_datastore():
    """
    Choose the datastore with the most free capacity.

    Returns the datastore row whose free space (capacity - used) is largest.
    Raises Exception when no datastore is configured.
    """
    datastores = current.db(current.db.datastore.id >= 0).select()
    logger.debug("datastore_length" + str(len(datastores)))
    if len(datastores) == 0:
        raise Exception("No datastore found.")
    # Pick the datastore with maximum free space directly; replaces the old
    # build-dict / sort-tuples dance (and its dead commented-out query).
    selected_datastore = max(datastores, key=lambda ds: ds.capacity - ds.used)
    logger.debug("selected datastore" + str(selected_datastore))
    return selected_datastore
def host_resources_used(host_id):
    """
    Total RAM (MB) and vCPU count consumed by VMs placed on the given host.

    VMs whose status is UNKNOWN or IN_QUEUE are excluded from the totals.
    Returns a (ram_mb, vcpu_count) tuple, each rounded up to an integer.
    """
    placed_vms = current.db((current.db.vm_data.host_id == host_id) & (current.db.vm_data.status != current.VM_STATUS_UNKNOWN) & (current.db.vm_data.status != current.VM_STATUS_IN_QUEUE)).select()
    logger.debug("vms selected are: " + str(placed_vms))
    total_ram = sum(vm.RAM for vm in placed_vms)
    total_cpu = sum(vm.vCPU for vm in placed_vms)
    return (math.ceil(total_ram), math.ceil(total_cpu))
def getVirshDomainConn(vm_details, host_ip=None, domain_name=None):
    """
    Open a libvirt connection to a VM's host and look up its domain.

    When vm_details is given, the host IP and domain name are taken from it;
    otherwise the explicit host_ip / domain_name arguments are used.
    Returns (connection, domain); the caller is responsible for closing the
    connection.
    """
    if vm_details is not None:
        host_ip = vm_details.host_id.host_ip.private_ip
        domain_name = vm_details.vm_identity
    conn = libvirt.open("qemu+ssh://root@" + host_ip + "/system")
    return (conn, conn.lookupByName(domain_name))
def getVirshDomain(vm_details):
    """
    Look up and return the libvirt domain for the given VM.

    NOTE(review): the connection is closed before the domain is returned,
    yet callers invoke methods (create/suspend/...) on the returned domain.
    libvirt domain handles are tied to their connection -- confirm that the
    python binding's internal reference keeps this working as intended.
    """
    (connection_object, domain) = getVirshDomainConn(vm_details)
    connection_object.close()
    return domain
def _set_portgroup_in_vm(domain_name, portgroup, host_ip, vlan_tag):
    """
    Set the vlan tag in network configuration of VM
    This is required to ensure that VM fetches IP of its vlan from DHCP

    Edits the domain's XML in place: for a bridge-type interface a
    <vlan><tag id=...> element is inserted; for a network-type interface the
    portgroup attribute is set on <source>. The domain is then redefined and
    restarted so the change takes effect.
    """
    (connection_object, domain) = getVirshDomainConn(None, host_ip, domain_name)
    # Parse the live domain XML to locate the interface's <source> element.
    xml = etree.fromstring(domain.XMLDesc(0))
    source_network_element = xml.find('.//interface/source')
    source_network_string=etree.tostring(source_network_element)
    logger.debug("Source network is " + source_network_string)
    if source_network_string.find(" bridge=") != -1:
        logger.debug("Source is set to bridge adding <vlan><tag_id> to the interface tag ")
        root_new = xml.find('.//interface')
        root_new_vlan= etree.SubElement(root_new, 'vlan')
        root_new_tag= etree.SubElement(root_new_vlan, 'tag')
        root_new_tag.set('id',vlan_tag)
        logger.debug("After append root_new_vlan is " + etree.tostring(root_new_vlan))
    elif source_network_string.find(" network=") != -1:
        logger.debug("Source is set to network adding portgroup to the source tag ")
        source_network_element.set('portgroup', portgroup)
        logger.debug("Changed source network is " + etree.tostring(source_network_element))
    else:
        logger.debug("Neither VM nor vlan tagId is added in the xml" )
    # Redefine with the modified XML, then restart the domain to apply it.
    domain = connection_object.defineXML(etree.tostring(xml))
    domain.destroy()
    domain.create()
    domain.isActive()
    connection_object.close()
def _get_private_ip_mac(security_domain_id):
    """
    Chooses a random Private IP from the pool, such that:
    - It is not assigned to any VM or host
    - It belongs to VLAN of given security domain

    Returns the chosen private_ip_pool row (carries private_ip, mac_addr and
    vlan). Raises Exception when no free IP remains in the security domain.
    """
    # VLANs that belong to the requested security domain.
    vlans = current.db(current.db.security_domain.id == security_domain_id)._select(current.db.security_domain.vlan)
    # Random free pool entry: not referenced by any vm_data.private_ip or
    # host.host_ip row, and on one of the domain's VLANs.
    private_ip_pool = current.db((~current.db.private_ip_pool.id.belongs(current.db(current.db.vm_data.private_ip != None)._select(current.db.vm_data.private_ip)))
                          & (~current.db.private_ip_pool.id.belongs(current.db(current.db.host.host_ip != None)._select(current.db.host.host_ip)))
                          & (current.db.private_ip_pool.vlan.belongs(vlans))).select(current.db.private_ip_pool.ALL, orderby='<random>').first()
    if private_ip_pool:
        return private_ip_pool
    else:
        sd = current.db.security_domain[security_domain_id]
        raise Exception(("Available MACs are exhausted for security domain '%s'." % sd.name))
def _choose_random_public_ip():
    """
    Chooses a random Public IP from the pool, such that:
    - It is not assigned to any VM
    - It is not assigned to any host
    - IP is marked active.

    Returns the chosen public_ip_pool row, or None when the pool is exhausted
    (callers are expected to handle the None case).
    """
    public_ip_pool = current.db((~current.db.public_ip_pool.id.belongs(current.db(current.db.vm_data.public_ip != None)._select(current.db.vm_data.public_ip)))
                         & (~current.db.public_ip_pool.id.belongs(current.db(current.db.host.public_ip != None)._select(current.db.host.public_ip)))
                         & (current.db.public_ip_pool.is_active == True)) \
                         .select(current.db.public_ip_pool.ALL, orderby='<random>').first()
    return public_ip_pool
def _choose_mac_ip(vm_properties):
    """
    Chooses mac address and ip address for a vm to be installed.
    It also chooses a random public IP if requested

    Mutates vm_properties in place, filling private_ip, mac_addr, vlan_name,
    vlan_tag and (when public_ip_req is set) public_ip. Raises Exception when
    the public IP pool is exhausted.
    """
    # Respect a pre-assigned private IP (e.g. on reinstall); otherwise draw
    # a free one from the security domain's pool.
    if not 'private_ip' in vm_properties:
        private_ip_info = _get_private_ip_mac(vm_properties['security_domain'])
        vm_properties['private_ip'] = private_ip_info.private_ip
        vm_properties['mac_addr'] = private_ip_info.mac_addr
        vm_properties['vlan_name'] = private_ip_info.vlan.name
        vm_properties['vlan_tag'] = private_ip_info.vlan.vlan_tag
    if vm_properties['public_ip_req']:
        if 'public_ip' not in vm_properties:
            public_ip_pool = _choose_random_public_ip()
            if public_ip_pool:
                vm_properties['public_ip'] = public_ip_pool.public_ip
            else:
                raise Exception("Available Public IPs are exhausted.")
    else:
        # Explicitly record "no public IP" so later code can rely on the key.
        vm_properties['public_ip'] = None
def _choose_mac_ip_vncport(vm_properties):
    """
    Choose MAC address, IP address(es) and a free VNC port for a new VM.

    Delegates address selection to _choose_mac_ip(), then draws a random VNC
    port from the configured range that is not already assigned to any VM.
    The port is stored in vm_properties['vnc_port'] as a string (matching how
    vm_data.vnc_port is written elsewhere in this module).
    """
    _choose_mac_ip(vm_properties)
    start_range = int(get_constant('vncport_start_range'))
    end_range = int(get_constant('vncport_end_range'))
    # Materialize the taken ports as a set of strings. The previous code kept
    # the raw DAL Rows and tested `int in rows`, which compares an int against
    # Row objects and never matches -- duplicate VNC ports could be handed out.
    vnc_ports_taken = set(str(row.vnc_port) for row in current.db().select(current.db.vm_data.vnc_port))
    while True:
        random_vnc_port = random.randrange(start_range, end_range, 1)
        if str(random_vnc_port) not in vnc_ports_taken:
            break
    vm_properties['vnc_port'] = str(random_vnc_port)
def find_new_host(RAM, vCPU):
    """
    Select a random host from list of 3 hosts with available RAM and CPU
    Availability is checked with 200 percent over-commitment.

    RAM is in MB; host['RAM'] is in GB (converted via * 1024 below).
    Up to 3 random active hosts are sampled; of those that fit, the one with
    the smallest total RAM is returned (its id). Raises Exception when no
    active host can accommodate the request.
    """
    hosts = current.db(current.db.host.status == 1).select()
    hosts = hosts.as_list(True,False)
    count = 3
    selected_hosts = []
    # Sample without replacement: each candidate is removed from `hosts`.
    while count != 0 and hosts:
        host = random.choice(hosts)
        logger.debug("Checking host =" + host['host_name'])
        (used_ram, used_cpu) = host_resources_used(host['id'])
        logger.debug("used ram: " + str(used_ram) + " used cpu: " + str(used_cpu) + " host ram: " + str(host['RAM']) + " host cpu "+ str(host['CPUs']))
        # 200% over-commitment: a host may promise twice its physical RAM/CPU.
        host_ram_after_200_percent_overcommitment = math.floor((host['RAM'] * 1024) * 2)
        host_cpu_after_200_percent_overcommitment = math.floor(host['CPUs'] * 2)
        logger.debug("ram available: %s cpu available: %s cpu < max cpu: %s" % ((( host_ram_after_200_percent_overcommitment - used_ram) >= RAM), ((host_cpu_after_200_percent_overcommitment - used_cpu) >= vCPU), (vCPU <= host['CPUs']) ))
        if((( host_ram_after_200_percent_overcommitment - used_ram) >= RAM) and ((host_cpu_after_200_percent_overcommitment - used_cpu) >= vCPU) and (vCPU <= host['CPUs'])):
            selected_hosts.append(host)
        count = count -1
        hosts.remove(host)
    if selected_hosts:
        #Sort selected host list by Ram first then Cpu
        selected_host = sorted(selected_hosts,key=lambda k: k['RAM'])[0]
        return selected_host['id']
    #If no suitable host found
    raise Exception("No active host is available for a new vm.")
def allocate_vm_properties(vm_details):
    """
    Allocates vm properties ( datastore, host, ip address, mac address, vnc port, ram, vcpus)

    Builds and returns the vm_properties dict consumed by the rest of the
    install pipeline (create_vm_image, launch_vm_on_host, DB update).
    """
    logger.debug("Inside allocate_vm_properties()...")
    vm_properties = {}
    # Placement: datastore with most free space, host with spare RAM/CPU.
    vm_properties['datastore'] = _choose_datastore()
    logger.debug("Datastore selected is: " + str(vm_properties['datastore']))
    vm_properties['host'] = find_new_host(vm_details.RAM, vm_details.vCPU)
    logger.debug("Host selected is: " + str(vm_properties['host']))
    # A non-None public_ip on the request row means "public IP wanted".
    vm_properties['public_ip_req'] = False if (vm_details.public_ip == None) else True
    vm_properties['security_domain'] = vm_details.security_domain
    # Fills mac_addr/private_ip/public_ip/vlan_*/vnc_port in place.
    _choose_mac_ip_vncport(vm_properties)
    logger.debug("MAC is : " + str(vm_properties['mac_addr']) + " IP is : " + str(vm_properties['private_ip']) + " VNCPORT is : " \
                 + str(vm_properties['vnc_port']) + " Vlan tag is " + str(vm_properties['vlan_tag']) )
    vm_properties['ram'] = vm_details.RAM
    vm_properties['vcpus'] = vm_details.vCPU
    return vm_properties
def create_vm_image(vm_details, datastore):
    """
    Create a VM image
    - Creates a directory for the new VM using vm_identity
    - Find the location of template image requested for
    - Copy the template image from its location to new vm directory

    Returns (template_row, absolute_path_of_new_qcow2_image).
    Raises Exception when the VM directory already exists or the copy/rename
    fails.
    """
    # Creates a directory for the new vm
    vm_directory_path = datastore.system_mount_point + '/' + get_constant('vms') + '/' + vm_details.vm_identity
    logger.debug("Creating vm directory...")
    if not os.path.exists (vm_directory_path):
        os.makedirs(vm_directory_path)
    else:
        raise Exception("Directory with same name as vmname already exists.")
    # Finds the location of template image that the user has requested for its vm.
    template = current.db.template[vm_details.template_id]
    vm_image_name = vm_directory_path + '/' + vm_details.vm_identity + '.qcow2'
    # Copies the template image from its location to new vm directory
    # Local filesystem copy uses `cp`; NetApp NFS storage uses `ndmpcopy`
    # executed remotely on the filer, followed by a local rename.
    storage_type = config.get("GENERAL_CONF","storage_type")
    copy_command = 'ndmpcopy ' if storage_type == current.STORAGE_NETAPP_NFS else 'cp '
    #template_dir = get_constant('vm_templates_datastore')
    if copy_command == 'cp ':
        template_location = datastore.system_mount_point + '/' + get_constant('templates_dir') + '/' + template.hdfile
        logger.debug("cp %s %s" % (template_location, vm_image_name))
        rc = os.system("cp %s %s" % (template_location, vm_image_name))
        if rc != 0:
            logger.error("Copy not successful")
            raise Exception("Copy not successful")
        else:
            logger.debug("Copied successfully")
    elif copy_command == 'ndmpcopy ':
        template_dir = template.datastore_id.path
        logger.debug(template_dir)
        logger.debug("Copy in progress when storage type is " + str(storage_type))
        command_to_execute = copy_command + template_dir + '/' + get_constant("templates_dir") + '/' + \
                             template.hdfile + ' ' + datastore.path + '/' + get_constant('vms') + '/' + \
                             vm_details.vm_identity
        logger.debug("ndmpcopy command: " + str(command_to_execute))
        command_output = execute_remote_cmd(datastore.ds_ip, datastore.username, command_to_execute, datastore.password)
        logger.debug(command_output)
        logger.debug("Copied successfully.")
        try:
            # ndmpcopy keeps the template filename; rename it to <vm>.qcow2.
            vm_template_name = datastore.system_mount_point + '/' + get_constant('vms') + '/' + vm_details.vm_identity + '/' + template.hdfile
            os.rename(vm_template_name, vm_image_name)
            logger.debug("Template renamed successfully")
        except:
            logger.debug("Template rename not successful")
            raise Exception("Template rename not successful")
    return (template, vm_image_name)
def _get_install_command(vm_details, vm_image_location, vm_properties):
    """
    Generates install command for vm

    Builds the `virt-install` command line from the template's OS/arch/type
    and the allocated resources. Windows guests drop the virtio bus/model
    options; QCOW2 templates add format=qcow2.
    """
    template = vm_properties['template']
    bus = ',bus=virtio'
    optional = ' --import --os-type=' + template.os
    model = ',model=virtio'
    if (template.arch != 'amd64' and template.os == 'Linux'):
        optional = optional + ' --arch=' + template.arch + ' '
    format_command = ''
    if (template.type == 'QCOW2'):
        format_command = ',format=qcow2'
    if (template.os == 'Windows'):
        bus = ''
        model = ''
    # NOTE: the backslash continuations keep the literal's leading spaces
    # inside the string; the remote shell collapses that whitespace.
    install_command = 'virt-install \
                       --name=' + vm_details.vm_identity + ' \
                       --ram=' + str(vm_properties['ram']) + ' \
                       --vcpus=' + str(vm_properties['vcpus']) + optional + ' \
                       --disk path=' + vm_image_location + format_command + bus + ',cache=none' + ' \
                       --network network='+current.LIBVIRT_NETWORK + model + ',mac=' + vm_properties['mac_addr'] + ' \
                       --graphics vnc,port=' + vm_properties['vnc_port'] + ',listen=0.0.0.0,password=duolc \
                       --noautoconsole \
                       --autostart \
                       --force'
    return install_command
def _generate_disk_xml(diskpath, target_disk):
    """
    Build the libvirt <disk> device XML for attaching a qcow2-backed block
    device at the given guest target (e.g. 'vdb'). Returns the serialized XML.
    """
    root = etree.Element('disk', attrib={'type': 'block', 'device': 'disk'})
    children = (
        ('driver', {'name': 'qemu', 'cache': 'none', 'type': 'qcow2'}),
        ('source', {'dev': diskpath}),
        ('target', {'dev': target_disk}),
    )
    for tag, attrs in children:
        etree.SubElement(root, tag, attrib=attrs)
    return etree.tostring(root)
def create_extra_disk_image(vm_details, disk_name, size, datastore):
    """
    Create a qcow2 image of `size` GB for an extra disk of the VM.

    The image lives under the datastore's extra-disks directory; the per-VM
    subdirectory is created on first use. Returns True on success.
    """
    disk_dir = datastore.system_mount_point + '/' + get_constant('extra_disks_dir') + '/' + \
                                    datastore.ds_name + '/' + vm_details.vm_identity
    if not os.path.exists (disk_dir):
        logger.debug("Making Directory")
        os.makedirs(disk_dir)
    diskpath = disk_dir + '/' + disk_name
    command= "qemu-img create -f qcow2 "+ diskpath + " " + str(size) + "G"
    return os.system(command) == 0
def attach_disk(vm_details, disk_name, hostip, already_attached_disks, new_vm):
    """
    Attach given disk to the VM

    Generates the <disk> XML for the image, attaches it persistently
    (VIR_DOMAIN_AFFECT_CONFIG) and redefines the domain. For a brand-new VM
    the domain is stopped, modified and restarted; otherwise the VM must be
    shut down. Returns the disk size on success, 0 on any failure.
    """
    try:
        (connection_object, domain) = getVirshDomainConn(None, hostip, vm_details.vm_identity)
        #already_attached_disks = len(current.db(current.db.attached_disks.vm_id == vm.id).select())
        logger.debug("Value of alreadyattached is : " + str(already_attached_disks))
        (diskpath, device_present, disk_size) = get_extra_disk_location(vm_details.datastore_id, vm_details.vm_identity, disk_name, True)
        if not device_present:
            raise Exception("Device to be attached %s missing" %(diskpath))
        # Attaching disk to vm using libvirt API
        # Target device name: vdb for the first extra disk, vdc next, ...
        # (97 == ord('a'); +1 skips vda, the root disk).
        target_disk = "vd" + chr(97 + already_attached_disks + 1)
        logger.debug(target_disk)
        logger.debug("...................")
        xmlDescription = _generate_disk_xml(diskpath, target_disk)
        logger.debug(xmlDescription)
        logger.debug("new vm is %s " % new_vm)
        if new_vm:
            logger.debug("Starting to attach disk on new vm request.")
            domain.destroy()
            logger.debug("VM destroyed")
            domain.attachDeviceFlags(xmlDescription, VIR_DOMAIN_AFFECT_CONFIG)
            logger.debug("Disk attached")
            logger.debug("Turn on vm")
            domain.create()
            logger.debug("VM started")
            domain.isActive()
        elif vm_details.status == current.VM_STATUS_SHUTDOWN:
            logger.debug("Starting to attach disk while vm is shutdown.")
            domain.attachDeviceFlags(xmlDescription, VIR_DOMAIN_AFFECT_CONFIG)
            logger.debug("Disk attached")
        else:
            raise Exception("VM is not in shutdown state. Check its status on host")
        # Persist the updated definition so the disk survives restarts.
        xmlfile = domain.XMLDesc(0)
        domain = connection_object.defineXML(xmlfile)
        logger.debug("VM XML redefined")
        connection_object.close()
        return disk_size
    except:
        logger.exception('Exception: ')
        return 0
def serve_extra_disk_request(vm_details, disk_size, host_ip, new_vm = False):
    """
    Serves extra disk request and updates db

    Creates the qcow2 image on a freshly chosen datastore, attaches it to the
    VM and, on success, records the disk in attached_disks and bumps the
    datastore's used-space counter. Returns True on success.
    """
    logger.debug("Starting to serve extra disk request...")
    logger.debug("new vm is %s " % new_vm)
    datastore = _choose_datastore()
    # Disk numbering continues from the disks already attached to this VM.
    already_attached_disks = len(current.db(current.db.attached_disks.vm_id == vm_details.id).select())
    disk_name = vm_details.vm_identity + "_disk" + str(already_attached_disks + 1) + ".qcow2"
    disk_created = create_extra_disk_image(vm_details, disk_name, disk_size, datastore)
    # In-memory only: attach_disk reads vm_details.datastore_id for the path.
    vm_details.datastore_id = datastore.id
    if disk_created:
        if (attach_disk(vm_details, disk_name, host_ip, already_attached_disks, new_vm)):
            current.db.attached_disks.insert(vm_id = vm_details.id, datastore_id = datastore.id , attached_disk_name = disk_name, capacity = disk_size)
            current.db(current.db.datastore.id == datastore.id).update(used = int(datastore.used) + int(disk_size))
            return True
    return False
def launch_vm_on_host(vm_details, vm_image_location, vm_properties):
    """
    Launches a vm image on host

    Runs virt-install on the chosen host over SSH, tags the VM's network
    interface with its VLAN/portgroup, and serves any extra-HDD request.
    Returns a human-readable status message about the extra-disk attachment
    (empty string when no extra disk was requested).
    """
    attach_disk_status_message = ''
    install_command = _get_install_command(vm_details, vm_image_location, vm_properties)
    # Starts installing a vm
    host_ip = current.db.host[vm_properties['host']].host_ip.private_ip
    logger.debug("Installation started...")
    logger.debug("Host is "+ host_ip)
    logger.debug("Installation command : " + install_command)
    command_output = execute_remote_cmd(host_ip, 'root', install_command)
    logger.debug(command_output)
    logger.debug("Starting to set portgroup in vm...")
    _set_portgroup_in_vm(vm_details['vm_identity'], vm_properties['vlan_name'], host_ip, vm_properties['vlan_tag'])
    logger.debug("Portgroup set in vm")
    # Serving HDD request
    if (int(vm_details.extra_HDD) != 0):
        if (serve_extra_disk_request(vm_details, vm_details.extra_HDD, host_ip, new_vm = True)):
            message = "Attached extra disk successfully."
            attach_disk_status_message += message
            logger.debug(message)
        else:
            attach_disk_status_message += "Attached extra disk failed."
    return attach_disk_status_message
def check_if_vm_defined(hostip, vmname):
    """
    Check whether a freshly created VM exists and is active on the host.

    Returns True when a domain named `vmname` is found and its ID appears in
    the host's list of running domain IDs; False on any failure.

    NOTE(review): this effectively tests "defined AND running" -- which
    matches its use right after installation, when the VM is expected to be
    up. Confirm that is intended before reusing it for shut-off domains.
    """
    connection_object = None
    try:
        connection_object = libvirt.openReadOnly('qemu+ssh://root@'+ hostip +'/system')
        domain = connection_object.lookupByName(vmname)
        return domain.ID() in connection_object.listDomainsID()
    except:
        # Any libvirt failure (host unreachable, domain missing) => not defined.
        return False
    finally:
        # Close the connection even when lookupByName raises; the original
        # leaked the connection on that path.
        if connection_object is not None:
            connection_object.close()
def _free_vm_properties(vm_details, vm_properties):
    """
    Frees vm properties in-case installation has failed mid-way

    Best-effort rollback: destroys/undefines the half-created domain on the
    host, deletes its attached_disks rows, and removes the VM and extra-disk
    directories from the chosen datastore.
    """
    logger.debug("VM installation fails..Starting to free vm properties")
    if vm_properties:
        host_ip_of_vm = current.db.host[vm_properties['host']].host_ip.private_ip
        logger.debug("Host IP of vm is " + str(host_ip_of_vm))
        # Only tear down the domain if it actually got defined on the host.
        if check_if_vm_defined(host_ip_of_vm, vm_details.vm_identity):
            connection_object = libvirt.open('qemu+ssh://root@'+ host_ip_of_vm +'/system')
            domain = connection_object.lookupByName(vm_details.vm_identity)
            logger.debug("Starting to delete vm from host..")
            domain.destroy()
            domain.undefine()
            connection_object.close()
            logger.debug("VM deleted.")
        current.db(current.db.attached_disks.vm_id == vm_details.id).delete()
        if 'datastore' in vm_properties:
            vm_directory_path = vm_properties['datastore'].system_mount_point + '/' + get_constant('vms') + '/' + vm_details.vm_identity
            vm_extra_disk_dir_path = vm_properties['datastore'].system_mount_point + '/' + get_constant('extra_disks_dir') + '/' + vm_properties['datastore'].ds_name + '/' + vm_details.vm_identity
            if os.path.exists (vm_directory_path):
                logger.debug("Starting to delete vm directory.")
                shutil.rmtree(vm_directory_path)
            if os.path.exists (vm_extra_disk_dir_path):
                logger.debug("Starting to delete vm extra disk directory.")
                shutil.rmtree(vm_extra_disk_dir_path)
    return
def update_db_after_vm_installation(vm_details, vm_properties, parent_id = None):
    """
    Updates db after a vm is installed successfully

    Bumps the datastore's used-space counter, resolves the private/public IP
    pool rows, and writes placement/network/port details back to vm_data.
    A non-None parent_id marks the row as a clone created in SHUTDOWN state;
    otherwise the VM is recorded as RUNNING.
    """
    logger.debug("Starting to update db after vm installation..")
    hostid = vm_properties['host']
    datastore = vm_properties['datastore']
    template_hdd = vm_properties['template'].hdd
    logger.debug("Inside update db after installation")
    logger.debug(vm_properties)
    # Updating the used entry of datastore
    current.db(current.db.datastore.id == datastore.id).update(used = int(datastore.used) + int(vm_details.extra_HDD) +
                                                               int(template_hdd))
    # vm_data stores foreign keys into the IP pools, not raw addresses.
    private_ip_id = current.db.private_ip_pool(private_ip=vm_properties['private_ip']).id
    public_ip_id = None
    if vm_properties['public_ip'] != None:
        public_ip_id = current.db.public_ip_pool(public_ip=vm_properties['public_ip']).id
    if parent_id:
        vm_status = current.VM_STATUS_SHUTDOWN
    else:
        vm_status = current.VM_STATUS_RUNNING
    # Update vm_data table
    current.db(current.db.vm_data.id == vm_details.id).update( host_id = hostid,
                                                               extra_HDD = vm_details.extra_HDD,
                                                               datastore_id = datastore.id,
                                                               vnc_port = vm_properties['vnc_port'],
                                                               private_ip = private_ip_id,
                                                               public_ip = public_ip_id,
                                                               start_time = get_datetime(),
                                                               parent_id = parent_id,
                                                               status = vm_status)
    logger.debug("Updated db")
    return
# Installs a vm
def install(parameters):
    """
    Installs a vm

    End-to-end installation pipeline for the task queue: allocate resources,
    clone the template image, run virt-install on the host, set up NAT for a
    public IP when requested, and persist the result. On any failure all
    partially-allocated resources are rolled back via _free_vm_properties.
    Returns a (task-status, message) tuple.
    """
    vmid = parameters['vm_id']
    logger.debug("In install() function...")
    vm_details = current.db.vm_data[vmid]
    vm_properties = None
    try:
        # Fetches vm details from vm_data table
        logger.debug("VM details are: " + str(vm_details))
        # Calling allocate_vm_properties function
        vm_properties = allocate_vm_properties(vm_details)
        # Calling create_vm_image function
        (vm_properties['template'], vm_image_location) = create_vm_image(vm_details, vm_properties['datastore'])
        # Calling launch_vm_on_host
        attach_disk_status_message = launch_vm_on_host(vm_details, vm_image_location, vm_properties)
        # Checking if vm has been installed successfully
        assert(check_if_vm_defined(current.db.host[vm_properties['host']].host_ip.private_ip, vm_details.vm_identity)), "VM is not installed. Check logs."
        if vm_properties['public_ip_req']:
            # NAT mapping makes the VM reachable via its public IP.
            create_mapping(vm_properties['public_ip'], vm_properties['private_ip'])
        # Update database after vm installation
        update_db_after_vm_installation(vm_details, vm_properties)
        message = "VM is installed successfully." + attach_disk_status_message
        logger.debug("Task Status: SUCCESS Message: %s " % message)
        return (current.TASK_QUEUE_STATUS_SUCCESS, message)
    except:
        if vm_properties != None:
            _free_vm_properties(vm_details, vm_properties)
        logger.debug("Task Status: FAILED Error: %s " % log_exception())
        return (current.TASK_QUEUE_STATUS_FAILED, log_exception())
def start(parameters):
    """
    Power on a VM and mark it RUNNING in the database.

    parameters['vm_id'] identifies the vm_data row. Returns a
    (task-status, message) tuple for the task queue.
    """
    logger.debug("Inside start() function")
    vm_id = parameters['vm_id']
    vm = current.db.vm_data[vm_id]
    try:
        domain = getVirshDomain(vm)
        # Guard: starting an already-running domain is treated as an error.
        if domain.info()[0] == VIR_DOMAIN_RUNNING:
            raise Exception("VM is already running. Check vm status on host.")
        domain.create()
        current.db(current.db.vm_data.id == vm_id).update(status = current.VM_STATUS_RUNNING)
        message = "%s is started successfully." % vm.vm_identity
        logger.debug("Task Status: SUCCESS Message: %s " % message)
        return (current.TASK_QUEUE_STATUS_SUCCESS, message)
    except:
        logger.debug("Task Status: FAILED Error: %s " % log_exception())
        return (current.TASK_QUEUE_STATUS_FAILED, log_exception())
def suspend(parameters):
    """
    Pause a running VM and mark it SUSPENDED in the database.

    parameters['vm_id'] identifies the vm_data row. Returns a
    (task-status, message) tuple for the task queue.
    """
    logger.debug("Inside suspend() function")
    vm_id = parameters['vm_id']
    vm = current.db.vm_data[vm_id]
    try:
        domain = getVirshDomain(vm)
        # Guard: pausing an already-paused domain is treated as an error.
        if domain.info()[0] == VIR_DOMAIN_PAUSED:
            raise Exception("VM is already paused. Check vm status on host.")
        domain.suspend()
        current.db(current.db.vm_data.id == vm_id).update(status = current.VM_STATUS_SUSPENDED)
        message = "%s is suspended successfully." % vm.vm_identity
        logger.debug("Task Status: SUCCESS Message: %s " % message)
        return (current.TASK_QUEUE_STATUS_SUCCESS, message)
    except:
        logger.debug("Task Status: FAILED Error: %s " % log_exception())
        return (current.TASK_QUEUE_STATUS_FAILED, log_exception())
def resume(parameters):
    """
    Resume a paused VM and mark it RUNNING in the database.

    parameters['vm_id'] identifies the vm_data row. Returns a
    (task-status, message) tuple for the task queue.
    """
    logger.debug("Inside resume() function")
    vm_id = parameters['vm_id']
    vm = current.db.vm_data[vm_id]
    try:
        domain = getVirshDomain(vm)
        # Guard: resuming an already-running domain is treated as an error.
        if domain.info()[0] == VIR_DOMAIN_RUNNING:
            raise Exception("VM is already running. Check vm status on host.")
        domain.resume()
        current.db(current.db.vm_data.id == vm_id).update(status = current.VM_STATUS_RUNNING)
        message = "%s is resumed successfully." % vm.vm_identity
        logger.debug("Task Status: SUCCESS Message: %s " % message)
        return (current.TASK_QUEUE_STATUS_SUCCESS, message)
    except:
        logger.debug("Task Status: FAILED Error: %s " % log_exception())
        return (current.TASK_QUEUE_STATUS_FAILED, log_exception())
def destroy(parameters):
    """
    Forcefully power off a VM (libvirt destroy) and mark it SHUTDOWN.

    parameters['vm_id'] identifies the vm_data row. Returns a
    (task-status, message) tuple for the task queue.
    """
    logger.debug("Inside destroy() function")
    vm_id = parameters['vm_id']
    vm = current.db.vm_data[vm_id]
    logger.debug(str(vm))
    try:
        domain = getVirshDomain(vm)
        # Guard: destroying an already-shutoff domain is treated as an error.
        if domain.info()[0] == VIR_DOMAIN_SHUTOFF:
            raise Exception("VM is already shutoff. Check vm status on host.")
        domain.destroy()
        current.db(current.db.vm_data.id == vm_id).update(status = current.VM_STATUS_SHUTDOWN)
        message = "%s is destroyed successfully." % vm.vm_identity
        logger.debug("Task Status: SUCCESS Message: %s " % message)
        return (current.TASK_QUEUE_STATUS_SUCCESS, message)
    except:
        logger.debug("Task Status: FAILED Error: %s " % log_exception())
        return (current.TASK_QUEUE_STATUS_FAILED, log_exception())
def shutdown(parameters):
    """
    Stop a VM via libvirt managedSave (state saved to disk) and mark it
    SHUTDOWN in the database.

    parameters['vm_id'] identifies the vm_data row. Returns a
    (task-status, message) tuple for the task queue.
    """
    logger.debug("Inside shutdown() function")
    vm_id = parameters['vm_id']
    vm = current.db.vm_data[vm_id]
    logger.debug(str(vm))
    try:
        domain = getVirshDomain(vm)
        # Guard: shutting down an already-shutoff domain is treated as an error.
        if domain.info()[0] == VIR_DOMAIN_SHUTOFF:
            raise Exception("VM is already shutoff. Check vm status on host.")
        domain.managedSave()
        current.db(current.db.vm_data.id == vm_id).update(status = current.VM_STATUS_SHUTDOWN)
        message = "%s is shutdown successfully." % vm.vm_identity
        logger.debug("Task Status: SUCCESS Message: %s " % message)
        return (current.TASK_QUEUE_STATUS_SUCCESS, message)
    except:
        logger.debug("Task Status: FAILED Error: %s " % log_exception())
        return (current.TASK_QUEUE_STATUS_FAILED, log_exception())
def _clean_up_database_after_vm_deletion(vm_details):
    """
    Cleans up database after vm deletion

    Archives the VM's image directory (timestamped), deletes its extra-disk
    directory, releases the datastore space it used, detaches task-queue
    references, and removes its attached_disks rows.
    """
    logger.debug("Inside clean up database after vm deletion () function...")
    # moving vm image folder to archives folder
    archive_directory_path = vm_details.datastore_id.system_mount_point + '/' + get_constant('archives_dir')
    if not os.path.exists(archive_directory_path):
        os.makedirs(archive_directory_path)
    source_file = vm_details.datastore_id.system_mount_point + '/' + get_constant('vms') + '/' + vm_details.vm_identity
    # Timestamp suffix keeps archives unique if the same identity is reused.
    archive_filename = vm_details.vm_identity + str(get_datetime())
    logger.debug(archive_filename)
    destination_file = archive_directory_path + '/' + archive_filename
    shutil.move(source_file, destination_file)
    # removing hdd
    vm_extra_disks_directory_path = vm_details.datastore_id.system_mount_point + '/' + get_constant('extra_disks_dir') + '/' + \
                                    vm_details.datastore_id.ds_name + "/" + vm_details.vm_identity
    if os.path.exists(vm_extra_disks_directory_path):
        shutil.rmtree(vm_extra_disks_directory_path)
    # updating the used entry of database
    current.db(current.db.datastore.id == vm_details.datastore_id).update(used = int(vm_details.datastore_id.used) - \
                                                          (int(vm_details.extra_HDD) + int(vm_details.template_id.hdd)))
    # updating task_queue_event entry to remove reference of VM
    current.db(current.db.task_queue_event.vm_id == vm_details.id).update(vm_id = None)
    # deleting entry of extra disk of vm
    current.db(current.db.attached_disks.vm_id == vm_details.id).delete()
    logger.debug("Database cleaned")
def vm_has_snapshots(vm_id):
    """Return True when at least one snapshot row exists for the given VM."""
    snapshots = current.db(current.db.snapshot.vm_id == vm_id).select()
    return bool(snapshots)
def delete(parameters):
    """
    Permanently delete a VM.

    Force-stops the domain if it is running or suspended, undefines it
    together with its snapshot metadata, removes any public-IP NAT
    mapping, archives the image, and deletes all database rows.

    Args:
        parameters (dict): Must contain 'vm_id'.

    Returns:
        tuple: (task queue status, message or exception log).
    """
    logger.debug("Inside delete() function")
    vm_id = parameters['vm_id']
    vm_details = current.db.vm_data[vm_id]
    try:
        domain = getVirshDomain(vm_details)
        logger.debug(str(vm_details.status))
        if (vm_details.status == current.VM_STATUS_RUNNING or vm_details.status == current.VM_STATUS_SUSPENDED):
            logger.debug("Vm is not shutoff. Shutting it off first.")
            # destroy() is an immediate, ungraceful power-off.
            domain.destroy()
        logger.debug("Starting to delete it...")
        # Undefine the domain along with any snapshot metadata it owns.
        domain.undefineFlags(VIR_DOMAIN_UNDEFINE_SNAPSHOTS_METADATA )
        if vm_details.public_ip:
            # Release the NAT mapping so the public IP can be reused.
            remove_mapping(vm_details.public_ip.public_ip, vm_details.private_ip.private_ip)
        message = vm_details.vm_identity + " is deleted successfully."
        logger.debug(message)
        _clean_up_database_after_vm_deletion(vm_details)
        current.db(current.db.vm_data.id == vm_id).delete()
        current.db.commit()
        logger.debug("Task Status: SUCCESS Message: %s " % message)
        return (current.TASK_QUEUE_STATUS_SUCCESS, message)
    except:
        logger.debug("Task Status: FAILED Error: %s " % log_exception())
        return (current.TASK_QUEUE_STATUS_FAILED, log_exception())
def migrate_domain_with_snapshots(vm_details, destination_host_ip, domain, domain_snapshots_list, current_snapshot_name, flags, vm_backup_during_migration):
    """
    Migrate a domain that carries snapshots.

    libvirt refuses to migrate a domain with snapshot metadata, so this:
    1. dumps each snapshot's XML into a backup directory,
    2. deletes the snapshots from the source domain,
    3. migrates the domain to the destination host,
    4. redefines the snapshots there and restores the current-snapshot
       marker.

    NOTE(review): step 4 runs `virsh` on the destination host against the
    dump paths written in step 1 — this assumes the backup directory is on
    storage visible from both hosts. TODO confirm.

    Args:
        vm_details: vm_data row of the VM being migrated.
        destination_host_ip (str): private IP of the destination host.
        domain: libvirt virDomain handle on the source host.
        domain_snapshots_list (list): snapshot names known to the database.
        current_snapshot_name (str): the domain's current snapshot name.
        flags (int): VIR_MIGRATE_* flags passed to migrateToURI.
        vm_backup_during_migration (str): directory for the XML dumps.
    """
    # XML dump of snapshot(s) of the vm
    logger.debug("Starting to take xml dump of the snapshot(s) of the vm... ")
    if not os.path.exists(vm_backup_during_migration):
        os.makedirs(vm_backup_during_migration)
    for domain_snapshot in domain_snapshots_list:
        logger.debug("snapshot name is " + str(domain_snapshot))
        dump_xml_path = vm_backup_during_migration + '/' + 'dump_' + domain_snapshot
        snapshot_dumpxml_command = 'virsh snapshot-dumpxml %s %s > %s' % ( vm_details.vm_identity, domain_snapshot, dump_xml_path)
        logger.debug("Taking xml dump of" + str(domain_snapshot))
        command_output = execute_remote_cmd(vm_details.host_id.host_ip.private_ip, 'root', snapshot_dumpxml_command)
        logger.debug(command_output)
        logger.debug("XML dump of " + str(domain_snapshot) + "succeeded.")
    # Delete snapshot(s) of the vm and migrate it to destination host
    logger.debug("Starting to delete snapshots of the vm....")
    for domain_snapshot in domain_snapshots_list:
        snapshot = domain.snapshotLookupByName(domain_snapshot, 0)
        snapshot.delete(0)
    logger.debug("Migrating the vm to destination host...")
    domain.migrateToURI("qemu+ssh://root@" + destination_host_ip + "/system", flags , None, 0)
    # Redefine all the snapshot(s) of the vm on the destination host and set current snapshot
    logger.debug("Starting to redefine all the snapshot(s) of the domain...")
    for domain_snapshot in domain_snapshots_list:
        redefine_xml_path = vm_backup_during_migration + '/' + 'dump_' + domain_snapshot
        snapshot_redefine_command = 'virsh snapshot-create --redefine %s %s ' % (vm_details.vm_identity, redefine_xml_path)
        command_output = execute_remote_cmd(destination_host_ip, 'root', snapshot_redefine_command)
        logger.debug(command_output)
    snapshot_current_command = 'virsh snapshot-current %s %s' % (vm_details.vm_identity, current_snapshot_name)
    command_output = execute_remote_cmd(destination_host_ip, 'root', snapshot_current_command)
    logger.debug(command_output)
    return
def _clean_migration_directory(vm_backup_during_migration):
"""
Delete directory created for storing dumpxml of vm snapshots
"""
if os.path.exists(vm_backup_during_migration):
shutil.rmtree(vm_backup_during_migration)
return
def undo_migration(vm_details, domain_snapshots_list, current_snapshot_name, vm_backup_during_migration):
    """
    Roll back a failed migration: redefine every snapshot of the VM on the
    source host from the saved XML dumps, restore its current-snapshot
    marker, and remove the temporary dump directory.
    """
    if domain_snapshots_list:
        logger.debug("Starting to redefine all the snapshot(s) of the vm on the source host...")
        source_host_ip = vm_details.host_id.host_ip.private_ip
        for snapshot_name in domain_snapshots_list:
            dump_path = vm_backup_during_migration + '/' + 'dump_' + snapshot_name
            redefine_cmd = 'virsh snapshot-create --redefine %s %s ' % (vm_details.vm_identity, dump_path)
            logger.debug(execute_remote_cmd(source_host_ip, 'root', redefine_cmd, None, True))
        # Restore which snapshot the domain considers "current".
        current_cmd = 'virsh snapshot-current %s %s' % (vm_details.vm_identity, current_snapshot_name)
        logger.debug(execute_remote_cmd(source_host_ip, 'root', current_cmd, None, True))
    # Delete directory created for storing dumpxml of vm snapshots
    _clean_migration_directory(vm_backup_during_migration)
    return
def migrate_domain(vm_id, destination_host_id=None, live_migration=False):
    """
    Migrate a VM domain to another host.

    Chooses a destination host automatically when none is given, derives
    the libvirt migration flags from the VM's current status, deletes
    snapshots that exist in libvirt but not in the database (orphans),
    and delegates to migrate_domain_with_snapshots() when database-known
    snapshots must survive the move. On any failure the migration is
    rolled back via undo_migration().

    Args:
        vm_id: id of the VM row in vm_data.
        destination_host_id: optional destination host id; auto-selected
            via find_new_host() when None.
        live_migration (bool): tunnelled live migration when True.

    Returns:
        tuple: (task queue status, message or exception log).
    """
    vm_details = current.db.vm_data[vm_id]
    domain_snapshots_list = []
    current_snapshot_name = ''
    vm_migration_directory = get_constant('vm_migration_data')
    vm_backup_during_migration = vm_details.datastore_id.system_mount_point + '/' + vm_migration_directory + '/' + \
                                 vm_details.vm_identity
    if destination_host_id == None:
        destination_host_id = find_new_host(vm_details.RAM, vm_details.vCPU)
    destination_host_ip = current.db.host[destination_host_id].host_ip.private_ip
    # Base flags; extended below depending on requested mode and VM state.
    flags = VIR_MIGRATE_PEER2PEER|VIR_MIGRATE_PERSIST_DEST|VIR_MIGRATE_UNDEFINE_SOURCE|VIR_MIGRATE_UNSAFE
    if live_migration:
        flags |= VIR_MIGRATE_TUNNELLED|VIR_MIGRATE_LIVE
    if vm_details.status == current.VM_STATUS_SUSPENDED:
        logger.debug("Vm is suspended")
        flags |= VIR_MIGRATE_TUNNELLED|VIR_MIGRATE_PAUSED
    elif vm_details.status == current.VM_STATUS_SHUTDOWN:
        logger.debug("Vm is shut off")
        flags |= VIR_MIGRATE_OFFLINE
    logger.debug("Flags: " + str(flags))
    try:
        domain = getVirshDomain(vm_details)
        dom_snapshot_names = domain.snapshotListNames(0)
        # DB-recorded snapshots are preserved across the migration; any name
        # left in dom_snapshot_names afterwards is an orphan to be deleted.
        for snapshot in current.db(current.db.snapshot.vm_id == vm_id).select():
            logger.debug("snapshot:" + str(snapshot.snapshot_name))
            domain_snapshots_list.append(snapshot.snapshot_name)
            dom_snapshot_names.remove(snapshot.snapshot_name)
        logger.debug("domain snapshot list is " + str(domain_snapshots_list))
        for dom_snapshot in dom_snapshot_names:
            logger.debug("Deleting orphan snapshot %s" %(dom_snapshot))
            snapshot = domain.snapshotLookupByName(dom_snapshot, 0)
            snapshot.delete(0)
        if domain_snapshots_list:
            current_snapshot = domain.snapshotCurrent(0)
            current_snapshot_name = current_snapshot.getName()
            migrate_domain_with_snapshots(vm_details, destination_host_ip, domain, domain_snapshots_list, current_snapshot_name, flags, vm_backup_during_migration)
        else:
            domain.migrateToURI("qemu+ssh://root@" + destination_host_ip + "/system", flags , None, 0)
        vm_details.update_record(host_id = destination_host_id)
        current.db.commit()
        # Delete directory created for storing dumpxml of vm snapshot
        _clean_migration_directory(vm_backup_during_migration)
        message = vm_details.vm_identity + " is migrated successfully."
        logger.debug("Task Status: SUCCESS Message: %s " % message)
        return (current.TASK_QUEUE_STATUS_SUCCESS, message)
    except:
        undo_migration(vm_details, domain_snapshots_list, current_snapshot_name, vm_backup_during_migration)
        logger.debug("Task Status: FAILED Error: %s " % log_exception())
        return (current.TASK_QUEUE_STATUS_FAILED, log_exception())
def migrate_domain_datastore(vmid, destination_datastore_id, live_migration=False):
    """
    Migrate a VM's disk image from its current datastore to another one.

    - Copies the qcow2 image to the destination datastore (plain `cp` for
      an offline migration; libvirt blockRebase + pivot for a live one).
    - Rewrites the disk <source> path in the domain XML and redefines it.
    - Updates vm_data and removes the old image directory.

    Args:
        vmid: id of the VM row in vm_data.
        destination_datastore_id: id of the target datastore.
        live_migration (bool): use a live block copy when True.

    Returns:
        tuple: (task queue status, message or exception log).
    """
    logger.debug(sys.path)
    vm_details = current.db.vm_data[vmid]
    logger.debug("Inside live disk migration block")
    connection_object = None  # so the except block can close it safely
    try:
        (connection_object, domain) = getVirshDomainConn(vm_details)
        datastore = current.db.datastore[destination_datastore_id]
        vm_directory_path = datastore.system_mount_point + get_constant('vms') + '/' + vm_details.vm_identity
        logger.debug("Creating vm directory on other datastore...")
        if not os.path.exists(vm_directory_path):
            os.makedirs(vm_directory_path)
        diskpath = vm_directory_path + '/' + vm_details.vm_identity + '.qcow2'
        current_disk_path = vm_details.datastore_id.system_mount_point + get_constant('vms') + '/' + vm_details.vm_identity
        current_disk_file = current_disk_path + '/' + vm_details.vm_identity + '.qcow2'
        logger.debug(current_disk_file)
        xmlfile = domain.XMLDesc(0)
        # Bug fix: parse the XML up front — both the offline and the live
        # path need `root` below to rewrite the disk <source> element; it
        # was previously only built on the live path.
        root = etree.fromstring(xmlfile)
        if live_migration == False:
            # Offline: plain file copy of the image.
            rc = os.system("cp %s %s" % (current_disk_file, diskpath))
            if rc != 0:
                logger.error("Copy not successful")
                raise Exception("Copy not successful")
            else:
                logger.debug("Copied successfully")
        else:
            # Bug fix: isActive is a method — `domain.isActive` without the
            # call parentheses is a bound-method object and always truthy.
            if domain.isActive():
                domain.undefine()
            target_elem = root.find("devices/disk/target")
            target_disk = target_elem.get('dev')
            flag = VIR_DOMAIN_BLOCK_REBASE_SHALLOW | VIR_DOMAIN_BLOCK_REBASE_COPY
            domain.blockRebase(target_disk, diskpath, 0, flag)
            # Poll until the block copy has caught up with the live disk.
            block_info_list = domain.blockJobInfo(current_disk_file, 0)
            while(block_info_list['end'] != block_info_list['cur']):
                logger.debug("time to sleep")
                time.sleep(60)
                block_info_list = domain.blockJobInfo(current_disk_file, 0)
            # Pivot the domain onto the copied image.
            domain.blockJobAbort(current_disk_file, VIR_DOMAIN_BLOCK_JOB_ABORT_PIVOT)
        # Point the domain definition at the new image location.
        source_elem = root.find("devices/disk/source")
        source_elem.set('file', diskpath)
        newxml_file = etree.tostring(root)
        domain = connection_object.defineXML(newxml_file)
        vm_details.update_record(datastore_id=destination_datastore_id)
        # Remove the old image and its directory once the copy is in place.
        if os.path.exists(diskpath):
            os.remove(current_disk_file)
        restore_symboltable_path = current_disk_path + "/restore_symboltable"
        if os.path.exists(restore_symboltable_path):
            logger.debug(restore_symboltable_path)
            os.remove(restore_symboltable_path)
        os.rmdir(current_disk_path)
        connection_object.close()
        message = vm_details.vm_identity + " is migrated successfully to new datastore."
        logger.debug("Task Status: SUCCESS Message: %s " % message)
        return (current.TASK_QUEUE_STATUS_SUCCESS, message)
    except:
        #undo_datastore_migration(vm_details, domain, diskpath, current_disk_file, vm_directory_path, datastore_id)
        # Bug fix: only close the connection when it was actually opened;
        # closing an unbound name raised NameError and masked the real error.
        if connection_object is not None:
            connection_object.close()
        logger.debug("Task Status: FAILED Error: %s " % log_exception())
        return (current.TASK_QUEUE_STATUS_FAILED, log_exception())
def undo_datastore_migration(vm_details, domain, diskpath, current_disk_file, vm_directory_path, datastore_id):
    """
    Roll back a failed datastore migration.

    Restores the original datastore reference in vm_data, drains and
    aborts any in-flight block-copy job (without pivoting), and removes
    the partially copied image and its directory on the destination.

    Args:
        vm_details: vm_data row of the VM.
        domain: libvirt virDomain handle.
        diskpath (str): image path on the destination datastore.
        current_disk_file (str): image path on the source datastore.
        vm_directory_path (str): VM directory on the destination datastore.
        datastore_id: original datastore id to restore in the database.
    """
    # undo database changes
    vm_details.update_record(datastore_id=datastore_id)
    # Bug fix: isActive is a method; without the call parentheses the
    # bound-method object was always truthy, so this branch always ran.
    if domain.isActive():
        logger.debug("domain is active")
        block_info_list = domain.blockJobInfo(current_disk_file, 0)
        if bool(block_info_list) == True:
            # Wait for the copy job to catch up, then abort it (no pivot,
            # so the domain stays on the original image).
            while block_info_list['end'] != block_info_list['cur']:
                logger.debug("time to sleep")
                time.sleep(60)
                block_info_list = domain.blockJobInfo(current_disk_file, 0)
            if block_info_list['end'] == block_info_list['cur']:
                domain.blockJobAbort(current_disk_file)
                block_info_list = domain.blockJobInfo(current_disk_file, 0)
    # Remove the partially copied image and its directory, if created.
    if os.path.exists(diskpath):
        os.remove(diskpath)
        os.rmdir(vm_directory_path)
def migrate(parameters):
    """
    Migrate a VM to a new host.

    Args:
        parameters (dict): Must contain 'vm_id', 'destination_host' and
            'live_migration' ('on' — the web-form checkbox value —
            enables live migration).

    Returns:
        tuple: (task queue status, message) from migrate_domain().
    """
    vmid = parameters['vm_id']
    logger.debug("Inside migrate() function for vm_id: "+str(vmid))
    destination_host_id = parameters['destination_host']
    # The comparison already yields the bool; no if/else needed.
    live_migration = parameters['live_migration'] == 'on'
    return migrate_domain(vmid, destination_host_id, live_migration)
def migrate_datastore(parameters):
    """
    Migrate a VM's disk image to a new datastore.

    Args:
        parameters (dict): Must contain 'vm_id', 'destination_ds' and
            'live_migration' ('on' — the web-form checkbox value —
            enables live block copy).

    Returns:
        tuple: (task queue status, message) from migrate_domain_datastore().
    """
    logger.debug("Inside migrate_datastore() function")
    vmid = parameters['vm_id']
    destination_ds_id = parameters['destination_ds']
    # The comparison already yields the bool; no if/else needed.
    live_migration = parameters['live_migration'] == 'on'
    return migrate_domain_datastore(vmid, destination_ds_id, live_migration)
def snapshot(parameters):
    """
    Take a snapshot of a VM.

    For scheduled snapshot types (anything other than SNAPSHOT_USER) any
    existing snapshot of the same type is deleted first, so at most one
    snapshot per scheduled type is kept. The VM must respond to ping.
    """
    logger.debug("Inside snapshot() function")
    vm_id = parameters['vm_id']
    snapshot_type = parameters['snapshot_type']
    try:
        vm_details = current.db.vm_data[vm_id]
        if not is_pingable(str(vm_details.private_ip.private_ip)):
            raise Exception("Unable to ping VM before snapshoting: %s" % (vm_details.private_ip.private_ip))
        logger.debug("VM is pingable. Starting to start with snapshotting...")
        if snapshot_type != current.SNAPSHOT_USER:
            # Keep at most one snapshot per scheduled type: drop old ones.
            existing = current.db((current.db.snapshot.vm_id == vm_id) & (current.db.snapshot.type == snapshot_type)).select()
            for old_snapshot in existing:
                logger.debug(old_snapshot)
                delete_snapshot({'vm_id':vm_id, 'snapshot_id':old_snapshot.id})
        snapshot_name = get_datetime().strftime("%I:%M%p_%B%d,%Y")
        domain = getVirshDomain(vm_details)
        domain.snapshotCreateXML("<domainsnapshot><name>%s</name></domainsnapshot>" % (snapshot_name), 0)
        current.db.snapshot.insert(vm_id = vm_id, datastore_id = vm_details.datastore_id, snapshot_name = snapshot_name, type = snapshot_type)
        message = "Snapshotted successfully."
        logger.debug("Task Status: SUCCESS Message: %s " % message)
        return (current.TASK_QUEUE_STATUS_SUCCESS, message)
    except:
        logger.debug("Task Status: FAILED Error: %s " % log_exception())
        return (current.TASK_QUEUE_STATUS_FAILED, log_exception())
def revert(parameters):
    """
    Revert a VM to one of its snapshots.

    Args:
        parameters (dict): Must contain 'vm_id' and 'snapshot_id'.

    Returns:
        tuple: (task queue status, message or exception log).
    """
    logger.debug("Inside revert snapshot() function")
    vm_id = parameters['vm_id']
    snapshotid = parameters['snapshot_id']
    vm_details = current.db.vm_data[vm_id]
    try:
        domain = getVirshDomain(vm_details)
        # Resolve the database row to the libvirt snapshot object.
        snapshot_row = current.db(current.db.snapshot.id == snapshotid).select().first()
        target_snapshot = domain.snapshotLookupByName(snapshot_row['snapshot_name'], 0)
        domain.revertToSnapshot(target_snapshot, 0)
        message = "Reverted to snapshot successfully."
        logger.debug("Task Status: SUCCESS Message: %s " % message)
        return (current.TASK_QUEUE_STATUS_SUCCESS, message)
    except:
        logger.debug("Task Status: FAILED Error: %s " % log_exception())
        return (current.TASK_QUEUE_STATUS_FAILED, log_exception())
def delete_snapshot(parameters):
    """
    Delete a VM snapshot from both libvirt and the database.

    If the snapshot no longer exists on the hypervisor it is treated as
    already gone and only the database row is removed.

    Args:
        parameters (dict): Must contain 'vm_id' and 'snapshot_id'.

    Returns:
        tuple: (task queue status, message or exception log).
    """
    logger.debug("Inside delete snapshot() function")
    vm_id = parameters['vm_id']
    snapshotid = parameters['snapshot_id']
    vm_details = current.db.vm_data[vm_id]
    logger.debug(str(vm_details))
    try:
        domain = getVirshDomain(vm_details)
        snapshot_name = current.db(current.db.snapshot.id == snapshotid).select().first()['snapshot_name']
        found_snapshot = None
        try:
            found_snapshot = domain.snapshotLookupByName(snapshot_name, 0)
        except libvirtError:
            # Already absent on the hypervisor; nothing to delete there.
            logger.debug("Snapshot %s not found" %(snapshot_name))
        if found_snapshot is not None:
            found_snapshot.delete(0)
        message = "Deleted snapshot successfully."
        logger.debug(message)
        current.db(current.db.snapshot.id == snapshotid).delete()
        logger.debug("Task Status: SUCCESS Message: %s " % message)
        return (current.TASK_QUEUE_STATUS_SUCCESS, message)
    except:
        logger.debug("Task Status: FAILED Error: %s " % log_exception())
        return (current.TASK_QUEUE_STATUS_FAILED, log_exception())
def update_security_domain(vm_details, security_domain_id, xmlDesc=None):
    """
    Move a VM to a different security domain (VLAN).

    Allocates a fresh private IP/MAC from the target security domain,
    rewrites the bridge interface's MAC address and VLAN tag in the
    domain XML, re-points any public-IP NAT mapping at the new private
    IP, and records the change in vm_data.

    Args:
        vm_details: vm_data row of the VM.
        security_domain_id: id of the target security domain.
        xmlDesc (str): current libvirt domain XML. Despite the None
            default, a real XML string is required — etree.fromstring
            would fail on None. TODO confirm all callers pass it.

    Returns:
        str: the updated domain XML, ready to be passed to defineXML().
    """
    # fetch new private IP from db from given security domain
    private_ip_info = _get_private_ip_mac(security_domain_id)
    # update vm config to add new mac address.
    root = etree.fromstring(xmlDesc)
    mac_elem = root.find("devices/interface[@type='bridge']/mac")
    mac_elem.set('address', private_ip_info.mac_addr)
    vlan_tag_elem = root.find("devices/interface[@type='bridge']/vlan/tag")
    vlan_tag_elem.set('id', private_ip_info.vlan.vlan_tag)
    # update NAT IP mapping, if public IP present
    if vm_details.public_ip:
        remove_mapping(vm_details.public_ip.public_ip, vm_details.private_ip.private_ip)
        create_mapping(vm_details.public_ip.public_ip, private_ip_info.private_ip)
    # update vm_data
    current.db(current.db.vm_data.id == vm_details.id).update(security_domain = security_domain_id,
                                                              private_ip = private_ip_info.id)
    return etree.tostring(root)
def edit_vm_config(parameters):
    """
    Edit a VM's configuration: vCPUs, RAM, public IP and security domain.

    Only the keys present in `parameters` are applied. CPU and RAM
    changes are written to the persistent config (VIR_DOMAIN_AFFECT_CONFIG),
    so they take effect on the next boot. Changing the security domain
    rewrites the domain XML and reboots the VM if it is running.

    Args:
        parameters (dict): 'vm_id' plus any of 'vcpus', 'ram',
            'public_ip' (truthy to attach, falsy to detach),
            'security_domain'.

    Returns:
        tuple: (task queue status, message or exception log).
    """
    logger.debug("Inside edit vm config() function")
    vm_id = parameters['vm_id']
    vm_details = current.db.vm_data[vm_id]
    message = ""
    try:
        connection_object, domain = getVirshDomainConn(vm_details)
        if 'vcpus' in parameters:
            new_vcpus = int(parameters['vcpus'])
            # Raise the maximum first, then set the persistent value.
            domain.setVcpusFlags(new_vcpus, VIR_DOMAIN_VCPU_MAXIMUM)
            domain.setVcpusFlags(new_vcpus, VIR_DOMAIN_AFFECT_CONFIG)
            message += "Edited vCPU successfully."
            current.db(current.db.vm_data.id == vm_id).update(vCPU = new_vcpus)
        if 'ram' in parameters:
            # setMemoryFlags takes KiB; 'ram' is presumably in MB given
            # the *1024 conversion — TODO confirm units with callers.
            new_ram = int(parameters['ram']) * 1024
            logger.debug(str(new_ram))
            domain.setMemoryFlags(new_ram, VIR_DOMAIN_MEM_MAXIMUM)
            domain.setMemoryFlags(new_ram, VIR_DOMAIN_AFFECT_CONFIG)
            message += " And edited RAM successfully."
            current.db(current.db.vm_data.id == vm_id).update(RAM = int(parameters['ram']))
        if 'public_ip' in parameters:
            enable_public_ip = parameters['public_ip']
            if enable_public_ip:
                public_ip_pool = _choose_random_public_ip()
                if public_ip_pool:
                    create_mapping(public_ip_pool.public_ip, vm_details.private_ip.private_ip)
                    current.db.vm_data[vm_id] = dict(public_ip=public_ip_pool.id)
                    message += "Edited Public IP successfully."
                else:
                    raise Exception("Available Public IPs are exhausted.")
            else:
                # Detach: drop the NAT mapping and clear the reference.
                remove_mapping(vm_details.public_ip.public_ip, vm_details.private_ip.private_ip)
                current.db.vm_data[vm_id] = dict(public_ip = None)
        if 'security_domain' in parameters:
            logger.debug('Updating security domain')
            xmlfile = update_security_domain(vm_details, parameters['security_domain'], domain.XMLDesc(0))
            domain = connection_object.defineXML(xmlfile)
            if domain.isActive():
                # Reboot so the new interface config takes effect.
                domain.reboot(0)
            message += "Edited security domain successfully"
        connection_object.close()
        logger.debug("Task Status: SUCCESS Message: %s " % message)
        return (current.TASK_QUEUE_STATUS_SUCCESS, message)
    except:
        logger.debug("Task Status: FAILED Error: %s " % log_exception())
        return (current.TASK_QUEUE_STATUS_FAILED, log_exception())
def _get_clone_properties(vm_details, cloned_vm_details, vm_properties):
    """
    Prepare datastore, network and disk-layout properties for cloning a VM.

    Fills `vm_properties` in place (datastore, security domain, MAC/IP/VNC,
    template, host), creates the clone's image and extra-disk directories,
    and inserts attached_disks rows mirroring the parent's disks.

    Args:
        vm_details: vm_data row of the parent VM.
        cloned_vm_details: vm_data row of the clone being created.
        vm_properties (dict): mutated in place with the chosen properties.

    Returns:
        str: the accumulated ' --file ...' arguments for virt-clone.

    Raises:
        Exception: if a directory with the clone's name already exists.
    """
    datastore = _choose_datastore()
    vm_properties['datastore'] = datastore
    logger.debug("Datastore selected is: " + str(datastore))
    vm_properties['security_domain'] = vm_details.security_domain
    vm_properties['public_ip_req'] = False
    # Finds mac address, ip address and vnc port for the cloned vm
    _choose_mac_ip_vncport(vm_properties)
    logger.debug("MAC is : " + str(vm_properties['mac_addr']) + " IP is : " + str(vm_properties['private_ip']) + \
                 " VNCPORT is : " + str(vm_properties['vnc_port']))
    # Template and host of parent vm
    vm_properties['template'] = current.db(current.db.template.id == vm_details.template_id).select()[0]
    vm_properties['vm_host_details'] = current.db.host[vm_details.host_id]
    vm_properties['host'] = vm_properties['vm_host_details'].id
    # Creates a directory for the cloned vm
    logger.debug("Creating directory for cloned vm...")
    cloned_vm_directory_path = datastore.system_mount_point + '/' + get_constant('vms') + '/' + cloned_vm_details.vm_identity
    if not os.path.exists (cloned_vm_directory_path):
        os.makedirs(cloned_vm_directory_path)
        clone_file_parameters = ' --file ' + cloned_vm_directory_path + '/' + cloned_vm_details.vm_identity + '.qcow2'
    else:
        raise Exception("Directory with same name as vmname already exists.")
    # Creates a folder for additional disks of the cloned vm
    vm = current.db(current.db.vm_data.vm_identity == vm_details.vm_identity).select().first()
    disk_details_of_cloning_vm = current.db(current.db.attached_disks.vm_id == vm.id).select(orderby=current.db.attached_disks.attached_disk_name)
    logger.debug(disk_details_of_cloning_vm)
    already_attached_disks = len(disk_details_of_cloning_vm)
    cloned_vm_extra_disks_directory = datastore.system_mount_point + '/' + get_constant('extra_disks_dir') + '/' + \
                                      datastore.ds_name + '/' + cloned_vm_details.vm_identity
    if already_attached_disks > 0:
        if not os.path.exists (cloned_vm_extra_disks_directory):
            logger.debug("Making Directory")
            os.makedirs(cloned_vm_extra_disks_directory)
        # Mirror each of the parent's extra disks onto the clone, naming
        # them <clone>_disk1.qcow2, <clone>_disk2.qcow2, ...
        count = already_attached_disks
        while already_attached_disks > 0:
            disk_name = cloned_vm_details.vm_identity + '_disk' + str(count - already_attached_disks + 1) + '.qcow2'
            clone_file_parameters += ' --file ' + cloned_vm_extra_disks_directory + '/' + disk_name
            current.db.attached_disks.insert(vm_id = cloned_vm_details.id,
                                             datastore_id = datastore.id ,
                                             attached_disk_name = disk_name,
                                             capacity = disk_details_of_cloning_vm[count - already_attached_disks].capacity)
            already_attached_disks -= 1
    return (clone_file_parameters)
def migrate_clone_to_new_host(vm_details, cloned_vm_details, new_host_id_for_cloned_vm,vm_properties):
    """
    Migrate a freshly cloned (shut-off) VM to a newly selected host.

    Args:
        vm_details: vm_data row of the parent VM (its host currently
            holds the clone).
        cloned_vm_details: vm_data row of the clone; host_id is updated
            on success.
        new_host_id_for_cloned_vm: id of the destination host.
        vm_properties (dict): mutated in place with the new host id.

    Returns:
        bool: True on successful migration, False on a libvirt error.
    """
    try:
        new_host_ip_for_cloned_vm = current.db.host[new_host_id_for_cloned_vm].host_ip.private_ip
        logger.debug("New host ip for cloned vm is: " + str(new_host_ip_for_cloned_vm))
        # Offline migration: the clone is shut off, so no live/tunnelled flags.
        flags = VIR_MIGRATE_PEER2PEER|VIR_MIGRATE_PERSIST_DEST|VIR_MIGRATE_UNDEFINE_SOURCE|VIR_MIGRATE_OFFLINE|VIR_MIGRATE_UNSAFE
        logger.debug("Clone currently on: " + str(vm_details.host_id.host_ip))
        (current_host_connection_object, domain) = getVirshDomainConn(None, vm_details.host_id.host_ip, cloned_vm_details.vm_identity)
        logger.debug("Starting to migrate cloned vm to host " + str(new_host_ip_for_cloned_vm))
        domain.migrateToURI("qemu+ssh://root@" + new_host_ip_for_cloned_vm + "/system", flags , None, 0)
        current_host_connection_object.close()
        logger.debug("Successfully migrated cloned vm to host " + str(new_host_ip_for_cloned_vm))
        cloned_vm_details.update_record(host_id = new_host_id_for_cloned_vm)
        vm_properties['host'] = new_host_id_for_cloned_vm
        return True
    # Fix: 'except X as e' replaces the Python-2-only 'except X, e' form;
    # it is valid on Python 2.6+ and Python 3.
    except libvirt.libvirtError as e:
        message = e.get_error_message()
        logger.debug("Error: " + message)
        return False
def clone(vmid):
    """
    Create a clone of a shut-off VM.

    The clone record (vm_data row `vmid`) must reference its parent via
    parent_id. The parent must be shut off. Host capacity is checked
    against a 200% RAM/CPU overcommitment limit before running
    virt-clone on the parent's host; afterwards an attempt is made to
    move the clone to a freshly selected host.

    Args:
        vmid: id of the clone's vm_data row.

    Returns:
        tuple: (task queue status, message or exception log).
    """
    vm_properties = {}
    logger.debug("Inside clone() function")
    cloned_vm_details = current.db.vm_data[vmid]
    vm_details = current.db(current.db.vm_data.id == cloned_vm_details.parent_id).select().first()
    try:
        domain = getVirshDomain(vm_details)
        # info()[0] is the domain state; cloning requires a shut-off source.
        if domain.info()[0] != VIR_DOMAIN_SHUTOFF:
            raise Exception("VM is not shutoff. Check vm status.")
        clone_file_parameters = _get_clone_properties(vm_details, cloned_vm_details, vm_properties)
        logger.debug("cloned vm properties after clone_file_parameters" + str(vm_properties))
        host = vm_properties['vm_host_details']
        logger.debug("host is: " + str(host))
        logger.debug("host details are: " + str(host))
        (used_ram, used_cpu) = host_resources_used(host.id)
        logger.debug("uram: " + str(used_ram) + " used_cpu: " + str(used_cpu) + " host ram: " + str(host.RAM) +" host cpu: " + str(host.CPUs))
        # Allow up to 200% overcommitment of host RAM (converted to MB) and CPUs.
        host_ram_after_200_percent_overcommitment = math.floor((host.RAM * 1024) * 2)
        host_cpu_after_200_percent_overcommitment = math.floor(host.CPUs * 2)
        logger.debug("host_ram_after_200_percent_overcommitment in MB " + str(host_ram_after_200_percent_overcommitment))
        logger.debug("host_cpu_after_200_percent_overcommitment " + str(host_cpu_after_200_percent_overcommitment))
        logger.debug("Available RAM on host: %s, Requested RAM: %s" % ((host_ram_after_200_percent_overcommitment - used_ram), vm_details.RAM))
        logger.debug("Available CPUs on host: %s, Requested CPU: %s " % ((host_cpu_after_200_percent_overcommitment - used_cpu), vm_details.vCPU))
        if((( host_ram_after_200_percent_overcommitment - used_ram) >= vm_details.RAM) and ((host_cpu_after_200_percent_overcommitment - used_cpu) >= vm_details.vCPU) and (vm_details.vCPU <= host.CPUs)):
            clone_command = "virt-clone --original " + vm_details.vm_identity + " --name " + cloned_vm_details.vm_identity + \
                            clone_file_parameters + " --mac " + vm_properties['mac_addr']
            command_output = execute_remote_cmd(vm_details.host_id.host_ip.private_ip, 'root', clone_command, None, True)
            logger.debug(command_output)
            logger.debug("Updating db after cloning")
            update_db_after_vm_installation(cloned_vm_details, vm_properties, parent_id = vm_details.id)
            message = "Cloned successfully. "
            # Best-effort relocation of the clone; failures here do not
            # fail the overall clone task.
            try:
                new_host_id_for_cloned_vm = find_new_host(cloned_vm_details.RAM, cloned_vm_details.vCPU)
                if new_host_id_for_cloned_vm != host.id:
                    if migrate_clone_to_new_host(vm_details, cloned_vm_details, new_host_id_for_cloned_vm,vm_properties):
                        message += "Found new host and migrated successfully."
                    else:
                        message += "Found new host but not migrated successfully."
                else:
                    message += "New host selected to migrate cloned vm is same as the host on which it currently resides."
            except:
                message += "Could not find host to migrate cloned vm."
            logger.debug("Task Status: SUCCESS Message: %s " % message)
            return (current.TASK_QUEUE_STATUS_SUCCESS, message)
        else:
            raise Exception("Host resources exhausted. Migrate the host vms and then try.")
    except:
        # Roll back any IP/MAC/directory reservations made for the clone.
        _free_vm_properties(cloned_vm_details, vm_properties)
        logger.debug("Task Status: FAILED Error: %s " % log_exception())
        return (current.TASK_QUEUE_STATUS_FAILED, log_exception())
def attach_extra_disk(parameters):
    """
    Attach an extra disk of the requested size to a VM.

    Args:
        parameters (dict): Must contain 'vm_id' and 'disk_size'.

    Returns:
        tuple: (task queue status, message or exception log).
    """
    logger.debug("Inside attach extra disk() function")
    vmid = parameters['vm_id']
    disk_size = parameters['disk_size']
    vm_details = current.db.vm_data[vmid]
    logger.debug(str(vm_details))
    try:
        if (serve_extra_disk_request(vm_details, disk_size, vm_details.host_id.host_ip.private_ip)):
            # Track the total extra HDD allocated to this VM.
            current.db(current.db.vm_data.id == vmid).update(extra_HDD = vm_details.extra_HDD + disk_size)
            message = "Attached extra disk successfully"
            # Consistency: log the task status like the sibling task functions.
            logger.debug("Task Status: SUCCESS Message: %s " % message)
            return (current.TASK_QUEUE_STATUS_SUCCESS, message)
        else:
            message = " Your request for additional HDD could not be completed at this moment. Check logs."
            # Bug fix: this branch returns FAILED, so log it as FAILED
            # (it previously logged "Task Status: SUCCESS").
            logger.debug("Task Status: FAILED Message: %s " % message)
            return (current.TASK_QUEUE_STATUS_FAILED, message)
    except:
        logger.debug("Task Status: FAILED Error: %s " % log_exception())
        return (current.TASK_QUEUE_STATUS_FAILED, log_exception())
def get_vm_image_location(datastore_id, vm_identity):
    """
    Build the path of a VM's qcow2 image and report whether it exists.

    Args:
        datastore_id: id of the datastore holding the image.
        vm_identity: unique VM name (used as directory and file name).

    Returns:
        tuple: (image path (str), image exists (bool)).
    """
    datastore = current.db.datastore[datastore_id]
    vm_directory_path = datastore.system_mount_point + '/' + get_constant('vms') + '/' + vm_identity
    vm_image_name = vm_directory_path + '/' + vm_identity + '.qcow2'
    # os.path.exists already returns a bool; 'True if ... else False' was redundant.
    image_present = os.path.exists(vm_image_name)
    return (vm_image_name, image_present)
def get_extra_disk_location(datastore_id, vm_identity, disk_name, get_disk_size=False):
    """
    Build the path of an extra-disk qcow2 image for a VM.

    Args:
        datastore_id: id of the datastore holding the disk.
        vm_identity: unique VM name (directory under the extra-disks dir).
        disk_name: disk file name, with or without the '.qcow2' suffix.
        get_disk_size (bool): when True, also query the virtual size.

    Returns:
        tuple: (disk path or None, disk exists (bool), size in GB (int)).
    """
    datastore = current.db.datastore[datastore_id]
    if datastore:
        vm_extra_disks_directory_path = datastore.system_mount_point + '/' + get_constant('extra_disks_dir') + '/' + \
                                        datastore.ds_name + '/' + vm_identity
        ext = '' if disk_name.endswith('.qcow2') else '.qcow2'
        disk_image_path = vm_extra_disks_directory_path + '/' + disk_name + ext
        # os.path.exists already returns a bool.
        image_present = os.path.exists(disk_image_path)
        disk_size = 0
        # Fix: use the boolean 'and' rather than bitwise '&' to combine bools.
        if image_present and get_disk_size:
            command = "qemu-img info " + disk_image_path + " | grep 'virtual size'"
            ret = os.popen(command).read() # Returns e.g. virtual size: 40G (42949672960 bytes)
            # NOTE(review): assumes qemu-img always reports the size in 'G'
            # units — parsing fails otherwise. TODO confirm.
            disk_size = int(ret[ret.index(':')+1:ret.index('G ')].strip())
        return (disk_image_path, image_present, disk_size)
    else:
        return (None, False, 0)
def launch_existing_vm_image(vm_details):
    """
    Launch a VM from an already existing disk image.

    - Reuses the VM's previously allocated private IP/MAC when present,
      otherwise allocates fresh network properties and a VNC port.
    - Launches the domain on a newly selected host.
    - Re-attaches any extra disks recorded for the VM and refreshes their
      capacities in the database.
    - Re-creates the public-to-private NAT mapping when required.

    Args:
        vm_details: vm_data row of the VM to launch (mutated: extra_HDD
            is incremented by the size of each re-attached disk).
    """
    logger.debug('Launch existing VM image')
    vm_properties = {}
    vm_properties['ram'] = vm_details.RAM
    vm_properties['vcpus'] = vm_details.vCPU
    vm_properties['security_domain'] = vm_details.security_domain
    #If Private IP was already chosen previously and DHCP entry is done
    if vm_details.private_ip != None:
        private_ip_info = current.db.private_ip_pool[vm_details.private_ip]
        if private_ip_info:
            vm_properties['private_ip'] = private_ip_info.private_ip
            vm_properties['mac_addr'] = private_ip_info.mac_addr
            vm_properties['vlan_name'] = private_ip_info.vlan.name
            vm_properties['vlan_tag'] = private_ip_info.vlan.vlan_tag
    if vm_details.public_ip == None:
        vm_properties['public_ip_req'] = False
    else:
        vm_properties['public_ip_req'] = True
        if vm_details.public_ip.is_active:
            vm_properties['public_ip'] = vm_details.public_ip.public_ip
    # Fills in anything not already reused above (MAC, IP, VNC port).
    _choose_mac_ip_vncport(vm_properties)
    vm_properties['template'] = current.db.template[vm_details.template_id]
    vm_properties['datastore'] = current.db.datastore[vm_details.datastore_id]
    vm_properties['host'] = find_new_host(vm_details.RAM, vm_details.vCPU)
    (vm_image_name, image_present) = get_vm_image_location(vm_details.datastore_id, vm_details.vm_identity)
    if image_present:
        launch_vm_on_host(vm_details, vm_image_name, vm_properties)
        #Check if extra disk needs to be attached
        attached_disks = current.db((current.db.attached_disks.vm_id == vm_details.id)).select()
        if attached_disks:
            #Extra disk to be attached to the VM
            host_ip = current.db.host[vm_properties['host']].host_ip.private_ip
            disk_counter = 1
            for attached_disk in attached_disks:
                disk_size = attach_disk(vm_details, attached_disk.attached_disk_name, host_ip, disk_counter, True)
                # Bug fix: web2py DAL queries must be combined with '&' —
                # the Python 'and' operator evaluated to only the second
                # condition, so the vm_id filter was silently dropped and
                # same-named disks of other VMs could be updated.
                current.db((current.db.attached_disks.vm_id == attached_disk.vm_id) &
                           (current.db.attached_disks.attached_disk_name == attached_disk.attached_disk_name)
                           ).update(capacity = disk_size)
                vm_details.extra_HDD += disk_size
                disk_counter += 1
        #Create mapping of Private_IP and Public_IP
        if vm_properties['public_ip_req']:
            create_mapping(vm_properties['public_ip'], vm_properties['private_ip'])
        update_db_after_vm_installation(vm_details, vm_properties)
def save_vm_as_template(parameters):
    """
    Save a VM's disk image as a reusable user template.

    create_new_template() copies the image; when a template for this VM
    already existed (create_new_template moves it aside as *_old), only
    the stale *_old file is removed — the template row already exists.
    Otherwise a new template row is inserted, owned by every user mapped
    to the VM, and referenced from vm_data.saved_template.

    Args:
        parameters (dict): Must contain 'vm_id'.

    Returns:
        tuple: (task queue status, message or exception log).
    """
    logger.debug("Inside save_as_template() function")
    vm_id = parameters['vm_id']
    vm_data = current.db.vm_data[vm_id]
    user_list = []
    # NOTE(review): vm_details is the same row as vm_data fetched twice —
    # presumably redundant; verify before consolidating.
    vm_details = current.db.vm_data[vm_id]
    logger.debug(str(vm_details))
    try:
        (is_templated_created, new_template, old_template) = create_new_template(vm_details)
        if (is_templated_created):
            #remove old template
            if os.path.exists (old_template):
                os.remove(old_template)
            else:
                # First template for this VM: create its database row,
                # owned by every user currently mapped to the VM.
                for user in current.db(current.db.user_vm_map.vm_id == vm_id).select(current.db.user_vm_map.user_id):
                    user_list.append(user.user_id)
                new_template_id = current.db.template.insert(name = vm_data.vm_name + "_template" ,
                                                             os = vm_data.template_id.os ,
                                                             os_name = vm_data.template_id.os_name ,
                                                             os_version = vm_data.template_id.os_version ,
                                                             os_type = vm_data.template_id.os_type ,
                                                             arch = vm_data.template_id.arch ,
                                                             hdd = vm_data.template_id.hdd ,
                                                             hdfile = new_template ,
                                                             type = vm_data.template_id.type ,
                                                             tag = vm_data.vm_name + "_template" ,
                                                             datastore_id = vm_data.template_id.datastore_id,
                                                             owner = user_list)
                current.db.vm_data[vm_id] = dict(saved_template = new_template_id)
            message = "User Template saved successfully"
            logger.debug(message)
            return (current.TASK_QUEUE_STATUS_SUCCESS, message)
        else:
            message = " Vm Template not saved "
            logger.debug("Task Status: %s " % message)
            return (current.TASK_QUEUE_STATUS_FAILED, message)
    except:
        logger.debug("Task Status: FAILED Error: %s " % log_exception())
        return (current.TASK_QUEUE_STATUS_FAILED, log_exception())
def delete_template(parameters):
    """
    Delete a saved user template.

    Removes the template image file from disk, clears the saved_template
    back-reference on the VM it was created from, and drops the template
    row from the database.

    Args:
        parameters (dict): Must contain 'template_id'.

    Returns:
        tuple: (task queue status, empty message).
    """
    logger.debug("Inside delete_template() function")
    template_id = parameters['template_id']
    template_path = current.db.template[template_id]["hdfile"]
    # Remove the template image from disk if it is still there.
    if os.path.exists(template_path):
        os.remove(template_path)
    # set value in db also
    parent_vm = current.db.vm_data(saved_template = template_id)
    if parent_vm:
        parent_vm.update_record(saved_template = None)
    del current.db.template[template_id]
    return (current.TASK_QUEUE_STATUS_SUCCESS, "")
def create_new_template(vm_details):
"""
Create a new template from the VM image
- Create template directory
- Copy VM Image to directory(Live copy if VM is running)
- Update database to define new template
"""
try:
(connection_object, domain) = getVirshDomainConn(vm_details)
xmlfile = domain.XMLDesc(0)
logger.debug("connection object created")
datastore = _choose_datastore()
logger.debug(datastore)
new_template_dir = datastore.system_mount_point + '/' +get_constant('templates_dir') + '/' + vm_details.requester_id.first_name
logger.debug("Creating user template directory...")
if not os.path.exists (new_template_dir):
os.makedirs(new_template_dir)
template = new_template_dir + '/' + vm_details.vm_identity + '_template.qcow2'
template_location = '/' + vm_details.requester_id.first_name + '/' + vm_details.vm_identity + '_template.qcow2'
old_template = new_template_dir + '/' + vm_details.vm_identity + '_template_old.qcow2'
if os.path.exists (template):
# move template to some other path
logger.debug("move template to some other file")
shutil.move(template, old_template)
logger.debug("template " + template)
current_disk_path = vm_details.datastore_id.system_mount_point + get_constant('vms') + '/' + vm_details.vm_identity
current_disk_file = current_disk_path + '/' + vm_details.vm_identity + '.qcow2'
if (vm_details.status == current.VM_STATUS_RUNNING or vm_details.status == current.VM_STATUS_SUSPENDED):
logger.debug("vm is active in db")
if domain.isActive():
domain.undefine()
root = etree.fromstring(xmlfile)
target_elem = root.find("devices/disk/target")
target_disk = target_elem.get('dev')
flag = VIR_DOMAIN_BLOCK_REBASE_SHALLOW | VIR_DOMAIN_BLOCK_REBASE_COPY
domain.blockRebase(target_disk, template, 0, flag)
block_info_list = domain.blockJobInfo(current_disk_file,0)
while(block_info_list['end'] != block_info_list['cur']):
logger.debug("time to sleep")
time.sleep(60)
block_info_list = domain.blockJobInfo(current_disk_file,0)
domain.blockJobAbort(current_disk_file)
domain = connection_object.defineXML(xmlfile)
connection_object.close()
return (True, template_location, old_template)
else:
logger.debug("domain is not running on host")
return (False, template_location, old_template)
elif(vm_details.status == current.VM_STATUS_SHUTDOWN):
if domain.isActive():
logger.debug("Domain is still active...Please try again after some time!!!")
return (False, template_location, old_template)
else:
logger.debug("copying")
copy_command = "cp "+current_disk_file+" "+template
logger.debug("copy_command"+copy_command)
#rc = os.system("cp %s %s" % (current_disk_file, template))
logger.debug("copy command running on " + vm_details.host_id.host_ip.private_ip + " host")
command_output = execute_remote_cmd(vm_details.host_id.host_ip.private_ip, 'root', copy_command)
logger.debug(command_output)
return (True, template_location, old_template)
except:
if not domain.isPersistent():
domain = connection_object.defineXML(xmlfile)
connection_object.close()
logger.debug("Task Status: FAILED Error: %s " % log_exception())
return (False, template_location, old_template)
| 44.789067 | 237 | 0.655399 |
1c051e0c55fbe3f232891ef1ecc26d26fcbe892f | 1,152 | py | Python | third_party/webrtc/src/chromium/src/build/android/devil/android/sdk/aapt.py | bopopescu/webrtc-streaming-node | 727a441204344ff596401b0253caac372b714d91 | [
"MIT"
] | 8 | 2016-02-08T11:59:31.000Z | 2020-05-31T15:19:54.000Z | third_party/webrtc/src/chromium/src/build/android/devil/android/sdk/aapt.py | bopopescu/webrtc-streaming-node | 727a441204344ff596401b0253caac372b714d91 | [
"MIT"
] | 1 | 2016-01-29T00:54:49.000Z | 2016-01-29T00:54:49.000Z | third_party/webrtc/src/chromium/src/build/android/devil/android/sdk/aapt.py | bopopescu/webrtc-streaming-node | 727a441204344ff596401b0253caac372b714d91 | [
"MIT"
] | 7 | 2016-02-09T09:28:14.000Z | 2020-07-25T19:03:36.000Z | # Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""This module wraps the Android Asset Packaging Tool."""
import os
from devil.utils import cmd_helper
from pylib import constants
_AAPT_PATH = os.path.join(constants.ANDROID_SDK_TOOLS, 'aapt')
def _RunAaptCmd(args):
"""Runs an aapt command.
Args:
args: A list of arguments for aapt.
Returns:
The output of the command.
"""
cmd = [_AAPT_PATH] + args
status, output = cmd_helper.GetCmdStatusAndOutput(cmd)
if status != 0:
raise Exception('Failed running aapt command: "%s" with output "%s".' %
(' '.join(cmd), output))
return output
def Dump(what, apk, assets=None):
"""Returns the output of the aapt dump command.
Args:
what: What you want to dump.
apk: Path to apk you want to dump information for.
assets: List of assets in apk you want to dump information for.
"""
assets = assets or []
if isinstance(assets, basestring):
assets = [assets]
return _RunAaptCmd(['dump', what, apk] + assets).splitlines()
| 27.428571 | 75 | 0.688368 |
1c0586c2c8dd465a0ae7f5e9116c1bf2f5320f2f | 46 | py | Python | examples/Tests/Misc/Resources/PythonFile/basic.py | esayui/mworks | 0522e5afc1e30fdbf1e67cedd196ee50f7924499 | [
"MIT"
] | null | null | null | examples/Tests/Misc/Resources/PythonFile/basic.py | esayui/mworks | 0522e5afc1e30fdbf1e67cedd196ee50f7924499 | [
"MIT"
] | null | null | null | examples/Tests/Misc/Resources/PythonFile/basic.py | esayui/mworks | 0522e5afc1e30fdbf1e67cedd196ee50f7924499 | [
"MIT"
] | null | null | null | setvar('nsamples', getvar('a') + getvar('b'))
| 23 | 45 | 0.608696 |
1c0727618b7254d68a22ae858de032e6c20ddbc5 | 2,218 | py | Python | quacc/recipes/psi4/core.py | arosen93/HT-ASE | a76542e7a2bc5bf6e7382d8f1387374eb2abc713 | [
"BSD-3-Clause-LBNL"
] | 9 | 2022-02-08T08:31:30.000Z | 2022-03-30T21:37:35.000Z | quacc/recipes/psi4/core.py | arosen93/HT-ASE | a76542e7a2bc5bf6e7382d8f1387374eb2abc713 | [
"BSD-3-Clause-LBNL"
] | 5 | 2022-02-02T21:47:59.000Z | 2022-03-18T21:28:52.000Z | quacc/recipes/psi4/core.py | arosen93/HT-ASE | a76542e7a2bc5bf6e7382d8f1387374eb2abc713 | [
"BSD-3-Clause-LBNL"
] | 3 | 2022-02-23T12:00:57.000Z | 2022-03-24T23:54:22.000Z | """Core recipes for Psi4"""
from __future__ import annotations
from dataclasses import dataclass
from typing import Any, Dict
from ase.atoms import Atoms
from ase.calculators.psi4 import Psi4
from jobflow import Maker, job
from monty.dev import requires
try:
import psi4
except:
psi4 = None
from quacc.schemas.calc import summarize_run
from quacc.util.basics import merge_dicts
from quacc.util.calc import run_calc
| 25.494253 | 84 | 0.587917 |
1c073d575249e6f524c3e4fa1ac84edb0ff05cc7 | 984 | py | Python | UAS/UAS 11 & 12/main.py | Archedar/UAS | 3237d9304026340acc93c8f36b358578dc0ae66f | [
"BSD-Source-Code"
] | null | null | null | UAS/UAS 11 & 12/main.py | Archedar/UAS | 3237d9304026340acc93c8f36b358578dc0ae66f | [
"BSD-Source-Code"
] | null | null | null | UAS/UAS 11 & 12/main.py | Archedar/UAS | 3237d9304026340acc93c8f36b358578dc0ae66f | [
"BSD-Source-Code"
] | null | null | null | #Main Program
from Class import Barang
import Menu
histori = list()
listBarang = [
Barang('Rinso', 5000, 20),
Barang('Sabun', 3000, 20),
Barang('Pulpen', 2500, 20),
Barang('Tisu', 10000, 20),
Barang('Penggaris', 1000, 20)
]
while True:
print('''
Menu
1. Tampilkan Barang
2. Tambahkan Barang
3. Tambah Stock Barang
4. Hapus Barang
5. Cari Barang Berdasarkan Keyword
6. Hitung Barang Belanjaan
7. Histori Keluar Masuk Barang
0. Keluar Program
''')
choice = input('Masukan No Menu: ')
if choice == '1':
Menu.menu1(listBarang)
elif choice == '2':
Menu.menu2(listBarang, histori)
elif choice == '3':
Menu.menu3(listBarang, histori)
elif choice == '4':
Menu.menu4(listBarang, histori)
elif choice == '5':
Menu.menu5(listBarang)
elif choice == '6':
Menu.menu6(listBarang, histori)
elif choice == '7':
Menu.menu7(histori)
elif choice == '0':
print('Keluar Program')
break
else:
print('Invalid Input!') | 20.93617 | 37 | 0.645325 |
1c075f34dca283714195a979ceda054e43bd4f75 | 13,010 | py | Python | original/baselines/train/JointE+ONE.py | thunlp/JointNRE | 29e2070910d0940bf4d32a8b8c97800bceff98fb | [
"MIT"
] | 186 | 2018-01-29T09:33:59.000Z | 2022-03-17T08:20:44.000Z | original/baselines/train/JointE+ONE.py | thunlp/JointNRE | 29e2070910d0940bf4d32a8b8c97800bceff98fb | [
"MIT"
] | 19 | 2018-03-01T01:55:08.000Z | 2022-02-17T03:38:21.000Z | original/baselines/train/JointE+ONE.py | thunlp/JointNRE | 29e2070910d0940bf4d32a8b8c97800bceff98fb | [
"MIT"
] | 36 | 2018-02-02T06:29:29.000Z | 2021-01-22T08:36:00.000Z | #coding:utf-8
import numpy as np
import tensorflow as tf
import os
import time
import datetime
import ctypes
import threading
import json
ll1 = ctypes.cdll.LoadLibrary
lib_cnn = ll1("./init_cnn.so")
ll2 = ctypes.cdll.LoadLibrary
lib_kg = ll2("./init_know.so")
bags_sum = 0.0
bags_hit_NA = 0.0
sum_NA = 0.0
sum_fNA = 0.0
bags_hit = 0.0
loss_sum = 0.0
if __name__ == "__main__":
lib_cnn.readWordVec()
lib_cnn.readFromFile()
lib_kg.init()
np.random.seed(0)
tf.set_random_seed(0)
config = Config()
word_embeddings = np.zeros(config.num_words * config.word_size, dtype = np.float32)
lib_cnn.getWordVec.argtypes = [ctypes.c_void_p]
lib_cnn.getWordVec(word_embeddings.__array_interface__['data'][0])
word_embeddings.resize((config.num_words,config.word_size))
config.batch_size = lib_kg.getTripleTotal() / config.nbatches
config.entityTotal = lib_kg.getEntityTotal()
config.relationTotal = lib_kg.getRelationTotal()
with tf.Graph().as_default():
conf = tf.ConfigProto()
sess = tf.Session(config=conf)
with sess.as_default():
initializer = tf.contrib.layers.xavier_initializer()
with tf.variable_scope("model", reuse=None, initializer = initializer):
m = Model(config = config)
global_step_cnn = tf.Variable(0, name="global_step_cnn", trainable=False)
optimizer_cnn = tf.train.GradientDescentOptimizer(0.01)
grads_and_vars_cnn = optimizer_cnn.compute_gradients(m.loss_cnn)
train_op_cnn = optimizer_cnn.apply_gradients(grads_and_vars_cnn, global_step = global_step_cnn)
global_step_kg = tf.Variable(0, name="global_step_kg", trainable=False)
optimizer_kg = tf.train.GradientDescentOptimizer(0.001)
grads_and_vars_kg = optimizer_kg.compute_gradients(m.loss_kg)
train_op_kg = optimizer_kg.apply_gradients(grads_and_vars_kg, global_step=global_step_kg)
sess.run(tf.initialize_all_variables())
x_batch = np.zeros((config.instanceTot,config.sequence_size), dtype = np.int32)
p_t_batch = np.zeros((config.instanceTot,config.sequence_size), dtype = np.int32)
p_h_batch = np.zeros((config.instanceTot,config.sequence_size), dtype = np.int32)
r_batch = np.zeros((1, 1), dtype = np.int32)
y_batch = np.zeros((1, config.num_classes), dtype = np.int32)
r_n_batch = np.zeros((1, 1), dtype = np.float32)
h_batch = np.zeros((1, 1), dtype = np.int32)
t_batch = np.zeros((1, 1), dtype = np.int32)
x_batch_addr = x_batch.__array_interface__['data'][0]
p_t_batch_addr = p_t_batch.__array_interface__['data'][0]
p_h_batch_addr = p_h_batch.__array_interface__['data'][0]
y_batch_addr = y_batch.__array_interface__['data'][0]
r_batch_addr = r_batch.__array_interface__['data'][0]
r_n_batch_addr = r_n_batch.__array_interface__['data'][0]
h_batch_addr = h_batch.__array_interface__['data'][0]
t_batch_addr = t_batch.__array_interface__['data'][0]
lib_cnn.batch_iter.argtypes = [ctypes.c_void_p, ctypes.c_void_p, ctypes.c_void_p, ctypes.c_void_p, ctypes.c_void_p, ctypes.c_void_p, ctypes.c_void_p, ctypes.c_void_p]
tipTotal = lib_cnn.getTipTotal()
loop = 0
ph = np.zeros(config.batch_size * 2, dtype = np.int32)
pt = np.zeros(config.batch_size * 2, dtype = np.int32)
pr = np.zeros(config.batch_size * 2, dtype = np.int32)
nh = np.zeros(config.batch_size * 2, dtype = np.int32)
nt = np.zeros(config.batch_size * 2, dtype = np.int32)
nr = np.zeros(config.batch_size * 2, dtype = np.int32)
ph_addr = ph.__array_interface__['data'][0]
pt_addr = pt.__array_interface__['data'][0]
pr_addr = pr.__array_interface__['data'][0]
nh_addr = nh.__array_interface__['data'][0]
nt_addr = nt.__array_interface__['data'][0]
nr_addr = nr.__array_interface__['data'][0]
lib_kg.getBatch.argtypes = [ctypes.c_void_p, ctypes.c_void_p, ctypes.c_void_p, ctypes.c_void_p, ctypes.c_void_p, ctypes.c_void_p, ctypes.c_int]
times_kg = 0
coord = tf.train.Coordinator()
threads = []
threads.append(threading.Thread(target=train_kg, args=(coord,)))
threads.append(threading.Thread(target=train_cnn, args=(coord,)))
for t in threads: t.start()
coord.join(threads)
| 41.301587 | 321 | 0.711299 |
1c076dedd327711fd82ede330f8c1964afc14a4e | 7,468 | py | Python | i2vec_cli/__main__.py | rachmadaniHaryono/i2vec_cli | 9e03ca1c930e5eab8e42ac882c66e18f7c7435ba | [
"MIT"
] | null | null | null | i2vec_cli/__main__.py | rachmadaniHaryono/i2vec_cli | 9e03ca1c930e5eab8e42ac882c66e18f7c7435ba | [
"MIT"
] | null | null | null | i2vec_cli/__main__.py | rachmadaniHaryono/i2vec_cli | 9e03ca1c930e5eab8e42ac882c66e18f7c7435ba | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
"""get tag from http://demo.illustration2vec.net/."""
# note:
# - error 'ERROR: Request Entity Too Large' for file 1.1 mb
# <span style="color:red;">ERROR: Request Entity Too Large</span>
from collections import OrderedDict
from pathlib import Path
from pprint import pformat
import imghdr
import logging
import os
import shutil
import time
import urllib
import hashlib
import click
import requests
import structlog
import peewee
from PIL import Image
from i2vec_cli import models
from i2vec_cli.requests_session import Session, convert_raw_to_hydrus
from i2vec_cli.sha256 import sha256_checksum
from i2vec_cli.utils import user_data_dir, thumb_folder
def is_url(path):
"""Return True if path is url, False otherwise."""
scheme = urllib.parse.urlparse(path).scheme
if scheme in ('http', 'https'):
return True
return False
def is_ext_equal(file_ext, imghdr_ext):
"""compare file extension with result from imghdr_ext."""
if not imghdr_ext:
return False
if file_ext.lower() == '.{}'.format(imghdr_ext):
return True
if file_ext.lower() in ('.jpg', '.jpeg') and imghdr_ext == 'jpeg':
return True
return False
def download(url, no_clobber):
"""download url.
Args:
url: URL to be downloaded.
no_clobber: Skip download if file already exist.
Returns:
Downloaded filename or existing file if `no_clobber` is `True`
"""
log = structlog.getLogger()
basename = os.path.basename(url)
if os.path.isfile(basename) and no_clobber:
return basename
response = requests.get(url, stream=True)
with open(basename, 'wb') as out_file:
shutil.copyfileobj(response.raw, out_file)
name, ext = os.path.splitext(basename)
imghdr_ext = imghdr.what(basename)
ext_equal = is_ext_equal(file_ext=ext, imghdr_ext=imghdr_ext)
if not imghdr_ext:
log.debug("imghdr can't recognize file", file=basename)
return basename
else:
new_basename = '{}.{}'.format(name, imghdr_ext)
new_basename_exist = os.path.isfile(new_basename)
if ext_equal:
log.debug('Extension is equal', file_ext=ext, imghdr_ext=imghdr_ext)
return basename
elif not ext_equal:
if new_basename_exist and not no_clobber:
log.debug('Replace existing file', old=basename, new=new_basename)
shutil.move(basename, new_basename)
elif not new_basename_exist:
log.debug('Rename file ext', file=basename, new_ext=imghdr_ext)
shutil.move(basename, new_basename)
else:
log.debug('Not replace/rename file', no_clobber=no_clobber, new_basename=new_basename)
return new_basename
else:
log.debug(
'Unknown condition',
file=basename,
ext_equal=ext_equal,
new_basename_exist=new_basename_exist,
imghdr_ext=imghdr_ext
)
# just return base name if any error happen
return basename
def validate_close_delay(ctx, param, value):
"""validate close delay."""
try:
value = int(value)
except Exception as e:
raise click.BadParameter(
'Error when validate close delay: value={}, error={}'.format(value, e))
if value >= -1:
return value
else:
raise click.BadParameter('Close delay have to be bigger or equal than -1')
def delay_close(close_delay):
"""delay when closing the program."""
log = structlog.getLogger()
if close_delay == -1:
click.pause()
elif close_delay == 0:
log.debug('No close delay')
elif close_delay > 0:
time.sleep(close_delay)
else:
log.error('Invalid close delay', v=close_delay)
def create_thumbnail(path, thumb_path):
"""create thumbnail."""
size = 320, 320
try:
im = Image.open(path)
im.thumbnail(size)
im.save(thumb_path, "JPEG")
except IOError:
raise IOError("cannot create thumbnail for", path)
def get_print_result(path, db_path, format, session):
"""get print result."""
# compatibility
p = path
sha256 = sha256_checksum(p)
md5 = md5_checksum(p)
thumb_path = os.path.join(user_data_dir, 'thumb', '{}.jpg'.format(sha256))
try:
load_res = models.load_result(db=db_path, sha256=sha256, md5=md5)
except models.Image.DoesNotExist:
load_res = None
if load_res:
tags = {'prediction': load_res}
else:
tags = session.get_tags(path=p)
try:
models.save_result(
db=db_path, sha256=sha256, md5=md5, prediction=tags['prediction'])
except peewee.IntegrityError as e:
log.debug(str(e))
except keyError as e:
log.debug(str(tags))
if not os.path.isfile(thumb_path):
create_thumbnail(p, thumb_path)
if format == 'dict':
return tags
if format == 'hydrus':
return convert_raw_to_hydrus(tags)
else:
return pformat(tags['prediction'])
if __name__ == '__main__':
main()
| 30.987552 | 98 | 0.644215 |
1c079634ef4058798430437eb20ed1003701c2d2 | 23,443 | py | Python | cherrypy/lib/cptools.py | debrando/cherrypy | a92c5cc5d888b0aad327bce34e94da4a1f961e43 | [
"BSD-3-Clause"
] | 2 | 2019-03-04T15:17:49.000Z | 2021-04-04T08:08:14.000Z | lib/cherrypy/lib/cptools.py | rrosajp/script.module.cherrypy | 61ae795123755f3be43611e0f2667e85ef20c9d3 | [
"BSD-3-Clause"
] | 2 | 2019-11-16T13:20:55.000Z | 2021-01-10T11:28:43.000Z | lib/cherrypy/lib/cptools.py | rrosajp/script.module.cherrypy | 61ae795123755f3be43611e0f2667e85ef20c9d3 | [
"BSD-3-Clause"
] | 6 | 2020-05-22T15:25:34.000Z | 2021-08-13T09:43:01.000Z | """Functions for builtin CherryPy tools."""
import logging
import re
from hashlib import md5
import six
from six.moves import urllib
import cherrypy
from cherrypy._cpcompat import text_or_bytes
from cherrypy.lib import httputil as _httputil
from cherrypy.lib import is_iterator
# Conditional HTTP request support #
def validate_etags(autotags=False, debug=False):
"""Validate the current ETag against If-Match, If-None-Match headers.
If autotags is True, an ETag response-header value will be provided
from an MD5 hash of the response body (unless some other code has
already provided an ETag header). If False (the default), the ETag
will not be automatic.
WARNING: the autotags feature is not designed for URL's which allow
methods other than GET. For example, if a POST to the same URL returns
no content, the automatic ETag will be incorrect, breaking a fundamental
use for entity tags in a possibly destructive fashion. Likewise, if you
raise 304 Not Modified, the response body will be empty, the ETag hash
will be incorrect, and your application will break.
See :rfc:`2616` Section 14.24.
"""
response = cherrypy.serving.response
# Guard against being run twice.
if hasattr(response, 'ETag'):
return
status, reason, msg = _httputil.valid_status(response.status)
etag = response.headers.get('ETag')
# Automatic ETag generation. See warning in docstring.
if etag:
if debug:
cherrypy.log('ETag already set: %s' % etag, 'TOOLS.ETAGS')
elif not autotags:
if debug:
cherrypy.log('Autotags off', 'TOOLS.ETAGS')
elif status != 200:
if debug:
cherrypy.log('Status not 200', 'TOOLS.ETAGS')
else:
etag = response.collapse_body()
etag = '"%s"' % md5(etag).hexdigest()
if debug:
cherrypy.log('Setting ETag: %s' % etag, 'TOOLS.ETAGS')
response.headers['ETag'] = etag
response.ETag = etag
# "If the request would, without the If-Match header field, result in
# anything other than a 2xx or 412 status, then the If-Match header
# MUST be ignored."
if debug:
cherrypy.log('Status: %s' % status, 'TOOLS.ETAGS')
if status >= 200 and status <= 299:
request = cherrypy.serving.request
conditions = request.headers.elements('If-Match') or []
conditions = [str(x) for x in conditions]
if debug:
cherrypy.log('If-Match conditions: %s' % repr(conditions),
'TOOLS.ETAGS')
if conditions and not (conditions == ['*'] or etag in conditions):
raise cherrypy.HTTPError(412, 'If-Match failed: ETag %r did '
'not match %r' % (etag, conditions))
conditions = request.headers.elements('If-None-Match') or []
conditions = [str(x) for x in conditions]
if debug:
cherrypy.log('If-None-Match conditions: %s' % repr(conditions),
'TOOLS.ETAGS')
if conditions == ['*'] or etag in conditions:
if debug:
cherrypy.log('request.method: %s' %
request.method, 'TOOLS.ETAGS')
if request.method in ('GET', 'HEAD'):
raise cherrypy.HTTPRedirect([], 304)
else:
raise cherrypy.HTTPError(412, 'If-None-Match failed: ETag %r '
'matched %r' % (etag, conditions))
def validate_since():
"""Validate the current Last-Modified against If-Modified-Since headers.
If no code has set the Last-Modified response header, then no validation
will be performed.
"""
response = cherrypy.serving.response
lastmod = response.headers.get('Last-Modified')
if lastmod:
status, reason, msg = _httputil.valid_status(response.status)
request = cherrypy.serving.request
since = request.headers.get('If-Unmodified-Since')
if since and since != lastmod:
if (status >= 200 and status <= 299) or status == 412:
raise cherrypy.HTTPError(412)
since = request.headers.get('If-Modified-Since')
if since and since == lastmod:
if (status >= 200 and status <= 299) or status == 304:
if request.method in ('GET', 'HEAD'):
raise cherrypy.HTTPRedirect([], 304)
else:
raise cherrypy.HTTPError(412)
# Tool code #
def allow(methods=None, debug=False):
"""Raise 405 if request.method not in methods (default ['GET', 'HEAD']).
The given methods are case-insensitive, and may be in any order.
If only one method is allowed, you may supply a single string;
if more than one, supply a list of strings.
Regardless of whether the current method is allowed or not, this
also emits an 'Allow' response header, containing the given methods.
"""
if not isinstance(methods, (tuple, list)):
methods = [methods]
methods = [m.upper() for m in methods if m]
if not methods:
methods = ['GET', 'HEAD']
elif 'GET' in methods and 'HEAD' not in methods:
methods.append('HEAD')
cherrypy.response.headers['Allow'] = ', '.join(methods)
if cherrypy.request.method not in methods:
if debug:
cherrypy.log('request.method %r not in methods %r' %
(cherrypy.request.method, methods), 'TOOLS.ALLOW')
raise cherrypy.HTTPError(405)
else:
if debug:
cherrypy.log('request.method %r in methods %r' %
(cherrypy.request.method, methods), 'TOOLS.ALLOW')
def proxy(base=None, local='X-Forwarded-Host', remote='X-Forwarded-For',
scheme='X-Forwarded-Proto', debug=False):
"""Change the base URL (scheme://host[:port][/path]).
For running a CP server behind Apache, lighttpd, or other HTTP server.
For Apache and lighttpd, you should leave the 'local' argument at the
default value of 'X-Forwarded-Host'. For Squid, you probably want to set
tools.proxy.local = 'Origin'.
If you want the new request.base to include path info (not just the host),
you must explicitly set base to the full base path, and ALSO set 'local'
to '', so that the X-Forwarded-Host request header (which never includes
path info) does not override it. Regardless, the value for 'base' MUST
NOT end in a slash.
cherrypy.request.remote.ip (the IP address of the client) will be
rewritten if the header specified by the 'remote' arg is valid.
By default, 'remote' is set to 'X-Forwarded-For'. If you do not
want to rewrite remote.ip, set the 'remote' arg to an empty string.
"""
request = cherrypy.serving.request
if scheme:
s = request.headers.get(scheme, None)
if debug:
cherrypy.log('Testing scheme %r:%r' % (scheme, s), 'TOOLS.PROXY')
if s == 'on' and 'ssl' in scheme.lower():
# This handles e.g. webfaction's 'X-Forwarded-Ssl: on' header
scheme = 'https'
else:
# This is for lighttpd/pound/Mongrel's 'X-Forwarded-Proto: https'
scheme = s
if not scheme:
scheme = request.base[:request.base.find('://')]
if local:
lbase = request.headers.get(local, None)
if debug:
cherrypy.log('Testing local %r:%r' % (local, lbase), 'TOOLS.PROXY')
if lbase is not None:
base = lbase.split(',')[0]
if not base:
default = urllib.parse.urlparse(request.base).netloc
base = request.headers.get('Host', default)
if base.find('://') == -1:
# add http:// or https:// if needed
base = scheme + '://' + base
request.base = base
if remote:
xff = request.headers.get(remote)
if debug:
cherrypy.log('Testing remote %r:%r' % (remote, xff), 'TOOLS.PROXY')
if xff:
if remote == 'X-Forwarded-For':
# Grab the first IP in a comma-separated list. Ref #1268.
xff = next(ip.strip() for ip in xff.split(','))
request.remote.ip = xff
def ignore_headers(headers=('Range',), debug=False):
"""Delete request headers whose field names are included in 'headers'.
This is a useful tool for working behind certain HTTP servers;
for example, Apache duplicates the work that CP does for 'Range'
headers, and will doubly-truncate the response.
"""
request = cherrypy.serving.request
for name in headers:
if name in request.headers:
if debug:
cherrypy.log('Ignoring request header %r' % name,
'TOOLS.IGNORE_HEADERS')
del request.headers[name]
def response_headers(headers=None, debug=False):
"""Set headers on the response."""
if debug:
cherrypy.log('Setting response headers: %s' % repr(headers),
'TOOLS.RESPONSE_HEADERS')
for name, value in (headers or []):
cherrypy.serving.response.headers[name] = value
response_headers.failsafe = True
def referer(pattern, accept=True, accept_missing=False, error=403,
message='Forbidden Referer header.', debug=False):
"""Raise HTTPError if Referer header does/does not match the given pattern.
pattern
A regular expression pattern to test against the Referer.
accept
If True, the Referer must match the pattern; if False,
the Referer must NOT match the pattern.
accept_missing
If True, permit requests with no Referer header.
error
The HTTP error code to return to the client on failure.
message
A string to include in the response body on failure.
"""
try:
ref = cherrypy.serving.request.headers['Referer']
match = bool(re.match(pattern, ref))
if debug:
cherrypy.log('Referer %r matches %r' % (ref, pattern),
'TOOLS.REFERER')
if accept == match:
return
except KeyError:
if debug:
cherrypy.log('No Referer header', 'TOOLS.REFERER')
if accept_missing:
return
raise cherrypy.HTTPError(error, message)
session_auth.__doc__ = (
"""Session authentication hook.
Any attribute of the SessionAuth class may be overridden via a keyword arg
to this function:
""" + '\n'.join(['%s: %s' % (k, type(getattr(SessionAuth, k)).__name__)
for k in dir(SessionAuth) if not k.startswith('__')])
)
def log_traceback(severity=logging.ERROR, debug=False):
"""Write the last error's traceback to the cherrypy error log."""
cherrypy.log('', 'HTTP', severity=severity, traceback=True)
def log_request_headers(debug=False):
"""Write request headers to the cherrypy error log."""
h = [' %s: %s' % (k, v) for k, v in cherrypy.serving.request.header_list]
cherrypy.log('\nRequest Headers:\n' + '\n'.join(h), 'HTTP')
def log_hooks(debug=False):
"""Write request.hooks to the cherrypy error log."""
request = cherrypy.serving.request
msg = []
# Sort by the standard points if possible.
from cherrypy import _cprequest
points = _cprequest.hookpoints
for k in request.hooks.keys():
if k not in points:
points.append(k)
for k in points:
msg.append(' %s:' % k)
v = request.hooks.get(k, [])
v.sort()
for h in v:
msg.append(' %r' % h)
cherrypy.log('\nRequest Hooks for ' + cherrypy.url() +
':\n' + '\n'.join(msg), 'HTTP')
def redirect(url='', internal=True, debug=False):
"""Raise InternalRedirect or HTTPRedirect to the given url."""
if debug:
cherrypy.log('Redirecting %sto: %s' %
({True: 'internal ', False: ''}[internal], url),
'TOOLS.REDIRECT')
if internal:
raise cherrypy.InternalRedirect(url)
else:
raise cherrypy.HTTPRedirect(url)
def trailing_slash(missing=True, extra=False, status=None, debug=False):
"""Redirect if path_info has (missing|extra) trailing slash."""
request = cherrypy.serving.request
pi = request.path_info
if debug:
cherrypy.log('is_index: %r, missing: %r, extra: %r, path_info: %r' %
(request.is_index, missing, extra, pi),
'TOOLS.TRAILING_SLASH')
if request.is_index is True:
if missing:
if not pi.endswith('/'):
new_url = cherrypy.url(pi + '/', request.query_string)
raise cherrypy.HTTPRedirect(new_url, status=status or 301)
elif request.is_index is False:
if extra:
# If pi == '/', don't redirect to ''!
if pi.endswith('/') and pi != '/':
new_url = cherrypy.url(pi[:-1], request.query_string)
raise cherrypy.HTTPRedirect(new_url, status=status or 301)
def accept(media=None, debug=False):
"""Return the client's preferred media-type (from the given Content-Types).
If 'media' is None (the default), no test will be performed.
If 'media' is provided, it should be the Content-Type value (as a string)
or values (as a list or tuple of strings) which the current resource
can emit. The client's acceptable media ranges (as declared in the
Accept request header) will be matched in order to these Content-Type
values; the first such string is returned. That is, the return value
will always be one of the strings provided in the 'media' arg (or None
if 'media' is None).
If no match is found, then HTTPError 406 (Not Acceptable) is raised.
Note that most web browsers send */* as a (low-quality) acceptable
media range, which should match any Content-Type. In addition, "...if
no Accept header field is present, then it is assumed that the client
accepts all media types."
Matching types are checked in order of client preference first,
and then in the order of the given 'media' values.
Note that this function does not honor accept-params (other than "q").
"""
if not media:
return
if isinstance(media, text_or_bytes):
media = [media]
request = cherrypy.serving.request
# Parse the Accept request header, and try to match one
# of the requested media-ranges (in order of preference).
ranges = request.headers.elements('Accept')
if not ranges:
# Any media type is acceptable.
if debug:
cherrypy.log('No Accept header elements', 'TOOLS.ACCEPT')
return media[0]
else:
# Note that 'ranges' is sorted in order of preference
for element in ranges:
if element.qvalue > 0:
if element.value == '*/*':
# Matches any type or subtype
if debug:
cherrypy.log('Match due to */*', 'TOOLS.ACCEPT')
return media[0]
elif element.value.endswith('/*'):
# Matches any subtype
mtype = element.value[:-1] # Keep the slash
for m in media:
if m.startswith(mtype):
if debug:
cherrypy.log('Match due to %s' % element.value,
'TOOLS.ACCEPT')
return m
else:
# Matches exact value
if element.value in media:
if debug:
cherrypy.log('Match due to %s' % element.value,
'TOOLS.ACCEPT')
return element.value
# No suitable media-range found.
ah = request.headers.get('Accept')
if ah is None:
msg = 'Your client did not send an Accept header.'
else:
msg = 'Your client sent this Accept header: %s.' % ah
msg += (' But this resource only emits these media types: %s.' %
', '.join(media))
raise cherrypy.HTTPError(406, msg)
def autovary(ignore=None, debug=False):
"""Auto-populate the Vary response header based on request.header access.
"""
request = cherrypy.serving.request
req_h = request.headers
request.headers = MonitoredHeaderMap()
request.headers.update(req_h)
if ignore is None:
ignore = set(['Content-Disposition', 'Content-Length', 'Content-Type'])
request.hooks.attach('before_finalize', set_response_header, 95)
def convert_params(exception=ValueError, error=400):
"""Convert request params based on function annotations, with error handling.
exception
Exception class to catch.
status
The HTTP error code to return to the client on failure.
"""
request = cherrypy.serving.request
types = request.handler.callable.__annotations__
with cherrypy.HTTPError.handle(exception, error):
for key in set(types).intersection(request.params):
request.params[key] = types[key](request.params[key])
| 36.572543 | 81 | 0.601971 |
1c07bfa6e3a91882477a3925b04caaee6211dc0f | 3,216 | py | Python | pyiomica/utilityFunctions.py | benstear/pyiomica | bc26032b610fc911cc03b54115d6abdf53a56fce | [
"MIT"
] | null | null | null | pyiomica/utilityFunctions.py | benstear/pyiomica | bc26032b610fc911cc03b54115d6abdf53a56fce | [
"MIT"
] | null | null | null | pyiomica/utilityFunctions.py | benstear/pyiomica | bc26032b610fc911cc03b54115d6abdf53a56fce | [
"MIT"
] | null | null | null | '''Utility functions'''
import multiprocessing
from .globalVariables import *
def readMathIOmicaData(fileName):
    """Read text files exported by MathIOmica and convert to Python data.

    The MathIOmica (Wolfram Language) literal syntax is rewritten into
    Python literal syntax (associations ``<|...|>`` become dicts, rules
    ``->`` become dict entries, braces become tuples) and then evaluated.

    Parameters:
        fileName: str
            Path of directories and name of the file containing data

    Returns:
        data
            Python data, or None if the file is missing or malformed

    Usage:
        data = readMathIOmicaData("../../MathIOmica/MathIOmica/MathIOmicaData/ExampleData/rnaExample")
    """
    # Initialize up front: previously, a failed eval on an existing file left
    # this name unbound and the final return raised UnboundLocalError.
    returning = None

    if not os.path.isfile(fileName):
        print('File not found (%s)'%(fileName))
        return returning

    with open(fileName, 'r') as tempFile:
        data = tempFile.read()

    # Translate Wolfram Language syntax to Python literal syntax.
    # Order matters: braces must be rewritten before '<|' / '|>'.
    data = data.replace('\n','').replace('{','(').replace('}',')').replace('->',':').replace('|>','}')
    data = data.replace('<|','{').replace('^','*').replace('`','*').replace('Missing[]','"Missing[]"')
    data = data.replace("\\",'')

    try:
        # NOTE: eval of file contents -- only use on trusted MathIOmica exports.
        returning = eval(data)
    except:
        print('Error occured while converting data (%s)'%(fileName))

    return returning
def runCPUs(NumberOfAvailableCPUs, func, list_of_tuples_of_func_params):
    """Parallelize function call with multiprocessing.Pool.

    Parameters:
        NumberOfAvailableCPUs: int
            Number of processes to create

        func: function
            Function to apply, must take at most one argument

        list_of_tuples_of_func_params: list
            Function parameters

    Returns:
        2d numpy.array
            Results of func in a numpy array

    Usage:
        results = runCPUs(4, pAutocorrelation, [(times[i], data[i], allTimes) for i in range(10)])
    """
    # Fan the parameter tuples out over a pool of worker processes.
    pool = multiprocessing.Pool(processes=NumberOfAvailableCPUs)
    results = pool.map(func, list_of_tuples_of_func_params)
    pool.close()
    pool.join()

    # Stack the per-call results into a single 2d array.
    return np.vstack(results)
def createReverseDictionary(inputDictionary):
    """Efficient way to create a reverse dictionary from a dictionary.

    Utilizes Pandas.Dataframe.groupby and Numpy arrays indexing.

    Parameters:
        inputDictionary: dictionary
            Dictionary mapping each key to an iterable of values

    Returns:
        dictionary
            Reversed dictionary, mapping each value to the list of keys
            whose value-lists contained it

    Usage:
        revDict = createReverseDictionary(Dict)
    """
    # Build the flat (key, value) pair table directly from the dictionary.
    # NOTE: the previous implementation built np.array from the ragged list
    # of value-lists, which raises ValueError on NumPy >= 1.24.
    pairs = [[key, value] for key in inputDictionary for value in inputDictionary[key]]
    if not pairs:
        # groupby below would fail on an empty frame (df.columns[1] IndexError)
        return {}

    df = pd.DataFrame(np.array(pairs))
    dfGrouped = df.groupby(df.columns[1])
    keys, values = list(dfGrouped.indices.keys()), list(dfGrouped.indices.values())
    GOs = df.values.T[0]
    return dict(zip(keys, [GOs[value].tolist() for value in values]))
def createDirectories(path):
    """Create a path of directories, unless the path already exists.

    Parameters:
        path: str
            Path directory

    Returns:
        None

    Usage:
        createDirectories("/pathToFolder1/pathToSubFolder2")
    """
    if path == '':
        # os.makedirs('') would raise FileNotFoundError
        return None

    # exist_ok avoids the check-then-create race of os.path.exists + makedirs
    os.makedirs(path, exist_ok=True)

    return None
| 26.578512 | 107 | 0.60852 |
1c07e719407fcda373a642abe4461b09f4086e6c | 4,041 | py | Python | CRNitschke/get_sextract_thresholds.py | deapplegate/wtgpipeline | 9693e8562022cc97bf5a96427e22965e1a5e8497 | [
"MIT"
] | 1 | 2019-03-15T04:01:19.000Z | 2019-03-15T04:01:19.000Z | CRNitschke/get_sextract_thresholds.py | deapplegate/wtgpipeline | 9693e8562022cc97bf5a96427e22965e1a5e8497 | [
"MIT"
] | 5 | 2017-12-11T00:11:39.000Z | 2021-07-09T17:05:16.000Z | CRNitschke/get_sextract_thresholds.py | deapplegate/wtgpipeline | 9693e8562022cc97bf5a96427e22965e1a5e8497 | [
"MIT"
] | 2 | 2017-08-15T21:19:11.000Z | 2017-10-12T00:36:35.000Z | #! /usr/bin/env python
#adam-does# runs SeeingClearly to get the seeing and rms of the image, then uses those to get sextractor thresholds for CR detection
#adam-use# use with CRNitschke pipeline
#adam-call_example# call it like ./get_sextract_thresholds.py /path/flname.fits output_file.txt
#IO stuff:
import sys ; sys.path.append('/u/ki/awright/InstallingSoftware/pythons')
###saveout = sys.stdout
saveout = sys.stdout
###logout = open('SeeingClearly_stdout.log','w')
###sys.stdout = logout
saveerr = sys.stderr
###logerr = open('SeeingClearly_stderr.log','w')
###sys.stderr = logerr
sys.stdout = sys.stderr
#the basics
import hashlib
import os
import SeeingClearly
from copy import deepcopy
import imagetools
import glob
import astropy
from astropy.io import ascii
from numpy import asarray
if __name__ == "__main__":
    # Parse the command line: expects <input .fits file> <output text file>.
    args=deepcopy(sys.argv[1:])
    # drop ipython / option-separator artifacts from the argument list
    for false_arg in ['-i', '--']:
        if false_arg in args: args.remove(false_arg)
    if len(args)<1:
        sys.exit()
    if not os.path.isfile(args[0]):
        print "sys.argv[1]=",args[0]
        raise Exception(args[0]+" is not a file!")
    else:
        fl=args[0]
        fl2save=args[1]
    #start tmp
    print "Using SeeingClearly to get seeing for: "+fl
    print "saving output to: " +fl2save
    # FILTER keyword may be absent from the FITS header; fall back to a tag
    try:
        FILTER=astropy.io.fits.open(fl)[0].header['FILTER']
    except:
        FILTER="UnknownFilt"
    # split the file name around the "OCF" tag used by the pipeline naming scheme
    BASE,ending=os.path.basename(fl).split('OCF')
    ending="OCF"+ending
    ending=ending.replace('.fits','')
    fls_dir=os.path.dirname(fl)
    basename=os.path.basename(fl)
    # locate the sibling exposures: replace this file's CCD number with a
    # wildcard and glob for all 10 CCD files of the same exposure
    CCDnum=imagetools.GetCCD(fl)
    globthis='_'+str(CCDnum)
    glob_basename=basename.replace(globthis,'_*')
    fls=sorted(glob.glob(fls_dir+"/"+glob_basename))
    if not len(fls)==10:
        raise Exception('cannot find 10 files like this from different CCDs')
    #adam-old# seeing,back_rms=SeeingClearly.seeing_clearly_withplot(fls,checkplots=1,saveas='pltSeeingClearly_%s_%s' % (FILTER,BASE[:-1]+"ALL"))
    import adam_stars_from_cat
    import numpy
    # per-CCD seeing (single value) and background rms (one per file)
    seeing,back_rms=adam_stars_from_cat.get_seeing_backrms(fls)
    back_rms=numpy.array(back_rms)
    # NOTE(review): seeing_to_ft_dt is not defined or imported in this chunk --
    # presumably provided elsewhere in the module (or via SeeingClearly); confirm.
    ft,dt=seeing_to_ft_dt(seeing)
    detect_thresh=dt/back_rms #convert to S2N ratio
    filter_thresh=ft/back_rms #convert to S2N ratio
    # B band: hard-cap the thresholds (dt<=170, ft<=20); other filters only warn
    if FILTER=='W-J-B':
        detect_thresh=asarray([min(170.0,detect_thresh[i]) for i in range(len(detect_thresh))])
        filter_thresh=asarray([min(20.0,filter_thresh[i]) for i in range(len(filter_thresh))])
    elif (detect_thresh>170.0).any() or (filter_thresh>20.0).any():
        print 'checkit: filter=%s and %.2f %% of the detection thresholds are above 170.0 and %.2f %% of the filter thresholds are above 20.0' % (FILTER,(detect_thresh>170.0).mean()*100, (filter_thresh>20.0).mean()*100)
    # write one ascii table row per CCD: file name, rms, seeing, both thresholds
    dict_out={}
    dict_out['seeing']=[seeing]*10
    dict_out['rms']=back_rms
    dict_out['dt']=detect_thresh
    dict_out['ft']=filter_thresh
    dict_out['#files']=fls
    t=astropy.table.Table(data=dict_out,names=['#files','rms','seeing','dt','ft'],dtype=[str,float,float,float,float])
    t.write(fl2save,format="ascii.basic")
    #adam-2014#detect_thresh_cap=min(detect_thresh,150.0) #cap is now set in the function seeing_to_ft_dt
    #PIXSCALE=float(os.environ['PIXSCALE'])
    #if seeing>PIXSCALE*2.5: #I have no check for being undersampled, should I?
    #if seeing>.4:
    #    sys.stdout=saveout #back to printing to terminal
    #    ###sys.stdout.write(str(seeing))
    #    print "'0 "+str(back_rms)+" "+str(seeing)+" "+str(detect_thresh)+" "+str(filter_thresh)+"'"
    #
    #else:
    #    #print "exit 1;"
    #    #raise Exception('Seeing less than 2.5xPIXSCALE. The image is undersampled')
    #    #sys.stderr=saveerr #back to printing to terminal
    #    #sys.stderr.write('1')
    #    sys.stdout=saveout #back to printing to terminal
    #    print "0 "+str(back_rms)+" "+str(seeing)+" "+str(detect_thresh)+" "+str(filter_thresh)
1c0a49535e49079808208d6bb2ff3cdc8ca96e3f | 2,843 | py | Python | python/labbox/api/_session.py | flatironinstitute/labbox | d8b331d55a5cca543567c3b2e92bcdc02b46e799 | [
"Apache-2.0"
] | 1 | 2021-09-23T01:10:39.000Z | 2021-09-23T01:10:39.000Z | python/labbox/api/_session.py | flatironinstitute/labbox | d8b331d55a5cca543567c3b2e92bcdc02b46e799 | [
"Apache-2.0"
] | null | null | null | python/labbox/api/_session.py | flatironinstitute/labbox | d8b331d55a5cca543567c3b2e92bcdc02b46e799 | [
"Apache-2.0"
] | 1 | 2021-09-23T01:10:39.000Z | 2021-09-23T01:10:39.000Z | import time
import multiprocessing
| 38.418919 | 140 | 0.593739 |
1c0adf2b82d9cbfe6db20d368afa9827c211f577 | 8,114 | py | Python | aldryn_newsblog/tests/test_reversion.py | GabrielDumbrava/aldryn-newsblog | f3be5ff78e88fde532ce4c45e5eeb88d98fa6d93 | [
"BSD-3-Clause"
] | null | null | null | aldryn_newsblog/tests/test_reversion.py | GabrielDumbrava/aldryn-newsblog | f3be5ff78e88fde532ce4c45e5eeb88d98fa6d93 | [
"BSD-3-Clause"
] | null | null | null | aldryn_newsblog/tests/test_reversion.py | GabrielDumbrava/aldryn-newsblog | f3be5ff78e88fde532ce4c45e5eeb88d98fa6d93 | [
"BSD-3-Clause"
] | null | null | null | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from unittest import skipIf
try:
from django.core.urlresolvers import reverse
except ModuleNotFoundError:
from django.urls import reverse
from django.db import transaction
from aldryn_reversion.core import create_revision as aldryn_create_revision
from parler.utils.context import switch_language
import six
from . import NewsBlogTestCase
from aldryn_newsblog.cms_appconfig import NewsBlogConfig
from ..settings import ENABLE_REVERSION
if ENABLE_REVERSION:
try:
from reversion import create_revision
from reversion import default_revision_manager
except ImportError:
from reversion.revisions import create_revision
from reversion.revisions import default_revision_manager
| 37.391705 | 78 | 0.646537 |
1c0c6c9d53be4b7690f691af9859df23fb71fa58 | 38,971 | py | Python | network/network.py | VirtualEmbryo/lumen_network | 35b1dadccd087c9ef234f12c2735098b82890b34 | [
"MIT"
] | 1 | 2019-08-02T07:41:27.000Z | 2019-08-02T07:41:27.000Z | network/network.py | VirtualEmbryo/lumen_network | 35b1dadccd087c9ef234f12c2735098b82890b34 | [
"MIT"
] | null | null | null | network/network.py | VirtualEmbryo/lumen_network | 35b1dadccd087c9ef234f12c2735098b82890b34 | [
"MIT"
] | null | null | null | # Library for the dynamics of a lumen network
# The lumen are 2 dimensional and symmetric and connected with 1 dimensional tubes
#
# Created by A. Mielke, 2018
# Modified by M. Le Verge--Serandour on 8/04/2019
"""
network.py conf.init
Defines the class network and associated functions
Imports
-------
Libraries : numpy, os, math
Created by A. Mielke
Modified by H. Turlier on 8/06/2018
Modified by M. Le Verge--Serandour on 8/04/2019
"""
import numpy as np
import math
import os
| 38.700099 | 215 | 0.497575 |
1c0d361e8337d02f5fbc92a1db2b014025d1f86f | 943 | py | Python | scripts/upsampling_demo.py | always-newbie161/pyprobml | eb70c84f9618d68235ef9ba7da147c009b2e4a80 | [
"MIT"
] | 2 | 2021-02-26T04:36:10.000Z | 2021-02-26T04:36:24.000Z | scripts/upsampling_demo.py | always-newbie161/pyprobml | eb70c84f9618d68235ef9ba7da147c009b2e4a80 | [
"MIT"
] | 9 | 2021-03-31T20:18:21.000Z | 2022-03-12T00:52:47.000Z | scripts/upsampling_demo.py | always-newbie161/pyprobml | eb70c84f9618d68235ef9ba7da147c009b2e4a80 | [
"MIT"
] | 1 | 2021-06-21T01:18:07.000Z | 2021-06-21T01:18:07.000Z | # Illustrate upsampling in 2d
# Code from Jason Brownlee
# https://machinelearningmastery.com/generative_adversarial_networks/
import tensorflow as tf
from tensorflow import keras
from numpy import asarray
#from keras.models import Sequential
from tensorflow.keras.models import Sequential
#from keras.layers import UpSampling2D
from tensorflow.keras.layers import UpSampling2D
# First (unused) 2x2 example input; it is immediately replaced by the 3x3
# input below, so only the 3x3 case is actually demonstrated.
X = asarray([[1, 2],
             [3, 4]])
X = asarray([[1, 2, 3],
             [4, 5, 6],
             [7, 8, 9]])
print(X)
nr = X.shape[0]
nc = X.shape[1]
# reshape input data into one sample with a channel: (batch, rows, cols, channels)
X = X.reshape((1, nr, nc, 1))

# Nearest-neighbour upsampling: each input pixel becomes a 2x2 block.
model = Sequential()
model.add(UpSampling2D(input_shape=(nr, nc, 1)))  # nearest neighbor
yhat = model.predict(X)
yhat = yhat.reshape((2*nr, 2*nc))
print(yhat)

# Bilinear upsampling of the same input.
model = Sequential()
# fix: input_shape was (nc, nc, 1) -- a typo that only worked because the
# demo input happens to be square; use (nr, nc, 1) like the model above
model.add(UpSampling2D(input_shape=(nr, nc, 1), interpolation='bilinear'))
yhat = model.predict(X)
yhat = yhat.reshape((2*nr, 2*nc))
print(yhat)
1c1095f5f37cb41b6318b6beaf6df6c400bfad6c | 17,334 | py | Python | V2RaycSpider1225/src/BusinessCentralLayer/scaffold.py | njchj/V2RayCloudSpider | 16154cf48c74fa2c8cf2f6792d2db3632501f5d6 | [
"MIT"
] | 1 | 2021-09-28T09:38:15.000Z | 2021-09-28T09:38:15.000Z | V2RaycSpider1225/src/BusinessCentralLayer/scaffold.py | njchj/V2RayCloudSpider | 16154cf48c74fa2c8cf2f6792d2db3632501f5d6 | [
"MIT"
] | null | null | null | V2RaycSpider1225/src/BusinessCentralLayer/scaffold.py | njchj/V2RayCloudSpider | 16154cf48c74fa2c8cf2f6792d2db3632501f5d6 | [
"MIT"
] | 1 | 2021-09-09T07:22:47.000Z | 2021-09-09T07:22:47.000Z | __all__ = ['scaffold', 'command_set']
from gevent import monkey
monkey.patch_all()
import csv
import os
import sys
import time
import shutil
from typing import List
import gevent
from src.BusinessCentralLayer.setting import logger, DEFAULT_POWER, CHROMEDRIVER_PATH, \
REDIS_MASTER, SERVER_DIR_DATABASE_CACHE, SERVER_DIR_CLIENT_DEPORT, SERVER_PATH_DEPOT_VCS, SERVER_DIR_CACHE_BGPIC, \
REDIS_SLAVER_DDT, CRAWLER_SEQUENCE, terminal_echo, SERVER_DIR_DATABASE_LOG, SERVER_DIR_SSPANEL_MINING
# Mapping of scaffold sub-command name -> help/description text shown to the
# user.
# NOTE(review): the description strings and the section-banner comments appear
# to have lost their original non-ASCII (Chinese) text in this copy; the
# remaining ASCII fragments are preserved verbatim -- restore from upstream if
# needed.
command_set = {
    # ---------------------------------------------
    #
    # ---------------------------------------------
    'deploy': "/Flask yaml",
    # ---------------------------------------------
    #
    # ---------------------------------------------
    "clear": "",
    "decouple": "subs_ddt",
    "overdue": "",
    "run": "[spawn]",
    "force_run": "[spawn]",
    "remain": "",
    "ping": "",
    "entropy": "",
    "exile": "",
    "spawn": "",
    "mining": "STAFF hostSEO",
    # ---------------------------------------------
    #
    # ---------------------------------------------
    # usage: python main.py --parse https://domain/link/token?sub=3
    # usage: python main.py --parse https://domain/link/token?sub=3 https://domain/link/token2?sub=3
    # "--parse": """ping""",
    # ---------------------------------------------
    # Windows
    # ---------------------------------------------
    "panel": "[for Windows] ",
    # implicit string concatenation: the two literals below form one value
    "ash": "[for Windows] ,Clash yaml,"
           "URL SchemeClash",
    # ---------------------------------------------
    #
    # ---------------------------------------------
    "example": "python main.py ping"
}
_ConfigQuarantine().run()
scaffold = _ScaffoldGuider()
| 37.038462 | 119 | 0.546267 |
1c10af158381d3bf41fbdb21d408b7a2f5c450b9 | 2,336 | py | Python | python/swap_header.py | daniestevez/gr-csp | 0a10e4d2e5cf4a51256e5dc72aa42f8d3d54c232 | [
"Unlicense"
] | 19 | 2016-05-27T15:12:31.000Z | 2021-04-19T09:42:35.000Z | python/swap_header.py | daniestevez/gr-csp | 0a10e4d2e5cf4a51256e5dc72aa42f8d3d54c232 | [
"Unlicense"
] | null | null | null | python/swap_header.py | daniestevez/gr-csp | 0a10e4d2e5cf4a51256e5dc72aa42f8d3d54c232 | [
"Unlicense"
] | 5 | 2017-04-26T22:48:40.000Z | 2022-02-19T23:49:33.000Z | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2016 Daniel Estevez <daniel@destevez.net>.
#
# This is free and unencumbered software released into the public domain.
#
# Anyone is free to copy, modify, publish, use, compile, sell, or
# distribute this software, either in source code form or as a compiled
# binary, for any purpose, commercial or non-commercial, and by any
# means.
#
# In jurisdictions that recognize copyright laws, the author or authors
# of this software dedicate any and all copyright interest in the
# software to the public domain. We make this dedication for the benefit
# of the public at large and to the detriment of our heirs and
# successors. We intend this dedication to be an overt act of
# relinquishment in perpetuity of all present and future rights to this
# software under copyright law.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
# IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR
# OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
# ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
# OTHER DEALINGS IN THE SOFTWARE.
#
# For more information, please refer to <http://unlicense.org>
#
import numpy
from gnuradio import gr
import pmt
import array
| 36.5 | 90 | 0.704195 |
1c10ce60247554a61b0b5d48488c8c6ceac709b6 | 2,149 | py | Python | start.py | gleenn/dfplayer | dd390a6f54b3bd8b2a3397fddd6caacfba01b29d | [
"MIT"
] | null | null | null | start.py | gleenn/dfplayer | dd390a6f54b3bd8b2a3397fddd6caacfba01b29d | [
"MIT"
] | null | null | null | start.py | gleenn/dfplayer | dd390a6f54b3bd8b2a3397fddd6caacfba01b29d | [
"MIT"
] | null | null | null | #!/usr/bin/python
#
# Start dfplayer.
import argparse
import os
import shutil
import subprocess
import sys
import time
_PROJ_DIR = os.path.dirname(__file__)
main()
| 28.653333 | 76 | 0.68497 |
1c1166ff6a3e4c18665f05d1beca5c764b5fda93 | 3,368 | py | Python | AIY/voice/cloudspeech_demo.py | Pougnator/Prometheus | d7c59f3a97b4f60958f130741ccc16b81d65f505 | [
"Apache-2.0"
] | null | null | null | AIY/voice/cloudspeech_demo.py | Pougnator/Prometheus | d7c59f3a97b4f60958f130741ccc16b81d65f505 | [
"Apache-2.0"
] | null | null | null | AIY/voice/cloudspeech_demo.py | Pougnator/Prometheus | d7c59f3a97b4f60958f130741ccc16b81d65f505 | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/env python3
# Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""A demo of the Google CloudSpeech recognizer."""
import aiy.audio
import aiy.cloudspeech
import aiy.voicehat
import aiy.i18n
import aiy.audio
CONFIRM_SOUND_PATH = '/home/pi/Music/R2D2/R2_Understood.wav'
CONFUSED_SOUND_PATH = '/home/pi/Music/R2D2/R2_Confused.wav'
UNRECOGNISED_SOUND_PATH = '/home/pi/Music/R2D2/R2_FastBip.wav'
if __name__ == '__main__':
main()
| 37.010989 | 94 | 0.58462 |
1c119d6282e07a22b49176d0f6616aca7099e5dc | 3,159 | py | Python | options/base_option.py | lime-j/YTMT-Strategy-1 | aacc38c4e61b91e187cac81aa95500e0422d4d0f | [
"Apache-2.0"
] | 26 | 2021-11-08T07:49:34.000Z | 2022-03-28T14:09:27.000Z | options/base_option.py | lime-j/YTMT-Strategy-1 | aacc38c4e61b91e187cac81aa95500e0422d4d0f | [
"Apache-2.0"
] | 2 | 2021-10-22T02:53:10.000Z | 2021-12-29T12:35:13.000Z | options/base_option.py | lime-j/YTMT-Strategy-1 | aacc38c4e61b91e187cac81aa95500e0422d4d0f | [
"Apache-2.0"
] | 1 | 2021-10-18T08:00:22.000Z | 2021-10-18T08:00:22.000Z | import argparse
import models
# Architectures exposed by the ``models`` package: every lowercase,
# non-dunder, callable attribute, sorted by name (used e.g. for CLI choices).
model_names = sorted(name for name in models.__dict__
                     if name.islower() and not name.startswith("__")
                     and callable(models.__dict__[name]))
| 65.8125 | 174 | 0.652738 |
1c11a09e91a9f24c73ca32bb8e2bc358e52c7c63 | 2,277 | py | Python | bookstore/__init__.py | JanhaviSoni/Book-Recommendation-Analysis | d2697e1f2eb9b9b4e0bafc0dd43d486ceb3d1707 | [
"MIT"
] | 23 | 2021-01-15T15:46:45.000Z | 2021-11-16T12:26:58.000Z | bookstore/__init__.py | JanhaviSoni/Book-Recommendation-Analysis | d2697e1f2eb9b9b4e0bafc0dd43d486ceb3d1707 | [
"MIT"
] | 108 | 2021-01-13T11:02:31.000Z | 2022-03-21T17:47:24.000Z | bookstore/__init__.py | JanhaviSoni/Book-Recommendation-Analysis | d2697e1f2eb9b9b4e0bafc0dd43d486ceb3d1707 | [
"MIT"
] | 46 | 2021-01-14T17:27:28.000Z | 2022-03-20T10:12:24.000Z |
from flask import Flask, Response
from flask_basicauth import BasicAuth
from flask_cors import CORS, cross_origin
import os
#from flask_admin import Admin,AdminIndexView
#from flask_admin.contrib.sqla import ModelView
from flask_sqlalchemy import SQLAlchemy as _BaseSQLAlchemy
from flask_migrate import Migrate, MigrateCommand
from flask_script import Manager
from werkzeug.exceptions import HTTPException
from flask_login import LoginManager
from itsdangerous import URLSafeSerializer
# import psycopg2
# import pymysql
# import logging
# import warnings
# warnings.filterwarnings("ignore")
# Initializing Flask App
app = Flask(__name__)
app.secret_key="Vampire"
# This video demonstrates why we use CORS in our Flask App - https://www.youtube.com/watch?v=vWl5XcvQBx0
CORS(app)
app.config.from_object("config.DevelopmentConfig")
# Creating and Initializing db object of SQLAlchemy class
db = SQLAlchemy(app)
db.init_app(app)
migrate = Migrate(app, db, render_as_batch=True)
with app.app_context():
if db.engine.url.drivername == 'sqlite':
migrate.init_app(app, db, render_as_batch=True)
else:
migrate.init_app(app, db)
manager = Manager(app)
manager.add_command('db', MigrateCommand)
# Creating serializer object of URLSafeSerializer class for serializing session_token
serializer = URLSafeSerializer(app.secret_key)
# Here we set session_token as our user_loader.
from bookstore.client.views import client
from bookstore.admin.views import admin
app.register_blueprint(client)
app.register_blueprint(admin)
| 25.021978 | 104 | 0.798858 |
1c11dc94f130a2807798806bad63a6da530e4ff6 | 8,438 | py | Python | cobl/lexicon/management/commands/stats236.py | Bibiko/CoBL-public | 5092a0d01b7a13565c7da6bf2f6c52d648a2debe | [
"BSD-2-Clause"
] | null | null | null | cobl/lexicon/management/commands/stats236.py | Bibiko/CoBL-public | 5092a0d01b7a13565c7da6bf2f6c52d648a2debe | [
"BSD-2-Clause"
] | null | null | null | cobl/lexicon/management/commands/stats236.py | Bibiko/CoBL-public | 5092a0d01b7a13565c7da6bf2f6c52d648a2debe | [
"BSD-2-Clause"
] | 1 | 2020-04-30T11:02:51.000Z | 2020-04-30T11:02:51.000Z | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.core.management import BaseCommand
from cobl.lexicon.models import LanguageList, \
MeaningList, \
Meaning, \
Lexeme, \
CognateClass, \
CognateJudgement, \
LanguageClade, \
Clade
| 48.494253 | 79 | 0.508059 |
1c13e64266dfbb7d662d8dc0ddfc5df3b7bd9dd2 | 1,809 | py | Python | collation/test2.py | enabling-languages/dinka | 981ffd07e7468f692c4d17472083a3c5485987f8 | [
"MIT"
] | 1 | 2018-11-13T13:34:58.000Z | 2018-11-13T13:34:58.000Z | collation/test2.py | enabling-languages/dinka | 981ffd07e7468f692c4d17472083a3c5485987f8 | [
"MIT"
] | 6 | 2018-07-18T23:50:31.000Z | 2021-08-24T06:57:49.000Z | collation/test2.py | enabling-languages/dinka | 981ffd07e7468f692c4d17472083a3c5485987f8 | [
"MIT"
] | null | null | null | import pandas as pd
from icu import Collator, Locale, RuleBasedCollator
ddf = pd.read_csv("../word_frequency/unilex/din.txt", sep='\t', skiprows = range(2,5))
collator = Collator.createInstance(Locale('en_AU.UTF-8'))
# https://stackoverflow.com/questions/13838405/custom-sorting-in-pandas-dataframe/27009771#27009771
# https://gist.github.com/seanpue/e1cb846f676194ae77eb
sort_by_custom_dict = sort_pd(key=collator.getSortKey)
#ddf.iloc[sort_by_custom_dict(ddf.index)]
# ddf.iloc[sort_by_custom_dict(ddf['Form'])]
ddf.iloc[sort_by_custom_dict(ddf['Form'])]
#https://python3.wannaphong.com/2015/03/sort-python.html
# https://pyerror.com/detail/1316/
lexemes = ddf.Form
#lexemes2 = ddf['Form']
temp = lexemes.sort_values()
collation_rules = "&A<<aa<<<aA<<<Aa<<<AA<<<<<<<<<<<<<<<<\n&D<dh<<<dH<<<Dh<<<DH\n&E<<ee<<<eE<<<Ee<<<EE<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<\n&G<<<<\n&I<<ii<<<iI<<<Ii<<<II<<<<<<<<<<<<<<<<\n&N<nh<<<nH<<<Nh<<<NH<ny<<<nY<<<Ny<<<NH<<<<\n&O<<oo<<<oO<<<Oo<<<OO<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<\n&T<th<<<tH<<<Th<<<TH\n&U<<uu<<<uU<<<Uu<<<UU"
custom_collator = RuleBasedCollator(collation_rules)
temp.sort_values(key=lambda x: custom_collator.getSortKey(x) )
sort_by_custom_dict = sort_pd(key=custom_collator.getSortKey) | 37.6875 | 459 | 0.655611 |
1c154cd85ac8501efc488d575c2d366b73815f35 | 3,495 | py | Python | pkgs/ops-pkg/src/genie/libs/ops/dot1x/ios/tests/test_dot1x.py | jbronikowski/genielibs | 200a34e5fe4838a27b5a80d5973651b2e34ccafb | [
"Apache-2.0"
] | 94 | 2018-04-30T20:29:15.000Z | 2022-03-29T13:40:31.000Z | pkgs/ops-pkg/src/genie/libs/ops/dot1x/ios/tests/test_dot1x.py | jbronikowski/genielibs | 200a34e5fe4838a27b5a80d5973651b2e34ccafb | [
"Apache-2.0"
] | 67 | 2018-12-06T21:08:09.000Z | 2022-03-29T18:00:46.000Z | pkgs/ops-pkg/src/genie/libs/ops/dot1x/ios/tests/test_dot1x.py | jbronikowski/genielibs | 200a34e5fe4838a27b5a80d5973651b2e34ccafb | [
"Apache-2.0"
] | 49 | 2018-06-29T18:59:03.000Z | 2022-03-10T02:07:59.000Z | # Python
import unittest
from copy import deepcopy
from unittest.mock import Mock
# ATS
from pyats.topology import Device
# Genie
from genie.libs.ops.dot1x.ios.dot1x import Dot1X
from genie.libs.ops.dot1x.ios.tests.dot1x_output import Dot1xOutput
# Parser
from genie.libs.parser.ios.show_dot1x import ShowDot1xAllDetail, \
ShowDot1xAllStatistics, \
ShowDot1xAllSummary, \
ShowDot1xAllCount
if __name__ == '__main__':
unittest.main()
| 30.657895 | 74 | 0.58083 |
1c1603d1abab233380508e0466aae61f575bb066 | 5,218 | py | Python | script/analysis/check_transformation_matrices.py | lanl/nubhlight | 6c0f2abc05884538fe8e4e2e70a021b7c48a72c2 | [
"BSD-3-Clause"
] | 16 | 2020-02-05T22:59:21.000Z | 2022-03-18T11:05:37.000Z | script/analysis/check_transformation_matrices.py | lanl/nubhlight | 6c0f2abc05884538fe8e4e2e70a021b7c48a72c2 | [
"BSD-3-Clause"
] | 13 | 2020-03-06T02:10:48.000Z | 2021-06-15T20:00:30.000Z | script/analysis/check_transformation_matrices.py | lanl/nubhlight | 6c0f2abc05884538fe8e4e2e70a021b7c48a72c2 | [
"BSD-3-Clause"
] | 4 | 2020-02-21T04:59:44.000Z | 2020-12-10T21:42:12.000Z | # ======================================================================
# copyright 2020. Triad National Security, LLC. All rights
# reserved. This program was produced under U.S. Government contract
# 89233218CNA000001 for Los Alamos National Laboratory (LANL), which
# is operated by Triad National Security, LLC for the U.S. Department
# of Energy/National Nuclear Security Administration. All rights in
# the program are reserved by Triad National Security, LLC, and the
# U.S. Department of Energy/National Nuclear Security
# Administration. The Government is granted for itself and others
# acting on its behalf a nonexclusive, paid-up, irrevocable worldwide
# license in this material to reproduce, prepare derivative works,
# distribute copies to the public, perform publicly and display
# publicly, and to permit others to do so.
# ======================================================================
# Authors: Oleg Korobkin (korobkin@lanl.gov)
# Purpose:
# Provides a check of whether a coordinate transformation of the metric
# from code coordinates to Kerr-Schild coordinates produces correct
# metric, consistent with the closed form (as in e.g. Eq.(3)
# McKinney & Gammie 2004, https://arxiv.org/abs/astro-ph/0404512)
#
# Functions:
# - print_matrix
# - check_transformation_matrices
#
from math import *
import numpy as np
def print_matrix(matrix, fmt="%19.11e", tostdout=True) -> str:
    """Pretty-prints a matrix to a string (optionally, to stdout)

    Parameters
    ----------
    matrix : numpy.array([N,M])
        matrix to print
    fmt : str
        C-style format of each element (default: "%19.11e")
    tostdout : bool
        output to stdout (default: true)

    Returns
    -------
    str
        formatted output string
    """
    # Format each row as "[e1, e2, ...]", then join rows so that every row
    # after the first starts on a new line, indented by one space.
    formatted_rows = [
        "[" + ", ".join(fmt % element for element in row) + "]"
        for row in matrix
    ]
    result = "[" + ",\n ".join(formatted_rows) + "]"
    if tostdout:
        print(result)
    return result
def check_transformation_matrices(geom, a, ir, jth,
                                  verbose=True, tol=1e-12) -> bool:
    """Transforms the metric to spherical KS and compares with analytic formula

    Test 1: covariant metric, gcov, at A = {ir, jth}
    1.1 sample gcov and Lambda_h2bl_cov at A
    1.2 transform gcov to gks using transformation matrices
    1.3 compare to expected values at {r,th} at A

    Parameters
    ----------
    geom : dictionary
        nubhlight geom object
    a : Float
        dimensionless Kerr spin parameter
    ir : Integer
        index of sample point in radial direction
    jth : Integer
        index of sample point in angular theta-direction
    verbose : bool
        output steps to stdout
    tol : Float
        tolerance to relative error (wrt det g)

    Returns
    -------
    bool
        True if all checks passed

    Examples
    --------
    import hdf5_to_dict as io
    hdr = io.load_hdr("dump_00000010.h5")
    geom = io.load_geom(hdr,recalc=True)
    check_transformation_matrices(geom, -1, 64)
    """
    # sample gcov and h2bl at point A
    gcov_A = geom['gcov'][ir,jth]
    h2bl_A = geom['Lambda_h2bl_cov'][ir,jth]

    # sample r and theta, compute Kerr auxiliary quantities
    # (Delta, Sigma, A as in the standard Boyer-Lindquist notation)
    r = geom['r'][ir,jth,0]; r2 = r*r
    a2 = a*a
    th= geom['th'][ir,jth,0]
    sth2= sin(th)**2
    Delta= r2 - 2*r + a2
    Sigma= r2 + a2*cos(th)**2
    A = (r2 + a2)**2 - a2*Delta*sin(th)**2

    if verbose:
        print ("r = %19.11e" % r)
        print ("theta = %19.11e" % th)
        print ("a = %19.11e" % a)
        print ("Delta = %19.11e" % Delta)
        print ("Sigma = %19.11e" % Sigma)
        print ("A = %19.11e" % A)

        # output metric
        print ("gcov_A = ")
        print_matrix (gcov_A)
        print ("")

        # output transformation matrix
        print ("h2bl_A = ")
        print_matrix (h2bl_A)
        print ("")

    # transform gcov (code coordinates) to spherical KS at A:
    # gks_{ij} = Lambda^k_i Lambda^l_j gcov_{kl}
    gks_A = np.zeros([4,4])

    for i in range(4):
        for j in range(4):
            for k in range(4):
                for l in range(4):
                    gks_A[i,j] = gks_A[i,j] + h2bl_A[k,i]*h2bl_A[l,j]*gcov_A[k,l]
    if verbose:
        print ("gks_A = ")
        print_matrix (gks_A)
        print("")

    # expected closed-form Kerr-Schild metric components at {r, th}
    g_tt = -1. + 2.*r/Sigma
    g_rr = 1. + 2.*r/Sigma
    g_ff = sth2*(Sigma + a2*g_rr*sth2)
    g_thth = Sigma

    g_tr = 2*r/Sigma
    g_tf = -2*a*r*sth2/Sigma
    g_rf = -a*g_rr*sth2

    det_g = -Sigma**2*sth2

    if verbose:
        print ("Expected:")
        print (" g_tt = %19.11e" % g_tt )
        print (" g_rr = %19.11e" % g_rr )
        print (" g_thth = %19.11e" % g_thth)
        print (" g_ff = %19.11e" % g_ff )
        print (" g_tr = %19.11e" % g_tr )
        print (" g_rf = %19.11e" % g_rf )
        print (" g_tf = %19.11e" % g_tf )
        print ("")

    # check gks_A against the expected (symmetric) matrix
    gks_expected = np.array(
        [[ g_tt, g_tr, 0.0, g_tf],
         [ g_tr, g_rr, 0.0, g_rf],
         [ 0.0, 0.0, g_thth, 0.0],
         [ g_tf, g_rf, 0.0, g_ff]]
    )

    passed = True
    for i in range(4):
        for j in range(4):
            # mismatch is normalized by |det g| so the tolerance is scale-free
            if abs(gks_A[i,j] - gks_expected[i,j])/abs(det_g) > tol:
                passed = False
                if verbose:
                    print (f"WARNING: Significant mismatch in gks_A[{i},{j}]:")
                    print (" -- expected: %19.11e" % gks_expected[i,j])
                    print (" -- actual: %19.11e" % gks_A[i,j])

    return passed
| 28.358696 | 77 | 0.593139 |
1c16c296d9a00d573be4e9e818881918f7bcc86c | 73 | py | Python | holobot/discord/sdk/models/channel.py | rexor12/holobot | 89b7b416403d13ccfeee117ef942426b08d3651d | [
"MIT"
] | 1 | 2021-05-24T00:17:46.000Z | 2021-05-24T00:17:46.000Z | holobot/discord/sdk/models/channel.py | rexor12/holobot | 89b7b416403d13ccfeee117ef942426b08d3651d | [
"MIT"
] | 41 | 2021-03-24T22:50:09.000Z | 2021-12-17T12:15:13.000Z | holobot/discord/sdk/models/channel.py | rexor12/holobot | 89b7b416403d13ccfeee117ef942426b08d3651d | [
"MIT"
] | null | null | null | from dataclasses import dataclass
| 12.166667 | 33 | 0.767123 |
1c1762c8ad20949427e7a540afab16d1a42370e8 | 13,174 | py | Python | pynet/models/braingengan.py | claireguichon/pynet | 92706375e61fb5cb523548303b7d04769c9de134 | [
"CECILL-B"
] | 8 | 2020-06-23T16:30:52.000Z | 2021-07-27T15:07:18.000Z | pynet/models/braingengan.py | claireguichon/pynet | 92706375e61fb5cb523548303b7d04769c9de134 | [
"CECILL-B"
] | 8 | 2019-12-18T17:28:47.000Z | 2021-02-12T09:10:58.000Z | pynet/models/braingengan.py | claireguichon/pynet | 92706375e61fb5cb523548303b7d04769c9de134 | [
"CECILL-B"
] | 18 | 2019-08-19T14:17:48.000Z | 2021-12-20T03:56:39.000Z | # -*- coding: utf-8 -*-
##########################################################################
# NSAp - Copyright (C) CEA, 2020
# Distributed under the terms of the CeCILL-B license, as published by
# the CEA-CNRS-INRIA. Refer to the LICENSE file or to
# http://www.cecill.info/licences/Licence_CeCILL-B_V1-en.html
# for details.
##########################################################################
"""
3D MRI Brain Generation with Generative Adversarial Networks (BGGAN) with
Variational Auto Encoder (VAE).
"""
# Imports
import logging
import collections
import numpy as np
import torch
import torch.nn as nn
from torch.autograd import Variable
import torch.nn.functional as func
from pynet.utils import Networks
# Global parameters
logger = logging.getLogger("pynet")
def _downsample_shape(shape, nb_iterations=1, scale_factor=2):
shape = np.asarray(shape)
all_shapes = [shape.astype(int).tolist()]
for idx in range(nb_iterations):
shape = np.floor(shape / scale_factor)
all_shapes.append(shape.astype(int).tolist())
return all_shapes
| 37.214689 | 76 | 0.587141 |
1c192652847b82c847977050650f6dd9bf312075 | 7,587 | py | Python | research/object_detection/core/freezable_batch_norm_test.py | baranshad/models | aaf008855e9764f32d974e86f8e1f9cfddfafd9a | [
"Apache-2.0"
] | 3 | 2019-12-15T18:05:04.000Z | 2021-04-30T16:26:04.000Z | research/object_detection/core/freezable_batch_norm_test.py | baranshad/models | aaf008855e9764f32d974e86f8e1f9cfddfafd9a | [
"Apache-2.0"
] | 10 | 2020-01-28T23:15:47.000Z | 2022-03-12T00:11:34.000Z | research/object_detection/core/freezable_batch_norm_test.py | baranshad/models | aaf008855e9764f32d974e86f8e1f9cfddfafd9a | [
"Apache-2.0"
] | 5 | 2020-06-02T09:14:45.000Z | 2022-02-05T17:32:44.000Z | # Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for object_detection.core.freezable_batch_norm."""
import numpy as np
import tensorflow as tf
from object_detection.core import freezable_batch_norm
# Run this module's test cases when executed directly.
if __name__ == '__main__':
  tf.test.main()
| 41.010811 | 80 | 0.63991 |
1c1a874f462704bb21b985ac2653cb99f9e8cd06 | 351 | py | Python | offspect/gui/VWidgets/message.py | translationalneurosurgery/tool-offspect | 011dafb697e8542fc7c3cf8af8523af3ff704a14 | [
"MIT"
] | 1 | 2022-02-23T12:26:45.000Z | 2022-02-23T12:26:45.000Z | offspect/gui/VWidgets/message.py | neuromti/tool-offspect | 011dafb697e8542fc7c3cf8af8523af3ff704a14 | [
"MIT"
] | 51 | 2020-03-15T14:52:36.000Z | 2020-09-28T09:30:53.000Z | offspect/gui/VWidgets/message.py | neuromti/tool-offspect | 011dafb697e8542fc7c3cf8af8523af3ff704a14 | [
"MIT"
] | 1 | 2020-03-24T07:35:30.000Z | 2020-03-24T07:35:30.000Z | # from PyQt5.QtWidgets import QMessageBox
# def raise_error(message: str = "DEFAULT:Error Description:More Information"):
# box = QMessageBox()
# kind, msg, info = message.split(":")
# box.setIcon(QMessageBox.Critical)
# box.setWindowTitle(kind + " Error")
# box.setText(msg)
# box.setInformativeText(info)
# box.exec_()
| 29.25 | 79 | 0.669516 |
1c1a9dfbfb88778f3a6aa8f06f925295c99a8f4b | 3,182 | py | Python | Widen/LC759_Employee_Free_Time.py | crazywiden/Leetcode_daily_submit | 15637e260ab547022ac0c828dd196337bd8d50a3 | [
"MIT"
] | null | null | null | Widen/LC759_Employee_Free_Time.py | crazywiden/Leetcode_daily_submit | 15637e260ab547022ac0c828dd196337bd8d50a3 | [
"MIT"
] | null | null | null | Widen/LC759_Employee_Free_Time.py | crazywiden/Leetcode_daily_submit | 15637e260ab547022ac0c828dd196337bd8d50a3 | [
"MIT"
] | null | null | null | """
759. Employee Free Time
We are given a list schedule of employees, which represents the working time for each employee.
Each employee has a list of non-overlapping Intervals, and these intervals are in sorted order.
Return the list of finite intervals representing common, positive-length free time for all employees, also in sorted order.
(Even though we are representing Intervals in the form [x, y], the objects inside are Intervals, not lists or arrays. For example, schedule[0][0].start = 1, schedule[0][0].end = 2, and schedule[0][0][0] is not defined). Also, we wouldn't include intervals like [5, 5] in our answer, as they have zero length.
"""
# Line Swap method
# if we met a start, cnt += 1
# if we met an end, cnt -= 1
# time complexity -- O(NlogN), need sort all intervals
# Runtime: 96 ms, faster than 87.95% of Python3 online submissions for Employee Free Time.
# Memory Usage: 14.7 MB, less than 25.00% of Python3 online submissions for Employee Free Time.
"""
# Definition for an Interval.
class Interval:
def __init__(self, start: int = None, end: int = None):
self.start = start
self.end = end
"""
# priority queue
# if the current end is less than the smallest start
# then means there is a free time
# use priority queue to maintain the smallest start
# also only stort one of jobs of each person in the queue to save memory
# time complexity -- O(NlogC), C is the number of employee
"""
# Definition for an Interval.
class Interval:
def __init__(self, start: int = None, end: int = None):
self.start = start
self.end = end
"""
import heapq
| 38.804878 | 309 | 0.634192 |
1c1aaaf29009692c2f76cb9c4300ce895525d07d | 1,333 | py | Python | storitch/config.py | thomaserlang/storitch | dbcf97af547d9cb1ae5c3994654e8db03e43a253 | [
"MIT"
] | null | null | null | storitch/config.py | thomaserlang/storitch | dbcf97af547d9cb1ae5c3994654e8db03e43a253 | [
"MIT"
] | 1 | 2022-03-03T00:35:08.000Z | 2022-03-03T00:35:08.000Z | storitch/config.py | thomaserlang/storitch | dbcf97af547d9cb1ae5c3994654e8db03e43a253 | [
"MIT"
] | null | null | null | import os, yaml
# Default settings for the storitch service; consumed elsewhere in the
# package (os/yaml imported above suggest file-based overrides — TODO confirm).
config = {
    'debug': False,
    'port': 5000,
    'store_path': '/var/storitch',
    'pool_size': 5,
    'logging': {
        'level': 'warning',
        # path=None: no log file configured by default — presumably logs go
        # to the console in that case; verify against the logging setup code.
        'path': None,
        'max_size': 100 * 1000 * 1000,# ~ 95 mb
        'num_backups': 10,
    },
    # File extensions treated as images — TODO confirm against the handler
    # that reads this list.
    'image_exts': [
        '.jpg', '.jpeg', '.png', '.tiff', '.tif', '.gif',
        '.bmp', '.bmp2', '.bmp3', '.dcm', '.dicom', '.webp',
    ],
} | 28.361702 | 72 | 0.487622 |
1c1c06a5f2fd1746b831968ec2394fc2e3c54a63 | 3,727 | py | Python | keras/lstm-securitai/model/pipeline_invoke_python.py | PipelineAI/models | d8df07877aa8b10ce9b84983bb440af75e84dca7 | [
"Apache-2.0"
] | 44 | 2017-11-17T06:19:05.000Z | 2021-11-03T06:00:56.000Z | keras/lstm-securitai/model/pipeline_invoke_python.py | PipelineAI/models | d8df07877aa8b10ce9b84983bb440af75e84dca7 | [
"Apache-2.0"
] | 3 | 2018-08-09T14:28:17.000Z | 2018-09-10T03:32:42.000Z | keras/lstm-securitai/model/pipeline_invoke_python.py | PipelineAI/models | d8df07877aa8b10ce9b84983bb440af75e84dca7 | [
"Apache-2.0"
] | 21 | 2017-11-18T15:12:12.000Z | 2020-08-15T07:08:33.000Z | import io
import os
import numpy as np
import pandas
import json
import logging #<== Optional. Log to console, file, kafka
from pipeline_monitor import prometheus_monitor as monitor #<== Optional. Monitor runtime metrics
from pipeline_logger import log
import tensorflow as tf
from tensorflow.contrib import predictor
from keras.models import Sequential, load_model
from keras.preprocessing import sequence
from keras.preprocessing.text import Tokenizer
from collections import OrderedDict
_logger = logging.getLogger('pipeline-logger')
_logger.setLevel(logging.INFO)
_logger_stream_handler = logging.StreamHandler()
_logger_stream_handler.setLevel(logging.INFO)
_logger.addHandler(_logger_stream_handler)
__all__ = ['invoke'] #<== Optional. Being a good Python citizen.
_labels = { #<== Optional. Used for metrics/labels
'name': 'injection',
'tag': 'v1',
'type': 'tensorflow',
'runtime': 'python',
'chip': 'cpu',
}
def _initialize_upon_import(): #<== Optional. Called once upon server startup
    ''' Initialize / Restore Model Object.

    Loads the serialized Keras LSTM architecture from disk, restores its
    trained weights, and compiles it for use by invoke().  Runs once at
    module import time (see the _model assignment below), so all requests
    share a single model instance.
    '''
    model = load_model('securitai-lstm-model.h5')
    # Weights are stored in a separate HDF5 file from the architecture.
    model.load_weights('securitai-lstm-weights.h5')
    model.compile(loss = 'binary_crossentropy', optimizer = 'adam', metrics = ['accuracy'])
    return model
# This is called unconditionally at *module import time*...
_model = _initialize_upon_import()
#@log(labels=_labels, logger=_logger) #<== Optional. Sample and compare predictions
def invoke(request):
    '''Serve one prediction request end to end.

    The three stages -- decode the request, run the model, encode the
    response -- are each wrapped in a monitor() context so fine-grained
    per-stage metrics are exposed.
    '''
    with monitor(labels=_labels, name="transform_request"):
        model_input = _transform_request(request)
    with monitor(labels=_labels, name="invoke"):
        prediction = _model.predict(model_input)
    with monitor(labels=_labels, name="transform_response"):
        payload = _transform_response(prediction)
    return payload
if __name__ == '__main__':
    # Smoke test: push one recorded request (pipeline_test_request.csv)
    # through the full invoke() pipeline and print the raw response.
    with open('./pipeline_test_request.csv', 'rb') as fb:
        request_bytes = fb.read()
    response_bytes = invoke(request_bytes)
    print(response_bytes)
| 38.822917 | 127 | 0.648779 |
1c1cd0e44417cd753d2dd2376c6b05a4b1e765f1 | 9,465 | py | Python | src/act/common/aCTReport.py | ATLASControlTower/aCT | fb841bddbe086db9f0d620167c4a11ae4634ef4f | [
"Apache-2.0"
] | null | null | null | src/act/common/aCTReport.py | ATLASControlTower/aCT | fb841bddbe086db9f0d620167c4a11ae4634ef4f | [
"Apache-2.0"
] | 8 | 2019-12-12T14:41:50.000Z | 2020-12-04T21:06:44.000Z | src/act/common/aCTReport.py | ATLASControlTower/aCT | fb841bddbe086db9f0d620167c4a11ae4634ef4f | [
"Apache-2.0"
] | 4 | 2018-02-05T11:25:20.000Z | 2018-07-19T09:53:13.000Z | import argparse
import importlib
import os
import re
import signal
import subprocess
import sys
import time
import logging
from act.common import aCTLogger
from act.common.aCTConfig import aCTConfigAPP
from act.arc import aCTDBArc
# Script entry point; main() is defined elsewhere in this module.
if __name__ == '__main__':
    main()
| 32.979094 | 123 | 0.481247 |
1c1fc5837c8db7a7bfccee73b5ceb661f8e4a0b9 | 3,477 | py | Python | unittests/test_apiv2_user.py | mtcolman/django-DefectDojo | 76175aca446e077884bdb5e1d8e2a671a0840775 | [
"BSD-3-Clause"
] | 249 | 2016-09-06T21:04:40.000Z | 2018-01-19T15:59:44.000Z | unittests/test_apiv2_user.py | mtcolman/django-DefectDojo | 76175aca446e077884bdb5e1d8e2a671a0840775 | [
"BSD-3-Clause"
] | 275 | 2021-02-19T15:16:15.000Z | 2022-03-31T21:09:29.000Z | unittests/test_apiv2_user.py | mtcolman/django-DefectDojo | 76175aca446e077884bdb5e1d8e2a671a0840775 | [
"BSD-3-Clause"
] | 152 | 2016-09-06T21:04:54.000Z | 2018-01-18T08:52:24.000Z | from rest_framework.test import APITestCase, APIClient
from django.urls import reverse
from rest_framework.authtoken.models import Token
| 39.067416 | 100 | 0.581823 |
1c20efc185d3c6e0666c4268894d9c9ea652083d | 371 | py | Python | src/init.py | ankit-kushwaha-51/RESTful_API | 4513e8a058cb0200b41d47830b93b8a23ea38d7b | [
"MIT"
] | null | null | null | src/init.py | ankit-kushwaha-51/RESTful_API | 4513e8a058cb0200b41d47830b93b8a23ea38d7b | [
"MIT"
] | null | null | null | src/init.py | ankit-kushwaha-51/RESTful_API | 4513e8a058cb0200b41d47830b93b8a23ea38d7b | [
"MIT"
] | null | null | null | from flask import Flask
from src.models import db
from . import config
| 24.733333 | 80 | 0.752022 |
1c234ce8283c844cd60cb5760e7b8e156d4ade05 | 310 | py | Python | tests/helpers/examples/order/tasks.py | nicoddemus/dependencies | 74180e2c6098d8ad03bc53c5703bdf8dc61c3ed9 | [
"BSD-2-Clause"
] | null | null | null | tests/helpers/examples/order/tasks.py | nicoddemus/dependencies | 74180e2c6098d8ad03bc53c5703bdf8dc61c3ed9 | [
"BSD-2-Clause"
] | null | null | null | tests/helpers/examples/order/tasks.py | nicoddemus/dependencies | 74180e2c6098d8ad03bc53c5703bdf8dc61c3ed9 | [
"BSD-2-Clause"
] | null | null | null | from dependencies import Injector
from dependencies import this
from dependencies.contrib.celery import shared_task
from examples.order.commands import ProcessOrder
| 20.666667 | 51 | 0.783871 |
1c23b751e4593238c12f386d607b9cb57efba768 | 115 | py | Python | 001 - 050/ex032.py | SocrammBR/Desafios-Python-CursoEmVideo | bd2454a24134500343ece91b936c169d3a66f89e | [
"MIT"
] | null | null | null | 001 - 050/ex032.py | SocrammBR/Desafios-Python-CursoEmVideo | bd2454a24134500343ece91b936c169d3a66f89e | [
"MIT"
] | null | null | null | 001 - 050/ex032.py | SocrammBR/Desafios-Python-CursoEmVideo | bd2454a24134500343ece91b936c169d3a66f89e | [
"MIT"
] | null | null | null | ano = int(input('Digite o ano: '))
# Full Gregorian rule: multiples of 4 are leap years, except century
# years that are not multiples of 400 (1900 -> no, 2000 -> yes).
# The original `ano % 4 == 0` alone misclassifies those century years.
if ano % 4 == 0 and (ano % 100 != 0 or ano % 400 == 0):
    print ('Ele bissexto')
else:
print ('Ele no bissexto') | 23 | 34 | 0.6 |
1c245bb9f12c18581a3c0f8f320d7e2f0d45632a | 42 | py | Python | rn/__init__.py | vikneswaran20/rn | 33e0bfaf58bb8a5ec54c6d010035693b35e9909d | [
"BSD-3-Clause"
] | null | null | null | rn/__init__.py | vikneswaran20/rn | 33e0bfaf58bb8a5ec54c6d010035693b35e9909d | [
"BSD-3-Clause"
] | null | null | null | rn/__init__.py | vikneswaran20/rn | 33e0bfaf58bb8a5ec54c6d010035693b35e9909d | [
"BSD-3-Clause"
] | null | null | null | __version__ = '0.0.1'
__license__ = 'BSD'
| 14 | 21 | 0.666667 |
1c25a6c39831217b32cbaed42c9755b9bd09bf27 | 7,655 | py | Python | flexmeasures/cli/data_edit.py | FlexMeasures/flexmeasures | a4367976d37ac5721b8eb3ce8a2414595e52c678 | [
"Apache-2.0"
] | 12 | 2021-12-18T10:41:10.000Z | 2022-03-29T23:00:29.000Z | flexmeasures/cli/data_edit.py | FlexMeasures/flexmeasures | a4367976d37ac5721b8eb3ce8a2414595e52c678 | [
"Apache-2.0"
] | 103 | 2021-12-07T08:51:15.000Z | 2022-03-31T13:28:48.000Z | flexmeasures/cli/data_edit.py | FlexMeasures/flexmeasures | a4367976d37ac5721b8eb3ce8a2414595e52c678 | [
"Apache-2.0"
] | 3 | 2022-01-18T04:45:48.000Z | 2022-03-14T09:48:22.000Z | from datetime import timedelta
from typing import Union, List, Optional
import click
import pandas as pd
from flask import current_app as app
from flask.cli import with_appcontext
from flexmeasures import Sensor
from flexmeasures.data import db
from flexmeasures.data.schemas.generic_assets import GenericAssetIdField
from flexmeasures.data.schemas.sensors import SensorIdField
from flexmeasures.data.models.generic_assets import GenericAsset
from flexmeasures.data.models.time_series import TimedBelief
from flexmeasures.data.utils import save_to_db
app.cli.add_command(fm_edit_data)
def parse_attribute_value(
    attribute_null_value: bool,
    attribute_float_value: Optional[float] = None,
    attribute_bool_value: Optional[bool] = None,
    attribute_str_value: Optional[str] = None,
    attribute_int_value: Optional[int] = None,
) -> Union[float, int, bool, str, None]:
    """Return the one attribute value the caller selected.

    Exactly one option may be chosen (``attribute_null_value`` counts as
    chosen, as judged by ``single_true``); otherwise a ``ValueError`` is
    raised.  The selected value is coerced to its declared type.
    """
    typed_values = [
        attribute_float_value,
        attribute_bool_value,
        attribute_str_value,
        attribute_int_value,
    ]
    selections = [attribute_null_value] + [v is not None for v in typed_values]
    if not single_true(selections):
        raise ValueError("Cannot set multiple values simultaneously.")
    # Precedence mirrors the parameter order: null, float, bool, int, str.
    if attribute_null_value:
        return None
    if attribute_float_value is not None:
        return float(attribute_float_value)
    if attribute_bool_value is not None:
        return bool(attribute_bool_value)
    if attribute_int_value is not None:
        return int(attribute_int_value)
    return attribute_str_value
| 31.502058 | 189 | 0.679556 |
1c25d14605a86b5c88bbbc4dc7cdc7e4ecc3b9b3 | 216 | py | Python | semana2/mail_settings.py | ArseniumGX/bluemer-modulo2 | 24e5071b734de362dc47ef9d402c191699d15b43 | [
"MIT"
] | null | null | null | semana2/mail_settings.py | ArseniumGX/bluemer-modulo2 | 24e5071b734de362dc47ef9d402c191699d15b43 | [
"MIT"
] | null | null | null | semana2/mail_settings.py | ArseniumGX/bluemer-modulo2 | 24e5071b734de362dc47ef9d402c191699d15b43 | [
"MIT"
] | null | null | null | mail_settings = {
    "MAIL_SERVER": 'smtp.gmail.com',
    "MAIL_PORT": 465,
    # Port 465 is implicit-SSL SMTP, hence SSL on and STARTTLS off.
    "MAIL_USE_TLS": False,
    "MAIL_USE_SSL": True,
    # SECURITY: credentials are hard-coded in source control; they should
    # be loaded from environment variables or a secrets manager instead.
    "MAIL_USERNAME": 'c003.teste.jp@gmail.com',
    "MAIL_PASSWORD": 'C003.teste'
} | 27 | 47 | 0.625 |
1c25eef341ba86ac217ef07e6815f7ede8df615f | 3,071 | py | Python | frame/base/parser.py | dingjingmaster/blog_spider | 7a0885bf886166eac0caca4471ee9f6424be2225 | [
"MIT"
] | null | null | null | frame/base/parser.py | dingjingmaster/blog_spider | 7a0885bf886166eac0caca4471ee9f6424be2225 | [
"MIT"
] | null | null | null | frame/base/parser.py | dingjingmaster/blog_spider | 7a0885bf886166eac0caca4471ee9f6424be2225 | [
"MIT"
] | null | null | null | #!/usr/bin/env python3.6
# -*- encoding=utf8 -*-
import pyquery
"""
1. URL
2.
3.
1. PARSER_PASSAGE_URL URL
2. PARSER_PASSAGE_TITLE
3. PARSER_PASSAGE_DATE
4. PARSER_PASSAGE_CATEGORY
5. PARSER_PASSAGE_TAG
6. PARSER_PASSAGE_CONTENT
7. PARSER_PASSAGE_IMGURL URL
"""
| 31.659794 | 71 | 0.552914 |
1c2623d238b3de2bae87d9eae327584f97cd5fb9 | 6,341 | py | Python | tools/mkblocks.py | Commodore-Bench/u5remastered | 02c7ed86055e368b97d3c3c5ca26622782bd564d | [
"Apache-2.0"
] | 14 | 2020-02-07T06:55:40.000Z | 2022-01-15T19:54:00.000Z | tools/mkblocks.py | Commodore-Bench/u5remastered | 02c7ed86055e368b97d3c3c5ca26622782bd564d | [
"Apache-2.0"
] | 1 | 2021-11-21T23:06:24.000Z | 2021-11-21T23:06:24.000Z | tools/mkblocks.py | Commodore-Bench/u5remastered | 02c7ed86055e368b97d3c3c5ca26622782bd564d | [
"Apache-2.0"
] | 3 | 2020-02-22T13:48:18.000Z | 2021-04-06T17:09:43.000Z | #!/usr/bin/env python3
# ----------------------------------------------------------------------------
# Copyright 2019 Drunella
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ----------------------------------------------------------------------------
import os
import sys
import glob
import subprocess
import argparse
import hashlib
import traceback
import pprint
if __name__ == '__main__':
    # CLI entry point: delegate to main() (defined earlier in the full
    # module) and turn its return value into the process exit code.
    try:
        retval = main(sys.argv)
        sys.exit(retval)
    except Exception as e:
        # Top-level boundary: report the failure and exit non-zero.
        print(e)
        traceback.print_exc()
        sys.exit(1)
| 32.352041 | 109 | 0.598171 |
1c27318c3cab57ef8dbbc5271fbb6a638278cdd3 | 5,079 | py | Python | src/apetest/decode.py | boxingbeetle/apetest | c6dd7aaca014c64eec4bde7e755c4a3dec72404a | [
"BSD-3-Clause"
] | 6 | 2019-04-01T09:42:31.000Z | 2020-05-20T15:23:17.000Z | src/apetest/decode.py | boxingbeetle/apetest | c6dd7aaca014c64eec4bde7e755c4a3dec72404a | [
"BSD-3-Clause"
] | 31 | 2019-02-04T11:38:32.000Z | 2022-03-03T02:51:15.000Z | src/apetest/decode.py | boxingbeetle/apetest | c6dd7aaca014c64eec4bde7e755c4a3dec72404a | [
"BSD-3-Clause"
] | null | null | null | # SPDX-License-Identifier: BSD-3-Clause
"""
Text decode functions.
These functions can be used to get Unicode strings from a series of bytes.
"""
from codecs import (
BOM_UTF8,
BOM_UTF16_BE,
BOM_UTF16_LE,
BOM_UTF32_BE,
BOM_UTF32_LE,
CodecInfo,
lookup as lookup_codec,
)
from collections import OrderedDict
from typing import Dict, Iterable, Optional, Tuple
from apetest.typing import LoggerT
def encoding_from_bom(data: bytes) -> Optional[str]:
    """
    Look for a byte-order-marker at the start of the given C{bytes}.
    If found, return the encoding matching that BOM, otherwise return C{None}.
    """
    # UTF-32 must be tested before UTF-16: the UTF-32-LE BOM (FF FE 00 00)
    # begins with the UTF-16-LE BOM (FF FE), so checking UTF-16 first would
    # misreport UTF-32-LE data as UTF-16.
    if data.startswith(BOM_UTF8):
        return "utf-8"
    elif data.startswith(BOM_UTF32_LE) or data.startswith(BOM_UTF32_BE):
        return "utf-32"
    elif data.startswith(BOM_UTF16_LE) or data.startswith(BOM_UTF16_BE):
        return "utf-16"
    else:
        return None
def standard_codec_name(name: str) -> str:
    """
    Map a codec name to the preferred standardized version.

    The preferred spellings follow the IANA character-set registry:
    U{http://www.iana.org/assignments/character-sets/character-sets.xhtml}

    @param name:
        Text encoding name, in lower case.
    """
    # Python spells the ISO 8859 family without the second dash.
    prefix = "iso8859"
    if name.startswith(prefix):
        return "iso-8859" + name[len(prefix):]
    replacements = {
        "ascii": "us-ascii",
        "euc_jp": "euc-jp",
        "euc_kr": "euc-kr",
        "iso2022_jp": "iso-2022-jp",
        "iso2022_jp_2": "iso-2022-jp-2",
        "iso2022_kr": "iso-2022-kr",
    }
    return replacements.get(name, name)
def try_decode(data: bytes, encodings: Iterable[str]) -> Tuple[str, str]:
    """
    Attempt to decode text with each of the given encodings in turn.

    @param data:
        Encoded version of the text.
    @param encodings:
        Candidate encoding names, all in lower case.
    @return: C{(text, encoding)}
        The decoded string together with the encoding that produced it;
        the returned name is the preferred (standard) spelling, which may
        differ from the name passed in C{encodings}.
    @raise ValueError:
        If none of the encodings can decode the text.
    """
    # Resolve the requested names, dropping unknown codecs and collapsing
    # duplicates while keeping the caller's order.
    candidates: Dict[str, CodecInfo] = OrderedDict()
    for requested in encodings:
        try:
            info = lookup_codec(requested)
        except LookupError:
            continue
        candidates[standard_codec_name(info.name)] = info
    # Accept the first codec that strictly decodes the entire document.
    for std_name, info in candidates.items():
        try:
            decoded, nbytes = info.decode(data, "strict")
        except UnicodeDecodeError:
            continue
        if nbytes == len(data):
            return decoded, std_name
    raise ValueError("Unable to determine document encoding")
def decode_and_report(
    data: bytes,
    encoding_options: Iterable[Tuple[Optional[str], str]],
    logger: LoggerT,
) -> Tuple[str, str]:
    """
    Attempt to decode text, trying several suggested encodings in order.

    @param encoding_options: C{(encoding | None, source)*}
        Pairs of an encoding name and a description of where that
        suggestion came from; pairs with C{None} as the name are skipped.
    @param logger:
        Receives non-fatal problems, such as unknown encodings or
        suggestions that disagree with the encoding actually used.
    @return: C{(text, encoding)}
        The decoded string and the encoding used to decode it.
    @raise ValueError:
        If the text could not be decoded.
    """
    # Keep only the options that actually name an encoding.
    suggestions = [
        (encoding, source)
        for encoding, source in encoding_options
        if encoding is not None
    ]
    candidate_names = [encoding for encoding, _ in suggestions]
    # UTF-8 is always worth a try: it is the most common encoding these
    # days and a superset of ASCII, so it also covers old or simple
    # documents.
    candidate_names.append("utf-8")
    text, used_encoding = try_decode(data, candidate_names)
    # Tell the user when a suggestion disagrees with the encoding that
    # actually worked, or is merely a non-standard spelling of it.
    for suggested, source in suggestions:
        try:
            info = lookup_codec(suggested)
        except LookupError:
            logger.warning(
                '%s specifies encoding "%s", which is unknown to Python',
                source,
                suggested,
            )
            continue
        std_name = standard_codec_name(info.name)
        if std_name != used_encoding:
            logger.warning(
                '%s specifies encoding "%s", ' 'while actual encoding seems to be "%s"',
                source,
                suggested,
                used_encoding,
            )
        elif std_name != suggested:
            logger.info(
                '%s specifies encoding "%s", ' 'which is not the standard name "%s"',
                source,
                suggested,
                used_encoding,
            )
    return text, used_encoding
| 30.596386 | 88 | 0.620398 |
1c27cd5f5bfc380cd284613d082bf0df751fd64e | 43,000 | py | Python | utils.py | jiangycTarheel/Compositional-Auxseq | e4645a92c21c893cd320eb186c19d392bc147b43 | [
"MIT"
] | 8 | 2021-10-02T00:08:27.000Z | 2022-02-15T17:23:14.000Z | utils.py | jiangycTarheel/compositional-auxseq | e4645a92c21c893cd320eb186c19d392bc147b43 | [
"MIT"
] | null | null | null | utils.py | jiangycTarheel/compositional-auxseq | e4645a92c21c893cd320eb186c19d392bc147b43 | [
"MIT"
] | null | null | null | import os
import json
import gzip
from copy import deepcopy, copy
import numpy as np
import csv
import torch
import torch.nn as nn
import torch.nn.functional as F
from torch.utils.data import Dataset, DataLoader, RandomSampler
from transformers.tokenization_utils import trim_batch
# Special symbols
SOS_token = "<SOS>" # start of sentence
EOS_token = "<EOS>" # end of sentence
PAD_token = SOS_token # padding symbol
INPUT_TOKENS_SCAN = ['jump', 'opposite', 'right', 'twice', 'and', 'turn', 'thrice', 'run', 'after', 'around', 'left', 'walk', 'look']
OUTPUT_TOKENS_SCAN = ['I_TURN_RIGHT', 'I_JUMP', 'I_TURN_LEFT', 'I_RUN', 'I_WALK', 'I_LOOK']
# ACTION_TO_TEXT = {'I_TURN_RIGHT': 'right', 'I_JUMP': 'jump', 'I_TURN_LEFT': 'left', 'I_RUN': 'run', 'I_WALK': 'walk', 'I_LOOK': 'look'}
# def encode_file_iterator(tokenizer, data_path, max_length, pad_to_max_length=True, return_tensors="pt", max_examples=None):
# '''
# This provides a low-memory usage way of iterating thru all of the source/target lines for processing by JIT loader.
# '''
# if data_path[-3:] == '.gz':
# print('Data file is gzipped')
# f = gzip.open(data_path, "rt")
# else:
# print('Data file is plain text')
# f = open(data_path, "r", encoding='utf-8')
#
# for i, text in enumerate(f):
#
# tokenized = tokenizer.batch_encode_plus( [text + ' </s>'], max_length=max_length,
# pad_to_max_length=pad_to_max_length, return_tensors=return_tensors )
#
# yield tokenized
#
# if max_examples and i >= max_examples:
# break
#
# f.close()
# def convert_scan_actions_to_text(actions):
# return ' '.join([ACTION_TO_TEXT[_action] for _action in actions.split(' ')])
# def encode_scan_file(tokenizer, data, io_type, max_length, pad_to_max_length=True, return_tensors="pt", max_examples=None):
# examples = []
# # a = tokenizer.batch_encode_plus( ['right jump left run walk look' + ' <s> </s>'], max_length=max_length,
# # pad_to_max_length=pad_to_max_length, return_tensors=return_tensors )
# # print(a)
# # exit()
# for dp in data:
# input, output = dp[0], dp[1]
# if io_type == 'input':
# raw = input
# else:
# assert io_type == 'output'
# raw = convert_scan_actions_to_text(output)
#
# tokenized = tokenizer.batch_encode_plus( [raw + ' </s>'], max_length=max_length,
# pad_to_max_length=pad_to_max_length, return_tensors=return_tensors )
#
# if max_examples and i >= max_examples:
# break
# examples.append(tokenized)
#
# return examples
| 49.768519 | 158 | 0.548721 |
1c27f0ef99cad37dbc55d60ec83e6ae7afff0829 | 3,484 | py | Python | bridger/serializers/fields/related.py | intellineers/django-bridger | ed097984a99df7da40a4d01bd00c56e3c6083056 | [
"BSD-3-Clause"
] | 2 | 2020-03-17T00:53:23.000Z | 2020-07-16T07:00:33.000Z | bridger/serializers/fields/related.py | intellineers/django-bridger | ed097984a99df7da40a4d01bd00c56e3c6083056 | [
"BSD-3-Clause"
] | 76 | 2019-12-05T01:15:57.000Z | 2021-09-07T16:47:27.000Z | bridger/serializers/fields/related.py | intellineers/django-bridger | ed097984a99df7da40a4d01bd00c56e3c6083056 | [
"BSD-3-Clause"
] | 1 | 2020-02-05T15:09:47.000Z | 2020-02-05T15:09:47.000Z | from typing import Dict
from rest_framework import serializers
from rest_framework.fields import empty
from rest_framework.relations import ManyRelatedField
from rest_framework.request import Request
from .mixins import BridgerSerializerFieldMixin
from .types import BridgerType, ReturnContentType
| 34.156863 | 104 | 0.639208 |
1c28644cccaa9b7ccc104007acdb7fe41da7c7ad | 1,198 | py | Python | python/graphscope/experimental/nx/tests/algorithms/forward/operators/test_product.py | wenyuanyu/GraphScope | a40ccaf70557e608d8b091eb25ab04477f99ce21 | [
"Apache-2.0"
] | 2 | 2020-12-15T08:42:10.000Z | 2022-01-14T09:13:16.000Z | python/graphscope/experimental/nx/tests/algorithms/forward/operators/test_product.py | wenyuanyu/GraphScope | a40ccaf70557e608d8b091eb25ab04477f99ce21 | [
"Apache-2.0"
] | 1 | 2020-12-22T13:15:40.000Z | 2020-12-22T13:15:40.000Z | python/graphscope/experimental/nx/tests/algorithms/forward/operators/test_product.py | wenyuanyu/GraphScope | a40ccaf70557e608d8b091eb25ab04477f99ce21 | [
"Apache-2.0"
] | 1 | 2021-11-23T03:40:43.000Z | 2021-11-23T03:40:43.000Z | import networkx.algorithms.operators.tests.test_product
import pytest
from graphscope.experimental.nx.utils.compat import import_as_graphscope_nx
# Re-use networkx's test_product suite in this module via the compat
# helper; the usefixtures decorator is handed to the helper for it to
# apply (see import_as_graphscope_nx for exactly how).
import_as_graphscope_nx(networkx.algorithms.operators.tests.test_product,
                        decorators=pytest.mark.usefixtures("graphscope_session"))
| 27.860465 | 81 | 0.724541 |
1c29c6717099eef5374f68ca8190022df79ffc51 | 4,574 | py | Python | Footy/UnsupportedBantzStrings.py | schleising/banter-bot | 3e51453ae993d2c26dc51464a3cef3875a6be3c9 | [
"Apache-2.0"
] | null | null | null | Footy/UnsupportedBantzStrings.py | schleising/banter-bot | 3e51453ae993d2c26dc51464a3cef3875a6be3c9 | [
"Apache-2.0"
] | 2 | 2022-03-27T10:44:38.000Z | 2022-03-28T19:24:39.000Z | Footy/UnsupportedBantzStrings.py | schleising/banter-bot | 3e51453ae993d2c26dc51464a3cef3875a6be3c9 | [
"Apache-2.0"
] | 1 | 2022-03-28T11:45:47.000Z | 2022-03-28T11:45:47.000Z | # {team} -> Name of team
# {name} -> Name of person who supports team
teamMatchStarted: list[str] = [
"{team} are shit",
"{team} cunts",
"Dirty {team}",
"Dirty {team}, dirty {name}",
]
drawing: list[str] = [
"{team} level, this is a shit match",
"Boring old {team}",
"Happy with how it's going, {name}?",
"Yawn...",
"{team} wankers",
"How can you support this rubbish, {name}?",
"You get the feeling that {team} don't really want this",
"No passion from {team}, {name}",
"If a game of football is like making love to a beautiful woman, this {team} game is a 10 hand job from a swivel-eyed misfit",
"This {team} match is like a game of chess. But with more players and only one piece",
]
teamLeadByOne: list[str] = [
"{team} cheats, the ref's a cunt",
"That was never a goal for {team}",
"{team} don't deserve that",
"Bollocks",
"That should go to VAR",
"Bit fortunuate for {team}",
"Can't imagine {team} will keep this lead",
"Lucky goal for {team}",
"{team} got lucky there",
"{team} aren't good enough to stay ahead",
"Offside!",
]
teamExtendingLead: list[str] = [
"There's no way {team} deserve this lead",
"Have {team} paid the ref?",
"This is bullshit",
"The ref's a cunt, {name}'s a cunt",
"The ref's a cunt, {team} are cunts, {name}'s a cunt",
"Something has gone seriously wrong with this country",
"When I voted for Brexit, I didn't vote for this",
"At least Boris remains in charge, we've still got that",
"Richard Wanger would be turning in his grave",
"Liberal elite bullshit",
"That was so offside",
"VAR!",
"Is the linesman OK?",
"If only {name}'s wife was as dirty as this game",
]
teamLosingLead: list[str] = [
"Lazy old {team}, lazy old {name}",
"{team} are throwing it away",
"{team} are rubbish",
"{team} fucking it up again",
"We really are being treated to some world class flouncing from {team} today",
"Brace yourself, {name}. This is going to hurt",
"This is brown trouser time for {team}",
"I hope {name} brought a spare pair of underpants",
"I see {team} are playing their B Team. B for Bullshit",
]
teamDeficitOfOne: list[str] = [
"This is more like it from {team}",
"Oh dear...",
"{team} wankers",
"How are you feeling, {name}?",
"Bit disappointing, {name}?",
"Not looking good for {team}, {name}",
"You must be furious, {name}",
"{team} have just got no heart",
"This is what happens when you try to buy success",
"All that money spent, {name}, and for what?",
]
teamExtendingDeficit: list[str] = [
"Starting to feel a bit sorry for {team}",
"Never mind, {name}, there's always the next game",
"Poor {team}",
"Whoops...",
"Oh dear, everything OK, {name}?",
"Hey {name}, where'd you get your keeper?\nPOUNDSHOP !! POUNDSHOP !!",
"{team} can't raise themselves for this game, typical",
"A team like {team} have such a proud history, but what we see today is just embarrassing",
"{team} clearly not up for it today",
"{team} are letting you down, {name}",
"Watching {team} is like watching a bunch of cavemen: Neanderthal",
]
teamLosingDeficit: list[str] = [
"Too little too late for {team}",
"{team} won't come back from here",
"The ref's a cunt",
"This is awful",
"What a mess",
"Well this is an unmitigated shit show",
]
teamWon: list[str] = [
"That was a shit game",
"There's no way {team} deserved that",
"Fuck you, {name} !!",
"This will go down in history...\nAs the most tedious game I have ever had the misfortune to witness",
]
teamLost: list[str] = [
"Justice done, {team} lost",
"Job done for {team}?",
"Job done, {name}?",
"{name} !!?",
"Probably the best {team} could hope for",
"Everything OK, {name}?",
"{team} continue to disappoint",
"Well if the football thing doesn't work out for {team}, they can always consider a career on the stage",
"{team} set the bar low",
"{team} fail to meet their already low expectations",
]
# Lines posted when the match ends in a draw; the {team}/{name}
# placeholders are documented at the top of this module.
teamDrew: list[str] = [
    "Another uninspiring result for {team}",
    "Thanks for nothing, {team}",
    "Well that's 90 minutes we won't get back, thanks {team}",
    "Another draw for {team}",
    "Boring old {team}",
    "You should be happy with that result, {name}",
    "If I could pick one highlight from this {team} game it would be when it finally ended.",
    "I think {name} will be happy with {team}'s performance today.",
]
| 34.390977 | 131 | 0.622868 |
1c29cd725d84cf362de4c0b82dab4a78ea1506c5 | 128 | py | Python | bench/fibrec.py | codr7/alisp | 05ac47ab2c28683373af4ec80e5a94937390fa6c | [
"MIT"
] | 8 | 2021-09-04T10:18:49.000Z | 2022-01-10T01:05:13.000Z | bench/fibrec.py | codr7/alisp | 05ac47ab2c28683373af4ec80e5a94937390fa6c | [
"MIT"
] | null | null | null | bench/fibrec.py | codr7/alisp | 05ac47ab2c28683373af4ec80e5a94937390fa6c | [
"MIT"
] | 2 | 2021-10-05T11:00:14.000Z | 2021-10-11T05:54:59.000Z | from bench import bench
print(bench(100, '''
def fib(n):
return n if n < 2 else fib(n-1) + fib(n-2)
''', '''
fib(20)
'''))
| 14.222222 | 44 | 0.546875 |
1c2a4f856378f5f6862c783cf6cd99f449623c3f | 3,271 | py | Python | nlpir/native/classifier.py | NLPIR-team/nlpir-python | 029f81e69ee561725fa017dce09cfd55acb34d20 | [
"MIT"
] | 18 | 2021-01-01T03:07:17.000Z | 2022-03-20T11:25:46.000Z | nlpir/native/classifier.py | yangyaofei/nlpir-python | af7bca9e2c3ba5f07316364da8f5e46a2bbc7c52 | [
"MIT"
] | 23 | 2020-12-04T07:10:09.000Z | 2022-03-10T09:39:26.000Z | nlpir/native/classifier.py | yangyaofei/nlpir-python | af7bca9e2c3ba5f07316364da8f5e46a2bbc7c52 | [
"MIT"
] | 13 | 2020-10-23T13:38:26.000Z | 2022-03-18T12:10:10.000Z | # coding=utf-8
from nlpir.native.nlpir_base import NLPIRBase
from ctypes import c_bool, c_char_p, c_int, POINTER, Structure, c_float
| 27.957265 | 112 | 0.597982 |
1c2af8e5727d2303652df4218b453994acacde5b | 1,875 | py | Python | tests/param/get_param_type_spec_test.py | nickgaya/bravado-core | 16e752963bfceb4adfa43724085bc4127eefcd59 | [
"BSD-3-Clause"
] | 122 | 2015-04-22T17:31:18.000Z | 2021-11-08T10:29:57.000Z | tests/param/get_param_type_spec_test.py | nickgaya/bravado-core | 16e752963bfceb4adfa43724085bc4127eefcd59 | [
"BSD-3-Clause"
] | 364 | 2015-04-10T22:19:23.000Z | 2022-02-25T08:55:10.000Z | tests/param/get_param_type_spec_test.py | nickgaya/bravado-core | 16e752963bfceb4adfa43724085bc4127eefcd59 | [
"BSD-3-Clause"
] | 118 | 2015-04-20T15:11:53.000Z | 2021-12-09T10:03:34.000Z | # -*- coding: utf-8 -*-
import pytest
from mock import Mock
from bravado_core.exception import SwaggerMappingError
from bravado_core.operation import Operation
from bravado_core.param import get_param_type_spec
from bravado_core.param import Param
from bravado_core.spec import Spec
| 31.25 | 76 | 0.678933 |
1c2b194566a9e96dba834338ec915a2289eb1837 | 682 | py | Python | functions/markdown-to-html/markdown2html.py | truls/faas-profiler | d54ca0d9926f38c693f616ba4d08414aea823f51 | [
"MIT"
] | null | null | null | functions/markdown-to-html/markdown2html.py | truls/faas-profiler | d54ca0d9926f38c693f616ba4d08414aea823f51 | [
"MIT"
] | null | null | null | functions/markdown-to-html/markdown2html.py | truls/faas-profiler | d54ca0d9926f38c693f616ba4d08414aea823f51 | [
"MIT"
] | null | null | null | # Copyright (c) 2019 Princeton University
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
from markdown import markdown
import base64
import json
import base64
| 29.652174 | 108 | 0.690616 |
1c2b4a1c07a03c84645790de2fd147b0a49af942 | 779 | py | Python | Python Files/Dataset_Formating/Audio_splicing.py | brennanMosher/Music-Genre-Recognition-using-a-Machine-Learning-Appraoch | 7834fe5d709e894322ad76ef118067febaa78bce | [
"MIT"
] | 1 | 2021-04-13T16:22:27.000Z | 2021-04-13T16:22:27.000Z | Python Files/Dataset_Formating/Audio_splicing.py | brennanMosher/Music-Genre-Recognition-using-a-Machine-Learning-Appraoch | 7834fe5d709e894322ad76ef118067febaa78bce | [
"MIT"
] | null | null | null | Python Files/Dataset_Formating/Audio_splicing.py | brennanMosher/Music-Genre-Recognition-using-a-Machine-Learning-Appraoch | 7834fe5d709e894322ad76ef118067febaa78bce | [
"MIT"
] | null | null | null | from pydub import AudioSegment
import os
import math
from pathlib import Path
'''
Splice wav files into multiple segments.
'''
LENGTH = 3 # Set splice length in seconds (duration of each output wav segment)
| 25.129032 | 89 | 0.65982 |
1c2b5a500905db564cebad53847b80d4840a37d9 | 3,947 | py | Python | manpages.py | mba811/dash-manpages-zh | 94f7345f48084c2fa22ae00996920d1309458649 | [
"Apache-2.0"
] | 1 | 2020-04-09T10:51:01.000Z | 2020-04-09T10:51:01.000Z | manpages.py | mba811/dash-manpages-zh | 94f7345f48084c2fa22ae00996920d1309458649 | [
"Apache-2.0"
] | null | null | null | manpages.py | mba811/dash-manpages-zh | 94f7345f48084c2fa22ae00996920d1309458649 | [
"Apache-2.0"
] | 1 | 2020-09-16T03:04:18.000Z | 2020-09-16T03:04:18.000Z | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
@author: Wu Liang
@contact:
@date: 2014/06/23
"""
import os
import sqlite3
import urllib2
import shutil
import tarfile
import hashlib
import codecs
from mako.template import Template
from pyquery import PyQuery
# Step 1: resolve paths and docset metadata.  (Python-2 era script: it also
# imports urllib2 at the top of the file.)
currentPath = os.path.join(os.path.dirname(os.path.realpath(__file__)))
name = "manpages"
baseName = "manpages-zh"
output = baseName + ".docset"
appName = "dash-" + baseName
tarFileName = baseName + ".tgz"
feedName = baseName + ".xml"
version = "1.5.0"
docsetPath = os.path.join(currentPath, output, "Contents", "Resources", "Documents")
# Step 2: Copy the HTML Documentation
# Rebuild index.html: empty the <body> and refill it with one link per page
# found under the docset's html/ directory; also remember each page for the
# SQLite index built in Steps 4-5.
fin = codecs.open(os.path.join(docsetPath, "index.html"), "r", "utf-8")
content = fin.read()
fin.close()
jQuery = PyQuery(content)
jQuery.find("body").empty()
fileNames = []
itemTemplate = Template("<a href='html/${fileName}'>${name}</a><br />\n")
for fileName in os.listdir(os.path.join(docsetPath, "html")):
    fileNames.append({
        "name": fileName.split(".")[0],
        "fileName": fileName
    })
    jQuery.find("body").append(itemTemplate.render(name = fileName.split(".")[0], fileName = fileName))
fin = codecs.open(os.path.join(docsetPath, "index.html"), "w", "utf-8")
newContent = jQuery.html()
fin.write(newContent)
fin.close()
# Step 3: create the Info.plist file
# NOTE(review): the template repeats the dashIndexFilePath key/value twice —
# presumably a copy/paste slip; appears harmless to Dash but worth confirming.
infoTemplate = Template('''<?xml version="1.0" encoding="UTF-8"?>
<plist version="1.0">
<dict>
    <key>CFBundleIdentifier</key>
    <string>${name}</string>
    <key>CFBundleName</key>
    <string>${name}</string>
    <key>DocSetPlatformFamily</key>
    <string>${name}</string>
    <key>dashIndexFilePath</key>
    <string>index.html</string>
    <key>dashIndexFilePath</key>
    <string>index.html</string>
    <key>isDashDocset</key><true/>
    <key>isJavaScriptEnabled</key><true/>
</dict>
</plist>''')
infoPlistFile = os.path.join(currentPath, output, "Contents", "Info.plist")
fin = open(infoPlistFile, "w")
fin.write(infoTemplate.render(name = name))
fin.close()
# Step 4: Create the SQLite Index
# Recreate docSet.dsidx from scratch so stale entries never survive a rebuild.
dbFile = os.path.join(currentPath, output, "Contents", "Resources", "docSet.dsidx")
if os.path.exists(dbFile):
    os.remove(dbFile)
db = sqlite3.connect(dbFile)
cursor = db.cursor()
try:
    cursor.execute("DROP TABLE searchIndex;")
except Exception:
    pass
cursor.execute('CREATE TABLE searchIndex(id INTEGER PRIMARY KEY, name TEXT, type TEXT, path TEXT);')
cursor.execute('CREATE UNIQUE INDEX anchor ON searchIndex (name, type, path);')
insertTemplate = Template("INSERT OR IGNORE INTO searchIndex(name, type, path) VALUES ('${name}', '${type}', '${path}');")
# Step 5: Populate the SQLite Index
# One row per generated page; INSERT OR IGNORE together with the unique index
# from Step 4 de-duplicates repeated (name, type, path) triples.
# NOTE(review): the SQL is built by string templating — a page name containing
# a single quote would break the statement; acceptable here only because the
# values come from local filenames.
for result in fileNames:
    sql = insertTemplate.render(name = result["name"], type = "Builtin", path = "html/" + result["fileName"])
    # Parenthesised print behaves identically on Python 2 (grouping parens
    # around a single value) and Python 3 (function call); the original bare
    # `print sql` statement is a SyntaxError under Python 3.
    print(sql)
    cursor.execute(sql)
db.commit()
db.close()
# Step 6: copy icon
# Dash shows icon.png / icon@2x.png from the docset root.
shutil.copyfile(os.path.join(currentPath, "icon.png"),
                os.path.join(currentPath, output, "icon.png"))
shutil.copyfile(os.path.join(currentPath, "icon@2x.png"),
                os.path.join(currentPath, output, "icon@2x.png"))
# Step 7:
# Archive the finished .docset into dist/<baseName>.tgz.
if not os.path.exists(os.path.join(currentPath, "dist")):
    os.makedirs(os.path.join(currentPath, "dist"))
tarFile = tarfile.open(os.path.join(currentPath, "dist", tarFileName), "w:gz")
for root, dirNames, fileNames in os.walk(output):
    for fileName in fileNames:
        fullPath = os.path.join(root, fileName)
        tarFile.add(fullPath)
tarFile.close()
# Step 8: feed url
# Write the Dash feed XML: version, SHA-1 of the tarball and its download URL.
feedTemplate = Template('''<entry>
<version>${version}</version>
<sha1>${sha1Value}</sha1>
<url>https://raw.githubusercontent.com/magicsky/${appName}/master/dist/${tarFileName}</url>
</entry>''')
fout = open(os.path.join(currentPath, "dist", tarFileName), "rb")
sha1Value = hashlib.sha1(fout.read()).hexdigest()
fout.close()
fin = open(os.path.join(currentPath, feedName), "w")
fin.write(feedTemplate.render(sha1Value = sha1Value, appName = appName, tarFileName = tarFileName, version = version))
fin.close()
| 30.835938 | 122 | 0.698759 |
1c2c1ecff02208f628aa2e65eae53abaf0c94bd6 | 1,527 | py | Python | docs/conf.py | alexweav/nisystemlink-clients-python | f19a30907a7fef536043ecbddc5a755e5fedf846 | [
"MIT"
] | null | null | null | docs/conf.py | alexweav/nisystemlink-clients-python | f19a30907a7fef536043ecbddc5a755e5fedf846 | [
"MIT"
] | null | null | null | docs/conf.py | alexweav/nisystemlink-clients-python | f19a30907a7fef536043ecbddc5a755e5fedf846 | [
"MIT"
] | null | null | null | import os
import sys
sys.path.insert(0, os.path.abspath(".."))
# --------------------------------------------------------------------------------------
# Project metadata shown in the rendered documentation.
project = "nisystemlink"
copyright = "2020, National Instruments"
author = "National Instruments"
# The short X.Y version
version = "0.1"
# The full version, including alpha/beta/rc tags
release = "0.1.3"
# --------------------------------------------------------------------------------------
extensions = [
    "sphinx.ext.autodoc",
    "sphinx.ext.napoleon",
    "sphinx.ext.viewcode",
    "sphinx_autodoc_typehints",
    "docs.cleanup",
]
master_doc = "index"
html_theme = "sphinx_rtd_theme"
html_extra_path = [
    "../LICENSE",
]
# Warn about every unresolved cross-reference, except the stdlib/typing
# targets listed below (no intersphinx mapping is configured for them here).
nitpicky = True
nitpick_ignore = [
    ("py:class", "datetime.datetime"),
    ("py:class", "datetime.timedelta"),
    ("py:class", "pathlib.Path"),
    ("py:data", "typing.Any"),
    ("py:data", "typing.Awaitable"),
    ("py:data", "typing.Dict"),
    ("py:data", "typing.Iterable"),
    ("py:data", "typing.List"),
    ("py:data", "typing.Optional"),
    ("py:data", "typing.Sequence"),
    ("py:data", "typing.Tuple"),
    ("py:data", "typing.Union"),
]
autodoc_default_options = {
    "inherited-members": True,
    "special-members": "__init__",
    "no-private-members": True,
}
# Don't let napoleon force methods to be included in the docs; use autodoc flags and our
# own docs.cleanup module for that.
napoleon_include_init_with_doc = False
napoleon_include_private_with_doc = False
napoleon_include_special_with_doc = False
| 26.789474 | 88 | 0.587426 |
1c2c9eed7b32e658c90b6a2885b2e30dd90f1dbc | 2,702 | py | Python | multinet/api/views/common.py | multinet-app/multinet-api | a658d787f0fb9ba415ed85a1e37c29953486287f | [
"Apache-2.0"
] | null | null | null | multinet/api/views/common.py | multinet-app/multinet-api | a658d787f0fb9ba415ed85a1e37c29953486287f | [
"Apache-2.0"
] | 91 | 2021-03-15T19:00:15.000Z | 2022-03-11T00:04:05.000Z | multinet/api/views/common.py | multinet-app/multinet-api | a658d787f0fb9ba415ed85a1e37c29953486287f | [
"Apache-2.0"
] | 1 | 2022-02-05T15:53:04.000Z | 2022-02-05T15:53:04.000Z | from typing import Dict, List
from arango.cursor import Cursor
from django.http.response import Http404
from django.shortcuts import get_object_or_404
from rest_framework.pagination import LimitOffsetPagination
from rest_framework.request import Request
from rest_framework_extensions.mixins import NestedViewSetMixin
from multinet.api.models import Workspace, WorkspaceRole
from multinet.api.utils.arango import ArangoQuery
| 33.358025 | 96 | 0.703923 |
1c2cf799737827ae82cb008c68687ac40ab5260f | 2,613 | py | Python | scripts/tests/generate_host_files.py | NDevTK/cel | e97226416b6e12245564bfc1c3631d610d62f052 | [
"BSD-3-Clause"
] | null | null | null | scripts/tests/generate_host_files.py | NDevTK/cel | e97226416b6e12245564bfc1c3631d610d62f052 | [
"BSD-3-Clause"
] | null | null | null | scripts/tests/generate_host_files.py | NDevTK/cel | e97226416b6e12245564bfc1c3631d610d62f052 | [
"BSD-3-Clause"
] | null | null | null | # Copyright 2018 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import argparse
import logging
import os
import sys
if __name__ == '__main__':
  # NOTE(review): ParseArgs and ConfigureLogging are not defined in the part
  # of the script shown here — confirm they exist in the full file.
  args = ParseArgs()
  ConfigureLogging(args)
  logging.info("Arguments: %s" % args)
  # Fail fast on bad paths before writing anything.
  if not os.path.exists(args.template):
    raise ValueError('Template host file not found: %s' % args.template)
  if not os.path.exists(args.destination):
    raise ValueError('Destination directory not found: %s' % args.destination)
  # Generate all the host files based off the arguments passed.
  with open(args.template, 'r') as f:
    template = f.read()
  # One <project_id>.host.textpb per semicolon-separated project id, produced
  # by substituting the placeholder tokens in the template text.
  for project_id in args.projects.split(';'):
    filename = "%s.host.textpb" % project_id
    destination = os.path.join(args.destination, filename)
    with open(destination, 'w') as f:
      logging.info("Generating %s" % destination)
      content = template.replace("<project_id>", project_id)
      content = content.replace("<storage_bucket>", args.storage_bucket)
      content = content.replace("<storage_prefix>", args.storage_prefix)
      f.write(content)
  sys.exit(0)
| 31.107143 | 78 | 0.677 |
1c2db146a81095258082a5e01445b3cddf1eab20 | 8,037 | py | Python | users/models.py | moshthepitt/probsc | 9b8cab206bb1c41238e36bd77f5e0573df4d8e2d | [
"MIT"
] | null | null | null | users/models.py | moshthepitt/probsc | 9b8cab206bb1c41238e36bd77f5e0573df4d8e2d | [
"MIT"
] | null | null | null | users/models.py | moshthepitt/probsc | 9b8cab206bb1c41238e36bd77f5e0573df4d8e2d | [
"MIT"
] | null | null | null | from django.conf import settings
from django.db import models
from django.utils.translation import ugettext_lazy as _
from django.utils.encoding import python_2_unicode_compatible
from django.urls import reverse
from django_extensions.db.models import TimeStampedModel
from mptt.models import MPTTModel, TreeForeignKey
from .managers import UserProfileManager, DepartmentManager, PositionManager
User = settings.AUTH_USER_MODEL
| 33.911392 | 79 | 0.630459 |
1c2e98b8bfffd32e002ee05aa4877b21658d72a4 | 59,466 | py | Python | azure-devops/azure/devops/released/build/build_client.py | imafidon2020/azure-devops-python-api | ea9075f0c54dbc10115a23a8b7ad34feacbbdc14 | [
"MIT"
] | 248 | 2019-05-10T14:20:24.000Z | 2022-03-29T12:17:27.000Z | azure-devops/azure/devops/released/build/build_client.py | AzureMentor/azure-devops-python-api | 3838e91d662dba1f77b43ad560ca23c1cb7e84e8 | [
"MIT"
] | 147 | 2019-05-08T14:20:49.000Z | 2022-03-28T19:36:21.000Z | azure-devops/azure/devops/released/build/build_client.py | AzureMentor/azure-devops-python-api | 3838e91d662dba1f77b43ad560ca23c1cb7e84e8 | [
"MIT"
] | 121 | 2019-05-08T06:24:39.000Z | 2022-03-01T12:58:02.000Z | # --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
# Generated file, DO NOT EDIT
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------------------------
from msrest import Serializer, Deserializer
from ...client import Client
from ...v5_1.build import models
| 54.505958 | 411 | 0.618437 |
1c2ecd7374ac4b43cc0c12a94a556e95164106a8 | 190 | py | Python | tests/test_train.py | hugobb/sgda | 69dcda47bb2c5b76d46ead32eb46ab5fb5e5e6d3 | [
"MIT"
] | 1 | 2022-02-16T04:20:02.000Z | 2022-02-16T04:20:02.000Z | tests/test_train.py | hugobb/sgda | 69dcda47bb2c5b76d46ead32eb46ab5fb5e5e6d3 | [
"MIT"
] | null | null | null | tests/test_train.py | hugobb/sgda | 69dcda47bb2c5b76d46ead32eb46ab5fb5e5e6d3 | [
"MIT"
] | null | null | null | import unittest
from gamesopt.train import train, TrainConfig | 27.142857 | 45 | 0.736842 |
1c2ee79c50e5332807a24a1c5c70089c0090c76c | 91 | py | Python | loadCSVdata.py | christostsekouronas/academyposttestanalysis | 913a0c13ad0482927a323b2fb3a97a8e2ca26517 | [
"MIT"
] | null | null | null | loadCSVdata.py | christostsekouronas/academyposttestanalysis | 913a0c13ad0482927a323b2fb3a97a8e2ca26517 | [
"MIT"
] | null | null | null | loadCSVdata.py | christostsekouronas/academyposttestanalysis | 913a0c13ad0482927a323b2fb3a97a8e2ca26517 | [
"MIT"
] | null | null | null | import pandas as pd
| 13 | 30 | 0.692308 |
1c2f9886c30209c8f8c18348757a2729fc8d5b30 | 1,832 | py | Python | sdk/applicationinsights/azure-mgmt-applicationinsights/azure/mgmt/applicationinsights/v2015_05_01/models/_application_insights_management_client_enums.py | iscai-msft/azure-sdk-for-python | 83715b95c41e519d5be7f1180195e2fba136fc0f | [
"MIT"
] | 8 | 2021-01-13T23:44:08.000Z | 2021-03-17T10:13:36.000Z | sdk/applicationinsights/azure-mgmt-applicationinsights/azure/mgmt/applicationinsights/v2015_05_01/models/_application_insights_management_client_enums.py | iscai-msft/azure-sdk-for-python | 83715b95c41e519d5be7f1180195e2fba136fc0f | [
"MIT"
] | 226 | 2019-07-24T07:57:21.000Z | 2019-10-15T01:07:24.000Z | sdk/applicationinsights/azure-mgmt-applicationinsights/azure/mgmt/applicationinsights/v2015_05_01/models/_application_insights_management_client_enums.py | iscai-msft/azure-sdk-for-python | 83715b95c41e519d5be7f1180195e2fba136fc0f | [
"MIT"
] | 2 | 2020-05-21T22:51:22.000Z | 2020-05-26T20:53:01.000Z | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from enum import Enum
| 17.960784 | 76 | 0.600437 |
1c2fb781ddcd4218fd8a81658d8b1820f7658753 | 425 | py | Python | setup.py | dhruvdcoder/allennlp-wandb | 160dceb1f4cec8e893b856d333bc302748afdd74 | [
"MIT"
] | null | null | null | setup.py | dhruvdcoder/allennlp-wandb | 160dceb1f4cec8e893b856d333bc302748afdd74 | [
"MIT"
] | null | null | null | setup.py | dhruvdcoder/allennlp-wandb | 160dceb1f4cec8e893b856d333bc302748afdd74 | [
"MIT"
] | null | null | null | from setuptools import setup, find_packages
# Runtime dependencies; wandb is pinned to the exact version these utilities
# were written against.
install_requires = [
    "allennlp>=0.9.0",
    "wandb==0.8.15",
]
setup(
    name='allennlp_wandb',
    version='0.0.1',
    description='Utilities to use allennlp with wandb',
    packages=find_packages(
        exclude=["*.tests", "*.tests.*", "tests.*", "tests"]),
    # Ship the py.typed marker so type checkers use the inline annotations.
    package_data={'allennlp_wandb': ['py.typed']},
    install_requires=install_requires,
    zip_safe=False)
| 25 | 62 | 0.647059 |
1c30848fe8db838bf2ea7ab14ebea0d07ae3d297 | 2,311 | py | Python | setup.py | mark-mishyn/django-axes | dfaf67810abd21a0e76200a4906c1bffdd4fa9c9 | [
"MIT"
] | null | null | null | setup.py | mark-mishyn/django-axes | dfaf67810abd21a0e76200a4906c1bffdd4fa9c9 | [
"MIT"
] | null | null | null | setup.py | mark-mishyn/django-axes | dfaf67810abd21a0e76200a4906c1bffdd4fa9c9 | [
"MIT"
] | null | null | null | #!/usr/bin/env python
from setuptools import setup, find_packages
setup(
    name="django-axes",
    description="Keep track of failed login attempts in Django-powered sites.",
    # PyPI long description: README followed by the changelog.
    long_description="\n".join(
        [
            open("README.rst", encoding="utf-8").read(),
            open("CHANGES.rst", encoding="utf-8").read(),
        ]
    ),
    keywords="authentication django pci security",
    author=", ".join(
        [
            "Josh VanderLinden",
            "Philip Neustrom",
            "Michael Blume",
            "Alex Clark",
            "Camilo Nova",
            "Aleksi Hakli",
        ]
    ),
    author_email="security@jazzband.co",
    maintainer="Jazzband",
    maintainer_email="security@jazzband.co",
    url="https://github.com/jazzband/django-axes",
    project_urls={
        "Documentation": "https://django-axes.readthedocs.io/",
        "Source": "https://github.com/jazzband/django-axes",
        "Tracker": "https://github.com/jazzband/django-axes/issues",
    },
    license="MIT",
    package_dir={"axes": "axes"},
    # The version is derived from the git tag by setuptools_scm, not hard-coded.
    use_scm_version=True,
    setup_requires=["setuptools_scm"],
    python_requires="~=3.6",
    install_requires=["django>=1.11", "django-appconf>=1.0.3", "django-ipware>=2.0.2"],
    include_package_data=True,
    packages=find_packages(),
    classifiers=[
        "Development Status :: 5 - Production/Stable",
        "Environment :: Web Environment",
        "Environment :: Plugins",
        "Framework :: Django",
        "Framework :: Django :: 1.11",
        "Framework :: Django :: 2.2",
        "Framework :: Django :: 3.0",
        "Intended Audience :: Developers",
        "Intended Audience :: System Administrators",
        "License :: OSI Approved :: MIT License",
        "Operating System :: OS Independent",
        "Programming Language :: Python",
        "Programming Language :: Python :: 3",
        "Programming Language :: Python :: 3.6",
        "Programming Language :: Python :: 3.7",
        "Programming Language :: Python :: 3.8",
        "Programming Language :: Python :: Implementation :: CPython",
        "Programming Language :: Python :: Implementation :: PyPy",
        "Topic :: Internet :: Log Analysis",
        "Topic :: Security",
        "Topic :: System :: Logging",
    ],
    zip_safe=False,
)
| 34.492537 | 87 | 0.581134 |
1c30c09f1bd3070f07f121e14a73ab704dad99b4 | 106 | py | Python | achievements/admin.py | peterkrauz/rpg-achievements-django | c65ec12237b2bee9f12d259fedd5f18934ff6c96 | [
"Apache-2.0"
] | 1 | 2021-08-31T10:52:55.000Z | 2021-08-31T10:52:55.000Z | achievements/admin.py | peterkrauz/rpg-achievements-django | c65ec12237b2bee9f12d259fedd5f18934ff6c96 | [
"Apache-2.0"
] | null | null | null | achievements/admin.py | peterkrauz/rpg-achievements-django | c65ec12237b2bee9f12d259fedd5f18934ff6c96 | [
"Apache-2.0"
] | null | null | null | from django.contrib import admin
from achievements import models
# Make Achievement records manageable through the Django admin site.
admin.site.register(models.Achievement)
| 21.2 | 39 | 0.849057 |
1c30e979a316677653e10a7d840b2373d881b549 | 1,925 | py | Python | src/modules/loss.py | ab3llini/BlindLess | 46c50fb2748b9d372044d00b901f0cde91946684 | [
"MIT"
] | 1 | 2022-03-19T09:19:12.000Z | 2022-03-19T09:19:12.000Z | src/modules/loss.py | ab3llini/BlindLess | 46c50fb2748b9d372044d00b901f0cde91946684 | [
"MIT"
] | 1 | 2020-02-06T18:26:07.000Z | 2020-02-06T18:26:07.000Z | src/modules/loss.py | ab3llini/BlindLess | 46c50fb2748b9d372044d00b901f0cde91946684 | [
"MIT"
] | null | null | null | from torch.nn import CrossEntropyLoss
| 31.048387 | 75 | 0.619221 |
1c31541017e2e3db5152ae18abbb5211d1ab50d4 | 6,481 | py | Python | analyze_tls.py | khushhallchandra/CN-project | 405ce86e4e65e116531aa19287b8d05c959b1441 | [
"MIT"
] | null | null | null | analyze_tls.py | khushhallchandra/CN-project | 405ce86e4e65e116531aa19287b8d05c959b1441 | [
"MIT"
] | null | null | null | analyze_tls.py | khushhallchandra/CN-project | 405ce86e4e65e116531aa19287b8d05c959b1441 | [
"MIT"
] | null | null | null | import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
if __name__ == '__main__':
    # Benchmark result CSVs: five payload sizes per protocol, with and
    # without TLS (the "http2" files hold the SPDY runs, per the plot labels).
    files_http1 = ['./results/benchmark_size/http1_txt1.csv', './results/benchmark_size/http1_txt2.csv', './results/benchmark_size/http1_txt3.csv', './results/benchmark_size/http1_txt4.csv', './results/benchmark_size/http1_txt5.csv']
    files_http1_tls = ['./results/benchmark_size/http1_tls_txt1.csv', './results/benchmark_size/http1_tls_txt2.csv', './results/benchmark_size/http1_tls_txt3.csv', './results/benchmark_size/http1_tls_txt4.csv', './results/benchmark_size/http1_tls_txt5.csv']
    files_http2 = ['./results/benchmark_size/http2_txt1.csv', './results/benchmark_size/http2_txt2.csv', './results/benchmark_size/http2_txt3.csv', './results/benchmark_size/http2_txt4.csv', './results/benchmark_size/http2_txt5.csv']
    files_http2_tls = ['./results/benchmark_size/http2_tls_txt1.csv', './results/benchmark_size/http2_tls_txt2.csv', './results/benchmark_size/http2_tls_txt3.csv', './results/benchmark_size/http2_tls_txt4.csv', './results/benchmark_size/http2_tls_txt5.csv']
    time_tot_http2, time_contentTransfer_http2 = [], []
    std_tot_http2, std_contentTransfer_http2 = [], []
    time_tot_http1, time_contentTransfer_http1 = [], []
    std_tot_http1, std_contentTransfer_http1 = [], []
    time_tot_http2_tls, time_contentTransfer_http2_tls = [], []
    std_tot_http2_tls, std_contentTransfer_http2_tls = [], []
    time_tot_http1_tls, time_contentTransfer_http1_tls = [], []
    std_tot_http1_tls, std_contentTransfer_http1_tls = [], []
    # NOTE(review): main() is not defined in the part of the file shown —
    # it is expected to parse one CSV and return (content-transfer mean,
    # total mean, content-transfer std, total std); confirm in full module.
    # The stored bands are 2*std, i.e. roughly a 95% interval.
    for f in files_http2:
        t1, t2, std1, std2 = main(f)
        time_contentTransfer_http2.append(t1)
        time_tot_http2.append(t2)
        std_contentTransfer_http2.append(2*std1)
        std_tot_http2.append(2*std2)
    for f in files_http1:
        t1, t2, std1, std2 = main(f)
        time_contentTransfer_http1.append(t1)
        time_tot_http1.append(t2)
        std_contentTransfer_http1.append(2*std1)
        std_tot_http1.append(2*std2)
    for f in files_http2_tls:
        t1, t2, std1, std2 = main(f)
        time_contentTransfer_http2_tls.append(t1)
        time_tot_http2_tls.append(t2)
        std_contentTransfer_http2_tls.append(2*std1)
        std_tot_http2_tls.append(2*std2)
    for f in files_http1_tls:
        t1, t2, std1, std2 = main(f)
        time_contentTransfer_http1_tls.append(t1)
        time_tot_http1_tls.append(t2)
        std_contentTransfer_http1_tls.append(2*std1)
        std_tot_http1_tls.append(2*std2)
    # Payload sizes (x axis, log scale below); convert to numpy arrays so the
    # band edges (mean +/- 2*std) can be computed element-wise.
    x = [100, 1000, 10000, 100000, 1000000]
    time_tot_http2, time_contentTransfer_http2 = np.array(time_tot_http2), np.array(time_contentTransfer_http2)
    std_tot_http2, std_contentTransfer_http2 = np.array(std_tot_http2), np.array(std_contentTransfer_http2)
    time_tot_http1, time_contentTransfer_http1 = np.array(time_tot_http1), np.array(time_contentTransfer_http1)
    std_tot_http1, std_contentTransfer_http1 = np.array(std_tot_http1), np.array(std_contentTransfer_http1)
    time_tot_http2_tls, time_contentTransfer_http2_tls = np.array(time_tot_http2_tls), np.array(time_contentTransfer_http2_tls)
    std_tot_http2_tls, std_contentTransfer_http2_tls = np.array(std_tot_http2_tls), np.array(std_contentTransfer_http2_tls)
    time_tot_http1_tls, time_contentTransfer_http1_tls = np.array(time_tot_http1_tls), np.array(time_contentTransfer_http1_tls)
    std_tot_http1_tls, std_contentTransfer_http1_tls = np.array(std_tot_http1_tls), np.array(std_contentTransfer_http1_tls)
    # Figure 1: content-transfer time vs payload size, with +/-2*std bands.
    fig, ax = plt.subplots()
    ax.grid()
    ax.plot(x, time_contentTransfer_http1, 'o-', color='r', label="HTTP1")
    ax.plot(x, time_contentTransfer_http1_tls, 'o-', color='g', label="HTTP1_with_tls")
    ax.plot(x, time_contentTransfer_http2, 'o-', color='b', label="SPDY")
    ax.plot(x, time_contentTransfer_http2_tls, 'o-', color='k', label="SPDY_with_tls")
    ax.fill_between(x, time_contentTransfer_http1 - std_contentTransfer_http1, time_contentTransfer_http1 + std_contentTransfer_http1, color='gray', alpha=0.3)
    ax.fill_between(x, time_contentTransfer_http2 - std_contentTransfer_http2, time_contentTransfer_http2 + std_contentTransfer_http2, color='gray', alpha=0.3)
    ax.fill_between(x, time_contentTransfer_http1_tls - std_contentTransfer_http1_tls, time_contentTransfer_http1_tls + std_contentTransfer_http1_tls, color='gray', alpha=0.3)
    ax.fill_between(x, time_contentTransfer_http2_tls - std_contentTransfer_http2_tls, time_contentTransfer_http2_tls + std_contentTransfer_http2_tls, color='gray', alpha=0.3)
    # ax.errorbar(x, time_contentTransfer_http2, yerr=std_contentTransfer_http2, fmt='-', color='r', label="HTTP2")
    # ax.errorbar(x, time_contentTransfer_quic, yerr=std_contentTransfer_quic, fmt='-', color='b', label="QUIC")
    ax.set_xlabel('Size of data (Length)')
    ax.set_ylabel('Time (in ms)')
    ax.legend()
    ax.set_xscale('log')
    ax.set_title('Comparison of Time Taken for Data Transfer with TLS ON/OFF')
    fig.savefig('results/plots/time_contentTransfer_tls.png', dpi=fig.dpi)
    # Figure 2: same comparison for total request time.
    fig, ax = plt.subplots()
    ax.grid()
    ax.plot(x, time_tot_http1, 'o-', color='r', label="HTTP1")
    ax.plot(x, time_tot_http1_tls, 'o-', color='g', label="HTTP1_with_tls")
    ax.plot(x, time_tot_http2, 'o-', color='b', label="SPDY")
    ax.plot(x, time_tot_http2_tls, 'o-', color='k', label="SPDY_with_tls")
    ax.fill_between(x, time_tot_http1 - std_tot_http1, time_tot_http1 + std_tot_http1, color='gray', alpha=0.3)
    ax.fill_between(x, time_tot_http2 - std_tot_http2, time_tot_http2 + std_tot_http2, color='gray', alpha=0.3)
    ax.fill_between(x, time_tot_http1_tls - std_tot_http1_tls, time_tot_http1_tls + std_tot_http1_tls, color='gray', alpha=0.3)
    ax.fill_between(x, time_tot_http2_tls - std_tot_http2_tls, time_tot_http2_tls + std_tot_http2_tls, color='gray', alpha=0.3)
    # ax.errorbar(x, time_tot_http2, yerr=std_tot_http2, fmt='-', color='r', label="HTTP2")
    # ax.errorbar(x, time_tot_quic, yerr=std_tot_quic, fmt='-', color='b', label="QUIC")
    ax.set_xlabel('Size of data (Length)')
    ax.set_ylabel('Time (in ms)')
    ax.legend()
    ax.set_xscale('log')
    ax.set_title('Comparison of Total Time with TLS ON/OFF')
fig.savefig('results/plots/total_time_tls.png', dpi=fig.dpi) | 54.923729 | 257 | 0.738158 |
1c32015a3c35228c38c5bac706f794e1cdc33050 | 7,376 | py | Python | validation/utils/m1.py | PedrV/stfX | 017436cd4ade7f0ea95185d82408697c43ac6ce6 | [
"MIT"
] | null | null | null | validation/utils/m1.py | PedrV/stfX | 017436cd4ade7f0ea95185d82408697c43ac6ce6 | [
"MIT"
] | null | null | null | validation/utils/m1.py | PedrV/stfX | 017436cd4ade7f0ea95185d82408697c43ac6ce6 | [
"MIT"
] | null | null | null | import unittest
import os
from matplotlib import pyplot as plt
from shapely import geometry, affinity
# Index of the x / y component inside a vertex pair such as (x, y).
X_COORDINATE = 0
Y_COORDINATE = 1
def extract_x_y(polygon: list) -> (list, list):
    """Extract the x and y coordinates as two separate lists.

    :param polygon: sequence of vertices, each indexable as (x, y, ...)
    :return: tuple ``(x_list, y_list)`` with coordinates in vertex order
    """
    # Index 0 is the x component and index 1 the y component (the values of
    # the module's X_COORDINATE / Y_COORDINATE constants); inlined here so
    # the helper is self-contained.  Comprehensions replace the original
    # manual append loop.
    x_list = [vertex[0] for vertex in polygon]
    y_list = [vertex[1] for vertex in polygon]
    return (x_list, y_list)
def save_fig(dir: str):
    """Save the current matplotlib figure as ``<dir>/m1.png`` and clear it.

    The parameter shadows the builtin ``dir``; kept as-is for interface
    compatibility with existing callers.
    """
    plt.savefig(dir + '/m1.png')
    plt.clf()
def plot_polygons(hull: list, min_hull: list, perceived_poly: list, real_poly: list, dir: str = None):
    """Plot both hulls and both polygons in a single figure.

    ``hull`` is the joint convex hull (filled red) and ``min_hull`` the
    smaller of the two individual hulls (filled blue).  The perceived
    polygon is drawn as a red line with 'o' markers, the real one as a blue
    line with 'x' markers.  When ``dir`` is given the figure is written
    there as m1.png via save_fig; nothing is shown interactively.
    """
    h1_x, h1_y = extract_x_y(hull)
    h2_x, h2_y = extract_x_y(min_hull)
    p1_x, p1_y = extract_x_y(perceived_poly)
    p2_x, p2_y = extract_x_y(real_poly)
    # Figure settings
    fig = plt.figure()
    # fig.suptitle('Convex hull area (red) VS real representation area (blue)')
    plt.xlabel('x')
    plt.ylabel('y')
    # Plotting hulls (the last two hex digits of each color are the alpha channel)
    plt.fill(h1_x, h1_y, color="#FF000020")
    plt.fill(h2_x, h2_y, color="#0000FF20")
    # Plotting polygons lines
    plt.plot(p1_x, p1_y, color="#FF000060")  # Red perceived poly
    plt.plot(p2_x, p2_y, color="#0000FF60")  # Blue real poly
    # Plotting polygons points
    for p in perceived_poly:
        plt.plot(p[X_COORDINATE], p[Y_COORDINATE], 'o', color="#FF0000A0")
    for p in real_poly:
        plt.plot(p[X_COORDINATE], p[Y_COORDINATE], 'x', color="#0000FFA0")
    # plt.show()
    if dir is not None:
        save_fig(dir)
def surveyor_formula(polygon: list) -> float:
    """Area of a simple polygon via the shoelace (surveyor's) formula.

    Accepts the vertex ring either open or closed (first vertex repeated at
    the end); a closed ring is trimmed first so no edge is counted twice.
    The absolute value makes the result independent of winding order.
    """
    # Drop the duplicated closing vertex, if present.
    parsed_poly = polygon[:-1] if polygon[0] == polygon[-1] else polygon
    area = 0
    # Starting at i == -1 pairs the last vertex with the first, closing the
    # ring implicitly without copying the list.
    for i in range(-1, len(parsed_poly) - 1):
        x0, y0 = parsed_poly[i][0], parsed_poly[i][1]
        x1, y1 = parsed_poly[i + 1][0], parsed_poly[i + 1][1]
        area += x0 * y1 - y0 * x1
    return abs(area / 2)
def polygon_to_vertices_list(polygon: geometry.Polygon) -> list:
    """Return the exterior-ring vertices of ``polygon`` as a list.

    NOTE(review): shapely's exterior ring is closed, so the first vertex is
    expected to appear again as the last element — surveyor_formula handles
    that case explicitly; confirm against the shapely version in use.
    """
    return list(polygon.exterior.coords)
def apply_transformations(initial_representation: list, events: list) -> list:
    """Apply the transformations in the events list to the initial representation.

    Accumulates every TRANSLATION vector, ROTATION angle and UNIFORM_SCALE
    factor found in ``item["events"]`` across all items, then applies them
    to the polygon once, in the fixed order scale -> rotate -> translate
    (scale and rotation about the origin).  Returns the transformed vertex
    list (annotation fixed: the original ``-> float`` did not match the
    returned value).
    NOTE(review): collapsing the event sequence like this assumes the events
    are expressed relative to the original frame, i.e. that their interleaved
    order does not matter — confirm with the producer of ``events``.  The
    rotation angle is passed to shapely in degrees (affinity.rotate default).
    """
    scale = 1
    rot_angle = 0
    trans_vector = [0, 0]
    for item in events:
        for event in item["events"]:
            if event["type"] == "TRANSLATION":
                trans_vector[X_COORDINATE] += event["trigger"]["transformation"][X_COORDINATE]
                trans_vector[Y_COORDINATE] += event["trigger"]["transformation"][Y_COORDINATE]
            elif event["type"] == "ROTATION":
                rot_angle += event["trigger"]["transformation"]
            elif event["type"] == "UNIFORM_SCALE":
                scale *= event["trigger"]["transformation"]
    # Apply multiplication
    polygon = geometry.Polygon(initial_representation)
    s_polygon = affinity.scale(polygon,
                               xfact=scale,
                               yfact=scale,
                               origin=(0, 0))
    r_s_polygon = affinity.rotate(s_polygon,
                                  rot_angle,
                                  origin=(0, 0))
    t_r_s_polygon = affinity.translate(r_s_polygon,
                                       xoff=trans_vector[0],
                                       yoff=trans_vector[1])
    return polygon_to_vertices_list(t_r_s_polygon)
def apply_m1(real_representation: list, perceived_representation: list, dir: str = None) -> float:
"""Apply the metric M1 and obtain its result, between 0 and 1"""
joint_point_set = real_representation + perceived_representation
# Getting necessary hulls
real_convex_hull = geometry.MultiPoint(real_representation).convex_hull
perceived_hull = geometry.MultiPoint(perceived_representation).convex_hull
convex_hull = geometry.MultiPoint(joint_point_set).convex_hull
# Getting vertices of hulls
real_vertices = polygon_to_vertices_list(real_convex_hull)
perceived_vertices = polygon_to_vertices_list(perceived_hull)
joint_vertices = polygon_to_vertices_list(convex_hull)
# Getting the min area
real_area = surveyor_formula(real_vertices)
perceived_area = surveyor_formula(perceived_vertices)
if real_area <= perceived_area:
min_area = real_area
min_vertices = real_vertices
else:
min_area = perceived_area
min_vertices = perceived_vertices
plot_polygons(hull=joint_vertices,
min_hull=min_vertices,
perceived_poly=perceived_representation,
real_poly=real_representation,
dir=dir)
return min_area / surveyor_formula(joint_vertices)
if __name__ == '__main__':
unittest.main()
| 33.990783 | 102 | 0.590564 |
1c334c43ec9647ed0e0ec846ea0ec8b0f1abcbfa | 1,332 | py | Python | movefiles.py | linhailan/JPG-PNG-to-MNIST-NN-Format | c2ff84cb8d2dc6cd49c4d462b4d8ea2ba4620719 | [
"Apache-2.0"
] | null | null | null | movefiles.py | linhailan/JPG-PNG-to-MNIST-NN-Format | c2ff84cb8d2dc6cd49c4d462b4d8ea2ba4620719 | [
"Apache-2.0"
] | null | null | null | movefiles.py | linhailan/JPG-PNG-to-MNIST-NN-Format | c2ff84cb8d2dc6cd49c4d462b4d8ea2ba4620719 | [
"Apache-2.0"
] | null | null | null | import os
from PIL import Image
from array import *
from random import shuffle
import shutil
# Load from and save to
Names = [['./training-images','train'], ['./test-images','test']]
for name in Names:
FileList = []
for dirname in os.listdir(name[0]):
path = os.path.join(name[0],dirname)
print(path,":",len(os.listdir(path)))
| 25.615385 | 65 | 0.553303 |
1c33dae046d778c2acefa8efab3c4ae7565e1bc3 | 348 | py | Python | spark_work.py | nszceta/spark-python-celery-demo | c5b03be4bb96699f8e41aa8a42fecd4c25c76331 | [
"MIT"
] | 8 | 2016-01-19T15:59:36.000Z | 2018-04-25T09:00:57.000Z | spark_work.py | nszceta/spark-python-celery-demo | c5b03be4bb96699f8e41aa8a42fecd4c25c76331 | [
"MIT"
] | null | null | null | spark_work.py | nszceta/spark-python-celery-demo | c5b03be4bb96699f8e41aa8a42fecd4c25c76331 | [
"MIT"
] | null | null | null | import sys
from pyspark import SparkContext
import json
print('spark got python path -> ' + str(sys.executable))
logfile = sys.argv[1]
sc = SparkContext()
logdata = sc.textFile(logfile).cache()
a_count = logdata.filter(lambda s: 'a' in s).count()
b_count = logdata.filter(lambda s: 'b' in s).count()
print(json.dumps({'a': a_count, 'b': b_count}))
| 31.636364 | 56 | 0.70977 |
1c33fa15ddbf9c5dfc357e4226f51b2734c6f579 | 738 | py | Python | nodes/List/GetTaskRenderListIndex.py | atticus-lv/RenderNode | 8a4797a2186b76fedebc5d634cff298e69089474 | [
"Apache-2.0"
] | 17 | 2021-11-21T09:26:55.000Z | 2022-03-09T06:56:01.000Z | nodes/List/GetTaskRenderListIndex.py | atticus-lv/RenderNode | 8a4797a2186b76fedebc5d634cff298e69089474 | [
"Apache-2.0"
] | 1 | 2021-12-05T13:02:48.000Z | 2021-12-06T08:02:34.000Z | nodes/List/GetTaskRenderListIndex.py | atticus-lv/RenderNode | 8a4797a2186b76fedebc5d634cff298e69089474 | [
"Apache-2.0"
] | 4 | 2021-11-23T14:49:34.000Z | 2021-12-30T15:04:58.000Z | import bpy
from bpy.props import *
from ...nodes.BASE.node_base import RenderNodeBase
| 26.357143 | 81 | 0.730352 |
1c3521323cf7d57dc8b2b240d95a181b90cc3144 | 1,188 | py | Python | src/recognizeDigit.py | RsTaK/Sudoku | 8daa0a06906ce61d9a71586a8d28a3931ca4e5e3 | [
"MIT"
] | 2 | 2020-01-22T14:32:40.000Z | 2021-12-23T20:42:52.000Z | src/recognizeDigit.py | RsTaK/Sudoku | 8daa0a06906ce61d9a71586a8d28a3931ca4e5e3 | [
"MIT"
] | 4 | 2020-11-13T18:54:24.000Z | 2022-02-10T02:10:00.000Z | src/recognizeDigit.py | RsTaK/Sudoku | 8daa0a06906ce61d9a71586a8d28a3931ca4e5e3 | [
"MIT"
] | 1 | 2020-01-22T14:02:50.000Z | 2020-01-22T14:02:50.000Z | from keras.models import load_model
import cv2
import pickle
import keras.backend as K
import numpy as np
from src.model_path import MODEL_PATH
'''def predict(self, cell):
model = load_model('./model/Model.h5')
f = K.function([model.layers[0].input, K.learning_phase()],[model.layers[-1].output])
rescaled_cell = self.rescale(cell)
result = []
for _ in range(10):
result.append(f([rescaled_cell, 1]))
result = np.array(result)
prediction = result.mean(axis=0)
uncertainty = result.var(axis=0)
if uncertainty.argmax() > 3:
new_prediction = 0
print(prediction.argmax(),uncertainty.argmax(),new_prediction)
else:
print(prediction.argmax(),uncertainty.argmax())''' | 27 | 87 | 0.705387 |
1c3558d607658f8dea73cab624fa5807f1ade4f4 | 4,544 | py | Python | plots.py | olihawkins/penguin-models | fabecdf6336390fc50e67cfd8494ade69fc3ef7f | [
"BSD-3-Clause"
] | 1 | 2021-05-05T10:17:01.000Z | 2021-05-05T10:17:01.000Z | plots.py | olihawkins/penguin-models | fabecdf6336390fc50e67cfd8494ade69fc3ef7f | [
"BSD-3-Clause"
] | null | null | null | plots.py | olihawkins/penguin-models | fabecdf6336390fc50e67cfd8494ade69fc3ef7f | [
"BSD-3-Clause"
] | null | null | null | # -*- coding: utf-8 -*-
"""A module for plotting penguins data for modelling with scikit-learn."""
# Imports ---------------------------------------------------------------------
import matplotlib as mpl
import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
# Constants -------------------------------------------------------------------
SPECIES_COLORS = {
'Adelie': '#4daf4a',
'Gentoo': '#ffb000',
'Chinstrap': '#0084f7'
}
X_AXIS = [30, 60]
Y_AXIS = [12, 22]
# Set style -------------------------------------------------------------------
# Load the style from a file
plt.style.use('./style/eda.mplstyle')
# Alternatively, load the style from the library in ~/.matplotlib/stylelib
# plt.style.use(['eda'])
# Functions -------------------------------------------------------------------
def get_contour_data(model, pipeline, n_points=1000):
"""Create the data used to show the boundary of the decision function."""
x0s = np.linspace(X_AXIS[0], X_AXIS[1], n_points)
x1s = np.linspace(Y_AXIS[0], Y_AXIS[1], n_points)
x0, x1 = np.meshgrid(x0s, x1s)
X = np.c_[x0.ravel(), x1.ravel()]
df_X = pd.DataFrame(X, columns=['bill_length_mm', 'bill_depth_mm'])
X = pipeline.transform(df_X)
y_pred = model.predict(X).reshape(x0.shape)
y_decision = model.decision_function(X).reshape(x0.shape)
return x0, x1, y_pred, y_decision
def get_target_colors(target):
"""Create a dictionary of colors to use in binary classification plots."""
return {
target : '#984ea3',
'Other': '#ff7f00'
}
# Plots -----------------------------------------------------------------------
def plot_target_by_features(df):
"""Plot the different target species."""
fig, ax = plt.subplots()
ax.set_title(
label='Palmer penguins by species and bill characteristics',
loc='center')
ax.get_xaxis().set_major_formatter(
mpl.ticker.FormatStrFormatter('%.0f'))
ax.set_xlim(X_AXIS[0], X_AXIS[1])
ax.set_xlabel('Bill length (mm)')
ax.get_yaxis().set_major_formatter(
mpl.ticker.FormatStrFormatter('%.0f'))
ax.set_ylim(Y_AXIS[0], Y_AXIS[1])
ax.set_ylabel('Bill depth (mm)')
grouped = df.groupby('species')
for key, group in grouped:
ax.scatter(
group['bill_length_mm'],
group['bill_depth_mm'],
c=SPECIES_COLORS[key],
s=40,
label=key,
alpha=0.55)
ax.legend(loc='lower left', handletextpad=0.2)
fig.savefig('plots/target-by-features.png', format='png')
plt.close()
def plot_model(df, model, pipeline, f_score, target, title, filename):
"""Plot the results of a binary classification model."""
fig, ax = plt.subplots()
ax.set_title(title, loc='center')
ax.get_xaxis().set_major_formatter(
mpl.ticker.FormatStrFormatter('%.0f'))
ax.set_xlim(X_AXIS[0], X_AXIS[1])
ax.set_xlabel('Bill length (mm)')
ax.get_yaxis().set_major_formatter(
mpl.ticker.FormatStrFormatter('%.0f'))
ax.set_ylim(Y_AXIS[0], Y_AXIS[1])
ax.set_ylabel('Bill depth (mm)')
# Plot the boundary of the decision function
x0, x1, y_pred, y_decision = get_contour_data(model, pipeline)
ax.contourf(x0, x1, y_pred, cmap=plt.cm.PuOr, alpha=0.2)
# This plots the decision score, if needed
# ax.contourf(x0, x1, y_decision, cmap=plt.cm.PuOr, alpha=0.1)
df = df.copy()
df['species'] = df['target'].apply(lambda t: target if t == 1 else 'Other')
colors = get_target_colors(target)
grouped = df.groupby('species')
for key, group in grouped:
ax.scatter(
group['bill_length_mm'],
group['bill_depth_mm'],
c=colors[key],
s=40,
label=key,
alpha=0.55)
ax.legend(loc='lower left', handletextpad=0.2)
bbox_style = {
'boxstyle': 'round',
'facecolor': '#ffffff',
'edgecolor': '#d4d4d4',
'alpha': 0.8
}
ax.text(53, 12.415, '$F_1$ score: {0}'.format(f_score), bbox=bbox_style)
fig.savefig('plots/{0}.png'.format(filename), format='png')
plt.close() | 28.759494 | 79 | 0.574604 |
1c357d3712292b01ee95a5bca2342315acb4f8ef | 623 | py | Python | dojo/db_migrations/0147_rename_sslyze_parser.py | dant24/django-DefectDojo | caf5c91b3f8870d5f466dfaaf5a3a096f8812ad9 | [
"BSD-3-Clause"
] | 249 | 2016-09-06T21:04:40.000Z | 2018-01-19T15:59:44.000Z | dojo/db_migrations/0147_rename_sslyze_parser.py | dant24/django-DefectDojo | caf5c91b3f8870d5f466dfaaf5a3a096f8812ad9 | [
"BSD-3-Clause"
] | 255 | 2016-09-06T21:36:37.000Z | 2018-01-19T19:57:57.000Z | dojo/db_migrations/0147_rename_sslyze_parser.py | dant24/django-DefectDojo | caf5c91b3f8870d5f466dfaaf5a3a096f8812ad9 | [
"BSD-3-Clause"
] | 152 | 2016-09-06T21:04:54.000Z | 2018-01-18T08:52:24.000Z | from django.db import migrations
| 25.958333 | 83 | 0.678973 |
1c35f69ad59be07090db7f3539f86ff7d6d0b4e8 | 4,203 | py | Python | server/forestgame/game/test_world.py | Nick-Pearson/forestgame | 8a37225adbe6da9df7851eba34ad06806da0ce48 | [
"0BSD"
] | null | null | null | server/forestgame/game/test_world.py | Nick-Pearson/forestgame | 8a37225adbe6da9df7851eba34ad06806da0ce48 | [
"0BSD"
] | 5 | 2021-03-10T14:18:45.000Z | 2022-03-12T00:28:29.000Z | server/forestgame/game/test_world.py | Nick-Pearson/forestgame | 8a37225adbe6da9df7851eba34ad06806da0ce48 | [
"0BSD"
] | null | null | null | import unittest
from forestgame.game.world import World
| 26.601266 | 66 | 0.610278 |
1c362cfcd82b4292b1b1b46edbeee9a97e7fba89 | 9,756 | py | Python | invconv/xlsx.py | TechPowerAwaits/ax-toolkit | d49924ef2dcd3f54f494ba3859afb070cc12ef91 | [
"0BSD"
] | null | null | null | invconv/xlsx.py | TechPowerAwaits/ax-toolkit | d49924ef2dcd3f54f494ba3859afb070cc12ef91 | [
"0BSD"
] | 16 | 2021-04-14T03:46:37.000Z | 2022-02-11T16:15:00.000Z | invconv/xlsx.py | TechPowerAwaits/ax-toolkit | d49924ef2dcd3f54f494ba3859afb070cc12ef91 | [
"0BSD"
] | null | null | null | # Copyright 2021 Richard Johnston <techpowerawaits@outlook.com>
# SPDX-license-identifier: 0BSD
import string
from loguru import logger
try:
import cell_pos
from exceptions import InvconvMissingHeaders
import ftype
import msg_handler
except ModuleNotFoundError:
import invconv.cell_pos as cell_pos
from invconv.exceptions import InvconvMissingHeaders
import invconv.ftype as ftype
import invconv.msg_handler as msg_handler
used = True
try:
from openpyxl import load_workbook
except ModuleNotFoundError:
used = False
# load_workbook is used repeatedly with similar settings
# every time.
WB_SETTINGS = {
"read_only": True,
"keep_vba": False,
"data_only": True,
"keep_links": False,
}
# Will store a file, worksheet tuple-like class
# with additional data accessible.
xlsx_data_list = ftype.FtypeDataList()
# Contains just a list of file, worksheet tuples.
xlsx_tuple_list = []
# xlsx files always start counting at 1.
INVALID_ROW = 0
if used:
ftype.add("xlsx", start)
| 33.410959 | 119 | 0.625974 |
1c369e5832adc50f438c555f56dfcb9a9431f342 | 5,501 | py | Python | solvers/generation_solver/img_interface.py | Anthony102899/Lego-ImageGenerator | 52b19c8bb20f77a3394675e7c037c943a50c1e15 | [
"Unlicense"
] | 1 | 2022-03-20T10:23:38.000Z | 2022-03-20T10:23:38.000Z | solvers/generation_solver/img_interface.py | Anthony102899/Lego-ImageGenerator | 52b19c8bb20f77a3394675e7c037c943a50c1e15 | [
"Unlicense"
] | null | null | null | solvers/generation_solver/img_interface.py | Anthony102899/Lego-ImageGenerator | 52b19c8bb20f77a3394675e7c037c943a50c1e15 | [
"Unlicense"
] | null | null | null | import os
from tkinter import *
import tkinter.filedialog as tkfd
from PIL import Image
import numpy as np
import solvers.generation_solver.image_seperation as IS
if __name__ == '__main__':
print(show_interface()) | 42.315385 | 158 | 0.616797 |
1c3759df5a38cc9eec92e29506b100742f627706 | 953 | py | Python | Constellations/get_brightest_stars.py | PatD123/Polar-Constellation | 86f54ae2028a4f351b9f1a056aa3166f49541679 | [
"MIT"
] | null | null | null | Constellations/get_brightest_stars.py | PatD123/Polar-Constellation | 86f54ae2028a4f351b9f1a056aa3166f49541679 | [
"MIT"
] | null | null | null | Constellations/get_brightest_stars.py | PatD123/Polar-Constellation | 86f54ae2028a4f351b9f1a056aa3166f49541679 | [
"MIT"
] | null | null | null | from bs4 import BeautifulSoup as soup
from urllib.request import urlopen as uReq
import re, json
# Getting the page
URL = "https://www.astronomytrek.com/star-constellations-brightest-stars/"
uClient = uReq(url=URL)
page_html = uClient.read()
page_soup = soup(page_html, "html.parser")
# Opening a file to write in
stars_file = open("brightest_stars.txt", 'w')
#
brightest_uncleaned = page_soup.find_all("tr")
for html in brightest_uncleaned:
col_4 = html.contents[4].contents[0]
col_5 = html.contents[5].string
if col_5 is not None:
idx = find_space(col_5)
col_5 = col_5[0:idx]
if col_5 == "Brightest Star": continue
stars_file.write(col_5 + "\n")
else:
idx = find_space(col_4)
col_4 = col_4[0:idx]
stars_file.write(col_4 + "\n")
stars_file.close() | 27.228571 | 74 | 0.651626 |
1c38a65740967a1e49c94a99e84549d3470de0b7 | 493 | py | Python | TwoPointers/Leetcode11.py | Rylie-W/LeetRecord | 623c4efe88b3af54b8a65f6ec23db850b8c6f46f | [
"Apache-2.0"
] | null | null | null | TwoPointers/Leetcode11.py | Rylie-W/LeetRecord | 623c4efe88b3af54b8a65f6ec23db850b8c6f46f | [
"Apache-2.0"
] | null | null | null | TwoPointers/Leetcode11.py | Rylie-W/LeetRecord | 623c4efe88b3af54b8a65f6ec23db850b8c6f46f | [
"Apache-2.0"
] | null | null | null |
if __name__ == '__main__':
sol=Solution()
# height = [1, 1]
height=[1,3,2,5,25,24,5]
print(sol.maxArea(height))
| 25.947368 | 69 | 0.543611 |
1c38c6e2555cdc9fef807ccf4fe2adf10311bc9a | 13,688 | py | Python | tensorflow_text/python/ops/bert_tokenizer_test.py | hashim361/text | 141ed3ae72078a5da431831ce718c8d09fbf4f92 | [
"Apache-2.0"
] | 1 | 2020-10-10T14:10:07.000Z | 2020-10-10T14:10:07.000Z | tensorflow_text/python/ops/bert_tokenizer_test.py | pranayjoshi/text | 5a12211ac370f989ca359d232d3081a889e859dd | [
"Apache-2.0"
] | null | null | null | tensorflow_text/python/ops/bert_tokenizer_test.py | pranayjoshi/text | 5a12211ac370f989ca359d232d3081a889e859dd | [
"Apache-2.0"
] | null | null | null | # coding=utf-8
# Copyright 2020 TF.Text Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# encoding=utf-8
r"""Tests for BertTokenizer."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from absl.testing import parameterized
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import test_util
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import lookup_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import string_ops
from tensorflow.python.ops.ragged import ragged_factory_ops
from tensorflow.python.ops.ragged import ragged_map_ops
from tensorflow.python.ops.ragged import ragged_tensor
from tensorflow.python.platform import test
from tensorflow_text.python.ops import bert_tokenizer
# TODO(thuang513): It appears there isn't a Ragged version of substr; consider
# checking this into core TF.
_VOCAB = [
b'[unused1]',
b'[unused23]',
b"'",
b'##%',
b'##af',
b'##book',
b'##c',
b'##fr',
b'##hey',
b'##is',
b'##o',
b'##ost',
b'##s',
b'##tri',
b'##y',
b'$',
b'%',
b'&',
b'(',
b')',
b'*',
b'-',
b'.',
b'20',
b':',
b'?',
b'[CLS]',
b'[SEP]',
_utf8(u''),
_utf8(u''),
_utf8(u''),
_utf8(u''),
_utf8(u''),
_utf8(u''),
_utf8(u''),
_utf8(u''),
b'^',
b'a',
b'ago',
b'among',
b'an',
b'and',
b'are',
b'aren',
b'awesome',
b'between',
b'candy',
b'china',
b'companies',
b'company',
b'crushed',
b'dug',
b'earnings',
b'engaged',
b'even',
b'few',
b'forecast',
b'getting',
b'had',
b'han',
b'has',
b'hers',
b'high',
b'hit',
b'hs',
b'hurting',
b'in',
b'indie',
b'is',
b'isn',
b'ka',
b'ku',
b'major',
b'maker',
b'moth',
b'nearly',
b'new',
b'now',
b'president',
b'record',
b'regulators',
b'reported',
b'rift',
b'rust',
b'sales',
b'shares',
b'slightly',
b'sprint',
b'states',
b'stock',
b't',
b'taste',
b'tension',
b'that',
b'the',
b'this',
b'today',
b'told',
b'topped',
b'trade',
b'trump',
b'united',
b'up',
b'weeks',
b'what',
b'why',
b'with',
b'year',
b'yo',
b'yu',
_utf8(u'\u7231'),
_utf8(u'\u4e0a'),
_utf8(u'\u4e00'),
_utf8(u'\u4e2a'),
_utf8(u'\u4e0d'),
_utf8(u'\u56de'),
_utf8(u'\u5bb6'),
_utf8(u'\u7684'),
_utf8(u'\u4eba'),
]
if __name__ == '__main__':
test.main()
| 31.179954 | 80 | 0.512566 |
1c3b16c69b0c5704668f2afab4edc623fff685bf | 5,324 | py | Python | tests/index_test.py | DubeySandeep/pending-review-notification | 353fa74d98eeb6c8386818273a2fe02af39d6b9d | [
"Apache-2.0"
] | null | null | null | tests/index_test.py | DubeySandeep/pending-review-notification | 353fa74d98eeb6c8386818273a2fe02af39d6b9d | [
"Apache-2.0"
] | null | null | null | tests/index_test.py | DubeySandeep/pending-review-notification | 353fa74d98eeb6c8386818273a2fe02af39d6b9d | [
"Apache-2.0"
] | 1 | 2021-10-20T16:24:04.000Z | 2021-10-20T16:24:04.000Z | """Unit test for the index.py file."""
import unittest
from datetime import datetime, timedelta, timezone
import json
from unittest.mock import patch, mock_open
import requests_mock
from src import index
from src import github_services
| 35.493333 | 79 | 0.523666 |
1c3c2ebbf2a88dc388bb0314813d8b32b385e4b0 | 3,133 | py | Python | rqalpha/data/instrument_mixin.py | mysky528/rqalpha | ecd550fc30aee96f9995e8152e2c48f5512f8b11 | [
"Apache-2.0"
] | 3 | 2017-07-11T15:37:24.000Z | 2021-11-22T14:21:13.000Z | rqalpha/data/instrument_mixin.py | mysky528/rqalpha | ecd550fc30aee96f9995e8152e2c48f5512f8b11 | [
"Apache-2.0"
] | null | null | null | rqalpha/data/instrument_mixin.py | mysky528/rqalpha | ecd550fc30aee96f9995e8152e2c48f5512f8b11 | [
"Apache-2.0"
] | 2 | 2019-04-26T07:51:08.000Z | 2020-12-01T20:59:04.000Z | # -*- coding: utf-8 -*-
#
# Copyright 2017 Ricequant, Inc
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import six
| 40.166667 | 100 | 0.620172 |
1c3ca96a8752bea73f340ee28894ea1bdab8af22 | 215 | py | Python | Python 3/19.prac_no2.py | ByeonUi-Hyeok/practice | 6f55ddcb662e2bf8e0c3fb4c4af0beb77a1c7d2d | [
"MIT"
] | 1 | 2021-06-11T08:55:03.000Z | 2021-06-11T08:55:03.000Z | Python 3/19.prac_no2.py | ByeonUi-Hyeok/practice | 6f55ddcb662e2bf8e0c3fb4c4af0beb77a1c7d2d | [
"MIT"
] | null | null | null | Python 3/19.prac_no2.py | ByeonUi-Hyeok/practice | 6f55ddcb662e2bf8e0c3fb4c4af0beb77a1c7d2d | [
"MIT"
] | null | null | null | import funcvote as vote
votes = input(" >>>")
# print(votes)
# print(type(votes))
result = vote.str2int(votes)
print(vote.countvotes(result))
result = vote.countvotes(result)
vote.printvote(result)
# | 14.333333 | 32 | 0.716279 |
1c3d09dc17bc58a64b3b41021ca264b66d8e9b31 | 427 | py | Python | tutorials/30-days-of-code/30-operators.py | PingHuskar/hackerrank | 1bfdbc63de5d0f94cd9e6ae250476b4a267662f2 | [
"Unlicense"
] | 41 | 2018-05-11T07:54:34.000Z | 2022-03-29T19:02:32.000Z | tutorials/30-days-of-code/30-operators.py | PingHuskar/hackerrank | 1bfdbc63de5d0f94cd9e6ae250476b4a267662f2 | [
"Unlicense"
] | 2 | 2021-09-13T10:03:26.000Z | 2021-10-04T10:21:05.000Z | tutorials/30-days-of-code/30-operators.py | PingHuskar/hackerrank | 1bfdbc63de5d0f94cd9e6ae250476b4a267662f2 | [
"Unlicense"
] | 21 | 2019-01-23T19:06:59.000Z | 2021-12-23T16:03:47.000Z | # Day 2: Operators
# Start using arithmetic operators.
#
# https://www.hackerrank.com/challenges/30-operators/problem
#
#!/bin/python3
import sys
if __name__ == "__main__":
meal_cost = float(input().strip())
tip_percent = int(input().strip())
tax_percent = int(input().strip())
cost = meal_cost * (1 + tip_percent / 100 + tax_percent / 100)
print("The total meal cost is {:.0f} dollars.".format(cost))
| 22.473684 | 66 | 0.665105 |
1c3e669806f961c690e3e607d0c5ebaae5ffefbe | 2,503 | py | Python | articles/views.py | qwghlm/CommentIsMee | 2c11be1376ec693df28123727c3d86b38404fd71 | [
"MIT"
] | null | null | null | articles/views.py | qwghlm/CommentIsMee | 2c11be1376ec693df28123727c3d86b38404fd71 | [
"MIT"
] | null | null | null | articles/views.py | qwghlm/CommentIsMee | 2c11be1376ec693df28123727c3d86b38404fd71 | [
"MIT"
] | null | null | null | from django.http import HttpResponse
from django.template import RequestContext, loader
from django.shortcuts import render, get_object_or_404, redirect
from django.core.urlresolvers import reverse
from django.core.cache import cache
from articles.models import CIFArticle
from .forms import CIFArticleForm
def index(request):
"""
Handle requests to the homepage
"""
article = None
# If a user has submitted a URL...
if request.POST:
form = CIFArticleForm(request.POST)
if (form.is_valid()):
try:
article = form.save(commit=False)
existing_articles = CIFArticle.objects.filter(url=article.url).count()
if existing_articles:
article = CIFArticle.objects.get(url=article.url)
else:
article.measure_ego()
article.save()
except ValueError, e:
article = None
form._errors["url"] = form.error_class([str(e)])
# If no URL submitted, just set up a blank form
else:
form = CIFArticleForm()
# If an article is found or created due to a user submission, redirect there
if article:
return redirect(reverse("articles:detail", args=(article.id,)))
# Else show the homepage & rendered form
else:
top_articles = cache.get('cim:top_articles')
if top_articles is None:
top_articles = CIFArticle.objects.filter(is_cif=1).order_by('-score')[:10]
cache.set('cim:top_articles', top_articles, 60)
latest_articles = cache.get('cim:latest_articles')
if latest_articles is None:
latest_articles = CIFArticle.objects.filter(is_cif=1).order_by('-id')[:5]
cache.set('cim:latest_articles', latest_articles, 30)
return render(request, 'articles/index.html', {
'form' : form ,
'top_articles' : top_articles,
'latest_articles' : latest_articles
})
def detail(request, article_id):
"""
Handle detail view for an article
"""
# Quite simple, set up article and form
form = CIFArticleForm()
article_key = 'cim:article:%s' % article_id
article = cache.get(article_key)
if article is None:
article = get_object_or_404(CIFArticle, id=article_id)
cache.set(article_key, article, 300)
return render(request, 'articles/detail.html', {
'article' : article,
'form' : form })
| 32.934211 | 86 | 0.62485 |
1c3f21c6980082d2b5b98180066cf9ba8b94eb50 | 156 | py | Python | utils/runtime_mode.py | omiderfanmanesh/dengue-infections-prediction | 6b4e4aa4af6f6e2cc581fd7828634bbfdc446340 | [
"Apache-2.0"
] | null | null | null | utils/runtime_mode.py | omiderfanmanesh/dengue-infections-prediction | 6b4e4aa4af6f6e2cc581fd7828634bbfdc446340 | [
"Apache-2.0"
] | null | null | null | utils/runtime_mode.py | omiderfanmanesh/dengue-infections-prediction | 6b4e4aa4af6f6e2cc581fd7828634bbfdc446340 | [
"Apache-2.0"
] | 1 | 2021-06-05T10:05:44.000Z | 2021-06-05T10:05:44.000Z | # Copyright (c) 2021, Omid Erfanmanesh, All rights reserved.
| 19.5 | 61 | 0.666667 |
1c41c0dd3400c46c01883be0652a07078deef3cb | 2,616 | py | Python | pydoc_fork/__main__.py | matthewdeanmartin/pydoc_fork | 174475b15be966f3751d5563b4db0beecc3ab1f9 | [
"MIT"
] | null | null | null | pydoc_fork/__main__.py | matthewdeanmartin/pydoc_fork | 174475b15be966f3751d5563b4db0beecc3ab1f9 | [
"MIT"
] | 1 | 2022-01-17T16:28:45.000Z | 2022-01-17T16:28:45.000Z | pydoc_fork/__main__.py | matthewdeanmartin/pydoc_fork | 174475b15be966f3751d5563b4db0beecc3ab1f9 | [
"MIT"
] | null | null | null | # noinspection PyPep8
"""pydoc_fork
A fork of pydoc that is optimized for generating html documentation in a CI context
Usage:
pydoc_fork <package>... [options]
pydoc_fork (-h | --help)
pydoc_fork --version
Options:
-h --help Show this screen.
-v --version Show version.
--quiet No printing or logging.
--verbose Crank up the logging.
--config <config> pyproject.toml or other toml config.
--document_internals respect underscore or __all__ private
--prefer_docs_python_org link to python.org or generate own stdlib docs
-o --output <folder> where to write files
"""
# TODO: implement this
# pydoc_fork dot_notation <importable>... [--output=<folder>] [--document_internals]
# pydoc_fork source_path <path>... [--output=<folder>] [--document_internals]
import logging
import sys
import docopt
from pydoc_fork import commands, settings
from pydoc_fork.settings import load_config
LOGGER = logging.getLogger(__name__)
LOGGERS = []
__version__ = "3.0.0"
def main() -> int:
"""Get the args object from command parameters"""
arguments = docopt.docopt(__doc__, version=f"pydoc_fork {__version__}")
config_path = arguments.get("<config>")
if config_path:
load_config(config_path)
LOGGER.debug(f"Invoking with docopts: {str(arguments)}")
output_folder = arguments["--output"]
# TODO: add lists of packages
package = arguments["<package>"] or []
# quiet = bool(arguments.get("--quiet", False))
if arguments.get("--document_internals"):
settings.DOCUMENT_INTERNALS = arguments["--document_internals"]
if arguments.get("--prefer_docs_python_org"):
settings.PREFER_DOCS_PYTHON_ORG = arguments["--prefer_docs_python_org"]
if arguments.get("--verbose"):
# root logger, all modules
for root in ("pydoc_fork", "__main__"):
logger = logging.getLogger(root)
logger.setLevel(logging.DEBUG)
handler = logging.StreamHandler()
handler.setLevel(logging.DEBUG)
log_format = "%(asctime)s - %(name)s - %(levelname)s - %(message)s"
formatter = logging.Formatter(log_format)
handler.setFormatter(formatter)
logger.addHandler(handler)
LOGGERS.append(logger)
commands.process_path_or_dot_name(
package,
output_folder=output_folder,
)
# # TODO
# print("Don't recognize that command.")
# return -1
return 0
if __name__ == "__main__":
sys.exit(main())
| 31.518072 | 86 | 0.64526 |
1c41d05846e91ffb115828352ba38c0ccc9074be | 444 | py | Python | backend/src/libs/strings.py | codeglitchz/attendance-system | c82a8d75375069b15e0b827608209bfacb67cde7 | [
"MIT"
] | 37 | 2019-12-15T17:39:38.000Z | 2022-03-13T08:16:09.000Z | backend/src/libs/strings.py | codeglitchz/attendance-system | c82a8d75375069b15e0b827608209bfacb67cde7 | [
"MIT"
] | 16 | 2020-05-05T14:17:26.000Z | 2022-03-02T09:09:38.000Z | backend/src/libs/strings.py | codeglitchz/attendance-system | c82a8d75375069b15e0b827608209bfacb67cde7 | [
"MIT"
] | 18 | 2019-12-15T17:39:43.000Z | 2022-01-22T10:42:41.000Z | """
libs.strings
By default, uses `en-gb.json` file inside the `strings` top-level folder.
If language changes, set `libs.strings.default_locale` and run `libs.strings.refresh()`.
"""
import json
default_locale = "en-us"
cached_strings = {}
refresh()
| 17.76 | 88 | 0.702703 |
1c420085b055ce7cdac960f6e45563c43bc3b205 | 5,881 | py | Python | nemo_cmd/deflate.py | SalishSeaCast/NEMO-Cmd | a1fb05c4430e152a7dae57296bce364f73752129 | [
"Apache-2.0"
] | 1 | 2020-03-26T16:42:26.000Z | 2020-03-26T16:42:26.000Z | nemo_cmd/deflate.py | SalishSeaCast/NEMO-Cmd | a1fb05c4430e152a7dae57296bce364f73752129 | [
"Apache-2.0"
] | 10 | 2020-03-23T21:19:25.000Z | 2021-11-01T22:12:17.000Z | nemo_cmd/deflate.py | SalishSeaCast/NEMO-Cmd | a1fb05c4430e152a7dae57296bce364f73752129 | [
"Apache-2.0"
] | null | null | null | # Copyright 2013-2021 The Salish Sea MEOPAR Contributors
# and The University of British Columbia
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""NEMO-Cmd command plug-in for deflate sub-command.
Deflate variables in netCDF files using Lempel-Ziv compression.
"""
import logging
import math
import multiprocessing
from pathlib import Path
import shlex
import subprocess
import time
import attr
import cliff.command
logger = logging.getLogger(__name__)
def deflate(filepaths, max_concurrent_jobs):
"""Deflate variables in each of the netCDF files in filepaths using
Lempel-Ziv compression.
Converts files to netCDF-4 format.
The deflated file replaces the original file.
:param sequence filepaths: Paths/names of files to be deflated.
:param int max_concurrent_jobs: Maximum number of concurrent deflation
processes allowed.
"""
logger.info(
"Deflating in up to {} concurrent sub-processes".format(
int(max_concurrent_jobs)
)
)
jobs = [DeflateJob(fp) for fp in filepaths if fp.exists()]
jobs_in_progress = _launch_initial_jobs(jobs, max_concurrent_jobs)
while jobs or jobs_in_progress:
time.sleep(1)
_poll_and_launch(jobs, jobs_in_progress)
| 32.672222 | 87 | 0.631185 |
1c4260852b0f621da5efbc981c92c14d38f9bbe8 | 1,469 | py | Python | src/UQpy/distributions/collection/__init__.py | SURGroup/UncertaintyQuantification | a94c8db47d07134ea2b3b0a3ca53ca818532c3e6 | [
"MIT"
] | null | null | null | src/UQpy/distributions/collection/__init__.py | SURGroup/UncertaintyQuantification | a94c8db47d07134ea2b3b0a3ca53ca818532c3e6 | [
"MIT"
] | null | null | null | src/UQpy/distributions/collection/__init__.py | SURGroup/UncertaintyQuantification | a94c8db47d07134ea2b3b0a3ca53ca818532c3e6 | [
"MIT"
] | null | null | null | """distributions module."""
from UQpy.distributions.collection.Beta import Beta
from UQpy.distributions.collection.Binomial import Binomial
from UQpy.distributions.collection.Cauchy import Cauchy
from UQpy.distributions.collection.ChiSquare import ChiSquare
from UQpy.distributions.collection.Exponential import Exponential
from UQpy.distributions.collection.Gamma import Gamma
from UQpy.distributions.collection.GeneralizedExtreme import GeneralizedExtreme
from UQpy.distributions.collection.InverseGaussian import InverseGauss
from UQpy.distributions.collection.Laplace import Laplace
from UQpy.distributions.collection.Levy import Levy
from UQpy.distributions.collection.Logistic import Logistic
from UQpy.distributions.collection.Lognormal import Lognormal
from UQpy.distributions.collection.Maxwell import Maxwell
from UQpy.distributions.collection.Multinomial import Multinomial
from UQpy.distributions.collection.MultivariateNormal import MultivariateNormal
from UQpy.distributions.collection.Normal import Normal
from UQpy.distributions.collection.Pareto import Pareto
from UQpy.distributions.collection.Poisson import Poisson
from UQpy.distributions.collection.Rayleigh import Rayleigh
from UQpy.distributions.collection.TruncatedNormal import TruncatedNormal
from UQpy.distributions.collection.Uniform import Uniform
from UQpy.distributions.collection.JointIndependent import JointIndependent
from UQpy.distributions.collection.JointCopula import JointCopula
| 58.76 | 79 | 0.884275 |
1c4262cdeb92ebd6c335d957cdc8fd8bfca03129 | 190 | py | Python | Learning Python/Exercise Files/Ch2/helloworld_my.py | RomanShevtsiv/linkedin-learning | d7ec85953b7e88905f87928ede067d32344b984f | [
"MIT"
] | null | null | null | Learning Python/Exercise Files/Ch2/helloworld_my.py | RomanShevtsiv/linkedin-learning | d7ec85953b7e88905f87928ede067d32344b984f | [
"MIT"
] | null | null | null | Learning Python/Exercise Files/Ch2/helloworld_my.py | RomanShevtsiv/linkedin-learning | d7ec85953b7e88905f87928ede067d32344b984f | [
"MIT"
] | null | null | null | #
# Example file for HelloWorld
#
if __name__ == "__main__":
main()
| 13.571429 | 39 | 0.594737 |
1c42d191e50517487ce29edd00a0d3e85b40a9be | 15,309 | py | Python | RocketSimulation.py | pietrotrope/SolarSystemSimulation | 905eec31eb73e1203ee23a32846954b30bbc5925 | [
"MIT"
] | null | null | null | RocketSimulation.py | pietrotrope/SolarSystemSimulation | 905eec31eb73e1203ee23a32846954b30bbc5925 | [
"MIT"
] | null | null | null | RocketSimulation.py | pietrotrope/SolarSystemSimulation | 905eec31eb73e1203ee23a32846954b30bbc5925 | [
"MIT"
] | null | null | null | import sys
import csv
import json
import math
import pygame
import numpy as np
from pygame.locals import *
import pandas as pd
from data import *
from agent import agentsList, Agent
global screenSize
screenSize = [1920, 1080]
def run_simulation(burn_time):
params = load_parameters("RocketSimulationData/info.json")
env = Environment(params[1])
s = System(params[0], env, burn_time)
s.launch()
def renderAgents(screen, res, ratio):
screen.fill((0, 0, 0))
pygame.draw.rect(screen, (0, 0, 255), (0, 1080-108, 1920, 108))
pos = screenSize[1]-158 - res["altitude"]*ratio
# print("altitude: "+str(res["altitude"])+", pos: "+str(pos))
pygame.draw.rect(screen, (255, 255, 255), (940, pos, 20, 50))
pygame.display.update()
def simulateRocket(screen):
run_simulation(150)
df = pd.read_csv('RocketSimulationData/Flight.csv')
result = df.to_dict("index")
ratio = screenSize[1]/1000000
interestingPoint = None
for res in result:
# print("time: "+str(result[res]["t"])+" Altitude: "+str(result[res]["altitude"]))
for event in pygame.event.get():
if event.type == QUIT:
pygame.quit()
sys.exit()
renderAgents(screen, result[res], ratio)
if result[res]["altitude"] < 800000:
interestingPoint = result[res]
pygame.display.update()
return interestingPoint
| 33.720264 | 118 | 0.528317 |
1c43093fa85de4f6e1de23a0ecc3b43530f42260 | 126 | py | Python | sourcecode/GAN/FID/__init__.py | toufeeqahamedns/GeneratingHumanFaces | 93048bf5f6ae99424f918b0d0fea46d21abee0cb | [
"MIT"
] | null | null | null | sourcecode/GAN/FID/__init__.py | toufeeqahamedns/GeneratingHumanFaces | 93048bf5f6ae99424f918b0d0fea46d21abee0cb | [
"MIT"
] | null | null | null | sourcecode/GAN/FID/__init__.py | toufeeqahamedns/GeneratingHumanFaces | 93048bf5f6ae99424f918b0d0fea46d21abee0cb | [
"MIT"
] | null | null | null | """ Package has implementation for the FID score calculation
"""
from GAN.FID import fid_score
from GAN.FID import inception
| 21 | 60 | 0.785714 |