blob_id stringlengths 40 40 | directory_id stringlengths 40 40 | path stringlengths 3 288 | content_id stringlengths 40 40 | detected_licenses listlengths 0 112 | license_type stringclasses 2 values | repo_name stringlengths 5 115 | snapshot_id stringlengths 40 40 | revision_id stringlengths 40 40 | branch_name stringclasses 684 values | visit_date timestamp[us]date 2015-08-06 10:31:46 2023-09-06 10:44:38 | revision_date timestamp[us]date 1970-01-01 02:38:32 2037-05-03 13:00:00 | committer_date timestamp[us]date 1970-01-01 02:38:32 2023-09-06 01:08:06 | github_id int64 4.92k 681M ⌀ | star_events_count int64 0 209k | fork_events_count int64 0 110k | gha_license_id stringclasses 22 values | gha_event_created_at timestamp[us]date 2012-06-04 01:52:49 2023-09-14 21:59:50 ⌀ | gha_created_at timestamp[us]date 2008-05-22 07:58:19 2023-08-21 12:35:19 ⌀ | gha_language stringclasses 147 values | src_encoding stringclasses 25 values | language stringclasses 1 value | is_vendor bool 2 classes | is_generated bool 2 classes | length_bytes int64 128 12.7k | extension stringclasses 142 values | content stringlengths 128 8.19k | authors listlengths 1 1 | author_id stringlengths 1 132 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
10c9ed49de23d070efdc6ced2c308317186bc7a6 | 71e1c13ba888477e8b5547518d36596d9b813201 | /q2_micom/tests/test_tradeoff.py | d29cdfe400485a8dbac81941098531b0731b1606 | [
"Apache-2.0"
] | permissive | khemlalnirmalkar/q2-micom | bc465d924ad11288210b447473f1e991d6ac44dc | 8ff3a29feddbe7624793b3d4c0c3726ae1e22a8f | refs/heads/master | 2022-12-04T20:09:39.142056 | 2020-08-26T23:29:36 | 2020-08-26T23:29:36 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,022 | py | """Test if tradeoff analysis works."""
import numpy as np
import os.path as path
import pandas as pd
import pytest
import qiime2 as q2
import q2_micom as q2m
# Directory that holds the packaged test data (set by the q2_micom test package).
this_dir = q2m.tests.this_dir
# Growth medium loaded from a QIIME 2 artifact and viewed as a pandas DataFrame.
medium = q2.Artifact.load(path.join(this_dir, "data", "medium.qza")).view(
    pd.DataFrame
)
# Pre-built community models artifact shared by all tests below.
models = q2.Artifact.load(path.join(this_dir, "data", "build.qza"))
# Run the tradeoff analysis once at module import; the tests assert on `res`.
res = q2m.tradeoff(
    models.view(q2m._formats_and_types.CommunityModelDirectory),
    medium)
def test_tradeoff_values():
    """The tradeoff grid spans [0.1, 1.0] in 10 distinct steps."""
    grid = res.tradeoff
    assert np.allclose(grid.min(), 0.1)
    assert np.allclose(grid.max(), 1.0)
    assert grid.nunique() == 10
def test_growth_rates():
    """Total community growth should rise with the tradeoff parameter."""
    rates_low = res.growth_rate[res.tradeoff == 0.1]
    rates_high = res.growth_rate[res.tradeoff == 1.0]
    assert rates_low.sum() < rates_high.sum()
def test_sane_tradeoff():
    """A tradeoff_min above tradeoff_max must be rejected with ValueError."""
    model_dir = models.view(q2m._formats_and_types.CommunityModelDirectory)
    with pytest.raises(ValueError):
        q2m.tradeoff(
            model_dir,
            medium,
            tradeoff_min=0.5,
            tradeoff_max=0.4,
        )
| [
"ch.diener@gmail.com"
] | ch.diener@gmail.com |
95f9b98dee3a9a430148aba4d1756e5d4cfa3066 | aca209472c7288d69adf57124c197baf98c7a6e7 | /OpenCV讀者資源/讀者資源/程式實例/ch29/ch29_1.py | f67382807fb13fb3f3d33e9baac0b20b1a10cf61 | [] | no_license | Hank-Liao-Yu-Chih/document | 712790325e48b9d8115d04b5cc2a90cd78431e61 | fafe616678cd224e70936296962dcdbbf55e38b3 | refs/heads/master | 2022-09-22T12:40:33.284033 | 2022-09-08T00:33:41 | 2022-09-08T00:33:41 | 102,203,601 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 948 | py | # ch29_1.py
import cv2
import numpy as np
# NOTE: cv2.face requires an opencv-contrib build of OpenCV.
face_db = []                                              # empty list for the training faces
face_db.append(cv2.imread("ch29_1\\hung1.jpg",cv2.IMREAD_GRAYSCALE))
face_db.append(cv2.imread("ch29_1\\hung2.jpg",cv2.IMREAD_GRAYSCALE))
face_db.append(cv2.imread("ch29_1\\star1.jpg",cv2.IMREAD_GRAYSCALE))
face_db.append(cv2.imread("ch29_1\\star2.jpg",cv2.IMREAD_GRAYSCALE))
labels = [0,0,1,1]                                        # identity label per image above
faceNames = {"0":"Hung", "1":"Unistar"}                   # map label (as str) to display name
recognizer = cv2.face.LBPHFaceRecognizer_create()         # create the LBPH face recognizer
recognizer.train(face_db, np.array(labels))               # train on the small gallery
# Read the face image to be recognized.
face = cv2.imread("ch29_1\\face.jpg",cv2.IMREAD_GRAYSCALE)
label,confidence = recognizer.predict(face)               # run recognition -> (label, confidence)
print(f"Name = {faceNames[str(label)]}")
print(f"Confidence = {confidence:6.2f}")
| [
"hank.liao@vicorelogic.com"
] | hank.liao@vicorelogic.com |
6cbe27584c2dab8abaf90e88fe6fec974ff6fd67 | a2e638cd0c124254e67963bda62c21351881ee75 | /Extensions/AMWIDealTaker/FPythonCode/FCustomizedFpML.py | ecbed3358079d3af61ad3725bd57da298e6c4981 | [] | no_license | webclinic017/fa-absa-py3 | 1ffa98f2bd72d541166fdaac421d3c84147a4e01 | 5e7cc7de3495145501ca53deb9efee2233ab7e1c | refs/heads/main | 2023-04-19T10:41:21.273030 | 2021-05-10T08:50:05 | 2021-05-10T08:50:05 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,723 | py | """------------------------------------------------------------------------
MODULE
FCustomizedFpML -
DESCRIPTION:
This file is used to modify the FpML as per the incoming source
VERSION: 1.0.30
RESTRICTIONS/ LIMITATIONS:
1. Any modifications to the scripts/ encrypted modules/ clear text code within the core is not supported.
2. This module is not customizable
3. The component may not work as expected with any modifications done to this module at user end
--------------------------------------------------------------------------"""
import base64, zlib, imp, marshal
if imp.get_magic() == '\x03\xf3\r\n':
__pyc = marshal.loads(zlib.decompress(base64.b64decode("""
eNqtU0tPGzEQ9m4CNNv0xaGnHnxBQqoIantDVVXYJFWkJKDd0AOXaGtPwLDZddeTQio40f/dzngTVahST3jlsWc8j2+8n5VYjZDm
Z5ruLQktxBnJQOhQ5IE4CwQE4jIQ92QPhW6IX7Rp8Gm62+SgNxS+90gjGh13T4e9SNLoxwuH5dz8BN23o6Hci7q9NE4GJ5PB8fjA
u0wujJMzk4OkdeFASyzlvNRmtpR4AdLHZU5aqLxuCkUJi3PpykWlIPraS1JOJneSnSjppZNkEHP2dF8OB6PB5NArB1HrXUceFss6
tVEZmrJwXIuTOlUZi25fQqGqpUVCQX6LHMikcsioNNygVKUGeW3wwhQ+TJWVh12UKN3C2rKiyI6MWu87dV91krWLWl1G9i2HqPWB
fTjH3JYFFCjn2dK7XZfVFXcMNxYUQ+GKMvsHu6awuoG/lTLkO6yoDy2jvUcb5jeNsWoQVzZo8hozbywJFEwtItt9IO4a4tbz6ioU
1YyZRlTMQ3HXFLdNcUmnQtySJ1loQ1Rses+x0BviLhTfZ964uQ7fWhkL4QNDMWPGPqGiY3xKsn8yGk7T49Mk7iFjOjr64pHic1IG
hYaboSmuQMfd1LUYczeV3ozPSEuJRTmMszmQ3cfT6t8DviKhymJmzqc2q8gDoXI+6zlgbfqR5QtAxtKnssxS5BJ05oC1Xb4o5Gwz
O8+xvdqMiUJD49DjtxX9NYWTpYW6H8v9eF4nW6TvirVw23z88DV17BIjMtePhS0v154ieBG06dum2fK7ui9OOp3qUk2n+JozclTs
Gz3hphLINFQJQ0+ePQDwXxR8Cx9rBn7iPtwmic2gHbbDPye1JMs=""")))
else:
__pyc = marshal.loads(zlib.decompress(base64.b64decode("""
The system cannot find the path specified.""")))
del base64, zlib, imp, marshal
exec(__pyc)
del __pyc
| [
"nencho.georogiev@absa.africa"
] | nencho.georogiev@absa.africa |
f6784a2efadd835fc1bebda97261f36e0d6e0d39 | ba60d3ccf11157abaf6c7bcf3a81aace27c6af88 | /spoj/lovebirds.py | 90b400cd6db14f86ef9eb2fbf8e0987a6707406c | [] | no_license | eightnoteight/compro | 9a09628593cdd3201f4d3bcf271f1ca6a4e5efca | 1e5d32ee83e9d8f27623dee7262decad3d107bd5 | refs/heads/master | 2021-01-21T04:19:02.746824 | 2016-08-01T20:51:16 | 2016-08-01T20:51:16 | 44,669,361 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 399 | py | #!/usr/bin/python
# -*- encoding: utf-8 -*-
# pylint: disable=invalid-name,missing-docstring,bad-builtin
from sys import stdin
def main():
    # Catalan numbers via the recurrence C(n+1) = 2*(2n+1)/(n+2) * C(n).
    # With catalan[0] == 1 this makes catalan[x] == C(x+1), so catalan[k-1]
    # below is C(k).  Floor division is exact because Catalan numbers are
    # integers.
    catalan = [1]
    for x in xrange(1, 1001):
        catalan.append(2*(2*x + 1)*catalan[-1] // (x + 2))
    # All whitespace-separated ints from stdin: the first is the case count,
    # each remaining one is a query k; answer is C(k) mod 1908 (the modulus
    # the problem statement asks for).  Note the generator: the count is
    # consumed first by xrange(), then one k per case.
    dstream = iter(map(int, stdin.read().split()))
    print '\n'.join([str(catalan[next(dstream) - 1] % 1908) for _ in xrange(next(dstream))])
main()
| [
"mr.eightnoteight@gmail.com"
] | mr.eightnoteight@gmail.com |
0ee114876823cd86a77166b1ce845a4a38d03557 | 066ee4df594a5dc90335d271b9d5a1b1e2a4d34c | /y/google-cloud-sdk/platform/google_appengine/google/appengine/tools/devappserver2/python/pdb_sandbox.py | 93f585410c26310319af9f492d5e168e08a36358 | [
"LGPL-2.1-or-later",
"Apache-2.0",
"BSD-3-Clause",
"MIT",
"GPL-2.0-or-later",
"MPL-1.1",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | ychen820/microblog | a2d82447525325ec58285c2e5db58b79cceaca1b | d379afa2db3582d5c3be652165f0e9e2e0c154c6 | refs/heads/master | 2021-01-20T05:58:48.424357 | 2015-04-28T22:03:09 | 2015-04-28T22:03:09 | 32,948,331 | 0 | 2 | BSD-3-Clause | 2020-07-25T05:04:35 | 2015-03-26T19:45:07 | Python | UTF-8 | Python | false | false | 2,835 | py | #!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Modify pdb to work with the devappserver2 sandbox."""
import sys
import threading
def install(config):
  """Install the necessary changes to pdb.

  Monkeypatch pdb so that it can be used in the devappserver sandbox. Must
  be called after the sandbox has been installed but before stdin/stdout
  objects have been reassigned.

  Args:
    config: The runtime_config_pb2.Config to use to configure the sandbox.
  """
  # Import here (i.e. after sandbox installed) to get the post sandbox pdb.
  # Extremely important so that we monkeypatch the same pdb the apps can
  # import.
  import pdb as pdb_postsandbox
  # Save stdin/stdout as the references will not be available when user
  # code runs.
  real_stdin = sys.stdin
  real_stdout = sys.stdout
  # Capture the original Pdb so we can forward the __init__ call after
  # monkeypatching (if not captured, forwarding the call results in infinite
  # recursion).
  pdb_premonkeypatch = pdb_postsandbox.Pdb
  if config.threadsafe or config.max_instances != 1:
    warning = """
********************************************************************************
* WARNING: please read before using PDB:
* https://developers.google.com/appengine/docs/python/tools/devserver#Python_Debugging_with_PDB
********************************************************************************
"""
    # Serializes the one-time warning write across concurrent Pdb instances.
    # Only created in the threadsafe/multi-instance case; otherwise `warning`
    # is empty and the `with lock` branch below is never reached.
    lock = threading.Lock()
  else:
    warning = ''
  class _Pdb(pdb_postsandbox.Pdb):
    # Pdb subclass that falls back to the *saved* real stdin/stdout, since
    # user code may have had sys.stdin/stdout replaced by the sandbox.
    _warning_written = False
    # TODO: improve argument handling so if new arguments are added
    # in the future or the defaults change, this does not need to be updated.
    def __init__(self, completekey='tab', stdin=None, stdout=None, skip=None):
      if stdin is None:
        stdin = real_stdin
      if stdout is None:
        stdout = real_stdout
      # Pdb is old style class so no super().
      pdb_premonkeypatch.__init__(self, completekey, stdin, stdout, skip)
      if warning:
        with lock:
          # Note: while the goal is to write the warning only one time, it
          # may be written multiple times (once each per instance).
          if not _Pdb._warning_written:
            stdout.write(warning)
            _Pdb._warning_written = True
  pdb_postsandbox.Pdb = _Pdb
| [
"ychen207@binghamton.edu"
] | ychen207@binghamton.edu |
d2ee2d0a7b652d2c7469c2b5320dcd4cf34c8a62 | de24f83a5e3768a2638ebcf13cbe717e75740168 | /moodledata/vpl_data/134/usersdata/218/46244/submittedfiles/escadarolante.py | ce5ff2ed83c850366a6d95b6864aa99715f36420 | [] | no_license | rafaelperazzo/programacao-web | 95643423a35c44613b0f64bed05bd34780fe2436 | 170dd5440afb9ee68a973f3de13a99aa4c735d79 | refs/heads/master | 2021-01-12T14:06:25.773146 | 2017-12-22T16:05:45 | 2017-12-22T16:05:45 | 69,566,344 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 198 | py | # -*- coding: utf-8 -*-
n=int(input('digite a quantidade de pessoas:'))
T=0
for i in range (0,n,1):
t=int(input('tempo:'))
if i==0:
t1=t
if i==n:
nf=t+10
T=nf-t1
print(T) | [
"rafael.mota@ufca.edu.br"
] | rafael.mota@ufca.edu.br |
26b2a67d895180bab09500fbaec44f381f50fe05 | 29df1c3c67f6d32c25a6b88399238126696b46b0 | /benchmarks/benchmarks/sparse_linalg_onenormest.py | a23dae2b400a224a69f4bd129715f852217a6250 | [
"MIT",
"Qhull",
"BSD-3-Clause",
"Python-2.0",
"Apache-2.0",
"BSD-2-Clause",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | odidev/scipy | 2347f38c562f18f863d84e907048fb8a61562ff2 | 0def9e108548b00263c79248df96f0fa1c0c0cd8 | refs/heads/master | 2022-01-24T13:54:00.539960 | 2022-01-06T13:16:38 | 2022-01-06T13:16:38 | 252,180,143 | 1 | 0 | BSD-3-Clause | 2020-04-01T13:16:17 | 2020-04-01T13:16:16 | null | UTF-8 | Python | false | false | 1,848 | py | """Compare the speed of exact one-norm calculation vs. its estimation.
"""
import numpy as np
try:
import scipy.sparse
import scipy.special # import cycle workaround for some versions
import scipy.sparse.linalg
except ImportError:
pass
from .common import Benchmark
class BenchmarkOneNormEst(Benchmark):
    """Compare the cost of the exact 1-norm of A@A vs. onenormest's estimate."""
    params = [
        [2, 3, 5, 10, 30, 100, 300, 500, 1000, 1e4, 1e5, 1e6],
        ['exact', 'onenormest']
    ]
    param_names = ['n', 'solver']
    def setup(self, n, solver):
        np.random.seed(1234)
        nrepeats = 100
        shape = (int(n), int(n))
        if solver == 'exact' and n >= 300:
            # skip: slow, and not useful to benchmark
            raise NotImplementedError()
        if n <= 1000:
            # Sample the matrices.
            self.matrices = []
            for i in range(nrepeats):
                M = np.random.randn(*shape)
                self.matrices.append(M)
        else:
            # Very large n: switch to sparse matrices with at most ~1e5
            # nonzeros and a single repeat, to keep setup time bounded.
            max_nnz = 100000
            nrepeats = 1
            self.matrices = []
            for i in range(nrepeats):
                M = scipy.sparse.rand(shape[0], shape[1], min(max_nnz/(shape[0]*shape[1]), 1e-5))
                self.matrices.append(M)
    def time_onenormest(self, n, solver):
        if solver == 'exact':
            # Get the exact values of one-norms of squares.
            for M in self.matrices:
                M.dot(M)
                scipy.sparse.linalg.matfuncs._onenorm(M)
        elif solver == 'onenormest':
            # Get the estimates of one-norms of squares.
            for M in self.matrices:
                scipy.sparse.linalg.matfuncs._onenormest_matrix_power(M, 2)
    # Retain old benchmark results (remove this if changing the benchmark)
    time_onenormest.version = "f7b31b4bf5caa50d435465e78dab6e133f3c263a52c4523eec785446185fdb6f"
| [
"pav@iki.fi"
] | pav@iki.fi |
67fdb530ec4acf4b959afd9117ae5dda82b668ec | 79cf58db2795344616cbebd7ee84e7623a2f1dd1 | /Image/img_to_features.py | 50b5796751200b616031eb47f02c5b0f9d9e1324 | [] | no_license | zpeng1989/Machine_Learing_ZP_2020 | 77f290000f00a8083026681059d2c905ce575774 | 63cadb672265b9a1fba13edd5c4a91f8e2a94910 | refs/heads/main | 2023-02-28T04:10:52.286020 | 2021-02-05T02:18:08 | 2021-02-05T02:18:08 | 319,541,071 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 607 | py | import cv2
import numpy as np
from matplotlib import pyplot as plt
image = cv2.imread('plane.jpeg', cv2.IMREAD_COLOR)
image_10 = cv2.resize(image,(10,10))
print(image_10.flatten())
#plt.imshow(image_10, cmap='gray')
#plt.axis('off')
#plt.show()
channels = cv2.mean(image_10)
observation = np.array([(channels[2], channels[1], channels[0])])
print(observation)
features = []
colors = ['r', 'g', 'b']
for i, channel in enumerate(colors):
histogram = cv2.calcHist([image], [i], None, [256], [0,256])
features.extend(histogram)
observation = np.array(features).flatten()
print(observation)
| [
"592392714@qq.com"
] | 592392714@qq.com |
950dce45bcc59842e8f000b131db9780ed98bb5c | cfdf08cb9592760a1b676282b458e58779abb0dd | /PlaneGame/manage.py | 427eeb140bc89ac984861ea69e78866f852becc1 | [
"MIT"
] | permissive | webturing/Python3 | d830c22921fe1d1e6182e9481bcd0a930309f65c | 6fcf6ee6cc5afba0225bd56e6928b01882eb9ecb | refs/heads/master | 2021-06-12T13:13:44.421511 | 2020-06-28T11:56:01 | 2020-06-28T11:56:01 | 128,641,162 | 1 | 2 | null | null | null | null | UTF-8 | Python | false | false | 777 | py | #! /usr/bin/env python
# -*- coding: utf-8 -*-
from bin.main import main
if __name__ == '__main__':
"""
环境: python3 + pygame
running 起来就可以打飞机了O(∩_∩)O~.
"""
main()
"""
PlayPlane/
|-- bin/
| |-- main.py 程序运行主体程序
|-- config/
| |-- settings.py 程序配置(例如: 游戏背景音乐的加载等)
|-- material 程序素材放置(打飞机游戏素材放置)
|-- ...
|-- src/ 程序主体模块存放
| |-- __init__.py
| |-- bullet.py 我方飞机发射子弹实现代码存放
| |-- enemy.py 敌方飞机实现代码存放
| |-- plane.py 我方飞机实现代码存放
|-- manage.py 程序启动文件
|-- README.md
"""
| [
"zj@webturing.com"
] | zj@webturing.com |
0c3928fbbb1c31312df8cd2f0e2e59b5eb9f44ca | a6e4a6f0a73d24a6ba957277899adbd9b84bd594 | /sdk/python/pulumi_azure_native/web/v20190801/list_static_site_build_function_app_settings.py | 76e8903e60e6d4e3bf71b53f3db33a36c9efa0e6 | [
"BSD-3-Clause",
"Apache-2.0"
] | permissive | MisinformedDNA/pulumi-azure-native | 9cbd75306e9c8f92abc25be3f73c113cb93865e9 | de974fd984f7e98649951dbe80b4fc0603d03356 | refs/heads/master | 2023-03-24T22:02:03.842935 | 2021-03-08T21:16:19 | 2021-03-08T21:16:19 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,013 | py | # coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union
from ... import _utilities, _tables
__all__ = [
'ListStaticSiteBuildFunctionAppSettingsResult',
'AwaitableListStaticSiteBuildFunctionAppSettingsResult',
'list_static_site_build_function_app_settings',
]
@pulumi.output_type
class ListStaticSiteBuildFunctionAppSettingsResult:
    """
    String dictionary resource.
    """
    # NOTE: auto-generated by the Pulumi SDK Generator.  The builtins `id` and
    # `type` are deliberately shadowed to mirror the Azure wire schema.
    def __init__(__self__, id=None, kind=None, name=None, properties=None, type=None):
        # Validate each field's type, then register it with the Pulumi output
        # machinery via pulumi.set so the @pulumi.getter properties resolve it.
        if id and not isinstance(id, str):
            raise TypeError("Expected argument 'id' to be a str")
        pulumi.set(__self__, "id", id)
        if kind and not isinstance(kind, str):
            raise TypeError("Expected argument 'kind' to be a str")
        pulumi.set(__self__, "kind", kind)
        if name and not isinstance(name, str):
            raise TypeError("Expected argument 'name' to be a str")
        pulumi.set(__self__, "name", name)
        if properties and not isinstance(properties, dict):
            raise TypeError("Expected argument 'properties' to be a dict")
        pulumi.set(__self__, "properties", properties)
        if type and not isinstance(type, str):
            raise TypeError("Expected argument 'type' to be a str")
        pulumi.set(__self__, "type", type)
    @property
    @pulumi.getter
    def id(self) -> str:
        """
        Resource Id.
        """
        return pulumi.get(self, "id")
    @property
    @pulumi.getter
    def kind(self) -> Optional[str]:
        """
        Kind of resource.
        """
        return pulumi.get(self, "kind")
    @property
    @pulumi.getter
    def name(self) -> str:
        """
        Resource Name.
        """
        return pulumi.get(self, "name")
    @property
    @pulumi.getter
    def properties(self) -> Mapping[str, str]:
        """
        Settings.
        """
        return pulumi.get(self, "properties")
    @property
    @pulumi.getter
    def type(self) -> str:
        """
        Resource type.
        """
        return pulumi.get(self, "type")
class AwaitableListStaticSiteBuildFunctionAppSettingsResult(ListStaticSiteBuildFunctionAppSettingsResult):
    """Awaitable variant of the result, so callers may use ``await``."""
    # pylint: disable=using-constant-test
    def __await__(self):
        # The unreachable `yield` marks this method as a generator, which makes
        # the object awaitable; the already-resolved result is returned at once.
        if False:
            yield self
        return ListStaticSiteBuildFunctionAppSettingsResult(
            id=self.id,
            kind=self.kind,
            name=self.name,
            properties=self.properties,
            type=self.type)
def list_static_site_build_function_app_settings(name: Optional[str] = None,
                                                 pr_id: Optional[str] = None,
                                                 resource_group_name: Optional[str] = None,
                                                 opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableListStaticSiteBuildFunctionAppSettingsResult:
    """
    String dictionary resource.

    :param str name: Name of the static site.
    :param str pr_id: The stage site identifier.
    :param str resource_group_name: Name of the resource group to which the resource belongs.
    :param pulumi.InvokeOptions opts: Options controlling the invoke (provider, version, ...).
    """
    __args__ = dict()
    __args__['name'] = name
    __args__['prId'] = pr_id
    __args__['resourceGroupName'] = resource_group_name
    if opts is None:
        opts = pulumi.InvokeOptions()
    if opts.version is None:
        # Default to this SDK's version so the engine selects a matching provider.
        opts.version = _utilities.get_version()
    __ret__ = pulumi.runtime.invoke('azure-native:web/v20190801:listStaticSiteBuildFunctionAppSettings', __args__, opts=opts, typ=ListStaticSiteBuildFunctionAppSettingsResult).value
    return AwaitableListStaticSiteBuildFunctionAppSettingsResult(
        id=__ret__.id,
        kind=__ret__.kind,
        name=__ret__.name,
        properties=__ret__.properties,
        type=__ret__.type)
| [
"noreply@github.com"
] | MisinformedDNA.noreply@github.com |
c84bc2a5625c487a309556a47160aabac50d97c7 | 5e8a33d64c9c82f91d70b4641cb24978ff81aec5 | /mealpy/bio_based/IWO.py | 1492290eb6a9298d5353ebffc602b72e2abee160 | [
"MIT"
] | permissive | Alhassan20/mealpy | 495e77a557f102ab61daccfdcc0cfe5949b561bb | 7ed365c5c495ad1c1e066662c90159b3d5e9b8e3 | refs/heads/master | 2023-07-31T08:09:47.285307 | 2021-09-16T01:22:12 | 2021-09-16T01:22:12 | 406,805,947 | 0 | 0 | MIT | 2021-09-15T14:41:24 | 2021-09-15T14:41:23 | null | UTF-8 | Python | false | false | 6,063 | py | #!/usr/bin/env python
# ------------------------------------------------------------------------------------------------------%
# Created by "Thieu Nguyen" at 12:17, 18/03/2020 %
# %
# Email: nguyenthieu2102@gmail.com %
# Homepage: https://www.researchgate.net/profile/Thieu_Nguyen6 %
# Github: https://github.com/thieu1995 %
#-------------------------------------------------------------------------------------------------------%
from numpy.random import uniform
from numpy import ceil
from copy import deepcopy
from mealpy.optimizer import Root
class BaseIWO(Root):
    """
    My version of: weed colonization (IWO)
        A novel numerical optimization algorithm inspired from weed colonization
    Noted:
        https://pdfs.semanticscholar.org/734c/66e3757620d3d4016410057ee92f72a9853d.pdf
    """
    def __init__(self, obj_func=None, lb=None, ub=None, verbose=True, epoch=750, pop_size=100,
                 seeds=(2, 10), exponent=2, sigma=(0.5, 0.001), **kwargs):
        super().__init__(obj_func, lb, ub, verbose, kwargs)
        self.epoch = epoch
        self.pop_size = pop_size
        self.seeds = seeds  # (Min, Max) Number of Seeds
        self.exponent = exponent  # Variance Reduction Exponent
        self.sigma = sigma  # (Initial, Final) Value of Standard Deviation
    def train(self):
        # Returns (best_position, best_fitness, loss_history).
        pop = [self.create_solution() for _ in range(self.pop_size)]
        pop, g_best = self.get_sorted_pop_and_global_best_solution(pop, self.ID_FIT, self.ID_MIN_PROB)
        fit_best = g_best[self.ID_FIT]
        fit_worst = pop[self.ID_MAX_PROB][self.ID_FIT]
        for epoch in range(self.epoch):
            # Update Standard Deviation: anneal dispersal from sigma[0] down to
            # sigma[1] over the run (non-linear, controlled by `exponent`).
            sigma = ((self.epoch - epoch) / (self.epoch - 1)) ** self.exponent * (self.sigma[0] - self.sigma[1]) + self.sigma[1]
            # Reproduction: fitter weeds spread more seeds (linear map between
            # seeds[0] and seeds[1]); EPSILON guards against division by zero
            # when the whole population has converged to one fitness value.
            pop_new = []
            for item in pop:
                ratio = (item[self.ID_FIT] - fit_worst) / (fit_best - fit_worst + self.EPSILON)
                s = int(ceil(self.seeds[0] + (self.seeds[1] - self.seeds[0]) * ratio))
                for j in range(s):
                    # Initialize Offspring and Generate Random Location
                    pos_new = item[self.ID_POS] + sigma * uniform(self.lb, self.ub)
                    pos_new = self.amend_position_faster(pos_new)
                    fit = self.get_fitness_position(pos_new)
                    pop_new.append([pos_new, fit])
            # Re-calculate best train and worst train, then truncate the merged
            # population back to pop_size (elitist survivor selection).
            pop = pop + pop_new
            pop, g_best = self.update_sorted_population_and_global_best_solution(pop, self.ID_MIN_PROB, g_best)
            pop = pop[:self.pop_size]
            fit_worst = pop[self.ID_MAX_PROB][self.ID_FIT]
            fit_best = pop[self.ID_MIN_PROB][self.ID_FIT]
            self.loss_train.append(g_best[self.ID_FIT])
            if self.verbose:
                print("> Epoch: {}, Best fit: {}".format(epoch + 1, g_best[self.ID_FIT]))
        self.solution = g_best
        return g_best[self.ID_POS], g_best[self.ID_FIT], self.loss_train
class OriginalIWO(Root):
    """
    Original version of: weed colonization (IWO)
        A novel numerical optimization algorithm inspired from weed colonization
    Link:
        https://pdfs.semanticscholar.org/734c/66e3757620d3d4016410057ee92f72a9853d.pdf
    """
    def __init__(self, obj_func=None, lb=None, ub=None, verbose=True, epoch=750, pop_size=100,
                 seeds=(2, 10), exponent=2, sigma=(0.5, 0.001), **kwargs):
        super().__init__(obj_func, lb, ub, verbose, kwargs)
        self.epoch = epoch
        self.pop_size = pop_size
        self.seeds = seeds  # (Min, Max) Number of Seeds
        self.exponent = exponent  # Variance Reduction Exponent
        self.sigma = sigma  # (Initial, Final) Value of Standard Deviation
    def train(self):
        # Returns (best_position, best_fitness, loss_history).
        pop = [self.create_solution() for _ in range(self.pop_size)]
        pop_sorted, g_best = self.get_sorted_pop_and_global_best_solution(pop, self.ID_FIT, self.ID_MIN_PROB)
        cost_best = g_best[self.ID_FIT]
        cost_worst = pop_sorted[self.ID_MAX_PROB][self.ID_FIT]
        for epoch in range(self.epoch):
            # Update Standard Deviation: anneal dispersal from sigma[0] down to
            # sigma[1] over the run (non-linear, controlled by `exponent`).
            sigma = ((self.epoch - epoch) / (self.epoch - 1)) ** self.exponent * (self.sigma[0] - self.sigma[1]) + self.sigma[1]
            # Reproduction: fitter weeds spread more seeds (linear map between
            # seeds[0] and seeds[1]).
            pop_new = []
            for item in pop:
                # Fix: add EPSILON to the denominator (as BaseIWO above does) so
                # a fully converged population (cost_best == cost_worst) cannot
                # raise ZeroDivisionError.
                ratio = (item[self.ID_FIT] - cost_worst) / (cost_best - cost_worst + self.EPSILON)
                S = int(ceil(self.seeds[0] + (self.seeds[1] - self.seeds[0]) * ratio))
                for j in range(S):
                    # Initialize Offspring and Generate Random Location
                    pos_new = item[self.ID_POS] + sigma * uniform(self.lb, self.ub)
                    pos_new = self.amend_position_faster(pos_new)
                    fit = self.get_fitness_position(pos_new)
                    pop_new.append([pos_new, fit])
            # Merge Populations and keep the pop_size fittest (elitist).
            pop = pop + pop_new
            pop = sorted(pop, key=lambda temp: temp[self.ID_FIT])
            pop = pop[:self.pop_size]
            # Re-calculate best train and worst train
            cost_worst = pop[self.ID_MAX_PROB][self.ID_FIT]
            if cost_best > pop[self.ID_MIN_PROB][self.ID_FIT]:
                g_best = deepcopy(pop[self.ID_MIN_PROB])
                cost_best = g_best[self.ID_FIT]
            self.loss_train.append(g_best[self.ID_FIT])
            if self.verbose:
                print("> Epoch: {}, Best fit: {}".format(epoch + 1, g_best[self.ID_FIT]))
        self.solution = g_best
        return g_best[self.ID_POS], g_best[self.ID_FIT], self.loss_train
| [
"nguyenthieu2102@gmail.com"
] | nguyenthieu2102@gmail.com |
b0152453501e9413c085c5e3d68b541fb358198b | 7ce2b2000cfefe8fbefc2271ebc7df2061c88194 | /CAIL2020/rlfl/utils.py | 4923804ca7c3f8f7275cdb4a9a8cc3047c173528 | [
"Apache-2.0"
] | permissive | generalzgd/CAIL | f06d79acf42ac2188938c02087f7d07b9b43095c | 57529e64ee2f602324a500ff9bed660ddcde10bb | refs/heads/master | 2023-01-24T01:14:05.382525 | 2020-11-20T03:40:47 | 2020-11-20T03:40:47 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,598 | py | """Utils tools.
Author: Yixu GAO yxgao19@fudan.edu.cn
"""
import logging
import os
from collections import OrderedDict
import torch
def get_path(path):
    """Create *path* (including parents) if it is missing and return it.

    Args:
        path: directory path to ensure

    Returns:
        The same path, now guaranteed to exist.
    """
    if os.path.exists(path):
        return path
    os.makedirs(path)
    return path
def get_csv_logger(log_file_name,
                   title='',
                   log_format='%(message)s',
                   log_level=logging.INFO):
    """Build a logger that appends CSV rows to *log_file_name*.

    Args:
        log_file_name: destination file; also used as the logger's name
        title: optional header row written immediately
        log_format: record format, default: '%(message)s'
        log_level: default: logging.INFO

    Returns:
        configured csv logger
    """
    csv_logger = logging.getLogger(log_file_name)
    csv_logger.setLevel(log_level)
    handler = logging.FileHandler(log_file_name, 'w')
    handler.setFormatter(logging.Formatter(log_format))
    csv_logger.addHandler(handler)
    # Keep rows out of the root logger's handlers (no duplicate output).
    csv_logger.propagate = False
    if title:
        csv_logger.info(title)
    return csv_logger
def load_torch_model(model, model_path):
    """Load the state dict stored at *model_path* into *model*.

    Args:
        model: model to be loaded
        model_path: state dict file path

    Returns:
        loaded model
    """
    checkpoint = torch.load(model_path, map_location="cpu")
    # Copy keys unchanged; checkpoints here are saved without a "module."
    # prefix, so no renaming is needed.
    state = OrderedDict((key, tensor) for key, tensor in checkpoint.items())
    model.load_state_dict(state, strict=True)
    return model
| [
"bangtech@sina.com"
] | bangtech@sina.com |
ee91d134d504e616ede6c544cdcee453df6d38dc | 9545652800884f0e54fe6595d8634c29ea4827a2 | /高级算法/leetCode_97_缺失的第一个正数.py | 3a17cc5a3fe47f855b2992c4e538bc986324c951 | [] | no_license | challeger/leetCode | 662d9f600a40fd8970568679656f6911a6fdfb05 | d75c35b6f8ab33c158de7fa977ab0b16dac4fc25 | refs/heads/master | 2023-01-13T07:34:42.464959 | 2020-11-13T02:40:31 | 2020-11-13T02:40:31 | 286,426,790 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,734 | py | """
day: 2020-08-25
url: https://leetcode-cn.com/leetbook/read/top-interview-questions-hard/xwkftg/
题目名: 缺失的第一个正数
题目描述: 给你一个未排序的整数数组,请你找出其中没有出现的最小正整数
算法的时间复杂度应该为O(n),并且只能使用常数级别的额外空间
示例:
输入: [3,4,-1,1]
输出: 2
输入: [7,8,9,11,12]
输出: 1
思路:
1. 哈希
我们可以将数组作为一个哈希表,它的索引+1为键,它的 是否为正数 为值
遍历哈希表,第一个值为正数的键,就是我们要找的数.
所以我们可以分为三步:
N = len(n)
1.将数组中所有的非正数 赋值N+1(N+1是必定不在正确的数组中的)
2.根据数组的值,将这个值-1对应的格子,标记为负数
3.遍历数组,若当前格子的值是正数,那么当前格子的索引+1就是我们缺失的数字
2. 置换
对于一个数组[3, 2, 4, 1, 6]
正确的未缺失数组应该是[1, 2, 3, 4, 5]
也就是,该数组的第i-1个元素,它的值应该是i
所以我们第一次遍历数组,如果这个格子的值是有效的索引
把这个格子的值与它应该在的格子交换值,因为交换后当前格子
的值可能还可以置换,所以我们应该继续交换该格子..
若当前格子的值等于它要交换的格子的值,说明出现了重复变量,那就
直接选择不置换
然后我们第二次遍历,若当前的d[i] != i+1,那么i+1就是我们的缺失的
正数
"""
from typing import List
class Solution:
    def firstMissingPositive(self, nums: List[int]) -> int:
        """Return the smallest positive integer missing from nums.

        In-place cyclic placement: any value v with 1 <= v <= n is swapped
        into slot v-1.  Afterwards the first slot i with nums[i] != i+1
        reveals the answer.  O(n) time, O(1) extra space; nums is mutated.
        """
        size = len(nums)
        for idx in range(size):
            # Keep moving nums[idx] to its home slot until the value is out
            # of range, or the home slot already holds that value (duplicate
            # or already placed).
            while 0 < nums[idx] <= size and nums[nums[idx] - 1] != nums[idx]:
                home = nums[idx] - 1
                nums[idx], nums[home] = nums[home], nums[idx]
        for idx, value in enumerate(nums):
            if value != idx + 1:
                return idx + 1
        # Every slot holds its own index+1, so 1..n are all present.
        return size + 1
if __name__ == "__main__":
test = [1, 2, 2]
s = Solution()
print(s.firstMissingPositive(test))
| [
"799613500@qq.com"
] | 799613500@qq.com |
ebed68d8bce26e2dcea621972a7d517e79bd3cca | 77e39afb2752ad63bbb775b256e2ed48fc50fde6 | /contents/views.py | 3fabf597a4a2cf036545d8639135be17de7b01bf | [] | no_license | soohyun-lee/Docker-python | f6b08eab6c0eec3efe37ba74b751e69aef153d38 | f318fc2df24a6983469c64e6ac15fd7ab0e202c2 | refs/heads/master | 2023-04-22T17:50:01.603450 | 2021-05-12T08:08:05 | 2021-05-12T08:08:05 | 366,639,580 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,248 | py | from typing import List
from contents.models import Listup
import re, requests
import json
from django.shortcuts import render
from django.views import View
from django.http import JsonResponse
from .models import Listup
class Contents(View):
    """CRUD endpoints for Listup study-log entries (JSON request/response)."""

    def post(self, request):
        """Create a new entry from the JSON request body."""
        data = json.loads(request.body)
        # NOTE(review): the client is expected to send the key 'studya_date'
        # (sic) -- confirm against the frontend before renaming it.
        Listup.objects.create(
            study_date=data['studya_date'],
            title=data['title'],
            study_contents=data['contents'],
        )
        return JsonResponse({'message': 'success'}, status=200)

    def get(self, request):
        """Return every stored entry as a JSON list."""
        # Fixed: `Listup.object` -> `Listup.objects` (the default manager),
        # matching the usage in post(); the old spelling raised AttributeError.
        all_contents = Listup.objects.all()
        data = [{
            'id': content.id,
            'date': content.study_date,
            'title': content.title,
            # NOTE(review): response key keeps the original (typo'd) spelling
            # 'study_contetns' so existing clients do not break.
            'study_contetns': content.study_contents,
        } for content in all_contents]
        return JsonResponse({'data': data}, status=200)

    def patch(self, request):
        """Update the entry matching the posted 'id'."""
        data = json.loads(request.body)
        # Fixed: the original update() call used dict-literal syntax
        # ('field' : value), which is a SyntaxError; QuerySet.update() takes
        # keyword arguments named after model fields.
        # NOTE(review): request keys kept as the original spelled them
        # ('data', 'study_contetns') -- confirm against the frontend.
        Listup.objects.filter(id=data['id']).update(
            study_date=data['data'],
            title=data['title'],
            study_contents=data['study_contetns'],
        )
        return JsonResponse({'message': 'success'}, status=200)
"soohyun527@gmail.com"
] | soohyun527@gmail.com |
9142155f37a0bf512415188dc621c7f8b1e37f45 | 1710a1785c45f4e9cf35486ee96d7d14d767c82a | /matrices.py | 3ae4ba36e94a4acc24ac67af1c1cb601a345af81 | [] | no_license | Th3Lourde/spaceGame | 2a73ae90dbb68d456fbbb23a3c244c1b740bc780 | 8e7c4e990d658fdc7f3dd21188d4493c09146fe5 | refs/heads/master | 2022-07-10T02:30:19.680893 | 2020-05-14T01:47:26 | 2020-05-14T01:47:26 | 263,761,039 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 857 | py | import numpy
# Rotation step: pi/36 rad = 5 degrees per tick.
t = numpy.pi/36
# Step length per move.
u = 1
# Current heading angle (radians).
T = 0
# Homogeneous 2-D rotation matrices: counter-clockwise and clockwise by t.
Rotate = numpy.array([[numpy.cos(t),-numpy.sin(t),0], [numpy.sin(t), numpy.cos(t), 0],[0,0,1]])
Rotate_Neg = numpy.array([[numpy.cos(t),numpy.sin(t),0], [-numpy.sin(t), numpy.cos(t), 0],[0,0,1]])
# Homogeneous translation of length u along the current heading T.
Shift = numpy.array([[1,0, u*numpy.cos(T)], [0,1,u*numpy.sin(T)], [0,0,1]])
# Object endpoints: a 3x2 array whose columns look like homogeneous 2-D points
# (x, y, 1) -- presumably the two ends of a segment; TODO confirm.
i = numpy.array([[0, 0],[0, 2],[1, 1]])
shift_towards = numpy.array([[1,0, i[0][0]],[0,1,i[1][0]], [0,0,1]])
shift_away = numpy.array([[1,0, i[0][0]],[0,1, i[1][0]], [0,0,1]])
# Bare expression left over from interactive/REPL use; no effect in a script.
i
def moveForwards(coord):
    """Translate *coord* one step of length u along heading T.

    coord is in homogeneous coordinates (3xN); uses module globals u and T.
    """
    dx = u * numpy.cos(T)
    dy = u * numpy.sin(T)
    translation = numpy.array([[1, 0, dx], [0, 1, dy], [0, 0, 1]])
    return translation @ coord
def getPosition():
    """Print and return the midpoint of the two column points stored in i.

    i holds two homogeneous 2-D points as columns: (i[0][k], i[1][k], 1).
    """
    # Fixed: the original averaged mismatched components
    # ((x1 + y1) / 2, (x2 + y2) / 2) and kept an unused local `p1`.
    # With the module's current data both forms print [0.0, 1.0], but this
    # one is the actual midpoint ((x1 + x2) / 2, (y1 + y2) / 2).
    pos = [(i[0][0] + i[0][1]) / 2, (i[1][0] + i[1][1]) / 2]
    print("position is: {}".format(pos))
    return pos
# Demo: report the midpoint, advance the segment one step, show the result.
getPosition()
i = moveForwards(i)
print(i)
"th3sylvia.lourde@gmail.com"
] | th3sylvia.lourde@gmail.com |
12ea2600b5eb5ac6f1f983418ca1c299d535df57 | e8ab6a8108801dfedb694557626fd847651564e2 | /Dragon/python/dragon/__init__.py | 01bcb0e88bd4653deaa63c50b2dc5a69b6aac13b | [
"BSD-2-Clause"
] | permissive | Spark001/Dragon-1 | 87f722bcb0feaec7fad29d923c60681cf9584267 | 310bcb5f6d9a6623bb58ed3d1ad02d1f440da474 | refs/heads/master | 2020-03-09T22:52:08.171500 | 2018-04-01T10:13:50 | 2018-04-01T10:13:50 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 929 | py | # ------------------------------------------------------------
# Copyright (c) 2017-preseent, SeetaTech, Co.,Ltd.
#
# Licensed under the BSD 2-Clause License.
# You should have received a copy of the BSD 2-Clause License
# along with the software. If not, See,
#
# <https://opensource.org/licenses/BSD-2-Clause>
#
# ------------------------------------------------------------
# config
from dragon.config import *
import dragon.config as config
# core
from dragon.core.tensor import Tensor
import dragon.core.workspace as workspace
# ops
from dragon.ops import *
# updaters
from dragon.updaters import *
# theano utilities
from dragon.vm.theano.compile.function import function as function
from dragon.vm.theano.tensor import grad as grad
# scope
from dragon.core.scope import TensorScope as name_scope
from dragon.core.scope import PhaseScope as phase_scope
from dragon.core.scope import DeviceScope as device_scope
| [
"ting.pan@seetatech.com"
] | ting.pan@seetatech.com |
7d1653be04f8139a72d11e847ec2123d319b7518 | 9d74cbd676e629f8acdc68a4bac3dea0a98b9776 | /yc213/825.py | 201024344db09ebb4a8336c0ddfbc2f1d28dbd79 | [
"MIT"
] | permissive | c-yan/yukicoder | 01166de35e8059eaa8e3587456bba52f35bd0e44 | dcfd89b0a03759156dcea8c2e61a7705543dc0d4 | refs/heads/master | 2022-03-20T06:50:48.225922 | 2022-02-25T15:48:50 | 2022-02-25T15:48:50 | 237,735,377 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 445 | py | A, B, C = map(int, input().split())
if B == 0:
if C >= A or A == 0:
print('Impossible')
else:
print(A - C)
else:
if C >= A + B + 10:
print('Impossible')
elif C >= A + B:
print(A + B + 9 - C)
else:
while A >= 10 and (A - 10) + (B + 1) > C:
B += 1
A -= 10
if A + B - C <= A:
print(A + B - C)
else:
print((B - C) * 10 + A)
| [
"c-yan@users.noreply.github.com"
] | c-yan@users.noreply.github.com |
bf2719d65f4f33e7e5cd99ef493bae50a8b389f5 | 09e57dd1374713f06b70d7b37a580130d9bbab0d | /data/p2DJ/New/R2/benchmark/startPyquil191.py | e48239dddba4fcb815b00f48785364fe8b897f0f | [
"BSD-3-Clause"
] | permissive | UCLA-SEAL/QDiff | ad53650034897abb5941e74539e3aee8edb600ab | d968cbc47fe926b7f88b4adf10490f1edd6f8819 | refs/heads/main | 2023-08-05T04:52:24.961998 | 2021-09-19T02:56:16 | 2021-09-19T02:56:16 | 405,159,939 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,159 | py | # qubit number=2
# total number=12
import pyquil
from pyquil.api import local_forest_runtime, QVMConnection
from pyquil import Program, get_qc
from pyquil.gates import *
import numpy as np
conn = QVMConnection()
def make_circuit()-> Program:
prog = Program() # circuit begin
prog += H(0) # number=1
prog += X(1) # number=2
prog += H(1) # number=9
prog += CZ(0,1) # number=10
prog += H(1) # number=11
prog += CNOT(0,1) # number=7
prog += X(1) # number=8
prog += X(1) # number=5
prog += CNOT(0,1) # number=6
# circuit end
return prog
def summrise_results(bitstrings) -> dict:
d = {}
for l in bitstrings:
if d.get(l) is None:
d[l] = 1
else:
d[l] = d[l] + 1
return d
if __name__ == '__main__':
prog = make_circuit()
qvm = get_qc('1q-qvm')
results = qvm.run_and_measure(prog,1024)
bitstrings = np.vstack([results[i] for i in qvm.qubits()]).T
bitstrings = [''.join(map(str, l)) for l in bitstrings]
writefile = open("../data/startPyquil191.csv","w")
print(summrise_results(bitstrings),file=writefile)
writefile.close()
| [
"wangjiyuan123@yeah.net"
] | wangjiyuan123@yeah.net |
d910418d27bae4339185576b152e789a949bc49d | 9b07e3fc9436f876a426bf5b02d07733de10d775 | /tests/bind_tests/boolean_tests/operation_tests/test_process_events.py | 44fd2d01a13dee2e67120d5b8ff44fef1ed96b03 | [
"MIT"
] | permissive | lycantropos/martinez | 019e859ec513cc7ad38901e22dff8e897615776c | 86db48324cb50ecb52be8ab2e4278a6d5cdd562b | refs/heads/master | 2021-07-10T04:19:23.372706 | 2020-11-28T00:58:47 | 2020-11-28T00:58:47 | 224,819,004 | 7 | 1 | MIT | 2020-12-20T15:47:17 | 2019-11-29T09:16:26 | Python | UTF-8 | Python | false | false | 521 | py | from typing import (List,
Tuple)
from hypothesis import given
from tests.bind_tests.hints import (BoundOperation,
BoundSweepEvent)
from . import strategies
@given(strategies.operations_with_events_lists)
def test_basic(operation_with_events: Tuple[BoundOperation,
List[BoundSweepEvent]]) -> None:
operation, events = operation_with_events
result = operation.process_events(events)
assert result is None
| [
"azatibrakov@gmail.com"
] | azatibrakov@gmail.com |
ec67db41e7d10ffa59c7feac6008fe09eeab37f3 | b0f101d115d30b59e6ab3887e782e08e57d0fb52 | /cbow/main.py | fa1848f55df982f9208377e5cf6cca13a2dc004e | [] | no_license | trigrass2/torch_light | 9c15c75aeb290fb54b2f247b0a0daec428415414 | 17f54f50309c50749711d7483f1563756e8d8afd | refs/heads/master | 2021-05-07T18:55:07.337457 | 2017-10-29T06:29:26 | 2017-10-29T06:29:26 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,205 | py | import torch
import torch.nn.functional as F
import torch.nn as nn
import torch.optim as optim
import torch.autograd as autograd
CONTEXT_SIZE = 2 # 2 words to the left, 2 to the right
EMBEDDING_DIM = 64
raw_text = """We are about to study the idea of a computational process.
Computational processes are abstract beings that inhabit computers.
As they evolve, processes manipulate other abstract things called data.
The evolution of a process is directed by a pattern of rules
called a program. People create programs to direct processes. In effect,
we conjure the spirits of the computer with our spells.""".split()
vocab = set(raw_text)
vocab_size = len(vocab)
word_to_ix = {word: i for i, word in enumerate(vocab)}
data = []
for i in range(2, len(raw_text) - 2):
context = [raw_text[i - 2], raw_text[i - 1],
raw_text[i + 1], raw_text[i + 2]]
target = raw_text[i]
data.append((context, target))
class CBOW(nn.Module):
def __init__(self, vocab_size, ebd_size, cont_size):
super(CBOW, self).__init__()
self.ebd = nn.Embedding(vocab_size, ebd_size)
self.ebd.weight.data.uniform_(-0.1, 0.1)
self.lr1 = nn.Linear(ebd_size*cont_size*2, 128)
self.lr2 = nn.Linear(128, vocab_size)
def forward(self, inputs):
out = self.ebd(inputs).view(1, -1)
out = F.relu(self.lr1(out))
out = self.lr2(out)
out = F.log_softmax(out)
return out
def make_context_vector(context, word_to_ix):
idxs = [word_to_ix[w] for w in context]
tensor = torch.LongTensor(idxs)
return autograd.Variable(tensor)
loss_function = nn.NLLLoss()
model = CBOW(vocab_size, EMBEDDING_DIM, CONTEXT_SIZE)
optimizer = optim.Adam(model.parameters(), lr=0.001)
for epoch in range(1, 41):
total_loss = 0.0
for context, target in data:
v_ctx = make_context_vector(context, word_to_ix)
v_tar = autograd.Variable(torch.LongTensor([word_to_ix[target]]))
model.zero_grad()
out = model(v_ctx)
loss = loss_function(out, v_tar)
total_loss += loss.data
loss.backward()
optimizer.step()
print("end of epoch {} | loss {:2.3f}".format(epoch, total_loss[0]))
| [
"422618856@qq.com"
] | 422618856@qq.com |
6c07e3d6f5128ce364ddac46fea5a3d9630b40c1 | 67a48a7a2db56247fdd84474efa35124565fd8b9 | /Codeforces/1559/1559c.py | 163287d73c0a3162fb31da550314ccaa6c48cae6 | [] | no_license | qazz625/Competitive-Programming-Codes | e3de31f9276f84e919a6017b2cf781c946809862 | e5df9cdc4714d78b7b6a7535ed7a45e07d3781c3 | refs/heads/master | 2022-08-30T07:57:55.172867 | 2022-08-10T08:02:07 | 2022-08-10T08:02:07 | 242,182,922 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 553 | py | t = int(input())
for _ in range(t):
n = int(input())
arr = [int(j) for j in input().split()]
if arr[-1] == 0:
ans = [i for i in range(1, n+2)]
print(*ans)
continue
if arr[0] == 1:
ans = [n+1] + [i for i in range(1, n+1)]
print(*ans)
continue
ind = -1
for i in range(n-1):
if arr[i] == 0 and arr[i+1] == 1:
ind = i
break
# print(arr)
assert ind != -1
# print(ind)
ans = []
for i in range(ind+1):
ans += [i]
ans += [n]
for i in range(ind+1, n):
ans += [i]
for i in range(len(ans)):
ans[i] += 1
print(*ans) | [
"arun49804@gmail.com"
] | arun49804@gmail.com |
fdd396e48bbd6514054391792a4eec855608cd68 | eac22714038e840028cc5abb72bc750004626ebb | /mct_utilities/src/mct_utilities/iface_tools.py | ea4e2aa90833414c05934e0eca84b528708099fb | [
"Apache-2.0"
] | permissive | iorodeo/mct | 79b19f6dab9f6567452df7274d67245bf64b1801 | fa8b85f36533c9b1486ca4f6b0c40c3daa6f4e11 | refs/heads/master | 2022-11-11T18:03:18.178182 | 2014-08-20T19:21:27 | 2014-08-20T19:21:27 | 273,790,182 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 214 | py | import netifaces
def get_ip_addr(iface):
"""
Returns the IP address for the given interface.
"""
ifaddresses = netifaces.ifaddresses(iface)
ip = ifaddresses[2][0]['addr']
return ip
| [
"will@iorodeo.com"
] | will@iorodeo.com |
77055629a54447ef734ce62f2f95dff7d4883e48 | 282ec49f8ce8aa176c24e4f13a8852c9b0752e4a | /educational/optimization-methods/constrained-optimization/test_suites.py | 55553d168f71c88dbc2351812c09262cac6095b8 | [] | no_license | montreal91/workshop | b118b9358094f91defdae1d11ff8a1553d67cee6 | 8c05e15417e99d7236744fe9f960f4d6b09e4e31 | refs/heads/master | 2023-05-22T00:26:09.170584 | 2023-01-28T12:41:08 | 2023-01-28T12:41:08 | 40,283,198 | 3 | 1 | null | 2023-05-01T20:19:11 | 2015-08-06T03:53:44 | C++ | UTF-8 | Python | false | false | 4,275 | py |
from utilities import Struct
from vector2 import Vector2
def Target1( vec ):
return vec.x ** 2 + vec.y ** 2
def DerTarget1( vec ):
return Vector2( 2 * vec.x, 2 * vec.y )
def Target2(vec):
return vec.x ** 2 + vec.y ** 3
def DerTarget2(vec):
return Vector2(2 * vec.x, 3 * vec.y ** 2)
def Target3(vec):
return (vec.x **2 + 6 * vec.y) ** 4 + 8 * vec.x * vec.y
def DerTarget3(vec):
x = ((vec.x ** 2 + 6 * vec.y) ** 3 ) * 8 * vec.x + 8 * vec.y
y = ((vec.x ** 2 + 6 * vec.y) ** 3 ) * 24 + 8 * vec.y
return Vector2(x, y)
def e11(vec):
return vec.x + 1.235
def de11(vec):
return Vector2(1, 0)
def e12(vec):
return vec.y - 1
def de12(vec):
return Vector2(0, 1)
def e13(vec):
return vec.x - 2 * vec.y ** 2 + 8 * vec.y
def de13(vec):
return Vector2(1, -4 * vec.y + 8)
def i21(vec):
return (vec.x + 2) ** 2 + (vec.y + 2) ** 2 - 4
def di21(vec):
return Vector2(2 * vec.x + 4, 2 * vec.y + 4)
def i22(vec):
return vec.x ** 2 + vec.y ** 2 - 1
def di22(vec):
return Vector2(2 * vec.x, 2 * vec.y)
def i31(vec):
return vec.x - 1
def di31(vec):
return Vector2(1, 0)
def i32(vec):
return - 3 * vec.x + vec.y - 6
def di32(vec):
return Vector2( -3, 1)
def i33(vec):
return vec.x - vec.y - 4
def di33(vec):
return Vector2(1, -1)
def i34(vec):
return -0.5 * vec.x + vec.y + 2
def di34(vec):
return Vector2(-0.5, 1)
def i35(vec):
return -vec.x - vec.y - 4
def di35(vec):
return Vector2(-1, -1)
def TargetBygfoot(vec):
return - vec.x * vec.y
def DerTargetBygfoot(vec):
return Vector2(-vec.y, -vec.x)
def i_bfoot1(vec):
return -vec.x
def di_bfoot1(vec):
return Vector2(-1, 0)
def i_bfoot2(vec):
return vec.x - 30000
def di_bfoot2(vec):
return Vector2(1, 0)
def i_bfoot3(vec):
return -vec.y
def di_bfoot3(vec):
return Vector2(0, -1)
def i_bfoot4(vec):
return vec.y - 30
def di_bfoot4(vec):
return Vector2(0, 1)
bf_suite1 = Struct()
bf_suite1.target_cb = TargetBygfoot
bf_suite1.d_target_cb = DerTargetBygfoot
bf_suite1.constraints_eq_l = []
bf_suite1.d_constraints_eq_l = []
bf_suite1.constraints_ineq_l = [i_bfoot1, i_bfoot2, i_bfoot3, i_bfoot4]
bf_suite1.d_constraints_ineq_l = [di_bfoot1, di_bfoot2, di_bfoot3, di_bfoot4]
bf_suite1.start_point = Vector2(50000, 100)
suite1 = Struct()
suite1.target_cb = Target1
suite1.d_target_cb = DerTarget1
suite1.constraints_eq_l = [e11]
suite1.d_constraints_eq_l = [de11]
suite1.constraints_ineq_l = []
suite1.d_constraints_ineq_l = []
suite1.start_point = Vector2(4, 3)
suite2 = Struct()
suite2.target_cb = Target1
suite2.d_target_cb = DerTarget1
suite2.constraints_eq_l = [e11, e12]
suite2.d_constraints_eq_l = [de11, de12]
suite2.constraints_ineq_l = []
suite2.d_constraints_ineq_l = []
suite2.start_point = Vector2(-4, 3)
suite3 = Struct()
suite3.target_cb = Target1
suite3.d_target_cb = DerTarget1
suite3.constraints_eq_l = [e13]
suite3.d_constraints_eq_l = [de13]
suite3.constraints_ineq_l = []
suite3.d_constraints_ineq_l = []
suite3.start_point = Vector2(3, -2)
suite4 = Struct()
suite4.target_cb = Target2
suite4.d_target_cb = DerTarget2
suite4.constraints_eq_l = []
suite4.d_constraints_eq_l = []
suite4.constraints_ineq_l = [i21]
suite4.d_constraints_ineq_l = [di21]
suite4.start_point = Vector2(4, -4)
suite5 = Struct()
suite5.target_cb = Target2
suite5.d_target_cb = DerTarget2
suite5.constraints_eq_l = []
suite5.d_constraints_eq_l = []
suite5.constraints_ineq_l = [i21, i22]
suite5.d_constraints_ineq_l = [di21, di22]
suite5.start_point = Vector2(4, 4)
suite6 = Struct()
suite6.target_cb = Target3
suite6.d_target_cb = DerTarget3
suite6.constraints_eq_l = []
suite6.d_constraints_eq_l = []
suite6.constraints_ineq_l = [i31, i32, i33, i34, i35]
suite6.d_constraints_ineq_l = [di31, di32, di33, di34, di35]
suite6.start_point = Vector2(-4, 4)
| [
"nefedov.alexander91@yandex.ru"
] | nefedov.alexander91@yandex.ru |
556d819f1e8715a82a6f1ec8b75bf6c9475578ed | ccd400e42d98c5c9be099e60fc8f7081951438b4 | /idb/grpc/tests/xctest_log_parser.py | 6842ac576f11b2b902c72705cd15b997cf1f7a16 | [
"MIT"
] | permissive | Unity-Technologies/idb | d0b986c1ac082f8f07fa04ed263fa85259130c4c | f02a51d8018f497e4975e943dc5e94a9d4785fdd | refs/heads/unity-main | 2023-07-17T13:15:31.637414 | 2021-12-23T11:07:15 | 2021-12-23T11:07:15 | 317,798,876 | 5 | 0 | MIT | 2021-12-27T16:01:11 | 2020-12-02T08:36:26 | Objective-C | UTF-8 | Python | false | false | 3,499 | py | #!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import json
from unittest import TestCase
from idb.grpc.xctest_log_parser import XCTestLogParser
def _begin_test(class_name: str, method_name: str) -> str:
return json.dumps(
{"className": class_name, "methodName": method_name, "event": "begin-test"}
)
def _end_test(class_name: str, method_name: str) -> str:
return json.dumps(
{"className": class_name, "methodName": method_name, "event": "end-test"}
)
class XCTestLogParserTestCase(TestCase):
def test_ignores_line_missing_class_name(self) -> None:
parser = XCTestLogParser()
for line in [
"some line",
'{"event": "begin-test", "methodName": "MyTestMethod"}',
"abc",
_end_test("MyTestClass", "MyTestMethod"),
]:
parser.parse_streaming_log(line)
self.assertCountEqual({}, parser._logs)
def test_ignores_line_with_mismatched_types(self) -> None:
parser = XCTestLogParser()
for line in [
"some line",
'{"event": "begin-test", "className": "MyTestClass", "methodName": 42}',
"abc",
_end_test("MyTestClass", "MyTestMethod"),
]:
parser.parse_streaming_log(line)
self.assertCountEqual({}, parser._logs)
def test_ignores_line_that_is_too_long(self) -> None:
parser = XCTestLogParser()
method_name = "a" * 10_001
for line in [
_begin_test("MyTestClass", method_name),
"abc",
"def",
_end_test("MyTestClass", method_name),
]:
parser.parse_streaming_log(line)
self.assertCountEqual({}, parser._logs)
def test_ignores_log_lines_outside_test(self) -> None:
parser = XCTestLogParser()
for line in ["some line", '{"this line": "has json"}']:
parser.parse_streaming_log(line)
self.assertCountEqual({}, parser._logs)
def test_adds_lines_to_distinct_tests(self) -> None:
parser = XCTestLogParser()
for line in [
_begin_test("MyTestClass", "MyTestMethod"),
"abc",
"def",
_end_test("MyTestClass", "MyTestMethod"),
_begin_test("MyTestClass", "OtherMethod"),
"123",
"456",
_end_test("MyTestClass", "OtherMethod"),
]:
parser.parse_streaming_log(line)
self.assertListEqual(
parser.get_logs_for_test("MyTestClass", "MyTestMethod"), ["abc", "def"]
)
self.assertListEqual(
parser.get_logs_for_test("MyTestClass", "OtherMethod"), ["123", "456"]
)
def test_handles_mismatched_starts(self) -> None:
parser = XCTestLogParser()
for line in [
_begin_test("MyTestClass", "MyTestMethod"),
"abc",
"def",
_begin_test("MyTestClass", "OtherMethod"),
"123",
"456",
_end_test("MyTestClass", "OtherMethod"),
]:
parser.parse_streaming_log(line)
self.assertListEqual(
parser.get_logs_for_test("MyTestClass", "MyTestMethod"), ["abc", "def"]
)
self.assertListEqual(
parser.get_logs_for_test("MyTestClass", "OtherMethod"), ["123", "456"]
)
| [
"facebook-github-bot@users.noreply.github.com"
] | facebook-github-bot@users.noreply.github.com |
1e8b004d0c84a1e74b561476b9d095ddbd2e22d9 | 9507ff9e9bca2ca8104369c9e25acd74d308e9b3 | /base_data_from_net/get_base_data.py | 18446831f88c2d3a3a0c490889e28e8464b49c00 | [] | no_license | yangkang411/python_tool | 03e483c7ec7e1e76284f93cf5b9086fdf98af826 | 713071a9fbabfabcbc3c16ce58d1382c410a7ea3 | refs/heads/master | 2023-03-17T16:14:03.332332 | 2020-09-10T02:37:05 | 2020-09-10T02:37:05 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,752 | py | import socket
from time import sleep
import base64
import _thread
import serial
import time
import datetime
import os
host = '47.116.1.17'
port = 2201
#mountPoint = 'RTKBASE'
mountPoint = 'WX02'
userAgent = 'NTRIP Aceinna CloudRTK 1.0'
username = 'yundong'
password = 'TEL8IOZTBJVVJ0IT'
com_port = 'com34'
def get_utc_day():
year = int(time.strftime("%Y"))
month = int(time.strftime("%m"))
day = int(time.strftime("%d"))
hour = int(time.strftime("%H"))
minute = int(time.strftime("%M"))
second = int(time.strftime("%S"))
local_time = datetime.datetime(year, month, day, hour, minute, second)
time_struct = time.mktime(local_time.timetuple())
utc_st = datetime.datetime.utcfromtimestamp(time_struct)
d1 = datetime.datetime(year, 1, 1)
utc_sub = utc_st - d1
utc_str = utc_sub.__str__()
utc_day_int = int(utc_str.split( )[0])
utc_day_str = str(utc_day_int + 1)
return utc_day_str
def mkdir(path):
path=path.strip()
path=path.rstrip("\\")
isExists=os.path.exists(path)
if not isExists:
os.makedirs(path)
print (path+' mkdir suc')
return True
else:
print ('mkdir exist')
return False
def rev_ntrip_data(client,is_log):
if is_log:
day = get_utc_day()
try:
mkdir(day)
except:
pass
file_time = time.strftime("%Y_%m_%d_%H_%M_%S", time.localtime())
log_file = day + '/' + 'ntrip_' + file_time +'.bin'
fs = open(log_file,'wb')
while True:
rev_data = client.recv(1024)
if(len(rev_data) > 0):
#print('len = %s' % (len(rev_data)))
#port_handle.write(rev_data)
if is_log:
fs.write(rev_data)
#print(rev_data)
sleep(0.1)
def rev_uart_data(port_handle,client,is_log):
if is_log:
day = get_utc_day()
try:
mkdir(day)
except:
pass
file_time = time.strftime("%Y_%m_%d_%H_%M_%S", time.localtime())
log_file = day + '/' + 'sta8100_' + file_time +'.log'
fs = open(log_file,'w')
while True:
data = port_handle.read_all()
if len(data) > 0 :
client.send(data)
print(str(data))
if is_log:
fs.write(str(data))
sleep(0.1)
def ntip_connect():
'''
uart_handle=serial.Serial(com_port,460800, timeout=1)
if(uart_handle.is_open):
print('wrong port')
'''
rtk_client = socket.socket()
rtk_client.connect((host, port))
auth = username + ':' + password
bytes_auth = auth.encode("utf-8")
authorization = base64.b64encode(bytes_auth)
#authorization = username + ':' + password
info = "GET /%s HTTP/1.0\r\nUser-Agent: %s\r\nAuthorization: Basic %s\r\n\r\n"%(mountPoint,userAgent,authorization.decode('utf-8'))
print ("info = %s" % info)
rtk_client.send(info.encode("utf8"))
rev_data = rtk_client.recv(1024)
if('ICY 200 OK' in str(rev_data)):
print ('connect ntrip suc start connect com')
_thread.start_new_thread(rev_ntrip_data,(rtk_client,1,))
#_thread.start_new_thread(rev_uart_data,(uart_handle,rtk_client,1,))
'''
try:
serial=serial.Serial(com_port,460800, timeout=1)
if(serial == NULL):
print('wrong port')
rtk_client = socket.socket()
rtk_client.connect((host, port))
auth = username + ':' + password
bytes_auth = auth.encode("utf-8")
authorization = base64.b64encode(bytes_auth)
#authorization = username + ':' + password
info = "GET /%s HTTP/1.0\r\nUser-Agent: %s\r\nAuthorization: Basic %s\r\n\r\n"%(mountPoint,userAgent,authorization.decode('utf-8'))
print (info)
rtk_client.send(info.encode("utf8"))
rev_data = rtk_client.recv(1024)
if('ICY 200 OK' in str(rev_data)):
print ('connect ntrip suc start connect com')
_thread.start_new_thread(rev_ntrip_data,(serial,rtk_client,0,))
_thread.start_new_thread(rev_uart_data,(serial,rtk_client,0,))
except:
print('error')
return
'''
if __name__ == '__main__':
ntip_connect()
while 1:
pass
| [
"41727862+geqian@users.noreply.github.com"
] | 41727862+geqian@users.noreply.github.com |
c8a2c709881af6d33928d6a76a1e18aa42103ef1 | 5000d053f7a8bd90d68d5580d2398e2b74259719 | /alerta/plugins/normalise.py | c6410e2032e29795d43a316d846b49a734ac5882 | [
"Apache-2.0"
] | permissive | alvsgithub/alerta | 8203d7632ff58e0ef5e2e9e64156ab797f322710 | 25f4257347d2135a259b37d5646da51514fc8e64 | refs/heads/master | 2021-01-18T08:12:49.658867 | 2014-10-18T22:15:18 | 2014-10-18T22:15:18 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 268 | py |
from alerta.plugins import PluginBase, RejectException
class NormaliseAlert(PluginBase):
def pre_receive(self, alert):
alert.text = '%s: %s' % (alert.severity.upper(), alert.text)
return alert
def post_receive(self, alert):
pass | [
"nick.satterly@guardian.co.uk"
] | nick.satterly@guardian.co.uk |
0dab4666a712bbc8aa7a63cfd615561f4fd6e6f5 | a36c6c96e21a61ac622d53b2f441adf70da8e465 | /httpx_http.py | 199982e6ef1b01d5eaa8c6e7ed0e28644460feac | [] | no_license | pen960223/ProxyTest | a7fff2e394cb7dc5bcea42e47bb4834f456432df | 003abe06cb4ebd77281b27886dbdf58bc3d11848 | refs/heads/master | 2023-06-17T10:59:11.931023 | 2021-07-10T14:10:48 | 2021-07-10T14:10:48 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 247 | py | import httpx
proxy = '127.0.0.1:7890'
proxies = {
'http://': 'http://' + proxy,
'https://': 'http://' + proxy,
}
with httpx.Client(proxies=proxies) as client:
response = client.get('https://httpbin.org/get')
print(response.text)
| [
"cqc@cuiqingcai.com"
] | cqc@cuiqingcai.com |
1ba5e73d31ba4cce03b8f7156ce2da8d41f8b346 | 060ce17de7b5cdbd5f7064d1fceb4ded17a23649 | /fn_joe_sandbox_analysis/fn_joe_sandbox_analysis/__init__.py | 09b22a32edc9da84a9bffe6bfd13f979462e9f3f | [
"MIT"
] | permissive | ibmresilient/resilient-community-apps | 74bbd770062a22801cef585d4415c29cbb4d34e2 | 6878c78b94eeca407998a41ce8db2cc00f2b6758 | refs/heads/main | 2023-06-26T20:47:15.059297 | 2023-06-23T16:33:58 | 2023-06-23T16:33:58 | 101,410,006 | 81 | 107 | MIT | 2023-03-29T20:40:31 | 2017-08-25T14:07:33 | Python | UTF-8 | Python | false | false | 197 | py | # (c) Copyright IBM Corp. 2018. All Rights Reserved.
import pkg_resources
try:
__version__ = pkg_resources.get_distribution(__name__).version
except pkg_resources.DistributionNotFound:
pass | [
"shane.curtin@ie.ibm.com"
] | shane.curtin@ie.ibm.com |
1372764e6f68c43882fa2ccf7731bcd1330247bc | a8fc7cc0f64571ddcb4d78278f9aa166a74038e5 | /worlds/oribf/Options.py | ac6808aa8d34ab5aab58f13897fd300f02c407c1 | [
"MIT"
] | permissive | adampziegler/Archipelago | 8d10ad309e8e3e71fc008d692b3fb1efc6f6ce6a | 6b0b78d8e07d275aceead3f5a1890eac5f153bf0 | refs/heads/main | 2023-08-23T15:09:06.365113 | 2021-11-03T22:27:09 | 2021-11-03T22:27:09 | 396,093,506 | 0 | 0 | NOASSERTION | 2021-08-14T18:25:23 | 2021-08-14T18:25:23 | null | UTF-8 | Python | false | false | 249 | py | from .RulesData import location_rules
from Options import Toggle
options = {
"open" : Toggle,
"openworld": Toggle
}
for logic_set in location_rules:
if logic_set != "casual-core":
options[logic_set.replace("-", "_")] = Toggle
| [
"fabian.dill@web.de"
] | fabian.dill@web.de |
1a943889ad313bd6c151019a90fcd91d175507d1 | 93a12156f091858f6e48c7c85c84483742c221f7 | /programmers/heap_disk.py | 1d24814334908fa99d48aecc1c414bbdc5aaed55 | [] | no_license | dojinkimm/AlgorithmPractice | 74cd5c63f9973792f034fe019504b2160ad51e40 | 211857e7541faeeec2f21690eecc8c17d31ad142 | refs/heads/master | 2021-09-28T14:54:12.756098 | 2021-09-26T13:23:38 | 2021-09-26T13:23:38 | 205,849,589 | 2 | 1 | null | 2019-09-30T03:20:10 | 2019-09-02T12:12:24 | Python | UTF-8 | Python | false | false | 538 | py | # 디스크 컨트롤러 - Heap
from heapq import heappush, heappop
def solution(jobs):
time, end, q = 0, -1, []
answer = 0
cnt = 0
length = len(jobs)
while cnt < length:
for job in jobs:
if end < job[0] <= time:
answer += (time - job[0])
heappush(q, job[1])
if len(q) > 0:
answer += len(q) * q[0]
end = time
time += heappop(q)
cnt += 1
else:
time += 1
return (int(answer / length)) | [
"dojinkim119@gmail.com"
] | dojinkim119@gmail.com |
328acbeccc71240aef3b45402311a0e1884a6353 | 1779f1d096d4af6d3112e39ad36c63ff4b04ae00 | /utils/fluids/time_providers.py | a2eeadc833a15e904487727367793f266f11bbde | [] | no_license | 10erick-cpu/RL_Mixing_Colours | eb753f7b6b309cf54fe41f74f993dfada83d7867 | a11756d6988422929a452dc1e19ccf6b051f0832 | refs/heads/master | 2023-07-27T22:38:34.299006 | 2021-09-09T16:16:33 | 2021-09-09T16:16:33 | 398,141,652 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,874 | py | import datetime
import time
class TimeProvider(object):
def __init__(self):
self.env = None
def get_time(self):
raise NotImplementedError("base")
def advance_time_s(self, seconds, sleep_time=0.2, progress_callback=None):
done_time = self.get_time() + seconds
remain = done_time - self.get_time()
while remain > 0:
s_time = min(sleep_time, remain)
start_sleep = self.get_time()
self.sleep(s_time)
if progress_callback:
progress_callback(delta_time=self.get_time() - start_sleep)
remain = done_time - self.get_time()
def get_human_time_str(self):
return str(datetime.timedelta(seconds=self.get_time()))
def sleep(self, seconds):
raise NotImplementedError()
def set_parent_env(self, env):
self.env = env
class RealTime(TimeProvider):
def sleep(self, seconds):
time.sleep(seconds)
def get_time(self):
return time.time()
def advance_time_s(self, seconds, sleep_time=0.2, progress_callback=None):
if self.env.step_start:
diff = time.time() - self.env.step_start
rel_advance = seconds - diff
#print("Advance realtime: %f seconds" % rel_advance)
if rel_advance >= 0:
super(RealTime, self).advance_time_s(rel_advance, sleep_time, progress_callback)
else:
#print("Advance realtime: %d seconds" % seconds)
super(RealTime, self).advance_time_s(seconds, sleep_time, progress_callback)
class SimulatedTime(TimeProvider):
def sleep(self, seconds):
self.time += 1
#print("SimTime", self.get_time(), self.get_time() / 60 / 60, "hrs")
def __init__(self):
super().__init__()
self.time = 0
def get_time(self):
return self.time
| [
"leonelerick59@gmail.com"
] | leonelerick59@gmail.com |
a5f85460b290b63e1434daa2e7dc4a7d0178df63 | db56d3d14d85e2171c1d8be5564b227da544a056 | /Accounts/migrations/0002_auto_20170827_1753.py | 59f1f8ecd5adc531ef53b702ba5ef59775a871cc | [] | no_license | onkar27/BillDesk | cd039793f90916745399dfcbbef661b8cc42e9af | 0c0046c3fd54198b24042d52b3e206204560d02f | refs/heads/master | 2020-03-12T16:55:28.711387 | 2018-03-15T10:59:50 | 2018-03-15T10:59:50 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 509 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.11.2 on 2017-08-27 12:23
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('Accounts', '0001_initial'),
]
operations = [
migrations.RemoveField(
model_name='profile',
name='Authority_Admin',
),
migrations.RemoveField(
model_name='profile',
name='Authority_Customer',
),
]
| [
"vvt5676@gmail.com"
] | vvt5676@gmail.com |
eaaaa0d8824cc670b4ad903fe69ed5ca32f2c0ab | c703b8ac3b5545857f6c95efa2d61eaf7a664021 | /iPERCore/tools/human_pose2d_estimators/openpose/dataset.py | 0bb0536d30befaa5bad1483400beaff9707f7918 | [
"MIT",
"LicenseRef-scancode-unknown-license-reference",
"LicenseRef-scancode-proprietary-license",
"Apache-2.0",
"BSD-2-Clause"
] | permissive | iPERDance/iPERCore | d29681d229b3098b3517b1abf4f7ea65f579de73 | fcf9a18ffd66bf3fdd3eea4153a3bc4785131848 | refs/heads/main | 2023-07-30T15:04:15.835396 | 2023-04-12T14:21:23 | 2023-04-12T14:21:23 | 313,664,064 | 2,520 | 339 | Apache-2.0 | 2023-05-12T03:26:52 | 2020-11-17T15:36:25 | Python | UTF-8 | Python | false | false | 2,564 | py | # Copyright (c) 2020-2021 impersonator.org authors (Wen Liu and Zhixin Piao). All rights reserved.
import numpy as np
import math
import cv2
import os
def normalize(img, img_mean, img_scale):
img = np.array(img, dtype=np.float32)
img = (img - img_mean) * img_scale
return img
def pad_width(img, stride, pad_value, min_dims):
h, w, _ = img.shape
h = min(min_dims[0], h)
min_dims[0] = math.ceil(min_dims[0] / float(stride)) * stride
min_dims[1] = max(min_dims[1], w)
min_dims[1] = math.ceil(min_dims[1] / float(stride)) * stride
pad = []
pad.append(int(math.floor((min_dims[0] - h) / 2.0)))
pad.append(int(math.floor((min_dims[1] - w) / 2.0)))
pad.append(int(min_dims[0] - h - pad[0]))
pad.append(int(min_dims[1] - w - pad[1]))
padded_img = cv2.copyMakeBorder(img, pad[0], pad[2], pad[1], pad[3],
cv2.BORDER_CONSTANT, value=pad_value)
return padded_img, pad
def preprocess(img, net_input_height_size=368, stride=8,
pad_value=(0, 0, 0), img_mean=(128, 128, 128), img_scale=1/256):
"""
Args:
img:
Returns:
"""
height, width, _ = img.shape
scale = net_input_height_size / height
scaled_img = cv2.resize(img, (0, 0), fx=scale, fy=scale, interpolation=cv2.INTER_CUBIC)
scaled_img = normalize(scaled_img, img_mean, img_scale)
min_dims = [net_input_height_size, max(scaled_img.shape[1], net_input_height_size)]
padded_img, pad = pad_width(scaled_img, stride, pad_value, min_dims)
outputs = {
"img": padded_img,
"pad": pad,
"scale": scale
}
return outputs
class ImageFolderDataset(object):
    """Iterator over the images in a directory.

    Args:
        root_dir: directory containing the image files.
        valid_names: optional explicit list of file names to use; when
            omitted, every entry of *root_dir* is used in sorted order.
    """

    def __init__(self, root_dir, valid_names=None):
        if valid_names is None:
            img_names = os.listdir(root_dir)
            img_names.sort()
        else:
            img_names = valid_names
        self.root_dir = root_dir
        self.img_names = img_names
        self.file_paths = [os.path.join(root_dir, img_name) for img_name in img_names]
        self.max_idx = len(img_names)
        self.idx = 0

    def __iter__(self):
        # Restart iteration from the first image.
        self.idx = 0
        return self

    def __len__(self):
        return self.max_idx

    def __next__(self):
        if self.idx == self.max_idx:
            raise StopIteration
        img = cv2.imread(self.file_paths[self.idx], cv2.IMREAD_COLOR)
        # cv2.imread returns None (not an empty array) when the file cannot
        # be decoded; the previous `img.size == 0` test raised an opaque
        # AttributeError in that case instead of the intended IOError.
        if img is None:
            raise IOError('Image {} cannot be read'.format(self.file_paths[self.idx]))
        self.idx = self.idx + 1
        return img
| [
"liuwen@shanghaitech.edu.cn"
] | liuwen@shanghaitech.edu.cn |
2d191c757142a720141d3a5d96c2126e29b3ef28 | 163bbb4e0920dedd5941e3edfb2d8706ba75627d | /Code/CodeRecords/2094/60737/239642.py | 0098c5af191cc1c3f4d216f01622a587b1ca1811 | [] | no_license | AdamZhouSE/pythonHomework | a25c120b03a158d60aaa9fdc5fb203b1bb377a19 | ffc5606817a666aa6241cfab27364326f5c066ff | refs/heads/master | 2022-11-24T08:05:22.122011 | 2020-07-28T16:21:24 | 2020-07-28T16:21:24 | 259,576,640 | 2 | 1 | null | null | null | null | UTF-8 | Python | false | false | 819 | py | def is_number(s):
s.strip()
if len(s)<1:
return False
i = 0
numstr = list(s)
if (numstr[i] == '+') or (numstr[i] == '-'):
i += 1
pcount = 0
dcount = 0
while (numstr[i].isdigit() or numstr[i] == '.') and (i<len(numstr)-1):
if numstr[i] == '.':
pcount += 1
else:
dcount += 1
i += 1
if pcount>1 or dcount<1:
return False
if numstr[i] == 'e' and i<len(numstr):
dcount = 0
i += 1
if (numstr[i] == '+') or (numstr[i] == '-'):
i += 1
while numstr[i].isdigit() and i<len(numstr)-1:
dcount += 1
i += 1
if dcount<0:
return False
return numstr[i].isdigit()
if __name__ == "__main__":
    # Read one value from stdin and report whether it parses as a number.
    print(is_number(input()))
| [
"1069583789@qq.com"
] | 1069583789@qq.com |
adca9735678e580feace8d4d2e07ce4733269084 | 999879f8d18e041d7fa313132408b252aded47f8 | /01-codes/scipy-master/scipy/stats/mstats.py | 90fbe30640d5e1062c6ca2064baf7cdf5d159f3c | [
"MIT"
] | permissive | QPanProjects/Surrogate-Model | ebcaf05728e82dcbcd924c2edca1b490ab085173 | 848c7128201218b0819c9665e2cec72e3b1d29ac | refs/heads/master | 2022-10-11T19:03:55.224257 | 2020-06-09T14:37:35 | 2020-06-09T14:37:35 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,827 | py | """
===================================================================
Statistical functions for masked arrays (:mod:`scipy.stats.mstats`)
===================================================================
.. currentmodule:: scipy.stats.mstats
This module contains a large number of statistical functions that can
be used with masked arrays.
Most of these functions are similar to those in scipy.stats but might
have small differences in the API or in the algorithm used. Since this
is a relatively new package, some API changes are still possible.
.. autosummary::
:toctree: generated/
argstoarray
betai
chisquare
count_tied_groups
describe
f_oneway
f_value_wilks_lambda
find_repeats
friedmanchisquare
kendalltau
kendalltau_seasonal
kruskalwallis
ks_twosamp
kurtosis
kurtosistest
linregress
mannwhitneyu
plotting_positions
mode
moment
mquantiles
msign
normaltest
obrientransform
pearsonr
plotting_positions
pointbiserialr
rankdata
scoreatpercentile
sem
signaltonoise
skew
skewtest
spearmanr
theilslopes
threshold
tmax
tmean
tmin
trim
trima
trimboth
trimmed_stde
trimr
trimtail
tsem
ttest_onesamp
ttest_ind
ttest_onesamp
ttest_rel
tvar
variation
winsorize
zmap
zscore
compare_medians_ms
gmean
hdmedian
hdquantiles
hdquantiles_sd
hmean
idealfourths
kruskal
ks_2samp
median_cihs
meppf
mjci
mquantiles_cimj
rsh
sen_seasonal_slopes
trimmed_mean
trimmed_mean_ci
trimmed_std
trimmed_var
ttest_1samp
"""
from __future__ import division, print_function, absolute_import
# Functions that support masked array input in stats but need to be kept in the
# mstats namespace for backwards compatibility:
| [
"quanpan302@hotmail.com"
] | quanpan302@hotmail.com |
16ddc0aaeea6e0a6e5a683e93612337aec6b10bb | 691e4890a070b18feb74e1bf817af8ebe9db342b | /V-Scrack/exp/payload/hikvision_default_password.py | d3453024c65e669d0aa9cde7fc9fc71dc845938c | [] | no_license | witchfindertr/Python-crack | 764af457b140fad4f28f259dc14c444d2445b287 | 88659e72e98d4cec0df52d2f5b4b9c8af35a39a6 | refs/heads/master | 2023-02-23T12:45:11.475706 | 2020-03-03T14:41:05 | 2020-03-03T14:41:05 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,108 | py | # coding: utf-8
import sys
import requests
import warnings
def verify(protocol, ip, port):
    """Probe *protocol*://*ip*:*port* for the Hikvision factory credential.

    Sends an authenticated userCheck request using the default password
    (admin / 12345) and returns a (vulnerable, url, number, msg) tuple.
    """
    url = '%s://%s:%s' % (protocol, ip, str(port))
    warnings.filterwarnings("ignore")
    print('testing if hikvision default password admin+12345 vul')
    headers = {
        "User-Agent": "Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10_6_8; en-us) AppleWebKit/534.50 (KHTML, like Gecko) Version/5.1 Safari/534.50",
        # "YWRtaW46MTIzNDU=" is base64 for "admin:12345".
        "Authorization": "Basic YWRtaW46MTIzNDU="
    }
    vulnurl = url + '/ISAPI/Security/userCheck'
    try:
        resp = requests.get(vulnurl, headers=headers, timeout=3, verify=False)
    except Exception as e:
        return False, url, 'v0', str(e)
    if r"<statusValue>200</statusValue>" in resp.text:
        msg = 'There is hikvision default password vul on url: ' + url + ' with password : admin:12345 .'
        return True, url, 'v108', msg
    return False, url, 'v0', 'There is no hikvision default password vul'
| [
"xianghgoog@gmail.com"
] | xianghgoog@gmail.com |
bb16ed90dc453ca90c077f239ce91588832f0a79 | 877eca8af51e43f7bd4c2766251152cd64846163 | /Disqus/templatetags/disqus.py | 560980b2a7aa0019a72f65a19d3842337ff305a0 | [] | no_license | dpitkevics/MyOpinion | 2a949aa5bf0c073b5b0cb564a1d7d827bd115375 | e81182c4a24d139501c82c430758e9e769b477b6 | refs/heads/master | 2020-12-24T17:18:09.334732 | 2015-05-05T14:16:47 | 2015-05-05T14:16:47 | 34,736,697 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,595 | py | from django.template import Library
from django.core.urlresolvers import reverse
from django.core.cache import get_cache
import time
from datetime import datetime
from Disqus import DisqusAPI, APIError
from MyOpinion import settings
# Template-tag registry for this module's tags.
register = Library()
# Default cache backend, used to memoize Disqus API results.
cache = get_cache('default')
@register.assignment_tag()
def get_comments(link):
    """Fetch the Disqus posts of the thread identified by *link*."""
    api = DisqusAPI(settings.DISQUS_SECRET_KEY, settings.DISQUS_PUBLIC_KEY)
    return api.get(
        'threads.listPosts',
        forum=settings.DISQUS_FORUM_NAME,
        thread='link:%s' % link,
        method='get',
    )
@register.assignment_tag()
def get_comment_count(link):
    """Return the number of Disqus comments for *link*, cached for 5 minutes."""
    cache_key = 'disqus_comment_count_%s' % link
    count = cache.get(cache_key)
    if count:
        return count
    # Cache miss (or cached falsy value): re-query the API, treating an
    # API failure as zero comments.
    try:
        count = len(get_comments(link))
    except APIError:
        count = 0
    cache.set(cache_key, count, 300)
    return count
@register.assignment_tag()
def get_forum_url(request, slug):
    """Build the absolute URL of the opinion page for *slug*."""
    path = reverse('Topics:view_opinion', kwargs={'slug': slug})
    return 'http://%s%s' % (request.get_host(), path)
@register.assignment_tag()
def get_latest_action(link):
    """Return the creation time of the newest Disqus post for *link*.

    Returns None when the API call fails or the thread has no posts.
    """
    try:
        posts_list = get_comments(link)
    except APIError:
        return None
    try:
        latest_post = posts_list[0]
    except IndexError:
        return None
    # Parse the timestamp directly. The previous
    # strptime -> mktime -> fromtimestamp round-trip interpreted the value
    # as local time and could shift it by an hour around DST transitions.
    return datetime.strptime(latest_post['createdAt'], '%Y-%m-%dT%H:%M:%S')
"daniels.pitkevics@gmail.com"
] | daniels.pitkevics@gmail.com |
480549448a7b184241c21a165d42e3a8e0a0271a | e845f7f61ff76b3c0b8f4d8fd98f6192e48d542a | /djangocg/db/backends/postgresql_psycopg2/creation.py | 40279f3b84b86e2377c9140cbc52db5d64624a57 | [
"BSD-3-Clause"
] | permissive | timothyclemans/djangocg | fd150c028013cb5f53f5a3b4fdc960a07fdaaa78 | 52cf28e046523bceb5d436f8e6bf61e7d4ba6312 | refs/heads/master | 2021-01-18T13:20:13.636812 | 2012-08-31T23:38:14 | 2012-08-31T23:38:14 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,133 | py | import psycopg2.extensions
from djangocg.db.backends.creation import BaseDatabaseCreation
from djangocg.db.backends.util import truncate_name
class DatabaseCreation(BaseDatabaseCreation):
    """PostgreSQL (psycopg2) implementation of test-database creation."""

    # This dictionary maps Field objects to their associated PostgreSQL column
    # types, as strings. Column-type strings can contain format strings; they'll
    # be interpolated against the values of Field.__dict__ before being output.
    # If a column type is set to None, it won't be included in the output.
    data_types = {
        'AutoField': 'serial',
        'BooleanField': 'boolean',
        'CharField': 'varchar(%(max_length)s)',
        'CommaSeparatedIntegerField': 'varchar(%(max_length)s)',
        'DateField': 'date',
        'DateTimeField': 'timestamp with time zone',
        'DecimalField': 'numeric(%(max_digits)s, %(decimal_places)s)',
        'FileField': 'varchar(%(max_length)s)',
        'FilePathField': 'varchar(%(max_length)s)',
        'FloatField': 'double precision',
        'IntegerField': 'integer',
        'BigIntegerField': 'bigint',
        'IPAddressField': 'inet',
        'GenericIPAddressField': 'inet',
        'NullBooleanField': 'boolean',
        'OneToOneField': 'integer',
        'PositiveIntegerField': 'integer CHECK ("%(column)s" >= 0)',
        'PositiveSmallIntegerField': 'smallint CHECK ("%(column)s" >= 0)',
        'SlugField': 'varchar(%(max_length)s)',
        'SmallIntegerField': 'smallint',
        'TextField': 'text',
        'TimeField': 'time',
    }

    def sql_table_creation_suffix(self):
        """Return the CREATE DATABASE suffix selecting the test charset."""
        assert self.connection.settings_dict['TEST_COLLATION'] is None, "PostgreSQL does not support collation setting at database creation time."
        if self.connection.settings_dict['TEST_CHARSET']:
            return "WITH ENCODING '%s'" % self.connection.settings_dict['TEST_CHARSET']
        return ''

    def sql_indexes_for_field(self, model, f, style):
        """Return the CREATE INDEX statements needed for field *f* of *model*."""
        if f.db_index and not f.unique:
            qn = self.connection.ops.quote_name
            db_table = model._meta.db_table
            tablespace = f.db_tablespace or model._meta.db_tablespace
            if tablespace:
                tablespace_sql = self.connection.ops.tablespace_sql(tablespace)
                if tablespace_sql:
                    tablespace_sql = ' ' + tablespace_sql
            else:
                tablespace_sql = ''

            def get_index_sql(index_name, opclass=''):
                # Index names are truncated to the backend's identifier limit.
                return (style.SQL_KEYWORD('CREATE INDEX') + ' ' +
                        style.SQL_TABLE(qn(truncate_name(index_name,self.connection.ops.max_name_length()))) + ' ' +
                        style.SQL_KEYWORD('ON') + ' ' +
                        style.SQL_TABLE(qn(db_table)) + ' ' +
                        "(%s%s)" % (style.SQL_FIELD(qn(f.column)), opclass) +
                        "%s;" % tablespace_sql)

            output = [get_index_sql('%s_%s' % (db_table, f.column))]
            # Fields with database column types of `varchar` and `text` need
            # a second index that specifies their operator class, which is
            # needed when performing correct LIKE queries outside the
            # C locale. See #12234.
            db_type = f.db_type(connection=self.connection)
            if db_type.startswith('varchar'):
                output.append(get_index_sql('%s_%s_like' % (db_table, f.column),
                                            ' varchar_pattern_ops'))
            elif db_type.startswith('text'):
                output.append(get_index_sql('%s_%s_like' % (db_table, f.column),
                                            ' text_pattern_ops'))
        else:
            output = []
        return output

    def set_autocommit(self):
        """Switch the connection to autocommit mode for test-database DDL."""
        self._prepare_for_test_db_ddl()

    def _prepare_for_test_db_ddl(self):
        """Rollback and close the active transaction."""
        self.connection.connection.rollback()
        self.connection.connection.set_isolation_level(
            psycopg2.extensions.ISOLATION_LEVEL_AUTOCOMMIT)
| [
"timothy.clemans@gmail.com"
] | timothy.clemans@gmail.com |
20e848ad66bc4ecfd92b62c50a9ccdb374c6066c | 3e5487663e2aeb98972347cdbdac282617f601f8 | /releases/tests.py | d8e58602ea87d47161ce3386ca4f0be25f82207a | [] | no_license | areski/djangoproject.com | 58b652ecaea6e5ea130e226d164a111bb563e2a5 | 60a94e4a0dddc15d38bd7fd6db8568be8747c192 | refs/heads/master | 2021-01-18T11:03:49.893412 | 2014-08-05T14:03:20 | 2014-08-05T14:03:20 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 981 | py | from __future__ import absolute_import, unicode_literals
from django.contrib.redirects.models import Redirect
from django.test import TestCase
from .models import create_releases_up_to_1_5
class LegacyURLsTests(TestCase):
    """Check that the releases app reproduces the legacy download redirects."""

    fixtures = ['redirects-downloads']  # provided by the legacy app

    def test_legacy_redirects(self):
        # Save list of redirects, then wipe them
        redirects = list(Redirect.objects.values_list('old_path', 'new_path'))
        Redirect.objects.all().delete()
        # Ensure the releases app faithfully reproduces the redirects
        create_releases_up_to_1_5()
        for old_path, new_path in redirects:
            response = self.client.get(old_path, follow=False)
            location = response.get('Location', '')
            # Older test clients return absolute URLs; strip the host part.
            if location.startswith('http://testserver'):
                location = location[17:]
            # assertEquals is a deprecated alias; use assertEqual.
            self.assertEqual(location, new_path)
            self.assertEqual(response.status_code, 301)
| [
"aymeric.augustin@m4x.org"
] | aymeric.augustin@m4x.org |
570ec3cd5bad80c562187222d7de713a20453bd6 | 1f5420fda4359bfc21b53de3a5f6e6a93b47b996 | /ch06/ch6_602.py | f51eb0830ff133aac9bec233c9abb451e7456f50 | [] | no_license | fl0wjacky/wxPython | 600f5bfccad3ef5589e11573b30cffd1e2708b83 | 50b3cd5a63750d36065684b73aab0da70ff650a7 | refs/heads/master | 2022-09-02T04:24:47.540157 | 2022-08-10T04:13:17 | 2022-08-10T04:13:17 | 13,976,582 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 967 | py | #! /usr/bin/env python
# -*- coding:utf-8 -*-
import wx
from ch06_SketchWindow import SketchWindow
class SketchFrame(wx.Frame):
    """Frame hosting a SketchWindow plus a three-field status bar."""

    def __init__(self, parent):
        wx.Frame.__init__(self, parent, -1, "Sketch Frame", size=(800, 600))
        self.sketch = SketchWindow(self, -1)
        self.sketch.Bind(wx.EVT_MOTION, self.OnSketchMotion)
        self._build_statusbar()

    def _build_statusbar(self):
        # Three fields with 1:2:3 relative widths.
        self.statusbar = self.CreateStatusBar()
        self.statusbar.SetFieldsCount(3)
        self.statusbar.SetStatusWidths([-1, -2, -3])

    def OnSketchMotion(self, event):
        bar = self.statusbar
        bar.SetStatusText(str(event.GetPositionTuple()))
        event.Skip()
        bar.SetStatusText("Current Pts:%s" % len(self.sketch.curLine), 1)
        bar.SetStatusText("Line Count:%s" % len(self.sketch.lines), 2)
class App(wx.App):
    """Application object: creates and shows the SketchFrame on startup."""

    def OnInit(self):
        # Keep a reference so the frame is not garbage-collected.
        self.frame = SketchFrame(None)
        self.frame.Show(True)
        return True
if __name__ == '__main__':
    # Create the application object and hand control to the wx event loop.
    sketch_app = App()
    sketch_app.MainLoop()
| [
"flowjacky@gmail.com"
] | flowjacky@gmail.com |
2db9a31d23446db98d3f0babc65ad406e63c0b02 | 498474967e1480acf5cc0f25756e1d748c677195 | /mmdetection3d/mmdet3d/ops/furthest_point_sample/points_sampler.py | 469037fb9ca004d1f2baa136f5d3484cc941ac49 | [
"MIT",
"Apache-2.0"
] | permissive | hustvl/MapTR | adc37f78cbae9d8c909dd8648088a4930bf55377 | feb0664e64684d3207859279f047fa54a1a806f6 | refs/heads/main | 2023-08-25T17:44:47.672149 | 2023-08-14T13:31:17 | 2023-08-14T13:31:17 | 518,672,305 | 643 | 95 | MIT | 2023-09-14T03:30:23 | 2022-07-28T02:20:43 | Python | UTF-8 | Python | false | false | 5,260 | py | import torch
from mmcv.runner import force_fp32
from torch import nn as nn
from typing import List
from .furthest_point_sample import furthest_point_sample, furthest_point_sample_with_dist
from .utils import calc_square_dist
def get_sampler_type(sampler_type):
    """Resolve a sampler-type string to its points-sampler class.

    Args:
        sampler_type (str): One of "D-FPS", "F-FPS" or "FS".

    Returns:
        class: The matching points sampler type.

    Raises:
        ValueError: If *sampler_type* is not a recognized mode.
    """
    if sampler_type == "D-FPS":
        return DFPS_Sampler
    if sampler_type == "F-FPS":
        return FFPS_Sampler
    if sampler_type == "FS":
        return FS_Sampler
    raise ValueError(
        'Only "sampler_type" of "D-FPS", "F-FPS", or "FS"' f" are supported, got {sampler_type}"
    )
class Points_Sampler(nn.Module):
    """Points sampling.

    Args:
        num_point (list[int]): Number of sample points.
        fps_mod_list (list[str]): Type of FPS method, valid mod
            ['F-FPS', 'D-FPS', 'FS'], Default: ['D-FPS'].
            F-FPS: using feature distances for FPS.
            D-FPS: using Euclidean distances of points for FPS.
            FS: using F-FPS and D-FPS simultaneously.
        fps_sample_range_list (list[int]): Range of points to apply FPS.
            Default: [-1].
    """

    def __init__(
        self,
        num_point: List[int],
        fps_mod_list: List[str] = ["D-FPS"],
        fps_sample_range_list: List[int] = [-1],
    ):
        super(Points_Sampler, self).__init__()
        # FPS would be applied to different fps_mod in the list,
        # so the length of the num_point should be equal to
        # fps_mod_list and fps_sample_range_list.
        assert len(num_point) == len(fps_mod_list) == len(fps_sample_range_list)
        self.num_point = num_point
        self.fps_sample_range_list = fps_sample_range_list
        # One sampler module per requested FPS mode (the list defaults are
        # only read, never mutated).
        self.samplers = nn.ModuleList()
        for fps_mod in fps_mod_list:
            self.samplers.append(get_sampler_type(fps_mod)())
        self.fp16_enabled = False

    @force_fp32()
    def forward(self, points_xyz, features):
        """forward.

        Args:
            points_xyz (Tensor): (B, N, 3) xyz coordinates of the features.
            features (Tensor): (B, C, N) Descriptors of the features.

        Return:
            Tensor: (B, npoint, sample_num) Indices of sampled points.
        """
        indices = []
        last_fps_end_index = 0
        # Each sampler consumes its own slice of the point range; a sample
        # range of -1 means "all remaining points".
        for fps_sample_range, sampler, npoint in zip(
            self.fps_sample_range_list, self.samplers, self.num_point
        ):
            assert fps_sample_range < points_xyz.shape[1]
            if fps_sample_range == -1:
                sample_points_xyz = points_xyz[:, last_fps_end_index:]
                sample_features = (
                    features[:, :, last_fps_end_index:] if features is not None else None
                )
            else:
                sample_points_xyz = points_xyz[:, last_fps_end_index:fps_sample_range]
                sample_features = (
                    features[:, :, last_fps_end_index:fps_sample_range]
                    if features is not None
                    else None
                )
            # Sampled indices are local to the slice; shift them back to
            # global point ids before collecting.
            fps_idx = sampler(sample_points_xyz.contiguous(), sample_features, npoint)
            indices.append(fps_idx + last_fps_end_index)
            last_fps_end_index += fps_sample_range
        indices = torch.cat(indices, dim=1)
        return indices
class DFPS_Sampler(nn.Module):
    """D-FPS sampling: farthest point sampling over Euclidean point distances."""

    def __init__(self):
        super(DFPS_Sampler, self).__init__()

    def forward(self, points, features, npoint):
        """Select *npoint* indices via distance-based FPS; *features* is unused."""
        return furthest_point_sample(points.contiguous(), npoint)
class FFPS_Sampler(nn.Module):
    """F-FPS sampling: farthest point sampling over feature-space distances."""

    def __init__(self):
        super(FFPS_Sampler, self).__init__()

    def forward(self, points, features, npoint):
        """Select *npoint* indices via feature-distance FPS."""
        assert features is not None, "feature input to FFPS_Sampler should not be None"
        fps_input = torch.cat([points, features.transpose(1, 2)], dim=2)
        dist_matrix = calc_square_dist(fps_input, fps_input, norm=False)
        return furthest_point_sample_with_dist(dist_matrix, npoint)
class FS_Sampler(nn.Module):
    """FS sampling: concatenation of the F-FPS and D-FPS index sets."""

    def __init__(self):
        super(FS_Sampler, self).__init__()

    def forward(self, points, features, npoint):
        """Select 2*npoint indices: npoint from F-FPS plus npoint from D-FPS."""
        assert features is not None, "feature input to FS_Sampler should not be None"
        fps_input = torch.cat([points, features.transpose(1, 2)], dim=2)
        dist_matrix = calc_square_dist(fps_input, fps_input, norm=False)
        idx_ffps = furthest_point_sample_with_dist(dist_matrix, npoint)
        idx_dfps = furthest_point_sample(points, npoint)
        return torch.cat([idx_ffps, idx_dfps], dim=1)
| [
"cnliao62@gmail.com"
] | cnliao62@gmail.com |
7b0fbc9f015cf2412785d50bd90a065d9d4a77f2 | c032a4c0f3f8bce2e9271df49a3113509ac80be8 | /code/python/echomesh/base/Quit.py | 94de10817f214ca37f9c3b2caaeabcbb69bf461d | [
"MIT"
] | permissive | neteler/echomesh | e36fad86f84188014515ab4d67790f776b149280 | 23fdc02f852d7f2fd8e118da2e75be9612ada8c7 | refs/heads/master | 2020-12-03T09:12:09.665447 | 2014-07-05T18:39:24 | 2014-07-05T18:39:24 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 856 | py | from __future__ import absolute_import, division, print_function, unicode_literals
import atexit
# True once a shutdown has been requested via request_quit().
QUITTING = False
# Callables invoked (best-effort) when the process quits.
HANDLERS = set()
# `not False` reads like a temporarily flipped debug switch -- effectively True.
PRINT_EXCEPTIONS = not False
# Set once the atexit hook has been installed.
_REGISTERED = False
def register_atexit(handler):
    """Add *handler* to the quit-handler set, installing the atexit hook once."""
    HANDLERS.add(handler)
    if _REGISTERED:
        return
    _register_quit()
def unregister_atexit(handler):
    """Remove *handler* from the quit-handler set; unknown handlers are ignored.

    HANDLERS is a set, so a missing element raises KeyError -- the previous
    ``except ValueError`` (the list.remove idiom) never caught it, and the
    call crashed for handlers that were not registered. ``discard`` is the
    set-native "remove if present" operation.
    """
    HANDLERS.discard(handler)
def request_quit():
    """Flag the process as quitting and invoke every registered handler."""
    global QUITTING
    QUITTING = True
    for handler in HANDLERS:
        try:
            handler()
        except Exception as e:
            # Handlers are best-effort; optionally report, never re-raise.
            if PRINT_EXCEPTIONS:
                print('Exception during quit:', e)
def _register_quit():
    """Install the atexit hook that drains handlers and reports the shutdown."""
    global _REGISTERED
    _REGISTERED = True

    def atexit_quit():
        # Decide the message before request_quit() forces QUITTING to True.
        reason = 'at your request' if QUITTING else 'due to a fatal error'
        request_quit()
        print('echomesh shut down %s.' % reason)

    atexit.register(atexit_quit)
| [
"tom@swirly.com"
] | tom@swirly.com |
f186bad3e46a2bb84657547fa0215f79598e566a | ecad7a72a981f2c1f4a0819cc4770a9b6be0e1d4 | /generate_letter.py | 7075d8ceddff8373d1448de790f5c8e02b65488c | [
"MIT"
] | permissive | drewlinsley/cabc | ecc06ba6f94727485e92327a8e395f529b948d23 | 74726509e542d5f0f04bf297c211cca9f6e87b56 | refs/heads/master | 2022-04-17T23:40:21.331822 | 2020-04-18T19:11:34 | 2020-04-18T19:11:34 | 256,832,692 | 3 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,373 | py | import PIL
from PIL import ImageFont
from PIL import Image
from PIL import ImageDraw
import numpy as np
import matplotlib.pyplot as plt
import os
from skimage.filters import threshold_otsu
import scipy
from scipy import ndimage
from scipy.interpolate import griddata
import cv2
import preprocess
if __name__ == "__main__":
    # DEFINE AND LOAD FONT
    script_root = '/Users/junkyungkim/Documents/PycharmProjects/cluttered_nist'
    fontnames = ['FUTRFW.ttf',
                 'Instruction.otf',
                 'absender1.ttf',
                 '5Identification-Mono.ttf',
                 '7Segment.ttf',
                 'VCR_OSD_MONO_1.001.ttf',
                 'Instruction.otf',
                 'Segment16B Regular.ttf']
    std_fontsizes = [225, 240, 225, 150, 255, 255, 255, 255]
    # NOTE(review): only 6 thinning values for 8 fonts/sizes -- zip() below
    # silently drops the last two fonts. Confirm whether that is intended.
    std_thin_iters = [6, 15, 4, 9, 9, 2]
    scale = 1  # 0.5
    for fontname, std_fontsize, std_thin_iter in zip(fontnames, std_fontsizes, std_thin_iters):
        # Apply the global scale to both the font size and the erosion depth.
        std_fontsize = int(std_fontsize*scale)
        std_thin_iter = int(std_thin_iter*scale)
        font = ImageFont.truetype(os.path.join(script_root,'fonts',fontname), std_fontsize)
        # RENDER: draw the reference letters black-on-white.
        img=Image.new("RGBA", (2500, 300), (255, 255, 255))
        draw = ImageDraw.Draw(img)
        draw.text((0, 0), "ABCDEFGXYZ", (0, 0, 0), font=font)
        draw = ImageDraw.Draw(img)
        # MORPHOLOGICAL POSTPROC (FOR CONSTANT STROKE THICKNESS)
        # Invert to white-on-black grayscale, then binarize and erode.
        img = 255 - np.mean(np.array(img), axis=2)
        binary = img > 128
        # img_closed = scipy.ndimage.binary_closing(binary.astype(np.int), iterations=20)##np.maximum(iterations / 2, 1))
        img_eroded = (scipy.ndimage.morphology.binary_erosion(binary, iterations=std_thin_iter) * 255).astype(np.uint8)
        # Random smooth distortion field applied to the eroded glyphs.
        landscape = preprocess.generate_distortion_mask(img_eroded, sigma=[4000,2000], num_centers=[30,20])
        warped = preprocess.custom_warp(img_eroded, landscape, power=0.07)
        # img_dist = img_eroded
        # distCoeffs = [-.1, 1.0, 1.0, 1.0]
        # focal_length = [1000, 1000]
        # for coord in [[400,100],[500,150],[600,200]]:
        #     distCoeffs[0] = distCoeffs[0]*-1
        #     img_dist = custom_fisheye(img_dist, coord, distCoeffs, focal_length)
        # import preprocess
        # im_pixelated = preprocess.pixelate_obj(img_eroded, [10 * scale, 10 * scale], 0.1, 5 * scale, ignore_fit=True)
        # Show the raw binarization (top) against the warped result (bottom).
        plt.subplot(211);plt.imshow(binary, cmap='gray')
        plt.subplot(212);plt.imshow(warped, cmap='gray')
        plt.show()
        # thinned = zhangSuen(binary)
        # plt.subplot(121)
        # plt.imshow(img)
        # plt.subplot(122)
        # plt.imshow(thinned)
# plt.show() | [
"drewlinsley@gmail.com"
] | drewlinsley@gmail.com |
17dcde27a40003f391afec81b727a34d7431e102 | bcc199a7e71b97af6fbfd916d5a0e537369c04d9 | /leetcode/solved/1386_Shift_2D_Grid/solution.py | 1c0ab2bfa3017fd43451588edc1f42f31d0e64dc | [] | no_license | sungminoh/algorithms | 9c647e82472905a2c4e505c810b622b734d9d20d | 1389a009a02e90e8700a7a00e0b7f797c129cdf4 | refs/heads/master | 2023-05-01T23:12:53.372060 | 2023-04-24T06:34:12 | 2023-04-24T06:34:12 | 87,406,513 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,482 | py | #! /usr/bin/env python
# -*- coding: utf-8 -*-
# vim:fenc=utf-8
#
# Copyright © 2020 sungminoh <smoh2044@gmail.com>
#
# Distributed under terms of the MIT license.
"""
Given a 2D grid of size m x n and an integer k. You need to shift the grid k times.
In one shift operation:
Element at grid[i][j] moves to grid[i][j + 1].
Element at grid[i][n - 1] moves to grid[i + 1][0].
Element at grid[m - 1][n - 1] moves to grid[0][0].
Return the 2D grid after applying shift operation k times.
Example 1:
Input: grid = [[1,2,3],[4,5,6],[7,8,9]], k = 1
Output: [[9,1,2],[3,4,5],[6,7,8]]
Example 2:
Input: grid = [[3,8,1,9],[19,7,2,5],[4,6,11,10],[12,0,21,13]], k = 4
Output: [[12,0,21,13],[3,8,1,9],[19,7,2,5],[4,6,11,10]]
Example 3:
Input: grid = [[1,2,3],[4,5,6],[7,8,9]], k = 9
Output: [[1,2,3],[4,5,6],[7,8,9]]
Constraints:
m == grid.length
n == grid[i].length
1 <= m <= 50
1 <= n <= 50
-1000 <= grid[i][j] <= 1000
0 <= k <= 100
"""
import sys
from typing import List
import pytest
class Solution:
    def shiftGrid(self, grid: List[List[int]], k: int) -> List[List[int]]:
        """Shift the grid k positions, in place, via cycle rotation.

        Elements move forward in row-major order, wrapping from the last
        cell back to the first. Runs in O(m*n) time with O(1) extra space
        and returns the (mutated) input grid.
        """
        if not grid or not grid[0] or k == 0:
            return grid

        rows, cols = len(grid), len(grid[0])
        total = rows * cols

        def gcd(a, b):
            # Iterative Euclid; the result is the number of disjoint cycles.
            while b:
                a, b = b, a % b
            return a

        def step(r, c, amount):
            # Position `amount` cells ahead of (r, c) in row-major order,
            # wrapping around the grid.
            flat = (r * cols + c + amount) % total
            return flat // cols, flat % cols

        r, c = 0, 0
        for _ in range(gcd(total, k)):
            # Rotate one cycle, carrying each value to its successor.
            carry = grid[r][c]
            nr, nc = step(r, c, k)
            while (nr, nc) != (r, c):
                carry, grid[nr][nc] = grid[nr][nc], carry
                nr, nc = step(nr, nc, k)
            carry, grid[nr][nc] = grid[nr][nc], carry
            # Start the next cycle at the following cell.
            r, c = step(r, c, 1)
        return grid
@pytest.mark.parametrize('grid, k, expected', [
    ([[1,2,3],[4,5,6],[7,8,9]], 1, [[9,1,2],[3,4,5],[6,7,8]]),
    ([[3,8,1,9],[19,7,2,5],[4,6,11,10],[12,0,21,13]], 4, [[12,0,21,13],[3,8,1,9],[19,7,2,5],[4,6,11,10]]),
    ([[1,2,3],[4,5,6],[7,8,9]], 9, [[1,2,3],[4,5,6],[7,8,9]]),
])
def test(grid, k, expected):
    """Check shiftGrid against the three reference examples."""
    assert expected == Solution().shiftGrid(grid, k)


if __name__ == '__main__':
    # Allow running this file directly: delegate to pytest with verbose output.
    sys.exit(pytest.main(["-s", "-v"] + sys.argv))
| [
"smoh2044@gmail.com"
] | smoh2044@gmail.com |
836ee8709c8b048ff79134e0e08213d8f01fd856 | 635c9f0501039f5a099849a3108e19de76092aea | /ssafy_project/project_190322/project_8/movies/urls.py | 1136c2901dad4d2cbab7e8155d3ad5706fba69fe | [] | no_license | Hansung-Lee/SSAFY | 87ebea0808bb40381678d678e1035dc5fa2c2eb0 | cdb7ae1bba0e98e733eed703da2c62217c319462 | refs/heads/master | 2020-04-14T20:03:05.975040 | 2019-05-16T08:57:21 | 2019-05-16T08:57:21 | 164,080,393 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 533 | py | from django.urls import path
from . import views
# URL namespace for reversing these routes, e.g. {% url 'movies:index' %}.
app_name = 'movies'

urlpatterns = [
    path('', views.index, name="index"),
    path('<int:movie_id>/', views.detail, name="detail"),
    path('<int:movie_id>/delete/', views.delete, name="delete"),
    path('<int:movie_id>/scores/new/', views.create_score, name="create_score"),
    path('<int:movie_id>/scores/<int:score_id>/delete/', views.delete_score, name="delete_score"),
    path('<int:movie_id>/edit/', views.edit, name="edit"),
    # 'new/' cannot clash with the patterns above: the <int:...> converter
    # only matches digits.
    path('new/', views.new, name="new"),
]
| [
"ajtwlsgkst@naver.com"
] | ajtwlsgkst@naver.com |
896945d867a0323b7a1fb35358ffa30a466dee9a | 53fab060fa262e5d5026e0807d93c75fb81e67b9 | /backup/user_229/ch74_2020_04_13_02_53_22_051219.py | 5eaf44ad0d6ec022abd09d63fa6ae9c97885c49d | [] | no_license | gabriellaec/desoft-analise-exercicios | b77c6999424c5ce7e44086a12589a0ad43d6adca | 01940ab0897aa6005764fc220b900e4d6161d36b | refs/heads/main | 2023-01-31T17:19:42.050628 | 2020-12-16T05:21:31 | 2020-12-16T05:21:31 | 306,735,108 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 331 | py | def conta_bigramas(string):
dic = dict()
i = 0
while i <= len(string):
if '{0}{1}'.format(string[i], string[i+1]) in dic:
dic['{0}{1}'.format(string[i], string[i+1])] += 1
i += 1
else:
dic['{0}{1}'.format(string[i], string[i+1])] = 1
i += 1
return dic | [
"you@example.com"
] | you@example.com |
ed8923af89964cb325794d4ffef156318ce5703a | c36d9d70cbb257b2ce9a214bcf38f8091e8fe9b7 | /623_add_one_row_to_tree.py | 770582da734c9a59f464bf65486460b2ac4d96b9 | [] | no_license | zdadadaz/coding_practice | 3452e4fc8f4a79cb98d0d4ea06ce0bcae85f96a0 | 5ed070f22f4bc29777ee5cbb01bb9583726d8799 | refs/heads/master | 2021-06-23T17:52:40.149982 | 2021-05-03T22:31:23 | 2021-05-03T22:31:23 | 226,006,763 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 731 | py | # Definition for a binary tree node.
# class TreeNode:
# def __init__(self, val=0, left=None, right=None):
# self.val = val
# self.left = left
# self.right = right
class Solution:
    def addOneRow(self, root: TreeNode, v: int, d: int) -> TreeNode:
        """Insert a row of value-*v* nodes at depth *d* (the root is depth 1).

        Each new node adopts the original subtree on the side it replaces;
        inserting at depth 1 makes the whole tree the left child of the new
        root.
        """
        def insert(node, depth, attach_left):
            if depth == d:
                # The new node takes `node` as its child on the original side.
                if attach_left:
                    return TreeNode(v, node, None)
                return TreeNode(v, None, node)
            if node is None:
                return None
            node.left = insert(node.left, depth + 1, True)
            node.right = insert(node.right, depth + 1, False)
            return node

        return insert(root, 1, True)
"zdadadaz5566@gmail.com"
] | zdadadaz5566@gmail.com |
1e8215053bd8f5b9014a0cb27339c62856eb3a95 | 5141a0ec55675aebf6b2a3f2454022e3fd8ad639 | /tractseg/experiments/pretrained_models/TractSeg_HR_3D_DAug.py | 24f5daef597317290e5b07a3d0f6571cd0134d58 | [
"Apache-2.0"
] | permissive | bbastardes/TractSeg | 303871586c682983b451885e069a75822a6c09db | 70714fca0e0dc241946ffc704f05b65095c7effb | refs/heads/master | 2020-05-27T03:02:27.806224 | 2019-05-16T14:50:12 | 2019-05-16T14:50:12 | 188,459,492 | 0 | 0 | null | 2019-05-24T17:04:07 | 2019-05-24T17:04:06 | null | UTF-8 | Python | false | false | 432 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
from tractseg.experiments.tract_seg import Config as TractSegConfig
class Config(TractSegConfig):
    """3D TractSeg experiment: small deep-supervision 3D U-Net with DAug."""

    # Experiment name is derived from this file's name.
    EXP_NAME = os.path.basename(__file__).split(".")[0]

    MODEL = "UNet3D_Pytorch_DeepSup_sm"
    DIM = "3D"
    UPSAMPLE_TYPE = "trilinear"
    # Batch size 1 and 8 filters keep the 3D model within GPU memory.
    BATCH_SIZE = 1
    UNET_NR_FILT = 8

    """
    Memory consumption
    NR_FILT=8: 10900MB
    System RAM running full (>30GB) from DAug
    """
"j.wasserthal@dkfz.de"
] | j.wasserthal@dkfz.de |
f75fc4dbabe8144a65242333ea6b50c9c71cc1ee | 5e9fe8ea97cb75326566b6469b7c4903aff680dc | /models/academic/test.py | 6b6a3ab1cdbeb5239df483a71c28ef113bcd2cc6 | [] | no_license | Trilokan/vetrivel | a2f5444cff88319c4bde2016694ee084df5912fc | e6d4f1cae23f70a3b82da2e5971706d4b417f056 | refs/heads/master | 2020-04-22T13:16:32.663207 | 2019-05-01T11:04:22 | 2019-05-01T11:04:22 | 170,403,357 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,043 | py | # -*- coding: utf-8 -*-
from odoo import models, fields, api
# Shared workflow states for tests and their detail lines.
PROGRESS_INFO = [("draft", "Draft"),
                 ("confirmed", "Confirmed"),
                 ("done", "Done"),
                 ("cancel", "Cancel")]
class ArcTest(models.Model):
    """A class test for a section/subject; detail lines hold per-student marks."""

    _name = "arc.test"

    date = fields.Date(string="Date", default="", required=True)
    # Reference/sequence name; set in create() below.
    name = fields.Char(string="Name", readonly=True)
    academic_id = fields.Many2one(comodel_name="arc.academic", string="Academic")
    standard_id = fields.Many2one(comodel_name="arc.standard", string="Standard")
    section_id = fields.Many2one(comodel_name="arc.section", string="Section", required=True)
    subject_id = fields.Many2one(comodel_name="arc.subject", string="Subject", required=True)
    syllabus_id = fields.Many2one(comodel_name="arc.syllabus", string="Syllabus", required=True)
    teacher_id = fields.Many2one(comodel_name="arc.person", string="Teacher", required=True)
    total_marks = fields.Float(string="Total Marks", default=0.0, required=True)
    test_detail = fields.One2many(comodel_name="arc.test.detail", inverse_name="test_id")
    progress = fields.Selection(selection=PROGRESS_INFO, default="draft")

    @api.multi
    def add_test_detail(self):
        # Placeholder: intended to populate test_detail lines; not implemented.
        pass

    @api.multi
    def trigger_done(self):
        """Mark the test as done."""
        self.write({"progress": "done"})

    @api.model
    def create(self, vals):
        # NOTE(review): name is forced to 0 here; presumably it should come
        # from a sequence -- confirm.
        vals["name"] = 0
        section_id = self.env["arc.section"].search([("id", "=", vals["section_id"])])
        # NOTE(review): the four assignments below write the section *record*
        # where ids are expected, assign "academic_id" three times, and set
        # "standard_id" to the section itself. This looks wrong -- likely
        # intended section_id.academic_id / section_id.standard_id etc.
        vals["academic_id"] = section_id
        vals["standard_id"] = section_id
        vals["academic_id"] = section_id
        vals["academic_id"] = section_id
        return super(ArcTest, self).create(vals)
_name = "arc.test.detail"
student_id = fields.Many2one(comodel_name="arc.student", string="Student")
marks = fields.Float(string="Total Marks")
test_id = fields.Many2one(comodel_name="arc.test", string="Test")
progress = fields.Selection(selection=PROGRESS_INFO, related="test_id.progress")
| [
"ram@hk.com"
] | ram@hk.com |
d17005dff1ae118402fafd7e028e8a38dd73de61 | 78c4b89e8b4d6ca2eeb1ad8979bfa19f8ce67c06 | /Curso em Video/desafio066b.py | aee4bf97cc810bd2fad09b5677c464a5580044aa | [] | no_license | lucasoliveiraprofissional/Cursos-Python | 75d42b60167cfdf4324e615f8e24868d324de350 | 3eb000949f159ff69b7b92dbb5ff37df1cd0e1de | refs/heads/main | 2023-01-28T19:35:54.490747 | 2020-12-01T01:11:02 | 2020-12-01T01:11:02 | 315,455,606 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 723 | py | '''Crie um programa que leia vários números
inteiros pelo teclado. O programa só vai parar
quando o usuário digitar o valor 999, que é a
condição de parada. No final, mostre quantos números
foram digitados e qual foi a soma entre eles
(desconsiderando o flag)'''
'''Copiei a versão dele já de cara pois fiquei
mais de uma semana sem treinar Python'''
# Running total and count of the values typed so far.
soma = cont = 0
while True:
    num= int(input('Digite um valor (999 para parar): '))
    # 999 is the sentinel: stop *before* it is counted or added, which is
    # why this check comes before the accumulation below.
    if num == 999:
        break
    cont += 1
    soma += num
print(f'A soma dos {cont} valores foi: {soma}')
| [
"lucas.oliveira@quaestum.com.br"
] | lucas.oliveira@quaestum.com.br |
def find_strongest_eggs(*args):
    """Deal ``nums`` round-robin into ``sublist_count`` sublists and return
    the middle element of every sublist that forms a strict "peak".

    ``args`` is ``(nums, sublist_count)``.  A sublist's middle element is
    collected when it is greater than both neighbours AND the left
    neighbour is smaller than the right one.  Sublists are expected to have
    odd length >= 3 (the middle-index arithmetic below assumes that, just
    like the original dealing loop did).

    Improvements over the previous implementation: the caller's ``nums``
    list is no longer mutated (it used to be emptied), and the quadratic
    repeated ``pop(0)`` is replaced by O(n) slicing.
    """
    nums, sublist_count = args
    # Round-robin deal: element i goes to sublist i % sublist_count.
    matrix = [nums[i::sublist_count] for i in range(sublist_count)]
    result = []
    for sublist in matrix:
        mid = len(sublist) // 2
        left_num = sublist[mid - 1]
        mid_num = sublist[mid]
        right_num = sublist[mid + 1]
        if left_num < mid_num > right_num and left_num < right_num:
            result.append(mid_num)
    return result
| [
"hristiyan.plamenov.valchev@gmail.com"
] | hristiyan.plamenov.valchev@gmail.com |
5b0105d2c2eee49411aac32a2215f322fa9297fe | a60e81b51935fb53c0900fecdadba55d86110afe | /python/note/上下文管理器/context_manager.py | cb902170d1e29a50a78f3b9cbdc96dc0205641cc | [] | no_license | FrankieZhen/Lookoop | fab6855f5660467f70dc5024d9aa38213ecf48a7 | 212f8b83d6ac22db1a777f980075d9e12ce521d2 | refs/heads/master | 2020-07-27T08:12:45.887814 | 2019-09-16T11:48:20 | 2019-09-16T11:48:20 | 209,021,915 | 1 | 0 | null | 2019-09-17T10:10:46 | 2019-09-17T10:10:46 | null | UTF-8 | Python | false | false | 699 | py | # coding=utf-8
# 2019-1-28
# Context managers
# __enter__ and __exit__ handle resource acquisition and release work, e.g.
# opening/closing files, exception handling, closing stream connections and
# acquiring/releasing locks.
class MyContextManager(object):
    """Minimal demo context manager that logs enter/exit and suppresses
    every exception except ValueError (and the no-exception case)."""

    def __enter__(self):
        print("extering...")

    def __exit__(self, exception_type, exception_value, traceback):
        # Returning True from __exit__ suppresses the active exception;
        # returning False lets it propagate.
        print("leaving...")
        if exception_type is None:
            print("No Exception.")
        elif exception_type is ValueError:
            print("Value error")
        else:
            print("other error")
            return True
        return False
if __name__ == '__main__':
    # Success-path demo: __enter__ runs first, then the body, then
    # __exit__ reports that no exception occurred.
    with MyContextManager():
        print("Testing..")
"33798487+YangXiaoo@users.noreply.github.com"
] | 33798487+YangXiaoo@users.noreply.github.com |
ed0b68197695bd7616d45d70acdfda006ea8500d | 28cab1ef484a5796fc9b0897043e918f9a28e650 | /dalangshen/urls.py | 6fddb5ed1f496258092b663337e2019cbb419cce | [] | no_license | bxxfighting/dalangshen | 12cb58d2078804327dbf7a01be0fc2a0d27f4495 | e174147b8778c188941d5fd0f5e33de65afc8b00 | refs/heads/main | 2023-01-15T08:07:57.429342 | 2020-11-16T03:49:34 | 2020-11-16T03:49:34 | 313,184,879 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 170 | py | from django.urls import path
from django.urls import include
# Root URL configuration: everything is namespaced under /api/v1/.
# Bug fix: the nested patterns were wrapped in a set literal ``{...}``;
# django.urls.include() expects a list of patterns (or a urlconf module),
# so the braces are replaced with a list.
urlpatterns = [
    path('api/v1/', include([
        # All account routes live under /api/v1/account/.
        path('account/', include('account.urls')),
    ]))
]
| [
"boxingxing@limikeji.com"
] | boxingxing@limikeji.com |
fd0d07b0637a2c64fcfc0b85df0f40ca42457059 | 2f98aa7e5bfc2fc5ef25e4d5cfa1d7802e3a7fae | /python/python_15695.py | b7ea853e6f7881ca090ce7e50b8f64f6d852da95 | [] | no_license | AK-1121/code_extraction | cc812b6832b112e3ffcc2bb7eb4237fd85c88c01 | 5297a4a3aab3bb37efa24a89636935da04a1f8b6 | refs/heads/master | 2020-05-23T08:04:11.789141 | 2015-10-22T19:19:40 | 2015-10-22T19:19:40 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 144 | py | # calculate 95 percentile of the list values in python
import numpy as np
# NOTE(review): `finalvalues` is not defined in this snippet — from the
# usage it is assumed to be a dict whose values are iterables of
# int-convertible items; confirm against the surrounding program.
for i in finalvalues.values():
    # Parenthesised print runs under both Python 2 and Python 3 (the
    # original bare `print` statement was Python-2 only); list() materialises
    # map() so np.percentile also works on Python 3, where map is lazy.
    print(np.percentile(list(map(int, i)), 95))
| [
"ubuntu@ip-172-31-7-228.us-west-2.compute.internal"
] | ubuntu@ip-172-31-7-228.us-west-2.compute.internal |
f5c59364b1ad428e4c9a85d5771d8f42799e1390 | e562f7e0a51273475e50a7e61d1d377f88775622 | /WebMirror/SpecialCase.py | 5cc8b15a5803bf4e34658c351f8014916ff5a21f | [] | no_license | bloodcurdle/ReadableWebProxy | d1c6ae0220fdb04ea7ab82963c86e776a0dbbfd9 | 10f68f913a78f8b0e47582996d9860a61da55dd6 | refs/heads/master | 2021-05-29T19:58:32.965610 | 2015-11-09T18:25:00 | 2015-11-09T18:25:00 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,427 | py |
import logging
import datetime
import random
import WebMirror.database as db
import multiprocessing
import time
random.seed()
# import WebMirror.rules
# import WebMirror.LogBase as LogBase
# import runStatus
# import time
# import os.path
# import os
# import sys
# import sqlalchemy.exc
# from sqlalchemy import desc
# from sqlalchemy.sql import text
# from sqlalchemy import distinct
# from sqlalchemy.dialects import postgresql
# import WebMirror.util.urlFuncs
# import urllib.parse
# import traceback
# import datetime
# from sqlalchemy.sql import text
# from sqlalchemy.sql import func
# import WebMirror.util.webFunctions as webFunctions
# import hashlib
# from WebMirror.Fetch import DownloadException
# import WebMirror.Fetch
# import WebMirror.database as db
# from config import C_RESOURCE_DIR
# from activePlugins import INIT_CALLS
# if "debug" in sys.argv:
# CACHE_DURATION = 1
# RSC_CACHE_DURATION = 1
# # CACHE_DURATION = 60 * 5
# # RSC_CACHE_DURATION = 60 * 60 * 5
# else:
# CACHE_DURATION = 60 * 60 * 24 * 7
# RSC_CACHE_DURATION = 60 * 60 * 24 * 147
# Per-netloc count of in-flight fetches, guarded by FETCH_LOCK.
# NOTE(review): this is a plain dict; with multiprocessing workers each
# process holds its own copy, so the counts are per-process only — confirm
# whether a shared structure (e.g. a Manager().dict()) is intended.
ACTIVE_FETCHES = {
    # Populated at runtime
}

FETCH_LOCK = multiprocessing.Lock()

log = logging.getLogger("Main.Web.SpecialCaseHandler")
def handleRemoteFetch(params, job, engine):
    """Stub handler for the 'remote_fetch' special case (not implemented)."""
    print("Remote fetch command!")
def handleRateLimiting(params, job, engine):
    """Run ``job`` through ``engine`` unless too many fetches for the job's
    netloc are already in flight, in which case the job is deferred.

    ``params[0]`` is the maximum number of concurrent fetches allowed for
    this netloc.
    """
    allowable = params[0]
    with FETCH_LOCK:
        # First sighting of this netloc: start its in-flight counter at 0.
        if not job.netloc in ACTIVE_FETCHES:
            ACTIVE_FETCHES[job.netloc] = 0

    log.info("Active fetchers for domain %s - %s", job.netloc, ACTIVE_FETCHES[job.netloc])
    # NOTE(review): check-then-act — the counter can change between this
    # read and the increment below, so the limit is only approximate.
    if ACTIVE_FETCHES[job.netloc] > allowable:
        log.info("Too many instances of fetchers for domain %s active. Forcing requests to sleep for a while", job.netloc)
        # Defer the job 5-10 minutes; randomised to spread retries out.
        job.ignoreuntiltime = datetime.datetime.now() + datetime.timedelta(seconds=60*5 + random.randrange(0, 60*5))
        db.get_session().commit()
        return
    else:
        # Register the fetch, do the work, then deregister.
        with FETCH_LOCK:
            ACTIVE_FETCHES[job.netloc] += 1
        engine.do_job(job)
        # Brief pause between fetches for the same domain.
        time.sleep(5)
        with FETCH_LOCK:
            ACTIVE_FETCHES[job.netloc] -= 1
# Maps a special-case op name (first element of a rules entry) to the
# function that handles it; consulted by handleSpecialCase() below.
dispatchers = {
    'remote_fetch' : handleRemoteFetch,
    'rate_limit' : handleRateLimiting,
}
def handleSpecialCase(job, engine, rules):
    """Dispatch ``job`` to the special-case handler configured for its netloc.

    ``rules[job.netloc]`` is a sequence whose first element names the
    handler (a key of ``dispatchers``) and whose remaining elements are the
    handler's parameters.
    """
    commands = rules[job.netloc]
    op, params = commands[0], commands[1:]
    if op in dispatchers:
        dispatchers[op](params, job, engine)
    else:
        log.error("Error! Unknown special-case filter!")
        # Bug fix: this line previously called print() with logging-style
        # lazy %s arguments, which printed the raw tuple instead of a
        # formatted message; use the logger like the line above.
        log.error("Filter name: '%s', parameters: '%s', job URL: '%s'", op, params, job.url)
| [
"something@fake-url.com"
] | something@fake-url.com |
75442cee4229c57d333ef72e9ab9a0ece37171d7 | bffcbc372a73aa386d20e50cf85f6943bdb640fe | /image_processing_lab2/read_grey_image_general_kernel_v2.py | 1b060512bb69294623a1789bd823b90e80d3e49d | [] | no_license | liketheflower/img_processing_csc475 | 5852cefe411c3b0b55b7383c697727e450930594 | d50b1b65e5bdd509065d6eb7bbda3db23b9a9005 | refs/heads/master | 2021-05-04T13:43:02.928721 | 2018-05-07T15:27:41 | 2018-05-07T15:27:41 | 120,321,405 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,510 | py | import numpy as np
import matplotlib.pyplot as plt
import matplotlib.image as mpimg
def crop(start, end, origin_image):
    """Return the sub-image spanning rows start[0]..end[0] and columns
    start[1]..end[1], with both endpoints included."""
    (row0, col0), (row1, col1) = start, end
    return origin_image[row0:row1 + 1, col0:col1 + 1]
def show_img(img):
    """Display ``img`` in a new grayscale matplotlib window (blocks until closed)."""
    plt.figure()
    plt.imshow(img, cmap='gray', aspect='auto')
    plt.show()
def mean_filter_new(img):
    """Correlate ``img`` with a fixed 3x3 kernel (currently the Sobel
    row/x-direction kernel) and return the result as a float array.

    Border pixels the 3x3 window does not fully cover are left at 0,
    matching the original border-skipping behaviour.
    """
    # Kernels this script has been used with, kept for reference (the
    # original code assigned them all in sequence so only the last one
    # ever took effect — dead code now removed):
    #   mean:     [[1/9.]*3]*3
    #   gaussian: [[1,2,1],[2,4,2],[1,2,1]] scaled by 1/16
    #   sobel y:  [[-1,-2,-1],[0,0,0],[1,2,1]]
    # Active kernel: Sobel, row (x) direction.
    filter_ = [[-1, 0, 1],
               [-2, 0, 2],
               [-1, 0, 1]]
    new_img = np.zeros(img.shape)
    R, C = new_img.shape
    D = 1
    # Iterate interior pixels only; restricting the ranges replaces the
    # original per-pixel boundary test and `continue`.
    for i in range(D, R - D):
        for j in range(D, C - D):
            # Inclusive 3x3 window centred on (i, j) (inlined former crop()).
            window = img[i - D:i + D + 1, j - D:j + D + 1]
            acc = 0.0
            for ii in range(3):
                for jj in range(3):
                    acc += window[ii, jj] * filter_[ii][jj]
            new_img[i, j] = acc
    return new_img
def mean_filter(filter_size, img):
    """Apply a ``filter_size`` x ``filter_size`` mean (box) filter to ``img``.

    ``filter_size`` is expected to be odd.  Border pixels the window does
    not fully cover are left at 0, matching the original behaviour.  The
    stray debug ``print type(R), type(C)`` (Python-2-only syntax) was
    removed.
    """
    new_img = np.zeros(img.shape)
    R, C = new_img.shape
    D = filter_size // 2  # explicit floor division (works on Python 3 too)
    for i in range(D, R - D):
        for j in range(D, C - D):
            # Inclusive window centred on (i, j) (inlined former crop()).
            window = img[i - D:i + D + 1, j - D:j + D + 1]
            new_img[i, j] = np.average(window)
    return new_img
# --- Demo driver. NOTE(review): this file targets Python 2 — the bare
# `print` statements below are Python-2-only syntax. ---
file_name = "lena_gray.bmp"
#file_name ="zelda2.bmp"
#file_name = "./../../DM/ID13_YANGJUYING_OD01/Intensity/ID13_YANGJUYING_OD01_0.bmp"
# Load the test image and show the unfiltered original.
img = mpimg.imread(file_name)
#img = np.zeros((200,200))
#tem_img = np.ones((100,200))
#img[:100,:] = tem_img
plt.figure()
plt.imshow(img, cmap='gray', aspect='auto')
plt.show()
filter_size = 3
#new_img = mean_filter(filter_size,img)
# Apply the hard-coded 3x3 kernel version and display the result.
new_img = mean_filter_new(img)
plt.figure()
plt.imshow(new_img, cmap='gray', aspect='auto')
plt.title("image after new mean filter with size of "+str(filter_size))
plt.show()
# Crop demos: crop() uses inclusive endpoints.
start =(100,100)
end =(300,300)
croped_image = crop(start, end, img)
print croped_image
show_img(croped_image)
croped_image = crop((100,100), (110,110), img)
print croped_image
show_img(croped_image)
| [
"jim.morris.shen@gmail.com"
] | jim.morris.shen@gmail.com |
45e24bd2f0b4c25cfc4070133cbbbf1078100f86 | a5a682cfcb90660b35806b895b7c6e8dfa9914c2 | /h/streamer/test/streamer_test.py | 790f76a40822f2510c40638715ceee05cbf1973d | [
"MIT",
"BSD-2-Clause",
"BSD-3-Clause",
"BSD-2-Clause-Views"
] | permissive | jazahn/h | 507ead458bb16134b79c270110ff72b7a8bfd600 | 8cfd1012d4ee41a21d1ad81427961ac2ac59464f | refs/heads/master | 2021-01-15T14:49:51.006483 | 2016-04-15T18:14:53 | 2016-04-15T18:14:53 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,534 | py | # -*- coding: utf-8 -*-
import mock
from mock import call
import pytest
from h.streamer import nsq
from h.streamer import streamer
from h.streamer import websocket
def test_process_work_queue_sends_nsq_messages_to_nsq_handle_message(session):
    """NSQ queue entries are routed to nsq.handle_message."""
    message = nsq.Message(topic='foo', payload='bar')
    queue = [message]

    streamer.process_work_queue({}, queue, session_factory=lambda: session)

    nsq.handle_message.assert_called_once_with(message,
                                               topic_handlers=mock.ANY)


def test_process_work_queue_uses_appropriate_topic_handlers_for_nsq_messages(session):
    """The nsq.namespace setting prefixes the topic-handler table keys."""
    message = nsq.Message(topic='foo', payload='bar')
    queue = [message]

    streamer.process_work_queue({'nsq.namespace': 'wibble'},
                                queue,
                                session_factory=lambda: session)

    topic_handlers = {
        'wibble-annotations': nsq.handle_annotation_event,
        'wibble-user': nsq.handle_user_event,
    }
    nsq.handle_message.assert_called_once_with(mock.ANY,
                                               topic_handlers=topic_handlers)


def test_process_work_queue_sends_websocket_messages_to_websocket_handle_message(session):
    """WebSocket queue entries are routed to websocket.handle_message."""
    message = websocket.Message(socket=mock.sentinel.SOCKET, payload='bar')
    queue = [message]

    streamer.process_work_queue({}, queue, session_factory=lambda: session)

    websocket.handle_message.assert_called_once_with(message)
def test_process_work_queue_commits_after_each_message(session):
    """One DB commit is issued per successfully handled message."""
    message1 = websocket.Message(socket=mock.sentinel.SOCKET, payload='bar')
    message2 = nsq.Message(topic='foo', payload='bar')
    queue = [message1, message2]

    streamer.process_work_queue({}, queue, session_factory=lambda: session)

    assert session.commit.call_count == 2


def test_process_work_queue_rolls_back_on_handler_exception(session):
    """A handler error triggers rollback and suppresses the commit."""
    message = nsq.Message(topic='foo', payload='bar')
    queue = [message]

    nsq.handle_message.side_effect = RuntimeError('explosion')

    streamer.process_work_queue({}, queue, session_factory=lambda: session)

    session.commit.assert_not_called()
    session.rollback.assert_called_once_with()


def test_process_work_queue_rolls_back_on_unknown_message_type(session):
    """Unrecognised queue entries also roll the session back."""
    message = 'something that is not a message'
    queue = [message]

    streamer.process_work_queue({}, queue, session_factory=lambda: session)

    session.commit.assert_not_called()
    session.rollback.assert_called_once_with()
def test_process_work_queue_calls_close_after_commit(session):
    """The session is closed immediately after a successful commit."""
    message = nsq.Message(topic='foo', payload='bar')
    queue = [message]

    streamer.process_work_queue({}, queue, session_factory=lambda: session)

    assert session.method_calls[-2:] == [
        call.commit(),
        call.close()
    ]


def test_process_work_queue_calls_close_after_rollback(session):
    """The session is closed immediately after a rollback, too."""
    message = nsq.Message(topic='foo', payload='bar')
    queue = [message]

    nsq.handle_message.side_effect = RuntimeError('explosion')

    streamer.process_work_queue({}, queue, session_factory=lambda: session)

    assert session.method_calls[-2:] == [
        call.rollback(),
        call.close()
    ]
@pytest.fixture
def session():
    """A DB-session stand-in restricted to the methods the worker may use."""
    return mock.Mock(spec_set=['close', 'commit', 'execute', 'rollback'])


@pytest.fixture(autouse=True)
def nsq_handle_message(patch):
    # Auto-applied: every test in this module sees nsq.handle_message mocked.
    return patch('h.streamer.nsq.handle_message')


@pytest.fixture(autouse=True)
def websocket_handle_message(patch):
    # Auto-applied: websocket.handle_message is mocked for every test.
    return patch('h.streamer.websocket.handle_message')
| [
"nick@whiteink.com"
] | nick@whiteink.com |
38b9929591eb96f24bcdef78f5439e7dda136927 | 0baeeda7b48da3a199cf471e731b9679c95cebcc | /sc/templer/core/generic_setup.py | 227cb6cfbd2f890b0d44703164d3f4f413988ef0 | [] | no_license | simplesconsultoria/sc.templer.core | 15ee9340fbaa4615ac405d8cec2ce6985fa91113 | 8f1e8a7cf5b1bcd77bf963cd406b5ac152c7cfa4 | refs/heads/master | 2021-01-20T10:41:40.380015 | 2012-04-24T23:50:49 | 2012-04-24T23:50:49 | 2,964,807 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 750 | py | # -*- coding:utf-8 -*-
from templer.core.vars import EXPERT
from templer.core.vars import BooleanVar
# Templer questions controlling which GenericSetup profiles the generated
# package registers; both are expert-mode booleans defaulting to True.
gs_vars = [
    BooleanVar(
        'add_profile',
        title='Register a Default Profile',
        description='Should this package register a Default GS Profile',
        modes=(EXPERT, ),
        default=True,
        # Maps the answer to the skeleton structure rendered for it.
        structures={'False': None, 'True': 'gs_nested_default'},
        ),
    BooleanVar(
        'add_profile_uninstall',
        title='Register an Uninstall Profile',
        description='Should this package register an Uninstall GS Profile',
        modes=(EXPERT, ),
        default=True,
        structures={'False': None, 'True': 'gs_nested_uninstall'},
        ),
    ]
"erico@simplesconsultoria.com.br"
] | erico@simplesconsultoria.com.br |
634fd510f27f199252bf1c265194f3f4af44359e | 5a0dfe1326bb166d6dfaf72ce0f89ab06e963e2c | /leetcode/lc361.py | d32d70cb24c130e5f8349d2bcb7669a777c4c691 | [] | no_license | JasonXJ/algorithms | 7bf6a03c3e26f917a9f91c53fc7b2c65669f7692 | 488d93280d45ea686d30b0928e96aa5ed5498e6b | refs/heads/master | 2020-12-25T15:17:44.345596 | 2018-08-18T07:20:27 | 2018-08-18T07:20:27 | 67,798,458 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,007 | py | class Solution(object):
def maxKilledEnemies(self, grid):
"""
:type grid: List[List[str]]
:rtype: int
"""
if len(grid) == 0 or len(grid[0]) == 0:
return 0
nrows = len(grid)
ncols = len(grid[0])
can_kill = [ [ [0] * 4 for _ in range(ncols) ] for _ in range(nrows)]
for row in range(nrows):
for col in range(ncols):
if grid[row][col] == 'E':
can_kill[row][col] = [1] * 4
# Handle upward and leftward enemies
for row in range(nrows):
for col in range(ncols):
if grid[row][col] != 'W':
if row > 0:
can_kill[row][col][0] += can_kill[row-1][col][0]
if col > 0:
can_kill[row][col][1] += can_kill[row][col-1][1]
# Handle downward and rightward enemies
for row in range(nrows-1, -1, -1):
for col in range(ncols-1, -1, -1):
if grid[row][col] != 'W':
if row < nrows - 1:
can_kill[row][col][2] += can_kill[row+1][col][2]
if col < ncols - 1:
can_kill[row][col][3] += can_kill[row][col+1][3]
maximum = 0
for row in range(nrows):
for col in range(ncols):
if grid[row][col] == '0':
maximum = max(maximum, sum(can_kill[row][col]))
return maximum
def test():
def check(grid, expected):
converted_grid = [
list(line.strip())
for line in grid.strip().splitlines()
]
assert Solution().maxKilledEnemies(converted_grid) == expected
check(
"""
0E00
E0WE
0E00
""", 3
)
check('', 0)
check(
"""
0EE0
E0WE
0E00
""", 3
)
check(
"""
E0EE
E0WE
0E00
""", 4
)
| [
"lxj2048@gmail.com"
] | lxj2048@gmail.com |
0d93a4c9daefd91f328a835a36e56e41ff4c3d2c | 4252102a1946b2ba06d3fa914891ec7f73570287 | /pylearn2/sandbox/cuda_convnet/tests/test_weight_acts.py | 8158dd02343ec2f4e08001c2116ffb5405fd8327 | [] | no_license | lpigou/chalearn2014 | 21d487f314c4836dd1631943e20f7ab908226771 | 73b99cdbdb609fecff3cf85e500c1f1bfd589930 | refs/heads/master | 2020-05-17T00:08:11.764642 | 2014-09-24T14:42:00 | 2014-09-24T14:42:00 | 24,418,815 | 2 | 3 | null | null | null | null | UTF-8 | Python | false | false | 4,673 | py | __authors__ = "Ian Goodfellow"
__copyright__ = "Copyright 2010-2012, Universite de Montreal"
__credits__ = ["Ian Goodfellow"]
__license__ = "3-clause BSD"
__maintainer__ = "Ian Goodfellow"
__email__ = "goodfeli@iro"
from pylearn2.testing.skip import skip_if_no_gpu
skip_if_no_gpu()
import numpy as np
from theano import shared
from pylearn2.sandbox.cuda_convnet.filter_acts import FilterActs
from pylearn2.sandbox.cuda_convnet.weight_acts import WeightActs
from theano.sandbox.cuda import gpu_from_host
from theano.sandbox.cuda import host_from_gpu
from theano.sandbox.rng_mrg import MRG_RandomStreams
import theano.tensor as T
from theano.tensor.nnet.conv import conv2d
from theano.tensor import as_tensor_variable
from theano import function
import warnings
def test_match_grad_valid_conv():
    """Check that WeightActs is the gradient of FilterActs w.r.t. the
    weights, by comparing against theano's conv2d reference for several
    partial_sum settings."""

    # Tests that weightActs is the gradient of FilterActs
    # with respect to the weights.

    for partial_sum in [0, 1, 4]:
        # Fixed seed so the comparison is deterministic across runs.
        rng = np.random.RandomState([2012,10,9])

        batch_size = 3
        rows = 7
        cols = 9
        channels = 8
        filter_rows = 4
        filter_cols = filter_rows
        num_filters = 16

        # cuda-convnet layout: (channels, rows, cols, batch).
        images = shared(rng.uniform(-1., 1., (channels, rows, cols,
            batch_size)).astype('float32'), name='images')
        filters = shared(rng.uniform(-1., 1., (channels, filter_rows,
            filter_cols, num_filters)).astype('float32'), name='filters')

        gpu_images = gpu_from_host(images)
        gpu_filters = gpu_from_host(filters)

        output = FilterActs(partial_sum=partial_sum)(gpu_images, gpu_filters)
        output = host_from_gpu(output)

        # Reference path: reorder to theano's bc01 layout and reverse the
        # filters spatially before conv2d (presumably to match FilterActs'
        # correlation convention — TODO confirm).
        images_bc01 = images.dimshuffle(3,0,1,2)
        filters_bc01 = filters.dimshuffle(3,0,1,2)
        filters_bc01 = filters_bc01[:,:,::-1,::-1]

        output_conv2d = conv2d(images_bc01, filters_bc01,
                border_mode='valid')
        output_conv2d = output_conv2d.dimshuffle(1,2,3,0)

        # Random linear functional of the output gives a scalar cost whose
        # weight gradient can be compared between the two paths.
        theano_rng = MRG_RandomStreams(2013 + 1 + 31)
        coeffs = theano_rng.normal(avg=0., std=1., size=output_conv2d.shape, dtype='float32')
        cost_conv2d = (coeffs * output_conv2d).sum()
        weights_grad_conv2d = T.grad(cost_conv2d, filters)

        cost = (coeffs * output).sum()
        hid_acts_grad = T.grad(cost, output)
        weights_grad = WeightActs(partial_sum=partial_sum)(
            gpu_images,
            gpu_from_host(hid_acts_grad),
            as_tensor_variable((4, 4))
        )[0]
        weights_grad = host_from_gpu(weights_grad)

        f = function([], [output, output_conv2d, weights_grad, weights_grad_conv2d])
        output, output_conv2d, weights_grad, weights_grad_conv2d = f()

        # Forward outputs must agree within a loose float32 tolerance.
        if np.abs(output - output_conv2d).max() > 8e-6:
            assert type(output) == type(output_conv2d)
            assert output.dtype == output_conv2d.dtype
            if output.shape != output_conv2d.shape:
                print 'cuda-convnet shape: ',output.shape
                print 'theano shape: ',output_conv2d.shape
                assert False
            err = np.abs(output - output_conv2d)
            print 'absolute error range: ', (err.min(), err.max())
            print 'mean absolute error: ', err.mean()
            print 'cuda-convnet value range: ', (output.min(), output.max())
            print 'theano value range: ', (output_conv2d.min(), output_conv2d.max())
            assert False

        warnings.warn("""test_match_grad_valid_conv success criterion is not very strict. Can we verify that this is OK?
One possibility is that theano is numerically unstable and Alex's code is better.
Probably theano CPU 64 bit is OK but it's worth checking the others.""")

        # Weight gradients must agree within a similarly loose tolerance.
        if np.abs(weights_grad - weights_grad_conv2d).max() > 8.6e-6:
            if type(weights_grad) != type(weights_grad_conv2d):
                raise AssertionError("weights_grad is of type " + str(weights_grad))
            assert weights_grad.dtype == weights_grad_conv2d.dtype
            if weights_grad.shape != weights_grad_conv2d.shape:
                print 'cuda-convnet shape: ',weights_grad.shape
                print 'theano shape: ',weights_grad_conv2d.shape
                assert False
            err = np.abs(weights_grad - weights_grad_conv2d)
            print 'absolute error range: ', (err.min(), err.max())
            print 'mean absolute error: ', err.mean()
            print 'cuda-convnet value range: ', (weights_grad.min(), weights_grad.max())
            print 'theano value range: ', (weights_grad_conv2d.min(), weights_grad_conv2d.max())
            assert False

if __name__ == '__main__':
    test_match_grad_valid_conv()
| [
"lionelpigou@gmail.com"
] | lionelpigou@gmail.com |
8e63a7fcf360bfed8263a4df56a298eee731d690 | 33c4bc9ca463ce0ec61945fca5841c9d8a18ab8e | /thrift/lib/py3/test/exceptions.py | 43419dbc5e515f17dcb9c999576bb9179c86b613 | [
"Apache-2.0"
] | permissive | gaurav1086/fbthrift | d54bb343bf1a8503dd329fbfcd0b46fe9f70754c | 68d1a8790bfd5b3974e1b966c8071f9c456b6c6a | refs/heads/master | 2020-12-27T22:41:09.452839 | 2020-02-03T23:56:20 | 2020-02-03T23:58:33 | 238,088,855 | 0 | 0 | Apache-2.0 | 2020-02-04T00:13:04 | 2020-02-04T00:13:03 | null | UTF-8 | Python | false | false | 3,529 | py | #!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
from testing.types import UnusedError, HardError, SimpleError, Color, UnfriendlyError
from .exception_helper import simulate_UnusedError, simulate_HardError
from thrift.py3 import Error
class ExceptionTests(unittest.TestCase):
    """Behavioural tests for thrift-py3 generated exception types."""

    def test_hashability(self) -> None:
        """Generated exceptions must be hashable."""
        hash(UnusedError())

    def test_creation_optional_from_c(self) -> None:
        """An instance built on the C++ side round-trips through args."""
        msg = "this is what happened"
        x = simulate_UnusedError(msg)
        self.assertIsInstance(x, UnusedError)
        self.assertIn(msg, str(x))
        self.assertIn(msg, x.args)
        self.assertEqual(msg, x.message)
        self.assertEqual(UnusedError(*x.args), x)  # type: ignore

    def test_exception_message_annotation(self) -> None:
        """str() honours the thrift `message` annotation where present."""
        x = UnusedError(message="something broke")
        self.assertEqual(x.message, str(x))
        y = HardError("WAT!", 22)  # type: ignore
        self.assertEqual(y.errortext, str(y))
        z = UnfriendlyError("WAT!", 22)  # type: ignore
        self.assertNotEqual(z.errortext, str(z))
        self.assertNotEqual(str(y), str(z))

    def test_creation_optional_from_python(self) -> None:
        """Positional and keyword construction are equivalent."""
        msg = "something broke"
        UnusedError()
        x = UnusedError(msg)  # type: ignore
        y = UnusedError(message=msg)
        self.assertEqual(x, y)
        self.assertEqual(x.args, y.args)
        self.assertEqual(x.message, y.message)
        # Bug fix: this previously compared str(x) with itself, which is
        # trivially true; the intent is clearly to compare x against y.
        self.assertEqual(str(x), str(y))

    def test_creation_required_from_c(self) -> None:
        """C++-built HardError exposes message, code and args faithfully."""
        msg = "ack!"
        code = 22
        x = simulate_HardError(msg, code)
        self.assertIsInstance(x, HardError)
        self.assertIn(msg, str(x))
        self.assertIn(msg, x.args)
        self.assertIn(code, x.args)
        self.assertEqual(code, x.code)
        self.assertEqual(msg, x.errortext)
        self.assertEqual(x, HardError(*x.args))  # type: ignore

    def test_creation_required_from_python(self) -> None:
        """Required fields must be supplied; positional == keyword."""
        msg = "ack!"
        code = 22
        with self.assertRaises(TypeError):
            HardError(msg)  # type: ignore
        x = HardError(msg, code)  # type: ignore
        y = HardError(msg, code=code)  # type: ignore
        self.assertEqual(x, y)
        self.assertEqual(x.args, y.args)
        self.assertEqual(x.errortext, y.errortext)
        self.assertEqual(x.code, y.code)
        self.assertEqual(str(x), str(y))

    def test_raise(self) -> None:
        """Generated errors slot into Python's exception hierarchy."""
        with self.assertRaises(SimpleError):
            raise SimpleError()
        with self.assertRaises(Error):
            raise SimpleError(Color.red)
        with self.assertRaises(Exception):
            raise SimpleError()
        with self.assertRaises(BaseException):
            raise SimpleError()
        x = SimpleError(Color.blue)
        self.assertIsInstance(x, BaseException)
        self.assertIsInstance(x, Exception)
        self.assertIsInstance(x, Error)
        self.assertIsInstance(x, SimpleError)
| [
"facebook-github-bot@users.noreply.github.com"
] | facebook-github-bot@users.noreply.github.com |
4d5fa5eafc532235892784af3064d77145c79f15 | 85b402cd9e762b2749c978105ea362b10d335e5c | /114_auto_image_quality_assessment_BRISQUE.py | 8f4ffa79a306138610b23420218c52a56ad6434d | [] | no_license | bnsreenu/python_for_microscopists | 29c08f17461baca95b5161fd4cd905be515605c4 | 4b8c0bd4274bc4d5e906a4952988c7f3e8db74c5 | refs/heads/master | 2023-09-04T21:11:25.524753 | 2023-08-24T18:40:53 | 2023-08-24T18:40:53 | 191,218,511 | 3,010 | 2,206 | null | 2023-07-25T07:15:22 | 2019-06-10T17:53:14 | Jupyter Notebook | UTF-8 | Python | false | false | 3,095 | py | #!/usr/bin/env python
__author__ = "Sreenivas Bhattiprolu"
__license__ = "Feel free to copy, I appreciate if you acknowledge Python for Microscopists"
# https://youtu.be/Vj-YcXswTek
"""
BRISQUE calculates the no-reference image quality score for an image using the
Blind/Referenceless Image Spatial Quality Evaluator (BRISQUE).
BRISQUE score is computed using a support vector regression (SVR) model trained on an
image database with corresponding differential mean opinion score (DMOS) values.
The database contains images with known distortion such as compression artifacts,
blurring, and noise, and it contains pristine versions of the distorted images.
The image to be scored must have at least one of the distortions for which the model was trained.
Mittal, A., A. K. Moorthy, and A. C. Bovik. "No-Reference Image Quality Assessment in the Spatial Domain.
" IEEE Transactions on Image Processing. Vol. 21, Number 12, December 2012, pp. 4695–4708.
https://live.ece.utexas.edu/publications/2012/TIP%20BRISQUE.pdf
To install imquality
https://pypi.org/project/image-quality/
"""
import numpy as np
from skimage import io, img_as_float
import imquality.brisque as brisque
#img = img_as_float(io.imread('noisy_images/BSE.jpg', as_gray=True))
# Score a single noisy image; BRISQUE needs no reference image.
img = img_as_float(io.imread('noisy_images/BSE_50sigma_noisy.jpg', as_gray=True))

score = brisque.score(img)
print("Brisque score = ", score)

#Now let us check BRISQUE scores for a bunch of blurred images.
# Load the pristine image plus progressively Gaussian-blurred versions.
img0 = img_as_float(io.imread('noisy_images/BSE.jpg', as_gray=True))
img25 = img_as_float(io.imread('noisy_images/BSE_1sigma_blur.jpg', as_gray=True))
img50 = img_as_float(io.imread('noisy_images/BSE_2sigma_blur.jpg', as_gray=True))
img75 = img_as_float(io.imread('noisy_images/BSE_3sigma_blur.jpg', as_gray=True))
img100 = img_as_float(io.imread('noisy_images/BSE_5sigma_blur.jpg', as_gray=True))
img200 = img_as_float(io.imread('noisy_images/BSE_10sigma_blur.jpg', as_gray=True))

score0 = brisque.score(img0)
score25 = brisque.score(img25)
score50 = brisque.score(img50)
score75 = brisque.score(img75)
score100 = brisque.score(img100)
score200 = brisque.score(img200)

print("BRISQUE Score for 0 blur = ", score0)
print("BRISQUE Score for 1 sigma blur = ", score25)
print("BRISQUE Score for 2 sigma blur = ", score50)
print("BRISQUE Score for 3 sigma blur = ", score75)
print("BRISQUE Score for 5 sigma blur = ", score100)
print("BRISQUE Score for 10 sigma blur = ", score200)

# Peak signal to noise ratio (PSNR) is Not a good metric.
# PSNR is a full-reference metric (needs the pristine img0); shown here
# for contrast with the no-reference BRISQUE scores above.
from skimage.metrics import peak_signal_noise_ratio
psnr_25 = peak_signal_noise_ratio(img0, img25)
psnr_50 = peak_signal_noise_ratio(img0, img50)
psnr_75 = peak_signal_noise_ratio(img0, img75)
psnr_100 = peak_signal_noise_ratio(img0, img100)
psnr_200 = peak_signal_noise_ratio(img0, img200)

print("PSNR for 1 sigma blur = ", psnr_25)
print("PSNR for 2 sigma blur = ", psnr_50)
print("PSNR for 3 sigma blur = ", psnr_75)
print("PSNR for 5 sigma blur = ", psnr_100)
print("PSNR for 10 sigma blur = ", psnr_200)
| [
"noreply@github.com"
] | bnsreenu.noreply@github.com |
def vanhin(henkilot: list):
    """Return the name of the oldest person in ``henkilot``.

    Each person is a ``(name, birth_year)`` tuple; the person with the
    smallest birth year is the oldest.  Returns ``""`` for an empty list.

    Fix: the original used a local variable named ``vanhin``, shadowing the
    function's own name inside its body; the locals are now named clearly.
    """
    oldest_name = ""
    oldest_year = 2999  # sentinel later than any plausible birth year
    for nimi, syntymavuosi in henkilot:
        if syntymavuosi < oldest_year:
            oldest_name = nimi
            oldest_year = syntymavuosi
    return oldest_name
if __name__ == "__main__":
    # Quick manual check: Maija (1953) has the smallest birth year.
    h1 = ("Arto", 1977)
    h2 = ("Einari", 1985)
    h3 = ("Maija", 1953)
    h4 = ("Essi", 1997)
    hlista = [h1, h2, h3, h4]
    print(vanhin(hlista))
| [
"jouko.rintamaki@live.fi"
] | jouko.rintamaki@live.fi |
fa73c07e3907644afa9f63ac97e01e28926855d5 | bfb113c3076f5b0570953583e7a2321c774d73ea | /request_modules/request_module_example5.py | 106dde71122751e8cc4a2c53f2b4162e6af91ce3 | [] | no_license | gsudarshan1990/Training_Projects | 82c48d5492cb4be94db09ee5c66142c370794e1c | 2b7edfafc4e448bd558c034044570496ca68bf2d | refs/heads/master | 2022-12-10T15:56:17.535096 | 2020-09-04T06:02:31 | 2020-09-04T06:02:31 | 279,103,151 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 303 | py | """
This is the example of the request module which provides the data in the json format
"""
import requests
response_object = requests.get("https://api.datamuse.com/words?rel_rhy=funny")#This is the query parameter
dict_object = response_object.json()#This obtaining the dictionary
print(dict_object) | [
"sudarshan2009@live.in"
] | sudarshan2009@live.in |
d1ea690ce6fc3362e7be323e77a96e59945debfb | a77a11cccf133168f9cf2178f4a3b572f0ff7338 | /test_data/test_data.py | 6fe0ccd83c3310d52308d4e09e773dcff6027ab2 | [] | no_license | redsnowc/jobplus11-2 | f21ab84c3f7bc2576ef275099355ab88cf763bea | 33fa0e6d0cbd9987de29c0027b676f4ba847115d | refs/heads/master | 2020-05-17T00:11:56.445400 | 2019-05-13T08:45:41 | 2019-05-13T08:45:41 | 183,390,450 | 0 | 0 | null | 2019-04-25T08:27:31 | 2019-04-25T08:27:31 | null | UTF-8 | Python | false | false | 2,371 | py | # -*- coding: UTF-8 -*-
from jobplus.models import db, User, CompanyInfo, UserInfo, Job
from random import randint, choice
from faker import Faker
# Two Faker generators: `fe` yields default-locale (English-looking) data,
# `fc` yields zh-CN data.
fe = Faker()
fc = Faker('zh-cn')
def create_user():
    """Seed four job-seeker accounts, each with an attached UserInfo profile."""
    for i in range(4):
        user = User()
        user_info = UserInfo()
        user.username = fe.name()
        user.email = fe.email()
        user.password = '123456'  # shared throwaway password for test data
        user_info.name = fc.name()
        user_info.phone_number = fc.phone_number()
        user_info.experience = randint(0, 10)
        user_info.resume = fe.url()
        user_info.user = user
        db.session.add(user)
        db.session.add(user_info)
        db.session.commit()
def create_company():
    """Seed sixteen company accounts (role 20) with company profiles."""
    for i in range(16):
        user = User()
        company_info = CompanyInfo()
        user.username = fc.company()
        user.email = fe.email()
        user.password = '123456'  # shared throwaway password for test data
        user.role = 20  # company role; create_job() selects on this value
        company_info.address = fc.city()
        company_info.domain = '互联网 AI 大数据'
        company_info.intro = ''.join(fc.words(8))
        company_info.detail = fc.text()
        company_info.logo = 'https://s2.ax1x.com/2019/04/28/EMmHjH.png'
        company_info.website = fc.url()
        company_info.company = user
        db.session.add(user)
        db.session.add(company_info)
        db.session.commit()
def create_job():
    """Seed four job postings for every company account (role == 20)."""
    for user in User.query.filter_by(role=20).all():
        for i in range(4):
            job = Job()
            job.title = fc.job()
            # Salary range in arbitrary units; upper always exceeds lower.
            job.salary_lower = randint(2, 20)
            job.salary_upper = job.salary_lower + randint(2, 10)
            job.experience_lower = randint(0, 4)
            if job.experience_lower == 0:
                # 0/0 bounds encode "no experience required".
                job.experience_upper = 0
            else:
                job.experience_upper = job.experience_lower + randint(1,4)
            job.education = choice(['大专', '本科', '硕士', '博士', '经验不限'])
            job.tags = 'Python Flask Mysql'
            job.intro = fc.text()
            job.company = user
            db.session.add(job)
        db.session.commit()
def create_admin():
    """Seed the single administrator account (role 30)."""
    admin = User()
    admin.username = 'admin'
    admin.email = 'admin@admin.com'
    admin.password = '123456'
    admin.role = 30
    db.session.add(admin)
    db.session.commit()
def run():
    """Populate the database with demo users, companies, jobs and an admin."""
    for seed in (create_user, create_company, create_job, create_admin):
        seed()
| [
"nuanyang.44@gmail.com"
] | nuanyang.44@gmail.com |
e6dd618a067b490a15f501db49b3726a1a3a06ca | e08e192020fa951b57f16aea0ee1376a74115c53 | /irim/external/invertible_rim/examples/irim_denoising.py | 255071fb600c30d84e20b56bf6091e6c853a4764 | [
"MIT"
] | permissive | fcaliva/FNAF-fastMRI | 3d5912a20e6eef063ae534139257ba7dc08c0ed6 | 9e2aea9f8b0d794c617ca822980c6d85ab6557fc | refs/heads/main | 2023-03-21T00:20:42.142264 | 2021-03-03T23:32:25 | 2021-03-03T23:32:25 | 354,087,548 | 1 | 0 | MIT | 2021-04-02T17:27:20 | 2021-04-02T17:27:20 | null | UTF-8 | Python | false | false | 4,571 | py | """
A simple demonstration for training an IRIM as an image denoiser.
This script will lead through the process of defining a gradient function for the IRIM, then
building the RIM model, and how to train the model using invert to learn. This script will utilize
CUDA devices if available.
"""
import torch
from irim import IRIM
from irim import InvertibleUnet
from irim import MemoryFreeInvertibleModule
# Use CUDA if available
device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
# ---- Parameters ---
# Working with images, for time series or volumes set to 1 or 3, respectively
conv_nd = 2
# Number of Householder projections for constructing 1x1 convolutions
n_householder = 3
# Number of channels for each layer of the Invertible Unet
n_channels = [16,8,4,8,16]
# Number of hidden channel in the residual functions of the Invertible Unet
n_hidden = [16] * 5
# Downsampling factors
dilations = [1,2,4,2,1]
# Number of IRIM steps
n_steps = 5
# Number of image channels
im_channels = 3
# Number of total samples
n_samples = 64
im_size = 32
learning_rate = 1e-3
def grad_fun(x_est,y):
    """
    Gradient of the data-fidelity term for a white-noise denoising problem,
    computed with Pytorch's autograd.

    The differentiated error is ``sum((y - x_est)**2)``, so the returned
    gradient is ``2 * (x_est - y)`` -- NOT ``x_est - y`` as the commented-out
    variant below suggests (the factor 2 is kept by autograd; the learned
    RIM can absorb the scale).

    :param x_est: Tensor, model estimate of x
    :param y: Tensor, noisy measurements
    :return: grad_x, Tensor with the same shape as x_est
    """
    # True during training, False during testing
    does_require_grad = x_est.requires_grad
    with torch.enable_grad():
        # Necessary for using autograd
        x_est.requires_grad_(True)
        # Assuming uniform white noise, in the denoising case matrix A is the identity
        error = torch.sum((y - x_est)**2)
        # We retain the graph during training only
        grad_x = torch.autograd.grad(error, inputs=x_est, retain_graph=does_require_grad,
                                     create_graph=does_require_grad)[0]
        # Set requires_grad back to it's original state
        x_est.requires_grad_(does_require_grad)
    return grad_x
# def grad_fun(x_est, y):
# return x_est - y
# At every iteration of the IRIM we use an Invertible Unet for processing. Note, that the use of ModuleList
# is necessary for Pytorch to properly register all modules.
step_models = torch.nn.ModuleList([InvertibleUnet(n_channels=n_channels,n_hidden=n_hidden,dilations=dilations,
conv_nd=conv_nd, n_householder=n_householder) for i in range(n_steps)])
# Build IRIM
model = IRIM(step_models,grad_fun,im_channels)
# Wrap the model to be trained with invert to learn
model = MemoryFreeInvertibleModule(model)
model.to(device)
# Use DataParallel if multiple devices are available
if torch.cuda.device_count() > 1:
model = torch.nn.DataParallel(model)
optimizer = torch.optim.Adam(model.parameters(), learning_rate)
# We generate a simple toy data set where the ground truth data has the same values in the image
# dimensions but different values across batch and channel dimensions. This demonstrates that the
# IRIM can deal with the implicit structure in the data, with a high range of values, and it can even
# do extrapolation.
x = torch.ones(n_samples,im_channels,*[im_size]*conv_nd, requires_grad=False, device=device)
x = torch.cumsum(x,0)
x = torch.cumsum(x,1)
y = x + torch.randn_like(x)
# Training and test split. This will result un an extrapolation problem on the test set.
y, y_test = torch.chunk(y,2,0)
x, x_test = torch.chunk(x,2,0)
# Initial states of the IRIM
x_in = torch.cat((y,torch.zeros(y.size(0),n_channels[0]-im_channels,*[im_size]*conv_nd, device=device)),1)
x_test_in = torch.cat((y_test,torch.zeros(y_test.size(0),n_channels[0]-im_channels,*[im_size]*conv_nd, device=device)),1)
x_in.requires_grad_(True)
x_test_in.requires_grad_(False)
# Full-batch gradient training with a periodic extrapolation test (the test
# split holds larger values than training, see comment above) every 100 steps.
for i in range(3000):
    optimizer.zero_grad()
    model.zero_grad()
    # We only regress on the image dimensions
    x_est = model.forward(x_in, y)[:,:im_channels]
    loss = torch.nn.functional.mse_loss(x_est, x)
    loss.backward()
    optimizer.step()
    if i % 100 == 0:
        model.eval()
        with torch.no_grad():
            x_est = model.forward(x_test_in, y_test)[:, :im_channels]
            loss = torch.nn.functional.mse_loss(x_est, x_test)
            # Baseline: error of the raw noisy image against ground truth.
            loss_noisy = torch.nn.functional.mse_loss(y_test, x_test)
            print('Iteration', i, ': test loss =',loss.item(), ' loss noisy image =',loss_noisy.item())
        model.train()
| [
"francesco.caliva@ucsf.edu"
] | francesco.caliva@ucsf.edu |
2acd41e081d0d6ae8fca6257a31843d2bf777385 | 9743d5fd24822f79c156ad112229e25adb9ed6f6 | /xai/brain/wordbase/verbs/_squires.py | 6fb9d7cec5aa9aa6373a452e937b439e2c13cec3 | [
"MIT"
] | permissive | cash2one/xai | de7adad1758f50dd6786bf0111e71a903f039b64 | e76f12c9f4dcf3ac1c7c08b0cc8844c0b0a104b6 | refs/heads/master | 2021-01-19T12:33:54.964379 | 2017-01-28T02:00:50 | 2017-01-28T02:00:50 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 238 | py |
from xai.brain.wordbase.verbs._squire import _SQUIRE
# class header
class _SQUIRES(_SQUIRE, ):
    """Plural form "squires"; inherits all behaviour from the base _SQUIRE verb."""

    def __init__(self,):
        _SQUIRE.__init__(self)
        self.name = "SQUIRES"
        self.specie = 'verbs'
        self.basic = "squire"
        self.jsondata = {}
| [
"xingwang1991@gmail.com"
] | xingwang1991@gmail.com |
4b8ae280cf7bb7071353068bd315c4f5f8c0896d | c99479824a703df2e22364162533661c7f39df04 | /utils/bp/default/default_interface_chrono_local_date_time.py | 71b323a63ab9b6a1644d627b7ee4b851106c40e1 | [] | no_license | tylerrbowen/sample | d1ccefd7ed823c758d3bc43729d76fd72e0042ff | 0fcfecd532a9679bf3f927d5fcb03edb005c5dc4 | refs/heads/master | 2021-01-01T06:34:05.647543 | 2013-12-07T00:13:02 | 2013-12-07T00:13:02 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,211 | py |
from default_interface_temporal import DefaultInterfaceTemporal
from abc import ABCMeta
from utils.bp.duration import ChronoUnit, ChronoUnitItem
from utils.bp.temporal.chrono_field import ChronoFieldItem, ChronoField
from utils.bp.chrono.chrono_local_date_time import ChronoLocalDateTime
from utils.bp.chrono.chronology import Chronology
from utils.bp.temporal.temporal_queries import TemporalQueries
from utils.bp.local_date import LocalDate
from utils.bp.instant import Instant
class DefaultInterfaceChronoLocalDateTime(DefaultInterfaceTemporal, ChronoLocalDateTime):
    """Abstract skeleton for chronology-aware local date-times (a port of the
    ThreeTen/JSR-310 ``DefaultInterface`` helpers): layers arithmetic, queries,
    comparison and epoch conversions on top of the concrete subclass's
    to_local_date()/to_local_time()."""
    __metaclass__ = ABCMeta

    def with_adjuster(self, adjuster):
        # Re-wrap the generic superclass result in this chronology's type.
        return self.to_local_date().get_chronology().ensure_chrono_local_date_time(
            super(DefaultInterfaceChronoLocalDateTime, self).with_adjuster(adjuster))

    def plus_temporal(self, temporal_amount):
        return self.to_local_date().get_chronology().ensure_chrono_local_date_time(
            super(DefaultInterfaceChronoLocalDateTime, self).plus_temporal(temporal_amount))

    def minus_temporal(self, temporal_amount):
        return self.to_local_date().get_chronology().ensure_chrono_local_date_time(
            super(DefaultInterfaceChronoLocalDateTime, self).minus_temporal(temporal_amount))

    def minus(self, amount=None, unit=None):
        return self.to_local_date().get_chronology().ensure_chrono_local_date_time(
            super(DefaultInterfaceChronoLocalDateTime, self).minus(amount, unit))

    def adjust_into(self, temporal):
        # Copy our date (epoch day) and time (nano of day) onto *temporal*.
        # NOTE(review): NANO_OF_SECOND paired with to_nano_of_day() looks
        # inconsistent -- upstream ThreeTen uses NANO_OF_DAY here; verify.
        return temporal.with_field(ChronoField.EPOCH_DAY, self.to_local_date().to_epoch_day()).\
            with_field(ChronoField.NANO_OF_SECOND, self.to_local_time().to_nano_of_day())

    def query(self, query):
        if query == TemporalQueries.chronology():
            return self.to_local_date().get_chronology()
        elif query == TemporalQueries.precision():
            return ChronoUnit.NANOS
        elif query == TemporalQueries.local_date():
            # NOTE(review): of_epoch_day() is given a date object; upstream
            # passes to_local_date().to_epoch_day() -- verify.
            return LocalDate.of_epoch_day(self.to_local_date())
        elif query == TemporalQueries.local_time():
            return self.to_local_time()
        return super(DefaultInterfaceChronoLocalDateTime, self).query(query)

    def to_instant(self, offset):
        # Combine epoch seconds at *offset* with the sub-second nanos.
        return Instant.of_epoch_second(self.to_epoch_second(offset), self.to_local_time().get_nano())

    def to_epoch_second(self, offset):
        # Seconds since the epoch for this local date-time at the given offset.
        epoch_day = self.to_local_date().to_epoch_day()
        secs = epoch_day * 86400 + self.to_local_time().to_second_of_day()
        secs -= offset.get_total_seconds()
        return secs

    def __cmp__(self, other):
        # Order by date, then time, then chronology (Python 2 style cmp).
        comp = self.to_local_date().__cmp__(other.to_local_date())
        if comp == 0:
            comp = self.to_local_time().__cmp__(other.to_local_time())
        if comp == 0:
            comp = self.to_local_date().get_chronology().__cmp__(other.to_local_date().get_chronology())
        return comp

    def is_after(self, other):
        # Timeline comparison on epoch-day / nano-of-day, ignoring chronology.
        this_ep_day = self.to_local_date().to_epoch_day()
        other_ep_day = other.to_local_date().to_epoch_day()
        return this_ep_day > other_ep_day or \
            (this_ep_day == other_ep_day and self.to_local_time().to_nano_of_day() > other.to_local_time().to_nano_of_day())

    def is_before(self, other):
        this_ep_day = self.to_local_date().to_epoch_day()
        other_ep_day = other.to_local_date().to_epoch_day()
        return this_ep_day < other_ep_day or \
            (this_ep_day == other_ep_day and self.to_local_time().to_nano_of_day() < other.to_local_time().to_nano_of_day())

    def is_equal(self, other):
        # Timeline equality, ignoring chronology (unlike __eq__, which
        # compares via __cmp__ and therefore includes the chronology).
        return self.to_local_time().to_nano_of_day() == other.to_local_time().to_nano_of_day() and \
            self.to_local_date().to_epoch_day() == other.to_local_date().to_epoch_day()

    def __eq__(self, other):
        if self is other:
            return True
        if isinstance(other, ChronoLocalDateTime):
            return self.__cmp__(other) == 0
        return False

    def __hash__(self):
        return self.to_local_date().__hash__() ^ self.to_local_time().__hash__()

    def __str__(self):
        # ISO-style "dateTtime" rendering.
        return self.to_local_date().__str__() + 'T' + self.to_local_time().__str__()
| [
"tyler.r.bowen@gmail.com"
] | tyler.r.bowen@gmail.com |
3efaeb9b2ba2bb7ab55362bd8a91152e1af3525a | 5d0edf31b17c5375faf6126c1a7be8e79bfe2ab8 | /buildout-cache/eggs/Products.PortalTransforms-2.1.12-py2.7.egg/Products/PortalTransforms/transforms/word_to_html.py | 624225f53b94fa9955067ec501f72cb4da975df2 | [] | no_license | renansfs/Plone_SP | 27cba32ebd9fc03dae3941ec23cf1bf0a7b6667a | 8a7bdbdb98c3f9fc1073c6061cd2d3a0ec80caf5 | refs/heads/master | 2021-01-15T15:32:43.138965 | 2016-08-24T15:30:19 | 2016-08-24T15:30:19 | 65,313,812 | 0 | 3 | null | null | null | null | UTF-8 | Python | false | false | 1,519 | py | # -*- coding: utf-8 -*-
from Products.PortalTransforms.interfaces import ITransform
from zope.interface import implementer
import os
EXTRACT_BODY = 1
EXTRACT_STYLE = 0
FIX_IMAGES = 1
IMAGE_PREFIX = "img_"
# disable office_uno because it doesn't support multithread yet
ENABLE_UNO = False
if os.name == 'posix':
try:
if ENABLE_UNO:
from office_uno import document
else:
raise
except:
from office_wvware import document
else:
try:
if ENABLE_UNO:
from office_uno import document
else:
raise
except:
from office_com import document
@implementer(ITransform)
class word_to_html(object):
    """PortalTransforms transform: MS Word (.doc) bytes -> HTML.

    Delegates the conversion to whichever ``document`` backend was selected
    at import time (uno / wvware / COM, depending on the platform).
    """
    __name__ = "word_to_html"
    inputs = ('application/msword',)
    output = 'text/html'
    output_encoding = 'utf-8'
    # NOTE(review): "tranform_engine" is a long-standing typo ("transform");
    # kept as-is since external code may depend on the attribute name.
    tranform_engine = document.__module__

    def name(self):
        """Return the registered transform name."""
        return self.__name__

    def convert(self, data, cache, **kwargs):
        """Convert raw .doc bytes to HTML, storing the result in *cache*.

        Extracted images are registered as sub-objects on the cache.  The
        backend's temp directory is always cleaned up, even on error.
        """
        orig_file = 'unknown.doc'
        doc = None
        try:
            doc = document(orig_file, data)
            doc.convert()
            html = doc.html()
            path, images = doc.subObjects(doc.tmpdir)
            objects = {}
            if images:
                doc.fixImages(path, images, objects)
            cache.setData(html)
            cache.setSubObjects(objects)
            return cache
        finally:
            if doc is not None:
                doc.cleanDir(doc.tmpdir)
def register():
    """Factory hook called by PortalTransforms to instantiate this transform."""
    return word_to_html()
| [
"renansfs@gmail.com"
] | renansfs@gmail.com |
b7c0556a288c8c26517fd225c1425ce43b08ea25 | 9e988c0dfbea15cd23a3de860cb0c88c3dcdbd97 | /sdBs/AllRun/fbs_0212+349/sdB_FBS_0212+349_coadd.py | 85c8c3b353e426ba0c09356dcd02332631d61bb9 | [] | no_license | tboudreaux/SummerSTScICode | 73b2e5839b10c0bf733808f4316d34be91c5a3bd | 4dd1ffbb09e0a599257d21872f9d62b5420028b0 | refs/heads/master | 2021-01-20T18:07:44.723496 | 2016-08-08T16:49:53 | 2016-08-08T16:49:53 | 65,221,159 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 433 | py | from gPhoton.gMap import gMap
def main():
    """Build a 30 s-cadence NUV count movie and a coadded count image for
    sdB_FBS_0212+349, centred on (RA, Dec) = (33.889042, 35.189722) with a
    0.0333 deg (2 arcmin) box; output paths are hard-coded."""
    gMap(band="NUV", skypos=[33.889042,35.189722], skyrange=[0.0333333333333,0.0333333333333], stepsz = 30., cntfile="/data2/fleming/GPHOTON_OUTPUT/LIGHTCURVES/sdBs/sdB_FBS_0212+349/sdB_FBS_0212+349_movie_count.fits", cntcoaddfile="/data2/fleming/GPHOTON_OUTPUT/LIGHTCURVES/sdB/sdB_FBS_0212+349/sdB_FBS_0212+349_count_coadd.fits", overwrite=True, verbose=3)

if __name__ == "__main__":
    main()
| [
"thomas@boudreauxmail.com"
] | thomas@boudreauxmail.com |
dd4973b847efb1f0b4a23a8275038669c6b2eba1 | 5686100c4ed0436347107f4e9faae30fca609c09 | /leetcode/1287. Element Appearing More Than 25% In Sorted Array/Solution.py | 52841b8dd96cec695b503398fb1062cf2a443dd5 | [] | no_license | adamqddnh/algorithm-questions | 7d4f56b7e5ac2ff9460774d43ecf8cba2cd7b0cb | 93a1b082e10ade0dd464deb80b5df6c81552f534 | refs/heads/master | 2023-06-29T04:51:26.635740 | 2021-07-23T09:11:45 | 2021-07-23T09:11:45 | 252,675,682 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 670 | py | class Solution(object):
def findSpecialInteger(self, arr):
"""
:type arr: List[int]
:rtype: int
"""
last = 0
result = 1
length = len(arr)
for i in range(1, length):
if arr[i] == arr[last]:
result = max(result, i - last + 1)
else:
last = i
if result > length / 4:
return arr[last]
return arr[last]
"""
:A new idea
"""
# def findSpecialInteger(self, arr):
# n = len(arr) // 4
# for i in range(len(arr)):
# if arr[i] == arr[i + n]:
# return arr[i]
| [
"noreply@github.com"
] | adamqddnh.noreply@github.com |
1c1affc9dd98fd25a3edc099d6fa7cd07ff3fc6d | d8e8e528b1942b3528c88b12729f0cbc7b7d606f | /pipenv/vendor/dotenv/environ.py | ad3571656fba9a41d98bb9ce8d9e41dbd669291e | [
"MIT",
"BSD-3-Clause"
] | permissive | frostming/pipenv | 997e5f71ac5a4bbac3aacd1fa000da6e0c8161eb | 661184e5ccf9bec3e3b4b03af778e54fe2fbc1a2 | refs/heads/master | 2021-04-15T03:33:03.817473 | 2019-03-18T03:14:33 | 2019-03-18T03:14:33 | 126,263,945 | 1 | 1 | MIT | 2019-03-19T09:44:03 | 2018-03-22T01:48:41 | Python | UTF-8 | Python | false | false | 1,330 | py | import os
class UndefinedValueError(Exception):
    """Raised by getenv() when an option is missing and no default was given."""
    pass


class Undefined(object):
    """Class to represent undefined type. """
    pass


# Reference instance to represent undefined values
undefined = Undefined()
def _cast_boolean(value):
"""
Helper to convert config values to boolean as ConfigParser do.
"""
_BOOLEANS = {'1': True, 'yes': True, 'true': True, 'on': True,
'0': False, 'no': False, 'false': False, 'off': False, '': False}
value = str(value)
if value.lower() not in _BOOLEANS:
raise ValueError('Not a boolean: %s' % value)
return _BOOLEANS[value.lower()]
def getenv(option, default=undefined, cast=undefined):
    """
    Return the value for option or default if defined.

    Raises UndefinedValueError when the variable is unset and no default
    was supplied.  *cast* may be ``bool`` (ConfigParser semantics),
    ``list`` (comma-separated, empty items dropped) or any callable.
    """
    # EAFP lookup: an env var may legitimately hold an empty string, so
    # "missing" must be detected via the exception, not truthiness.
    try:
        value = os.environ[option]
    except KeyError:
        if isinstance(default, Undefined):
            raise UndefinedValueError(
                '{} not found. Declare it as envvar or define a default value.'.format(option))
        value = default

    if isinstance(cast, Undefined):
        # No cast requested: hand back the raw value unchanged.
        return value
    if cast is bool:
        return _cast_boolean(value)
    if cast is list:
        return [item for item in value.split(',') if item]
    return cast(value)
| [
"dan@danryan.co"
] | dan@danryan.co |
def mod_checker(x, mod=0):
    """Return a predicate reporting whether its argument *y* satisfies
    ``y % x == mod`` (with mod defaulting to 0, i.e. divisibility by x)."""
    # The original ``... if True else False`` conditional was a no-op:
    # the comparison already evaluates to a bool.
    return lambda y: y % x == mod
if __name__ == '__main__':
    # Smoke-test the factory; expected results are noted inline.
    mod_3 = mod_checker(3)
    print(mod_3(3))  # True
    print(mod_3(4))  # False
    mod_3_1 = mod_checker(3, 1)
    print(mod_3_1(4))  # True
| [
"anna.mirraza@gmail.com"
] | anna.mirraza@gmail.com |
e7b20be2f99f8e0b7bbe52395f49c97165efe8e8 | f6c7dfc1c77c1e6b2460982925d76ee10f9eaa52 | /media.py | febe922488f1f760cf32c511af681171b211bae3 | [] | no_license | kevinzhuang10/Movie_Trailer_Website | f8bd104688f14c37a72b238b4497acbafde3c166 | e4e61efb27f43faf0cc2acab6ea8e95037333486 | refs/heads/master | 2021-01-22T04:18:14.908976 | 2017-02-13T05:07:28 | 2017-02-13T05:07:28 | 81,530,060 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 794 | py | # Lesson 3.4: Make Classes
# Mini-Project: Movies Website
# In this file, you will define the class Movie. You could do this
# directly in entertainment_center.py but many developers keep their
# class definitions separate from the rest of their code. This also
# gives you practice importing Python files.
import webbrowser
class Movie():
    """Value object for one movie: its title, a poster image URL and a
    YouTube trailer URL (consumed by the trailer-website generator)."""

    def __init__(self, movie_title, movie_poster, movie_trailer):
        """Store the supplied title, poster URL and trailer URL."""
        print('Movie class constructor called')
        self.title, self.poster_image_url, self.trailer_youtube_url = (
            movie_title,
            movie_poster,
            movie_trailer,
        )
| [
"you@example.com"
] | you@example.com |
b966eccf30fa3b858d44755b73fe4bc3cff62c15 | 9a2726744fb12ebcbfd636a05fbd4cd36abfba7a | /config/settings/partials/INSTALLED_APPS.py | 32954a83dd7b1af16b8d0ece97f3628b518b2dc8 | [] | no_license | bartkim0426/drf_project_base | 7d34749269954022a11dfcfba99bc38e48a31ede | 7342c49fbf6da7a0fd5d22a6bfb172d2212d4b91 | refs/heads/master | 2021-08-22T19:32:30.522254 | 2017-12-01T03:00:11 | 2017-12-01T03:00:11 | 112,159,478 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 838 | py | ADMIN_APPS = [
    # for admin
    'filer',
    'suit',
    'easy_thumbnails',
]
# Django's own contrib apps.
DJANGO_APPS = [
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',
]
# Third-party packages.
THIRD_PARTY_APPS = [
    'rest_framework',
]
# Project apps (none yet).
LOCAL_APPS = [
]
# NOTE(review): MIDDLEWARE lives in this INSTALLED_APPS partial -- presumably
# intentional for this settings layout; confirm it is not duplicated elsewhere.
MIDDLEWARE = [
    'django.middleware.security.SecurityMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
# Final app list; ADMIN_APPS first so admin-skin packages override templates.
INSTALLED_APPS = ADMIN_APPS + DJANGO_APPS + THIRD_PARTY_APPS + LOCAL_APPS
| [
"bartkim0426@gmail.com"
] | bartkim0426@gmail.com |
f17e721848f3121c7f9530cc5ef759327dfb25d7 | 6bb6c9f37e61325671a72e4c0b7bf1e6ec062aac | /testinfra/plugin.py | 220ddd6dbf9614e2b6754d01260a738d0a7c8897 | [
"Apache-2.0"
] | permissive | Unity-Technologies/testinfra | 19ef3aadddef7322c66978b6b9576f992fd16a81 | 58fa564f0a3178e9df26b26e2f7cb54aad433d3f | refs/heads/master | 2021-01-19T21:58:21.534575 | 2017-04-27T08:16:54 | 2017-04-27T08:16:54 | 88,728,308 | 1 | 3 | null | 2017-04-19T09:45:41 | 2017-04-19T09:45:41 | null | UTF-8 | Python | false | false | 3,825 | py | # coding: utf-8
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# pylint: disable=redefined-outer-name
from __future__ import unicode_literals
import logging
import sys
import pytest
import testinfra
import testinfra.modules
def _generate_fixtures():
    """Create one pytest fixture per testinfra module (File, Package, ...)
    and attach it to this plugin module, so tests can request e.g. ``File``
    directly as an argument."""
    self = sys.modules[__name__]
    for modname in testinfra.modules.modules:
        def get_fixture(name):
            # Closure factory: binds `name` per iteration so each fixture
            # resolves its own module (avoids the late-binding pitfall).
            @pytest.fixture()
            def f(TestinfraBackend):
                return TestinfraBackend.get_module(name)
            f.__name__ = str(name)
            f.__doc__ = ('https://testinfra.readthedocs.io/en/latest/'
                         'modules.html#{0}'.format(name.lower()))
            return f
        setattr(self, modname, get_fixture(modname))

# Run at import time so the fixtures exist when pytest collects them.
_generate_fixtures()
@pytest.fixture()
def LocalCommand(TestinfraBackend):
    """Run commands locally

    Same as `Command` but run commands locally with subprocess even
    when the connection backend is not "local".

    Note: `LocalCommand` does NOT respect ``--sudo`` option
    """
    # TestinfraBackend is requested but unused in the body -- presumably to
    # tie this fixture to the parametrized backend's lifecycle; confirm.
    return testinfra.get_backend("local://").get_module("Command")
@pytest.fixture(scope="module")
def TestinfraBackend(_testinfra_backend):
    """Expose the parametrized backend (see pytest_generate_tests) to tests."""
    return _testinfra_backend
def pytest_addoption(parser):
    """Register testinfra's command-line options under their own group."""
    group = parser.getgroup("testinfra")
    group.addoption(
        "--connection",
        action="store",
        dest="connection",
        help=(
            "Remote connection backend (paramiko, ssh, safe-ssh, "
            "salt, docker, ansible)"
        )
    )
    group.addoption(
        "--hosts",
        action="store",
        dest="hosts",
        help="Hosts list (comma separated)",
    )
    group.addoption(
        "--ssh-config",
        action="store",
        dest="ssh_config",
        help="SSH config file",
    )
    group.addoption(
        "--sudo",
        action="store_true",
        dest="sudo",
        help="Use sudo",
    )
    group.addoption(
        "--sudo-user",
        action="store",
        dest="sudo_user",
        help="sudo user",
    )
    group.addoption(
        "--ansible-inventory",
        action="store",
        dest="ansible_inventory",
        help="Ansible inventory file",
    )
    group.addoption(
        "--nagios",
        action="store_true",
        dest="nagios",
        help="Nagios plugin",
    )
def pytest_generate_tests(metafunc):
    """Parametrize backend-using tests over the target hosts.

    Host list precedence: the ``--hosts`` option, else a module-level
    ``testinfra_hosts`` attribute, else a single default backend (None).
    """
    if "_testinfra_backend" in metafunc.fixturenames:
        if metafunc.config.option.hosts is not None:
            hosts = metafunc.config.option.hosts.split(",")
        elif hasattr(metafunc.module, "testinfra_hosts"):
            hosts = metafunc.module.testinfra_hosts
        else:
            hosts = [None]
        params = testinfra.get_backends(
            hosts,
            connection=metafunc.config.option.connection,
            ssh_config=metafunc.config.option.ssh_config,
            sudo=metafunc.config.option.sudo,
            sudo_user=metafunc.config.option.sudo_user,
            ansible_inventory=metafunc.config.option.ansible_inventory,
        )
        # One test id per backend so failures name the host they ran against.
        ids = [e.get_pytest_id() for e in params]
        metafunc.parametrize(
            "_testinfra_backend", params, ids=ids, scope="module")
def pytest_configure(config):
    """Enable testinfra debug logging when pytest runs with -vv or more."""
    if config.option.verbose <= 1:
        return
    logging.basicConfig()
    logging.getLogger("testinfra").setLevel(logging.DEBUG)
| [
"phil@philpep.org"
] | phil@philpep.org |
e2612c209edccfef435399a9d964423c08face7f | 98bf8263027c3cc6673cc4bd6189860092848bcd | /Livid_DS1_v2_x3/__init__.py | 9eb792ede46228249093840ab9c5064342e48242 | [] | no_license | aumhaa/aumhaa_commission | 09ae898ee5907721236ed2f97f76383ee8e25ae3 | 07ca157ae04e082059c583e4a8030cb73974e9e6 | refs/heads/master | 2022-10-09T11:40:31.332879 | 2022-09-11T03:32:43 | 2022-09-11T03:32:43 | 146,852,967 | 3 | 1 | null | null | null | null | UTF-8 | Python | false | false | 697 | py | # by amounra 0413 : http://www.aumhaa.com
from .DS1 import DS1
from _Framework.Capabilities import controller_id, inport, outport, CONTROLLER_ID_KEY, PORTS_KEY, HIDDEN, NOTES_CC, SCRIPT, REMOTE, SYNC, TYPE_KEY, FIRMWARE_KEY, AUTO_LOAD_KEY
def get_capabilities():
    """Describe the DS1 hardware to Live: USB ids, MIDI port properties and
    script type, enabling auto-detection of the controller."""
    ports_in = [inport(props=[HIDDEN, NOTES_CC, SCRIPT, REMOTE]),
                inport(props=[])]
    ports_out = [outport(props=[HIDDEN, NOTES_CC, SCRIPT, REMOTE]),
                 outport(props=[])]
    return {
        CONTROLLER_ID_KEY: controller_id(vendor_id=2536, product_ids=[115],
                                         model_name='Livid Instruments DS1'),
        PORTS_KEY: ports_in + ports_out,
        TYPE_KEY: 'push',
        AUTO_LOAD_KEY: False,
    }
def create_instance(c_instance):
    """ Creates and returns the DS1 script """
    # c_instance is the object Live hands to control-surface scripts.
    return DS1(c_instance)
| [
"aumhaa@gmail.com"
] | aumhaa@gmail.com |
5b1582e6cbdeeb188eb19d5c2349313222a6ba83 | da9b9f75a693d17102be45b88efc212ca6da4085 | /sdk/cosmos/azure-cosmos/azure/cosmos/diagnostics.py | 352a5a2f6b01787897e53ff55b1297a1adc2a5ce | [
"MIT",
"LicenseRef-scancode-generic-cla"
] | permissive | elraikhm/azure-sdk-for-python | e1f57b2b4d8cc196fb04eb83d81022f50ff63db7 | dcb6fdd18b0d8e0f1d7b34fdf82b27a90ee8eafc | refs/heads/master | 2021-06-21T22:01:37.063647 | 2021-05-21T23:43:56 | 2021-05-21T23:43:56 | 216,855,069 | 0 | 0 | MIT | 2019-10-22T16:05:03 | 2019-10-22T16:05:02 | null | UTF-8 | Python | false | false | 2,688 | py | # The MIT License (MIT)
# Copyright (c) 2014 Microsoft Corporation
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
"""Diagnostic tools for Cosmos
"""
from requests.structures import CaseInsensitiveDict
class RecordDiagnostics(object):
    """ Record Response headers from Cosmos read operations.

    The full response headers are stored in the ``headers`` property.

    Examples:

        >>> rh = RecordDiagnostics()

        >>> col = b.create_container(
        ...     id="some_container",
        ...     partition_key=PartitionKey(path='/id', kind='Hash'),
        ...     response_hook=rh)
        >>> rh.headers['x-ms-activity-id']
        '6243eeed-f06a-413d-b913-dcf8122d0642'

    """

    # Headers reachable via attribute access in __getattr__ (x_ms_* -> x-ms-*).
    _common = {
        "x-ms-activity-id",
        "x-ms-session-token",
        "x-ms-item-count",
        "x-ms-request-quota",
        "x-ms-resource-usage",
        "x-ms-retry-after-ms",
    }

    def __init__(self):
        self._headers = CaseInsensitiveDict()
        self._body = None
        self._request_charge = 0

    @property
    def headers(self):
        # Defensive copy so callers cannot mutate the recorded headers.
        return CaseInsensitiveDict(self._headers)

    @property
    def body(self):
        return self._body

    @property
    def request_charge(self):
        # Cumulative request charge accumulated across recorded responses.
        return self._request_charge

    def clear(self):
        # NOTE(review): only resets the charge accumulator; headers and body
        # keep the last response -- confirm this asymmetry is intentional.
        self._request_charge = 0

    def __call__(self, headers, body):
        # Invoked as a response hook: remember the last response and
        # accumulate its request charge (0 when the header is absent).
        self._headers = headers
        self._body = body
        self._request_charge += float(headers.get("x-ms-request-charge", 0))

    def __getattr__(self, name):
        # Map attribute access like rh.activity_id to header 'x-ms-activity-id'
        # for the whitelisted common headers above.
        key = "x-ms-" + name.replace("_", "-")
        if key in self._common:
            return self._headers[key]
        raise AttributeError(name)
| [
"noreply@github.com"
] | elraikhm.noreply@github.com |
fc7c905d94a3f8b1e6d92e5841566ba7f9041943 | 228ba698591ce49a0f220bf5acfc4bad67003172 | /weibo/pycharmspace/task/bonusRecognize/BonusRecognize.py | 8cd2b4009f00c9723bb507127f4c937d3176c08c | [] | no_license | HaxByCzy/python | 29e41ef95d713d26895c6f6611541c6487251756 | 3762fed0ea8a9ca63bfdfd70f8a2b62a15179ac0 | refs/heads/master | 2020-03-26T18:36:41.227656 | 2018-05-29T07:30:34 | 2018-05-29T07:30:34 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,943 | py | #-*- coding:utf-8 _*-
"""
--------------------------------------------------------------------
@function: 红包微博识别
@time: 2017-07-13
author:baoquan3
@version:
@modify:
--------------------------------------------------------------------
"""
import sys
from WeiboParser import WeiboParser
defaultencoding = 'utf-8'
if sys.getdefaultencoding() != defaultencoding:
reload(sys)
sys.setdefaultencoding(defaultencoding)
class Weibo(object):
    """
    Bean holding the features of a single weibo (microblog) post.
    """
    def __init__(self):
        self.id = ""       # weibo id
        self.url = ""      # post URL (original comment said "follower count" -- likely stale; verify)
        self.content = ""  # post text
        self.filter = ""   # originality bitmask (the raw "filter" field)
class Bonus(WeiboParser):
"""
发红包微博识别
"""
id = "ID"
url = "URL"
content = "CONTENT"
filter = "FILTER"
titileTag = "#"
def processWeibo(self):
"""
根据一条微博相关特征,识别是否是属于发红包微博
:return:
"""
if len(self.tmpDict) > 0:
wb = Weibo()
if Bonus.id in self.tmpDict:
wb.id = self.tmpDict[Bonus.id]
if Bonus.url in self.tmpDict:
wb.url = self.tmpDict[Bonus.url]
if Bonus.content in self.tmpDict:
wb.content = self.tmpDict[Bonus.content]
if Bonus.filter in self.tmpDict:
wb.filter = self.tmpDict[Bonus.filter]
status = self.isBonus(wb)
if status:
self.outputReadWeibo(wb)
self.tmpDict.clear()
self.keyList = []
def isBonus(self, wb):
"""
根据微博特征,判断是否是发红包用户
:param wb: 微博特征
:return: true or false
"""
keyword = True if wb.content.find("红包") != -1 and wb.content.find("我") != -1 else False
if keyword:
hyperlink = self.hasHyperlink(wb.content)
if hyperlink:
title = self.hasTitle(wb.content)
if title:
orgin = False if int(wb.filter) & 4 != 0 else True
if orgin:
length = True if len(wb.content) > 10 else False
if length:
return True
else:
False
else:
return False
else:
return False
else:
return False
else:
return False
def hasTitle(self, line):
"""
根据微博内容判断是否包含话题
:param line: 微博内容
:return: true or false
"""
prefixIndex = line.find(Bonus.titileTag)
if prefixIndex != -1:
suffixIndex = line.find(Bonus.titileTag, prefixIndex + 1)
if suffixIndex != -1:
return True
else:
return False
def hasHyperlink(self, line):
"""
判断微博内容是否包含超链接
:param line: 微博内容
:return: true or false
"""
prefixIndex = line.find("<sina:link src=")
if prefixIndex != -1:
suffixIndex = line.find("/>", prefixIndex + 1)
if suffixIndex != -1:
return True
else:
return False
def outputReadWeibo(self, wb):
"""
输出已识别出发红包用户微博,以方便阅读输出格式输出
:param wb: 微博特征
:return:
"""
print "{0}\t{1}\t{2}\t{3}".format(wb.id,wb.filter, wb.url, wb.content)
if __name__ == "__main__":
    # Ad-hoc driver: feed every line of a local dump file through the parser.
    inFile = "D://data//wbTest.dat"
    bonus = Bonus()
    with open(inFile,"r") as f:
        for line in f:
            bonus.processOneRecord(line.strip())
    bonus.flush()
"tszhbaoquan@163.com"
] | tszhbaoquan@163.com |
c4150bd16ec216344b75814082beff1bc17cdcc2 | d93159d0784fc489a5066d3ee592e6c9563b228b | /RecoLocalCalo/HcalRecProducers/python/HFUpgradeReconstructor_cfi.py | 41371cc1ba7909ad4fc48d8759d1f0f795097d96 | [] | permissive | simonecid/cmssw | 86396e31d41a003a179690f8c322e82e250e33b2 | 2559fdc9545b2c7e337f5113b231025106dd22ab | refs/heads/CAallInOne_81X | 2021-08-15T23:25:02.901905 | 2016-09-13T08:10:20 | 2016-09-13T08:53:42 | 176,462,898 | 0 | 1 | Apache-2.0 | 2019-03-19T08:30:28 | 2019-03-19T08:30:24 | null | UTF-8 | Python | false | false | 500 | py | import FWCore.ParameterSet.Config as cms
# HcalSimpleReconstructor configured for the upgraded HF digis.
# NOTE(review): parameter semantics follow the HcalSimpleReconstructor
# producer; firstSample=2 / samplesToAdd=1 presumably selects a single time
# slice with no containment or time-slew corrections -- confirm against the
# producer's documentation.
hfUpgradeReco = cms.EDProducer("HcalSimpleReconstructor",
    correctionPhaseNS = cms.double(0.0),
    digiLabel = cms.InputTag("simHcalUnsuppressedDigis","HFUpgradeDigiCollection"),
    Subdetector = cms.string('upgradeHF'),
    correctForPhaseContainment = cms.bool(False),
    correctForTimeslew = cms.bool(False),
    dropZSmarkedPassed = cms.bool(True),
    firstSample = cms.int32(2),
    samplesToAdd = cms.int32(1),
    tsFromDB = cms.bool(True)
)
"giulio.eulisse@gmail.com"
] | giulio.eulisse@gmail.com |
87bc3f1a5823b448754c8dc4c28a12e411d905c5 | 3e7a8c5630de986a4d02011b1bd368c041f3f477 | /作图/利用pyecharts制作柱状图.py | 4748a756419e67069fd49d9794be5896276551f3 | [] | no_license | gswyhq/hello-world | b9ef715f80d2b39c8efaa1aa2eb18a6257e26218 | b1ab053a05e1f8c63b8b04d6904a3cdca450bd9f | refs/heads/master | 2023-05-26T13:15:36.788620 | 2023-05-19T13:38:50 | 2023-05-19T13:38:50 | 158,821,148 | 16 | 6 | null | 2021-03-19T02:59:48 | 2018-11-23T11:04:43 | Python | UTF-8 | Python | false | false | 2,317 | py | #!/usr/bin/python3
# coding: utf-8
# NOTE: Since pyecharts v0.3.2, map JS files are no longer bundled (to keep
# the package lightweight).  Install the map packages you need:
#   echarts-countries-pypkg (1.9MB): world map and 213 countries, incl. China
#   echarts-china-provinces-pypkg (730KB): 23 provinces, 5 autonomous regions
#   echarts-china-cities-pypkg (3.8MB): 370 Chinese cities
#   echarts-china-counties-pypkg (4.1MB): 2882 Chinese counties/districts
#   echarts-china-misc-pypkg (148KB): 11 regional maps (e.g. South/North China)
# Pick what you need:
#   pip3 install echarts-countries-pypkg
#   pip3 install echarts-china-provinces-pypkg
#   pip3 install echarts-china-cities-pypkg
#   pip3 install echarts-china-counties-pypkg
#   pip3 install echarts-china-misc-pypkg
#   pip3 install echarts-united-kingdom-pypkg
from pyecharts.charts import Bar
from pyecharts import options as opts
# 生成 HTML
bar = (
Bar()
.add_xaxis(["衬衫", "毛衣", "领带", "裤子", "风衣", "高跟鞋", "袜子"])
.add_yaxis("商家A", [114, 55, 27, 101, 125, 27, 105])
.add_yaxis("商家B", [57, 134, 137, 129, 145, 60, 49])
.set_global_opts(title_opts=opts.TitleOpts(title="某商场销售情况"))
)
bar.render(path="render2.html")
# 生成图片
from snapshot_selenium import snapshot as driver
from pyecharts import options as opts
from pyecharts.charts import Bar
from pyecharts.render import make_snapshot
def bar_chart() -> Bar:
    """Build a horizontal bar chart (reversed axes, labels on the right),
    used below to demo rendering a chart to a PNG via make_snapshot."""
    c = (
        Bar()
        .add_xaxis(["衬衫", "毛衣", "领带", "裤子", "风衣", "高跟鞋", "袜子"])
        .add_yaxis("商家A", [114, 55, 27, 101, 125, 27, 105])
        .add_yaxis("商家B", [57, 134, 137, 129, 145, 60, 49])
        .reversal_axis()
        .set_series_opts(label_opts=opts.LabelOpts(position="right"))
        .set_global_opts(title_opts=opts.TitleOpts(title="Bar-测试渲染图片"))
    )
    return c
# 需要安装 snapshot_selenium; 当然也可以使用: snapshot_phantomjs
make_snapshot(driver, bar_chart().render(path="render2.html"), "bar.png")
def main():
pass
if __name__ == '__main__':
main()
| [
"gswyhq@126.com"
] | gswyhq@126.com |
aa30c3ca06c891c085bb4fa75f45ef0510c91188 | 0c787f10f5fc666e275e01954f430cb894f1fa65 | /__init__.py | 30e33671ae1fa68e0ba341441138f3fd7beb1a7b | [] | no_license | marcinowski/web-archive | db391c32c2cf05bbcee1abee2825b8bb1295f86f | ea590e6699d97c2554e15e2c7598fb8b30a3f292 | refs/heads/master | 2021-01-20T02:31:25.147941 | 2017-04-26T23:17:21 | 2017-04-26T23:17:21 | 89,422,764 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,006 | py | """
:created on: 2017-04-24
:author: Marcin Muszynski
:contact: marcinowski007@gmail.com
UseCase: pass url and a path -> generate one compiled html with css classes
"""
import requests
from lxml import etree
from urllib.parse import urlparse
class WebArchiveException(Exception):
""""""
class WebArchive(object):
def __init__(self, url, path):
self.url = url
self.path = path
self.session = requests.Session()
self.html = self._get_content(self.url)
self.tree = etree.HTML(self.html)
self.url_parsed = urlparse(self.url)
self.result_html = ''
self.css_styles = ''
self.js_scripts = ''
def save(self):
self._handle_links()
self._handle_styles()
self._handle_scripts()
self._insert_elements()
self._save_file()
def _handle_links(self):
for link in self.tree.iter(tag='link'):
if link.get('rel') == 'stylesheet':
url = self._resolve_url(link.get('href'))
self.css_styles += self._get_content(url)
link.getparent().remove(link)
def _handle_styles(self):
for style in self.tree.iter(tag='style'):
self.css_styles += style.text or ''
style.getparent().remove(style)
def _handle_scripts(self):
for script in self.tree.iter(tag='script'):
src = script.get('src')
if not src:
self.js_scripts += script.text or ''
else:
url = self._resolve_url(src)
self.js_scripts += self._get_content(url)
script.getparent().remove(script)
def _insert_elements(self):
self._insert_element('style', self.css_styles)
self._insert_element('script', self.js_scripts)
def _insert_element(self, _tag, content):
tag = etree.Element(_tag)
tag.text = content
self.tree.find('head').append(tag)
def _save_file(self):
with open(self.path, 'wb') as f:
f.write(etree.tostring(self.tree))
def _resolve_url(self, url):
if url.startswith('http'): # absolute url
return url
if url.startswith('//'): # schema relative
return self.url_parsed.schema + ":" + url
if url.starstwith('/'): # relative url
page_root = self.url_parsed.schema + self.url_parsed.netloc
return page_root + url
bwd_path = url.count('../')
base = list(self.tree.iter(tag='base')) # 0 or 1
base_url = base[0].get('href') if base else self.url
return base_url.rsplit('/', bwd_path)[0] + '/' + url
def _get_content(self, url):
try:
return self._get_response(url)
except requests.RequestException:
raise WebArchiveException("Message")
except AttributeError:
raise WebArchiveException("Message")
def _get_response(self, url):
return self.session.get(url).content.decode()
| [
"muszynskimarcin@wp.pl"
] | muszynskimarcin@wp.pl |
67f38c3e694291ed8a8e71da84c20b48d5d4aa3a | 6543b77d0f483be2eb57b4e5e94928c6bff8eed9 | /venv/Scripts/pip3-script.py | bccaaa75db6144c30e115e29250c87f76933bbac | [] | no_license | aswini1414/sampleproject | 0876daaea3195f0a2245a66a4f023927ffdbb433 | 5e225ac796bf5508d2c2c646ed94a764b021fce8 | refs/heads/master | 2020-06-18T14:50:19.580453 | 2019-07-08T04:13:48 | 2019-07-08T04:13:48 | 196,337,684 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 417 | py | #!C:\Users\LOKESH\PycharmProjects\sampleproject\venv\Scripts\python.exe
# EASY-INSTALL-ENTRY-SCRIPT: 'pip==9.0.1','console_scripts','pip3'
__requires__ = 'pip==9.0.1'
import re
import sys
from pkg_resources import load_entry_point
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
sys.exit(
load_entry_point('pip==9.0.1', 'console_scripts', 'pip3')()
)
| [
"lokesh.rapala"
] | lokesh.rapala |
5b24b27095e2a964135f369dc7b99aa2c27fa227 | cca3f6a0accb18760bb134558fea98bb87a74806 | /abc150/C/main.py | d1f234e48a0bc6c9180201d774917dc7eedfb1d2 | [] | no_license | Hashizu/atcoder_work | 5ec48cc1147535f8b9d0f0455fd110536d9f27ea | cda1d9ac0fcd56697ee5db93d26602dd8ccee9df | refs/heads/master | 2023-07-15T02:22:31.995451 | 2021-09-03T12:10:57 | 2021-09-03T12:10:57 | 382,987,918 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,056 | py | #!/usr/bin/env python3
import sys
import math
def solve(N: int, P: "List[int]", Q: "List[int]"):
cnta = 0
cntb = 0
for i, x in enumerate(P):
p_ = x-1
for y in range(i):
if P[y] < x : p_-=1
cnta += p_ * math.factorial(N - 1 - i)
for i, x in enumerate(Q):
p_ = x-1
for y in range(i):
if Q[y] < x : p_-=1
cntb += p_ * math.factorial(N - 1 - i)
# print(cnta, cntb)
print(abs(cnta-cntb))
return
# Generated by 1.1.6 https://github.com/kyuridenamida/atcoder-tools (tips: You use the default template now. You can remove this line by using your custom template)
def main():
def iterate_tokens():
for line in sys.stdin:
for word in line.split():
yield word
tokens = iterate_tokens()
N = int(next(tokens)) # type: int
P = [int(next(tokens)) for _ in range(N)] # type: "List[int]"
Q = [int(next(tokens)) for _ in range(N)] # type: "List[int]"
solve(N, P, Q)
if __name__ == '__main__':
main()
| [
"athenenoctus@gmail.com"
] | athenenoctus@gmail.com |
eb4cf0a9a96c1f9e3d5610478b4b25452ee384db | acd41dc7e684eb2e58b6bef2b3e86950b8064945 | /res/packages/scripts/scripts/client/gui/Scaleform/locale/NATIONS.py | 342d5f81bc76cdde4e0867e06d1e5cc393033665 | [] | no_license | webiumsk/WoT-0.9.18.0 | e07acd08b33bfe7c73c910f5cb2a054a58a9beea | 89979c1ad547f1a1bbb2189f5ee3b10685e9a216 | refs/heads/master | 2021-01-20T09:37:10.323406 | 2017-05-04T13:51:43 | 2017-05-04T13:51:43 | 90,268,530 | 0 | 0 | null | null | null | null | WINDOWS-1250 | Python | false | false | 1,147 | py | # 2017.05.04 15:25:08 Střední Evropa (letní čas)
# Embedded file name: scripts/client/gui/Scaleform/locale/NATIONS.py
"""
This file was generated using the wgpygen.
Please, don't edit this file manually.
"""
from debug_utils import LOG_WARNING
class NATIONS(object):
USSR = '#nations:ussr'
GERMANY = '#nations:germany'
USA = '#nations:usa'
FRANCE = '#nations:france'
UK = '#nations:uk'
JAPAN = '#nations:japan'
CZECH = '#nations:czech'
CHINA = '#nations:china'
SWEDEN = '#nations:sweden'
ALL_ENUM = (USSR,
GERMANY,
USA,
FRANCE,
UK,
JAPAN,
CZECH,
CHINA,
SWEDEN)
@classmethod
def all(cls, key0):
outcome = '#nations:{}'.format(key0)
if outcome not in cls.ALL_ENUM:
LOG_WARNING('Localization key "{}" not found'.format(outcome))
return None
else:
return outcome
# okay decompyling C:\Users\PC\wotmods\files\originals\res\packages\scripts\scripts\client\gui\Scaleform\locale\NATIONS.pyc
# decompiled 1 files: 1 okay, 0 failed, 0 verify failed
# 2017.05.04 15:25:08 Střední Evropa (letní čas)
| [
"info@webium.sk"
] | info@webium.sk |
fcecd995ff300d8a765f6450f92bc9160b5af1bf | 4f0beffcf200dd0b42cc192a4a2b9c2d0f0a2a42 | /rewrite/terms.py | e8441a0d6416715bc03c0303ba9a3a0f8c34b926 | [
"BSD-2-Clause"
] | permissive | rebcabin/pyrewrite | d11a2385920a8b375e9f3f7a4cc81ed8a178cf28 | 226d139ff361b095d46a58c65bc345cd18443008 | refs/heads/master | 2020-03-26T00:09:40.835150 | 2013-02-13T16:46:24 | 2013-02-13T16:46:24 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,345 | py | #------------------------------------------------------------------------
# Terms
#------------------------------------------------------------------------
class ATerm(object):
def __init__(self, term, annotation=None):
self.term = term
self.annotation = annotation
def __str__(self):
if self.annotation is not None:
return str(self.term) + arepr([self.annotation], '{', '}')
else:
return str(self.term)
def __eq__(self, other):
if isinstance(other, ATerm):
return self.term == other.term
else:
return False
def __ne__(self, other):
return not self == other
def __repr__(self):
return str(self)
def __show__(self):
pass
class AAppl(object):
def __init__(self, spine, args):
assert isinstance(spine, ATerm)
self.spine = spine
self.args = args
def __eq__(self, other):
if isinstance(other, AAppl):
return self.spine == other.spine and self.args == other.args
else:
return False
def __ne__(self, other):
return not self == other
def __str__(self):
return str(self.spine) + arepr(self.args, '(', ')')
def __repr__(self):
return str(self)
def __show__(self):
pass
class AString(object):
def __init__(self, val):
assert isinstance(val, str)
self.val = val
def __str__(self):
return '"%s"' % (self.val)
def __repr__(self):
return str(self)
class AInt(object):
def __init__(self, val):
self.val = val
def __str__(self):
return str(self.val)
def __eq__(self, other):
if isinstance(other, AInt):
return self.val == other.val
else:
return False
def __ne__(self, other):
return not self == other
def __repr__(self):
return str(self)
class AReal(object):
def __init__(self, val):
self.val = val
def __str__(self):
return str(self.val)
def __eq__(self, other):
if isinstance(other, AReal):
return self.val == other.val
else:
return False
def __ne__(self, other):
return not self == other
def __repr__(self):
return str(self)
class AList(object):
def __init__(self, args):
assert isinstance(args, list)
self.args = args or []
def __str__(self):
return arepr(self.args, '[', ']')
def __eq__(self, other):
if isinstance(other, AList):
return self.args == other.args
else:
return False
def __ne__(self, other):
return not self == other
def __repr__(self):
return str(self)
class ATuple(object):
def __init__(self, args):
assert isinstance(args, list)
self.args = args or []
def __eq__(self, other):
if isinstance(other, ATuple):
return self.args == other.args
else:
return False
def __ne__(self, other):
return not self == other
def __str__(self):
return arepr(self.args, '(', ')')
def __repr__(self):
return str(self)
class APlaceholder(object):
def __init__(self, type, args):
self.type = type
self.args = args
def __str__(self):
if self.args is not None:
return '<%s(%r)>' % (self.type, self.args)
else:
return arepr([self.type], '<', '>')
def __repr__(self):
return str(self)
#------------------------------------------------------------------------
# Pretty Printing
#------------------------------------------------------------------------
def arepr(terms, l, r):
""" Concatenate str representations with commas and left and right
characters. """
return l + ', '.join(map(str, terms)) + r
#------------------------------------------------------------------------
# Compatability
#------------------------------------------------------------------------
# Goal is to only define the protocol that these need to conform
# to and then allow pluggable backends.
# - ATerm
# - Python AST
# - SymPy
aterm = ATerm
aappl = AAppl
aint = AInt
astr = AString
areal = AReal
atupl = ATuple
alist = AList
aplaceholder = APlaceholder
| [
"stephen.m.diehl@gmail.com"
] | stephen.m.diehl@gmail.com |
719faa299021b6df1bc12e54e26cdac10f9dd376 | 5b9b2ec5fb3142609882a3320c6e64c6b912395c | /GfG/Mathematical Problems/fibGoldenRatio.py | 0e52e95781f08c6731859bed0821762acff507e2 | [] | no_license | anildhaker/DailyCodingChallenge | 459ba7ba968f4394fb633d6ba8b749c1e4cb7fb0 | f1cfc52f156436dc7c0a6c43fa939cefac5cee36 | refs/heads/master | 2020-04-20T18:58:33.133225 | 2020-01-11T14:50:52 | 2020-01-11T14:50:52 | 169,036,874 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 355 | py | # Nth fib number using Golden ratio.
# nth fib = (n-1)th fib * golden ratio
# round up each stage to get the correct result.
Phi = 1.6180339
f = [0, 1, 1, 2, 3, 5]
def fib(n):
if n < 6:
return f[n]
t = 5
tn = 5
while t < n:
tn = round(tn * Phi)
print(t)
t += 1
return tn
n = 9
print(n, "th Fibonacci Number =", fib(n)) | [
"anildhaker777@gmail.com"
] | anildhaker777@gmail.com |
80d5cbec5df9333fbfb7da16a4a745cc46197a3c | 42c815151def498244ad0f6c01bc217eab955b8a | /tools/collect_data_stat.py | 2aa88b23aceb155e010f72497e8c34e297b9f640 | [
"Apache-2.0"
] | permissive | openvinotoolkit/mmaction2 | 5636577a420eafffeb2338c00a7a6e9f3bf8248a | 496cec37a4bae9b3d10c9d8fa46b244e4310b2fe | refs/heads/ote | 2023-07-16T13:22:55.477849 | 2021-09-01T20:42:59 | 2021-09-01T20:42:59 | 300,213,346 | 5 | 4 | NOASSERTION | 2021-09-01T20:43:00 | 2020-10-01T08:59:04 | Python | UTF-8 | Python | false | false | 4,497 | py | # Copyright (C) 2021 Intel Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions
# and limitations under the License.
import sys
import argparse
import numpy as np
import mmcv
from mmaction.datasets import build_dataloader, build_dataset
from mmaction.utils import ExtendedDictAction
from mmaction.core.utils import propagate_root_dir
from mmcv.runner import set_random_seed
def update_config(cfg, args):
if args.num_workers is not None and args.num_workers > 0:
cfg.data.workers_per_gpu = args.num_workers
cfg.data.test.test_mode = True
normalize_idx = [i for i, v in enumerate(cfg.data.test.pipeline) if v['type'] == 'Normalize'][0]
cfg.data.test.pipeline[normalize_idx]['mean'] = [0.0, 0.0, 0.0]
cfg.data.test.pipeline[normalize_idx]['std'] = [1.0, 1.0, 1.0]
cfg.data.test.pipeline[normalize_idx]['to_bgr'] = False
return cfg
def merge_configs(cfg1, cfg2):
# Merge cfg2 into cfg1
# Overwrite cfg1 if repeated, ignore if value is None.
cfg1 = {} if cfg1 is None else cfg1.copy()
cfg2 = {} if cfg2 is None else cfg2
for k, v in cfg2.items():
if v:
cfg1[k] = v
return cfg1
def collect_stat(data_loader):
mean_data, std_data = [], []
progress_bar = mmcv.ProgressBar(len(data_loader.dataset))
for data in data_loader:
input_data = data['imgs'].detach().squeeze().cpu().numpy()
mean_data.append(np.mean(input_data, axis=(2, 3, 4)))
std_data.append(np.std(input_data, axis=(2, 3, 4)))
batch_size = len(input_data)
for _ in range(batch_size):
progress_bar.update()
mean_data = np.concatenate(mean_data, axis=0)
std_data = np.concatenate(std_data, axis=0)
return mean_data, std_data
def filter_stat(mean_data, std_data, min_value=1.0):
mask = np.all(mean_data > min_value, axis=1) & np.all(std_data > min_value, axis=1)
return mean_data[mask], std_data[mask]
def dump_stat(mean_data, std_data, out_filepath):
assert mean_data.shape == std_data.shape
with open(out_filepath, 'w') as output_stream:
for mean_value, std_value in zip(mean_data, std_data):
mean_value_str = ','.join(str(v) for v in mean_value)
std_value_str = ','.join(str(v) for v in std_value)
output_stream.write(f'{mean_value_str} {std_value_str}\n')
def parse_args():
parser = argparse.ArgumentParser(description='Test model deployed to ONNX or OpenVINO')
parser.add_argument('config', help='path to configuration file')
parser.add_argument('out', help='path to save stat')
parser.add_argument('--data_dir', type=str,
help='the dir with dataset')
parser.add_argument('--num_workers', type=int,
help='number of CPU workers per GPU')
parser.add_argument('--update_config', nargs='+', action=ExtendedDictAction,
help='Update configuration file by parameters specified here.')
args = parser.parse_args()
return args
def main(args):
# load config
cfg = mmcv.Config.fromfile(args.config)
if args.update_config is not None:
cfg.merge_from_dict(args.update_config)
cfg = update_config(cfg, args)
cfg = propagate_root_dir(cfg, args.data_dir)
if cfg.get('seed'):
print(f'Set random seed to {cfg.seed}')
set_random_seed(cfg.seed)
# build the dataset
dataset = build_dataset(cfg.data, 'test', dict(test_mode=True))
print(f'Test datasets:\n{str(dataset)}')
# build the dataloader
data_loader = build_dataloader(
dataset,
videos_per_gpu=20,
workers_per_gpu=cfg.data.workers_per_gpu,
dist=False,
shuffle=False
)
# collect results
mean_data, std_data = collect_stat(data_loader)
# filter data
mean_data, std_data = filter_stat(mean_data, std_data, min_value=1.0)
# dump stat
dump_stat(mean_data, std_data, args.out)
if __name__ == '__main__':
args = parse_args()
sys.exit(main(args) or 0) | [
"evgeny.izutov@intel.com"
] | evgeny.izutov@intel.com |
298bf01200ac8b294e118cbe01d81b9d7e99da96 | 35e41dab4a3e8eebe17872f5414cf3efb86be54c | /math/0x03-probability/exponential.py | cdca021ebc6f68ec512c14e54e7dd7aecbf85047 | [] | no_license | yasheymateen/holbertonschool-machine_learning | 52eaf217e924404e8833e75e8b48e80807d784dc | 1329ba88bf9ae50693188e1c85383d9b76c2b1f4 | refs/heads/master | 2022-12-24T13:51:02.743526 | 2020-10-08T06:59:04 | 2020-10-08T06:59:04 | 255,277,037 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,017 | py | #!/usr/bin/env python3
""" Exponential distribution class """
class Exponential:
""" Exponential class """
def __init__(self, data=None, lambtha=1.):
""" initial class constructor """
if data is None:
if lambtha <= 0:
raise ValueError("lambtha must be a positive value")
self.lambtha = float(lambtha)
else:
if type(data) is not list:
raise TypeError("data must be a list")
if len(data) < 2:
raise ValueError("data must contain multiple values")
self.lambtha = len(data) / sum(data)
def pdf(self, x):
""" Return pdf of exp distribution of x """
if x < 0:
return 0
return self.lambtha * pow(2.7182818285, -1 *
self.lambtha * x)
def cdf(self, x):
""" Return cdf of exp distribution of x """
if x < 0:
return 0
return 1 - pow(2.7182818285, -1 * self.lambtha * x)
| [
"yasheymateen@gmail.com"
] | yasheymateen@gmail.com |
c07e24422565117f12a61cbaf45b486534d7ab97 | 88906fbe13de27413a51da917ebe46b473bec1b9 | /Part-I/Chapter 9 - Classes/user_class.py | 52ac6d4c5bd85b5435ecd806761bceed66dabb9d | [] | no_license | lonewolfcub/Python-Crash-Course | 0b127e40f5029d84ad036263fd9153f6c88c2420 | 322388dfb81f3335eeffabcdfb8f9c5a1db737a4 | refs/heads/master | 2021-01-01T16:45:50.617189 | 2017-10-27T14:23:58 | 2017-10-27T14:23:58 | 97,911,584 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,598 | py | """A class to model user attributes"""
class User:
"""A simple attempt to model a user"""
def __init__(self, first_name, last_name, username, employee_number,
employee_type, email, password, admin, banned):
self.first_name = first_name
self.last_name = last_name
self.username = username
self.employee_number = employee_number
self.employee_type = employee_type
self.email = email
self.password = password
self.admin = admin
self.banned = banned
self.login_attempts = 0
def describe_user(self):
print("\nUsername: " + self.username)
print(
"Name: " + self.first_name.title() + " " + self.last_name.title())
print("Employee Number: " + str(self.employee_number))
print("Employee Type: " + self.employee_type.title())
print("User Email: " + self.email)
print("Password: " + self.password)
print("User is Admin: " + str(self.admin))
print("User is Banned: " + str(self.banned))
def greet_user(self):
print("\nHello " + self.first_name.title() + "!")
def increment_login_attempts(self):
"""Increment a user's login attempts by 1"""
self.login_attempts += 1
def reset_login_attempts(self):
"""Reset a user's login attempts to 0"""
self.login_attempts = 0
def get_user_login_attempts(self):
"""Print a user's number of login attempts"""
print(self.username + " has attempted to log in " +
str(self.login_attempts) + " times.") | [
"lonewolfcub020@gmail.com"
] | lonewolfcub020@gmail.com |
2dcf6c43da0f2e8dca468ba4306efb52659010fa | 18fa8cd12c0082df6887ab00381d7134ec41ec9e | /actors.py | 3d1ab0d378a18ff6e60f5f1ec3bb6545cd4a2986 | [] | no_license | reddit-pygame/Rotation-Thruster-Movement | 9afb7e87b632f3956160946ad9f82e26a9538b7e | f6ed16061697f6d4349cd34c146f95933abcb03b | refs/heads/master | 2021-01-19T08:05:33.725764 | 2015-08-01T11:10:55 | 2015-08-01T11:10:55 | 39,951,568 | 2 | 3 | null | null | null | null | UTF-8 | Python | false | false | 1,205 | py | """
This module contains the Player class for the user controlled character.
"""
import pygame as pg
import prepare
class Player(pg.sprite.Sprite):
"""
This class represents our user controlled character.
"""
def __init__(self, pos, image, speed=7, *groups):
super(Player, self).__init__(*groups)
self.speed = speed
self.image = pg.transform.rotozoom(image, 90, prepare.SCALE_FACTOR)
self.rect = self.image.get_rect(center=pos)
def update(self, keys, bounding):
"""
Updates the players position based on currently held keys.
"""
move = self.check_keys(keys)
self.rect.move_ip(*move)
self.rect.clamp_ip(bounding)
def check_keys(self, keys):
"""
Find the players movement vector from key presses.
"""
move = [0, 0]
for key in prepare.DIRECT_DICT:
if keys[key]:
for i in (0, 1):
move[i] += prepare.DIRECT_DICT[key][i]*self.speed
return move
def draw(self, surface):
"""
Basic draw function. (not used if drawing via groups)
"""
surface.blit(self.image, self.rect)
| [
"3ror@live.com"
] | 3ror@live.com |
9b20d644629767b38d8dcea784f6bfdbb209c97d | ac4b9385b7ad2063ea51237fbd8d1b74baffd016 | /.history/utils/ocr/handle_image_20210209171341.py | a2f29889d4bc94d4226866258b46adb684002b71 | [] | no_license | preethanpa/ssoemprep | 76297ef21b1d4893f1ac2f307f60ec72fc3e7c6f | ce37127845253c768d01aeae85e5d0d1ade64516 | refs/heads/main | 2023-03-09T00:15:55.130818 | 2021-02-20T06:54:58 | 2021-02-20T06:54:58 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,038 | py | import os
import cv2
import re
import numpy as np
from PIL import Image
import pytesseract
from pytesseract import Output
from fpdf import FPDF
'''
IMAGE HANDLING METHODS
'''
# get grayscale image
def get_grayscale(image):
return cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)
# blur removal
def remove_blur(image):
return cv2.medianBlur(image,5)
# noise removal
def remove_noise(image):
return cv2.fastNlMeansDenoisingColored(image, None, 10, 10, 7, 15)
#thresholding
def thresholding(image):
return cv2.threshold(image, 0, 255, cv2.THRESH_BINARY + cv2.THRESH_OTSU)[1]
#dilation
def dilate(image):
kernel = np.ones((5,5),np.uint8)
return cv2.dilate(image, kernel, iterations = 1)
#erosion
def erode(image):
kernel = np.ones((5,5),np.uint8)
return cv2.erode(image, kernel, iterations = 1)
def extract_pdf_from_image(fileName='', pdf_path='', action='', psm=3):
'''
Extract text from image and save as PDF.
fileName=''
pdf_path='',
action='',
psm=3
'''
print(f'FileName is {fileName}')
#custom_config = r'-c tessedit_char_whitelist=123456789MALEPQRETHANabcdefghijklmnopqrstuvwxyz --psm 6'
#custom_config = r'-l eng --psm 11'
custom_config = r'-l eng --psm ' + str(psm)
pdfdir = pdf_path
if not os.path.exists(pdfdir):
os.makedirs(pdfdir)
# pdfFileName = os.path.basename(fileName).split('.')[0] + '.pdf'
pdfFileName = os.path.basename(fileName).split('.')[0]+ '.pdf'
pdfFilePath = pdfdir + '/' + pdfFileName
print(f'PDF File Path {pdfFilePath}')
#d = pytesseract.image_to_data(img, output_type=Output.DICT)
img = cv2.imread(fileName)
img1 = None
if (action == 1):
img1 = remove_noise(img)
if (action == 2):
img1 = get_grayscale(img)
#img1 = erode(img)
if (action == 3):
img1 = remove_blur(img)
#text = pytesseract.image_to_string(img1, config=custom_config,lang='eng')
text = pytesseract.image_to_pdf_or_hocr(img1, extension='pdf')
with open(pdfFilePath, mode = 'w+b') as f:
f.write(text)
return pdfFilePath
def convert_text_to_pdf(text='', pdf_path='', filename=''):
'''
Convert text file to PDF
text=''
pdf_path=''
filename=''
'''
tempdir = "/tmp"
pdfdir = pdf_path
textFileName = tempdir + '/' + filename + ".txt"
pdfFileName = pdfdir + '/' + filename + ".pdf"
if not os.path.exists(tempdir):
os.makedirs(tempdir)
if not os.path.exists(pdfdir):
os.makedirs(pdfdir)
# save FPDF() class into a
# variable pdf
pdf = FPDF()
# Add a page
pdf.add_page()
# set style and size of font
# that you want in the pdf
pdf.set_font("Arial", size = 15)
with open(textFileName, mode = 'w+b') as f:
f.write(text)
line = 1
f = open(textFileName, "r")
for x in f:
x1 = re.sub(u"(\u2014|\u2018|\u2019|\u201c|\u201d)", "", x)
pdf.cell(100, 10, txt=x1, ln=line, align='L')
line=line+1
#save the pdf with name .pdf
pdf.output(pdfFileName,'F')
def mark_region(image_path):
print(f'image_path {image_path}')
image = None
im = cv2.imread(image_path)
gray = cv2.cvtColor(im, cv2.COLOR_BGR2GRAY)
blur = cv2.GaussianBlur(gray, (9,9), 0)
thresh = cv2.adaptiveThreshold(blur,255,cv2.ADAPTIVE_THRESH_GAUSSIAN_C, cv2.THRESH_BINARY_INV,11,30)
# Dilate to combine adjacent text contours
kernel = cv2.getStructuringElement(cv2.MORPH_RECT, (9,9))
dilate = cv2.dilate(thresh, kernel, iterations=4)
# Find contours, highlight text areas, and extract ROIs
cnts = cv2.findContours(dilate, cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE)
cnts = cnts[0] if len(cnts) == 2 else cnts[1]
line_items_coordinates = []
for c in cnts:
area = cv2.contourArea(c)
x,y,w,h = cv2.boundingRect(c)
if y >= 600 and x <= 1000:
if area > 10000:
image = cv2.rectangle(im, (x,y), (2200, y+h), color=(255,0,255), thickness=3)
line_items_coordinates.append([(x,y), (2200, y+h)])
if y >= 2400 and x<= 2000:
image = cv2.rectangle(im, (x,y), (2200, y+h), color=(255,0,255), thickness=3)
line_items_coordinates.append([(x,y), (2200, y+h)])
print
return (image, line_items_coordinates) | [
"{abhi@third-ray.com}"
] | {abhi@third-ray.com} |
fe0444560a8e563535551046d5c1226618e180c7 | d5370925ce06a5b26a76b3fde3ce6da2100c43cd | /setup.py | 1210b5f0be2366394d3232ace3c1ce2ae80a078d | [
"MIT"
] | permissive | a-tal/esi-bot | c92c5ba8aef40f6a25701c5150bcb9ecdb0aeff1 | 81418d6438957c8299927fdb267c6598ad521bbe | refs/heads/master | 2020-03-20T20:55:18.205210 | 2018-06-18T06:03:46 | 2018-06-18T06:13:10 | 137,714,477 | 0 | 0 | null | 2018-06-18T05:48:08 | 2018-06-18T05:48:07 | null | UTF-8 | Python | false | false | 943 | py | """ESI slack bot."""
import os
from setuptools import setup
from setuptools import find_packages
from setuphelpers import git_version
from setuphelpers import long_description
setup(
name="esi-bot",
version=git_version(),
description="ESI slack bot",
long_description=long_description(),
packages=find_packages(),
author="Adam Talsma",
author_email="adam@talsma.ca",
url="https://github.com/esi/esi-bot/",
download_url="https://github.com/esi/esi-bot/",
install_requires=[
"requests >= 2.18.4",
"slackclient >= 1.2.1",
"gevent >= 1.2.2",
],
setup_requires=["setuphelpers >= 0.1.2"],
entry_points={"console_scripts": ["esi-bot = esi_bot.bot:main"]},
classifiers=[
"Development Status :: 4 - Beta",
"Operating System :: POSIX :: Linux",
"Programming Language :: Python :: 3.6",
"License :: OSI Approved :: MIT License",
],
)
| [
"github@talsma.ca"
] | github@talsma.ca |
08e18455f072120dcb93754c798d761535899ac4 | 0e8e9bef32f40f5d0fd8c302293ae66732770b66 | /2015/pythonlearn/numpy/aboutSinSpeed.py | 1bbd1191245377d4199d35b7cfe85988651d4956 | [] | no_license | tuouo/selfstudy | 4a33ec06d252388816ad38aa44838e2b728178d4 | 139a0d63477298addfff5b9ea8d39fab96734f25 | refs/heads/master | 2021-01-24T18:37:31.023908 | 2018-03-01T02:55:16 | 2018-03-01T02:55:16 | 84,453,967 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 657 | py | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
import time, math, numpy as np
x = [i * 0.001 for i in range(100000)]
start = time.clock()
for i, v in enumerate(x):
x[i] = math.sin(v)
end = time.clock()
print("math.sin:", end - start)
x = [i * 0.001 for i in range(100000)]
start2 = time.clock()
x = np.array(x)
np.sin(x, x)
end2 = time.clock()
print("numpy.sin:", end2 - start2)
print("math/numpy:", (end - start) / (end2 - start2))
x = [i * 0.001 for i in range(100000)]
start3 = time.clock()
for i, v in enumerate(x):
x[i] = np.sin(v)
end3 = time.clock()
print("numpy.sin per:", end3 - start3)
print("math/per:", (end - start) / (end3 - start3)) | [
"ltytuotuo@gmail.com"
] | ltytuotuo@gmail.com |
4178c42be2a16843929bf99b3abf9d4f92102c0b | 3d4292db087b8ab98b4751a16b50bcee90dd78bb | /freshdesk/v2/errors.py | 964241d4ecbf820b5877b3f43be210c76f3ed6c6 | [
"BSD-2-Clause"
] | permissive | sjkingo/python-freshdesk | 3a0588ba554e14705a13eb3ab8d8c65f9b1545e1 | 87ccfb3d945c5867bfa92d55aad03dc98466a2aa | refs/heads/master | 2023-01-09T08:10:20.113448 | 2022-09-16T01:29:20 | 2022-09-16T01:29:20 | 28,647,308 | 85 | 82 | BSD-2-Clause | 2023-01-07T00:16:05 | 2014-12-30T22:15:02 | Python | UTF-8 | Python | false | false | 626 | py | from requests import HTTPError
class FreshdeskError(HTTPError):
"""
Base error class.
Subclassing HTTPError to avoid breaking existing code that expects only HTTPErrors.
"""
class FreshdeskBadRequest(FreshdeskError):
"""Most 40X and 501 status codes"""
class FreshdeskUnauthorized(FreshdeskError):
"""401 Unauthorized"""
class FreshdeskAccessDenied(FreshdeskError):
"""403 Forbidden"""
class FreshdeskNotFound(FreshdeskError):
"""404"""
class FreshdeskRateLimited(FreshdeskError):
"""429 Rate Limit Reached"""
class FreshdeskServerError(FreshdeskError):
"""50X errors"""
| [
"sam@sjkwi.com.au"
] | sam@sjkwi.com.au |
d0405afd7c812af5e36619f67ceca4c53351cce2 | 7e145d1fff87cdabf7c9ae9c08637f299fbd3849 | /222. complete binary tree.py | 5ac0c8ab6f3a81cac087cc91c8a121b54628f920 | [] | no_license | dundunmao/LeetCode2019 | 2b39ef181a7f66efc9de7d459b11eb1a4a7f60a8 | 9b38a7742a819ac3795ea295e371e26bb5bfc28c | refs/heads/master | 2020-09-16T16:46:50.482697 | 2020-06-07T08:01:16 | 2020-06-07T08:01:16 | 223,833,958 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,557 | py | # 满足三点:
# 1:左右都是full
# 2:左边min > 右边 max
# 3: 左边max 与 右边min 的差 小于等于1
class TreeNode:
def __init__(self, val):
self.val = val
self.left, self.right = None, None
class Solution:
"""
@param root, the root of binary tree.
@return true if it is a complete binary tree, or false.
"""
def isComplete(self, root):
# Write your code here
validate, max, min= self.helper(root)
return validate
def helper(self,root):
if root is None:
return True, 0,0
left = self.helper(root.left)
right = self.helper(root.right)
if not left[0] or not right[0]:
return False, 0,0
if left[2]<right[1] or left[1]>right[2]+1:
return False, 0,0
return True, left[1]+1, right[2]+1
if __name__ == '__main__':
# TREE 1
# Construct the following tree
# 1
# / \
# 2 3
# / \
# 4 5
# / \
# 6 7
# \
# 8
P = TreeNode(1)
P.left = TreeNode(2)
P.left.left = TreeNode(4)
P.left.right = TreeNode(5)
# P.left.right.left = TreeNode(6)
# P.left.right.right = TreeNode(7)
# P.left.right.right.right = TreeNode(8)
P.right = TreeNode(3)
#
#
# Q = Node(26)
# Q.left = Node(10)
# Q.left.left = Node(4)
# Q.left.right = Node(6)
# Q.right = Node(3)
# # Q.right.right = Node(3)
s = Solution()
print s.isComplete(P)
| [
"dundunmao@gmail.com"
] | dundunmao@gmail.com |
886ac80011b04f26ef9f2b7d6a592fd307102e9c | 904d7d50f22447a0803da412bae719a7cfa04392 | /math/0x00-linear_algebra/8-ridin_bareback.py | 127a28653655529a75ab02fb1d7979c78586e571 | [] | no_license | dbaroli/holbertonschool-machine_learning | 354b57932b8882598bc3ba304fd2004ba4bca169 | 7f9a040f23eda32c5aa154c991c930a01b490f0f | refs/heads/master | 2023-01-21T11:50:59.796803 | 2020-11-24T02:03:51 | 2020-11-24T02:03:51 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 638 | py | #!/usr/bin/env python3
"""function to multiply 2 matrices"""
def mat_mul(mat1, mat2):
""" multiply two matrices
Args:
mat1, mat2: Given matrices
Return:
the new mat: new_mat
"""
if len(mat1[0]) != len(mat2):
return None
else:
new_mat = []
for i in range(len(mat1)):
mat_i = []
for j in range(len(mat2[0])):
vec = 0
for k in range(len(mat2)):
vec += mat1[i][k] * mat2[k][j]
mat_i.append(vec)
new_mat.append(mat_i)
for x in new_mat:
return new_mat
| [
"931@holbertonschool.com"
] | 931@holbertonschool.com |
dd360585048649dfafbd491a46af9e01b08d22f9 | 48c522df53c37a7b9bea3d17f403556e01eeb24c | /ubuntu-20-04-scripts/build_tf_dataset/third/look_at_tokenized_att_disassembly.py | 5db7e41c41aaee82efce70e659362f1e6ba56cd3 | [] | no_license | flobotics/func_sign_prob | 32ee8a8e4d1e33bec9253cd21e7609827d370fc9 | 18000e04ea108fb2530a50a9de4f8996cde398f0 | refs/heads/master | 2023-02-10T19:08:01.536243 | 2021-01-02T23:25:38 | 2021-01-02T23:25:38 | 292,960,645 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,127 | py | import tarfile
import os
import sys
import pickle
def get_all_tar_filenames(tar_file_dir):
    """Return the names of all ``.pickle`` files inside *tar_file_dir*.

    An empty list is returned when the directory does not exist.
    """
    # Guard clause: a missing directory yields no filenames.
    if not os.path.isdir(tar_file_dir):
        return []
    return [name for name in os.listdir(tar_file_dir)
            if name.endswith(".pickle")]
def get_pickle_file_content(full_path_pickle_file):
    """Load and return the object stored in *full_path_pickle_file*.

    Bug fix: the original opened the file without a context manager, so
    the handle leaked if ``pickle.load`` raised; ``with`` guarantees the
    file is closed on every path.
    """
    with open(full_path_pickle_file, 'rb') as pickle_file:
        # latin1 lets Python-2-era pickles with byte strings load cleanly.
        return pickle.load(pickle_file, encoding='latin1')
def print_one_pickle_list_item(pickle_file_content):
    """Print the first (disassembly, return-type) pair from a pickle list.

    Bug fix: ``next(iter(...))`` raised StopIteration on an empty
    sequence; the two-argument form of ``next`` returns None instead, so
    an empty input now takes the error branch rather than crashing.
    """
    item = next(iter(pickle_file_content), None)
    if item:
        print(f'caller-and-callee-disassembly: {item[0]}')
        print(f'return-type: {item[1]}')
    else:
        print('Error item[0]')
def main():
    """Print one sample entry from the first pickle found in /tmp/save_dir."""
    for filename in get_all_tar_filenames('/tmp/save_dir'):
        content = get_pickle_file_content('/tmp/save_dir/' + filename)
        print_one_pickle_list_item(content)
        # Only the first pickle file is inspected.
        break


if __name__ == "__main__":
    main()
| [
"ubu@ubu-VirtualBox"
] | ubu@ubu-VirtualBox |
5ae4af3e8cafe2a8f7f1d21cc8b9335b1ef170d2 | 3b9b4049a8e7d38b49e07bb752780b2f1d792851 | /src/third_party/WebKit/Tools/Scripts/webkitpy/common/webkit_finder.py | 3681938d8b0d413f85884a827054bea7f375f8a2 | [
"LGPL-2.0-or-later",
"GPL-1.0-or-later",
"MIT",
"Apache-2.0",
"BSD-3-Clause",
"LicenseRef-scancode-warranty-disclaimer",
"LGPL-2.1-only",
"GPL-2.0-only",
"LGPL-2.0-only",
"BSD-2-Clause",
"LicenseRef-scancode-other-copyleft"
] | permissive | webosce/chromium53 | f8e745e91363586aee9620c609aacf15b3261540 | 9171447efcf0bb393d41d1dc877c7c13c46d8e38 | refs/heads/webosce | 2020-03-26T23:08:14.416858 | 2018-08-23T08:35:17 | 2018-09-20T14:25:18 | 145,513,343 | 0 | 2 | Apache-2.0 | 2019-08-21T22:44:55 | 2018-08-21T05:52:31 | null | UTF-8 | Python | false | false | 5,150 | py | # Copyright (c) 2012 Google Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import os
import sys
class WebKitFinder(object):
    """Resolves well-known paths inside a WebKit/Chromium checkout.

    All locations are derived from this module's position on disk and are
    cached after the first computation.  ``filesystem`` is expected to
    expose the webkitpy FileSystem interface (sep, join, dirname, ...).
    """

    def __init__(self, filesystem):
        self._filesystem = filesystem
        self._dirsep = filesystem.sep
        # Snapshot the search paths later used to locate depot_tools.
        self._sys_path = sys.path
        self._env_path = os.environ['PATH'].split(os.pathsep)
        # Lazily-computed caches, filled in by the accessors below.
        self._webkit_base = None
        self._chromium_base = None
        self._depot_tools = None

    def webkit_base(self):
        """Returns the absolute path to the top of the WebKit tree.

        Raises an AssertionError if the top dir can't be determined."""
        # Note: This code somewhat duplicates the code in
        # scm.find_checkout_root(). However, that code only works if the top
        # of the SCM repository also matches the top of the WebKit tree. Some SVN users
        # (the chromium test bots, for example), might only check out subdirectories like
        # Tools/Scripts. This code will also work if there is no SCM system at all.
        # Bug fix: removed the no-op self-assignment
        # ``self._webkit_base = self._webkit_base`` that preceded the real
        # computation in the original.
        if not self._webkit_base:
            module_path = self._filesystem.abspath(self._filesystem.path_to_module(self.__module__))
            tools_index = module_path.rfind('Tools')
            assert tools_index != -1, "could not find location of this checkout from %s" % module_path
            self._webkit_base = self._filesystem.normpath(module_path[0:tools_index - 1])
        return self._webkit_base

    def chromium_base(self):
        """Returns the absolute path to the Chromium src/ parent directory
        (two levels above the WebKit tree), computed lazily and cached."""
        if not self._chromium_base:
            self._chromium_base = self._filesystem.dirname(self._filesystem.dirname(self.webkit_base()))
        return self._chromium_base

    def path_from_webkit_base(self, *comps):
        """Joins *comps* onto the WebKit base directory."""
        return self._filesystem.join(self.webkit_base(), *comps)

    def path_from_chromium_base(self, *comps):
        """Joins *comps* onto the Chromium base directory."""
        return self._filesystem.join(self.chromium_base(), *comps)

    def path_to_script(self, script_name):
        """Returns the relative path to the script from the top of the WebKit tree."""
        # This is intentionally relative in order to force callers to consider what
        # their current working directory is (and change to the top of the tree if necessary).
        return self._filesystem.join("Tools", "Scripts", script_name)

    def layout_tests_dir(self):
        """Absolute path to the LayoutTests directory."""
        return self.path_from_webkit_base('LayoutTests')

    def perf_tests_dir(self):
        """Absolute path to the PerformanceTests directory."""
        return self.path_from_webkit_base('PerformanceTests')

    def depot_tools_base(self):
        """Locates the depot_tools checkout, trying sys.path, then $PATH,
        then walking upward from the WebKit base.  Result is cached."""
        if not self._depot_tools:
            # This basically duplicates src/build/find_depot_tools.py without the side effects
            # (adding the directory to sys.path and importing breakpad).
            self._depot_tools = (self._check_paths_for_depot_tools(self._sys_path) or
                                 self._check_paths_for_depot_tools(self._env_path) or
                                 self._check_upward_for_depot_tools())
        return self._depot_tools

    def _check_paths_for_depot_tools(self, paths):
        """Returns the first entry in *paths* that ends with 'depot_tools'
        (ignoring trailing separators), or None if there is no such entry."""
        for path in paths:
            if path.rstrip(self._dirsep).endswith('depot_tools'):
                return path
        return None

    def _check_upward_for_depot_tools(self):
        """Walks up from the WebKit base looking for a sibling depot_tools
        directory (identified by containing pylint.py); returns its path or
        None when the filesystem root is reached without a match."""
        fs = self._filesystem
        prev_dir = ''
        current_dir = fs.dirname(self._webkit_base)
        while current_dir != prev_dir:
            if fs.exists(fs.join(current_dir, 'depot_tools', 'pylint.py')):
                return fs.join(current_dir, 'depot_tools')
            prev_dir = current_dir
            current_dir = fs.dirname(current_dir)
        # Explicit None for clarity; the original fell off the end.
        return None

    def path_from_depot_tools_base(self, *comps):
        """Joins *comps* onto the depot_tools directory."""
        return self._filesystem.join(self.depot_tools_base(), *comps)
"changhyeok.bae@lge.com"
] | changhyeok.bae@lge.com |
b17666a09326f7efec2051a3498367140a3e60e5 | 547db7801930874bf8298304b7ae453695ab3751 | /zeabus_imaging_sonar/src/sonar_server_gate.py | f1460a2c779ca010d16068c44c1b386bed208eb8 | [] | no_license | skconan/zeabus2018 | 5fc764b5039799fa7d80f7e86345822b50282d2e | 16e2fc1a21daea813833f9afb89f64142b5b8682 | refs/heads/master | 2020-03-25T12:39:38.759263 | 2018-07-27T23:53:50 | 2018-07-27T23:53:50 | 143,786,404 | 0 | 1 | null | 2018-08-06T21:41:52 | 2018-08-06T21:41:52 | null | UTF-8 | Python | false | false | 1,055 | py | #!/usr/bin/env python
import sys
import cv2
import time
import numpy as np
import roslib
import rospy
import math
#from _sonar_msg import *
#from _sonar_srv import *
from zeabus_imaging_sonar.srv import sonar_gatesrv
sonar_image = None
def server(start):
    """Call the /sonar_image service proxy and unpack its reply.

    Args:
        start: forwarded unchanged to the service call.

    Returns:
        A (theta, r, status) tuple taken from the service response.

    NOTE(review): Python 2 code (statement-form ``print``).  Relies on the
    module-level ``sonar_image`` proxy being initialised by the __main__
    block before this is called; the exception handling is commented out,
    so a failed service call propagates to the caller.
    """
    global sonar_image
    # try:
    response = sonar_image(start)
    print response
    return response.theta, response.r, response.status
    # except rospy.ServiceException, e:
    #     print "Service call failed: %s"%e
if __name__ == "__main__":
    # Wait for the /sonar_image ROS service, then poll it once per second
    # until ROS shuts down, printing each raw response.
    # NOTE(review): Python 2 code; ``start`` and ``count`` are set but the
    # loop never passes ``start`` to the service call.
    print "Waiting"
    start = True
    count = 0
    # global sonar_image
    rospy.wait_for_service('/sonar_image')
    sonar_image = rospy.ServiceProxy('/sonar_image', sonar_gatesrv)
    print 'service start'
    while not rospy.is_shutdown():
        count += 1
        time.sleep(1)
        response = sonar_image()
        """for i, (ri, thetai) in enumerate(zip(response.r, response.theta)):
            thetai = thetai-90
            print "(%d, %d)" %(ri, thetai)"""
        #print (response.data.r)
        print response
| [
"supakit.kr@gmail.com"
] | supakit.kr@gmail.com |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.