blob_id
stringlengths
40
40
directory_id
stringlengths
40
40
path
stringlengths
3
288
content_id
stringlengths
40
40
detected_licenses
listlengths
0
112
license_type
stringclasses
2 values
repo_name
stringlengths
5
115
snapshot_id
stringlengths
40
40
revision_id
stringlengths
40
40
branch_name
stringclasses
684 values
visit_date
timestamp[us]date
2015-08-06 10:31:46
2023-09-06 10:44:38
revision_date
timestamp[us]date
1970-01-01 02:38:32
2037-05-03 13:00:00
committer_date
timestamp[us]date
1970-01-01 02:38:32
2023-09-06 01:08:06
github_id
int64
4.92k
681M
star_events_count
int64
0
209k
fork_events_count
int64
0
110k
gha_license_id
stringclasses
22 values
gha_event_created_at
timestamp[us]date
2012-06-04 01:52:49
2023-09-14 21:59:50
gha_created_at
timestamp[us]date
2008-05-22 07:58:19
2023-08-21 12:35:19
gha_language
stringclasses
147 values
src_encoding
stringclasses
25 values
language
stringclasses
1 value
is_vendor
bool
2 classes
is_generated
bool
2 classes
length_bytes
int64
128
12.7k
extension
stringclasses
142 values
content
stringlengths
128
8.19k
authors
listlengths
1
1
author_id
stringlengths
1
132
590926c548bbf4d2c80cd8848ec14070f263882b
4d0bbeb8ab52f7e450aff20056f7509e12751258
/functional_tests/test_list_item_validation.py
b76997f17a27784079ace3f2665c4571d9f5f356
[]
no_license
chicocheco/tdd_book
f7c9246dcb4eb5327704c72f655bf6e187b28849
574b1082aa523c7434f50e0c4cbdf5777ddf50ef
refs/heads/master
2022-05-02T17:44:27.217329
2020-03-13T18:57:22
2020-03-13T18:57:22
197,633,503
0
0
null
2022-04-22T22:19:12
2019-07-18T17:56:43
JavaScript
UTF-8
Python
false
false
4,895
py
from selenium.webdriver.common.keys import Keys from unittest import skip from .base import FunctionalTest class ItemValidationTest(FunctionalTest): # YAGNI, 3 strikes and refactor, not moving helper methods to base.py if not needed elsewhere def get_error_element(self): return self.browser.find_element_by_css_selector('.has-error') def test_cannot_add_empty_list_items(self): # Tania jde na domovskou stranku a omylem zkusi na seznam pridat prazdny text. self.browser.get(self.live_server_url) self.get_item_input_box().send_keys(Keys.ENTER) # every click or enter should be followed by some wait # prohlizec zastavi pozadavek protoze jsme nezadali nic do 'required' policka # (HTML5) browser adds a CSS pseudoselector ":invalid" to the id parameter of the element # and pops up "Fill out the field" alert # fallback: if the browser like Safari, does not fully implement HTML5, so the custom error message will be used self.wait_for(lambda: self.browser.find_element_by_css_selector('#id_text:invalid')) """ lambda: when you want to save a function with arguments to a variable/parameter but not executing it yet >>> myfn = lambda: addthree(2) # note addthree is not called immediately here >>> myfn <function <lambda> at 0x7f3b140339d8> >>> myfn() # execute it here 5 """ # Nyni to zkusi znovu s nejakym textem pro polozku, coz funguje, chyba zmizi # CSS pseudoselector changes from #id_text:invalid to #id_text:valid self.get_item_input_box().send_keys('Buy milk') self.wait_for(lambda: self.browser.find_element_by_css_selector('#id_text:valid')) # a muze bez problemu potvrdit predmet self.get_item_input_box().send_keys(Keys.ENTER) self.wait_for_row_in_list_table('1: Buy milk') # Skodolibe, ted zkusi znovu pridat druhou prazdnou polozku (jiz pro existujici seznam) self.get_item_input_box().send_keys(Keys.ENTER) # Dostane podobne varovani na strance listu, prohlizec nadava self.wait_for(lambda: self.browser.find_element_by_css_selector('#id_text:invalid')) # Ted to muze opravit vyplnenim pole nejakym textem self.get_item_input_box().send_keys('Make tea') self.wait_for(lambda: self.browser.find_element_by_css_selector('#id_text:valid')) self.get_item_input_box().send_keys(Keys.ENTER) self.wait_for_row_in_list_table('1: Buy milk') self.wait_for_row_in_list_table('2: Make tea') # self.fail('Finish this test!') def test_cannot_add_duplicate_items(self): # Tania jde na domovskou stranku a zacne novy list self.browser.get(self.live_server_url) self.add_list_item('Buy wellies') # omylem zkusi zadat stejnou polozku znovu (novy list jiz existuje) self.get_item_input_box().send_keys('Buy wellies') self.get_item_input_box().send_keys(Keys.ENTER) # vidi uzitecnou chybovou zpravu ze zadava duplikat self.wait_for(lambda: self.assertEqual( self.get_error_element().text, "You've already got this in your list" )) def test_error_messages_are_cleared_on_input(self): # Tania otevre novy seznam a zpusobi validaci error # this uses JavaScript and can be tested as: # python manage.py test # functional_tests.test_list_item_validation.ItemValidationTest.test_error_messages_are_cleared_on_input self.browser.get(self.live_server_url) self.add_list_item('Banter too thick') self.get_item_input_box().send_keys('Banter too thick') self.get_item_input_box().send_keys(Keys.ENTER) self.wait_for(lambda: self.assertTrue( self.get_error_element().is_displayed() )) # zacne neco psat do policka aby zmizela chybova hlaska o jiz existujici polozce self.get_item_input_box().send_keys('a') # ma radost, ze chybova hlaska zmizi self.wait_for((lambda: 
self.assertFalse( self.get_error_element().is_displayed() ))) def test_error_messages_are_cleared_on_focus(self): # Tania otevre novy seznam a zpusobi validaci error self.browser.get(self.live_server_url) self.add_list_item('Banter too thin') self.get_item_input_box().send_keys('Banter too thin') self.get_item_input_box().send_keys(Keys.ENTER) self.wait_for(lambda: self.assertTrue( self.get_error_element().is_displayed() )) # klikne do policka aby zmizela chybova hlaska o jiz existujici polozce self.get_item_input_box().click() # ma radost, ze chybova hlaska zmizi self.wait_for((lambda: self.assertFalse( self.get_error_element().is_displayed() )))
[ "stanislav.matas@gmail.com" ]
stanislav.matas@gmail.com
bf50837fd80b831d40a6bba91fc419a1019c4bd2
fa346a2d5886420e22707a7be03599e634b230a9
/temboo/Library/Amazon/S3/__init__.py
905fa3010c0e9c654a3d2c2b1a47f51be344b1f9
[]
no_license
elihuvillaraus/entity-resolution
cebf937499ed270c3436b1dd25ab4aef687adc11
71dd49118a6e11b236861289dcf36436d31f06bc
refs/heads/master
2021-12-02T17:29:11.864065
2014-01-08T04:29:30
2014-01-08T04:29:30
null
0
0
null
null
null
null
UTF-8
Python
false
false
1,274
py
from PutBucketACL import * from GetBucketLocation import * from DeleteMultipleObjects import * from GetBucketWebsite import * from DeleteBucketCORS import * from DeleteBucketTagging import * from GetObjectACL import * from ZipBucket import * from DeleteBucket import * from PutBucketWebsiteRedirectAll import * from GetBucketVersioning import * from PutBucketWebsiteRedirect import * from GetBucketList import * from PutObjectACL import * from PutBucketCORS import * from GetService import * from GetBucketCORS import * from GetBucketNotification import * from GetBase64EncodedObject import * from DeleteBucketLifecycle import * from GetBucketLifecycle import * from GetObjectTorrent import * from PutBucketNotification import * from PutBucketTagging import * from PutBucketVersioning import * from GetBucketPolicy import * from GetBucketTagging import * from GetBucketACL import * from CopyObject import * from PutBucketLifecycle import * from ZipObject import * from DeleteObject import * from PutBucket import * from PutBucketPolicy import * from GetBucketLogging import * from PutObject import * from PutBucketLogging import * from DeleteBucketPolicy import * from PutBucketRequestPayment import * from DeleteBucketWebsite import * from GetBucketRequestPayment import *
[ "cedric.warny@gmail.com" ]
cedric.warny@gmail.com
8487f1f63135e4bffeb4b1e070046c863dd458cf
52f734b8f04ed0c88e3a41de2b5fb4aa3b2c1a8b
/data_process.py
3eb1ff7174c0319da6678cce0e541f8ad5cfd25f
[]
no_license
hongjy127/CNN_ultrasonics
d79398d947ffca0014da2f917b2871362d624e91
11c5614ac734f4d7af9a4a488ddc52bb4be28b87
refs/heads/master
2023-02-27T00:10:58.396549
2021-02-03T16:39:40
2021-02-03T16:39:40
328,112,167
0
0
null
null
null
null
UTF-8
Python
false
false
1,863
py
import numpy as np import matplotlib.pyplot as plt from pyts.image import RecurrencePlot # 설정 정보 불러오기 class Configuration: def __init__(self): config = self.load() self.fname = config['FNAME'] def load(self): config = {} with open('config.ini','rt') as f: entries = f.readlines() for entry in entries: key, value = entry.split('=') config[key.strip()] = value.strip() return config def __str__(self): return f'<Configuration fname {self.fname}>' # data.csv file 불러오기 def load(fname): datas = np.loadtxt(fname, delimiter=',', dtype=np.float32) signals = datas[:, :-1] labels = datas[:, -1].astype('int') data = (signals, labels) return data # scaling def scaling(signals): signals = signals/np.max(np.abs(signals)) return signals # 이미지로 변환 (memory 문제 - 사용 X) def sig2img(signals): rp = RecurrencePlot(dimension=1, time_delay=1, threshold=None) signals.reshape(1,-1) img = rp.fit_transform(signals) return img # Cross validation def CV(signals, labels): pass if __name__ == "__main__": # config 확인 config = Configuration() fname = config.fname print(fname) # load 확인 data = load(fname) print(data[0][-1]) print(data[1][-1]) print(data[0].shape, data[1].shape) signals = data[0] plt.plot(signals[-1,:]) # scaling 확인 signals = scaling(signals) plt.plot(signals[-1,:]) plt.show() # sig2img 확인 signal = signals[0:2] img = sig2img(signal) print(img.shape) fig = plt.figure() rows = 1 cols = 2 ax1 = fig.add_subplot(rows, cols, 1) ax1.imshow(img[0]) ax2 = fig.add_subplot(rows, cols, 2) ax2.imshow(img[1]) plt.show()
[ "hhhong127@gmail.com" ]
hhhong127@gmail.com
1a4dbf5f1ac9cc33c0301f9ba2db8c21e1972c06
781029dcc468a7d1467a17727870d526da1df985
/algorithm/2806_N-queens/sol.py
d0219b8816f66160b7d8c92f7d7a7d00ec2b6996
[]
no_license
Huijiny/TIL
5f0edec5ad187029e04ed2d69e85ae4d278e048d
d1a974b3cacfb45b2718f87d5c262a23986c6574
refs/heads/master
2023-09-03T15:28:11.744287
2021-10-21T12:38:10
2021-10-21T12:38:10
335,220,747
0
0
null
null
null
null
UTF-8
Python
false
false
850
py
import sys sys.stdin = open('sample_input (3).txt') def is_exist_diagnal(cur_pos): for queen in queens: if abs(cur_pos[0] - queen[0]) == abs(cur_pos[1] - queen[1]): return True return False def n_queens(row): global visited, count if row == N: count += 1 else: for col in range(N): # 같은 열에 다른 퀸이 존재하는지 체크 및 대각선 체크 if not visited[col] and not is_exist_diagnal((col, row)): visited[col] = True queens.append((col, row)) n_queens(row + 1) queens.pop() visited[col] = False T = int(input()) for tc in range(1, T+1): N = int(input()) count = 0 visited = [False] * N queens = [] n_queens(0) print("#{} {}".format(tc, count))
[ "jiin20803@gmail.com" ]
jiin20803@gmail.com
97b61b984b05740f9ba96560cbecd106998ce823
d4ca0866381e577e3d36a22735d02eb4bf817b10
/roman_to_integer.py
17099c8e9ce1347818f6937355f5dd6828331434
[]
no_license
haiwenzhu/leetcode
e842936b69bbaf5695de1f98c8c17507819435dd
bc068c2b00793ae72439efe5bdecaeed029e9f65
refs/heads/master
2021-01-15T13:11:22.672952
2015-07-25T04:45:36
2015-07-25T04:45:36
36,838,199
0
0
null
null
null
null
UTF-8
Python
false
false
724
py
class Solution: """ @see https://oj.leetcode.com/problems/roman-to-integer/ """ # @return an integer def romanToInt(self, s): chart = dict(I=1, V=5, X=10, L=50, C=100, D=500, M=1000) if s == "": return 0 n = chart[s[-1]] for i in range(2, len(s)+1): if chart[s[-i]] < chart[s[-i+1]]: n -= chart[s[-i]] else: n += chart[s[-i]] return n if __name__ == "__main__": solution = Solution() print(solution.romanToInt("")) print(solution.romanToInt("VII") == 7) print(solution.romanToInt("XXXIX") == 39) print(solution.romanToInt("DCCCXC") == 890)
[ "bugwhen@gmail.com" ]
bugwhen@gmail.com
30bded28cc0dcda07789d41d085dfdb3d9e6e17c
82fce9aae9e855a73f4e92d750e6a8df2ef877a5
/Lab/venv/lib/python3.8/site-packages/OpenGL/GLES2/NV/shader_atomic_fp16_vector.py
99ece98dcff7af485333887d940e48ccac1ff4b7
[]
no_license
BartoszRudnik/GK
1294f7708902e867dacd7da591b9f2e741bfe9e5
6dc09184a3af07143b9729e42a6f62f13da50128
refs/heads/main
2023-02-20T19:02:12.408974
2021-01-22T10:51:14
2021-01-22T10:51:14
307,847,589
0
0
null
null
null
null
UTF-8
Python
false
false
883
py
'''OpenGL extension NV.shader_atomic_fp16_vector This module customises the behaviour of the OpenGL.raw.GLES2.NV.shader_atomic_fp16_vector to provide a more Python-friendly API Overview (from the spec) This extension provides GLSL built-in functions and assembly opcodes allowing shaders to perform a limited set of atomic read-modify-write operations to buffer or texture memory with 16-bit floating point vector surface formats. The official definition of this extension is available here: http://www.opengl.org/registry/specs/NV/shader_atomic_fp16_vector.txt ''' from OpenGL.raw.GLES2.NV.shader_atomic_fp16_vector import _EXTENSION_NAME def glInitShaderAtomicFp16VectorNV(): '''Return boolean indicating whether this extension is available''' from OpenGL import extensions return extensions.hasGLExtension( _EXTENSION_NAME ) ### END AUTOGENERATED SECTION
[ "rudnik49@gmail.com" ]
rudnik49@gmail.com
c4d74c2b8f344ee2be3556aa368f15b22c62cd6e
cb1d06e91347a23438057d9f40b5a74cad595766
/autonetkit/anm/__init__.py
4ee754a1a2282b1c46bccc42aaac4a0d566210a7
[]
permissive
plucena24/autonetkit
9f94d3fba6bfad54793a7de58ef17439c2c71f0b
f7e8c03ee685d5b89f9028cb556017e730e0446c
refs/heads/master
2023-08-16T18:03:54.593010
2014-11-07T13:43:39
2014-11-07T13:43:39
27,204,033
0
0
BSD-3-Clause
2023-08-08T18:36:36
2014-11-27T01:36:38
Python
UTF-8
Python
false
false
273
py
from autonetkit.anm.network_model import NetworkModel as NetworkModel from autonetkit.anm.graph import NmGraph as NmGraph from autonetkit.anm.node import NmNode as NmNode from autonetkit.anm.edge import NmEdge as NmEdge from autonetkit.anm.interface import NmPort as NmPort
[ "simon.knight@gmail.com" ]
simon.knight@gmail.com
8be656429e0ccfbc6d5b995c311d4436a9d86d31
3ca67d69abd4e74b7145b340cdda65532f90053b
/programmers/난이도별/level03.표 편집/sangmandu.py
309e210a1f26779c12c4f4c6978734fa138431d5
[]
no_license
DKU-STUDY/Algorithm
19549516984b52a1c5cd73e1ed1e58f774d6d30e
6f78efdbefd8eedab24e43d74c7dae7f95c2893b
refs/heads/master
2023-02-18T06:48:39.309641
2023-02-09T07:16:14
2023-02-09T07:16:14
258,455,710
175
49
null
2023-02-09T07:16:16
2020-04-24T08:42:27
Python
UTF-8
Python
false
false
3,870
py
''' https://programmers.co.kr/learn/courses/30/lessons/81303 표 편집 [풀이] 0. 연결리스트 문제 => 보통 이런문제가 나오면 클래스로 짜는 사람이 있다. 참 대단.. 1. 연결리스트를 딕셔너리로 구현한다. => i번째 노드는 원소가 2개인 list 타입의 값을 가진다. => [0] : left = i-1번째 노드 => [1] : right = i+1번째 노드 => 0번 노드와 n-1번 노드는 양쪽 끝에 None을 가지고 있다. 2. 명령어가 C, Z일때를 조건으로 하고 그 외에는 split()을 한다. 2-1. C일 경우 => rm 리스트에 삭제할 노드의 idx와 left, right를 추가한다. => rm 리스트는 추후에 명령어가 Z일 경우 필요 => 자신의 left 노드와 right 노드를 서로 이어준다 => 이 때 None일 경우의 예외처리 2.2 Z일 경우 => 삭제한 노드가 담겨있는 rm 리스트에서 pop => 되돌리려는 노드가 가장 마지막 노드일 경우와 아닌 경우를 나눈다. => 그리고 자신의 left, right 노드와 자신을 이어준다. => 자신의 left, right가 현재 삭제되있을 가능성은? => 없다. 왜냐하면 자신이 가장 최근에 삭제된 노드기 때문 => 자신의 left, right가 삭제되었다면 이미 또 다른 left, right값을 가지고 있을 것임 2.3 U, D 일 경우 => D면 idx 증가, U면 idx 감소. => 연결리스트의 장점이 드러나는 부분 => 그 외의 방법은 이동하면서 노드가 삭제되었는지 여부를 검사해야한다. => 연결리스트는 연결이 안돼있으면 이미 삭제된 것이기 때문에 검사할 필요가 없음. ''' def solution(n, k, cmd): dic = {} for i in range(0, n): dic[i] = [i-1, i+1] dic[0][0] = dic[n-1][1] = None rm = [] for c in cmd: if c == "C": rm.append([k, dic[k][0], dic[k][1]]) if dic[k][1] is None: k = dic[k][0] dic[k][1] = None else: if dic[k][0] is not None: dic[dic[k][0]][1] = dic[k][1] dic[dic[k][1]][0] = dic[k][0] k = dic[k][1] elif c == "Z": idx, left, right = rm.pop() if left is not None: dic[left][1] = idx if right is not None: dic[right][0] = idx dic[idx] = [left, right] else: move, steps = c.split() for _ in range(int(steps)): k = dic[k][int(move == "D")] answer = ["O"] * n for idx, _, _ in rm: answer[idx] = "X" return ''.join(answer) ''' 리스트로 이 문제를 구현하면 100% 시간초과 날 것을 예상했다. => 삭제 검사, 리스트 중간에 삽입 및 삭제가 일어날 것이기 떄문 그래서 각 리스트의 상태를 기억할 수 있도록 구조를 짰다. 굉장히 좋은 풀이라고 생각했는데 효율성에서 에러가 났다. (아마 실제 인턴십 코테에서도 저랬던 것 같다) 아래 코드는 리스트로 짠 코드. 4개의 테스트 케이스에서 시간초과가 나서 결국 버려야 했다 ㅠㅠ def solution(n, k, cmd): lst = ["O"] * n top = n-1 remove = [] for c in cmd: if c == "C": remove.append(k) lst[k] = "X" drt = 2 * (top != k) - 1 while lst[k+drt] == "X": k += drt k += drt while lst[top] == "X": top -= 1 elif c == "Z": idx = remove.pop() lst[idx] = "O" top = max(idx, top) else: move, steps = c.split() steps = int(steps) drt = 2 * (move == "D") - 1 while steps: k += drt steps -= lst[k] == "O" #print(c, lst, remove, top, k) return ''.join(lst) '''
[ "45033215+sangmandu@users.noreply.github.com" ]
45033215+sangmandu@users.noreply.github.com
009bcc09b6b0e01969e419f841aa942b60421c69
ce36737f134db1726bb189c17a729b9d3abba4e4
/assets/src/ba_data/python/bastd/ui/settings/gamepadselect.py
4c1b4fa15cb76c093ad443c0653ec961e3d5fead
[ "MIT" ]
permissive
Indev450/ballistica
0559940971c69b7596442abfc6ac2818a4987064
27420d3f64c24bf3c9b4b047177a4769977659b1
refs/heads/master
2023-07-20T16:01:04.586170
2020-04-13T09:32:36
2020-04-13T09:32:36
null
0
0
null
null
null
null
UTF-8
Python
false
false
6,740
py
# Copyright (c) 2011-2020 Eric Froemling # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in # all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. # ----------------------------------------------------------------------------- """Settings UI related to gamepad functionality.""" from __future__ import annotations from typing import TYPE_CHECKING import _ba import ba if TYPE_CHECKING: from typing import Dict, Any def gamepad_configure_callback(event: Dict[str, Any]) -> None: """Respond to a gamepad button press during config selection.""" from ba.internal import get_remote_app_name from bastd.ui.settings import gamepad # Ignore all but button-presses. if event['type'] not in ['BUTTONDOWN', 'HATMOTION']: return _ba.release_gamepad_input() try: ba.containerwidget(edit=ba.app.main_menu_window, transition='out_left') except Exception: ba.print_exception("Error transitioning out main_menu_window.") ba.playsound(ba.getsound('activateBeep')) ba.playsound(ba.getsound('swish')) inputdevice = event['input_device'] assert isinstance(inputdevice, ba.InputDevice) if inputdevice.allows_configuring: ba.app.main_menu_window = ( gamepad.GamepadSettingsWindow(inputdevice).get_root_widget()) else: width = 700 height = 200 button_width = 100 ba.app.main_menu_window = dlg = (ba.containerwidget( scale=1.7 if ba.app.small_ui else 1.4 if ba.app.med_ui else 1.0, size=(width, height), transition='in_right')) device_name = inputdevice.name if device_name == 'iDevice': msg = ba.Lstr(resource='bsRemoteConfigureInAppText', subs=[('${REMOTE_APP_NAME}', get_remote_app_name())]) else: msg = ba.Lstr(resource='cantConfigureDeviceText', subs=[('${DEVICE}', device_name)]) ba.textwidget(parent=dlg, position=(0, height - 80), size=(width, 25), text=msg, scale=0.8, h_align="center", v_align="top") def _ok() -> None: from bastd.ui.settings import controls ba.containerwidget(edit=dlg, transition='out_right') ba.app.main_menu_window = (controls.ControlsSettingsWindow( transition='in_left').get_root_widget()) ba.buttonwidget(parent=dlg, position=((width - button_width) / 2, 20), size=(button_width, 60), label=ba.Lstr(resource='okText'), on_activate_call=_ok) class GamepadSelectWindow(ba.Window): """Window for selecting a gamepad to configure.""" def __init__(self) -> None: from typing import cast width = 480 height = 170 spacing = 40 self._r = 'configGamepadSelectWindow' super().__init__(root_widget=ba.containerwidget( scale=2.3 if ba.app.small_ui else 1.5 if ba.app.med_ui else 1.0, size=(width, height), transition='in_right')) btn = ba.buttonwidget(parent=self._root_widget, position=(20, height - 60), 
size=(130, 60), label=ba.Lstr(resource='backText'), button_type='back', scale=0.8, on_activate_call=self._back) # Let's not have anything selected by default; its misleading looking # for the controller getting configured. ba.containerwidget(edit=self._root_widget, cancel_button=btn, selected_child=cast(ba.Widget, 0)) ba.textwidget(parent=self._root_widget, position=(20, height - 50), size=(width, 25), text=ba.Lstr(resource=self._r + '.titleText'), maxwidth=250, color=ba.app.title_color, h_align="center", v_align="center") ba.buttonwidget(edit=btn, button_type='backSmall', size=(60, 60), label=ba.charstr(ba.SpecialChar.BACK)) v: float = height - 60 v -= spacing ba.textwidget(parent=self._root_widget, position=(15, v), size=(width - 30, 30), scale=0.8, text=ba.Lstr(resource=self._r + '.pressAnyButtonText'), maxwidth=width * 0.95, color=ba.app.infotextcolor, h_align="center", v_align="top") v -= spacing * 1.24 if ba.app.platform == 'android': ba.textwidget(parent=self._root_widget, position=(15, v), size=(width - 30, 30), scale=0.46, text=ba.Lstr(resource=self._r + '.androidNoteText'), maxwidth=width * 0.95, color=(0.7, 0.9, 0.7, 0.5), h_align="center", v_align="top") _ba.capture_gamepad_input(gamepad_configure_callback) def _back(self) -> None: from bastd.ui.settings import controls _ba.release_gamepad_input() ba.containerwidget(edit=self._root_widget, transition='out_right') ba.app.main_menu_window = (controls.ControlsSettingsWindow( transition='in_left').get_root_widget())
[ "ericfroemling@gmail.com" ]
ericfroemling@gmail.com
e2d6533a67d8bebf792f226e1e5a1c797c7a7032
39d7ab29356ea5363c783d518b3c92f52c2ef8c2
/crawler/crawlers_BACKUP/sinarollcustom.py
9c3170f6636f92bf98ab84af04baecb8a1e39df5
[]
no_license
wdcpop/Web-Crawler
7daad778bbda9e5852248971845e3b448629175e
725037d17dfd2535e213df3cb7aafda523d39c03
refs/heads/master
2018-07-31T15:12:31.605633
2018-06-02T16:03:36
2018-06-02T16:03:36
121,194,531
4
1
null
null
null
null
UTF-8
Python
false
false
731
py
#!/usr/bin/env python # -*- encoding: utf-8 -*- from .abstracts.crawler_abstract import CrawlerAbstract import re from urlparse import urljoin from urlparse import urlsplit from arrow import Arrow import time class SINAROLLCUSTOM(CrawlerAbstract): title = u'新浪财经 - 滚动自选' start_urls = [ 'http://roll.news.sina.com.cn/s/channel.php?ch=01#col=96,97,98&spec=&type=&ch=01&k=&offset_page=0&offset_num=0&num=60&asc=&page=4' ] url_patterns = [ re.compile(r'(http://.*?\.sina\.com\.cn/[\w/]+?/\d{4}-\d{2}-\d{2}/doc-\w+?\.shtml)') ] content_selector = dict( title='#artibodyTitle', content='.article, #artibody', date_area='.time-source, #pub_date' )
[ "wdcpop@gmail.com" ]
wdcpop@gmail.com
2370bfe569782dc9bb6f537eb8495692b3be2571
51d5bd1f792f3a0fe1285c3ccdeefb58077890df
/anees/migrations/0027_auto_20200926_1337.py
33ea2f35e72c08589a1f98ef7070dd993ad8150f
[ "MIT" ]
permissive
ashish2020kashyap/cessini
667c2d4ab64f34121255a43c327b8110fa499d0b
9713fd76d2e31a95266ec69da2abc98424a46e52
refs/heads/master
2022-12-16T13:30:21.093504
2020-09-29T06:31:12
2020-09-29T06:31:12
299,510,700
0
0
null
null
null
null
UTF-8
Python
false
false
1,233
py
# Generated by Django 3.1.1 on 2020-09-26 13:37 from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('anees', '0026_auto_20200926_1336'), ] operations = [ migrations.AlterField( model_name='customer', name='customer_camp', field=models.ManyToManyField(blank=True, null=True, related_name='customer_camp', to='anees.Campaign'), ), migrations.AlterField( model_name='customer', name='customer_email', field=models.ManyToManyField(blank=True, null=True, related_name='customer_email', to='anees.Email'), ), migrations.AlterField( model_name='customer', name='name', field=models.CharField(blank=True, max_length=200, null=True), ), migrations.AlterField( model_name='customer', name='phone', field=models.CharField(blank=True, max_length=200, null=True), ), migrations.AlterField( model_name='email', name='camp', field=models.ManyToManyField(null=True, related_name='camp', to='anees.Campaign'), ), ]
[ "ashish160kumar@gmail.com" ]
ashish160kumar@gmail.com
0a1b40f9c2382b1f57e2db3116afbfc749929daf
1b2fc9666edbbdc65387c854831097e0be8b686c
/BOJ(Baekjoon Online Judge)/Mathematics/2004_조합 0의 개수(counting trailing zero in combination).py
cfc777b9dac881b237d3a84d4de3aec3d7992dda
[]
no_license
seongbeenkim/Algorithm-python
6593878cff8755800f3e8bcdaabdb41625324f38
24fe365a29c61c2405a06345f9105ed200a76bd5
refs/heads/master
2023-09-04T11:05:08.318769
2021-02-18T14:58:20
2021-02-18T14:58:20
null
0
0
null
null
null
null
UTF-8
Python
false
false
515
py
#https://www.acmicpc.net/problem/2004 import sys count_two = 0 count_five = 0 n, m = list(map(int,sys.stdin.readline().split())) i = 2 while n >= i: count_two += n // i i *= 2 i = 2 while (n-m) >= i: count_two -= (n-m) // i i *= 2 i = 2 while m >= i: count_two -= m // i i *= 2 i = 5 while n >= i: count_five += n // i i *= 5 i = 5 while (n-m) >= i: count_five -= (n-m) // i i *= 5 i = 5 while m >= i: count_five -= m // i i *= 5 print(min(count_two,count_five))
[ "seongbeen93@naver.com" ]
seongbeen93@naver.com
f0078d8d238fb95be3367e8c9a6724e692d2f892
d806dd4a6791382813d2136283a602207fb4b43c
/migrations/versions/414eda9f70d0_.py
2eb67987fdcb5ce4328978a6b6c70f45820f1622
[]
no_license
MarsStirner/sirius
5bbf2a03dafb7248db481e13aff63ff989fabbc2
8839460726cca080ca8549bacd3a498e519c8f96
refs/heads/master
2021-03-24T12:09:14.673193
2017-06-06T16:28:53
2017-06-06T16:28:53
96,042,947
0
0
null
null
null
null
UTF-8
Python
false
false
1,037
py
"""empty message Revision ID: 414eda9f70d0 Revises: f088f9315be0 Create Date: 2016-10-20 20:27:23.521000 """ # revision identifiers, used by Alembic. revision = '414eda9f70d0' down_revision = 'f088f9315be0' from alembic import op import sqlalchemy as sa from sqlalchemy.dialects import postgresql def upgrade(): ### commands auto generated by Alembic - please adjust! ### op.drop_constraint(u'_remote_entity_id_uc', 'matching_id', type_='unique') op.drop_index('ix_matching_id_remote_id', table_name='matching_id') op.drop_column(u'matching_id', 'remote_id') ### end Alembic commands ### def downgrade(): ### commands auto generated by Alembic - please adjust! ### op.add_column(u'matching_id', sa.Column('remote_id', sa.INTEGER(), autoincrement=False, nullable=False)) op.create_index('ix_matching_id_remote_id', 'matching_id', ['remote_id'], unique=False) op.create_unique_constraint(u'_remote_entity_id_uc', 'matching_id', ['remote_entity_id', 'remote_id']) ### end Alembic commands ###
[ "paschenko@bars-open.ru" ]
paschenko@bars-open.ru
c2fbf9b3a660a865fe1e20d672d48b4f0f4211bc
50084bf941b61791ac4def5b8cff950e7ddfdd15
/10 Advanced Language Techniques/Functional/04_currying.py
9bfdbe7774d011369592c47083061947c15d5c7e
[]
no_license
gjq91459/mycourse
6904ad191dc6128fb853eb8bdb1b200a46b2059f
211d7707e41f50495375b4a1dfc32b62b76b317b
refs/heads/master
2021-01-21T10:34:44.841349
2017-03-01T11:17:52
2017-03-01T11:17:52
83,456,979
1
0
null
null
null
null
UTF-8
Python
false
false
595
py
# conventional function that takes 5 args def func(a, b, c, d, e): return a, b, c, d, e print func(1, 2, 3, 4, 5) # curried version that takes 1 arg def f(a): def g(b): def h(c): def i(d): def j(e): return a, b, c, d, e return j return i return h return g # f can be called in a variety of ways a = f(1) b = f(1)(2) c = f(1)(2)(3) d = f(1)(2)(3)(4) e = f(1)(2)(3)(4)(5) # missing arguments can be supplied later print a(2)(3)(4)(5) print b(3)(4)(5) print c(4)(5) print d(5) print e
[ "gjq91459@diamtr341.diamond.ac.uk" ]
gjq91459@diamtr341.diamond.ac.uk
bd036bff8f5d9e56e55bb6ba97338a10bbbf2499
de24f83a5e3768a2638ebcf13cbe717e75740168
/moodledata/vpl_data/82/usersdata/231/43199/submittedfiles/decimal2bin.py
cfc4f3a70bf6f9df805aa03d52a228511ebb8f8f
[]
no_license
rafaelperazzo/programacao-web
95643423a35c44613b0f64bed05bd34780fe2436
170dd5440afb9ee68a973f3de13a99aa4c735d79
refs/heads/master
2021-01-12T14:06:25.773146
2017-12-22T16:05:45
2017-12-22T16:05:45
69,566,344
0
0
null
null
null
null
UTF-8
Python
false
false
151
py
# -*- coding: utf-8 -*- n=int(input('digite n: ')) soma=0 i=0 while n>0: resto =n%10 soma=soma+resto*(2**i) n=n//10 i=i+1 print(soma)
[ "rafael.mota@ufca.edu.br" ]
rafael.mota@ufca.edu.br
370b9d1e5738731995469d3572171a1eb0805860
b20cc37e0b986a0b458f7f446d5025beee01ba7a
/326-power-of-three/326-power-of-three.py
ee9a38938a90148c4442109da4f8e747ba9f4374
[]
no_license
Maruf-S/Competitve-programing
e5e405912a4c2d9a1fad35d66411964ecbc10d00
3134d9c1b7e987c6cec3c614512faab4114fe0bd
refs/heads/master
2023-02-04T08:08:43.374565
2023-02-01T17:29:55
2023-02-01T17:29:55
225,399,606
0
1
null
null
null
null
UTF-8
Python
false
false
183
py
class Solution: def isPowerOfThree(self, n: int) -> bool: if n==1: return True if n<1: return False return self.isPowerOfThree(n/3)
[ "56218443+Maruf-S@users.noreply.github.com" ]
56218443+Maruf-S@users.noreply.github.com
54a9a094e0ee5716cca0e893e3b82f367b83dbe1
a0dda8be5892a390836e19bf04ea1d098e92cf58
/7章之后刷题/7章/求一元二次方程的解.py
517c5ec018ad4fd2fec5dfce6394c34f9bf20cf7
[]
no_license
wmm98/homework1
d9eb67c7491affd8c7e77458ceadaf0357ea5e6b
cd1f7f78e8dbd03ad72c7a0fdc4a8dc8404f5fe2
refs/heads/master
2020-04-14T19:22:21.733111
2019-01-08T14:09:58
2019-01-08T14:09:58
164,055,018
1
0
null
null
null
null
UTF-8
Python
false
false
769
py
""" 【问题描述】一元二次方程:ax2+bx+c=0 (a ╪ 0) 【输入形式】输入a、b和c的值(有理数) 【输出形式】输出x的两个值,或者No(即没有有理数的解) 【样例输入】1 2.5 3 【样例输出】No 【样例输入】1 -2 1 【样例输出】1.00 1.00 【样例输出说明】输出的两个解保留两位小数,大的在前 """ import math a, b, c = input().split() if a != 0: a = float(a) b = float(b) c = float(c) if b ** 2 - 4 * a * c < 0: print("No") else: i = math.sqrt(b ** 2 - 4 * a * c) x1 = (-b + i) / (2 * a) x2 = (-b - i) / (2 * a) if x1 > x2: print("%.2f %.2f" % (x1, x2)) else: print("%.2f %.2f" % (x2, x1))
[ "792545884@qq.com" ]
792545884@qq.com
69219886e2fb7b8ea37b60e28d74a109c0dd00ec
508bef828c3ce1f1c53fbe52397632ebcb392a7f
/excercise/DataOutput.py
17f26dd4e5ecf58f8a646828a506123f3f85f981
[]
no_license
wudangqibujie/excercise
bc19a7923836aae9d12e1147b1d282250c0037c6
ff3e255e809e414fd43e7cf16e03466c91b613a2
refs/heads/master
2021-05-02T12:20:09.701592
2018-02-08T09:20:04
2018-02-08T09:20:04
120,738,926
0
0
null
null
null
null
UTF-8
Python
false
false
904
py
import codecs import json class DataOutput(object): def __init__(self): self.datas = [] def store_data(self,data): if data is None: return self.datas.append(data) def output_txt(self): print(self.datas) print(len(self.datas)) def output_html(self): fout = codecs.open("baike.html",'w',encoding="utf-8") fout.write("<html>") fout.write("<body>") fout.write("<table>") for data in self.datas: fout.write("<tr>") fout.write("<td>%s</td>"%data['url']) fout.write("<td>%s</td>"%data['title']) fout.write("<td>%s</td>"%data['summary']) fout.write("</tr>") self.datas.remove(data) fout.write("</table>") fout.write("</body>") fout.write("</html>") fout.close()
[ "noreply@github.com" ]
wudangqibujie.noreply@github.com
cb404b4d172d193fdd675a194c0f74c3dc2bcbec
7e8c799037f47345cb12a6fc7911610d7ac63640
/blog/models/kategory.py
db1f3aed151335e537ac97042a080087a708be89
[]
no_license
SonerArslan2019/Django_Blog_kilicarslan
bcfd953dfc0d530217c02ff9bf1428160a63e68e
8959708689b830c387c76414545add2474beeddd
refs/heads/master
2023-06-04T06:19:28.209246
2021-06-27T14:50:40
2021-06-27T14:50:40
346,781,400
0
0
null
null
null
null
UTF-8
Python
false
false
403
py
from django.db import models from autoslug import AutoSlugField class KategoriModel(models.Model): isim = models.CharField(max_length=30, blank=False, null=False) slug = AutoSlugField(populate_from='isim', unique=True) class Meta: db_table = 'kategori' verbose_name_plural = 'Kategoriler' verbose_name = 'Kategori' def __str__(self): return self.isim
[ "soner@arslanyapi.com.tr" ]
soner@arslanyapi.com.tr
314cb13e39d3721bdde09602caf9430c87651252
e2baefd54ed6f44d351d867b8d8eb937424fae23
/class-10/game-of-greed/tests/version_4/test_keep_scorers.py
ad3c4e7685ac39c0a0bdef3b04d08c355b08a355
[]
no_license
codefellows/seattle-python-401n2
60cc100b5b6dc8bc3d72784a5ec8f9c2c9db1942
24148a2ee6526104566b5df64945a40222cfb3e2
refs/heads/main
2023-04-23T15:35:49.820538
2021-04-24T18:02:51
2021-04-24T18:02:51
316,109,616
8
16
null
2021-03-10T05:36:09
2020-11-26T03:06:19
Jupyter Notebook
UTF-8
Python
false
false
460
py
import pytest from game_of_greed.game_logic import GameLogic pytestmark = [pytest.mark.version_4] @pytest.mark.parametrize( "test_input,expected", [ ((1, 1, 1, 2, 3, 4), (1, 1, 1)), ((1, 1, 5, 2, 3, 5), (1, 1, 5, 5)), ((1, 6, 5, 2, 3, 4), (1, 6, 5, 2, 3, 4)), ((1, 6, 5, 2, 3), (1, 5)), ], ) def test_get_scorers(test_input, expected): actual = GameLogic.get_scorers(test_input) assert actual == expected
[ "rogerhuba@gmail.com" ]
rogerhuba@gmail.com
270a1aae2fd8c1049b534b01bbc44c37a2580987
79e19819aec49b500825f82a7de149eb6a0ba81d
/leetcode/704.py
67ae34f57125b3c89b84138ec42378a347353c62
[]
no_license
seoyeonhwng/algorithm
635e5dc4a2e9e1c50dc0c75d9a2a334110bb8e26
90406ee75de69996e666ea505ff5d9045c2ad941
refs/heads/master
2023-05-03T16:51:48.454619
2021-05-26T00:54:40
2021-05-26T00:54:40
297,548,218
0
0
null
null
null
null
UTF-8
Python
false
false
174
py
class Solution: def search(self, nums: List[int], target: int) -> int: try: return nums.index(target) except ValueError: return -1
[ "seoyeon@nowbusking.com" ]
seoyeon@nowbusking.com
d63a8cf77a7bbfd03771436916c9f84472b354e1
13efb3baccc678f9d57776244c7dc067e486df9e
/students/migrations/0021_auto_20160517_1221.py
e9210404690fc2e9213f7ae9d9febd5fc7d78325
[]
no_license
grydinywka/studentsdb
fd109dfe60f4ffd666c12acbe645ca14e064c29b
bc2c968538f88bd539d931a7caf1b693fbb65843
refs/heads/master
2020-05-31T00:05:22.638475
2016-08-26T17:00:43
2016-08-26T17:00:43
31,331,814
0
0
null
null
null
null
UTF-8
Python
false
false
576
py
# -*- coding: utf-8 -*- from __future__ import unicode_literals from django.db import models, migrations import students.models.Result_exam class Migration(migrations.Migration): dependencies = [ ('students', '0020_auto_20160421_0936'), ] operations = [ migrations.AlterField( model_name='result_exam', name='valuetion', field=models.DecimalField(verbose_name='\u041e\u0446\u0456\u043d\u043a\u0430', max_digits=2, decimal_places=0, validators=[students.models.Result_exam.validate_value]), ), ]
[ "grydinywka@gmail.com" ]
grydinywka@gmail.com
0ef2bcc9c688462fe238bb21496e160b45812ca0
3e4b8fe54f11bf36f3615c21fdc1dca0ed00fe72
/month04/spider/day01/text/02_tieba.py
63d1010207234daf73d93db6df89f20b4e00ef29
[]
no_license
leinian85/year2019
30d66b1b209915301273f3c367bea224b1f449a4
2f573fa1c410e9db692bce65d445d0543fe39503
refs/heads/master
2020-06-21T20:06:34.220046
2019-11-04T06:37:02
2019-11-04T06:37:02
197,541,549
0
0
null
null
null
null
UTF-8
Python
false
false
1,495
py
from urllib import request from urllib import parse import random import time from fake_useragent import UserAgent import re class TiebaSpider: def __init__(self): self.url = "http://tieba.baidu.com/f?kw={}&pn={}" def set_headers(self): us = UserAgent() self.headers = {"User-Agent":us.random} # 获取响应内容 def get_page(self, url): self.set_headers() req = request.Request(url=url, headers=self.headers) res = request.urlopen(req) html = res.read().decode() return html # 解析,提取数据 def parse_page(self, name): return request.quote(name) # 保存数据 def write_page(self, filename, html): with open(filename, 'w') as f: f.write(html) def run(self, name, start, end): for page in range(start, end + 1): url = self.url.format(self.parse_page(name), (page - 1) * 50) filename = name + "_" + str(page) + ".html" html = self.get_page(url) print(url) # imgs = self.get_imgs(html) self.write_page(filename, html) print("第{}页抓取成功".format(page)) time.sleep(random.randint(1, 3)) def get_imgs(self, html): pattern = re.compile("",re.S) if __name__ == "__main__""": begin = time.time() spider = TiebaSpider() spider.run("赵丽颖", 1, 3) stop = time.time() print("执行时间%.2f" % (stop - begin))
[ "42737521@qq.com" ]
42737521@qq.com
8d1ae267007951a34533aae1e23eb15ed57cf4ee
0d5e40b598ee3ad2c0575a45857df49457a99cc7
/june/handlers/api.py
d6ab577e83fa85e62f43ad5ac12f2b2f71770f36
[ "BSD-3-Clause" ]
permissive
mitnk/june
7fdf928b7de452911a9d683bc50ed52a9f04085c
ab2b8e42e9b632923187333cd91af0f683c16ba6
refs/heads/master
2021-01-18T12:01:19.949051
2012-03-27T07:10:43
2012-03-27T07:10:43
null
0
0
null
null
null
null
UTF-8
Python
false
false
7,530
py
import math from tornado.options import options from june.lib.handler import BaseHandler from june.lib.decorators import require_user from june.models import Topic, Member, Reply from june.models.mixin import NotifyMixin class UpTopicHandler(BaseHandler): """Up a topic will increase impact of the topic, and increase reputation of the creator """ @require_user def post(self, id): topic = self.db.query(Topic).filter_by(id=id).first() if not topic: self.send_error(404) return user_id = self.current_user.id if topic.user_id == user_id: # you can't vote your own topic dct = {'status': 'fail', 'msg': 'cannot up vote your own topic'} self.write(dct) return if user_id in topic.down_users: # you can't up and down vote at the same time dct = {'status': 'fail', 'msg': 'cannot up vote your down topic'} self.write(dct) return creator = self.db.query(Member).filter_by(id=topic.user_id).first() up_users = list(topic.up_users) if user_id in up_users: up_users.remove(user_id) topic.ups = ','.join(str(i) for i in up_users) topic.impact -= self._calc_topic_impact() creator.reputation -= self._calc_user_impact() self.db.add(creator) self.db.add(topic) self.db.commit() dct = {'status': 'ok'} dct['data'] = {'action': 'cancel', 'count': len(up_users)} self.write(dct) self.cache.delete('topic:%s' % str(id)) return up_users.append(user_id) topic.ups = ','.join(str(i) for i in up_users) topic.impact += self._calc_topic_impact() creator.reputation += self._calc_user_impact() self.db.add(topic) self.db.add(creator) self.db.commit() dct = {'status': 'ok'} dct['data'] = {'action': 'active', 'count': len(up_users)} self.write(dct) self.cache.delete('topic:%s' % str(id)) return def _calc_topic_impact(self): if self.current_user.reputation < 2: return 0 factor = int(options.up_factor_for_topic) return factor * int(math.log(self.current_user.reputation)) def _calc_user_impact(self): if self.current_user.reputation < 2: return 0 factor = int(options.up_factor_for_user) impact = factor * int(math.log(self.current_user.reputation)) return min(impact, int(options.up_max_for_user)) class DownTopicHandler(BaseHandler): """Down a topic will reduce impact of the topic, and decrease reputation of the creator """ @require_user def post(self, id): topic = self.db.query(Topic).filter_by(id=id).first() if not topic: self.send_error(404) return user_id = self.current_user.id if topic.user_id == user_id: # you can't vote your own topic dct = {'status': 'fail', 'msg': "cannot down vote your own topic"} self.write(dct) return if user_id in topic.up_users: # you can't down and up vote at the same time dct = {'status': 'fail', 'msg': "cannot down vote your up topic"} self.write(dct) return creator = self.db.query(Member).filter_by(id=topic.user_id).first() down_users = list(topic.down_users) if user_id in down_users: #TODO: can you cancel a down vote ? 
down_users.remove(user_id) topic.downs = ','.join(str(i) for i in down_users) topic.impact += self._calc_topic_impact() creator.reputation += self._calc_user_impact() self.db.add(creator) self.db.add(topic) self.db.commit() dct = {'status': 'ok'} dct['data'] = {'action': 'cancel', 'count': len(down_users)} self.write(dct) self.cache.delete('topic:%s' % str(id)) return down_users.append(user_id) topic.downs = ','.join(str(i) for i in down_users) topic.impact -= self._calc_topic_impact() creator.reputation -= self._calc_user_impact() self.db.add(creator) self.db.add(topic) self.db.commit() dct = {'status': 'ok'} dct['data'] = {'action': 'active', 'count': len(down_users)} self.write(dct) self.cache.delete('topic:%s' % str(id)) return def _calc_topic_impact(self): if self.current_user.reputation < 2: return 0 factor = int(options.down_factor_for_topic) return factor * int(math.log(self.current_user.reputation)) def _calc_user_impact(self): if self.current_user.reputation < 2: return 0 factor = int(options.down_factor_for_user) impact = factor * int(math.log(self.current_user.reputation)) return min(impact, int(options.down_max_for_user)) class AcceptReplyHandler(BaseHandler, NotifyMixin): """Vote for a reply will affect the topic impact and reply user's reputation """ def _is_exist(self, topic_id, reply_id): reply = self.db.query(Reply).filter_by(id=reply_id).first() if not reply or reply.topic_id != int(topic_id): return False topic = self.db.query(Topic).filter_by(id=topic_id).first() if not topic: return False return reply, topic def _calc_user_impact(self): if self.current_user.reputation < 2: return 0 factor = int(options.accept_reply_factor_for_user) impact = factor * int(math.log(self.current_user.reputation)) return min(impact, int(options.vote_max_for_user)) def post(self, topic_id, reply_id): reply_topic = self._is_exist(topic_id, reply_id) if not reply_topic: self.send_error(404) return reply, topic = reply_topic user_id = self.current_user.id if user_id != topic.user_id: dct = {'status': 'fail', 'msg': 'you are not topic owner'} self.write(dct) return if user_id == reply.user_id: dct = {'status': 'fail', 'msg': 'cannot accept your own reply'} self.write(dct) return creator = self.db.query(Member).filter_by(id=reply.user_id).first() if reply.accepted == 'y': creator.reputation -= self._calc_user_impact() reply.accepted = 'n' self.db.add(creator) self.db.add(reply) self.db.commit() self.cache.delete('ReplyListModule:%s:1' % topic.id) dct = {'status': 'ok', 'data': 'cancel'} self.write(dct) return creator.reputation += self._calc_user_impact() reply.accepted = 'y' self.db.add(reply) self.db.add(creator) link = '/topic/%s' % topic.id self.create_notify(reply.user_id, topic.title, reply.content, link, 'accept') self.db.commit() self.cache.delete('ReplyListModule:%s:1' % topic.id) dct = {'status': 'ok', 'data': 'active'} self.write(dct) return handlers = [ ('/api/topic/(\d+)/up', UpTopicHandler), ('/api/topic/(\d+)/down', DownTopicHandler), ('/api/topic/(\d+)/(\d+)/accept', AcceptReplyHandler), ]
[ "lepture@me.com" ]
lepture@me.com
c44a12d77ff12294af2fe0d956eadc83432d93a5
77900cdd9a815caf1cd04705321ca93f5072179f
/Project/.history/main_20211116163259.py
e8cab25e7e1da7157cd5e01b04d2ba8c87d33c20
[]
no_license
Bom19990111/helloword_python
717799d994223d65de5adaeabecf396ff2bc1fb7
2ee2e67a60043f03c1ce4b070470c7d2dcdc72a7
refs/heads/master
2023-09-06T04:17:02.057628
2021-11-21T20:00:46
2021-11-21T20:00:46
407,063,273
0
1
null
2021-11-21T20:00:47
2021-09-16T07:18:35
Python
UTF-8
Python
false
false
2,680
py
import product as p def choose(): action = 0 while action >= 0: if action == 1: p.AddProduct() print("--------------------------------") elif action == 2: p.DeleteProduct() elif action == 3: p.UpdateProduct() elif action == 4: p.ShowAllProduct() elif action == 5: p.FindProductByName() elif action == 6: p.SortProductNameA_Z() print("Đã sắp xếp thành công! Vui lòng chọn số 4 để xem kết quả".upper()) print("********************************") elif action == 7: p.SortProductNameZ_A() print("Đã sắp xếp thành công! Vui lòng chọn số 4 để xem kết quả".upper()) print("********************************") elif action == 8: p.SortPriceAsc() print("Đã sắp xếp thành công! Vui lòng chọn số 4 để xem kết quả".upper()) print("********************************") elif action == 9: p.SortPriceDesc() print("Đã sắp xếp thành công! Vui lòng chọn số 4 để xem kết quả".upper()) print("********************************") elif action == 10: p.ImportExecel()() elif action == 11: p.ExportExecl() print("Vui lòng chọn chức năng bạn muốn: ") print("0. Thoát khỏi chương trình. ") print("1. Thêm mới sản phẩm. ") print("2. Xóa sản phẩm. ") print("3. Cập nhật thông tin sản phẩm. ") print("4. Xem danh sách tất cả sản phẩm. ") print("5. Tìm kiếm sản phẩm theo tên hoặc theo thương hiệu. ") print("6. Sắp xếp tên sản phẩm A-Z. ") print("7. Sắp xếp tên sản phẩm Z-A. ") print("8. Sắp xếp giá sản phẩm tăng dần. ") print("9. Sắp xếp tên sản phẩm giảm dần. ") print("10. Import file excel. ") print("11. Export file excel. ") try: action = int(input("Bạn chọn chức năng? ")) except ValueError: if action == 12: print("Không có chức năng bạn chọn, mời chọn lại!".upper()) else: print("Không có chức năng bạn chọn, mời chọn lại!".upper()) try: choose() except: print("Dừng chương trình!") if action == 0: print("Đã thoát chương trình") break choose()
[ "phanthituyngoc1995@gmail.com" ]
phanthituyngoc1995@gmail.com
a6793fef67f7d299ac2575aa4f3fdafd5fc99bae
89b0920101eaf09b0afb9a5449f3fabd68ac21c1
/analysis-blocks/scripts/SlowAdder.py
fd81750d4252600627d344e3bba39799ecfc459f
[]
no_license
metamorph-inc/openmeta-examples-and-templates
cffdcecf8b4fca1ea8ae4f8880a5f2720ec05f4b
4f6cc54510c742b9a3bf39338a5a01df510c1243
refs/heads/master
2023-04-10T06:25:28.691426
2020-03-12T23:20:36
2020-03-12T23:20:36
91,151,238
7
3
null
2018-12-03T23:01:19
2017-05-13T05:17:18
Python
UTF-8
Python
false
false
825
py
from __future__ import print_function from openmdao.api import IndepVarComp, Component, Problem, Group, FileRef import numpy as np import time class SlowAdder(Component): def __init__(self): super(SlowAdder, self).__init__() self.add_param("x", val=0.0) self.add_param("y", val=0.0) self.add_output("z", val=0.0) def solve_nonlinear(self, params, unknowns, resids): time.sleep(10) unknowns["z"] = params["x"] + params["y"] def main(): top = Problem() root = top.root = Group() root.add('Input', IndepVarComp([('x', 1.0), ('y', 2.0)])) root.add('p', SlowAdder()) root.connect('Input.x', 'p.x') root.connect('Input.y', 'p.y') top.setup() top.run() print('z =', root.p.unknowns['z']) if __name__ == "__main__": main()
[ "tthomas@metamorphsoftware.com" ]
tthomas@metamorphsoftware.com
892d1f7a730ca334b28fb0d84012c7de626b560d
e2ef58aa444e9e97ed26ef52bc69ac8bd79bc93e
/podoc/tests/test_utils.py
4e8a2b8169b447a65c22f3dda2df52035c514a17
[ "BSD-3-Clause" ]
permissive
willingc/podoc
1dc1c18f40e06b47760f0f6227ec70bd2dd1d19e
7021aec70d47a8a3a934c5799828f412e38b7c23
refs/heads/master
2021-01-17T10:04:47.414887
2016-04-02T09:56:55
2016-04-02T09:56:55
53,595,629
0
0
null
2016-03-10T15:32:25
2016-03-10T15:32:23
Python
UTF-8
Python
false
false
2,014
py
# -*- coding: utf-8 -*- """Test utility functions.""" #------------------------------------------------------------------------------ # Imports #------------------------------------------------------------------------------ import json import logging import os.path as op from pytest import mark, raises from ..utils import (Bunch, Path, load_text, dump_text, _get_file, assert_equal, pandoc, has_pandoc, get_pandoc_formats) logger = logging.getLogger(__name__) require_pandoc = mark.skipif(not(has_pandoc()), reason='pypandoc is not available') #------------------------------------------------------------------------------ # Tests #------------------------------------------------------------------------------ def test_bunch(): obj = Bunch() obj['a'] = 1 assert obj.a == 1 obj.b = 2 assert obj['b'] == 2 assert obj.copy().a == 1 def test_path(): print(Path(__file__)) assert Path(__file__).exists() def test_assert_equal(): assert_equal([0], [0]) assert_equal({'a': 1, 'b': [2, 3], '_c': 0}, {'a': 1, 'b': [2, 3], '_c': 1}) with raises(AssertionError): assert_equal({'a': 1, 'b': [2, 3], '_c': 0}, {'a': 1, 'b': [2, 4], '_c': 0}) #------------------------------------------------------------------------------ # Test file I/O #------------------------------------------------------------------------------ def test_open_dump_text(tempdir): path = op.join(tempdir, 'test.txt') dump_text('hello *world*', path) assert load_text(path) == 'hello *world*' assert _get_file(path, 'r').read() == 'hello *world*' with open(path, 'r') as f: assert _get_file(f, 'r').read() == 'hello *world*' def test_pandoc(): out = pandoc('hello *world*', 'json', format='markdown') assert isinstance(json.loads(out), list) sl, tl = get_pandoc_formats() assert 'markdown' in sl assert 'markdown' in tl
[ "cyrille.rossant@gmail.com" ]
cyrille.rossant@gmail.com
6a6fa620c02a9969300f1da35177f8acf5abe1d9
585bc7a21664e7a371950c4811723aae92256c92
/test.py
f3c46723e224226df8b89453fcbcfb8851c88fe4
[]
no_license
JacquesLucke/ml_test
bfb421ba6c423bfda545dac7aeabbcc81d71abd8
3c743b0b60dcf492a64bea2ed16c7edc4e6a6809
refs/heads/master
2023-06-03T20:24:46.733564
2021-06-19T17:08:10
2021-06-19T17:08:10
378,464,878
0
0
null
null
null
null
UTF-8
Python
false
false
613
py
import numpy as np weights = np.array( [ [[1, 2], [3, 4]], [[-1, -2], [-3, -4]], [[0, 3], [-1, 2]], ] ) data_in = np.array( [ [1, 2], [3, 4], ] ) print(weights.shape) print(weights) print(data_in.shape) print(data_in) print(np.tensordot(weights, data_in, 2)) expected_result = np.array( [ 1 + 4 + 9 + 16, # 30 -1 - 4 - 9 - 16, # -30 0 + 6 - 3 + 8, # 11 ] ) factor = np.array([10, 100, 1]) print(np.tensordot(factor, weights, 1)) # factor = np.broadcast_to(factor, (2, 2, 3)) # print(factor) # print(factor * weights)
[ "mail@jlucke.com" ]
mail@jlucke.com
8de2486482a883800948f7d6d08d5ce1676ba874
6466eef5477db250879a74935b3b776dc878ff3b
/ideas/views.py
e959cd77ba2866f690fca657fdcb6afc1b18108b
[]
no_license
BakdauletBolatE/django_ideas
8edb61a569f436865283e82edba3377a150665a8
ef0258f3aae0c090d38a5098d175bceaddcf67af
refs/heads/master
2023-03-12T00:02:04.969353
2021-03-02T19:41:00
2021-03-02T19:41:00
324,287,500
0
0
null
null
null
null
UTF-8
Python
false
false
359
py
from django.shortcuts import render, redirect from Content.models import Ideas,ICategory def home(request): lastideas = Ideas.objects.all()[:3] categories = ICategory.objects.all()[:4] data = { 'categories':categories, 'lastideas':lastideas } return render(request,'home/home.html',data)
[ "bakosh21345@gmail.com" ]
bakosh21345@gmail.com
824db027a189587196ece3814888aad0001898d6
d95a672d614ea547a79be582fc9e9e97a09f7d9d
/pack/ex3_18.py
2e00fe3851e1b7f4f70930d8e8514b207e749b6b
[]
no_license
vt0311/python
26992c096512df8d0304f6d8b452a663645a8b61
51fa4c240b9f69a81f68d75e3f6ffdd9dada8848
refs/heads/master
2021-09-06T12:01:40.433969
2018-02-06T09:28:32
2018-02-06T09:28:32
107,950,338
0
0
null
null
null
null
UTF-8
Python
false
false
1,487
py
print('방법1 : import 패키지명.모듈명') import pack.mymod1 print(dir(pack.mymod1)) # mymod1에 정의된 멤버 확인 print(pack.mymod1.__file__)# 경로 명 및 파일 명 print(pack.mymod1.__name__)# 모듈 명 print('mymod1의 함수 호출') list1 = [1, 3] list2 = [1, 2] pack.mymod1.ListHap(list1, list2) print('다른 모듈의 전역 변수 : ', pack.mymod1.tot) print('방법2 : from 패키지명 import 모듈명') from pack import mymod1 mymod1.Kbs()# 모듈명.함수명으로 호출한다. print('방법3 : from 패키지명.모듈명 import 함수명') from pack.mymod1 import Mbc Mbc() # 함수명으로 호출한다. print('패키지 경로가 다른 곳에 있는 모듈 읽기') import pack_other.mymod2 print('패키지명.모듈명.함수명()으로 호출') re_hap = pack_other.mymod2.Hap(5, 3) print('합 :', re_hap) print('차 :', pack_other.mymod2.Cha(5, 3)) # PythonPath : C:\Anaconda3\Lib 폴더에 mymod3.py 파일을 미리 복사해둔다. print('PythonPath가 설정된 폴더의 모듈 읽기 실습') import mymod3 print('곱1 :', mymod3.Gop(5, 3)) from mymod3 import * print('곱2 :', Gop(10, 5)) print('\n\n전혀 연관이 없는 폴더의 모듈 읽기') print('방법1 : Pythonpath에 해당 폴더를 추가한다.') print('방법2') import sys sys.path.append(r'c:/work') # run time 시점에 'c:/work'를 읽어 들이므로 문제 없음 # 아래 빨간 줄 무시 import mymod4 print('나누기 :', mymod4.Nanugi(5, 3))
[ "hsw0311@nate.com" ]
hsw0311@nate.com
82c582ff24e69ccb0acace9ec4ffa7596294a51b
8a4c32783b2c8e13eca88d193c767bd25e63306c
/algo/gnn/gat.py
6d087b9d8c3dafdbfde8dff45067056d7cf2b909
[]
no_license
fs302/GraphWorld
6765e4ba36d3af2ed5f820e52514096b3aeb10d7
9864eaca21f41117adf758f74379efa87692f5f8
refs/heads/master
2022-07-31T17:05:32.535380
2022-07-08T08:35:15
2022-07-08T08:35:15
196,156,937
5
5
null
null
null
null
UTF-8
Python
false
false
776
py
import torch from torch.nn import Linear import torch.nn.functional as F from torch_geometric.nn import GATConv class GAT(torch.nn.Module): def __init__(self, input_channels, hidden_channels, out_channels, heads, dropout_ratio=0.): super().__init__() torch.manual_seed(1234567) self.dropout_ratio = dropout_ratio self.conv1 = GATConv(input_channels, hidden_channels, heads) self.conv2 = GATConv(hidden_channels * heads, out_channels) def forward(self, x, edge_index): x = F.dropout(x, p=self.dropout_ratio, training=self.training) x = self.conv1(x, edge_index) x = F.elu(x) x = F.dropout(x, p=self.dropout_ratio, training=self.training) x = self.conv2(x, edge_index) return x
[ "fanshen.fs@alibaba-inc.com" ]
fanshen.fs@alibaba-inc.com
417dfcc3c5f7259d1d81b83fb9ee10f6e487a810
801f367bd19b8f2ab08669fd0a85aad7ace961ac
/rl-fmri/tmp_sort_by_covariance.py
b8f808ec8e32693436251db5c839ebcdb7657592
[ "MIT" ]
permissive
Wendong-Huo/thesis-bodies
d91b694a6b1b6a911476573ed1ed27eb27fb000d
dceb8a36efd2cefc611f6749a52b56b9d3572f7a
refs/heads/main
2023-04-17T18:32:38.541537
2021-03-12T19:53:23
2021-03-12T19:53:23
623,471,326
1
0
null
2023-04-04T12:45:48
2023-04-04T12:45:47
null
UTF-8
Python
false
false
635
py
import numpy as np N = 5 t_org = np.arange(N) pi = np.random.permutation(N) x_org = np.array([np.random.randn(100)*k for k in range(N)]) S_org = np.cov(x_org) print("Covariance of sorted time steps", S_org, sep="\n") t_obs = t_org[pi] x_obs = x_org[pi] S_obs = np.cov(x_obs) print("Covariance of unsorted time steps", S_obs, sep="\n") #%% Using indexing S[p][:,p] p = np.argsort(t_obs) print("Reconstruction equals original:", S_obs[p][:,p] == S_org, sep="\n") #%% Alternative using Permutation matrix P = np.eye(N)[p] print("Permutation matrix", P, sep="\n") print("Reconstruction equals original:", P@S_obs@P.T == S_org, sep="\n")
[ "sliu1@uvm.edu" ]
sliu1@uvm.edu
23ffd822cb9394060d8ed04bdd0e6a6f5ea26806
0df7f40b27cffe0b4e009041c35fc1e78e33f82d
/django_api/users/admin.py
c6a9e761a57586af5b9cd2a6a296661ba21db9e3
[ "MIT" ]
permissive
ridwanray/tay-twitter-microservices
5be11f166bd0e2dba298da1577549264315d0120
d5bdb6b6d4fd8333efbb4c79752f8c5efaccb1f0
refs/heads/master
2023-01-09T13:22:46.917407
2020-11-20T01:16:06
2020-11-20T01:16:06
null
0
0
null
null
null
null
UTF-8
Python
false
false
352
py
"""Admin App Customization is done here""" from django.contrib import admin from .models import User from django.contrib.auth.admin import UserAdmin class CustomUserAdmin(UserAdmin): """Custom Admin Manager for Custom USer Model""" fieldsets = UserAdmin.fieldsets + ( (None, {'fields': ("follower", "following")}), ) admin.site.register(User)
[ "tay2druh@gmail.com" ]
tay2druh@gmail.com
2e610648cd9fb047a28f7e2d76bec256eec5c645
55647258df0565f19179ffb97ac217708d84ba4a
/social/serializers/comments.py
4ac4833bd46f2e9cefdd1058d793c2201b05038b
[]
no_license
beatonma/snommoc
25de0e81af0d9940bdc3aa6420cb5764d50c6d11
0a9d37dcad112c5dd98609c1566e74176ae3d89d
refs/heads/main
2022-03-11T07:53:33.038649
2022-03-05T17:03:56
2022-03-05T17:03:56
188,595,195
0
0
null
2022-02-18T17:54:30
2019-05-25T17:35:58
Python
UTF-8
Python
false
false
1,696
py
import bleach from rest_framework import serializers from social.models.comments import Comment from social.models.mixins import get_target_kwargs from social.models.token import UserToken from social.views import contract class CommentSerializer(serializers.ModelSerializer): username = serializers.CharField(source="user.username") class Meta: model = Comment fields = [ contract.USER_NAME, contract.COMMENT_TEXT, "created_on", "modified_on", ] class PostCommentSerializer(serializers.ModelSerializer): def __init__(self, target, *args, **kwargs): super().__init__(*args, **kwargs) self.target = target token = serializers.CharField() def validate(self, data): original_text = data[contract.COMMENT_TEXT] stripped_text = bleach.clean( original_text, tags=[], attributes={}, styles=[], strip=True ) if original_text != stripped_text: data[contract.FLAGGED] = True data[contract.COMMENT_TEXT] = stripped_text return data class Meta: model = Comment fields = [ contract.USER_TOKEN, contract.COMMENT_TEXT, ] def update(self, instance, validated_data): pass def create(self, validated_data): comment, _ = Comment.objects.get_or_create( user=UserToken.objects.get(token=validated_data.get(contract.USER_TOKEN)), **get_target_kwargs(self.target), text=validated_data.get(contract.COMMENT_TEXT), flagged=validated_data.get(contract.FLAGGED, False), ) return comment
[ "beatonma@gmail.com" ]
beatonma@gmail.com
a3f8bde72496e9b464ce228a2d862429620305ee
e9a737a6a9101d201e1ddf4292b31da9c6ed5919
/ytree/frontends/rockstar/io.py
d16beae61c4767f34351067299f63bb27c35f7ae
[ "BSD-3-Clause" ]
permissive
brittonsmith/ytree
fd1305fc3f35c33741d5441e2c8b6a09cce2bb54
0c6a331f38c9758cca663ffd6e740183d359f7aa
refs/heads/main
2023-05-25T06:02:19.020508
2021-04-19T14:35:18
2021-04-19T14:35:18
203,828,654
0
0
NOASSERTION
2019-08-22T16:01:34
2019-08-22T16:01:34
null
UTF-8
Python
false
false
2,804
py
""" RockstarArbor io classes and member functions """ #----------------------------------------------------------------------------- # Copyright (c) ytree development team. All rights reserved. # # Distributed under the terms of the Modified BSD License. # # The full license is in the file COPYING.txt, distributed with this software. #----------------------------------------------------------------------------- import numpy as np from ytree.data_structures.io import \ CatalogDataFile from ytree.utilities.io import \ f_text_block class RockstarDataFile(CatalogDataFile): def __init__(self, filename, arbor): self.offsets = None super(RockstarDataFile, self).__init__(filename, arbor) def open(self): self.fh = open(self.filename, "r") def _parse_header(self): self.open() f = self.fh f.seek(0, 2) self.file_size = f.tell() f.seek(0) while True: line = f.readline() if line is None: self._hoffset = f.tell() break elif not line.startswith("#"): self._hoffset = f.tell() - len(line) break elif line.startswith("#a = "): self.scale_factor = float(line.split(" = ")[1]) self.close() def _read_data_default(self, rfields, dtypes): if not rfields: return {} fi = self.arbor.field_info field_data = \ self._create_field_arrays(rfields, dtypes) offsets = [] self.open() f = self.fh f.seek(self._hoffset) file_size = self.file_size - self._hoffset for line, offset in f_text_block(f, file_size=file_size): offsets.append(offset) sline = line.split() for field in rfields: field_data[field].append(sline[fi[field]["column"]]) self.close() for field in rfields: field_data[field] = \ np.array(field_data[field], dtype=dtypes[field]) if self.offsets is None: self.offsets = np.array(offsets) return field_data def _read_data_select(self, rfields, tree_nodes, dtypes): if not rfields: return {} fi = self.arbor.field_info nt = len(tree_nodes) field_data = \ self._create_field_arrays(rfields, dtypes, size=nt) self.open() f = self.fh for i in range(nt): f.seek(self.offsets[tree_nodes[i]._fi]) line = f.readline() sline = line.split() for field in rfields: dtype = dtypes[field] field_data[field][i] = dtype(sline[fi[field]["column"]]) self.close() return field_data
[ "brittonsmith@gmail.com" ]
brittonsmith@gmail.com
799a2ad2a3aed25738677f3c563458a4cd38017d
641fa8341d8c436ad24945bcbf8e7d7d1dd7dbb2
/content/DEPS
e891ebc72b9fd7dbdb01980defa7890b1268ebd9
[ "BSD-3-Clause" ]
permissive
massnetwork/mass-browser
7de0dfc541cbac00ffa7308541394bac1e945b76
67526da9358734698c067b7775be491423884339
refs/heads/master
2022-12-07T09:01:31.027715
2017-01-19T14:29:18
2017-01-19T14:29:18
73,799,690
4
4
BSD-3-Clause
2022-11-26T11:53:23
2016-11-15T09:49:29
null
UTF-8
Python
false
false
3,816
# Do NOT add chrome to the list below. We shouldn't be including files
# from src/chrome in src/content.
include_rules = [
  # The subdirectories in content/ will manually allow their own include
  # directories in content/ so we disallow all of them.
  "-content",
  "+content/app/resources/grit/content_resources.h",
  "+content/common",
  "+content/grit",
  "+content/public/common",
  "+content/public/test",
  "+content/test",
  "+blink/public/resources/grit",
  "+cc",
  "-cc/blink",
  # If you want to use any of these files, move them to src/base first.
  "-cc/base/scoped_ptr_algorithm.h",
  "-cc/base/scoped_ptr_deque.h",
  "-cc/base/scoped_ptr_vector.h",
  "-components",
  # Content can depend on components that are:
  # 1) related to the implementation of the web platform
  # 2) shared code between third_party/WebKit and content
  # It should not depend on chrome features or implementation details, i.e. the
  # original components/ directories which was code split out from chrome/ to be
  # shared with iOS. This includes, but isn't limited to, browser features such
  # as autofill or extensions, and chrome implementation details such as
  # settings, packaging details, installation or crash reporting.
  "+crypto",
  "+grit/blink_resources.h",
  "+grit/content_strings.h",
  "+dbus",
  "+gpu",
  "+media",
  "+mojo/common",
  "+mojo/edk/embedder",
  "+mojo/edk/js",
  "+mojo/edk/test",
  "+mojo/message_pump",
  "+mojo/public",
  "+net",
  "+ppapi",
  "+printing",
  "+sandbox",
  "+skia",
  # In general, content/ should not rely on google_apis, since URLs
  # and access tokens should usually be provided by the
  # embedder.
  #
  # There are a couple of specific parts of content that are excepted
  # from this rule, e.g. content/browser/speech/DEPS. These are cases of
  # implementations that are strongly tied to Google servers, i.e. we
  # don't expect alternate implementations to be provided by the
  # embedder.
  "-google_apis",
  # Don't allow inclusion of these other libs we shouldn't be calling directly.
  "-v8",
  "-tools",
  # Allow inclusion of third-party code:
  "+third_party/angle",
  "+third_party/boringssl/src/include",
  "+third_party/flac",
  "+third_party/libjingle",
  "+third_party/mozilla",
  "+third_party/ocmock",
  "+third_party/re2",
  "+third_party/skia",
  "+third_party/sqlite",
  "+third_party/khronos",
  "+third_party/webrtc",
  "+third_party/webrtc_overrides",
  "+third_party/zlib/google",
  "+third_party/WebKit/public",
  "+ui/accelerated_widget_mac",
  "+ui/accessibility",
  "+ui/android",
  # Aura is analogous to Win32 or a Gtk, so it is allowed.
  "+ui/aura",
  "+ui/base",
  "+ui/compositor",
  "+ui/display",
  "+ui/events",
  "+ui/gfx",
  "+ui/gl",
  "+ui/native_theme",
  "+ui/ozone/public",
  "+ui/resources/grit/ui_resources.h",
  "+ui/resources/grit/webui_resources.h",
  "+ui/resources/grit/webui_resources_map.h",
  "+ui/shell_dialogs",
  "+ui/snapshot",
  "+ui/strings/grit/ui_strings.h",
  "+ui/surface",
  "+ui/touch_selection",
  "+ui/wm",
  # Content knows about grd files, but the specifics of how to get a resource
  # given its id is left to the embedder.
  "-ui/base/l10n",
  "-ui/base/resource",
  # These files aren't related to grd, so they're fine.
  "+ui/base/l10n/l10n_util_android.h",
  "+ui/base/l10n/l10n_util_win.h",
  # Content shouldn't depend on views. While we technically don't need this
  # line, since the top level DEPS doesn't allow it, we add it to make this
  # explicit.
  "-ui/views",
  "+storage/browser",
  "+storage/common",
  # For generated JNI includes.
  "+jni",
]

# content -> content/shell dependency is not allowed, except for browser tests.
specific_include_rules = {
  ".*_browsertest[a-z_]*\.(cc|h)": [
    "+content/shell/browser",
    "+content/shell/common",
  ],
}
[ "xElvis89x@gmail.com" ]
xElvis89x@gmail.com
2ff06a22caf04d6abf9ee0dadb6a814e357ba72f
48832d27da16256ee62c364add45f21b968ee669
/res/scripts/client/gui/scaleform/daapi/view/lobby/clans/search/clansearchinfo.py
effd5375670fa8314a02b23ce4895ec79def0848
[]
no_license
webiumsk/WOT-0.9.15.1
0752d5bbd7c6fafdd7f714af939ae7bcf654faf7
17ca3550fef25e430534d079876a14fbbcccb9b4
refs/heads/master
2021-01-20T18:24:10.349144
2016-08-04T18:08:34
2016-08-04T18:08:34
64,955,694
0
0
null
null
null
null
WINDOWS-1250
Python
false
false
7,782
py
# 2016.08.04 19:50:21 Střední Evropa (letní čas)
# Embedded file name: scripts/client/gui/Scaleform/daapi/view/lobby/clans/search/ClanSearchInfo.py
import weakref
import BigWorld
from adisp import process
from gui import SystemMessages
from gui.clans import formatters as clans_fmts
from gui.clans.clan_controller import g_clanCtrl
from gui.clans.contexts import CreateApplicationCtx
from gui.clans.clan_helpers import ClanListener
from gui.clans.items import formatField
from gui.clans.settings import CLIENT_CLAN_RESTRICTIONS, MAX_CLAN_MEMBERS_COUNT
from gui.Scaleform.daapi.view.lobby.profile.ProfileUtils import HeaderItemsTypes, ProfileUtils
from gui.Scaleform.daapi.view.meta.ClanSearchInfoMeta import ClanSearchInfoMeta
from gui.Scaleform.locale.CLANS import CLANS
from gui.shared.formatters import text_styles
from gui.shared.utils.functions import makeTooltip
from gui.shared.view_helpers import ClanEmblemsHelper
from helpers.i18n import makeString as _ms
from gui.shared import event_dispatcher as shared_events

def _packItemData(text, description, tooltip, icon):
    return {'type': HeaderItemsTypes.COMMON,
     'text': text,
     'description': _ms(description),
     'iconPath': ProfileUtils.getIconPath(icon),
     'tooltip': tooltip,
     'enabled': True}


class ClanSearchInfo(ClanSearchInfoMeta, ClanListener, ClanEmblemsHelper):

    def __init__(self):
        super(ClanSearchInfo, self).__init__()
        self.__dataProvider = None
        self.__selectedClan = None
        return

    def bindDataProvider(self, dataProvider):
        self.__dataProvider = weakref.proxy(dataProvider)

    def openClanProfile(self):
        shared_events.showClanProfileWindow(self.__selectedClan.getClanDbID(), self.__selectedClan.getClanAbbrev())

    def onAccountClanProfileChanged(self, profile):
        self._updateSetaledState()

    @process
    def sendRequest(self):
        self.as_setWaitingVisibleS(True)
        context = CreateApplicationCtx([self.__selectedClan.getClanDbID()])
        result = yield g_clanCtrl.sendRequest(context, allowDelay=True)
        if result.isSuccess():
            SystemMessages.pushMessage(clans_fmts.getAppSentSysMsg(self.__selectedClan.getClanName(), self.__selectedClan.getClanAbbrev()))
        self._updateSetaledState()
        self.as_setWaitingVisibleS(False)

    def requestData(self, clanId):
        self.__selectedClan = self.__dataProvider.getClanInfo(clanId)
        self._updateDetailedInfo()
        self._updateClanEmblem()
        self._updateSetaledState()

    def onClanEmblem128x128Received(self, clanDbID, emblem):
        if clanDbID == self.__selectedClan.getClanDbID():
            self.as_setEmblemS(self.getMemoryTexturePath(emblem))

    def _populate(self):
        super(ClanSearchInfo, self)._populate()
        self.__initControls()

    def _updateClanEmblem(self):
        self.requestClanEmblem128x128(self.__selectedClan.getClanDbID())

    def _updateDetailedInfo(self):
        clanID = self.__selectedClan.getClanDbID()
        clanName = formatField(self.__selectedClan.getClanFullName)
        creationDate = formatField(getter=self.__selectedClan.getCreationDate, formatter=BigWorld.wg_getShortDateFormat)
        rating = formatField(getter=self.__selectedClan.getPersonalRating, formatter=BigWorld.wg_getIntegralFormat)
        battlesCount = formatField(getter=self.__selectedClan.getBattlesCount, formatter=BigWorld.wg_getIntegralFormat)
        wins = formatField(getter=self.__selectedClan.getBattleXpAvg, formatter=lambda value: BigWorld.wg_getNiceNumberFormat(value) + '%')
        avgExp = formatField(getter=self.__selectedClan.getBattlesPerformanceAvg, formatter=BigWorld.wg_getIntegralFormat)
        stats = [_packItemData(battlesCount, CLANS.SEARCH_INFO_STATS_BATTLES, CLANS.SEARCH_INFO_STATS_BATTLES_TOOLTIP, 'avgBattlesCount40x32.png'),
         _packItemData(wins, CLANS.SEARCH_INFO_STATS_WINS, CLANS.SEARCH_INFO_STATS_WINS_TOOLTIP, 'avgWins40x32.png'),
         _packItemData(avgExp, CLANS.SEARCH_INFO_STATS_AVGEXP, CLANS.SEARCH_INFO_STATS_AVGEXP_TOOLTIP, 'avgExp40x32.png')]
        self.as_setDataS({'clanId': clanID,
         'clanName': clanName,
         'creationDate': text_styles.main(_ms(CLANS.SEARCH_INFO_CREATIONDATE, date=creationDate)),
         'rating': text_styles.promoTitle(rating),
         'stats': stats})

    def _updateSetaledState(self):
        requestSentVisible = False
        sendRequestBtnVisible = True
        sendRequestBtnEnabled = True
        sendRequestTooltip = None
        reason = g_clanCtrl.getLimits().canSendApplication(_ClanAdapter(self.__selectedClan)).reason
        if reason == CLIENT_CLAN_RESTRICTIONS.NO_RESTRICTIONS:
            pass
        elif reason == CLIENT_CLAN_RESTRICTIONS.OWN_CLAN:
            sendRequestBtnVisible = False
        elif reason == CLIENT_CLAN_RESTRICTIONS.ALREADY_IN_CLAN:
            sendRequestBtnVisible = False
        elif reason == CLIENT_CLAN_RESTRICTIONS.CLAN_IS_FULL:
            sendRequestBtnEnabled = False
            sendRequestTooltip = makeTooltip(CLANS.SEARCH_INFO_BANNED_TOOLTIP_HEADER, text_styles.error(_ms(CLANS.SEARCH_INFO_BANNED_TOOLTIP_BODY)))
        elif reason == CLIENT_CLAN_RESTRICTIONS.CLAN_INVITE_ALREADY_RECEIVED:
            sendRequestBtnEnabled = False
            sendRequestTooltip = CLANS.SEARCH_INFO_INVITEALREADYACHIEVED_TOOLTIP
        elif reason == CLIENT_CLAN_RESTRICTIONS.CLAN_APPLICATION_ALREADY_SENT:
            sendRequestBtnEnabled = False
            sendRequestTooltip = CLANS.SEARCH_INFO_REQUESTALREADYSENT_TOOLTIP
        elif reason == CLIENT_CLAN_RESTRICTIONS.SENT_INVITES_LIMIT_REACHED:
            sendRequestBtnEnabled = False
            sendRequestTooltip = CLANS.SEARCH_INFO_REQUESTSLIMITEXCEEDED_TOOLTIP
        elif reason == CLIENT_CLAN_RESTRICTIONS.CLAN_CONSCRIPTION_CLOSED:
            sendRequestBtnEnabled = False
            sendRequestTooltip = CLANS.SEARCH_INFO_REQUESTSARENOTACCEPTED_TOOLTIP
        elif reason == CLIENT_CLAN_RESTRICTIONS.FORBIDDEN_ACCOUNT_TYPE:
            sendRequestBtnEnabled = False
            sendRequestTooltip = makeTooltip(CLANS.SEARCH_INFO_FORBIDDENACCOUNTTYPE_TOOLTIP_HEADER, text_styles.error(_ms(CLANS.SEARCH_INFO_FORBIDDENACCOUNTTYPE_TOOLTIP_BODY)))
        else:
            sendRequestBtnVisible = False
        self.as_setStateDataS({'requestSentVisible': requestSentVisible,
         'sendRequestBtnVisible': sendRequestBtnVisible,
         'sendRequestBtnEnabled': sendRequestBtnEnabled,
         'sendRequestTooltip': sendRequestTooltip,
         'alertIconVisible': sendRequestBtnVisible and not sendRequestBtnEnabled})
        return

    def __initControls(self):
        self.as_setInitDataS({'ratingDescription': text_styles.stats(_ms(CLANS.SEARCH_INFO_RATINGDESCRIPTION)),
         'ratingTooltip': CLANS.SEARCH_INFO_RATINGDESCRIPTION_TOOLTIP,
         'requestSent': text_styles.success(_ms(CLANS.SEARCH_INFO_REQUESTSENT)),
         'clanProfileBtnLabel': _ms(CLANS.SEARCH_INFO_CLANPROFILEBTN),
         'sendRequestBtnLabel': _ms(CLANS.SEARCH_INFO_SENDREQUESTBTN)})
        self.as_setWaitingVisibleS(False)


class _ClanAdapter(object):

    def __init__(self, clanInfo):
        super(_ClanAdapter, self).__init__()
        self.__clanInfo = clanInfo

    def getDbID(self):
        return self.__clanInfo.getClanDbID()

    def canAcceptsJoinRequests(self):
        return self.__clanInfo.canAcceptsJoinRequests()

    def hasFreePlaces(self):
        return MAX_CLAN_MEMBERS_COUNT - self.__clanInfo.getMembersCount() > 0
# okay decompyling c:\Users\PC\wotsources\files\originals\res\scripts\client\gui\scaleform\daapi\view\lobby\clans\search\clansearchinfo.pyc
# decompiled 1 files: 1 okay, 0 failed, 0 verify failed
# 2016.08.04 19:50:21 Střední Evropa (letní čas)
[ "info@webium.sk" ]
info@webium.sk
359f4e23b3ef1e9e4e2aec0b35aeccbd462a008b
99c6e64c0bf533e702576c516c3092bf5e641637
/server.py
c4ec64b92fe2f6b3466ab170489f3924ceec64fd
[ "MIT" ]
permissive
rjc-development/remote-desktop-control
67ff0f1ae3d7c507f269d982540bacfa666dd322
c138d6665a25053b4001c4e0c12ff851e401dc3f
refs/heads/master
2022-12-22T02:26:46.556215
2020-09-25T20:01:02
2020-09-25T20:01:02
298,668,093
3
1
null
2020-09-25T19:59:46
2020-09-25T19:59:45
null
UTF-8
Python
false
false
1,761
py
from starlette.applications import Starlette
from starlette.websockets import WebSocketDisconnect
import json
import logging
import uvicorn

logger = logging.getLogger(__name__)
logger.setLevel(logging.DEBUG)

app = Starlette()

websockets = {
    'web': {},
    'desktop': {},
}


async def receive_json(websocket):
    message = await websocket.receive_text()
    return json.loads(message)


@app.websocket_route('/ws')
async def websocket_endpoint(websocket):
    await websocket.accept()

    # "Authentication" message
    message = await receive_json(websocket)
    client_mode = message['client_mode']
    client_id = message['client_id']
    websockets[client_mode][client_id] = websocket

    # Get mirror mode to broadcast messages to the client on the other side
    mirror_mode = 'web' if client_mode == 'desktop' else 'desktop'

    client_string = f'{client_id}[{client_mode}]'
    logger.info(f'Client connected: {client_string}')

    while (True):
        try:
            # Wait for a message from the client
            message = await receive_json(websocket)
            logger.debug(f'Message received from {client_string}: {message}')
            try:
                # Broadcast it to the mirror client
                await websockets[mirror_mode][client_id].send_text(
                    json.dumps(message)
                )
            except KeyError:
                logger.debug(
                    f'Client {client_id}[{mirror_mode}] not connected'
                )
        except WebSocketDisconnect:
            break

    del websockets[client_mode][client_id]
    await websocket.close()
    logger.info(f'Client disconnected: {client_string}')


if __name__ == '__main__':
    uvicorn.run(app, host='0.0.0.0', port=8000)
[ "fvoron@gmail.com" ]
fvoron@gmail.com
294636258c2d0c16eda8eebc1460b1c2b8febfb3
8cf3a19eb3d0f69e5c0237fc504977330e95aac2
/workflow/scripts/manticore-plotvcf.py
6fc8c870e989714519620809b58213b502db8b43
[ "MIT" ]
permissive
NBISweden/manticore-smk
0c46ab5da5cdf7a40806bfef5ea05558efea8c5e
fd0b4ccd4239dc91dac423d0ea13478d36702561
refs/heads/main
2023-08-13T05:44:36.125066
2021-10-19T19:12:55
2021-10-19T19:12:55
308,556,800
1
2
MIT
2021-05-13T07:15:20
2020-10-30T07:33:11
Python
UTF-8
Python
false
false
3,059
py
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import os
import sys
import shutil
import argparse
import random
import numpy as np
import allel
import zarr
import numcodecs
import matplotlib.pyplot as plt
import seaborn as sns

sns.set_style("white")
sns.set_style("ticks")

populations = ["CHS", "YRI"]
pop_colours = {"CHS": "#FF0000", "YRI": "#008000"}
sample_population = ["CHS", "YRI"]


def do_pca(x, n, ncomp=10):
    vidx = np.random.choice(x.shape[0], n, replace=False)
    vidx.sort()
    y = x.take(vidx, axis=0)
    coords, model = allel.pca(y, n_components=ncomp, scaler="patterson")
    return coords, model


# Taken from http://alimanfoo.github.io/2015/09/28/fast-pca.html
def plot_pca_coords(coords, model, pc1, pc2, ax, sample_population):
    sns.despine(ax=ax, offset=5)
    x = coords[:, pc1]
    y = coords[:, pc2]
    for pop in populations:
        flt = sample_population == pop
        ax.plot(
            x[flt],
            y[flt],
            marker="o",
            linestyle=" ",
            color=pop_colours[pop],
            label=pop,
            markersize=6,
            mec="k",
            mew=0.5,
        )
    ax.set_xlabel(
        "PC%s (%.1f%%)" % (pc1 + 1, model.explained_variance_ratio_[pc1] * 100)
    )
    ax.set_ylabel(
        "PC%s (%.1f%%)" % (pc2 + 1, model.explained_variance_ratio_[pc2] * 100)
    )


def fig_pca(coords, model, title, sample_population):
    # plot coords for PCs 1 vs 2, 3 vs 4
    fig = plt.figure(figsize=(10, 5))
    ax = fig.add_subplot(1, 2, 1)
    plot_pca_coords(coords, model, 0, 1, ax, sample_population)
    # ax = fig.add_subplot(1, 2, 2)
    # plot_pca_coords(coords, model, 2, 3, ax, sample_population)
    ax.legend(bbox_to_anchor=(1, 1), loc="upper left")
    fig.suptitle(title, y=1.02)
    fig.tight_layout()
    return fig


if __name__ == "__main__":
    if snakemake.params.options != "":
        options = snakemake.params.options.split(" ")
        sys.argv.extend(options)

    parser = argparse.ArgumentParser(description="manticore-plotvcf option parser")
    parser.add_argument(
        "--subsample-size", metavar="n", type=int, help="subsample size", default=10000
    )
    args = parser.parse_args()

    vcf_path = str(snakemake.input.vcf)
    output = snakemake.output[0]
    outdir = os.path.dirname(output)
    plottype = snakemake.wildcards.type
    dev = snakemake.wildcards.ext

    ## Convert to zarr
    zarr_path = os.path.join(outdir, os.path.basename(vcf_path) + ".zarr")
    allel.vcf_to_zarr(
        vcf_path,
        zarr_path,
        log=sys.stdout,
        fields="*",
        alt_number=8,
        compressor=numcodecs.Blosc(cname="zstd", clevel=1, shuffle=False),
    )
    callset = zarr.open_group(zarr_path, mode="r")
    g = allel.GenotypeChunkedArray(callset["calldata/GT"])
    n = min(len(g), args.subsample_size)
    gn = g.to_n_alt()
    coords, model = do_pca(gn, n, ncomp=2)
    fig = fig_pca(coords, model, "PCA of first four components", sample_population)
    fig.savefig(output)
    shutil.rmtree(zarr_path)
[ "per.unneberg@scilifelab.se" ]
per.unneberg@scilifelab.se
eb2ca5d1b00bedc68345d9103057a0bd38c3425f
a79da24bda658f588fd8e71c7e63f01931c1a694
/bigapple/venv/lib/python3.7/site-packages/plotly/graph_objs/bar/_stream.py
8f528873536d267aa02021460ab7bf8155d2a086
[]
no_license
replicantdeca/bigapple-insys
60519b486f13e1a3eb18b5ba637e45deaf8e1d8e
5e7328fb94362fbb04a71c2e297bffd83443eebc
refs/heads/master
2020-03-27T12:57:31.894182
2019-12-01T11:25:13
2019-12-01T11:25:13
146,580,916
0
1
null
2018-08-29T10:00:28
2018-08-29T10:00:27
null
UTF-8
Python
false
false
3,812
py
from plotly.basedatatypes import BaseTraceHierarchyType
import copy


class Stream(BaseTraceHierarchyType):

    # maxpoints
    # ---------
    @property
    def maxpoints(self):
        """
        Sets the maximum number of points to keep on the plots from an
        incoming stream. If `maxpoints` is set to 50, only the newest
        50 points will be displayed on the plot.

        The 'maxpoints' property is a number and may be specified as:
          - An int or float in the interval [0, 10000]

        Returns
        -------
        int|float
        """
        return self['maxpoints']

    @maxpoints.setter
    def maxpoints(self, val):
        self['maxpoints'] = val

    # token
    # -----
    @property
    def token(self):
        """
        The stream id number links a data trace on a plot with a
        stream. See https://plot.ly/settings for more details.

        The 'token' property is a string and must be specified as:
          - A non-empty string

        Returns
        -------
        str
        """
        return self['token']

    @token.setter
    def token(self, val):
        self['token'] = val

    # property parent name
    # --------------------
    @property
    def _parent_path_str(self):
        return 'bar'

    # Self properties description
    # ---------------------------
    @property
    def _prop_descriptions(self):
        return """\
        maxpoints
            Sets the maximum number of points to keep on the plots
            from an incoming stream. If `maxpoints` is set to 50,
            only the newest 50 points will be displayed on the
            plot.
        token
            The stream id number links a data trace on a plot with
            a stream. See https://plot.ly/settings for more
            details.
        """

    def __init__(self, arg=None, maxpoints=None, token=None, **kwargs):
        """
        Construct a new Stream object

        Parameters
        ----------
        arg
            dict of properties compatible with this constructor or
            an instance of plotly.graph_objs.bar.Stream
        maxpoints
            Sets the maximum number of points to keep on the plots
            from an incoming stream. If `maxpoints` is set to 50,
            only the newest 50 points will be displayed on the
            plot.
        token
            The stream id number links a data trace on a plot with
            a stream. See https://plot.ly/settings for more
            details.

        Returns
        -------
        Stream
        """
        super(Stream, self).__init__('stream')

        # Validate arg
        # ------------
        if arg is None:
            arg = {}
        elif isinstance(arg, self.__class__):
            arg = arg.to_plotly_json()
        elif isinstance(arg, dict):
            arg = copy.copy(arg)
        else:
            raise ValueError(
                """\
The first argument to the plotly.graph_objs.bar.Stream
constructor must be a dict or
an instance of plotly.graph_objs.bar.Stream"""
            )

        # Import validators
        # -----------------
        from plotly.validators.bar import (stream as v_stream)

        # Initialize validators
        # ---------------------
        self._validators['maxpoints'] = v_stream.MaxpointsValidator()
        self._validators['token'] = v_stream.TokenValidator()

        # Populate data dict with properties
        # ----------------------------------
        _v = arg.pop('maxpoints', None)
        self.maxpoints = maxpoints if maxpoints is not None else _v
        _v = arg.pop('token', None)
        self.token = token if token is not None else _v

        # Process unknown kwargs
        # ----------------------
        self._process_kwargs(**dict(arg, **kwargs))
[ "mgfcrespo@gmail.com" ]
mgfcrespo@gmail.com
92108883e03e44c0b266ef7a49d9dea628e31e41
61b475c33745dbe11d88ea288cbdee279f89c610
/src/izi/apps/dashboard/widgets.py
8fe3723a9b44c00ad92a5dec957228dfddf69ec0
[ "BSD-3-Clause", "LicenseRef-scancode-unknown-license-reference", "BSD-2-Clause" ]
permissive
izi-ecommerce/izi-core
a092ea285d0dbd83d17427de3157a9f1e77d6c51
21176be2d41f0cf54ca954f294209c585f643dba
refs/heads/master
2020-03-30T08:37:39.045514
2018-10-08T02:58:46
2018-10-08T02:58:46
151,029,291
0
0
null
null
null
null
UTF-8
Python
false
false
2,969
py
import copy
import re

from django.forms import Widget
from django.urls import reverse


class RelatedFieldWidgetWrapper(Widget):
    """
    This class is a wrapper to a given widget to add the add icon for the
    IZI dashboard.
    """
    template_name = 'izi/dashboard/widgets/related_widget_wrapper.html'

    IS_POPUP_VALUE = '1'
    IS_POPUP_VAR = '_popup'
    TO_FIELD_VAR = '_to_field'

    def __init__(self, widget, rel):
        self.needs_multipart_form = widget.needs_multipart_form
        self.attrs = widget.attrs
        self.choices = widget.choices
        self.widget = widget
        self.rel = rel

    def __deepcopy__(self, memo):
        obj = copy.copy(self)
        obj.widget = copy.deepcopy(self.widget, memo)
        obj.attrs = self.widget.attrs
        memo[id(self)] = obj
        return obj

    @property
    def is_hidden(self):
        return self.widget.is_hidden

    @property
    def media(self):
        return self.widget.media

    def get_related_url(self, info, action, *args):
        app_label = info[0]
        model_object_name = info[1]
        # Convert the model's object name into lowercase, with dashes between
        # the camel-cased words
        model_object_name = '-'.join(re.sub('([a-z])([A-Z])', r'\1 \2', model_object_name).lower().split())
        # Does not specify current app
        return reverse("dashboard:%s-%s-%s" % (app_label, model_object_name, action), args=args)

    def get_context(self, name, value, attrs):
        rel_opts = self.rel.model._meta
        info = (rel_opts.app_label, rel_opts.object_name)
        self.widget.choices = self.choices
        url_params = '&'.join("%s=%s" % param for param in [
            (RelatedFieldWidgetWrapper.TO_FIELD_VAR, self.rel.get_related_field().name),
            (RelatedFieldWidgetWrapper.IS_POPUP_VAR, RelatedFieldWidgetWrapper.IS_POPUP_VALUE),
        ])
        context = {
            'rendered_widget': self.widget.render(name, value, attrs),
            'name': name,
            'url_params': url_params,
            'model': rel_opts.verbose_name,
        }
        change_related_template_url = self.get_related_url(info, 'update', '__fk__')
        context.update(
            change_related_template_url=change_related_template_url,
        )
        add_related_url = self.get_related_url(info, 'create')
        context.update(
            add_related_url=add_related_url,
        )
        delete_related_template_url = self.get_related_url(info, 'delete', '__fk__')
        context.update(
            delete_related_template_url=delete_related_template_url,
        )
        return context

    def value_from_datadict(self, data, files, name):
        return self.widget.value_from_datadict(data, files, name)

    def value_omitted_from_data(self, data, files, name):
        return self.widget.value_omitted_from_data(data, files, name)

    def id_for_label(self, id_):
        return self.widget.id_for_label(id_)
[ "diepdt@izi.asia" ]
diepdt@izi.asia
dfb4aafe897b5263ebd18b74bb4a504d4d203e7f
5db0fab37c2b8a618d85d3b60fab9f806c416474
/src/python/pants/backend/go/util_rules/embedcfg.py
d2ed6cc938823c9793be977e5ede6db43a726bc2
[ "Apache-2.0" ]
permissive
pantsbuild/pants
4988d1ac5474ec95f94ce2218aeb759401e4b011
98cbda8545f0d58c586ed2daa76fefd729d5e0d5
refs/heads/main
2023-09-05T03:44:17.646899
2023-09-01T19:52:09
2023-09-01T19:52:09
7,209,075
2,708
593
Apache-2.0
2023-09-14T19:33:33
2012-12-17T17:39:04
Python
UTF-8
Python
false
false
4,141
py
# Copyright 2021 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).

from __future__ import annotations

import json
from dataclasses import dataclass
from typing import Any, Iterable, Mapping

from pants.util.frozendict import FrozenDict
from pants.util.strutil import strip_prefix


@dataclass(frozen=True)
class EmbedConfig:
    patterns: FrozenDict[str, tuple[str, ...]]
    files: FrozenDict[str, str]

    def __init__(self, patterns: Mapping[str, Iterable[str]], files: Mapping[str, str]) -> None:
        """Configuration passed to the Go compiler to configure file embedding.

        The compiler relies entirely on the caller to map embed patterns to actual filesystem
        paths. All embed patterns contained in the package must be mapped. Consult
        `FirstPartyPkgAnalysis.embed_patterns` for the embed patterns obtained from analysis.

        :param patterns: Maps each pattern provided via a //go:embed directive to a list of file
          paths relative to the package directory for files to embed for that pattern. When the
          embedded variable is an `embed.FS`, those relative file paths define the virtual
          directory hierarchy exposed by the embed.FS filesystem abstraction. The relative file
          paths are resolved to actual filesystem paths for their content by consulting the
          `files` dictionary.
        :param files: Maps each virtual, relative file path used as a value in the `patterns`
          dictionary to the actual filesystem path with that file's content.
        """
        object.__setattr__(self, "patterns", FrozenDict({k: tuple(v) for k, v in patterns.items()}))
        object.__setattr__(self, "files", FrozenDict(files))

    @classmethod
    def from_json_dict(
        cls, d: dict[str, Any], prefix_to_strip: str | None = None
    ) -> EmbedConfig | None:
        patterns = d.get("Patterns", {})
        files = d.get("Files", {})
        if prefix_to_strip:
            files = {key: strip_prefix(value, prefix_to_strip) for key, value in files.items()}
        result = cls(
            patterns=FrozenDict({key: tuple(value) for key, value in patterns.items()}),
            files=FrozenDict(files),
        )
        return result if result else None

    def to_embedcfg(self) -> bytes:
        data = {
            "Patterns": dict(self.patterns),
            "Files": dict(self.files),
        }
        return json.dumps(data).encode("utf-8")

    def __bool__(self) -> bool:
        return bool(self.patterns) or bool(self.files)

    def merge(self, other: EmbedConfig) -> EmbedConfig:
        """Merge two EmbedConfig's into one.

        Overlapping keys must have the same values.
        """
        overlapping_patterns_keys = set(self.patterns.keys()) & set(other.patterns.keys())
        for key in overlapping_patterns_keys:
            if self.patterns[key] != other.patterns[key]:
                raise AssertionError(
                    "Unable to merge conflicting golang file embed configurations. This should not have occurred. "
                    "Please open an issue at https://github.com/pantsbuild/pants/issues/new/choose "
                    "with the following information: "
                    f"Patterns Key: {key}; Left: {self.patterns[key]}; Right: {other.patterns[key]} "
                )

        overlapping_files_keys = set(self.files.keys()) & set(other.files.keys())
        for key in overlapping_files_keys:
            if self.files[key] != other.files[key]:
                raise AssertionError(
                    "Unable to merge conflicting golang file embed configurations. This should not have occurred. "
                    "Please open an issue at https://github.com/pantsbuild/pants/issues/new/choose "
                    "with the following information: "
                    f"Files Key: {key}; Left: {self.files[key]}; Right: {other.files[key]} "
                )

        return EmbedConfig(
            patterns={**self.patterns, **other.patterns},
            files={**self.files, **other.files},
        )
[ "noreply@github.com" ]
pantsbuild.noreply@github.com
411e8160fa633fee9650fda54c0078f8af778b18
62e58c051128baef9452e7e0eb0b5a83367add26
/x12/5011/404005011.py
fca6c0597cac9c112b926b948f6c0cd9e7cdd1ee
[]
no_license
dougvanhorn/bots-grammars
2eb6c0a6b5231c14a6faf194b932aa614809076c
09db18d9d9bd9d92cefbf00f1c0de1c590fe3d0d
refs/heads/master
2021-05-16T12:55:58.022904
2019-05-17T15:22:23
2019-05-17T15:22:23
105,274,633
0
0
null
2017-09-29T13:21:21
2017-09-29T13:21:21
null
UTF-8
Python
false
false
3,993
py
from bots.botsconfig import *
from records005011 import recorddefs

syntax = {
    'version' : '00403',    #version of ISA to send
    'functionalgroup' : 'SR',
}

structure = [
    {ID: 'ST', MIN: 1, MAX: 1, LEVEL: [
        {ID: 'ZC1', MIN: 0, MAX: 1},
        {ID: 'BX', MIN: 0, MAX: 1},
        {ID: 'BNX', MIN: 0, MAX: 1},
        {ID: 'M3', MIN: 1, MAX: 1},
        {ID: 'N9', MIN: 1, MAX: 30},
        {ID: 'CM', MIN: 0, MAX: 2},
        {ID: 'M1', MIN: 0, MAX: 1},
        {ID: 'DTM', MIN: 0, MAX: 5},
        {ID: 'N7', MIN: 1, MAX: 500, LEVEL: [
            {ID: 'EM', MIN: 0, MAX: 1},
            {ID: 'VC', MIN: 0, MAX: 36, LEVEL: [
                {ID: 'N1', MIN: 0, MAX: 2, LEVEL: [
                    {ID: 'N3', MIN: 0, MAX: 2},
                    {ID: 'N4', MIN: 0, MAX: 1},
                    {ID: 'H3', MIN: 0, MAX: 1},
                ]},
            ]},
            {ID: 'M7', MIN: 0, MAX: 5},
            {ID: 'N5', MIN: 0, MAX: 1},
            {ID: 'IC', MIN: 0, MAX: 1},
            {ID: 'IM', MIN: 0, MAX: 1},
            {ID: 'M12', MIN: 0, MAX: 2},
            {ID: 'E1', MIN: 0, MAX: 2, LEVEL: [
                {ID: 'E4', MIN: 0, MAX: 1},
                {ID: 'E5', MIN: 0, MAX: 13},
                {ID: 'PI', MIN: 0, MAX: 1},
            ]},
            {ID: 'GA', MIN: 0, MAX: 15},
            {ID: 'REF', MIN: 0, MAX: 99, LEVEL: [
                {ID: 'N10', MIN: 0, MAX: 15},
                {ID: 'N1', MIN: 0, MAX: 5, LEVEL: [
                    {ID: 'N3', MIN: 0, MAX: 1},
                    {ID: 'N4', MIN: 0, MAX: 1},
                ]},
            ]},
        ]},
        {ID: 'NA', MIN: 0, MAX: 10},
        {ID: 'F9', MIN: 1, MAX: 1},
        {ID: 'D9', MIN: 1, MAX: 1},
        {ID: 'N1', MIN: 1, MAX: 15, LEVEL: [
            {ID: 'N2', MIN: 0, MAX: 2},
            {ID: 'N3', MIN: 0, MAX: 2},
            {ID: 'N4', MIN: 0, MAX: 1},
            {ID: 'REF', MIN: 0, MAX: 2},
            {ID: 'PER', MIN: 0, MAX: 2},
            {ID: 'BL', MIN: 0, MAX: 12},
        ]},
        {ID: 'S1', MIN: 0, MAX: 12, LEVEL: [
            {ID: 'S2', MIN: 0, MAX: 2},
            {ID: 'S9', MIN: 0, MAX: 1},
            {ID: 'N1', MIN: 0, MAX: 1},
            {ID: 'N2', MIN: 0, MAX: 1},
            {ID: 'N3', MIN: 0, MAX: 1},
            {ID: 'N4', MIN: 0, MAX: 1},
            {ID: 'PER', MIN: 0, MAX: 1},
        ]},
        {ID: 'R2', MIN: 1, MAX: 13},
        {ID: 'R9', MIN: 0, MAX: 1},
        {ID: 'E1', MIN: 0, MAX: 2, LEVEL: [
            {ID: 'E4', MIN: 0, MAX: 1},
            {ID: 'E5', MIN: 0, MAX: 13},
            {ID: 'PI', MIN: 0, MAX: 1},
        ]},
        {ID: 'H3', MIN: 0, MAX: 20},
        {ID: 'PS', MIN: 0, MAX: 5},
        {ID: 'LX', MIN: 1, MAX: 25, LEVEL: [
            {ID: 'L5', MIN: 1, MAX: 15},
            {ID: 'L0', MIN: 0, MAX: 25, LEVEL: [
                {ID: 'MEA', MIN: 0, MAX: 3},
                {ID: 'L1', MIN: 0, MAX: 10},
                {ID: 'PI', MIN: 0, MAX: 30},
            ]},
            {ID: 'X1', MIN: 0, MAX: 6},
        ]},
        {ID: 'T1', MIN: 0, MAX: 64, LEVEL: [
            {ID: 'T2', MIN: 0, MAX: 30},
            {ID: 'T3', MIN: 0, MAX: 12},
            {ID: 'T6', MIN: 0, MAX: 1},
            {ID: 'T8', MIN: 0, MAX: 99},
        ]},
        {ID: 'L3', MIN: 0, MAX: 1},
        {ID: 'LS', MIN: 0, MAX: 1, LEVEL: [
            {ID: 'LH1', MIN: 1, MAX: 1000, LEVEL: [
                {ID: 'LH2', MIN: 0, MAX: 4},
                {ID: 'LH3', MIN: 0, MAX: 10},
                {ID: 'LFH', MIN: 0, MAX: 20},
                {ID: 'LEP', MIN: 0, MAX: 3},
                {ID: 'LH4', MIN: 0, MAX: 4},
                {ID: 'LHT', MIN: 0, MAX: 3},
                {ID: 'LHR', MIN: 0, MAX: 5},
                {ID: 'PER', MIN: 0, MAX: 5},
                {ID: 'N1', MIN: 0, MAX: 10, LEVEL: [
                    {ID: 'N3', MIN: 0, MAX: 2},
                    {ID: 'N4', MIN: 0, MAX: 1},
                    {ID: 'PER', MIN: 0, MAX: 2},
                ]},
            ]},
            {ID: 'LE', MIN: 1, MAX: 1},
        ]},
        {ID: 'PER', MIN: 0, MAX: 5},
        {ID: 'LH2', MIN: 0, MAX: 6},
        {ID: 'LHR', MIN: 0, MAX: 1},
        {ID: 'LH6', MIN: 0, MAX: 5},
        {ID: 'XH', MIN: 0, MAX: 1},
        {ID: 'X7', MIN: 0, MAX: 10},
        {ID: 'SE', MIN: 1, MAX: 1},
    ]}
]
[ "jason.capriotti@gmail.com" ]
jason.capriotti@gmail.com
992ad12a575aa28017fbf8115beb2a0579bad5bb
5dd47abf7061201d9378e73e51f08fbb314ba2fd
/envdsys/envcontacts/migrations/0065_auto_20210223_0054.py
007d402317fba9799fbecce80a479a8fe0db8323
[ "Unlicense" ]
permissive
NOAA-PMEL/envDataSystem
4d264ae5209015e4faee648f37608d68a4461d0a
4db4a3569d2329658799a3eef06ce36dd5c0597d
refs/heads/master
2023-02-23T22:33:14.334737
2021-07-22T01:09:16
2021-07-22T01:09:16
191,809,007
1
0
Unlicense
2023-02-08T00:45:54
2019-06-13T17:50:03
Python
UTF-8
Python
false
false
1,175
py
# Generated by Django 3.1.7 on 2021-02-23 00:54

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('envcontacts', '0064_auto_20210223_0034'),
    ]

    operations = [
        migrations.AlterField(
            model_name='person',
            name='email1_type',
            field=models.CharField(choices=[('W', 'Work'), ('H', 'Home'), ('O', 'Other')], default='W', max_length=1),
        ),
        migrations.AlterField(
            model_name='person',
            name='email2_type',
            field=models.CharField(choices=[('W', 'Work'), ('H', 'Home'), ('O', 'Other')], default='W', max_length=1),
        ),
        migrations.AlterField(
            model_name='person',
            name='phone1_type',
            field=models.CharField(choices=[('O', 'Other'), ('W', 'Work'), ('M', 'Mobile'), ('H', 'Home')], default='M', max_length=1),
        ),
        migrations.AlterField(
            model_name='person',
            name='phone2_type',
            field=models.CharField(choices=[('O', 'Other'), ('W', 'Work'), ('M', 'Mobile'), ('H', 'Home')], default='M', max_length=1),
        ),
    ]
[ "derek.coffman@noaa.gov" ]
derek.coffman@noaa.gov
69c8613e89b0b70ecdbf7d9a9cc3558b46d87771
7f7bf9a5827d1441f18f568fc75ed5bf0159ca6c
/Декоратор/2/2функцию-декораторv41.py
f7a19566515c70ee88069f9c42bb8055b942a228
[]
no_license
KorsakovPV/yandex_contest
08bcff4eaf38d46a8348ac3abbb5f496857fe8e4
f67917ef710f5b138142b11ec4e6e4678b23e408
refs/heads/master
2023-01-06T13:04:07.955570
2020-10-24T20:22:41
2020-10-24T20:22:41
290,097,693
0
0
null
null
null
null
UTF-8
Python
false
false
815
py
def cache3(func):
    cache = {'res': func(), 'counter': 0}

    def save_three_times():
        if cache['counter'] == 3:
            cache['counter'] = 0
            cache['res'] = func()
            return cache['res']
        cache['counter'] += 1
        return cache['res']

    return save_three_times


@cache3
def heavy():
    print('Сложные вычисления')
    return 1


print(heavy())
# Сложные вычисления
# 1
print(heavy())
# 1
print(heavy())
# 1
# Опять кеш устарел, надо вычислять заново
print(heavy())
# Сложные вычисления
print(heavy())
print(heavy())
print(heavy())
print(heavy())
print(heavy())
print(heavy())
print(heavy())
print(heavy())
print(heavy())
print(heavy())
print(heavy())
print(heavy())
print(heavy())
[ "pavelkpv@gmail.com" ]
pavelkpv@gmail.com
f5d7ee6e684505ed22b422d7ed9221e0d6c707d5
0eefc9995ce927964969cbae247e28fd98f4998e
/src/hobbits-plugins/analyzers/KaitaiStruct/ksy_py/image/psx_tim.py
12103cca74f785a478567753e17eccc22387f220
[ "MIT" ]
permissive
SamuelWAnderson45/hobbits
993bde59d2fd96b1824e4f85ba1913eba12c8f3f
412f4ca50aa6aa2d26a1d05913f21f2ab0198eba
refs/heads/master
2022-12-14T09:36:46.663303
2020-08-30T17:05:53
2020-08-30T17:05:53
291,514,129
0
0
null
2020-08-30T17:02:44
2020-08-30T17:02:43
null
UTF-8
Python
false
false
3,662
py
# This is a generated file! Please edit source .ksy file and use kaitai-struct-compiler to rebuild

from pkg_resources import parse_version
import kaitaistruct
from kaitaistruct import KaitaiStruct, KaitaiStream, BytesIO
from enum import Enum
import collections


if parse_version(kaitaistruct.__version__) < parse_version('0.9'):
    raise Exception("Incompatible Kaitai Struct Python API: 0.9 or later is required, but you have %s" % (kaitaistruct.__version__))

class PsxTim(KaitaiStruct):

    class BppType(Enum):
        bpp_4 = 0
        bpp_8 = 1
        bpp_16 = 2
        bpp_24 = 3
    SEQ_FIELDS = ["magic", "flags", "clut", "img"]
    def __init__(self, _io, _parent=None, _root=None):
        self._io = _io
        self._parent = _parent
        self._root = _root if _root else self
        self._debug = collections.defaultdict(dict)

    def _read(self):
        self._debug['magic']['start'] = self._io.pos()
        self.magic = self._io.read_bytes(4)
        self._debug['magic']['end'] = self._io.pos()
        if not self.magic == b"\x10\x00\x00\x00":
            raise kaitaistruct.ValidationNotEqualError(b"\x10\x00\x00\x00", self.magic, self._io, u"/seq/0")
        self._debug['flags']['start'] = self._io.pos()
        self.flags = self._io.read_u4le()
        self._debug['flags']['end'] = self._io.pos()
        if self.has_clut:
            self._debug['clut']['start'] = self._io.pos()
            self.clut = self._root.Bitmap(self._io, self, self._root)
            self.clut._read()
            self._debug['clut']['end'] = self._io.pos()

        self._debug['img']['start'] = self._io.pos()
        self.img = self._root.Bitmap(self._io, self, self._root)
        self.img._read()
        self._debug['img']['end'] = self._io.pos()

    class Bitmap(KaitaiStruct):
        SEQ_FIELDS = ["len", "origin_x", "origin_y", "width", "height", "body"]
        def __init__(self, _io, _parent=None, _root=None):
            self._io = _io
            self._parent = _parent
            self._root = _root if _root else self
            self._debug = collections.defaultdict(dict)

        def _read(self):
            self._debug['len']['start'] = self._io.pos()
            self.len = self._io.read_u4le()
            self._debug['len']['end'] = self._io.pos()
            self._debug['origin_x']['start'] = self._io.pos()
            self.origin_x = self._io.read_u2le()
            self._debug['origin_x']['end'] = self._io.pos()
            self._debug['origin_y']['start'] = self._io.pos()
            self.origin_y = self._io.read_u2le()
            self._debug['origin_y']['end'] = self._io.pos()
            self._debug['width']['start'] = self._io.pos()
            self.width = self._io.read_u2le()
            self._debug['width']['end'] = self._io.pos()
            self._debug['height']['start'] = self._io.pos()
            self.height = self._io.read_u2le()
            self._debug['height']['end'] = self._io.pos()
            self._debug['body']['start'] = self._io.pos()
            self.body = self._io.read_bytes((self.len - 12))
            self._debug['body']['end'] = self._io.pos()

    @property
    def has_clut(self):
        if hasattr(self, '_m_has_clut'):
            return self._m_has_clut if hasattr(self, '_m_has_clut') else None

        self._m_has_clut = (self.flags & 8) != 0
        return self._m_has_clut if hasattr(self, '_m_has_clut') else None

    @property
    def bpp(self):
        if hasattr(self, '_m_bpp'):
            return self._m_bpp if hasattr(self, '_m_bpp') else None

        self._m_bpp = (self.flags & 3)
        return self._m_bpp if hasattr(self, '_m_bpp') else None
[ "adam@mahletconsulting.com" ]
adam@mahletconsulting.com
dc5f16a21eaf87f548dcafd572e064bea67e1b04
928508a85d16987a1382b7adcd7142dee7ac986f
/Practice_Python/6.String_lists.py
0d9f652a42a79e534901e740e8c4129af9c11942
[]
no_license
TheoRobin76/Data_Engineering22
ed29798d0660d6ac73985f510ef7cea942ab77ad
26299a61a8b4372d9ed2664d4ad623c9d12d23d0
refs/heads/main
2023-06-17T05:43:40.347544
2021-07-14T13:23:23
2021-07-14T13:23:23
378,177,215
1
0
null
null
null
null
UTF-8
Python
false
false
220
py
word = input("Please enter a word and I will tell you if it is a palindrome: ")

if word == word[::-1]:
    print(f"Congratulations, {word} is a palindrome")
else:
    print(f"My Condolences, {word} is not a palindrome")
[ "you@example.com" ]
you@example.com
e516493b77004a907bb16ef524f73968b6760fc5
8cf633e92a0671c8201268620a0372f250c8aeb2
/204.计数质数.py
dc347d29caae6593e34a2ccf06dfa29044656b36
[ "Unlicense" ]
permissive
SprintGhost/LeetCode
76da5c785009d474542e5f2cdac275675b8e60b8
cdf1a86c83f2daedf674a871c4161da7e8fad17c
refs/heads/develop
2021-06-06T04:04:28.883692
2021-01-01T14:09:26
2021-01-01T14:09:26
230,635,046
0
0
Unlicense
2020-12-11T14:55:36
2019-12-28T16:34:39
Python
UTF-8
Python
false
false
607
py
#
# @lc app=leetcode.cn id=204 lang=python3
#
# [204] 计数质数
#

# Accepted
# 20/20 cases passed (124 ms)
# Your runtime beats 85.76 % of python3 submissions
# Your memory usage beats 28.36 % of python3 submissions (36.8 MB)

# @lc code=start
class Solution:
    def countPrimes(self, n: int) -> int:
        if n < 2:
            return 0
        isPrime = [1] * n
        isPrime[0] = isPrime[1] = 0
        for i in range(2, int(n ** 0.5) + 1):
            if isPrime[i]:
                isPrime[i * i:n:i] = [0] * ((n - 1 - i * i) // i + 1)
        return sum(isPrime)
# @lc code=end
[ "864047435@qq.com" ]
864047435@qq.com
5b3d7a86142ca1e96291cb8b7355a821f2d2b495
3e7cce8dc203bcdbd35fccfaf974a9af0613d838
/profilemaker/views.py
5bb04d58dafd44c0f3899b259bb47bc561694b2d
[]
no_license
MdReyajuddin/blog
146ac2b0b6967261b943535c819b403817390cd5
7da318865cef3116d50bcf0eb20d800e57290b90
refs/heads/master
2022-12-15T15:44:30.969717
2020-01-25T08:07:37
2020-01-25T08:07:37
236,128,083
0
0
null
2022-11-22T05:16:03
2020-01-25T05:06:16
Python
UTF-8
Python
false
false
892
py
from django.shortcuts import render

from .forms import Profile_Form
from .models import User_Profile

# Create your views here.
IMAGE_FILE_TYPES = ['png', 'jpg', 'jpeg']


def create_profile(request):
    form = Profile_Form()
    if request.method == 'POST':
        form = Profile_Form(request.POST, request.FILES)
        if form.is_valid():
            user_pr = form.save(commit=False)
            user_pr.display_picture = request.FILES['display_picture']
            file_type = user_pr.display_picture.url.split('.')[-1]
            file_type = file_type.lower()
            if file_type not in IMAGE_FILE_TYPES:
                return render(request, 'profilemaker/error.html')
            user_pr.save()
            return render(request, 'profilemaker/details.html', {'user_pr': user_pr})
    context = {"form": form,}
    return render(request, 'profilemaker/create.html', context)
[ "md.reyajuddin45@gmail.com" ]
md.reyajuddin45@gmail.com
5aa57daf3cc68ef88f14f8f3ba7cba5f03636d50
15f321878face2af9317363c5f6de1e5ddd9b749
/solutions_python/Problem_8/34.py
13a5dbdcd1c07973c0fd5e97e7a59e2773a1dde9
[]
no_license
dr-dos-ok/Code_Jam_Webscraper
c06fd59870842664cd79c41eb460a09553e1c80a
26a35bf114a3aa30fc4c677ef069d95f41665cc0
refs/heads/master
2020-04-06T08:17:40.938460
2018-10-14T10:12:47
2018-10-14T10:12:47
null
0
0
null
null
null
null
UTF-8
Python
false
false
3,034
py
"""The grouper class has been reciped from: http://code.activestate.com/recipes/387776/ """ class Grouper(object): """This class provides a lightweight way to group arbitrary objects together into disjoint sets when a full-blown graph data structure would be overkill. Objects can be joined using .join(), tested for connectedness using .joined(), and all disjoint sets can be retreived using .get(). The objects being joined must be hashable. For example: >>> g = grouper.Grouper() >>> g.join('a', 'b') >>> g.join('b', 'c') >>> g.join('d', 'e') >>> list(g.get()) [['a', 'b', 'c'], ['d', 'e']] >>> g.joined('a', 'b') True >>> g.joined('a', 'c') True >>> g.joined('a', 'd') False""" def __init__(self, init=[]): mapping = self._mapping = {} for x in init: mapping[x] = [x] def join(self, a, *args): """Join given arguments into the same set. Accepts one or more arguments.""" mapping = self._mapping set_a = mapping.setdefault(a, [a]) for arg in args: set_b = mapping.get(arg) if set_b is None: set_a.append(arg) mapping[arg] = set_a elif set_b is not set_a: if len(set_b) > len(set_a): set_a, set_b = set_b, set_a set_a.extend(set_b) for elem in set_b: mapping[elem] = set_a def joined(self, a, b): """Returns True if a and b are members of the same set.""" mapping = self._mapping try: return mapping[a] is mapping[b] except KeyError: return False def __iter__(self): """Returns an iterator returning each of the disjoint sets as a list.""" seen = set() for elem, group in self._mapping.iteritems(): if elem not in seen: yield group seen.update(group) fin = open('B-small.in', 'r'); fout = open('B-small.out', 'w'); primes = [2,3,5,7,11,13,17,19,23,29,31,37,41,43,47,53,59,61,67,71,73,79,83,89,97,101,103,107,109,113,127,131,137,139,149,151,157,163,167,173,179,181,191,193,197,199,211,223,227,229,233,239,241,251,257,263,269,271,277,281,283,293,307,311,313,317,331,337,347,349,353,359,367,373,379,383,389,397,401,409,419,421,431,433,439,443,449,457,461,463,467,479,487,491,499,503,509,521,523,541,547,557,563,569,571,577,587,593,599,601,607,613,617,619,631,641,643,647,653,659,661,673,677,683,691,701,709,719,727,733,739,743,751,757,761,769,773,787,797,809,811,821,823,827,829,839,853,857,859,863,877,881,883,887,907,911,919,929,937,941,947,953,967,971,977,983,991,997] def common(x, y, l): for p in primes: if p >= l and x % p == 0 and y % p == 0: return True return False T = int(fin.readline()); for i in range(T): args = fin.readline().split(' ') A = int(args[0]) B = int(args[1]) P = int(args[2]) g = Grouper(range(A,B+1)) for j in range(A,B+1): for h in range(j+1, B+1): if not g.joined(j, h) and common(j, h, P): g.join(j, h) fout.write('Case #' + str(i + 1) + ': ' + str(len(list(g))) + '\n')
[ "miliar1732@gmail.com" ]
miliar1732@gmail.com
5e034aa33708e97b5aedbcbf1bba6cf51df4c4f9
e06c7fd594c52425ab7fc5498c07ae14daf9578b
/api/admin.py
cafd7a142d565ea707d870d9d0d7f3375129165e
[]
no_license
rwheeler-7864/simplenight-api
bc35560eca1e1c25092a1bcdc4af1633367413b8
602646911a0155df5b70991d1445c10cee18cd33
refs/heads/master
2023-03-12T03:10:51.516499
2021-02-25T20:40:44
2021-02-25T20:40:44
342,370,358
0
0
null
null
null
null
UTF-8
Python
false
false
1,708
py
from django import forms
from django.contrib import admin
from django.forms import TextInput

from api.models.models import (
    Booking,
    OrganizationFeatures,
    PropertyInfo,
    Venue,
    VenueMedia,
    VenueContact,
    VenueDetail,
    PaymentMethod,
    # ProductMedia,
    ProductHotelsMedia,
    ProductsNightLifeMedia,
    ProductGroup,
    ProductsNightLife,
    ProductHotel,
    ProductsHotelRoomDetails,
    ProductsHotelRoomPricing,
)


@admin.register(Booking)
class BookingAdmin(admin.ModelAdmin):
    list_per_page = 100


@admin.register(OrganizationFeatures)
class OrganizationFeatureInline(admin.ModelAdmin):
    class Form(forms.ModelForm):
        class Meta:
            model = OrganizationFeatures
            fields = "__all__"
            widgets = {
                "value": TextInput(attrs={"size": 60}),
            }

    form = Form
    list_display = ("organization_name", "name", "value")
    list_filter = ("organization__name",)

    widgets = {
        "value": TextInput(attrs={"size": 20}),
    }


@admin.register(PropertyInfo)
class PropertyInfoAdmin(admin.ModelAdmin):
    list_display = ("provider", "provider_code", "type", "language_code", "description")
    list_filter = ("provider_code", "language_code")


admin.site.register(Venue)
admin.site.register(VenueMedia)
admin.site.register(VenueContact)
admin.site.register(VenueDetail)
admin.site.register(PaymentMethod)
admin.site.register(ProductHotelsMedia)
admin.site.register(ProductsNightLifeMedia)
admin.site.register(ProductHotel)
admin.site.register(ProductGroup)
admin.site.register(ProductsNightLife)
admin.site.register(ProductsHotelRoomDetails)
admin.site.register(ProductsHotelRoomPricing)
[ "randmwheeler@gmail.com" ]
randmwheeler@gmail.com
506003b3dae22a077719c309c33a12355ed13292
7bd5ca970fbbe4a3ed0c7dadcf43ba8681a737f3
/codeforces/cf251-275/cf260/a2.py
a83e5ffe122cda371ad06555dc20bfd8fbd03eb2
[]
no_license
roiti46/Contest
c0c35478cd80f675965d10b1a371e44084f9b6ee
c4b850d76796c5388d2e0d2234f90dc8acfaadfa
refs/heads/master
2021-01-17T13:23:30.551754
2017-12-10T13:06:42
2017-12-10T13:06:42
27,001,893
0
0
null
null
null
null
UTF-8
Python
false
false
276
py
# -*- coding: utf-8 -*-
import sys,copy,math,heapq,itertools as it,fractions,re,bisect,collections as coll

n = int(raw_input())
for loop in xrange(n):
    a, b = map(int, raw_input().split())
    if a < b:
        print "Happy Alex"
        break
else:
    print "Poor Alex"
[ "roiti46@gmail.com" ]
roiti46@gmail.com
b548144cbea5bfa89e439e9720a806f5b4e08fe8
544cfadc742536618168fc80a5bd81a35a5f2c99
/tools/test/connectivity/acts_tests/tests/google/nr/sa5g/Sa5gSmsTest.py
011062f94542ad88a258d6dd33ea3382f50f7a21
[]
no_license
ZYHGOD-1/Aosp11
0400619993b559bf4380db2da0addfa9cccd698d
78a61ca023cbf1a0cecfef8b97df2b274ac3a988
refs/heads/main
2023-04-21T20:13:54.629813
2021-05-22T05:28:21
2021-05-22T05:28:21
null
0
0
null
null
null
null
UTF-8
Python
false
false
3,188
py
#!/usr/bin/env python3.4
#
#   Copyright 2021 - Google
#
#   Licensed under the Apache License, Version 2.0 (the "License");
#   you may not use this file except in compliance with the License.
#   You may obtain a copy of the License at
#
#       http://www.apache.org/licenses/LICENSE-2.0
#
#   Unless required by applicable law or agreed to in writing, software
#   distributed under the License is distributed on an "AS IS" BASIS,
#   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
#   See the License for the specific language governing permissions and
#   limitations under the License.
"""
    Test Script for 5G SA SMS scenarios
"""

import time

from acts.test_decorators import test_tracker_info
from acts_contrib.test_utils.tel.TelephonyBaseTest import TelephonyBaseTest
from acts_contrib.test_utils.tel.tel_test_utils import ensure_phones_idle
from acts_contrib.test_utils.tel.tel_5g_test_utils import disable_apm_mode_both_devices
from acts_contrib.test_utils.tel.tel_5g_test_utils import provision_device_for_5g
from acts_contrib.test_utils.tel.tel_5g_test_utils import provision_both_devices_for_volte
from acts_contrib.test_utils.tel.tel_5g_test_utils import verify_5g_attach_for_both_devices
from acts_contrib.test_utils.tel.tel_sms_utils import _sms_test_mo
from acts_contrib.test_utils.tel.tel_sms_utils import _long_sms_test_mo


class Sa5gSmsTest(TelephonyBaseTest):
    def setup_class(self):
        super().setup_class()

    def setup_test(self):
        TelephonyBaseTest.setup_test(self)

    def teardown_test(self):
        ensure_phones_idle(self.log, self.android_devices)

    """ Tests Begin """

    @test_tracker_info(uuid="8949d1c7-1719-4960-b79c-041b467fb5ef")
    @TelephonyBaseTest.tel_test_wrap
    def test_5g_sa_sms_mo_mt(self):
        """Test SMS between two phones in 5g SA

        Provision devices in 5g SA
        Send and Verify SMS from PhoneA to PhoneB
        Verify both devices are still on 5g SA

        Returns:
            True if success.
            False if failed.
        """
        ads = self.android_devices
        if not provision_device_for_5g(self.log, ads, sa_5g=True):
            return False

        if not _sms_test_mo(self.log, ads):
            return False

        if not verify_5g_attach_for_both_devices(self.log, ads, True):
            return False

        self.log.info("PASS - SMS test over 5G SA validated")
        return True

    @test_tracker_info(uuid="5c7a717b-1f98-44b7-95e7-0e83afb82a84")
    @TelephonyBaseTest.tel_test_wrap
    def test_5g_sa_sms_long_message_mo_mt(self):
        """Test SMS basic function between two phone. Phones in sa 5G network.

        Airplane mode is off.
        Send SMS from PhoneA to PhoneB.
        Verify received message on PhoneB is correct.

        Returns:
            True if success.
            False if failed.
        """

        ads = self.android_devices

        if not disable_apm_mode_both_devices(self.log, ads):
            return False

        if not provision_device_for_5g(self.log, ads, sa_5g=True):
            return False

        return _long_sms_test_mo(self.log, ads)

    """ Tests End """
[ "rick_tan@qq.com" ]
rick_tan@qq.com
f3ff45256bbea619dbbf64fd88fabf51d085e602
0c15310c93117c916211b214bf5e87bdb05e768b
/hyperion/__init__.py
f1feff3c2c50c50e70b91d7fb71f52e9b778df21
[ "BSD-2-Clause" ]
permissive
hyperion-rt/hyperion
553efc0bd2c279409f65381d769546770b728236
743e085dd03ac8f92796773e55a69fd5b50700c1
refs/heads/main
2023-08-30T20:57:38.751726
2023-08-25T20:41:29
2023-08-25T20:41:29
1,442,240
41
19
BSD-2-Clause
2023-08-25T20:41:31
2011-03-05T04:43:44
Python
UTF-8
Python
false
false
2,344
py
from __future__ import print_function, division

from .version import __version__

# Set up the test function
_test_runner = None


def _get_test_runner():
    from .testing.helper import TestRunner
    return TestRunner(__path__[0])


def test(package=None, test_path=None, args=None, plugins=None,
         verbose=False, pastebin=None, generate_reference=False,
         bit_level_tests=False, coverage=False):
    '''
    Run Hyperion tests using py.test. A proper set of arguments is
    constructed and passed to `pytest.main`.

    Parameters
    ----------
    package : str, optional
        The name of a specific package to test, e.g. 'model' or
        'densities'. If nothing is specified all default Hyperion tests
        are run.

    test_path : str, optional
        Specify location to test by path. May be a single file or
        directory. Must be specified absolutely or relative to the
        calling directory.

    args : str, optional
        Additional arguments to be passed to `pytest.main` in the `args`
        keyword argument.

    plugins : list, optional
        Plugins to be passed to `pytest.main` in the `plugins` keyword
        argument.

    verbose : bool, optional
        Convenience option to turn on verbose output from py.test. Passing
        True is the same as specifying `-v` in `args`.

    pastebin : {'failed','all',None}, optional
        Convenience option for turning on py.test pastebin output. Set to
        'failed' to upload info for failed tests, or 'all' to upload info
        for all tests.

    generate_reference : str
        Generate reference results for bit-level tests

    bit_level_tests : bool
        Run bit-level tests. These are time-consuming tests that check the
        exact validity of the output, but they are disabled by default.

    coverage : bool, optional
        Generate a test coverage report. The result will be placed in the
        directory htmlcov.

    See Also
    --------
    pytest.main : py.test function wrapped by `run_tests`.

    '''
    test_runner = _get_test_runner()
    return test_runner.run_tests(
        package=package, test_path=test_path, args=args, plugins=plugins,
        verbose=verbose, pastebin=pastebin,
        generate_reference=generate_reference,
        bit_level_tests=bit_level_tests, coverage=coverage)
[ "thomas.robitaille@gmail.com" ]
thomas.robitaille@gmail.com
ba79ccbb978bcafbf81db83fd7e2fa58372de286
1ec96ae64d42a735278c6ae718cc0205e846a79e
/qpwcnet/app/quantize/test_infer_tflite.py
45c4358b9a7326e67f1def3e95d1066e81d036f2
[]
no_license
fenguoo/qpwcnet
c3139c04787ecd8abf88d7aa7c106fd8715d92ab
3fa803d4b55d46559eb96543ce0683fa4dd4d737
refs/heads/master
2023-03-08T20:37:50.137699
2021-02-25T18:40:24
2021-02-25T18:40:24
null
0
0
null
null
null
null
UTF-8
Python
false
false
3,352
py
#!/usr/bin/env python3

import tensorflow as tf
import numpy as np
import faulthandler
import cv2

from qpwcnet.core.util import disable_gpu
from qpwcnet.data.augment import image_resize, image_augment
from qpwcnet.data.tfrecord import get_reader
from qpwcnet.core.vis import flow_to_image
from qpwcnet.vis.show import show


def main():
    faulthandler.enable()
    # NOTE(ycho): Mysteriously, tflite segfaults if `channels_first`.
    tf.keras.backend.set_image_data_format('channels_last')
    # my_devices = tf.config.experimental.list_physical_devices(device_type='CPU')
    # tf.config.experimental.set_visible_devices(devices= my_devices, device_type='CPU')
    # disable_gpu()

    # Load the TFLite model and allocate tensors.
    interpreter = tf.lite.Interpreter(model_path="/tmp/qpwcnet.tflite")
    interpreter.allocate_tensors()

    # Get input and output tensors.
    input_details = interpreter.get_input_details()
    print(input_details)
    output_details = interpreter.get_output_details()
    print(output_details)

    # Test the model on random input data.
    input_shape = input_details[0]['shape']
    input_data = np.array(
        np.random.random_sample(input_shape), dtype=np.float32)
    print(input_data.shape)  # 1, 6, 256, 512
    print('set_tensor')
    interpreter.set_tensor(input_details[0]['index'], input_data)
    print('invoke')
    interpreter.invoke()
    print('?')

    # The function `get_tensor()` returns a copy of the tensor data.
    # Use `tensor()` in order to get a pointer to the tensor.
    output_data = interpreter.get_tensor(output_details[-1]['index'])
    print(output_data.shape)

    def preprocess(ims, flo):
        # 0-255 -> 0.0-1.0
        ims = tf.cast(ims, tf.float32) * tf.constant(1.0 / 255.0, dtype=tf.float32)
        # resize, no augmentation.
        ims, flo = image_resize(ims, flo, (256, 512))
        # ims, flo = image_augment(ims, flo, (256, 512))
        # 0.0-1.0 -> -0.5, 0.5
        ims = ims - 0.5

        # Convert to correct data format
        data_format = tf.keras.backend.image_data_format()
        if data_format == 'channels_first':
            ims = einops.rearrange(ims, '... h w c -> ... c h w')
            flo = einops.rearrange(flo, '... h w c -> ... c h w')

        return ims, flo

    if True:
        # TODO(ycho): Cleanup dataset loading pattern for opt-flow datasets.
        glob_pattern = '/media/ssd/datasets/sintel-processed/shards/sintel-*.tfrecord'
        filenames = tf.data.Dataset.list_files(glob_pattern).shuffle(32)
        # dataset = get_reader(filenames).shuffle(buffer_size=1024).repeat().batch(8)
        # dataset = get_reader(filenames).batch(8).repeat()
        dataset = get_reader(filenames).shuffle(
            buffer_size=32).map(preprocess).batch(1)

        for ims, flo in dataset:
            interpreter.set_tensor(
                input_details[0]['index'], ims)  # ims.numpy()?
            interpreter.invoke()
            flo_pred = output_data = interpreter.get_tensor(
                output_details[-1]['index'])
            flo_pred_rgb = flow_to_image(flo_pred)
            show('flo_pred_rgb', flo_pred_rgb[0], True)
            cv2.waitKey(0)
            break


if __name__ == '__main__':
    main()
[ "jchocholate@gmail.com" ]
jchocholate@gmail.com
066630c5fc08c6a3f8d7aea2d8c7356f89559c49
afc677459e46635ceffccf60d1daf50e62694557
/ACME/utility/isskinny.py
b174ab247419e98a7beff22f70e10383e0180265
[ "MIT" ]
permissive
mauriziokovacic/ACME
056b06da4bf66d89087fcfcbe0fd0a2e255d09f3
2615b66dd4addfd5c03d9d91a24c7da414294308
refs/heads/master
2020-05-23T23:40:06.667416
2020-01-10T14:42:01
2020-01-10T14:42:01
186,997,977
3
1
null
null
null
null
UTF-8
Python
false
false
497
py
from .row import * from .col import * from .ismatrix import * def isskinny(*tensors): """ Returns whether or not the input tensors are skinny matrices A skinny matrix is a matrix where the number of rows is greater than the number of columns Parameters ---------- *tensors : Tensor a sequence of tensors Returns ------- bool True if all the tensors are skinny, False otherwise """ return all([ismatrix(t) and (row(t) > col(t)) for t in tensors])
[ "maurizio.kovacic@gmail.com" ]
maurizio.kovacic@gmail.com
555dc967aabaab2c6844ce81e8e30bd1621e7673
af368ad82efda90ca9de73c57f2822aa27a21044
/rigl/rigl_tf2/networks.py
ff96229cb5555728ea5fe8612ba59a1157351158
[ "Apache-2.0" ]
permissive
google-research/rigl
e24f05bfd872f31194a047cf1b3a0bfa12ab45aa
d39fc7d46505cb3196cb1edeb32ed0b6dd44c0f9
refs/heads/master
2023-08-25T04:54:29.014303
2023-01-13T13:40:32
2023-01-26T17:47:13
224,050,000
324
61
Apache-2.0
2022-07-04T22:02:04
2019-11-25T22:03:16
Python
UTF-8
Python
false
false
2,777
py
# coding=utf-8 # Copyright 2022 RigL Authors. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """This module has networks used in experiments. """ from typing import Optional, Tuple # Non-expensive-to-import types. import gin import tensorflow.compat.v2 as tf @gin.configurable(allowlist=['hidden_sizes', 'use_batch_norm']) def lenet5(input_shape, num_classes, activation, kernel_regularizer, use_batch_norm = False, hidden_sizes = (6, 16, 120, 84)): """Lenet5 implementation.""" network = tf.keras.Sequential() kwargs = { 'activation': activation, 'kernel_regularizer': kernel_regularizer, } def maybe_add_batchnorm(): if use_batch_norm: network.add(tf.keras.layers.BatchNormalization()) network.add(tf.keras.layers.Conv2D( hidden_sizes[0], 5, input_shape=input_shape, **kwargs)) network.add(tf.keras.layers.MaxPool2D(pool_size=(2, 2))) maybe_add_batchnorm() network.add(tf.keras.layers.Conv2D(hidden_sizes[1], 5, **kwargs)) network.add(tf.keras.layers.MaxPool2D(pool_size=(2, 2))) maybe_add_batchnorm() network.add(tf.keras.layers.Flatten()) network.add(tf.keras.layers.Dense(hidden_sizes[2], **kwargs)) maybe_add_batchnorm() network.add(tf.keras.layers.Dense(hidden_sizes[3], **kwargs)) maybe_add_batchnorm() kwargs['activation'] = None network.add(tf.keras.layers.Dense(num_classes, **kwargs)) return network @gin.configurable(allowlist=['hidden_sizes', 'use_batch_norm']) def mlp(input_shape, num_classes, activation, kernel_regularizer, use_batch_norm = False, hidden_sizes = (300, 100)): """MLP implementation.""" network = tf.keras.Sequential() kwargs = { 'activation': activation, 'kernel_regularizer': kernel_regularizer } def maybe_add_batchnorm(): if use_batch_norm: network.add(tf.keras.layers.BatchNormalization()) network.add(tf.keras.layers.Flatten(input_shape=input_shape)) network.add(tf.keras.layers.Dense(hidden_sizes[0], **kwargs)) maybe_add_batchnorm() network.add(tf.keras.layers.Dense(hidden_sizes[1], **kwargs)) maybe_add_batchnorm() kwargs['activation'] = None network.add(tf.keras.layers.Dense(num_classes, **kwargs)) return network
[ "evcu@google.com" ]
evcu@google.com
40d07a435766b7b05fd6c3cfdb3b1ec4ba000f2b
8fe42fb556547bc60bd34b8c5230c01d7ef2dc89
/src/operaciones.py
c2b28d668244639975b2d9d79f52146085af3388
[]
no_license
Davidlazarog/Shark-Project
1c8b3225f41a32dbcabace88c776043aeac42066
e6baf3c346aa324566ac687af885a7abcd66b5a2
refs/heads/master
2022-12-06T20:03:15.679049
2020-08-31T19:06:54
2020-08-31T19:06:54
291,052,138
0
0
null
null
null
null
UTF-8
Python
false
false
1,788
py
def fatality(x): if x == "Y": return True elif x == "N": return False elif x == " N": return False elif x == "N ": return False elif x == "y": return True return 'Unknown' def Type(x): if x == "Unprovoked": return 'Unprovoked' elif x == "Provoked": return 'Provoked' elif x == "Sea Disaster": return 'Unprovoked' return 'Unknown' import re arms = ["(.*)?arms(.*)?", "(.*)?hand(.*)?" ] leg = ["(.*)leg(.*)?", "(.*)?foot(.*)?" ] fatal = ["(.*)?atal(.*)?", "(.*)?ody(.*)?" ] ribs = ["(.*)?ribs(.*)?"] noinjury = ["(.*)?o injur(.*)?"] def menu (x) : Arms = "ARMS" Leg = 'LEGS' Fatal = 'FATAL' Ribs = 'RIBS' Noinjury = 'NO INJURY' if type(x) != str: return 'Unknown' else: x = x.lower() for a in arms: if re.search (a,x): x = Arms return x for l in leg: if re.search (l,x): x = Leg return x for i in fatal: if re.search (i,x): x = Fatal return x for r in ribs: if re.search (r,x): x = Ribs return x for n in noinjury: if re.search (n,x): x = Noinjury return x return 'Unknown' pizza = ["(.*)?no injury(.*)?" ] human = ["(.*)?fatal(.*)?", "(.*)?legs(.*)?", "(.*)?arms(.*)?", "(.*)?ribs(.*)?"] def pizzavshuman (x): Pizza = 'PIZZA' Human = 'HUMAN' x = x.lower() for p in pizza: if re.search (p,x): x = Pizza return x for h in human: if re.search (h,x): x = Human return x return 'UNKNOWN'
[ "david@MacBook-Air-de-David.local" ]
david@MacBook-Air-de-David.local
d4d3db0d9c63d6caa39c78c51416d5cd71a05bf5
f07a42f652f46106dee4749277d41c302e2b7406
/Data Set/bug-fixing-2/23fcae7c9e2969cf6c9ffb019dda90e2abce2337-<v2_runner_on_unreachable>-fix.py
a2bf157a0a16543a0a827efdc86b55d5841fa9c6
[]
no_license
wsgan001/PyFPattern
e0fe06341cc5d51b3ad0fe29b84098d140ed54d1
cc347e32745f99c0cd95e79a18ddacc4574d7faa
refs/heads/main
2023-08-25T23:48:26.112133
2021-10-23T14:11:22
2021-10-23T14:11:22
null
0
0
null
null
null
null
UTF-8
Python
false
false
303
py
def v2_runner_on_unreachable(self, result): self._preprocess_result(result) msg = 'unreachable' display_color = C.COLOR_UNREACHABLE task_result = self._process_result_output(result, msg) self._display.display((' ' + task_result), display_color, stderr=self.display_failed_stderr)
[ "dg1732004@smail.nju.edu.cn" ]
dg1732004@smail.nju.edu.cn
d6942f802ebb324f1d183331af8a70085d17d2a8
ca7aa979e7059467e158830b76673f5b77a0f5a3
/Python_codes/p03260/s433501845.py
f916e360749b81eba126bb8d969b1f1e0e46b12a
[]
no_license
Aasthaengg/IBMdataset
7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901
f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8
refs/heads/main
2023-04-22T10:22:44.763102
2021-05-13T17:27:22
2021-05-13T17:27:22
367,112,348
0
0
null
null
null
null
UTF-8
Python
false
false
227
py
import sys def main(): a, b = map(int, input().split()) for c in range(1, 4): if (a * b * c) % 2 == 1: print('Yes') sys.exit(0) print('No') if __name__ == "__main__": main()
[ "66529651+Aastha2104@users.noreply.github.com" ]
66529651+Aastha2104@users.noreply.github.com
3209c668a67a07c6ad75cfad7348b0fa17bc9382
eae3d77ac72c168cee7701462f1fc45d7d4dcd91
/SWEA/5653_줄기세포배양.py
352a068ba658b7efae977a5f073569e60fa98afe
[]
no_license
ByeongjunCho/Algorithm-TIL
ed2f018d50bd2483bd1175ff9bf7e91913c14766
ad79125a1498915fe97c1d57ee6860b06c410958
refs/heads/master
2022-07-19T15:12:23.689319
2020-05-18T08:37:09
2020-05-18T08:37:09
256,399,493
0
0
null
null
null
null
UTF-8
Python
false
false
1,765
py
# 5653. [모의 SW 역량테스트] 줄기세포배양 for tc in range(1, 1+int(input())): N, M, K = map(int, input().split()) # 행, 열, 시간 arr = [list(map(int, input().split())) for _ in range(N)] # 좌표를 dict 에 입력 cell = {} # 살아있는 세포 deadcell = set() # 죽은세포 for i in range(N): for j in range(M): if arr[i][j]: cell.update({(i, j): [arr[i][j], -arr[i][j]]}) # 행렬값, 활성화여부 def clone(): dy = [0, 0, -1, 1] dx = [1, -1, 0, 0] keys = set(cell.keys()) for k in keys: val = cell[k] # 활성화된 것이 아니라면 if 0 > val[1]: cell.update({k: [val[0], val[1] + 1]}) # 활성화된 세포라면 elif 0 <= val[1] < val[0]: cell.update({k: [val[0], val[1] + 1]}) for i in range(4): wy, wx = dy[i] + k[0], dx[i] + k[1] # 기존에 있는 셀이 금방 복사된 것이면서 생명력이 작다면 if (wy, wx) not in keys and cell.get((wy, wx)) and cell.get((wy, wx))[0] == -cell.get((wy, wx))[1] and cell.get((wy, wx))[0] < val[0]: cell.update({(wy, wx): [val[0], -val[0]]}) # 복사하려는 자리에 cell이 없다면 elif not cell.get((wy, wx)) and (wy, wx) not in deadcell: # deadcell.get((wy, wx)) cell.update({(wy, wx): [val[0], -val[0]]}) if val[0] == val[1] +1: cell.pop(k) deadcell.add(k) for _ in range(K): clone() count = len(cell) print('#{} {}'.format(tc, count))
[ "jjgk91@naver.com" ]
jjgk91@naver.com
c6786c9dbf4949ae65f9b5146056f5c8542d97d3
76658a65823ea493038c1d037eb0bc1eda6733d3
/chapter8/decorator.py
1af2950bea033ac2d44c7197d914df3b0ee7bdea
[]
no_license
Asunqingwen/OOR_Edition3
2f14446f26a6615aea58920f67f6656c74257c4c
92481b15fc365f8d5b903f6e5eb0974e9ff2af33
refs/heads/master
2022-08-15T05:49:33.619528
2020-01-13T01:53:47
2020-01-13T01:53:47
230,414,693
0
0
null
2022-07-29T23:04:04
2019-12-27T09:33:05
Python
UTF-8
Python
false
false
933
py
# -*- coding: utf-8 -*- # @Time : 2020/1/7 0007 14:40 # @Author : 没有蜡笔的小新 # @E-mail : sqw123az@sina.com # @FileName: decorator.py # @Software: PyCharm # @Blog :https://blog.csdn.net/Asunqingwen # @GitHub :https://github.com/Asunqingwen # @WebSite : labixiaoxin.me import time def log_calls(func): def wrapper(*args, **kwargs): now = time.time() print( "Calling {0} with {1} and {2}".format( func.__name__, args, kwargs ) ) return_value = func(*args, **kwargs) print( "Executed {0} in {1}ms".format(func.__name__, time.time() - now) ) return return_value return wrapper @log_calls def test1(a, b, c): print("\ttest1 called") @log_calls def test2(a, b): print("\ttest2 called") @log_calls def test3(a, b): print("\ttest3 called") time.sleep(1) # test1 = log_calls(test1) # test2 = log_calls(test2) # test3 = log_calls(test3) test1(1, 2, 3) test2(4, b=5) test3(6, 7)
[ "sqw123az@sina.com" ]
sqw123az@sina.com
aa7d5492970481f8d5ecd055913b223e31a05a4a
f93ea26173e6b72ff46b3abb2a5250bfb0636cdd
/tests/test_multiple.py
fc19b1449f2d61c628ba768b08192cf784420648
[ "MIT" ]
permissive
eng-tools/eqsig
53d1dc695ffbe132a7fef871d825d9b7011f821c
8a70f4c7152bc0f0901d457b6acbca256d1a6473
refs/heads/master
2023-02-26T06:58:43.243878
2022-08-16T03:23:04
2022-08-16T03:23:04
125,842,866
22
10
MIT
2023-02-08T00:41:12
2018-03-19T10:46:43
Python
UTF-8
Python
false
false
720
py
import numpy as np from eqsig import multiple def test_same_start(): time = np.linspace(0, 102, 10200) acc = np.sin(time) dt = 0.01 cluster = multiple.Cluster([acc, acc + 0.3], dt=dt) cluster.same_start() diff = np.sum(cluster.values_by_index(0) - cluster.values_by_index(1)) assert diff < 1.0e-10, diff def test_time_match(): time = np.linspace(0, 102, 1020) acc = np.sin(time) dt = 0.01 cluster = multiple.Cluster([acc[:-6], acc[6:]], dt=dt) cluster.time_match(verbose=0) diff = np.sum(cluster.values_by_index(0)[6:-5] - cluster.values_by_index(1)[6:-5]) assert diff == 0.0, diff if __name__ == '__main__': test_same_start() # test_time_match()
[ "maxim.millen@gmail.com" ]
maxim.millen@gmail.com
bec4c93a740437135214e47923fb069380ba88e1
eb61d62ca1f6f0123e3771105f5dfbbd6115138d
/.history/19-22-07-21_20210905233329.py
1c3e07808a594a922ce4ec611612db0be63d70cf
[]
no_license
Alopezm5/CORRECTO-2
e0f14bcc3a88c0e222d10e3261e68532008bc42e
223613f1fb04dce3fac9f82f243cb2f22fe100f3
refs/heads/main
2023-07-29T06:52:48.147424
2021-09-12T20:33:27
2021-09-12T20:33:27
388,995,308
0
0
null
null
null
null
UTF-8
Python
false
false
2,447
py
class MENU (): def __init__(self,titulo,opciones=[]): self.titulo = titulo self.opciones = opciones def menu(self): print(self.titulo) for opcion in self.opciones: print(opcion) opc=input("Elije opcion [1 ..... {}]:".format(len(self.opciones))) return opc opc=" " while opc!="5": menu1=MENU("Menú Principal" , ["1)Calculadora","2)Numeros","3)Listas","4)Cadenas","5)Salir"]) opc=menu1.menu() if opc=="1": opc1=" " while opc1 != "5": menu1=MENU("Menú Calculadora",["1)Suma","2)Resta","3)Multiplicacion" , "4) División" , "5) Salir" ]) opc1=menu1.menu() if opc1 == "1" : print("Opcion Suma") n1=int(input("Ingresar n1: ")) n2=int(input("Ingresar n2: ")) suma=n1+n2 print("{} + {} = {}".format( n1 , n2 , suma )) elif opc1 == "2" : print ( "Opcion Resta" ) n1 = int ( input ( "Ingresar n1:" )) n2 = int ( input ( "Ingresar n2:" )) resta = n1 - n2 print ( "{} - {} = {}".format( n1 , n2 , resta )) elif opc1 == "3" : print ( "Opcion Multiplicacion" ) n1 = int ( input ( "Ingresar n1:" )) n2 = int ( input ( "Ingresar n2:" )) multiplicacion = n1 * n2 print ( "{} * {} = {}".format( n1 , n2 , multiplicacion )) elif opc1 == "4" : print ( "Opcion Division" ) n1 = int ( input ( "Ingresar n1:" )) n2 = int ( input ( "Ingresar n2:" )) division = n1 / n2 print ( "{} / {} = {}".format( n1 , n2 , division )) elif opc1 == "5" : print ( "Opcion Salir" ) elif opc == "2" : menu2 = MENU ( "Menú Numero" , [ "1) Perfecto" , "2) Primo" , "3) Salir" ]) opc2 = input ( "Elije opcion [1 ..... 3]:" ) elif opc == "3" : print ( "Listas de menú" ) elif opc == "4" : print ( "Menú Cadenas" ) elif opc == "5" : print ( "Menú Salir" ) else: print ( "Opcion no valida" )
[ "85761855+Alopezm5@users.noreply.github.com" ]
85761855+Alopezm5@users.noreply.github.com
f448fb85f2fa97dfe22916d51c24dde06f126299
910c97ce255f39af7ef949664b4346e8cb5d6a0e
/managerlib/dblib/.svn/text-base/db_cloudfs_account.py.svn-base
a35cee46cf8948ff6bed248f5925041ed5a0724e
[]
no_license
sun3shines/manager_monitor
f3742a4fde95b456f51d0a18feb78f3d4048c560
f49d741203d8476f2249a49d90fecc86143ac622
refs/heads/master
2021-01-17T06:47:14.375088
2016-04-29T06:43:05
2016-04-29T06:43:05
57,361,217
0
0
null
null
null
null
UTF-8
Python
false
false
997
# -*- coding: utf-8 -*- from urllib import unquote from managerlib.db.db_account import account2id,insert_account,account_exists from managerlib.db.table.stobj import delete_stobj # atdelete -> db_cloudfs_account_delete # atput -> db_cloudfs_account_put # atexists -> db_cloudfs_account_exists # athead -> db_cloudfs_account_head # atmeta -> db_cloudfs_account_meta # atget -> db_cloudfs_account_get # atpost -> db_cloudfs_account_post def db_cloudfs_account_delete(newPath,conn): aid = account2id(conn,newPath) return delete_stobj(conn,aid) def db_cloudfs_account_put(newPath,conn): return insert_account(conn,newPath) def db_cloudfs_account_exists(newPath,conn): return account_exists(conn,newPath) ###################################### def db_cloudfs_account_head(path): return True,'' def db_cloudfs_account_meta(path): return True,'' def db_cloudfs_account_get(path): return True,'' def db_cloudfs_account_post(path): return True,''
[ "sun__shines@163.com" ]
sun__shines@163.com
2576f671397537847ead9f33f72b37b62d262c6c
61004e474b7b2ad0071c16766f0f7874f04f9466
/examples/dataflow-xml-pubsub-to-gcs/python/beamPubSubXml2Gcs.py
714dcc12868880a5d623a66cf9fb253735ed5a8a
[ "Apache-2.0" ]
permissive
GoogleCloudPlatform/professional-services
eb79751efae765a8c691a745e520f44f51bd715c
0f51121b945bd74c7f667e74e8861fceda87565c
refs/heads/main
2023-09-05T02:57:33.328973
2023-08-30T14:40:30
2023-08-30T14:40:30
91,730,359
2,626
1,381
Apache-2.0
2023-09-14T20:13:42
2017-05-18T19:29:27
Python
UTF-8
Python
false
false
6,666
py
# Copyright 2023 Google LLC # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # https://www.apache.org/licenses/LICENSE-2.0 # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import argparse from datetime import datetime import logging import json import random from apache_beam import DoFn, GroupByKey, io, ParDo, Pipeline, \ PTransform, WindowInto, WithKeys, Map from apache_beam.io import fileio from apache_beam.options.pipeline_options import PipelineOptions from apache_beam.transforms.window import FixedWindows import xml.etree.ElementTree as ET class GroupMessagesByFixedWindows(PTransform): """A composite transform that groups Pub/Sub messages based on publish time and outputs a list of tuples, each containing a message and its publish time. """ def __init__(self, window_size, num_shards=5): # Set window size to 60 seconds * window_size. self.window_size = int(window_size * 60) self.num_shards = num_shards def expand(self, pcoll): return ( pcoll # Bind window info to each element using element timestamp (or \ # publish time). | "Window into fixed intervals" >> WindowInto(FixedWindows(self.window_size)) | "Add timestamp to windowed elements" >> ParDo(AddTimestamp()) # Assign a random key to each windowed element based on the \ # number of shards. | "Add key" >> WithKeys( lambda _: random.randint(0, self.num_shards - 1) ) # Group windowed elements by key. All the elements in the same \ # window must fit memory for this. If not, you need to use \ # `beam.util.BatchElements`. | "Group by key" >> GroupByKey() | "Drop shard key after grouping" >> Map(lambda element: element[1]) ) class AddTimestamp(DoFn): def process(self, element, publish_time=DoFn.TimestampParam): """Processes each parsed element by extracting the message body and its received time into a tuple. """ yield ( { "ts": datetime.utcfromtimestamp(float(publish_time)). strftime("%Y-%m-%d %H:%M:%S.%f") } | element ) class ParseXML(DoFn): def process(self, message_body): """Parse all tags and attributes from an XML and serialize them to a dict for later storage.""" try: parsedXml = ET.fromstring(message_body) allTags = [] allTagsText = [] for element in parsedXml: allTags.append(element.tag) allTagsText.append(element.text) yield {"tags": allTags, "text": allTagsText} except Exception as e: yield {"error": str(e), "raw_contents": message_body} def run(project_id, input_topic, gcs_path, window_size, num_shards, runner, region, pipeline_args=None): # Set `save_main_session` to True so DoFns can access globally imported # modules. 
input_topic = "projects/{0}/topics/{1}".format(project_id, input_topic) if gcs_path[-1] == "/": gcs_path = gcs_path[:-1] output_path = "{0}/output/".format(gcs_path) provided_args = { "project": project_id, "runner": runner, "region": region, "staging_location": "{0}/staging/".format(gcs_path), "temp_location": "{0}/temp/".format(gcs_path), "streaming": True, "save_main_session": True } pipeline_options = PipelineOptions( pipeline_args, **provided_args ) with Pipeline(options=pipeline_options) as pipeline: ( pipeline # Because `timestamp_attribute` is unspecified in `ReadFromPubSub`, # Beam binds the publish time returned by the Pub/Sub server for # each message to the element's timestamp parameter, accessible via # `DoFn.TimestampParam`. # https://beam.apache.org/releases/pydoc/current/apache_beam.io.gcp.pubsub.html#apache_beam.io.gcp.pubsub.ReadFromPubSub # https://cloud.google.com/pubsub/docs/stream-messages-dataflow#set_up_your_pubsub_project | "Read from Pub/Sub" >> io.ReadFromPubSub(topic=input_topic) | "Parse XML tags and attributes" >> ParDo(ParseXML()) | "Window into" >> GroupMessagesByFixedWindows(window_size, num_shards) | "Serialize" >> Map(json.dumps, indent = 2) | "Write to GCS" >> fileio.WriteToFiles(path=output_path, shards=0) ) if __name__ == "__main__": logging.getLogger().setLevel(logging.INFO) parser = argparse.ArgumentParser() parser.add_argument( "--project_id", help="The GCP project that hosts the PubSub and Dataflow.", ) parser.add_argument( "--input_topic_id", help="The Cloud Pub/Sub topic to read from.", ) parser.add_argument( "--runner", help="""The beam runner to be used. For cloud Dataflow: 'DataflowRunner'. For local debugging: 'DirectRunner'. [Defaults to: 'DataflowRunner']""", default='DataflowRunner', ) parser.add_argument( "--region", help="The GCP region for Dataflow. [Defaults to: 'us-central1']", default='us-central1', ) parser.add_argument( "--window_size", type=float, default=1.0, help="Output file's window size in minutes. [Defaults to: 1.0]", ) parser.add_argument( "--gcs_path", help="Path of the output GCS file including the prefix.", ) parser.add_argument( "--num_shards", type=int, default=5, help="""Number of shards to use when writing windowed elements to GCS. [Defaults to: 5]""", ) known_args, pipeline_args = parser.parse_known_args() run( known_args.project_id, known_args.input_topic_id, known_args.gcs_path, known_args.window_size, known_args.num_shards, known_args.runner, known_args.region, pipeline_args, )
[ "noreply@github.com" ]
GoogleCloudPlatform.noreply@github.com
a7ee7d01542a6a2c392badd3a337b82978760149
978228e0c7291e6dad04a49ac8fdbd2e17322b6b
/PythonAutomats/Tinder/song_of_the_day_tinder.py
71674fd8278ca4cc0f848d30fdfb6f961e6da1bc
[]
no_license
tdworowy/PythonAutomats
f5edbea96b53e1e452f16457ba44a31e7750d7ab
84d30db03b5be936092622d446b0fc9834dfa2de
refs/heads/master
2022-08-27T21:12:24.963091
2022-08-15T08:39:46
2022-08-15T08:39:46
73,401,085
2
0
null
null
null
null
UTF-8
Python
false
false
1,274
py
import sys from random import choice from Api.Songs import ApiAdapter from Chrome_Driver_Folder.driver_path import get_driver_path from Songs.last_fm_parser import FOLDER_PATH, update_songs_distribution from Tinder.tinder_Api import TinderMessageBot, TinderAdapter from Utils.decorators import log_exception from Youtube.Youtube_bot_requests import get_youtube_url from selenium import webdriver @log_exception() def main(login, password, names): update_songs_distribution() tinder_bot = TinderMessageBot() chrome_driver_path = get_driver_path() + '\\chromedriver.exe' driver = webdriver.Chrome(chrome_driver_path) with open(FOLDER_PATH, 'r') as f: songs_list = f.read() songs_list = songs_list.split("\n") song_title = choice(songs_list) adapter = TinderAdapter(tiderBot=tinder_bot, name='tomasz.dworowy', receivers=names, driver=driver) song = ApiAdapter(adapter) song.my_logging.log().info("Get random song") song.login(login, password) url = get_youtube_url(song_title.strip()) song.sent_messages([url]) if __name__ == '__main__': user = sys.argv[1] passw = sys.argv[2] + " " + sys.argv[3] nams = sys.argv[4] # namesList = ['Ilona','Carol'] names = [nams] main(user, passw, names)
[ "dworowytomasz@gmail.com" ]
dworowytomasz@gmail.com
3988f947afc8104c3abbc1371fb28d19b7677e15
ac6e4102dfb49a4e49de0e2766feb6e80ab0b5c2
/h1/model/website_project_instance_create.py
40e2620a7c7b0b2bb04d8a2d6fc79057e3b28627
[ "MIT" ]
permissive
hyperonecom/h1-client-python
df01f05ad295121e3dd391a3274c41e2f5b88e53
4ce355852ba3120ec1b8f509ab5894a5c08da730
refs/heads/master
2023-04-05T01:51:31.637002
2021-03-29T00:05:41
2021-03-29T00:05:41
319,309,525
0
0
null
null
null
null
UTF-8
Python
false
false
7,357
py
""" HyperOne HyperOne API # noqa: E501 The version of the OpenAPI document: 0.1.0 Generated by: https://openapi-generator.tech """ import re # noqa: F401 import sys # noqa: F401 from h1.model_utils import ( # noqa: F401 ApiTypeError, ModelComposed, ModelNormal, ModelSimple, cached_property, change_keys_js_to_python, convert_js_args_to_python_args, date, datetime, file_type, none_type, validate_get_composed_info, ) def lazy_import(): from h1.model.tag_array import TagArray from h1.model.website_env import WebsiteEnv globals()['TagArray'] = TagArray globals()['WebsiteEnv'] = WebsiteEnv class WebsiteProjectInstanceCreate(ModelNormal): """NOTE: This class is auto generated by OpenAPI Generator. Ref: https://openapi-generator.tech Do not edit the class manually. Attributes: allowed_values (dict): The key is the tuple path to the attribute and the for var_name this is (var_name,). The value is a dict with a capitalized key describing the allowed value and an allowed value. These dicts store the allowed enum values. attribute_map (dict): The key is attribute name and the value is json key in definition. discriminator_value_class_map (dict): A dict to go from the discriminator variable value to the discriminator class name. validations (dict): The key is the tuple path to the attribute and the for var_name this is (var_name,). The value is a dict that stores validations for max_length, min_length, max_items, min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, inclusive_minimum, and regex. additional_properties_type (tuple): A tuple of classes accepted as additional properties values. """ allowed_values = { } validations = { } additional_properties_type = None _nullable = False @cached_property def openapi_types(): """ This must be a method because a model may have properties that are of type self, this must run after the class is loaded Returns openapi_types (dict): The key is attribute name and the value is attribute type. """ lazy_import() return { 'name': (str,), # noqa: E501 'service': (str,), # noqa: E501 'image': (str,), # noqa: E501 'source': (str,), # noqa: E501 'env': ([WebsiteEnv],), # noqa: E501 'tag': (TagArray,), # noqa: E501 } @cached_property def discriminator(): return None attribute_map = { 'name': 'name', # noqa: E501 'service': 'service', # noqa: E501 'image': 'image', # noqa: E501 'source': 'source', # noqa: E501 'env': 'env', # noqa: E501 'tag': 'tag', # noqa: E501 } _composed_schemas = {} required_properties = set([ '_data_store', '_check_type', '_spec_property_naming', '_path_to_item', '_configuration', '_visited_composed_classes', ]) @convert_js_args_to_python_args def __init__(self, name, service, image, *args, **kwargs): # noqa: E501 """WebsiteProjectInstanceCreate - a model defined in OpenAPI Args: name (str): service (str): image (str): Keyword Args: _check_type (bool): if True, values for parameters in openapi_types will be type checked and a TypeError will be raised if the wrong type is input. Defaults to True _path_to_item (tuple/list): This is a list of keys or values to drill down to the model in received_data when deserializing a response _spec_property_naming (bool): True if the variable names in the input data are serialized names, as specified in the OpenAPI document. False if the variable names in the input data are pythonic names, e.g. snake case (default) _configuration (Configuration): the instance to use when deserializing a file_type parameter. If passed, type conversion is attempted If omitted no type conversion is done. 
_visited_composed_classes (tuple): This stores a tuple of classes that we have traveled through so that if we see that class again we will not use its discriminator again. When traveling through a discriminator, the composed schema that is is traveled through is added to this set. For example if Animal has a discriminator petType and we pass in "Dog", and the class Dog allOf includes Animal, we move through Animal once using the discriminator, and pick Dog. Then in Dog, we will make an instance of the Animal class but this time we won't travel through its discriminator because we passed in _visited_composed_classes = (Animal,) source (str): [optional] # noqa: E501 env ([WebsiteEnv]): [optional] # noqa: E501 tag (TagArray): [optional] # noqa: E501 """ _check_type = kwargs.pop('_check_type', True) _spec_property_naming = kwargs.pop('_spec_property_naming', False) _path_to_item = kwargs.pop('_path_to_item', ()) _configuration = kwargs.pop('_configuration', None) _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) if args: raise ApiTypeError( "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( args, self.__class__.__name__, ), path_to_item=_path_to_item, valid_classes=(self.__class__,), ) self._data_store = {} self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item self._configuration = _configuration self._visited_composed_classes = _visited_composed_classes + (self.__class__,) self.name = name self.service = service self.image = image for var_name, var_value in kwargs.items(): if var_name not in self.attribute_map and \ self._configuration is not None and \ self._configuration.discard_unknown_keys and \ self.additional_properties_type is None: # discard variable. continue setattr(self, var_name, var_value)
[ "action@github.com" ]
action@github.com
aefae7fca95e26baee3b1888e265657b3105ea73
2c3f13857d4a915410de5ac9547745eb2769db5f
/eval/e5/scrape_so.py
60e0110c53ba3c6434b54a83d7b9f733bf2aff48
[]
no_license
andrewhead/StackSkim
43a4cf769645bb70202075f8077fa4d5d7be2a4b
9ac11705ff82aa978d1a87177059e665f4e5ebef
refs/heads/master
2020-06-03T16:15:15.127268
2016-01-16T17:16:36
2016-01-16T17:16:36
50,692,945
2
0
null
null
null
null
UTF-8
Python
false
false
3,928
py
#! /usr/bin/env python # encoding: utf-8 from __future__ import unicode_literals import logging import subprocess import requests import json logging.basicConfig(level=logging.INFO, format="%(message)s") SCRAPE_DIR = "scrape" QUESTIONS_OUTFILE = 'questions.json' ANSWERS_OUTFILE = 'answers.json' ANSWERS = [ {'question': 3190798, 'answer': 3219849}, {'question': 14104228, 'answer': 14104369}, {'question': 364015, 'answer': 364074}, {'question': 12872387, 'answer': 12872443}, {'question': 8115087, 'answer': 8115155}, {'question': 25221352, 'answer': 25222518}, {'question': 12591832, 'answer': 12591905}, {'question': 27391812, 'answer': 27414401}, {'question': 4683639, 'answer': 4684210}, {'question': 5362732, 'answer': 5362764}, {'question': 27045539, 'answer': 27523599}, {'question': 19618268, 'answer': 19618531}, {'question': 19087332, 'answer': 19087631}, {'question': 26759118, 'answer': 26759193}, {'question': 5931223, 'answer': 5931253}, {'question': 4362491, 'answer': 4362514}, {'question': 2187821, 'answer': 2187834}, {'question': 9505971, 'answer': 9506077}, {'question': 23740288, 'answer': 23740522}, {'question': 9003288, 'answer': 9003638}, ] QUESTIONS = [ 4980414, 18851438, 22705019, 2592798, 21219150, 14917510, 2130446, 6233805, 27436551, 17828552, 3929301, 27889586, 9893851, 23877406, 1283646, 23438583, 12332532, 17383236, 25356695, 15234524, ] def fetch_questions(question_ids): question_param = ';'.join([str(q) for q in question_ids]) answer_url = 'https://api.stackexchange.com/2.2/questions/' + question_param more_questions = True page_number = 1 all_questions = {'items': []} while more_questions: questions = requests.get(answer_url, params={ 'site': 'stackoverflow', 'page': page_number, 'pagesize': '100', 'filter': '!9YdnSJ*_S', }).json() more_questions = questions['has_more'] page_number += 1 all_questions['items'].extend(questions['items']) return all_questions def fetch_answers(answer_ids): answer_param = ';'.join([str(a) for a in answer_ids]) answer_url = 'https://api.stackexchange.com/2.2/answers/' + answer_param more_answers = True page_number = 1 all_answers = {'items': []} while more_answers: answers = requests.get(answer_url, params={ 'site': 'stackoverflow', 'page': page_number, 'pagesize': '100', 'filter': '!9YdnSM68i', }).json() more_answers = answers['has_more'] page_number += 1 all_answers['items'].extend(answers['items']) return all_answers def wget_address(address): subprocess.call([ "wget", "-P", SCRAPE_DIR, # output to a scrape directory "--adjust-extension", # download HTML pages with .html extension "-nc", # don't download the same file twice "-w", "1", # wait 1s between requests "-p", "-k", # for downloading stylesheets (doesn't work?) address, ]) def main(): ''' Get ground truth answers from StackExchange API. ''' questions = fetch_questions(QUESTIONS) answers = fetch_answers([a['answer'] for a in ANSWERS]) with open(QUESTIONS_OUTFILE, 'w') as qof: json.dump(questions, qof, indent=2) with open(ANSWERS_OUTFILE, 'w') as aof: json.dump(answers, aof, indent=2) ''' StackOverflow content gets fetched to folder "stackoverflow.com" ''' for q in QUESTIONS: wget_address("http://www.stackoverflow.com/questions/%d" % q) for a in ANSWERS: wget_address("www.stackoverflow.com/a/%d/%d" % (a['question'], a['answer'])) if __name__ == '__main__': main()
[ "head.andrewm@gmail.com" ]
head.andrewm@gmail.com
26d5bc1377b3152d42dffb7588d782be1a251f02
55fefb8017a97d049f035f6771e1dfb6e7eb94a2
/investmtype/migrations/0002_auto_20180912_1801.py
7a085e28a5f0cfda3dea08219c2bed6a64ba48cb
[]
no_license
niravhjoshi/DjangoE2ISAapi
d90df84d1788e2f6a0335f707438afc543b02d56
ff74799bdb122dbc3067b3f131663c0d932355c4
refs/heads/master
2020-03-25T18:01:04.356469
2019-04-23T12:02:32
2019-04-23T12:02:32
144,008,958
0
0
null
null
null
null
UTF-8
Python
false
false
426
py
# -*- coding: utf-8 -*- # Generated by Django 1.11.7 on 2018-09-12 12:31 from __future__ import unicode_literals from django.db import migrations class Migration(migrations.Migration): dependencies = [ ('investmtype', '0001_initial'), ] operations = [ migrations.RenameField( model_name='investtypes', old_name='U_id', new_name='UserName', ), ]
[ "nirav.j05@gmail.com" ]
nirav.j05@gmail.com
8985832a9acfc77b38e220afc8b2162c9d8ceccd
30736dab9d8e682e5603d4803349144a5f6a84fb
/sdk/cognitiveservices/azure-cognitiveservices-search-websearch/azure/cognitiveservices/search/websearch/models/ranking_ranking_group_py3.py
1a865ff2a79a51e178c6aa333a5a293db4672915
[ "MIT", "LicenseRef-scancode-generic-cla" ]
permissive
montgomp/azure-sdk-for-python
6fcaffc59f4321852aa71109691e94ad38c66464
0ffb0b0de095b97cbc5b69309bbce0a3b91d3eb4
refs/heads/master
2020-12-06T11:08:01.683369
2020-01-07T23:24:42
2020-01-07T23:24:42
232,445,563
1
0
MIT
2020-01-08T00:45:33
2020-01-08T00:45:33
null
UTF-8
Python
false
false
1,174
py
# coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for # license information. # # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is # regenerated. # -------------------------------------------------------------------------- from msrest.serialization import Model class RankingRankingGroup(Model): """Defines a search results group, such as mainline. All required parameters must be populated in order to send to Azure. :param items: Required. A list of search result items to display in the group. :type items: list[~azure.cognitiveservices.search.websearch.models.RankingRankingItem] """ _validation = { 'items': {'required': True}, } _attribute_map = { 'items': {'key': 'items', 'type': '[RankingRankingItem]'}, } def __init__(self, *, items, **kwargs) -> None: super(RankingRankingGroup, self).__init__(**kwargs) self.items = items
[ "noreply@github.com" ]
montgomp.noreply@github.com
36c5af91c16098ac83d608a74a948a0ebdc15c5d
396f93d8e73c419ef82a94174815a2cecbb8334b
/.history/tester2_20200321213612.py
acc2caebe079475068d5af0e9ab3be845b0ef9ba
[]
no_license
mirfarzam/ArtificialIntelligence-HeuristicAlgorithm-TabuSearch
8c73d9448b916009c9431526864a4441fdeb682a
90b2dca920c85cddd7c1b3335344ac7b10a9b061
refs/heads/master
2021-03-26T21:16:42.561068
2020-04-17T21:44:26
2020-04-17T21:44:26
247,750,502
0
0
null
null
null
null
UTF-8
Python
false
false
1,333
py
import os import subprocess import re from datetime import datetime import time numberOfTests = 10 tabuIteration = '5' tabuDuration = '0' numberOfCities = '10' final_solution = [] print(f"\n\nTest for Tabu Search with this config: \n\tIterations : {tabuIteration} \n\tDuration(Tabu Memory): {tabuDuration} \n\tNumber of Cities: {numberOfCities}") for i in range(0, numberOfTests): process = subprocess.Popen(['./algo_tabou.exe', tabuIteration, tabuDuration, numberOfCities, 'distances_entre_villes_{}.txt'.format(numberOfCities)],stdout=subprocess.PIPE,stderr=subprocess.PIPE) stdout, stderr = process.communicate() result = stdout result = re.sub(r'\s', ' ', str(result)) solution = (re.findall(r'([0-9]{4}) km', result))[-1] final_solution.append(int(solution)) coverage = re.findall(r'On est dans un minimum local a l\'iteration ([0-9]+) ->', result) if coverage != []: coverage = int(coverage[0])+ 1 else: coverage = 5 number_of_solution_before_coverage = coverage print('best found solution is {} and found in iteration {}, number of solutions before coverage : {}'.format(solution, coverage, number_of_solution_before_coverage)) time.sleep( 1 ) print("Summary:") optimum_result = list(filter(lambda x: x == 3473, final_solution)) print(f'number of optimum solutions found: {len(optimum_result)}')
[ "farzam.mirmoeini@gmail.com" ]
farzam.mirmoeini@gmail.com
fa1d5887fe6ef08cd2f3a5e63792396cc36a1d52
934235f70a390a3ba0d7b464cddd10872f31cda3
/rango/server/.history/tango_with_django/tango_with_django/settings_20210102125402.py
2b4db67f09db82b87456b7e41dffde57e8cabd20
[]
no_license
deji100/Projects
6919041ba23e77a5c74e5ab7692bfcee38ececcb
17e64d954d1d7805be57ec5d8d4344e4944889e6
refs/heads/master
2023-04-30T05:25:03.143303
2021-05-20T15:00:43
2021-05-20T15:00:43
338,844,691
0
0
null
null
null
null
UTF-8
Python
false
false
3,283
py
""" Django settings for tango_with_django project. Generated by 'django-admin startproject' using Django 3.1.3. For more information on this file, see https://docs.djangoproject.com/en/3.1/topics/settings/ For the full list of settings and their values, see https://docs.djangoproject.com/en/3.1/ref/settings/ """ import os from pathlib import Path # Build paths inside the project like this: BASE_DIR / 'subdir'. BASE_DIR = Path(__file__).resolve().parent.parent # Quick-start development settings - unsuitable for production # See https://docs.djangoproject.com/en/3.1/howto/deployment/checklist/ # SECURITY WARNING: keep the secret key used in production secret! SECRET_KEY = '2l8=-3%*fjd#4)kl488nssgt%zu1$#l%)q=j3*#=ztcd9)hqq#' # SECURITY WARNING: don't run with debug turned on in production! DEBUG = True ALLOWED_HOSTS = [] # Application definition INSTALLED_APPS = [ 'django.contrib.admin', 'django.contrib.auth', 'django.contrib.contenttypes', 'django.contrib.sessions', 'django.contrib.messages', 'django.contrib.staticfiles', ] MIDDLEWARE = [ 'django.middleware.security.SecurityMiddleware', 'django.contrib.sessions.middleware.SessionMiddleware', 'django.middleware.common.CommonMiddleware', 'django.middleware.csrf.CsrfViewMiddleware', 'django.contrib.auth.middleware.AuthenticationMiddleware', 'django.contrib.messages.middleware.MessageMiddleware', 'django.middleware.clickjacking.XFrameOptionsMiddleware', ] ROOT_URLCONF = 'tango_with_django.urls' TEMPLATES = [ { 'BACKEND': 'django.template.backends.django.DjangoTemplates', 'DIRS': [], 'APP_DIRS': True, 'OPTIONS': { 'context_processors': [ 'django.template.context_processors.debug', 'django.template.context_processors.request', 'django.contrib.auth.context_processors.auth', 'django.contrib.messages.context_processors.messages', ], }, }, ] WSGI_APPLICATION = 'tango_with_django.wsgi.application' # Database # https://docs.djangoproject.com/en/3.1/ref/settings/#databases DATABASES = { 'default': { 'ENGINE': 'django.db.backends.sqlite3', 'NAME': BASE_DIR / 'db.sqlite3', } } AUTH_USER_MODEL = 'rango.U' # Password validation # https://docs.djangoproject.com/en/3.1/ref/settings/#auth-password-validators AUTH_PASSWORD_VALIDATORS = [ { 'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator', }, { 'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator', }, { 'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator', }, { 'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator', }, ] # Internationalization # https://docs.djangoproject.com/en/3.1/topics/i18n/ LANGUAGE_CODE = 'en-us' TIME_ZONE = 'UTC' USE_I18N = True USE_L10N = True USE_TZ = True # Static files (CSS, JavaScript, Images) # https://docs.djangoproject.com/en/3.1/howto/static-files/ STATIC_URL = '/static/' STATICFILE_DIRS = [ os.path.join(BASE_DIR, 'rango/static') ] MEDIA_URL = '/images/' MEDIR_ROOT = os.path.join(BASE_DIR, 'rango/static/images')
[ "68882568+deji100@users.noreply.github.com" ]
68882568+deji100@users.noreply.github.com
83958d051cfb91f802ae0406167170b153d49d14
98dc91f742e13ff4007ffade532c801ce40c6105
/userblog/apps.py
d579959ef1b454682d87a7d8f0f44a52a4288990
[]
no_license
divyajonna/My_First_Task
6149d0da008e549e9f0d8ad52a90fe36e15775e3
c8c5007a2eb9112cecd5b1e6df1ea449be4c02d9
refs/heads/master
2021-05-16T15:51:05.676726
2018-02-12T05:58:21
2018-02-12T05:58:21
119,347,253
0
0
null
null
null
null
UTF-8
Python
false
false
156
py
# -*- coding: utf-8 -*- from __future__ import unicode_literals from django.apps import AppConfig class UserblogConfig(AppConfig): name = 'userblog'
[ "you@example.com" ]
you@example.com
8e994b7fb88bd07b8dd63b38805b854bd33fb1a1
6d9fbe6e6a2abfd8455e92f6dba67a5f02d87f41
/lib/phonenumbers/data/region_KG.py
43602a368e7acf704db78ca08cc9b7ebbdb58f5d
[]
no_license
JamesBrace/InfluenceUWebLaunch
549d0b48ff3259b139cb891a19cb8b5382ffe2c8
332d25940e4b1b45a7a2a8200f77c8413543b199
refs/heads/master
2021-09-04T04:08:47.594900
2018-01-15T16:49:29
2018-01-15T16:49:29
80,778,825
1
1
null
null
null
null
UTF-8
Python
false
false
1,904
py
"""Auto-generated file, do not edit by hand. KG metadata""" from ..phonemetadata import NumberFormat, PhoneNumberDesc, PhoneMetadata PHONE_METADATA_KG = PhoneMetadata(id='KG', country_code=996, international_prefix='00', general_desc=PhoneNumberDesc(national_number_pattern='[235-8]\\d{8,9}', possible_number_pattern='\\d{5,10}', possible_length=(9, 10), possible_length_local_only=(5, 6)), fixed_line=PhoneNumberDesc(national_number_pattern='(?:3(?:1(?:[256]\\d|3[1-9]|47)|2(?:22|3[0-479]|6[0-7])|4(?:22|5[6-9]|6\\d)|5(?:22|3[4-7]|59|6\\d)|6(?:22|5[35-7]|6\\d)|7(?:22|3[468]|4[1-9]|59|[67]\\d)|9(?:22|4[1-8]|6\\d))|6(?:09|12|2[2-4])\\d)\\d{5}', example_number='312123456', possible_length=(9,), possible_length_local_only=(5, 6)), mobile=PhoneNumberDesc(national_number_pattern='(?:20[0-35]|5[124-7]\\d|7[07]\\d)\\d{6}', possible_number_pattern='\\d{9}', example_number='700123456', possible_length=(9,)), toll_free=PhoneNumberDesc(national_number_pattern='800\\d{6,7}', possible_number_pattern='\\d{9,10}', example_number='800123456', possible_length=(9, 10)), premium_rate=PhoneNumberDesc(), shared_cost=PhoneNumberDesc(), personal_number=PhoneNumberDesc(), voip=PhoneNumberDesc(), pager=PhoneNumberDesc(), uan=PhoneNumberDesc(), voicemail=PhoneNumberDesc(), no_international_dialling=PhoneNumberDesc(), national_prefix='0', national_prefix_for_parsing='0', number_format=[NumberFormat(pattern='(\\d{3})(\\d{3})(\\d{3})', format='\\1 \\2 \\3', leading_digits_pattern=['[25-7]|31[25]'], national_prefix_formatting_rule='0\\1'), NumberFormat(pattern='(\\d{4})(\\d{5})', format='\\1 \\2', leading_digits_pattern=['3(?:1[36]|[2-9])'], national_prefix_formatting_rule='0\\1'), NumberFormat(pattern='(\\d{3})(\\d{3})(\\d)(\\d{3})', format='\\1 \\2 \\3 \\4', leading_digits_pattern=['8'], national_prefix_formatting_rule='0\\1')])
[ "james.brace@mail.mcgill.ca" ]
james.brace@mail.mcgill.ca
22c577217301c191b4d44fd8db9ea4e06b3e4e6d
d8673a8b9a34292b54285edbf900d7e130ec39b8
/instant_generator/migrations/0006_auto_20200514_1040.py
16f69bd9642ca9b3b15c7801742a1d7161ef6bae
[]
no_license
AzeezBello/toolx
7383f43e500f300062193d8b43254c0b7af53dbf
daf6a7d585a4b72ace47b24ec86828fc6a2d2982
refs/heads/master
2022-03-07T10:12:32.915043
2022-02-16T00:01:18
2022-02-16T00:01:18
253,473,631
0
0
null
2022-02-16T00:02:45
2020-04-06T11:11:21
HTML
UTF-8
Python
false
false
426
py
# Generated by Django 2.2.9 on 2020-05-14 09:40 from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('instant_generator', '0005_auto_20200425_0018'), ] operations = [ migrations.AlterField( model_name='instantgenerator', name='Identify_the_Problem_Your_Audience_Have', field=models.TextField(), ), ]
[ "azeez@scholarx.co" ]
azeez@scholarx.co
c41a8a3c97568c0f098dbb43a50bcf87af787237
b742abb440174ec59f56a334f14871f3accc1743
/util/IpUtil.py
ab987e3cf1d64a0ca66c52b6985f7680bc9bd341
[]
no_license
sunshineywz123/baymax
00e92b71b599df6ce39902652b78167a51100002
25518474cb09644c34febaf556fe8a0449dc7da4
refs/heads/master
2021-12-30T06:37:13.224221
2018-02-07T08:25:03
2018-02-07T08:25:03
null
0
0
null
null
null
null
UTF-8
Python
false
false
232
py
# -*- coding: utf-8 -*- __author__ = 'likangwei' import socket def get_ip_address(): return socket.gethostbyname(socket.gethostname())  # get the local IP def get_host_name(): return socket.gethostname() print get_ip_address()
[ "262708239@qq.com" ]
262708239@qq.com
ecc1f1d2cd3a78e2dc7168fd9d413dc5f440fbe5
2b0eab74af8d23244ff11699830f9bb10fbd717a
/fac/migrations/0088_folder_custom_form_data.py
2743e0be6a25e3364aacb4075b2da20e62c2de44
[]
no_license
alexandrenorman/mixeur
c7e25cd20b03c78b361cb40e3e359a6dc5d9b06b
95d21cd6036a99c5f399b700a5426e9e2e17e878
refs/heads/main
2023-03-13T23:50:11.800627
2021-03-07T15:49:15
2021-03-07T15:49:15
345,384,858
0
0
null
null
null
null
UTF-8
Python
false
false
471
py
# Generated by Django 2.2.17 on 2020-11-24 10:54 import django.contrib.postgres.fields.jsonb from django.db import migrations class Migration(migrations.Migration): dependencies = [ ('fac', '0087_project_custom_form_data'), ] operations = [ migrations.AddField( model_name='folder', name='custom_form_data', field=django.contrib.postgres.fields.jsonb.JSONField(blank=True, null=True), ), ]
[ "norman@xael.org" ]
norman@xael.org
7570fb3bb7d13173c86545c05e5965ec64f35c4c
1b1190a127a21bfd096529d3a3f20d05151b2276
/opsweb4/common/urls.py
d7b6911ed326a13da88a4b690cf0f28bab08413b
[]
no_license
cucy/zrdops4
a9d37d9dd7cfba5a3ecb1180094621754d97c5e9
f0609e90e5a944d30a65d6918b2bfcfd63a92c26
refs/heads/master
2021-01-19T14:39:54.315360
2017-09-24T04:01:27
2017-09-24T04:01:27
88,176,943
0
1
null
null
null
null
UTF-8
Python
false
false
438
py
from django.conf.urls import url from django.contrib import admin from django.contrib.auth.mixins import LoginRequiredMixin from django.views.generic import TemplateView from common.views import success_view, error_view, IndexView urlpatterns = [ # generic views # home page url(r'^$', IndexView.as_view(), name='index'), url(r'^success/$', success_view, name='success'), url(r'^error/$', error_view, name='error'), ]
[ "292016176@qq.com" ]
292016176@qq.com
5fbe5a1bfc9e491ef9f1597760db1d4c54b41b56
f7ca89772fc3b19424f537895957cbf3fcafece3
/nusa/version.py
b855b1a6a59a93d4da0487fc4064f1fdbc94ba7d
[ "MIT" ]
permissive
OrganizationUsername/nusa
d829d341da09bb7d10e7d4f630d63333c96ed2e0
05623a72b892330e4b0e059a03ac4614da934ce9
refs/heads/master
2022-12-20T11:51:14.221422
2020-10-07T19:10:25
2020-10-07T19:10:25
null
0
0
null
null
null
null
UTF-8
Python
false
false
168
py
""" NuSA (Numerical Structural Analysis in Python) Author: Pedro Jorge De Los Santos E-mail: delossantosmfq@gmail.com License: MIT License """ __version__ = "0.3.dev0"
[ "delossantosmfq@gmail.com" ]
delossantosmfq@gmail.com
5d85feea87db2cd7b9f58a5a1fb29a01ab6a72f2
32bae996c594282515193ecb2357feb4f3004944
/sap/cli/datadefinition.py
8ec403e875616f73661a190f86cd5fe392adeaa4
[ "Apache-2.0" ]
permissive
corani/sapcli
2559c59073d64285dfe606c0afe491826d2a72ea
7e81776afa08be0c454c0f86754c8a9b5d938522
refs/heads/master
2022-11-13T12:27:44.016007
2020-06-08T11:19:40
2020-06-08T11:20:09
273,421,126
1
0
Apache-2.0
2020-06-19T06:29:13
2020-06-19T06:29:12
null
UTF-8
Python
false
false
954
py
"""ADT proxy for Data Definition (CDS)""" import sap.adt import sap.adt.wb import sap.cli.core import sap.cli.object class CommandGroup(sap.cli.core.CommandGroup): """Adapter converting command line parameters to sap.adt.DataDefinition methods calls. """ def __init__(self): super(CommandGroup, self).__init__('ddl') @CommandGroup.argument('name') @CommandGroup.command() def read(connection, args): """Prints it out based on command line configuration. """ ddl = sap.adt.DataDefinition(connection, args.name) print(ddl.text) @CommandGroup.argument('name', nargs='+') @CommandGroup.command() def activate(connection, args): """Actives the given class. """ activator = sap.cli.wb.ObjectActivationWorker() activated_items = ((name, sap.adt.DataDefinition(connection, name)) for name in args.name) sap.cli.object.activate_object_list(activator, activated_items, count=len(args.name))
[ "jakub@thefilaks.net" ]
jakub@thefilaks.net
9a9deb90c6765a6bc613f4f39852d1b3fc01d628
0f850c7a6746174c3ecc20884e6dc7b2aa6bcce1
/model.py
ca0e76717541a32c12900d843baad3651fbc293f
[]
no_license
wnd2da/launcher_gateone
9e1831637606885374dcfeca36637b20fdcfcbbb
601b00cdaf179e7cdd96e63f9aa8abd427f52cf9
refs/heads/master
2022-12-04T13:38:14.346940
2020-08-23T04:02:27
2020-08-23T04:02:27
null
0
0
null
null
null
null
UTF-8
Python
false
false
1,102
py
# -*- coding: utf-8 -*- ######################################################### # python import os import traceback import json # third-party # sjva 공용 from framework.logger import get_logger from framework import db, app, path_app_root # 패키지 from .plugin import package_name, logger db_file = os.path.join(path_app_root, 'data', 'db', '%s.db' % package_name) app.config['SQLALCHEMY_BINDS'][package_name] = 'sqlite:///%s' % (db_file) class ModelSetting(db.Model): __tablename__ = 'plugin_%s_setting' % package_name __table_args__ = {'mysql_collate': 'utf8_general_ci'} __bind_key__ = package_name id = db.Column(db.Integer, primary_key=True) key = db.Column(db.String(100), unique=True, nullable=False) value = db.Column(db.String, nullable=False) def __init__(self, key, value): self.key = key self.value = value def __repr__(self): return repr(self.as_dict()) def as_dict(self): return {x.name: getattr(self, x.name) for x in self.__table__.columns} #########################################################
[ "cybersol@naver.com" ]
cybersol@naver.com
30b010d2049deb9889ac634c2f45af497d8f3046
5a281cb78335e06c631181720546f6876005d4e5
/ec2-api-8.0.0/ec2api/tests/unit/test_customer_gateway.py
e4dfd8ae776ef4561c265897a509d233ecc7cf74
[ "Apache-2.0" ]
permissive
scottwedge/OpenStack-Stein
d25b2a5bb54a714fc23f0ff0c11fb1fdacad85e8
7077d1f602031dace92916f14e36b124f474de15
refs/heads/master
2021-03-22T16:07:19.561504
2020-03-15T01:31:10
2020-03-15T01:31:10
247,380,811
0
0
Apache-2.0
2020-03-15T01:24:15
2020-03-15T01:24:15
null
UTF-8
Python
false
false
5,305
py
# Copyright 2014 # The Cloudscaling Group, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import mock from ec2api.tests.unit import base from ec2api.tests.unit import fakes from ec2api.tests.unit import matchers from ec2api.tests.unit import tools class CustomerGatewayTestCase(base.ApiTestCase): def test_create_customer_gateway(self): self.db_api.add_item.side_effect = ( tools.get_db_api_add_item(fakes.ID_EC2_CUSTOMER_GATEWAY_2)) resp = self.execute('CreateCustomerGateway', {'IpAddress': fakes.IP_CUSTOMER_GATEWAY_ADDRESS_2, 'Type': 'ipsec.1'}) self.assertEqual({'customerGateway': fakes.EC2_CUSTOMER_GATEWAY_2}, resp) self.db_api.add_item.assert_called_once_with( mock.ANY, 'cgw', {'ip_address': fakes.IP_CUSTOMER_GATEWAY_ADDRESS_2}) resp = self.execute('CreateCustomerGateway', {'IpAddress': fakes.IP_CUSTOMER_GATEWAY_ADDRESS_2, 'Type': 'ipsec.1', 'BgpAsn': '65000'}) self.assertEqual({'customerGateway': fakes.EC2_CUSTOMER_GATEWAY_2}, resp) def test_create_customer_gateway_idempotent(self): self.set_mock_db_items(fakes.DB_CUSTOMER_GATEWAY_1) resp = self.execute('CreateCustomerGateway', {'IpAddress': fakes.IP_CUSTOMER_GATEWAY_ADDRESS_1, 'Type': 'ipsec.1'}) self.assertEqual({'customerGateway': fakes.EC2_CUSTOMER_GATEWAY_1}, resp) self.assertFalse(self.db_api.add_item.called) resp = self.execute('CreateCustomerGateway', {'IpAddress': fakes.IP_CUSTOMER_GATEWAY_ADDRESS_1, 'Type': 'ipsec.1', 'BgpAsn': '65000'}) self.assertEqual({'customerGateway': fakes.EC2_CUSTOMER_GATEWAY_1}, resp) self.assertFalse(self.db_api.add_item.called) def test_create_customer_gateway_invalid_parameters(self): self.assert_execution_error( 'Unsupported', 'CreateCustomerGateway', {'IpAddress': fakes.IP_CUSTOMER_GATEWAY_ADDRESS_1, 'Type': 'ipsec.1', 'BgpAsn': '456'}) def test_delete_customer_gateway(self): self.set_mock_db_items(fakes.DB_CUSTOMER_GATEWAY_2) resp = self.execute( 'DeleteCustomerGateway', {'CustomerGatewayId': fakes.ID_EC2_CUSTOMER_GATEWAY_2}) self.assertEqual({'return': True}, resp) self.db_api.delete_item.assert_called_once_with( mock.ANY, fakes.ID_EC2_CUSTOMER_GATEWAY_2) def test_delete_customer_gateway_invalid_parameters(self): self.set_mock_db_items() self.assert_execution_error( 'InvalidCustomerGatewayID.NotFound', 'DeleteCustomerGateway', {'CustomerGatewayId': fakes.ID_EC2_CUSTOMER_GATEWAY_2}) self.assertFalse(self.db_api.delete_item.called) self.set_mock_db_items(fakes.DB_CUSTOMER_GATEWAY_1, fakes.DB_VPN_CONNECTION_1) self.assert_execution_error( 'IncorrectState', 'DeleteCustomerGateway', {'CustomerGatewayId': fakes.ID_EC2_CUSTOMER_GATEWAY_1}) self.assertFalse(self.db_api.delete_item.called) def test_describe_customer_gateways(self): self.set_mock_db_items(fakes.DB_CUSTOMER_GATEWAY_1, fakes.DB_CUSTOMER_GATEWAY_2) resp = self.execute('DescribeCustomerGateways', {}) self.assertThat(resp['customerGatewaySet'], matchers.ListMatches([fakes.EC2_CUSTOMER_GATEWAY_1, fakes.EC2_CUSTOMER_GATEWAY_2])) resp = self.execute( 'DescribeCustomerGateways', {'CustomerGatewayId.1': fakes.ID_EC2_CUSTOMER_GATEWAY_2}) self.assertThat( 
resp['customerGatewaySet'], matchers.ListMatches([fakes.EC2_CUSTOMER_GATEWAY_2])) self.db_api.get_items_by_ids.assert_called_once_with( mock.ANY, set([fakes.ID_EC2_CUSTOMER_GATEWAY_2])) self.check_filtering( 'DescribeCustomerGateways', 'customerGatewaySet', [('bgp-asn', 65000), ('customer-gateway-id', fakes.ID_EC2_CUSTOMER_GATEWAY_2), ('ip-address', fakes.IP_CUSTOMER_GATEWAY_ADDRESS_2), ('state', 'available'), ('type', 'ipsec.1')]) self.check_tag_support( 'DescribeCustomerGateways', 'customerGatewaySet', fakes.ID_EC2_CUSTOMER_GATEWAY_2, 'customerGatewayId')
[ "Wayne Gong@minbgong-winvm.cisco.com" ]
Wayne Gong@minbgong-winvm.cisco.com
60a83fbe0010450f8a95ef1138802bc95b62c8fa
67b5c4a03c3da2808054cfabc4001f05c7fdac49
/dataset/cifar_dataset.py
7e78aa20301bc7fdaf7dd7f7f646b65b59594f87
[]
no_license
dannieldwt/deep_learning_algorithm
411b1ffef4fdea1e0a42a09bee82c68bab17bffc
e2a37a378c88e20560ef6c0e8187a751905a51b1
refs/heads/master
2022-04-10T03:46:19.788919
2020-01-18T14:16:14
2020-01-18T14:16:14
null
0
0
null
null
null
null
UTF-8
Python
false
false
5,403
py
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Tue Jun 11 17:54:50 2019

@author: ubuntu
"""
import pickle
import numpy as np
from dataset.base_dataset import BasePytorchDataset

class Cifar10Dataset(BasePytorchDataset):
    """Original dataset: http://www.cs.toronto.edu/~kriz/cifar.html
    cifar10: 10 classes, 6,000 images per class
    cifar100: 100 classes, 600 images per class
    Each image is a 32x32 RGB thumbnail, 60,000 images in total: 50,000 for
    training and 10,000 for testing.
    The dataset ships without an index, so it is loaded into memory in one pass.
    Input: data_type (dataset split), either 'train' or 'test'
    Output: n,h,w,c in BGR order; all source images are kept in this format
    (the same convention as voc/coco).
    """
    def __init__(self,
                 root_path='/home/ubuntu/MyDatasets/cifar-10-batches-py/',
                 data_type='train',
                 img_transform=None,
                 label_transform=None,
                 bbox_transform=None,
                 **kwargs):
        super().__init__()

        self.img_transform = img_transform
        self.label_transform = label_transform
        self.bbox_transform = bbox_transform

        train_path = [root_path + 'data_batch_1',
                      root_path + 'data_batch_2',
                      root_path + 'data_batch_3',
                      root_path + 'data_batch_4',
                      root_path + 'data_batch_5']
        test_path = [root_path + 'test_batch']
        if data_type == 'train':
            self.path = train_path
        elif data_type == 'test':
            self.path = test_path
        else:
            raise ValueError('wrong data type, only support train/test.')
        self.meta_path = root_path + 'batches.meta'

        dataset = self.get_dataset()
        self.imgs = dataset['data']
        self.labels = dataset['target']
        self.bboxes = dataset.get('bbox', None)
        self.CLASSES = dataset['target_names']

    def get_dataset(self):
        datas = []
        labels = []
        # load the label names
        with open(self.meta_path, 'rb') as f:  # follows the python snippet on the cifar website
            dict = pickle.load(f, encoding='bytes')
            label_names = [label.decode('utf-8') for label in dict[b'label_names']]  # decode() drops the b'' prefix
        # load the image data
        for path in self.path:
            with open(path, 'rb') as f:
                dict = pickle.load(f, encoding='bytes')
                data = dict[b'data']                # (10000, 3072)
                label = np.array(dict[b'labels'])   # (10000,)

                datas.append(data)
                labels.append(label)
        cat_datas = np.concatenate(datas, axis=0)   # (n, 3072)->(50000,3072)
        cat_labels = np.concatenate(labels)         # (n,)->(50000,)
        # split out the R/G/B planes into (C,H,W): the original row layout is documented on the official site
        cat_datas = cat_datas.reshape(-1, 3, 32, 32).transpose(0,2,3,1)[...,[2,1,0]]  # (b,c,h,w)->(b,h,w,c), rgb->bgr
        # return the data in sklearn-style dict form
        dataset = {}
        dataset['data'] = cat_datas
        dataset['target'] = cat_labels
        dataset['target_names'] = label_names

        return dataset

    def __getitem__(self, idx):
        """Most datasets return several separate variables; here a dict is returned
        instead, and the custom collate_fn handles the stacking.
        Note: the dict should behave like an OrderedDict so the custom collate_fn
        does not break.
        """
        data_dict = {}
        img = self.imgs[idx]
        label = self.labels[idx]
        if self.label_transform is not None:
            label = self.label_transform(label)
        if self.img_transform is not None:
            img, ori_shape, scale_shape, pad_shape, scale_factor, flip = self.img_transform(img)  # the transform outputs img(img, ori_shape, scale_factor), label
            img_meta = dict(ori_shape = ori_shape,
                            scale_shape = scale_shape,
                            pad_shape = pad_shape,
                            scale_factor = scale_factor,
                            flip = flip)

            data_dict = dict(img = img,
                             img_meta = img_meta,
                             gt_labels = label,
                             stack_list = ['img'])
        return data_dict

    def __len__(self):
        return len(self.imgs)


class Cifar100Dataset(Cifar10Dataset):
    """Original dataset: http://www.cs.toronto.edu/~kriz/cifar.html
    cifar10: 10 classes, 6,000 images per class
    cifar100: 100 classes, 600 images per class
    Each image is a 32x32 RGB thumbnail, 60,000 images in total: 50,000 for
    training and 10,000 for testing.
    """
    def __init__(self, root_path='../dataset/source/cifar100/', data_type='train',
                 norm=None, label_transform_dict=None, one_hot=None, binary=None, shuffle=None):

        train_path = [root_path + 'train']
        test_path = [root_path + 'test']
        if data_type == 'train':
            self.path = train_path
        elif data_type == 'test':
            self.path = test_path
        else:
            raise ValueError('wrong data type, only support train/test.')
        # meta_path must be a single path string (not a list), since get_dataset() opens it directly;
        # note that the CIFAR-100 pickle keys also differ from CIFAR-10 ('fine_label_names'/'fine_labels').
        self.meta_path = root_path + 'meta'

        dataset = self.get_dataset()
        self.imgs = dataset['data']
        self.labels = dataset['target']
        self.CLASSES = dataset['target_names']
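For reference, the (N, 3072) -> (N, 32, 32, 3) channel unpacking done in get_dataset() can be checked in isolation. This is a minimal numpy-only sketch on a synthetic batch row (no pickled CIFAR files needed); the constant pixel values are purely illustrative:

import numpy as np

# Build one fake CIFAR-style row: 1024 R bytes, then 1024 G bytes, then 1024 B bytes.
fake_row = np.concatenate([np.full(1024, 10, dtype=np.uint8),   # R plane
                           np.full(1024, 20, dtype=np.uint8),   # G plane
                           np.full(1024, 30, dtype=np.uint8)])  # B plane
batch = fake_row[None, :]                                       # (1, 3072), same layout as dict[b'data'] rows

# Same transform as get_dataset(): (N,3072)->(N,3,32,32)->(N,32,32,3), then RGB->BGR.
imgs = batch.reshape(-1, 3, 32, 32).transpose(0, 2, 3, 1)[..., [2, 1, 0]]

assert imgs.shape == (1, 32, 32, 3)
assert tuple(imgs[0, 0, 0]) == (30, 20, 10)   # each pixel now reads (B, G, R)
print(imgs.shape, imgs[0, 0, 0])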
[ "ximitiejiang@163.com" ]
ximitiejiang@163.com
f5472639f09abf2e99dd3af4f8b4e77684efe070
09e57dd1374713f06b70d7b37a580130d9bbab0d
/benchmark/startQiskit2433.py
e2e631040a9f7275ba486935a2c37c61f5898b14
[ "BSD-3-Clause" ]
permissive
UCLA-SEAL/QDiff
ad53650034897abb5941e74539e3aee8edb600ab
d968cbc47fe926b7f88b4adf10490f1edd6f8819
refs/heads/main
2023-08-05T04:52:24.961998
2021-09-19T02:56:16
2021-09-19T02:56:16
405,159,939
2
0
null
null
null
null
UTF-8
Python
false
false
4,149
py
# qubit number=4 # total number=39 import cirq import qiskit from qiskit import QuantumCircuit, QuantumRegister, ClassicalRegister from qiskit import BasicAer, execute, transpile from pprint import pprint from qiskit.test.mock import FakeVigo from math import log2 import numpy as np import networkx as nx def bitwise_xor(s: str, t: str) -> str: length = len(s) res = [] for i in range(length): res.append(str(int(s[i]) ^ int(t[i]))) return ''.join(res[::-1]) def bitwise_dot(s: str, t: str) -> str: length = len(s) res = 0 for i in range(length): res += int(s[i]) * int(t[i]) return str(res % 2) def build_oracle(n: int, f) -> QuantumCircuit: # implement the oracle O_f # NOTE: use multi_control_toffoli_gate ('noancilla' mode) # https://qiskit.org/documentation/_modules/qiskit/aqua/circuits/gates/multi_control_toffoli_gate.html # https://quantumcomputing.stackexchange.com/questions/3943/how-do-you-implement-the-toffoli-gate-using-only-single-qubit-and-cnot-gates # https://quantumcomputing.stackexchange.com/questions/2177/how-can-i-implement-an-n-bit-toffoli-gate controls = QuantumRegister(n, "ofc") target = QuantumRegister(1, "oft") oracle = QuantumCircuit(controls, target, name="Of") for i in range(2 ** n): rep = np.binary_repr(i, n) if f(rep) == "1": for j in range(n): if rep[j] == "0": oracle.x(controls[j]) oracle.mct(controls, target[0], None, mode='noancilla') for j in range(n): if rep[j] == "0": oracle.x(controls[j]) # oracle.barrier() return oracle def make_circuit(n:int,f) -> QuantumCircuit: # circuit begin input_qubit = QuantumRegister(n,"qc") classical = ClassicalRegister(n, "qm") prog = QuantumCircuit(input_qubit, classical) prog.h(input_qubit[3]) # number=16 prog.cz(input_qubit[0],input_qubit[3]) # number=17 prog.rx(-0.5686282702997527,input_qubit[3]) # number=32 prog.h(input_qubit[3]) # number=18 prog.h(input_qubit[3]) # number=26 prog.cz(input_qubit[0],input_qubit[3]) # number=27 prog.h(input_qubit[3]) # number=28 prog.x(input_qubit[3]) # number=21 prog.rx(0.4241150082346221,input_qubit[2]) # number=33 prog.cx(input_qubit[0],input_qubit[3]) # number=22 prog.cx(input_qubit[0],input_qubit[3]) # number=12 prog.h(input_qubit[1]) # number=2 prog.h(input_qubit[2]) # number=3 prog.h(input_qubit[3]) # number=4 prog.h(input_qubit[0]) # number=5 oracle = build_oracle(n-1, f) prog.append(oracle.to_gate(),[input_qubit[i] for i in range(n-1)]+[input_qubit[n-1]]) prog.h(input_qubit[1]) # number=6 prog.h(input_qubit[2]) # number=23 prog.cz(input_qubit[1],input_qubit[2]) # number=24 prog.h(input_qubit[2]) # number=25 prog.h(input_qubit[2]) # number=7 prog.h(input_qubit[3]) # number=8 prog.h(input_qubit[0]) # number=34 prog.cz(input_qubit[2],input_qubit[0]) # number=35 prog.h(input_qubit[0]) # number=36 prog.z(input_qubit[2]) # number=30 prog.cx(input_qubit[2],input_qubit[0]) # number=31 prog.h(input_qubit[0]) # number=9 prog.y(input_qubit[0]) # number=14 prog.y(input_qubit[0]) # number=15 prog.swap(input_qubit[2],input_qubit[0]) # number=37 prog.swap(input_qubit[2],input_qubit[0]) # number=38 # circuit end for i in range(n): prog.measure(input_qubit[i], classical[i]) return prog if __name__ == '__main__': a = "111" b = "0" f = lambda rep: bitwise_xor(bitwise_dot(a, rep), b) prog = make_circuit(4,f) backend = BasicAer.get_backend('qasm_simulator') sample_shot =8000 info = execute(prog, backend=backend, shots=sample_shot).result().get_counts() backend = FakeVigo() circuit1 = transpile(prog,backend,optimization_level=2) writefile = open("../data/startQiskit2433.csv","w") print(info,file=writefile) 
print("results end", file=writefile) print(circuit1.__len__(),file=writefile) print(circuit1,file=writefile) writefile.close()
[ "wangjiyuan123@yeah.net" ]
wangjiyuan123@yeah.net
9fce71be596e167cbdbd8b915b98f9c62c586643
a90aa4871684f6f24aa5b0daf2ece384418c748b
/basic/python/2_applica/3_scrapy/2_process/multiprocess.py
c422da3970f695c4de502a064be611270dd06869
[]
no_license
Martians/code
fed5735b106963de79b18cc546624893665066cd
653e2c595f4ac011aed7102ca26b842d4f6beaaf
refs/heads/master
2021-07-11T19:22:24.858037
2019-02-22T13:04:55
2019-02-22T13:04:55
110,106,407
1
0
null
null
null
null
UTF-8
Python
false
false
5,604
py
# coding=utf-8
'''
    Plan: the database keeps track of the URLs, and local multi-process workers pull the pages. Lesson 3.
    Reference: https://github.com/hezhen/spider-course-4/multi-process

## Install
    pip install ConfigParser
        ~/.pyenv/versions/3.6.5/lib/python3.6$ cp configparser.py ConfigParser.py
    pip install mysql-connector

## Open questions
1. Encoding problems and similar issues that show up while downloading
2. How should download failures be handled? Retries are needed so no information is lost
3. How to probe the maximum per-IP concurrency and crawl rate a site tolerates, and what probing strategy to use
'''
import hashlib
import os
import threading
from dbmanager import DBConnector
from lxml import etree
import time
import urllib3

'''
## Program notes
1. Multiple processes and threads pull pages at the same time; conflicts are resolved automatically at the database level
2. Full resume-from-checkpoint is not supported:
    a task stuck in the downloading state will not be executed again after a restart
3. URLs that have already been crawled are recorded in the database, so no dedicated bloom filter is needed
4. By default the program clears all data at startup; comment that out before running several processes together

## Actual logic (identical for every thread)
1. Fetch a task URL: either one in the downloading state that has timed out, or one in the new state
2. Set the task state to downloading and record a timeout
3. Download the page, parse out child URLs and insert them into the database
4. When the task is fully finished, set its state to done
'''

dest_url = "http://www.mafengwo.cn"
#dest_url = "http://www.sohu.com"

class Crawling:
    request_headers = {
        'host': dest_url[7:],
        'connection': "keep-alive",
        'cache-control': "no-cache",
        'upgrade-insecure-requests': "1",
        'user-agent': "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_12_2) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/55.0.2883.95 Safari/537.36",
        'accept': "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8",
        'accept-language': "zh-CN,en-US;q=0.8,en;q=0.6"
    }
    max_level = 3

    dir_name = "download"
    if not os.path.exists(dir_name):
        os.mkdir(dir_name)

def file_path(url):
    return "{0}/{1}{2}".format(crawler.dir_name, url[7:].replace('/', '_'),
                               "" if url.endswith(".html") else ".html")

'''
Download the current page, parse it, and push the URLs found on it into the crawl queue
'''
def get_page(url, index, depth):
    try:
        path = file_path(url)
        print("try to download [{0}] - {1}".format(depth, url))

        http = urllib3.PoolManager()
        response = http.request("GET", url, headers = crawler.request_headers)
        page = response.data

        if page != b"":
            file = open(path, "wb+")
            file.write(page)
            file.close()
        else:
            print("-- zero length for [{}]".format(url))
            return

    except urllib3.exceptions.HTTPError as err:  # urllib3.exceptions is a module; catch its base exception class instead
        print("download {}, urllib3 err: {}".format(url, err))
        return
    except IOError as err:
        print("download {}, IOError err: {}".format(url, err))
        return
    except Exception as err:
        print("download {}, exception err: {}".format(url, err))
        return

    '''
    the crawl depth is limited here
    '''
    if depth > crawler.max_level:
        print("url [{}], exceed depth {}".format(url, depth))
    else:
        parse_page(url, index, depth, page)

    dbmanager.fininsh(index)
    return

def parse_page(url, index, depth, page):
    try:
        ''' page is of type bytes '''
        html = etree.HTML(page.lower().decode('utf-8'))
        hrefs = html.xpath(u"//a")

    except Exception as err:
        print("length: {}, parse {}, err: {}".format(len(page), url, err))
        time.sleep(0.5)
        return

    for href in hrefs:
        try:
            if 'href' in href.attrib:
                val = href.attrib['href']

                if val.find('javascript') != -1:
                    continue

                if val.startswith("http://") is False:
                    if val.startswith('/'):
                        val = dest_url + val
                    else:
                        continue

                if val.startswith(dest_url) is False:
                    continue

                if val[-1] == '/':
                    val = val[0:-1]

                dbmanager.enqueue(val, depth + 1)

        except ValueError as err:
            print("parse {}, err: {}".format(url, err))
    return

total_thread = 3
threads = []
start_time = time.time()

crawler = Crawling()
dbmanager = DBConnector(total_thread)

dbmanager.enqueue(dest_url, 0)
task = dbmanager.dequeue()
get_page(task['url'], task['index'], task['depth'])

CRAWL_DELAY = 0.6

while True:
    task = dbmanager.dequeue()
    if task is None:
        for t in threads:
            t.join()
        break

    while True:
        for t in threads[:]:   # iterate over a copy so removing finished threads does not skip entries
            if not t.is_alive():
                threads.remove(t)

        if len(threads) >= total_thread:
            time.sleep(0.6)
            continue

        try:
            t = threading.Thread(target=get_page,
                                 name=None,
                                 args=(task['url'],
                                       task['index'], task['depth']))
            t.setDaemon(True)
            t.start()
            threads.append(t)
            time.sleep(CRAWL_DELAY)
            break
        except:
            print("fail to start thread")
            exit(0)
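The script depends on a dbmanager.DBConnector that is not part of this file; the calls it makes (enqueue, dequeue, fininsh, and the 'url'/'index'/'depth' task keys) suggest a small task-queue contract. Below is a hypothetical in-memory stand-in, useful for exercising get_page/parse_page without MySQL; the class name and its internals are assumptions, only the method names and dict keys follow the usage above:

class InMemoryConnector:
    """Hypothetical drop-in for dbmanager.DBConnector, for local testing only."""

    def __init__(self, max_workers):
        self.max_workers = max_workers
        self.tasks = []          # list of dicts: {'index', 'url', 'depth', 'status'}
        self.seen = set()        # de-duplication in place of a DB unique key

    def enqueue(self, url, depth):
        if url in self.seen:
            return
        self.seen.add(url)
        self.tasks.append({'index': len(self.tasks), 'url': url,
                           'depth': depth, 'status': 'new'})

    def dequeue(self):
        for task in self.tasks:
            if task['status'] == 'new':
                task['status'] = 'downloading'
                return task
        return None              # the crawl loop treats None as "queue drained"

    def fininsh(self, index):    # spelling kept to match the call site in the crawler
        self.tasks[index]['status'] = 'done'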
[ "liudong@daowoo.com" ]
liudong@daowoo.com
47c7f38df6616f44f27f20887988c3be622a9975
0a21d5e72b4afbabcbf4ec0d65ea84cd8d6159c7
/Contest/weekly-contest-179/A.py
ebd41441c424c583e8fdc4369c9093db724bcbc3
[]
no_license
LuoJiaji/LeetCode-Demo
193f27ba36c93f9030435874a145c63a81d3c0f8
78e6e87c01848a1dc71b7dc0716029ece5f35863
refs/heads/master
2020-06-24T03:03:33.366537
2020-04-05T02:09:41
2020-04-05T02:09:41
198,830,590
0
0
null
null
null
null
UTF-8
Python
false
false
308
py
class Solution(object): def generateTheString(self, n): """ :type n: int :rtype: str """ ans = '' if n % 2 == 1: ans = 'a'*n else: ans = 'a'*(n-1) + 'b' return ans res = Solution().generateTheString(4) print(res)
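generateTheString leans on a parity argument: for odd n the string 'a'*n already gives every character an odd count, and for even n splitting off one 'b' leaves n-1 'a's, both odd counts. A quick hedged property check, assuming it runs in the same module so Solution is in scope:

from collections import Counter

for n in range(1, 8):
    s = Solution().generateTheString(n)
    # every character must occur an odd number of times and the length must be n
    assert len(s) == n and all(c % 2 == 1 for c in Counter(s).values()), (n, s)
    print(n, s)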
[ "lt920@126.com" ]
lt920@126.com
61a48c806bd338b16b2b38e7ad2975e6901b5b30
a7c0cc71e6da4615eca2c3d75117dad5b8dce8d3
/export.py
177102ba06cd51436420f1940dfd4e0c28205977
[ "Apache-2.0" ]
permissive
CTFd/CTFd
4b75207aeea3ed8d761cc6269c27a070693ab3ec
d8f0b9e602fca109cabe1895e847d39a46ce7429
refs/heads/master
2023-09-01T19:19:19.767862
2023-08-29T18:46:53
2023-08-29T18:46:53
28,681,142
4,593
2,273
Apache-2.0
2023-09-13T18:24:37
2015-01-01T05:36:55
Python
UTF-8
Python
false
false
770
py
from CTFd import create_app from CTFd.utils import config from CTFd.utils.exports import export_ctf import datetime import sys import shutil app = create_app() with app.app_context(): print( "This file will be deleted in CTFd v4.0. Switch to using `python manage.py export_ctf`" ) backup = export_ctf() if len(sys.argv) > 1: with open(sys.argv[1], "wb") as target: shutil.copyfileobj(backup, target) else: ctf_name = config.ctf_name() day = datetime.datetime.now().strftime("%Y-%m-%d_%T") full_name = "{}.{}.zip".format(ctf_name, day) with open(full_name, "wb") as target: shutil.copyfileobj(backup, target) print("Exported {filename}".format(filename=full_name))
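export_ctf() hands back a file-like zip archive which the script copies to disk. As a follow-up sanity check, the written backup can be opened with the standard library to confirm it really is a readable zip; the filename below is an assumption, substitute whatever the script produced:

# Hedged sketch: verify an exported backup is a readable zip archive.
import zipfile

with zipfile.ZipFile("backup.zip") as archive:       # hypothetical path
    bad = archive.testzip()                           # returns the first corrupt member, or None
    print("members:", len(archive.namelist()), "corrupt:", bad)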
[ "noreply@github.com" ]
CTFd.noreply@github.com
41d081c5e295fcddccd0310aa469c46bfcbe51c5
a5a81bc468d2b9d6a87c49701441cf6dacdaf039
/tester/grabAndPlotLimits.py
61aedf06e2b49f6165e7a313ba5233e52cf45152
[]
no_license
truggles/CombineHarvester
396833e57afe2a203c9c4ee5117b87c3397eb795
8721ef6752006999c014a86bb13e81402cda0662
refs/heads/master
2020-04-05T08:19:11.877201
2016-02-01T14:39:17
2016-02-01T14:39:17
50,054,454
1
1
null
2016-01-20T19:26:20
2016-01-20T19:26:20
null
UTF-8
Python
false
false
3,210
py
import ROOT import array from ROOT import gPad masses1 = [80, 90, 100, 110, 120, 130, 140, 160, 180, 600, 900, 1000, 1200, 1500, 2900, 3200] ROOT.gROOT.SetBatch(True) def plotLimits( signal, channel ) : #if channel == 'em' or channel == 'tt' or channel == 'mt' : masses = masses1 masses = masses1 #limits = [[0 for x in range(5)] for x in range( len(masses) )] limits = [[0 for x in range( len(masses) )] for x in range(5)] #limits = [array.array( 'd', []) for x in range(5)] #print limits mCnt = 0 for mass in masses : if channel != 'll' : f = ROOT.TFile('%s/%s/%i/higgsCombineTest.Asymptotic.mH%i.root' % (signal, channel, mass, mass), 'r') else : f = ROOT.TFile('%s/higgsCombineLL.Asymptotic.mH%i.root' % (signal, mass), 'r') t = f.Get('limit') print "Channel: ",channel," Mass: ",mass i = 0 for row in t : if row.quantileExpected == -1 : continue #print "Sig: ",row.quantileExpected," limit: ",row.limit limits[i][mCnt] = row.limit #limits[i].append( row.limit ) i += 1 mCnt += 1 n = len(masses) neg2 = ROOT.TGraph( len(masses)) neg1 = ROOT.TGraph( len(masses)) med = ROOT.TGraph( len(masses)) pos1 = ROOT.TGraph( len(masses)) pos2 = ROOT.TGraph( len(masses)) midShade = ROOT.TGraph( len(masses)*2) outShade = ROOT.TGraph( len(masses)*2) for i in range( len(masses) ) : neg2.SetPoint( i, masses[i], limits[0][i] ) neg1.SetPoint( i, masses[i], limits[1][i] ) med.SetPoint( i, masses[i], limits[2][i] ) pos1.SetPoint( i, masses[i], limits[3][i] ) pos2.SetPoint( i, masses[i], limits[4][i] ) midShade.SetPoint( i, masses[i],limits[3][i] ) midShade.SetPoint( n+i, masses[n-i-1],limits[1][n-i-1] ) outShade.SetPoint( i, masses[i],limits[4][i] ) outShade.SetPoint( n+i, masses[n-i-1],limits[0][n-i-1] ) outShade.SetFillStyle(1001) outShade.SetFillColor(5) midShade.SetFillStyle(1001) midShade.SetFillColor(3) c2 = ROOT.TCanvas( 'c2', 'c2', 600, 600 ) p1 = ROOT.TPad( 'p1', 'p1', 0, 0, 1, 1) p1.Draw() p1.cd() med.SetLineStyle(2) outShade.GetXaxis().SetTitle('Visible Mass (GeV)') outShade.GetXaxis().SetTitleOffset( outShade.GetXaxis().GetTitleOffset() * 1.3 ) outShade.GetYaxis().SetTitle('95% CL limit on #sigma(gg#phi) x BR(#phi#rightarrow #tau#tau) [pb]') outShade.GetYaxis().SetTitleOffset( outShade.GetYaxis().GetTitleOffset() * 1.3 ) outShade.SetTitle('Expected Limits A/H #rightarrow #tau#tau: Channel %s' % channel) outShade.Draw('Af') midShade.Draw('f') med.Draw('l') p1.SetLogy() p1.SetLogx() c2.SaveAs('/afs/cern.ch/user/t/truggles/www/limits/Limits_%s_%s.png' % (signal, channel) ) print "PNG files saved here: /afs/cern.ch/user/t/truggles/www/limits/" print "They are visible online at: http://truggles.web.cern.ch/truggles/limits/" channels = ['em', 'tt', 'mt', 'll'] signals = ['ggH', 'bbH'] for signal in signals : for channel in channels : plotLimits( signal, channel )
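The plotting pattern in plotLimits is the usual expected-limit band: each band is a closed TGraph polygon built by walking forward along its upper edge and back along its lower edge, then drawn with the 'f' fill option under the dashed median. A minimal self-contained sketch with synthetic limit values, assuming only PyROOT is installed; none of the numbers come from the analysis above:

import ROOT

masses = [100.0, 200.0, 300.0, 400.0]
lo  = [0.80, 0.50, 0.40, 0.35]   # synthetic lower band edge
med = [1.00, 0.70, 0.55, 0.50]   # synthetic median expected limit
hi  = [1.40, 1.00, 0.80, 0.70]   # synthetic upper band edge

n = len(masses)
band = ROOT.TGraph(2 * n)
for i in range(n):
    band.SetPoint(i, masses[i], hi[i])                       # forward along the upper edge
    band.SetPoint(n + i, masses[n - i - 1], lo[n - i - 1])   # back along the lower edge
band.SetFillColor(3)

median = ROOT.TGraph(n)
for i in range(n):
    median.SetPoint(i, masses[i], med[i])
median.SetLineStyle(2)

c = ROOT.TCanvas("c", "c", 600, 600)
band.Draw("Af")    # 'A' draws axes, 'f' fills the closed polygon
median.Draw("l")
c.SaveAs("band_sketch.png")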
[ "truggles@wisc.edu" ]
truggles@wisc.edu
14b7aae7767aad49409ab0944c50817bd8fbdaee
70ab3ee89cafa7f4882a6944e6ec335210875d30
/run_single_basketball_leyou.py
6ccf32d3874091547ad3bd0b9bab6d23628562ba
[]
no_license
SXL5519/caipiao1.0_1
3fa1fecd00576c36f37e6af21f0fe9b326289a6a
2db4387c5bad536cce99417041fbd34a699aa2cc
refs/heads/master
2021-10-10T07:39:45.127902
2019-01-08T09:32:47
2019-01-08T09:32:59
164,613,453
0
0
null
null
null
null
UTF-8
Python
false
false
806
py
import unittest,time
from HTMLTestRunner_jpg import HTMLTestRunner
from modle.function import send_mail,screen_shot,logfile

case_dir = "./test_case/Leyou"
pattern="*_single_basketball_*.py"
discover = unittest.defaultTestLoader.discover(case_dir,pattern)
logfile()
if __name__ =='__main__':
    # format the timestamp used in the report filename
    times = time.strftime("%Y%m%d%H%M%S")
    report_file="./report/leyou/LeyouCP_single_basketball_case_"+times+".html"
    fp = open(report_file,"wb")
    runner = HTMLTestRunner(stream=fp,
                            title="乐优竞蓝单关——自动化测试报告1",
                            description="运行环境:win7 Chrome")
    try:
        runner.run(discover)
    except:
        print('error while running the discovered test suite')
    finally:
        fp.close()
        send_mail(report_file)
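The discovery pattern above also works with the standard-library runner, which can be handy when HTMLTestRunner_jpg or the mail helper is unavailable. A hedged sketch; the directory and filename pattern are copied from the script, only the runner is swapped for unittest's TextTestRunner:

import unittest

suite = unittest.defaultTestLoader.discover("./test_case/Leyou",
                                            pattern="*_single_basketball_*.py")
runner = unittest.TextTestRunner(verbosity=2)   # plain console report instead of the HTML one
runner.run(suite)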
[ "shaoxinlin5519@163.com" ]
shaoxinlin5519@163.com
52e5f02be0348d286eaa45d1457e68ff9698b3b6
d83118503614bb83ad8edb72dda7f449a1226f8b
/src/dprj/platinumegg/app/cabaret/views/mgr/kpi/battleeventjoindaily.py
579ff996a58470e10b0a683fa40021637adbad39
[]
no_license
hitandaway100/caba
686fe4390e182e158cd9714c90024a082deb8c69
492bf477ac00c380f2b2758c86b46aa7e58bbad9
refs/heads/master
2021-08-23T05:59:28.910129
2017-12-03T19:03:15
2017-12-03T19:03:15
112,512,044
0
0
null
null
null
null
UTF-8
Python
false
false
1,692
py
# -*- coding: utf-8 -*-
from platinumegg.app.cabaret.views.mgr.kpi.base import KpiHandler
from platinumegg.app.cabaret.util.api import BackendApi
import settings
from platinumegg.app.cabaret.models.battleevent.BattleEvent import BattleEventMaster
from platinumegg.app.cabaret.views.mgr.model_edit import AppModelChoiceField
import os
from django import forms

class Handler(KpiHandler):
    """Battle-event participation rate (daily join counts).
    """
    class Form(forms.Form):
        eventid = AppModelChoiceField(BattleEventMaster, required=False, label=u'イベントID')

    def getTitle(self):
        return u'バトルイベント日別参加数'

    def getKpiName(self):
        eventid = self.request.get("eventid")
        if not eventid or not str(eventid).isdigit():
            config = BackendApi.get_current_battleeventconfig(self.getModelMgr(), using=settings.DB_READONLY)
            eventid = config.mid
        self.__eventid = eventid
        return 'battleeventjoindaily_%03d' % int(eventid)

    def getOptionalForm(self):
        form = Handler.Form()
        form.eventid = self.__eventid
        return form

    def makeFileDataTable(self, dirpath, filelist):
        tabledata = []
        for filedata in filelist:
            filepath = os.path.join(dirpath, filedata['filename'])
            f = None
            data = None
            try:
                f = open(filepath)
                data = f.read()
                f.close()
            except:
                if f:
                    f.close()
                raise
            tabledata.append(data.split(','))
        return tabledata

def main(request):
    return Handler.run(request)
[ "shangye@mail.com" ]
shangye@mail.com
7aac97bd18585740470971643e94c41583982cc6
b096dbccb31d3bd181259e930816964c71034ff4
/tests/test_base/test_signature.py
44e7721141d1ad15178fb3820f4cc6706d18aad4
[]
no_license
cosphere-org/lily
b68f95720381a69ce0caa5f47fca461b3f5242a9
f6a8281e10eedcccb86fcf3a26aaf282d91f70f4
refs/heads/master
2023-02-18T13:49:03.568989
2022-06-30T09:58:23
2022-06-30T09:58:23
175,789,374
6
0
null
2023-02-15T18:49:10
2019-03-15T09:28:05
Python
UTF-8
Python
false
false
3,362
py
from datetime import datetime import string from unittest.mock import call from itsdangerous import SignatureExpired, URLSafeTimedSerializer import pytest from lily.base import signature from lily.base.events import EventFactory # # sign_payload # def test_sign_payload__calls_dumps_correctly(mocker): dumps_mock = mocker.patch.object(URLSafeTimedSerializer, 'dumps') signature.sign_payload( email='test@whats.com', payload='WHATEVER', secret='personal_secret', salt='salt_me') assert dumps_mock.call_args_list == [ call(['test@whats.com', 'WHATEVER'], salt='salt_me')] def test_sign_payload__returns_correct_code(mocker): mocker.patch.object( URLSafeTimedSerializer, 'dumps').return_value = 'g67g6f7g' encoded_payload = signature.sign_payload( email='test@whats.com', payload='WAT', secret='personal_secret', salt='salt_me') assert encoded_payload == 'g67g6f7g' # # verify_payload # def test_verify_payload__make_the_right_calls(mocker): loads_mock = mocker.patch.object(URLSafeTimedSerializer, 'loads') loads_mock.return_value = ('hi@there', 'HI') payload = signature.verify_payload( encoded_payload='rubishtokenwhereareyou', secret='my_secret', salt='salt_me', signer_email='hi@there', max_age=24) assert payload == 'HI' assert loads_mock.call_count == 1 def test_verify_payload__different_secrets_for_encoding_and_decoding(): code = signature.sign_payload( 'why@gmail.com', 'NO!', 'secret123', 'salt') with pytest.raises(EventFactory.BrokenRequest) as e: assert signature.verify_payload( code, 'my_secret', 'salt', 'hi@there', 120) assert e.value.event == 'PAYLOAD_VERIFIED_AS_BROKEN' def test_verify_payload__email_mismatch(mocker): code = signature.sign_payload( 'why@gmail.com', 'NO!', 'secret123', 'salt') with pytest.raises(EventFactory.BrokenRequest) as e: signature.verify_payload( code, 'secret123', 'salt', 'hello@there', 120) assert e.value.event == 'PAYLOAD_VERIFIED_AS_BROKEN_MISMATCHING_EMAILS' def test_verify_payload__recognizes_expired_token(mocker): mocker.patch.object( URLSafeTimedSerializer, 'loads' ).side_effect = SignatureExpired( 'error occured', date_signed=datetime(2013, 1, 15, 6, 48)) with pytest.raises(EventFactory.BrokenRequest) as e: signature.verify_payload( 'what.ever', 'personal_secret', 'salt', 'test@whats.com', 24) assert e.value.event == 'PAYLOAD_VERIFIED_AS_EXPIRED' # # create_secret # def test_create_secret__creates_unique_secrets(): secrets = [signature.create_secret() for i in range(1000)] assert len(secrets) == len(set(secrets)) def test_create_secret__safe_secret(): def assert_is_secret_safe(secret): assert len(secret) == 64 assert len(set(string.ascii_uppercase) & set(secret)) > 0 assert len(set(string.ascii_lowercase) & set(secret)) > 0 assert len(set(string.digits) & set(secret)) > 0 assert len(set(string.punctuation) & set(secret)) > 0 # uniqueness of characters assert len(set(secret)) > 30 for i in range(1000): assert_is_secret_safe(signature.create_secret())
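These tests pin down the contract of lily.base.signature without showing its body: sign_payload serializes (email, payload) with a per-call salt, and verify_payload loads it back while enforcing max_age and a matching signer email. Below is a hypothetical implementation consistent with those tests, built directly on itsdangerous; the real module raises EventFactory errors instead of ValueError and may differ in other details:

from itsdangerous import URLSafeTimedSerializer

def sign_payload(email, payload, secret, salt):
    # mirrors the dumps([email, payload], salt=salt) call asserted in the tests
    return URLSafeTimedSerializer(secret).dumps([email, payload], salt=salt)

def verify_payload(encoded_payload, secret, salt, signer_email, max_age):
    email, payload = URLSafeTimedSerializer(secret).loads(
        encoded_payload, salt=salt, max_age=max_age)
    if email != signer_email:
        raise ValueError("mismatching emails")   # lily reports a BrokenRequest event here
    return payload

token = sign_payload("hi@there", "HI", "secret123", "salt_me")
print(verify_payload(token, "secret123", "salt_me", "hi@there", max_age=3600))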
[ "maciej@cosphere.org" ]
maciej@cosphere.org
605b8ac68ca36f19a6c83959423f5e17545569cc
6d9fbe6e6a2abfd8455e92f6dba67a5f02d87f41
/lib/phonenumbers/shortdata/region_RW.py
34419d07f20772809e8c065ece534282a63aab08
[]
no_license
JamesBrace/InfluenceUWebLaunch
549d0b48ff3259b139cb891a19cb8b5382ffe2c8
332d25940e4b1b45a7a2a8200f77c8413543b199
refs/heads/master
2021-09-04T04:08:47.594900
2018-01-15T16:49:29
2018-01-15T16:49:29
80,778,825
1
1
null
null
null
null
UTF-8
Python
false
false
796
py
"""Auto-generated file, do not edit by hand. RW metadata""" from ..phonemetadata import NumberFormat, PhoneNumberDesc, PhoneMetadata PHONE_METADATA_RW = PhoneMetadata(id='RW', country_code=None, international_prefix=None, general_desc=PhoneNumberDesc(national_number_pattern='1\\d{2}', possible_number_pattern='\\d{3}', possible_length=(3,)), toll_free=PhoneNumberDesc(), premium_rate=PhoneNumberDesc(), emergency=PhoneNumberDesc(national_number_pattern='112', possible_number_pattern='\\d{3}', example_number='112', possible_length=(3,)), short_code=PhoneNumberDesc(national_number_pattern='112', possible_number_pattern='\\d{3}', example_number='112', possible_length=(3,)), standard_rate=PhoneNumberDesc(), carrier_specific=PhoneNumberDesc(), short_data=True)
[ "james.brace@mail.mcgill.ca" ]
james.brace@mail.mcgill.ca
b3293d311675fab17255febd8777666c78c11a7c
42d3e676cb9da325712dd54001a8fdda1661d1e1
/OutOfAfrica.py
8459020897373768d0f5f6ef2277657bb5c6f9b3
[]
no_license
janaobsteter/msprime
5f139f2b101e7246e53a6d0baaff1f28cf1dfa6c
e96ae69720100d544f69384d19a28fcca07a9c1d
refs/heads/master
2022-08-03T11:35:51.759317
2020-05-25T07:59:49
2020-05-25T07:59:49
266,719,099
0
0
null
null
null
null
UTF-8
Python
false
false
2,470
py
import math import msprime # First we set out the maximum likelihood values of the various parameters # given in Table 1. N_A = 7300 N_B = 2100 N_AF = 12300 N_EU0 = 1000 N_AS0 = 510 # Times are provided in years, so we convert into generations. generation_time = 25 T_AF = 220e3 / generation_time # T_B = 140e3 / generation_time # divergence between west african and Eurasian populations T_EU_AS = 21.2e3 / generation_time # divergence time between european and east asian populations # We need to work out the starting (diploid) population sizes based on # the growth rates provided for these two populations r_EU = 0.004 r_AS = 0.0055 N_EU = N_EU0 / math.exp(-r_EU * T_EU_AS) N_AS = N_AS0 / math.exp(-r_AS * T_EU_AS) # Migration rates during the various epochs. m_AF_B = 25e-5 m_AF_EU = 3e-5 m_AF_AS = 1.9e-5 m_EU_AS = 9.6e-5 # Population IDs correspond to their indexes in the population # configuration array. Therefore, we have 0=YRI, 1=CEU and 2=CHB # initially. population_configurations = [ msprime.PopulationConfiguration( sample_size=0, initial_size=N_AF), msprime.PopulationConfiguration( sample_size=1, initial_size=N_EU, growth_rate=r_EU), msprime.PopulationConfiguration( sample_size=1, initial_size=N_AS, growth_rate=r_AS) ] migration_matrix = [ [ 0, m_AF_EU, m_AF_AS], [m_AF_EU, 0, m_EU_AS], [m_AF_AS, m_EU_AS, 0], ] demographic_events = [ # CEU and CHB merge into B with rate changes at T_EU_AS msprime.MassMigration( time=T_EU_AS, source=2, destination=1, proportion=1.0), msprime.MigrationRateChange(time=T_EU_AS, rate=0), msprime.MigrationRateChange( time=T_EU_AS, rate=m_AF_B, matrix_index=(0, 1)), msprime.MigrationRateChange( time=T_EU_AS, rate=m_AF_B, matrix_index=(1, 0)), msprime.PopulationParametersChange( time=T_EU_AS, initial_size=N_B, growth_rate=0, population_id=1), # Population B merges into YRI at T_B msprime.MassMigration( time=T_B, source=1, destination=0, proportion=1.0), # Size changes to N_A at T_AF msprime.PopulationParametersChange( time=T_AF, initial_size=N_A, population_id=0) ] # Use the demography debugger to print out the demographic history # that we have just described. dd = msprime.DemographyDebugger( population_configurations=population_configurations, migration_matrix=migration_matrix, demographic_events=demographic_events) dd.print_history()
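Since the script only prints the demography, a natural follow-up is to feed the same configuration into a simulation with the legacy msprime API used here. A hedged sketch, assuming it is appended to the script above so the configuration objects are in scope; sequence length, rates and the seed are illustrative placeholders, not part of the published model:

# Run a small simulation under the demography defined above.
ts = msprime.simulate(
    population_configurations=population_configurations,
    migration_matrix=migration_matrix,
    demographic_events=demographic_events,
    length=1e5,                 # placeholder sequence length
    recombination_rate=2e-8,    # placeholder rate
    mutation_rate=2e-8,         # placeholder rate
    random_seed=42)

print("trees:", ts.num_trees, "segregating sites:", ts.num_sites)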
[ "obsteter.jana@gmail.com" ]
obsteter.jana@gmail.com
0f506c20beb65be6f34ecb5fcb1bcacae5c97864
09e57dd1374713f06b70d7b37a580130d9bbab0d
/benchmark/startQiskit_Class2922.py
8052c4b482d939ce0e7613f8c75fb9223b0a120a
[ "BSD-3-Clause" ]
permissive
UCLA-SEAL/QDiff
ad53650034897abb5941e74539e3aee8edb600ab
d968cbc47fe926b7f88b4adf10490f1edd6f8819
refs/heads/main
2023-08-05T04:52:24.961998
2021-09-19T02:56:16
2021-09-19T02:56:16
405,159,939
2
0
null
null
null
null
UTF-8
Python
false
false
4,299
py
# qubit number=4 # total number=42 import cirq import qiskit from qiskit import QuantumCircuit, QuantumRegister, ClassicalRegister from qiskit import BasicAer, execute, transpile from pprint import pprint from qiskit.test.mock import FakeVigo from math import log2 import numpy as np import networkx as nx def bitwise_xor(s: str, t: str) -> str: length = len(s) res = [] for i in range(length): res.append(str(int(s[i]) ^ int(t[i]))) return ''.join(res[::-1]) def bitwise_dot(s: str, t: str) -> str: length = len(s) res = 0 for i in range(length): res += int(s[i]) * int(t[i]) return str(res % 2) def build_oracle(n: int, f) -> QuantumCircuit: # implement the oracle O_f # NOTE: use multi_control_toffoli_gate ('noancilla' mode) # https://qiskit.org/documentation/_modules/qiskit/aqua/circuits/gates/multi_control_toffoli_gate.html # https://quantumcomputing.stackexchange.com/questions/3943/how-do-you-implement-the-toffoli-gate-using-only-single-qubit-and-cnot-gates # https://quantumcomputing.stackexchange.com/questions/2177/how-can-i-implement-an-n-bit-toffoli-gate controls = QuantumRegister(n, "ofc") target = QuantumRegister(1, "oft") oracle = QuantumCircuit(controls, target, name="Of") for i in range(2 ** n): rep = np.binary_repr(i, n) if f(rep) == "1": for j in range(n): if rep[j] == "0": oracle.x(controls[j]) oracle.mct(controls, target[0], None, mode='noancilla') for j in range(n): if rep[j] == "0": oracle.x(controls[j]) # oracle.barrier() return oracle def make_circuit(n:int,f) -> QuantumCircuit: # circuit begin input_qubit = QuantumRegister(n,"qc") classical = ClassicalRegister(n, "qm") prog = QuantumCircuit(input_qubit, classical) prog.h(input_qubit[3]) # number=35 prog.cz(input_qubit[0],input_qubit[3]) # number=36 prog.h(input_qubit[3]) # number=37 prog.h(input_qubit[3]) # number=22 prog.cx(input_qubit[0],input_qubit[3]) # number=32 prog.cx(input_qubit[0],input_qubit[3]) # number=39 prog.x(input_qubit[3]) # number=40 prog.cx(input_qubit[0],input_qubit[3]) # number=41 prog.cx(input_qubit[0],input_qubit[3]) # number=34 prog.h(input_qubit[3]) # number=19 prog.cz(input_qubit[0],input_qubit[3]) # number=20 prog.h(input_qubit[3]) # number=21 prog.z(input_qubit[3]) # number=10 prog.h(input_qubit[1]) # number=2 prog.h(input_qubit[2]) # number=3 prog.h(input_qubit[3]) # number=4 prog.h(input_qubit[0]) # number=5 oracle = build_oracle(n-1, f) prog.append(oracle.to_gate(),[input_qubit[i] for i in range(n-1)]+[input_qubit[n-1]]) prog.h(input_qubit[1]) # number=6 prog.h(input_qubit[2]) # number=7 prog.h(input_qubit[3]) # number=8 prog.h(input_qubit[0]) # number=9 prog.h(input_qubit[0]) # number=26 prog.cz(input_qubit[1],input_qubit[0]) # number=27 prog.h(input_qubit[0]) # number=28 prog.z(input_qubit[1]) # number=24 prog.cx(input_qubit[3],input_qubit[2]) # number=38 prog.h(input_qubit[0]) # number=29 prog.cz(input_qubit[1],input_qubit[0]) # number=30 prog.h(input_qubit[0]) # number=31 prog.h(input_qubit[1]) # number=18 prog.rx(2.8902652413026093,input_qubit[2]) # number=13 prog.y(input_qubit[1]) # number=11 prog.y(input_qubit[1]) # number=12 # circuit end return prog if __name__ == '__main__': a = "111" b = "0" f = lambda rep: bitwise_xor(bitwise_dot(a, rep), b) prog = make_circuit(4,f) backend = BasicAer.get_backend('statevector_simulator') sample_shot =8000 info = execute(prog, backend=backend).result().get_statevector() qubits = round(log2(len(info))) info = { np.binary_repr(i, qubits): round((info[i]*(info[i].conjugate())).real,3) for i in range(2 ** qubits) } backend = FakeVigo() circuit1 = 
transpile(prog,backend,optimization_level=2)
    writefile = open("../data/startQiskit_Class2922.csv","w")
    print(info,file=writefile)
    print("results end", file=writefile)
    print(circuit1.__len__(),file=writefile)
    print(circuit1,file=writefile)
    writefile.close()
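The dictionary comprehension above converts a statevector into basis-state probabilities; the same arithmetic can be checked on a tiny hand-made state without running any simulator. A minimal numpy-only sketch for the state (|00> + |11>)/sqrt(2), which is purely illustrative:

import numpy as np
from math import log2

state = np.array([1, 0, 0, 1]) / np.sqrt(2)      # (|00> + |11>) / sqrt(2)
qubits = round(log2(len(state)))

probs = {np.binary_repr(i, qubits): round((state[i] * state[i].conjugate()).real, 3)
         for i in range(2 ** qubits)}
print(probs)    # {'00': 0.5, '01': 0.0, '10': 0.0, '11': 0.5}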
[ "wangjiyuan123@yeah.net" ]
wangjiyuan123@yeah.net
f5bdb35b232eabf76f6685808f87bd5839a6ca32
392651974f2e6d006618b64aac6cc2613ba01883
/accounts/admin.py
5601dfa75500f8614dcb52e35fbc4181ec32ee53
[ "MIT", "Apache-2.0" ]
permissive
davidjrichardson/uwcs-zarya
654266dc67deaacd6dba2e390c6dbc85a9525d83
ab0a94540bf496531dd6b13fe7d313badc4a353c
refs/heads/master
2022-09-29T12:58:14.542328
2021-03-10T17:07:06
2021-03-10T21:24:29
63,612,819
7
6
MIT
2022-08-30T20:57:36
2016-07-18T15:04:54
Python
UTF-8
Python
false
false
1,088
py
from django.contrib import admin from django.contrib.auth import get_user_model from django.contrib.auth.admin import UserAdmin as BaseUserAdmin from accounts.models import CompsocUser, ShellAccount, DatabaseAccount, ExecPlacement, ExecPosition class CompsocUserInline(admin.StackedInline): model = CompsocUser class ShellAccountInline(admin.StackedInline): model = ShellAccount class DatabaseAccountInline(admin.StackedInline): model = DatabaseAccount class CompsocUserAdmin(BaseUserAdmin): inlines = [ CompsocUserInline, ShellAccountInline, DatabaseAccountInline ] def nickname(self, obj): return CompsocUser.objects.get(user=obj).nickname CompsocUserAdmin.list_display = ('username', 'nickname', 'email', 'first_name', 'last_name', 'is_staff') CompsocUserAdmin.search_fields = ('username', 'compsocuser__nickname', 'first_name', 'last_name', 'email') admin.site.unregister(get_user_model()) admin.site.register(get_user_model(), CompsocUserAdmin) admin.site.register(ExecPosition) admin.site.register(ExecPlacement)
[ "david@tankski.co.uk" ]
david@tankski.co.uk
77764321c46b85ea547f1665e7b53ed0b2e9e1d9
de24f83a5e3768a2638ebcf13cbe717e75740168
/moodledata/vpl_data/468/usersdata/301/111541/submittedfiles/Av2_Parte3.py
a5ede4cca77b944784daf1e730e8b2f44e7d0ec2
[]
no_license
rafaelperazzo/programacao-web
95643423a35c44613b0f64bed05bd34780fe2436
170dd5440afb9ee68a973f3de13a99aa4c735d79
refs/heads/master
2021-01-12T14:06:25.773146
2017-12-22T16:05:45
2017-12-22T16:05:45
69,566,344
0
0
null
null
null
null
UTF-8
Python
false
false
183
py
# -*- coding: utf-8 -*-
a = []
m = int(input('number of elements: '))
for i in range(0, m, 1):
    elemento = int(input('enter an element: '))   # store the value instead of discarding it
    a.append(elemento)                            # append the element that was read, not the count m
c = sum(a)
b = len(a)
print(a)
[ "rafael.mota@ufca.edu.br" ]
rafael.mota@ufca.edu.br
d16a63615e60bf2d1563cbf42caf63ac028d8eb7
56782846ce12a4aa65c0cdd41231f82fb09bb2e2
/python/14940.py
08e7b50de71974b1f76b31e33f87fbaefb6d109c
[]
no_license
Zigje9/Algorithm_study
2eb7255ffae734954944e549ccaab7c4573c3b99
a6c526b42f8c0e01daf1d699f55c9b0a78741be8
refs/heads/master
2023-08-14T02:08:26.395265
2021-09-23T01:10:31
2021-09-23T01:10:31
256,192,531
1
0
null
null
null
null
UTF-8
Python
false
false
1,139
py
import sys from collections import deque N, M = map(int, sys.stdin.readline().split()) move_x = [1, 0, -1, 0] move_y = [0, -1, 0, 1] board = [] for _ in range(N): board.append(list(map(int, sys.stdin.readline().split()))) def get_start(): for i in range(N): for j in range(M): if board[i][j] == 2: return [i, j] visit = [[0]*M for _ in range(N)] start_x, start_y = get_start() def bfs(): q = deque() q.append([start_x, start_y, 0]) visit[start_x][start_y] = 2e9 while q: now_x, now_y, dist = q.popleft() for i in range(4): next_x = now_x + move_x[i] next_y = now_y + move_y[i] if 0 <= next_x < N and 0 <= next_y < M: if visit[next_x][next_y] == 0 and board[next_x][next_y] == 1: visit[next_x][next_y] = dist+1 q.append([next_x, next_y, dist+1]) bfs() for i in range(N): for j in range(M): if visit[i][j] == 0 and board[i][j] == 1: visit[i][j] = -1 visit[start_x][start_y] = 0 for line in visit: print(" ".join(map(str, line)))
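The solution runs a single BFS from the goal cell (value 2) over land cells (value 1) and marks land the search never reaches with -1. A tiny standalone restatement on a hand-checkable 3x3 grid; the grid itself is made up for illustration:

from collections import deque

grid = [[2, 1, 1],
        [0, 0, 1],
        [1, 0, 1]]
N, M = 3, 3
dist = [[0] * M for _ in range(N)]
start = next((i, j) for i in range(N) for j in range(M) if grid[i][j] == 2)

q = deque([start])
seen = {start}
while q:
    x, y = q.popleft()
    for dx, dy in ((1, 0), (-1, 0), (0, 1), (0, -1)):
        nx, ny = x + dx, y + dy
        if 0 <= nx < N and 0 <= ny < M and (nx, ny) not in seen and grid[nx][ny] == 1:
            seen.add((nx, ny))
            dist[nx][ny] = dist[x][y] + 1
            q.append((nx, ny))

for i in range(N):
    for j in range(M):
        if grid[i][j] == 1 and (i, j) not in seen:
            dist[i][j] = -1        # land the goal cannot reach
print(dist)   # [[0, 1, 2], [0, 0, 3], [-1, 0, 4]]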
[ "pjkwprn@gmail.com" ]
pjkwprn@gmail.com
0f505cac33fd32fb549df9455005938e8b0736e7
86e45d6f82210888601a8063c4a491a36c886b60
/channels/management/commands/runserver.py
34154b95ace15eaa6c7c0a54bb444472e8d09fae
[ "BSD-3-Clause" ]
permissive
wengole/channels
68dfb1776f05bd5e8bafd82299b6274fbf932f4e
21b54e7db8f08a61af934a56d6832a3065d37676
refs/heads/master
2021-01-15T15:54:04.794407
2015-11-07T10:49:47
2015-11-07T12:47:26
45,731,612
0
0
null
2015-11-07T10:15:27
2015-11-07T10:15:27
null
UTF-8
Python
false
false
1,851
py
import django import threading from django.core.management.commands.runserver import Command as RunserverCommand from django.core.management import CommandError from channels import channel_backends, DEFAULT_CHANNEL_BACKEND from channels.worker import Worker from channels.adapters import UrlConsumer from channels.interfaces.wsgi import WSGIInterface class Command(RunserverCommand): def get_handler(self, *args, **options): """ Returns the default WSGI handler for the runner. """ return WSGIInterface(self.channel_backend) def run(self, *args, **options): # Run the rest return super(Command, self).run(*args, **options) def inner_run(self, *args, **options): # Check a handler is registered for http reqs self.channel_backend = channel_backends[DEFAULT_CHANNEL_BACKEND] if not self.channel_backend.registry.consumer_for_channel("http.request"): # Register the default one self.channel_backend.registry.add_consumer(UrlConsumer(), ["http.request"]) # Note that this is the right one on the console self.stdout.write("Worker thread running, channels enabled") if self.channel_backend.local_only: self.stdout.write("Local channel backend detected, no remote channels support") # Launch a worker thread worker = WorkerThread(self.channel_backend) worker.daemon = True worker.start() # Run rest of inner run super(Command, self).inner_run(*args, **options) class WorkerThread(threading.Thread): """ Class that runs a worker """ def __init__(self, channel_backend): super(WorkerThread, self).__init__() self.channel_backend = channel_backend def run(self): Worker(channel_backend=self.channel_backend).run()
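WorkerThread above is started as a daemon so it dies with the runserver process instead of blocking shutdown. A hedged, self-contained illustration of that daemon-thread pattern; PrintWorker is a hypothetical stand-in, not part of channels:

import threading
import time

class PrintWorker(threading.Thread):
    """Hypothetical stand-in for channels.worker.Worker: does a little work then returns."""
    def __init__(self, label):
        super().__init__()
        self.label = label

    def run(self):
        for i in range(3):
            print(self.label, "tick", i)
            time.sleep(0.1)

t = PrintWorker("worker")
t.daemon = True    # same effect as the daemon worker thread started by the runserver command
t.start()
t.join()           # joined here only so the demo output is visible before the script exits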
[ "andrew@aeracode.org" ]
andrew@aeracode.org
18ca4fe49c3a373eff2d58cd4615322d002593fb
d3efc82dfa61fb82e47c82d52c838b38b076084c
/crossmarketetf/crossmarket_redemption_HA/YW_CETFSS_SHSH_044.py
51dcf600f3979f6907a6dc09c820765634ed9b6c
[]
no_license
nantongzyg/xtp_test
58ce9f328f62a3ea5904e6ed907a169ef2df9258
ca9ab5cee03d7a2f457a95fb0f4762013caa5f9f
refs/heads/master
2022-11-30T08:57:45.345460
2020-07-30T01:43:30
2020-07-30T01:43:30
280,388,441
0
0
null
null
null
null
UTF-8
Python
false
false
6,671
py
#!/usr/bin/python
# -*- encoding: utf-8 -*-
import sys
sys.path.append("/home/yhl2/workspace/xtp_test")
from crossmarketetf.cetfservice.cetf_main_service import *
from crossmarketetf.cetfservice.cetf_get_components_asset import *
from crossmarketetf.cetfservice.cetf_utils import *
from crossmarketetf.cetfmysql.query_cetf_components_code import *
from mysql.QueryOrderErrorMsg import queryOrderErrorMsg
from service.mainService import *
from mysql.getUpOrDownPrice import getUpPrice

class YW_CETFSS_SHSH_044(xtp_test_case):
    def test_YW_CETFSS_SHSH_044(self):
        # ----------- ETF redemption -------------
        title = '赎回真实的ETF-可赎回的证券数足量,资金足额,预估现金差额>0'  # redeem a real ETF: enough redeemable securities and funds, estimated cash balance > 0

        # Define the expected values for this test case.
        # Expected status (期望状态) can be: 初始 (initial), 未成交 (unfilled), 全成 (fully filled),
        # 废单 (rejected), 撤废 (cancel rejected), 内部撤单 (internal cancel).
        # xtp_ID and cancel_xtpID default to 0 and do not need to change.
        case_goal = {
            '期望状态': '全成',
            'errorID': 0,
            'errorMSG': '',
            '是否生成报单': '是',
            '是否是撤废': '否',
            'xtp_ID': 0,
            'cancel_xtpID': 0,
        }
        logger.warning(title)

        unit_info = {
            'ticker': '530580',           # ETF code
            'etf_unit': 10.0,             # number of ETF redemption units
            'etf_unit_sell': 1.0,         # number of ETF units to sell
            'component_unit_sell': 10.0   # number of component-stock units to sell
        }

        # ----------- query the component-stock positions before the redemption -------------
        component_stk_info = cetf_get_all_component_stk(Api,unit_info['ticker'])

        # query the minimum creation/redemption unit of the ETF
        unit_number = query_creation_redem_unit(unit_info['ticker'])

        # ETF redemption quantity
        quantity = int(unit_info['etf_unit'] * unit_number)

        # define the order parameters ------------------------------------------
        wt_reqs = {
            'business_type': Api.const.XTP_BUSINESS_TYPE['XTP_BUSINESS_TYPE_ETF'],
            'order_client_id': 2,
            'market': Api.const.XTP_MARKET_TYPE['XTP_MKT_SH_A'],
            'ticker': unit_info['ticker'],
            'side': Api.const.XTP_SIDE_TYPE['XTP_SIDE_REDEMPTION'],
            'price_type': Api.const.XTP_PRICE_TYPE['XTP_PRICE_LIMIT'],
            'quantity': quantity,
            'position_effect': Api.const.XTP_POSITION_EFFECT_TYPE['XTP_POSITION_EFFECT_INIT']
        }
        g_func.cetf_parm_init(case_goal['期望状态'])
        rs1 = cetf_service_test(Api, case_goal, wt_reqs,component_stk_info)
        etf_creation_log(case_goal, rs1)
        self.assertEqual(rs1['用例测试结果'], True)

        # -------- secondary market: sell the ETF -----------
        case_goal['期望状态'] = '废单'
        case_goal['errorID'] = 11010121
        case_goal['errorMSG'] = queryOrderErrorMsg(11010121)
        # quantity of ETF sold on the secondary market
        quantity = int(unit_info['etf_unit_sell'] * unit_number)
        quantity_list = split_etf_quantity(quantity)
        # query the limit-up price
        limitup_px = getUpPrice(unit_info['ticker'])
        rs2 = {}
        for etf_quantity in quantity_list:
            wt_reqs_etf = {
                'business_type': Api.const.XTP_BUSINESS_TYPE['XTP_BUSINESS_TYPE_CASH'],
                'order_client_id': 2,
                'market': Api.const.XTP_MARKET_TYPE['XTP_MKT_SH_A'],
                'ticker': unit_info['ticker'],
                'side': Api.const.XTP_SIDE_TYPE['XTP_SIDE_SELL'],
                'price_type': Api.const.XTP_PRICE_TYPE['XTP_PRICE_BEST5_OR_CANCEL'],
                'price': limitup_px,
                'quantity': etf_quantity,
                'position_effect': Api.const.XTP_POSITION_EFFECT_TYPE['XTP_POSITION_EFFECT_INIT']
            }
            ParmIni(Api, case_goal['期望状态'], wt_reqs['price_type'])
            rs2 = serviceTest(Api, case_goal, wt_reqs_etf)
            if rs2['用例测试结果'] is False:
                etf_sell_log(case_goal, rs2)
                self.assertEqual(rs2['用例测试结果'], True)
                return
        etf_sell_log(case_goal, rs2)
        time.sleep(2)

        # ------------ secondary market: sell the component stocks -----------
        case_goal['期望状态'] = '全成'
        case_goal['errorID'] = 0
        case_goal['errorMSG'] = ''
        # query the component-stock codes, quantities, cash-substitution flags, etc.
        etf_components = query_cetf_components_info(unit_info['ticker'],1)
        # a sell unit greater than 100 is treated as a share quantity; below 100, as a number of units
        rs3 = {}
        for component_info in etf_components:
            substitute_flag = component_info[1]
            if substitute_flag in (0,1):
                stk_code = component_info[0]
                components_share = component_info[2]
                quantity = (int(unit_info['component_unit_sell'])
                            if unit_info['component_unit_sell'] >= 100
                            else int(components_share * unit_info['component_unit_sell']))
                limitup_px = getUpPrice(stk_code)
                wt_reqs = {
                    'business_type': Api.const.XTP_BUSINESS_TYPE['XTP_BUSINESS_TYPE_CASH'],
                    'order_client_id': 2,
                    'market': Api.const.XTP_MARKET_TYPE['XTP_MKT_SH_A'],
                    'ticker': stk_code,
                    'side': Api.const.XTP_SIDE_TYPE['XTP_SIDE_SELL'],
                    'price_type': Api.const.XTP_PRICE_TYPE['XTP_PRICE_BEST5_OR_CANCEL'],
                    'price': limitup_px,
                    'quantity': quantity,
                    'position_effect': Api.const.XTP_POSITION_EFFECT_TYPE['XTP_POSITION_EFFECT_INIT']
                }
                ParmIni(Api, case_goal['期望状态'], wt_reqs['price_type'])
                rs3 = serviceTest(Api, case_goal, wt_reqs)
                if rs3['用例测试结果'] is False:
                    etf_components_sell_log(case_goal, rs3)
                    self.assertEqual(rs3['用例测试结果'], True)

        etf_components_sell_log(case_goal, rs3)
        self.assertEqual(rs3['用例测试结果'], True)


if __name__ == '__main__':
    unittest.main()
[ "418033945@qq.com" ]
418033945@qq.com