| Column | Type | Lengths / values |
|---|---|---|
| blob_id | string | length 40 |
| directory_id | string | length 40 |
| path | string | length 3–288 |
| content_id | string | length 40 |
| detected_licenses | list | length 0–112 |
| license_type | string | 2 classes |
| repo_name | string | length 5–115 |
| snapshot_id | string | length 40 |
| revision_id | string | length 40 |
| branch_name | string | 684 classes |
| visit_date | timestamp[us] | 2015-08-06 10:31:46 – 2023-09-06 10:44:38 |
| revision_date | timestamp[us] | 1970-01-01 02:38:32 – 2037-05-03 13:00:00 |
| committer_date | timestamp[us] | 1970-01-01 02:38:32 – 2023-09-06 01:08:06 |
| github_id | int64, nullable | 4.92k – 681M |
| star_events_count | int64 | 0 – 209k |
| fork_events_count | int64 | 0 – 110k |
| gha_license_id | string | 22 classes |
| gha_event_created_at | timestamp[us], nullable | 2012-06-04 01:52:49 – 2023-09-14 21:59:50 |
| gha_created_at | timestamp[us], nullable | 2008-05-22 07:58:19 – 2023-08-21 12:35:19 |
| gha_language | string | 147 classes |
| src_encoding | string | 25 classes |
| language | string | 1 class |
| is_vendor | bool | 2 classes |
| is_generated | bool | 2 classes |
| length_bytes | int64 | 128 – 12.7k |
| extension | string | 142 classes |
| content | string | length 128 – 8.19k |
| authors | list | length 1 |
| author_id | string | length 1–132 |
6f7b09b3bc0afa1b87897d8811dee37992af9e92
|
09e57dd1374713f06b70d7b37a580130d9bbab0d
|
/benchmark/startPyquil1566.py
|
adf7d2e0df4b68ca67d20975984b7e68e9320ea4
|
[
"BSD-3-Clause"
] |
permissive
|
UCLA-SEAL/QDiff
|
ad53650034897abb5941e74539e3aee8edb600ab
|
d968cbc47fe926b7f88b4adf10490f1edd6f8819
|
refs/heads/main
| 2023-08-05T04:52:24.961998
| 2021-09-19T02:56:16
| 2021-09-19T02:56:16
| 405,159,939
| 2
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,272
|
py
|
# qubit number=5
# total number=52
import pyquil
from pyquil.api import local_forest_runtime, QVMConnection
from pyquil import Program, get_qc
from pyquil.gates import *
import numpy as np
conn = QVMConnection()
def make_circuit()-> Program:
prog = Program() # circuit begin
prog += H(0) # number=3
prog += H(1) # number=4
prog += X(1) # number=48
prog += H(1) # number=26
prog += CZ(4,1) # number=27
prog += H(1) # number=28
prog += H(2) # number=5
prog += H(3) # number=6
prog += H(4) # number=21
prog += H(1) # number=34
prog += CZ(4,1) # number=35
prog += Z(4) # number=46
prog += RX(0.8011061266653969,2) # number=37
prog += H(1) # number=36
prog += H(0) # number=1
prog += H(1) # number=2
prog += H(2) # number=7
prog += H(3) # number=8
prog += CNOT(1,0) # number=38
prog += X(0) # number=39
prog += CNOT(1,0) # number=40
prog += CNOT(0,1) # number=42
prog += X(1) # number=43
prog += CNOT(0,1) # number=44
prog += X(2) # number=11
prog += Y(1) # number=45
prog += X(3) # number=12
prog += H(2) # number=41
prog += CNOT(1,0) # number=22
prog += X(4) # number=47
prog += X(0) # number=23
prog += H(0) # number=49
prog += CZ(1,0) # number=50
prog += H(0) # number=51
prog += CNOT(0,1) # number=30
prog += X(1) # number=31
prog += CNOT(0,1) # number=32
prog += X(2) # number=15
prog += H(4) # number=29
prog += X(3) # number=16
prog += H(0) # number=17
prog += H(1) # number=18
prog += H(2) # number=19
prog += H(3) # number=20
# circuit end
return prog
def summarise_results(bitstrings) -> dict:
d = {}
for l in bitstrings:
if d.get(l) is None:
d[l] = 1
else:
d[l] = d[l] + 1
return d
if __name__ == '__main__':
prog = make_circuit()
qvm = get_qc('5q-qvm')
results = qvm.run_and_measure(prog,1024)
bitstrings = np.vstack([results[i] for i in qvm.qubits()]).T
bitstrings = [''.join(map(str, l)) for l in bitstrings]
writefile = open("../data/startPyquil1566.csv","w")
    print(summarise_results(bitstrings), file=writefile)
writefile.close()
|
[
"wangjiyuan123@yeah.net"
] |
wangjiyuan123@yeah.net
|
908076fe749518b81a5d792a64a0ac250dc8aa67
|
083ca3df7dba08779976d02d848315f85c45bf75
|
/LongestSubstringwithAtLeastKRepeatingCharacters5.py
|
424c18652d21975839cddc18e43a9e0b3e29a8f8
|
[] |
no_license
|
jiangshen95/UbuntuLeetCode
|
6427ce4dc8d9f0f6e74475faced1bcaaa9fc9f94
|
fa02b469344cf7c82510249fba9aa59ae0cb4cc0
|
refs/heads/master
| 2021-05-07T02:04:47.215580
| 2020-06-11T02:33:35
| 2020-06-11T02:33:35
| 110,397,909
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 954
|
py
|
class Solution:
def longestSubstring(self, s, k):
"""
:type s: str
:type k: int
:rtype: int
"""
i = 0
res = 0
while i + k <= len(s):
if i > 0 and s[i] == s[i - 1]:
i += 1
continue
m = {}
mask = 0
max_index = i
for j in range(i, len(s)):
if s[j] not in m:
m[s[j]] = 0
m[s[j]] += 1
t = ord(s[j]) - ord('a')
if m[s[j]] < k:
mask |= 1 << t
else:
mask &= ~(1 << t)
if mask == 0:
max_index = j
res = max(res, j - i + 1)
i = max_index + 1
return res
if __name__ == '__main__':
s = input()
k = int(input())
solution = Solution()
print(solution.longestSubstring(s, k))
|
[
"jiangshen95@163.com"
] |
jiangshen95@163.com
|
7a7b0cb2ba35a1718311a5ace7ffe70e9f8f71bf
|
7b221a4981edad73991cf1e357274b46c4054eff
|
/stacks/XIAOMATECH/1.0/services/NIFI/package/scripts/nifi_cli.py
|
e82f05e871857ac17cf7d7bf280d1558ca7ca3dc
|
[
"Apache-2.0"
] |
permissive
|
aries-demos/dataops
|
a4e1516ef6205ad1ac5f692822e577e22ee85c70
|
436c6e89a1fdd0593a17815d3ec79c89a26d48f1
|
refs/heads/master
| 2020-05-29T17:20:12.854005
| 2019-05-22T06:06:00
| 2019-05-22T06:06:00
| 189,270,801
| 2
| 3
|
Apache-2.0
| 2019-05-29T17:35:25
| 2019-05-29T17:35:24
| null |
UTF-8
|
Python
| false
| false
| 6,474
|
py
|
import json
import time
from resource_management.core import shell
from resource_management.core.exceptions import Fail
from resource_management.core.logger import Logger
from resource_management.libraries.functions import format
import nifi_toolkit_util_common
def nifi_cli(command=None,
subcommand=None,
errors_retries=12,
retries_pause=10,
acceptable_errors=None,
**command_args):
"""
    Executes a nifi cli command and returns its output.
    We need to execute the command several times because NiFi becomes responsive only after some amount of time.
    On a non super-fast VM it takes about 1.5 minutes before NiFi starts responding to cli calls.
    Also, some commands can produce transient errors that disappear after some time.
    In other words, this cli is hard to use in automated environments :).
    :param command: main cli command (nifi, registry, session, etc.)
    :param subcommand: sub-command of the main command (nifi list-reg-clients, etc.)
    :param errors_retries: retry count on acceptable errors
    :param retries_pause: pause between call retries
    :param acceptable_errors: errors that are acceptable for retry ("Connection refused" is always in this list)
:param command_args: long version of command parameters
:return: command output
"""
import params
cli_env = {"JAVA_HOME": params.java_home}
cli_script = nifi_toolkit_util_common.get_toolkit_script(
"cli.sh", params.toolkit_tmp_dir, params.stack_version_buildnum)
if errors_retries < 1:
errors_retries = 1
if acceptable_errors is None:
acceptable_errors = []
acceptable_errors.append("Connection refused")
def do_retry(output):
for acceptable_error in acceptable_errors:
if acceptable_error in output:
return True
return False
cmd = [cli_script, command]
if subcommand is not None:
cmd.append(subcommand)
client_opts = nifi_toolkit_util_common.get_client_opts()
if params.nifi_ssl_enabled:
command_args.update(nifi_toolkit_util_common.get_client_opts())
command_args["proxiedEntity"] = params.nifi_initial_admin_id
else:
command_args["baseUrl"] = client_opts["baseUrl"]
for arg_name, arg_value in command_args.iteritems():
cmd.append("--" + arg_name)
cmd.append(arg_value)
for _ in range(0, errors_retries):
errors_retries -= 1
code, out = shell.call(
cmd, sudo=True, env=cli_env, logoutput=False, quiet=True)
if code != 0 and do_retry(out) and errors_retries != 0:
time.sleep(retries_pause)
continue
elif code == 0:
return out
else:
raise Fail("Failed to execute nifi cli.sh command")
def _update_impl(client_name=None,
client_id=None,
client_url=None,
existing_clients=None):
old_name = None
old_url = None
if not client_id:
if not client_name:
raise Fail(
"For client update 'client_name' or 'client_id' must be specified"
)
for description, name, uuid, url in existing_clients:
if name == client_name:
client_id = uuid
old_name = name
old_url = url
break
else:
for description, name, uuid, url in existing_clients:
if uuid == client_id:
old_name = name
old_url = url
arguments = {"registryClientId": client_id}
do_update = False
if client_name:
if client_name != old_name:
arguments["registryClientName"] = client_name
do_update = True
Logger.info(
format(
"Trying to update NIFI Client name '{old_name}' to '{client_name}'"
))
if client_url:
if client_url != old_url:
arguments["registryClientUrl"] = client_url
do_update = True
Logger.info(
format(
"Trying update url from '{old_url}' to '{client_url}' for NIFI Client with name '{old_name}'"
))
if do_update:
nifi_cli(command="nifi", subcommand="update-reg-client", **arguments)
Logger.info(format("NIFI Client '{old_name}' updated"))
else:
Logger.info(format("NIFI Client '{old_name}' is already up-to-date"))
return client_id
def create_reg_client(client_name, client_url):
client_uuid = nifi_cli(
command="nifi",
subcommand="create-reg-client",
registryClientName=client_name,
registryClientUrl=client_url).strip()
Logger.info(
format("Created NIFI client '{client_name}' with url '{client_url}'"))
return client_uuid
def list_reg_clients():
acceptable_errors = ["Error retrieving registry clients"]
Logger.info(format("Trying to retrieve NIFI clients..."))
command_result = nifi_cli(
command="nifi",
subcommand="list-reg-clients",
acceptable_errors=acceptable_errors,
outputType="json")
result_json = json.loads(command_result)
result = []
if "registries" in result_json:
for registry in result_json["registries"]:
if "component" in registry:
component = registry["component"]
if "description" in component:
description = component["description"]
else:
description = ''
result.append((description, component["name"], component["id"],
component["uri"]))
Logger.info("Retrieved:" + str(len(result)) + " clients")
return result
def update_reg_client(client_name=None, client_id=None, client_url=None):
existing_clients = list_reg_clients()
return _update_impl(
client_name=client_name,
client_id=client_id,
client_url=client_url,
existing_clients=existing_clients)
def create_or_update_reg_client(client_name, client_url):
existing_clients = list_reg_clients()
for _, name, uuid, _ in existing_clients:
if name == client_name:
return _update_impl(
client_id=uuid,
client_url=client_url,
existing_clients=existing_clients)
return create_reg_client(client_name, client_url)
|
[
"xianhuawei@MacBook-Air.local"
] |
xianhuawei@MacBook-Air.local
|
ef18e320c181d7603f6cc50f8b4c007b64c977e5
|
b8d2f095a4b7ea567ccc61ee318ba879318eec3d
|
/二分查找/287. 寻找重复数.py
|
69bdb06bf5dbca40a1db1643ecf3e21552f93868
|
[] |
no_license
|
f1amingo/leetcode-python
|
a3ef78727ae696fe2e94896258cfba1b7d58b1e3
|
b365ba85036e51f7a9e018767914ef22314a6780
|
refs/heads/master
| 2021-11-10T16:19:27.603342
| 2021-09-17T03:12:59
| 2021-09-17T03:12:59
| 205,813,698
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 621
|
py
|
from typing import List
class Solution:
    # Binary-search a guess in [1, n]: for each guess mid, scan the array once and count
    # how many values are <= mid to decide which half contains the duplicate.
def findDuplicate(self, nums: List[int]) -> int:
n = len(nums) - 1
lt, rt = 1, n
while lt < rt:
mid = (lt + rt) // 2
count = 0
for num in nums:
if num <= mid:
count += 1
if count > mid:
rt = mid
else:
lt = mid + 1
return lt
assert Solution().findDuplicate([1, 3, 4, 2, 2]) == 2
assert Solution().findDuplicate([3, 1, 3, 4, 2]) == 3
|
[
"zsjperiod@foxmail.com"
] |
zsjperiod@foxmail.com
|
028d4012be6e2dba637d5afdafcded11bfba6024
|
fd8d33572656edf9e1133a72ad4e2fa090f90a5f
|
/packages/OpenCV/nodes/OpenCV___YUV_YV120/OpenCV___YUV_YV120___METACODE.py
|
fd27f158254de61c8a19e271393e77c1740baba6
|
[
"MIT"
] |
permissive
|
ChristianHohlfeld/Ryven
|
a01c2eafa79a80883a9490efb5f043fd35f53484
|
53bf7e57a7b0fa25a704cd0d2214a7f76096d4dd
|
refs/heads/master
| 2022-12-12T22:03:57.122034
| 2020-08-31T13:45:45
| 2020-08-31T13:45:45
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,153
|
py
|
from NIENV import *
import cv2
# USEFUL
# self.input(index) <- access to input data
# self.outputs[index].set_val(val) <- set output data port value
# self.main_widget <- access to main widget
class %NODE_TITLE%_NodeInstance(NodeInstance):
def __init__(self, parent_node: Node, flow, configuration=None):
super(%NODE_TITLE%_NodeInstance, self).__init__(parent_node, flow, configuration)
# self.special_actions['action name'] = {'method': M(self.action_method)}
self.img_unYUV_YV12 = None
self.img_YUV_YV12 = None
self.initialized()
def update_event(self, input_called=-1):
self.img_unYUV_YV12 = self.input(0)
self.img_YUV_YV12 = cv2.cvtColor(self.img_unYUV_YV12,cv2.COLOR_BGRA2YUV_YV12)
#self.cnvt=cv2.imshow('gray_image',self.img_YUV_I420)
self.main_widget.show_image(self.img_YUV_YV12)
self.set_output_val(0, self.img_YUV_YV12)
def get_data(self):
data = {}
# ...
return data
def set_data(self, data):
pass
# ...
def remove_event(self):
pass
|
[
"leon.thomm@gmx.de"
] |
leon.thomm@gmx.de
|
ce4f4de3c6cd53f78a77f8f7d171a222a593ea7e
|
4a28e3e3afb28c0455ea21cfb983c3a8284dc5dd
|
/Reverse.py
|
bc387ecd0eea4a1c2e6fe9318772782e900f4b58
|
[] |
no_license
|
omdeshmukh20/Python-3-Programming
|
60f6bc4e627de9d643a429e64878a636f3875cae
|
9fb4c7fa54bc26d18b69141493c7a72e0f68f7d0
|
refs/heads/main
| 2023-08-28T04:37:27.001888
| 2021-10-29T17:03:34
| 2021-10-29T17:03:34
| 370,008,995
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 674
|
py
|
#Description: Accept Number From User And Return Reverse Of That Number Using For-Loop
#Date: 14/08/21
#Author : Om Deshmukh
# Reverse Operation
def Reverse(iValue1):
iDigit = 0
iRev = 0
if iValue1 < 0:
exit("Invalid Input! | Note : Give Input Greater Than 0")
for _ in range(iValue1):
if iValue1 == 0:
break
iDigit = iValue1 % 10
iRev = (iRev * 10) + iDigit
iValue1 = iValue1 // 10
return iRev
# Entry Point
def main():
iNo1 = int(input("Enter The Number : "))
iRet = Reverse(iNo1)
print("Reverse Number is : ", iRet)
# Code Starter
if __name__ == "__main__":
main()
|
[
"noreply@github.com"
] |
omdeshmukh20.noreply@github.com
|
4497e161d8e06316103a36d717fe15e66be3c951
|
3b504a983f1807ae7c5af51078bfab8c187fc82d
|
/client/input/InputSubsystem/JoyInput.py
|
03a21fda604012c74ea881e2b4fdb3fcfdc8f167
|
[] |
no_license
|
SEA-group/wowp_scripts
|
7d35fd213db95ea6b3dbd1ec6d3e0f13de86ba58
|
2fe54a44df34f2dcaa6860a23b835dcd8dd21402
|
refs/heads/master
| 2021-09-07T23:10:13.706605
| 2018-03-02T17:23:48
| 2018-03-02T17:23:48
| 117,280,141
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 6,816
|
py
|
# Embedded file name: scripts/client/input/InputSubsystem/JoyInput.py
import Keys
import BWPersonality
import InputMapping
import math
from MathExt import *
from consts import ROLL_AXIS, VERTICAL_AXIS, HORIZONTAL_AXIS, FORCE_AXIS, FLAPS_AXIS, INPUT_SYSTEM_STATE
from input.InputSubsystem.InputSubsystemBase import InputSubsystemBase
import GameEnvironment
BASE_SMOOTH_WINDOW = 10
class ProxyCamStrategy(object):
@staticmethod
def rotateCursor(*args):
pass
class JoystickExpertInput(InputSubsystemBase):
def __init__(self, profile):
self._profile = profile
self.__isRawForceAxis = True
self.__smoothStack = {}
self.__lastSmoothWin = {}
class JoyEvent:
def __init__(self):
self.deviceId = None
self.axis = None
self.value = None
return
self.__joyEvent = JoyEvent()
def pushLastEvent(self):
for deviceId in BWPersonality.axis:
for axis in BWPersonality.axis[deviceId]:
self.__joyEvent.deviceId = deviceId
self.__joyEvent.axis = axis
self.__joyEvent.value = BWPersonality.axis[deviceId][axis]
self.processJoystickEvent(self.__joyEvent)
def restart(self):
self.__smoothStack = {}
self.__lastSmoothWin = {}
def dispose(self):
self._profile = None
return
@property
def __cameraStrategy(self):
cam = GameEnvironment.getCamera()
if cam is not None:
return cam.getDefualtStrategies['CameraStrategyNormal']
else:
return ProxyCamStrategy
def processJoystickEvent(self, event):
jSet = InputMapping.g_instance.joystickSettings
rValue = 0.0
vValue = 0.0
hValue = 0.0
fValue = 0.0
if event.axis == jSet.ROLL_AXIS and (event.deviceId == jSet.ROLL_DEVICE or 0 == jSet.ROLL_DEVICE):
rValue = -event.value if jSet.INVERT_ROLL else event.value
rawValue = rValue
if abs(rValue) <= jSet.ROLL_DEAD_ZONE:
self._profile.sendData(ROLL_AXIS, 0.0, -rawValue)
else:
rValue = self.__signalSmoothing(jSet.ROLL_AXIS, rValue, jSet.ROLL_SMOOTH_WINDOW)
rValue = self.__signalDiscrete(jSet.ROLL_SENSITIVITY, rValue, event.deviceId, event.axis)
rValue = math.copysign((abs(rValue) - jSet.ROLL_DEAD_ZONE) / (1.0 - jSet.ROLL_DEAD_ZONE), rValue)
rValue = InputMapping.translateAxisValue(jSet.AXIS_X_CURVE, rValue)
rValue = clamp(-1.0, -rValue, 1.0)
self._profile.sendData(ROLL_AXIS, rValue, -rawValue)
elif event.axis == jSet.VERTICAL_AXIS and (event.deviceId == jSet.VERTICAL_DEVICE or 0 == jSet.VERTICAL_DEVICE):
vValue = -event.value if jSet.INVERT_VERTICAL else event.value
rawValue = vValue
if abs(vValue) <= jSet.VERTICAL_DEAD_ZONE:
self._profile.sendData(VERTICAL_AXIS, 0.0, rawValue)
else:
vValue = self.__signalSmoothing(jSet.VERTICAL_AXIS, vValue, jSet.VERTICAL_SMOOTH_WINDOW)
vValue = self.__signalDiscrete(jSet.VERTICAL_SENSITIVITY, vValue, event.deviceId, event.axis)
vValue = math.copysign((abs(vValue) - jSet.VERTICAL_DEAD_ZONE) / (1 - jSet.VERTICAL_DEAD_ZONE), vValue)
vValue = InputMapping.translateAxisValue(jSet.AXIS_Y_CURVE, vValue)
vValue = clamp(-1.0, -vValue, 1.0)
self._profile.sendData(VERTICAL_AXIS, vValue, -rawValue)
elif event.axis == jSet.HORIZONTAL_AXIS and (event.deviceId == jSet.HORIZONTAL_DEVICE or 0 == jSet.HORIZONTAL_DEVICE):
hValue = event.value if jSet.INVERT_HORIZONTAL else -event.value
rawValue = hValue
if abs(hValue) <= jSet.HORIZONTAL_DEAD_ZONE:
self._profile.sendData(HORIZONTAL_AXIS, 0.0, rawValue)
else:
hValue = self.__signalSmoothing(jSet.HORIZONTAL_AXIS, hValue, jSet.HORIZONTAL_SMOOTH_WINDOW)
hValue = self.__signalDiscrete(jSet.HORIZONTAL_SENSITIVITY, hValue, event.deviceId, event.axis)
hValue = InputMapping.translateAxisValue(jSet.AXIS_Z_CURVE, hValue)
hValue = math.copysign((abs(hValue) - jSet.HORIZONTAL_DEAD_ZONE) / (1 - jSet.HORIZONTAL_DEAD_ZONE), hValue)
if InputMapping.g_instance.currentProfileType == INPUT_SYSTEM_STATE.GAMEPAD_DIRECT_CONTROL:
hValue *= -1
hValue = clamp(-1.0, hValue, 1.0)
self._profile.sendData(HORIZONTAL_AXIS, hValue, rawValue)
elif event.axis == jSet.FORCE_AXIS and (event.deviceId == jSet.FORCE_DEVICE or 0 == jSet.FORCE_DEVICE):
fValue = -event.value if jSet.INVERT_FORCE else event.value
rawValue = fValue
if self.__isRawForceAxis:
fValue = self.__renormalization(fValue)
self._profile.sendData(FORCE_AXIS, fValue, rawValue)
self.__cameraStrategy.rotateCursor(vValue * 0.01, hValue * 0.01)
def setCursorCamera(self, isCursorCamera):
pass
def setRawForceAxis(self, value):
self.__isRawForceAxis = value
def __renormalization(self, x):
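        # Piecewise remap of the raw throttle axis: values above the dead zone become 1,
        # values between the normal-thrust point and the dead zone become 0, and values
        # below the normal-thrust point are rescaled into [-1, 0].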
maxForce = InputMapping.g_instance.joystickSettings.POINT_OF_NORMAL_THRUST
deadZone = InputMapping.g_instance.joystickSettings.FORCE_DEAD_ZONE
if deadZone > 1:
deadZone = 1
if x > deadZone:
return 1
if maxForce < x <= deadZone:
return 0
return clamp(-1.0, (x + 1.0) / (max(-0.99, maxForce) + 1.0) - 1.0, 0.0)
def __signalDiscrete(self, discrete, value, deviceId, axis):
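        # Quantize the axis value to a step size derived from the joystick resolution and
        # the sensitivity setting, snapping to the middle of each step; values very close
        # to zero or to full deflection pass through unchanged.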
SENSITIVITY = 14 * discrete
joyDPI = BigWorld.getJoystickResolution(deviceId, axis) / pow(2.0, math.floor(SENSITIVITY))
halfSingleSignal = 0.5 / joyDPI
if abs(value) < 0.25 * halfSingleSignal or abs(value) > 1.0 - 0.25 * halfSingleSignal:
return value
absValue = math.floor(abs(value) * joyDPI) / joyDPI + halfSingleSignal
return math.copysign(absValue, value)
def __signalSmoothing(self, axis, value, win, e = 0.99):
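        # Moving-average smoothing: keep the last `window` samples per axis and return their
        # mean; near full deflection (abs(value) >= e) the saturated sign is returned instead.
        # Changing the window size for an axis resets its sample history.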
if self.__lastSmoothWin.get(axis, None) != win:
self.__lastSmoothWin[axis] = win
if self.__smoothStack.get(axis, None):
self.__smoothStack[axis] = []
window = max(int(BASE_SMOOTH_WINDOW * win), 1)
self.__smoothStack.setdefault(axis, []).append(value)
if len(self.__smoothStack[axis]) > window:
self.__smoothStack[axis].pop(0)
val = math.copysign(1.0, value) if abs(value) >= e else sum(self.__smoothStack[axis]) / len(self.__smoothStack[axis])
return val
|
[
"55k@outlook.com"
] |
55k@outlook.com
|
f817dc9cd7b0ee5cb3fb0d8da067107e84fabd08
|
c380976b7c59dadaccabacf6b541124c967d2b5a
|
/.history/src/data/data_20191021130626.py
|
54a8374208393201a7d3ecf5fa63dc428630f047
|
[
"MIT"
] |
permissive
|
bkraft4257/kaggle_titanic
|
b83603563b4a3c995b631e8142fe72e1730a0e2e
|
f29ea1773773109a867278c001dbd21a9f7b21dd
|
refs/heads/master
| 2020-08-17T12:45:28.653402
| 2019-11-15T16:20:04
| 2019-11-15T16:20:04
| 215,667,760
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 4,770
|
py
|
import pandas as pd
import numpy as np
from typing import Union
from pathlib import Path
from nameparser import HumanName
class ExtractData:
def __init__(self, filename: Union[str, Path], age_bins = None, drop_columns=None):
# """Extract Training Data from file or Path
# Arguments:
# filename {[str]} -- Filename of CSV data file containing data.
# drop_columns -- Columns in dataframe that should be dropped.
# """
if drop_columns is None:
drop_columns = ["age", "cabin", "name", "ticket"]
self.filename = filename
self.drop_columns = drop_columns
self.all_label_columns = ["survived"]
self.all_feature_columns = [
"pclass",
"name",
"sex",
"age",
"sibsp",
"parch",
"ticket",
"fare",
"cabin",
"embarked",
]
self.Xy_raw = None
self.extract_raw()
def extract_raw(self):
"""
        Extracts data from a CSV file, normalizes column names, and stores the result
        in self.Xy_raw indexed by passengerid.
"""
Xy_raw = pd.read_csv(self.filename)
Xy_raw.columns = Xy_raw.columns.str.lower().str.replace(" ", "_")
Xy_raw = Xy_raw.rename(columns={"age": "age_known"})
Xy_raw["pclass"] = Xy_raw["pclass"].astype("category")
self.Xy_raw = Xy_raw.set_index("passengerid")
class TransformData:
title_translator = {
"Mlle.": "Mrs.",
"Mme.": "Mrs.",
"Sir.": "Mr.",
"Ms.": "Mrs.",
"Rev.": "Mr.",
"": "Mr.",
"Col.": "Mr.",
"Capt.": "Mr.",
"Lady.": "Mrs.",
"the Countess. of": "Mrs.",
}
def __init__(
self,
raw_data,
adult_age_threshold_min=13,
age_bins = None,
Xy_age_estimate=None,
drop_columns=None,
):
# """Extract Training Data from file or Path
# Arguments:
# filename {[str]} -- Filename of CSV data file containing data.
# drop_columns -- Columns in dataframe that should be dropped.
# """
if age_bins is None:
age_bins = [0,10,20,30, 40, 50, 60, np.inf]
if drop_columns is None:
drop_columns = ["age", "cabin", "name", "ticket"]
        self.raw = raw_data
        self.drop_columns = drop_columns
self.adult_age_threshold_min = adult_age_threshold_min
self.Xy_age_estimate = Xy_age_estimate
self.age_bins = age_bins
self.Xy = self.raw.Xy_raw.copy()
self.extract_title()
self.extract_last_name()
self.extract_cabin_number()
self.extract_cabin_prefix()
self.estimate_age()
self.calc_is_child()
self.calc_is_travelling_alone()
def calc_is_travelling_alone(self):
self.Xy["is_travelling_alone"] = (self.Xy.sibsp == 0) & (self.Xy.parch == 0)
def calc_is_child(self):
self.Xy["is_child"] = self.Xy.age < self.adult_age_threshold_min
def extract_cabin_number(self):
self.Xy["cabin_number"] = self.Xy.ticket.str.extract("(\d+)$")
def extract_cabin_prefix(self):
self.Xy["cabin_prefix"] = self.Xy.ticket.str.extract("^(.+) ")
def extract_title(self):
"""[summary]
"""
self.Xy["title"] = (
self.Xy.name.apply(lambda x: HumanName(x).title)
.replace(self.title_translator)
.replace({"\.": ""}, regex=True)
)
def extract_last_name(self):
self.Xy["last_name"] = self.Xy.name.apply(lambda x: HumanName(x).last)
def calc_age_bins(self):
        self.Xy['age_bin'] = pd.cut(self.Xy.age, bins=self.age_bins)
def clean(self,):
"""Clean data to remove missing data and "unnecessary" features.
Arguments:
in_raw_df {pd.DataFrame} -- Dataframe containing all columns and rows Kaggle Titanic Training Data set
"""
        self.Xy = self.Xy.drop(self.drop_columns, axis=1)
def estimate_age(self, groupby_columns=["sex", "title"]):
"""[summary]
Keyword Arguments:
groupby {list} -- [description] (default: {['sex','title']})
"""
if self.Xy_age_estimate is None:
Xy_age_estimate = (
self.Xy.groupby(groupby_columns).age_known.mean().to_frame().round(1)
)
Xy_age_estimate = Xy_age_estimate.rename(
columns={"age_known": "age_estimate"}
)
out_df = self.Xy.reset_index().merge(Xy_age_estimate, on=groupby_columns)
out_df["age"] = out_df["age_known"].fillna(out_df["age_estimate"])
self.Xy = out_df
self.Xy_age_estimate = Xy_age_estimate
|
[
"bob.kraft@infiniteleap.net"
] |
bob.kraft@infiniteleap.net
|
0684d234e85f6b170a94bbdd8fe260adcc0f1b90
|
0296bc69a0d9608ed826ad7a719395f019df098f
|
/old_modules/render_model_1.py
|
9fc3f68c9e69607c41d3e1a6f72240c17d64ea5e
|
[] |
no_license
|
jcn16/Blender_HDRmap_render
|
c0486a77e04c5b41a6f75f123dbdb3d10c682367
|
50e6cdb79fef83081de9830e7105dd425a235a9e
|
refs/heads/main
| 2023-07-19T22:22:53.622052
| 2021-08-20T06:29:10
| 2021-08-20T06:29:10
| 377,757,283
| 2
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 7,746
|
py
|
from math import radians, sin, cos, pi
import mathutils, bpy, argparse, random, time, os,logging
def generate_rand(a=0, b=1, only_positive=False):
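    # Draw a uniform value from (-b, b) whose magnitude is at least a; when only_positive
    # is set, also require a non-negative value (keeps retrying recursively until satisfied).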
x = (random.random()-0.5) * 2*b
if abs(x) < a or (only_positive and x<0):
return generate_rand(a, b, only_positive)
else:
return x
def point_at(obj, target, roll=0):
"""
Rotate obj to look at target
:arg obj: the object to be rotated. Usually the camera
:arg target: the location (3-tuple or Vector) to be looked at
:arg roll: The angle of rotation about the axis from obj to target in radians.
Based on: https://blender.stackexchange.com/a/5220/12947 (ideasman42)
"""
if not isinstance(target, mathutils.Vector):
target = mathutils.Vector(target)
loc = obj.location
# direction points from the object to the target
direction = target - loc
quat = direction.to_track_quat('-Z', 'Y')
# /usr/share/blender/scripts/addons/add_advanced_objects_menu/arrange_on_curve.py
quat = quat.to_matrix().to_4x4()
rollMatrix = mathutils.Matrix.Rotation(roll, 4, 'Z')
# remember the current location, since assigning to obj.matrix_world changes it
loc = loc.to_tuple()
obj.matrix_world = quat * rollMatrix
obj.location = loc
# init & params
logger = logging.getLogger()
logger.setLevel(logging.ERROR)
random.seed(time.time())
light_num_low, light_num_high = 6, 12
light_loc_low, light_loc_high = 3, 6
#context = bpy.context
model_path = '/media/jcn/新加卷/JCN/CLOTHES/Human_model/衬衫裙子/model_3'
model = "model_3.obj"
render_path = "/media/jcn/新加卷/JCN/CLOTHES/Results/2/%08d.png"
quat_file = "/media/jcn/新加卷/JCN/CLOTHES/Results/2/result.txt"
# Delete default cube
bpy.data.objects['Cube'].select = True
bpy.ops.object.delete()
for obj in bpy.data.objects:
bpy.data.objects[obj.name].select = True
bpy.ops.object.delete()
# rendering process
# create a scene
#scene = bpy.data.scenes.new("Scene")
scene = bpy.context.scene
context=bpy.context
# create a camera
camera_data = bpy.data.cameras.new("Camera")
camera = bpy.data.objects.new("Camera", camera_data)
distance, alpha, beta, gamma = 4.5, 1.0, 89.0, 0.0
alpha, beta, gamma = radians(alpha), radians(beta), radians(gamma)
camera.location = mathutils.Vector((distance*cos(beta)*cos(alpha), distance*cos(beta)*sin(alpha), distance*sin(beta)))
point_at(camera, mathutils.Vector((0, -0.4, 0)), roll=gamma)
print('camera by looked_at', camera.location, camera.rotation_euler, camera.rotation_euler.to_quaternion())
scene.objects.link(camera)
# Create lights (lights with random num in random directions)
# light number:6~12, point light
light_num = random.randint(a=light_num_low, b=light_num_high)
print('create %d light(s)' % light_num)
for idx in range(light_num):
light_data = bpy.data.lamps.new('light'+str(idx), type='POINT')
light = bpy.data.objects.new('light'+str(idx), light_data)
light_loc = (generate_rand(light_loc_low, light_loc_high), generate_rand(light_loc_low, light_loc_high), generate_rand(light_loc_low, light_loc_high, True))
light.location = mathutils.Vector(light_loc)
scene.objects.link(light)
light_data = bpy.data.lamps.new('light', type='POINT')
light = bpy.data.objects.new('light', light_data)
light.location = mathutils.Vector((0, 0, 8))
scene.objects.link(light)
scene.update()
scene.render.resolution_x = 2048
scene.render.resolution_y = 2048
scene.render.resolution_percentage = 100
scene.render.alpha_mode = 'TRANSPARENT'
scene.camera = camera
path = os.path.join(model_path, model)
# make a new scene with cam and lights linked
context.screen.scene = scene
bpy.ops.scene.new(type='LINK_OBJECTS')
context.scene.name = model_path
cams = [c for c in context.scene.objects if c.type == 'CAMERA']
print(cams)
bpy.context.scene.use_nodes = True
tree = bpy.context.scene.node_tree
links = tree.links
bpy.context.scene.render.image_settings.color_depth = '8'
bpy.context.scene.render.image_settings.color_mode = 'RGB'
# Clear default nodes
for n in tree.nodes:
tree.nodes.remove(n)
# This must be enabled, otherwise the normal pass cannot be output
bpy.context.scene.render.layers['RenderLayer'].use_pass_normal = True
bpy.context.scene.render.layers["RenderLayer"].use_pass_color = True
bpy.context.scene.render.image_settings.file_format = 'PNG'
# Create input render layer node.
render_layers = tree.nodes.new('CompositorNodeRLayers')
scale_normal = tree.nodes.new(type="CompositorNodeMixRGB")
scale_normal.blend_type = 'MULTIPLY'
scale_normal.inputs[2].default_value = (0.5, 0.5, 0.5, 1)
links.new(render_layers.outputs['Normal'], scale_normal.inputs[1])
bias_normal = tree.nodes.new(type="CompositorNodeMixRGB")
bias_normal.blend_type = 'ADD'
bias_normal.inputs[2].default_value = (0.5, 0.5, 0.5, 0)
links.new(scale_normal.outputs[0], bias_normal.inputs[1])
normal_file_output = tree.nodes.new(type="CompositorNodeOutputFile")
normal_file_output.label = 'Normal Output'
links.new(bias_normal.outputs[0], normal_file_output.inputs[0])
# Remap as other types can not represent the full range of depth.
depth_file_output = tree.nodes.new(type="CompositorNodeOutputFile")
depth_file_output.label = 'Depth Output'
map = tree.nodes.new(type="CompositorNodeMapValue")
# Size is chosen kind of arbitrarily, try out until you're satisfied with resulting depth map.
map.offset = [-0.7]
map.size = [0.1]
map.use_min = True
map.min = [0]
links.new(render_layers.outputs['Depth'], map.inputs[0])
links.new(map.outputs[0], depth_file_output.inputs[0])
# image_file_output = tree.nodes.new(type="CompositorNodeOutputFile")
# image_file_output.label = 'Image'
# links.new(render_layers.outputs['Image'], image_file_output.inputs[0])
#print('image_idx: %08d, camera: (%.3f,%.3f,%.3f)' % (image_idx, a * 180. /pi, b * 180. / pi, g * 180. / pi))
albedo_file_output = tree.nodes.new(type="CompositorNodeOutputFile")
albedo_file_output.label = 'Albedo Output'
links.new(render_layers.outputs['Color'], albedo_file_output.inputs[0])
# import model
bpy.ops.import_scene.obj(filepath=path, axis_forward='-Z', axis_up='Y', filter_glob="*.obj;*.mtl") #-Z, Y
# print('scene objects:')
for o in context.scene.objects:
print(o)
for obj in context.scene.objects:
if obj.name in ['Camera.001'] + ['light'+str(idx) for idx in range(light_num)]:
continue
else:
obj.location = mathutils.Vector((0, 0, -2.0))
obj.scale = mathutils.Vector((0.002, 0.002, 0.002))
c = cams[0]
scene = bpy.context.scene
#scene = bpy.context.scene
f_quat = open(quat_file, 'w')
image_idx = 0
for g in [0]:
g = radians(float(g))
for b in [20, -20]:
b = radians(float(b))
for a in range(1, 360, 60):
a = radians(float(a))
c.location = mathutils.Vector((distance*cos(b)*cos(a), distance*cos(b)*sin(a), distance*sin(b)))
point_at(c, mathutils.Vector((0, -0.4, 0)), roll = g)
quat = c.rotation_euler.to_quaternion()
for output_node in [normal_file_output, depth_file_output,albedo_file_output]:
output_node.base_path = ''
scene.render.filepath = '/media/jcn/新加卷/JCN/CLOTHES/Results/2/image_%03d' % image_idx
# image_file_output.file_slots[0].path = '/media/jcn/新加卷/JCN/CLOTHES/Results/2/image%d' % image_idx
normal_file_output.file_slots[0].path = '/media/jcn/新加卷/JCN/CLOTHES/Results/2/normal_%03d' % image_idx
depth_file_output.file_slots[0].path = '/media/jcn/新加卷/JCN/CLOTHES/Results/2/depth_%03d' % image_idx
albedo_file_output.file_slots[0].path = '/media/jcn/新加卷/JCN/CLOTHES/Results/2/albedo_%03d' % image_idx
bpy.ops.render.render(use_viewport=True,write_still=True)
#context.scene.render.filepath = render_path % image_idx
f_quat.write('%08d,%.4f,%.4f,%.4f,%.4f,%.4f,%.4f,%.4f\n' % (image_idx, quat[0], quat[1], quat[2], quat[3], a * 180 /pi, b * 180 / pi, g * 180 / pi))
image_idx = image_idx + 1
f_quat.close()
|
[
"591599635@qq.com"
] |
591599635@qq.com
|
5b66423e71498cd6180f23934fe7cc35d8fdb9e0
|
9b64f0f04707a3a18968fd8f8a3ace718cd597bc
|
/huaweicloud-sdk-rms/huaweicloudsdkrms/v1/model/list_resources_response.py
|
dacd35a56aafb9e80f1c69849e3d804d2346de25
|
[
"Apache-2.0"
] |
permissive
|
jaminGH/huaweicloud-sdk-python-v3
|
eeecb3fb0f3396a475995df36d17095038615fba
|
83ee0e4543c6b74eb0898079c3d8dd1c52c3e16b
|
refs/heads/master
| 2023-06-18T11:49:13.958677
| 2021-07-16T07:57:47
| 2021-07-16T07:57:47
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,605
|
py
|
# coding: utf-8
import re
import six
from huaweicloudsdkcore.sdk_response import SdkResponse
class ListResourcesResponse(SdkResponse):
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
sensitive_list = []
openapi_types = {
'resources': 'list[ResourceEntity]',
'page_info': 'PageInfo'
}
attribute_map = {
'resources': 'resources',
'page_info': 'page_info'
}
def __init__(self, resources=None, page_info=None):
"""ListResourcesResponse - a model defined in huaweicloud sdk"""
super(ListResourcesResponse, self).__init__()
self._resources = None
self._page_info = None
self.discriminator = None
if resources is not None:
self.resources = resources
if page_info is not None:
self.page_info = page_info
@property
def resources(self):
"""Gets the resources of this ListResourcesResponse.
        Resource list
:return: The resources of this ListResourcesResponse.
:rtype: list[ResourceEntity]
"""
return self._resources
@resources.setter
def resources(self, resources):
"""Sets the resources of this ListResourcesResponse.
        Resource list
:param resources: The resources of this ListResourcesResponse.
:type: list[ResourceEntity]
"""
self._resources = resources
@property
def page_info(self):
"""Gets the page_info of this ListResourcesResponse.
:return: The page_info of this ListResourcesResponse.
:rtype: PageInfo
"""
return self._page_info
@page_info.setter
def page_info(self, page_info):
"""Sets the page_info of this ListResourcesResponse.
:param page_info: The page_info of this ListResourcesResponse.
:type: PageInfo
"""
self._page_info = page_info
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
if attr in self.sensitive_list:
result[attr] = "****"
else:
result[attr] = value
return result
def to_str(self):
import simplejson as json
return json.dumps(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, ListResourcesResponse):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
|
[
"hwcloudsdk@huawei.com"
] |
hwcloudsdk@huawei.com
|
4ca9bd19679fbb30fc5ed30b750ee022fc94c075
|
5ffdf4ddee5700e6bb3b062a07c1a9cf7e6adbc1
|
/PYTHON/Strings/capitalize.py
|
8cc94026d63e0fa32d0b508f4cfd21061a7f66e7
|
[
"MIT"
] |
permissive
|
byung-u/HackerRank
|
23df791f9460970c3b4517cb7bb15f615c5d47d0
|
4c02fefff7002b3af774b99ebf8d40f149f9d163
|
refs/heads/master
| 2021-05-05T13:05:46.722675
| 2018-03-30T08:07:36
| 2018-03-30T08:07:36
| 104,960,152
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 383
|
py
|
#!/usr/bin/env python3
def capitalize(string):
s = string.split(' ')
for i in range(0, len(s)):
if len(s[i]) == 0:
continue
if len(s[i][0]) != 0 and s[i][0].isalpha():
s[i] = s[i].title()
return ' '.join(s)
if __name__ == '__main__':
string = input()
capitalized_string = capitalize(string)
print(capitalized_string)
|
[
"iam.byungwoo@gmail.com"
] |
iam.byungwoo@gmail.com
|
e3c98e936946924d57a64be20bd0d6c76705512b
|
e55480007fde8acea46fe8eeb3ee7193c25ba113
|
/src/leetcode/1-99/09.py
|
4b14b2dee65b6c8465f1912a9386dbbec7fe586c
|
[] |
no_license
|
Annihilation7/Ds-and-Al
|
80301bf543ec2eb4b3a9810f5fc25b0386847fd3
|
a0bc5f5ef4a92c0e7a736dcff77df61d46b57409
|
refs/heads/master
| 2020-09-24T05:04:41.250051
| 2020-02-15T10:31:10
| 2020-02-15T10:31:10
| 225,669,366
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,191
|
py
|
# -*- coding: utf-8 -*-
# Email: 763366463@qq.com
# Created: 2019-12-09 12:11am
'''
Determine whether an integer is a palindrome. A palindrome is an integer that reads
the same forward (left to right) and backward (right to left).
Example 1:
Input: 121
Output: true
Example 2:
Input: -121
Output: false
Explanation: Read left to right it is -121; read right to left it is 121-. So it is not a palindrome.
Example 3:
Input: 10
Output: false
Explanation: Read right to left it is 01. So it is not a palindrome.
'''
class Solution:
def isPalindrome(self, x: int) -> bool:
'''
        Negative numbers are rejected right away.
        Use something like a head/tail two-pointer approach on the digits.
'''
if x < 0:
return False
k = 1
while x // k >= 10:
k *= 10
while x:
left = x // k
right = x % 10
if left != right:
return False
            # The two key steps:
            # how to compute the number that remains after stripping the leftmost and rightmost digits,
            # and how k should change on each iteration.
            # Work through a small example and it becomes clear; it is not hard.
x = x % k // 10
k //= 100
return True
|
[
"763366463@qq.com"
] |
763366463@qq.com
|
64be974da5067480a0088094c5764bb85d240db1
|
3b9b4049a8e7d38b49e07bb752780b2f1d792851
|
/src/ui/webui/resources/PRESUBMIT.py
|
2ac87faf2b09787c19200dad102e395811d7661e
|
[
"BSD-3-Clause",
"Apache-2.0"
] |
permissive
|
webosce/chromium53
|
f8e745e91363586aee9620c609aacf15b3261540
|
9171447efcf0bb393d41d1dc877c7c13c46d8e38
|
refs/heads/webosce
| 2020-03-26T23:08:14.416858
| 2018-08-23T08:35:17
| 2018-09-20T14:25:18
| 145,513,343
| 0
| 2
|
Apache-2.0
| 2019-08-21T22:44:55
| 2018-08-21T05:52:31
| null |
UTF-8
|
Python
| false
| false
| 1,001
|
py
|
# Copyright 2016 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import re
def PostUploadHook(cl, change, output_api):
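  # Make sure the CL description's CQ_INCLUDE_TRYBOTS line includes the
  # tryserver.chromium.linux:closure_compilation bot, appending or rewriting the tag as needed.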
rietveld_obj = cl.RpcServer()
description = rietveld_obj.get_description(cl.issue)
existing_bots = (change.CQ_INCLUDE_TRYBOTS or '').split(';')
clean_bots = set(filter(None, map(lambda s: s.strip(), existing_bots)))
new_bots = clean_bots | set(['tryserver.chromium.linux:closure_compilation'])
new_tag = 'CQ_INCLUDE_TRYBOTS=%s' % ';'.join(new_bots)
if clean_bots:
tag_reg = '^CQ_INCLUDE_TRYBOTS=.*$'
new_description = re.sub(tag_reg, new_tag, description, flags=re.M | re.I)
else:
new_description = description + '\n' + new_tag
if new_description == description:
return []
rietveld_obj.update_description(cl.issue, new_description)
return [output_api.PresubmitNotifyResult(
'Automatically added optional Closure bots to run on CQ.')]
|
[
"changhyeok.bae@lge.com"
] |
changhyeok.bae@lge.com
|
1ecae13285e6b4e11101cf69d5d4f92b64b71913
|
8fd55e7a0f8764b3fe894d927c39173507f03855
|
/sms/urls.py
|
f75a34a656dee837302b3de1b925fe0d38081e04
|
[] |
no_license
|
bogdal/django-sms
|
2ed97cbafd7c2a9b4b1521c766e89b2514b63e75
|
fa0ed8369228b2b3160e8b577b6377587ce1fe5a
|
refs/heads/master
| 2020-04-05T08:22:29.864245
| 2013-04-24T09:28:34
| 2013-04-24T09:28:34
| 3,496,819
| 3
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 410
|
py
|
from django.conf.urls import patterns, url
from sms.decorators import ip_restrictions
from sms.views import callback_received_sms, callback_delivery_report
urlpatterns = patterns('',
url(r'^callback/received-sms/$', ip_restrictions(callback_received_sms), name='callback-received-sms'),
url(r'^callback/delivery-report/$', ip_restrictions(callback_delivery_report), name='callback-delivery-report'),
)
|
[
"adam@bogdal.pl"
] |
adam@bogdal.pl
|
09e8055fcdcf2ad2dec55459c099ab811ed32068
|
fffabb9f3025e89f7d1e71e2bea1e1f93ca95c98
|
/gevent_-master/monkey_test.py
|
ca9512a34827effed846c0722e796fa03555f070
|
[] |
no_license
|
kagxin/recipe
|
2a880b77e56bae25e9793b13a8ebdeeea19b716c
|
70af9c949b9e4b476585b2b650fba416a9d3ebb2
|
refs/heads/master
| 2021-09-11T18:58:46.295928
| 2018-04-11T03:11:05
| 2018-04-11T03:11:05
| 86,281,134
| 2
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 129
|
py
|
import gevent.monkey
gevent.monkey.patch_socket()
import gevent
import urllib
import json
def fetch(pid):
pass
fetch(0)  # the stub takes a pid argument; 0 is a placeholder value
|
[
"123@163.com"
] |
123@163.com
|
8ada0608c934b48b2abbcdeb5aa1350a01506751
|
0a004fc3fe8e36fd7ce0ed2cc7e8140982315e03
|
/supervised_learning/0x0F-word_embeddings/0-bag_of_words.py
|
58f174f5ee6192922d3b2ccf12ba7882281f6654
|
[] |
no_license
|
pafuentess/holbertonschool-machine_learning
|
266ed4f05e106e194cdafe39544e48904f6538f4
|
3bffd1391b3fc790f0137d0afbe90eb8e2f7d713
|
refs/heads/master
| 2023-03-26T15:12:14.721409
| 2021-03-20T20:28:15
| 2021-03-20T20:28:15
| 279,388,813
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 455
|
py
|
#!/usr/bin/env python3
""" doc """
from sklearn.feature_extraction.text import CountVectorizer
def bag_of_words(sentences, vocab=None):
""" doc """
if vocab is None:
vectorizer = CountVectorizer()
X = vectorizer.fit_transform(sentences)
vocab = vectorizer.get_feature_names()
else:
vectorizer = CountVectorizer(vocabulary=vocab)
X = vectorizer.fit_transform(sentences)
return X.toarray(), vocab
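# Illustrative usage (the example sentences below are assumptions, not from the original repo):
#   E, vocab = bag_of_words(["hello world", "hello there world"])
#   vocab -> ['hello', 'there', 'world']; E -> [[1, 0, 1], [1, 1, 1]]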
|
[
"pafuentess@unal.edu.co"
] |
pafuentess@unal.edu.co
|
d132455b70c174d99968af7351962cf9ba6070a0
|
f07a42f652f46106dee4749277d41c302e2b7406
|
/Data Set/bug-fixing-1/30cae1c356d7341ef3c3a049b435b2da9bbd5588-<main>-bug.py
|
1207a332ae96f47a64d09b9b87303bc66e73535b
|
[] |
no_license
|
wsgan001/PyFPattern
|
e0fe06341cc5d51b3ad0fe29b84098d140ed54d1
|
cc347e32745f99c0cd95e79a18ddacc4574d7faa
|
refs/heads/main
| 2023-08-25T23:48:26.112133
| 2021-10-23T14:11:22
| 2021-10-23T14:11:22
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,799
|
py
|
def main():
module = AnsibleModule(argument_spec=dict(script=dict(required=True, type='str'), url=dict(required=False, type='str', default='http://localhost:8080'), validate_certs=dict(required=False, type='bool', default=True), user=dict(required=False, no_log=True, type='str', default=None), password=dict(required=False, no_log=True, type='str', default=None), timeout=dict(required=False, type='int', default=10), args=dict(required=False, type='dict', default=None)))
if (module.params['user'] is not None):
if (module.params['password'] is None):
module.fail_json(msg='password required when user provided')
module.params['url_username'] = module.params['user']
module.params['url_password'] = module.params['password']
module.params['force_basic_auth'] = True
if (module.params['args'] is not None):
from string import Template
script_contents = Template(module.params['script']).substitute(module.params['args'])
else:
script_contents = module.params['script']
headers = {
}
if is_csrf_protection_enabled(module):
crumb = get_crumb(module)
headers = {
crumb['crumbRequestField']: crumb['crumb'],
}
(resp, info) = fetch_url(module, (module.params['url'] + '/scriptText'), data=urlencode({
'script': script_contents,
}), headers=headers, method='POST', timeout=module.params['timeout'])
if (info['status'] != 200):
module.fail_json(msg=((('HTTP error ' + str(info['status'])) + ' ') + info['msg']))
result = to_native(resp.read())
if (('Exception:' in result) and ('at java.lang.Thread' in result)):
module.fail_json(msg=('script failed with stacktrace:\n ' + result))
module.exit_json(output=result)
|
[
"dg1732004@smail.nju.edu.cn"
] |
dg1732004@smail.nju.edu.cn
|
26724562ddaf5b84d3514df2553cf578c11097ff
|
e262e64415335060868e9f7f73ab8701e3be2f7b
|
/.history/pytest_test_20201123174255.py
|
71d84ac3ea86d52edcf7b63ed99b1d05e2cfeaed
|
[] |
no_license
|
Allison001/developer_test
|
6e211f1e2bd4287ee26fd2b33baf1c6a8d80fc63
|
b8e04b4b248b0c10a35e93128a5323165990052c
|
refs/heads/master
| 2023-06-18T08:46:40.202383
| 2021-07-23T03:31:54
| 2021-07-23T03:31:54
| 322,807,303
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 254
|
py
|
import pytest
class Test_A:
@pytest.mark.parametrize('a,b',[(10,20),(5,5)])
def test_data1(self,a,b):
print(a + b)
def test_data2(self):
a = 5
b = 5
print(a+b)
if __name__ == '__main__':
    pytest.main()
|
[
"zhangyingxbba@gmail.com"
] |
zhangyingxbba@gmail.com
|
81031853fd92e573bbec0a772457ebba3f43bb7a
|
e229456b9effa99e906d5cdfe08200ca5e1920a4
|
/lib/modules/exfiltration/invoke-exfiltration.py
|
b64b4fe4bcca83a42b5941dd3559df04ae7ff47c
|
[
"BSD-3-Clause"
] |
permissive
|
nerbix/Empire
|
cff3620f589d38a4967737458b7f4b56acabd64c
|
f45d5f35ff459df22ef0bd55236b7ffd9cef6f45
|
refs/heads/master
| 2020-05-22T09:27:59.693741
| 2017-03-08T18:18:13
| 2017-03-08T18:18:13
| 84,344,747
| 0
| 0
| null | 2017-03-08T16:54:42
| 2017-03-08T16:54:42
| null |
UTF-8
|
Python
| false
| false
| 4,857
|
py
|
from lib.common import helpers
class Module:
def __init__(self, mainMenu, params=[]):
# metadata info about the module, not modified during runtime
self.info = {
# name for the module that will appear in module menus
'Name': 'Invoke-Exfiltration',
# list of one or more authors for the module
'Author': ['Nick Britton <nerbies@gmail.com>'],
# more verbose multi-line description of the module
            'Description': ('This module will exfiltrate data over a range of protocols'),
# True if the module needs to run in the background
'Background' : True,
# File extension to save the file as
'OutputExtension' : None,
# True if the module needs admin rights to run
'NeedsAdmin' : False,
# True if the method doesn't touch disk/is reasonably opsec safe
# Disabled - this can be a relatively noisy module but sometimes useful
'OpsecSafe' : False,
# The minimum PowerShell version needed for the module to run
'MinPSVersion' : '2',
# list of any references/other comments
'Comments': [
'Based heavily on the great work done by Sensepost here: http://github.com/sensepost/det'
]
}
# any options needed by the module, settable during runtime
self.options = {
# format:
# value_name : {description, required, default_value}
'Agent' : {
# The 'Agent' option is the only one that MUST be in a module
'Description' : 'Agent to generate the source traffic on',
'Required' : True,
'Value' : ''
},
'server' : {
'Description' : 'Receiving Server IP',
'Required' : True,
'Value' : ''
},
'type' : {
'Description' : 'The protocol to use (ICMP, DNS, HTTP)',
'Required' : True,
'Value' : 'ICMP'
},
'key' : {
'Description' : 'AES encryption key to use',
'Required' : True,
'Value' : 'HELLO123'
},
'file' : {
'Description' : 'Full path of file to exfiltrate',
'Required' : True,
'Value' : ''
},
'port' : {
'Description' : 'Port (for HTTP exfiltration only).',
'Required' : False,
'Value' : '8080'
},
'dns' : {
                'Description' : 'DNS server to use (for DNS exfiltration only).',
'Required' : False,
'Value' : 'google.com'
}
}
# save off a copy of the mainMenu object to access external functionality
# like listeners/agent handlers/etc.
self.mainMenu = mainMenu
# During instantiation, any settable option parameters
# are passed as an object set to the module and the
# options dictionary is automatically set. This is mostly
# in case options are passed on the command line
if params:
for param in params:
# parameter format is [Name, Value]
option, value = param
if option in self.options:
self.options[option]['Value'] = value
def generate(self):
        # if you're reading in a large, external script that might be updated,
# use the pattern below
# read in the common module source code
moduleSource = self.mainMenu.installPath + "/data/module_source/exfil/Invoke-Exfiltration.ps1"
try:
f = open(moduleSource, 'r')
except:
print helpers.color("[!] Could not read module source path at: " + str(moduleSource))
return ""
moduleCode = f.read()
f.close()
script = moduleCode
# Need to actually run the module that has been loaded
script += 'Invoke-Exfiltration'
# add any arguments to the end execution of the script
for option,values in self.options.iteritems():
if option.lower() != "agent":
if values['Value'] and values['Value'] != '':
if values['Value'].lower() == "true":
# if we're just adding a switch
script += " -" + str(option)
else:
script += " -" + str(option) + " \"" + str(values['Value']) + "\""
return script
|
[
"root@localhost.localdomain"
] |
root@localhost.localdomain
|
afb957284892edf8fcd63b07feef00c82584970b
|
4a7ca643f2bb681a14105fdfba2b696c14f8fb19
|
/alphamind/tests/analysis/test_quantilieanalysis.py
|
fd31eaf64d67c0a4e71bef2c786c696b2e91e3ae
|
[
"MIT"
] |
permissive
|
iLampard/alpha-mind
|
84bb490eaa515a147b2a31deff305b2e6423c76f
|
c99ba7b2b082d7bf07263fde0cca57b1a8bcb7de
|
refs/heads/master
| 2020-03-22T02:32:28.489547
| 2018-08-22T03:01:26
| 2018-08-22T03:01:26
| 126,668,894
| 0
| 0
|
MIT
| 2018-06-26T03:37:20
| 2018-03-25T05:53:51
|
Python
|
UTF-8
|
Python
| false
| false
| 3,087
|
py
|
# -*- coding: utf-8 -*-
"""
Created on 2017-8-16
@author: cheng.li
"""
import unittest
import numpy as np
import pandas as pd
from alphamind.analysis.quantileanalysis import er_quantile_analysis
from alphamind.analysis.quantileanalysis import quantile_analysis
from alphamind.data.processing import factor_processing
from alphamind.data.standardize import standardize
from alphamind.data.winsorize import winsorize_normal
from alphamind.data.quantile import quantile
class TestQuantileAnalysis(unittest.TestCase):
def setUp(self):
n = 5000
n_f = 5
self.x = np.random.randn(n, 5)
self.x_w = np.random.randn(n_f)
self.r = np.random.randn(n)
self.b_w = np.random.randint(0, 10, n)
self.b_w = self.b_w / float(self.b_w.sum())
self.risk_exp = np.random.randn(n, 3)
self.n_bins = 10
def test_q_anl_impl(self):
n_bins = 5
x = self.x[:, 0]
q_groups = quantile(x, n_bins)
s = pd.Series(self.r, index=q_groups)
grouped_return = s.groupby(level=0).mean().values.flatten()
expected_res = grouped_return.copy()
res = n_bins - 1
res_weight = 1. / res
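        # Each bin's expected excess return equals its mean return minus the average of
        # the other (n_bins - 1) bins: value - (sum - value) / (n_bins - 1).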
for i, value in enumerate(expected_res):
expected_res[i] = (1. + res_weight) * value - res_weight * grouped_return.sum()
calculated_res = er_quantile_analysis(x, n_bins, self.r)
np.testing.assert_array_almost_equal(expected_res, calculated_res)
def test_quantile_analysis_simple(self):
f_df = pd.DataFrame(self.x)
calculated = quantile_analysis(f_df,
self.x_w,
self.r,
n_bins=self.n_bins,
do_neutralize=False,
pre_process=[],
post_process=[])
er = self.x_w @ self.x.T
expected = er_quantile_analysis(er, self.n_bins, self.r)
np.testing.assert_array_almost_equal(calculated, expected)
def test_quantile_analysis_with_factor_processing(self):
f_df = pd.DataFrame(self.x)
calculated = quantile_analysis(f_df,
self.x_w,
self.r,
n_bins=self.n_bins,
do_neutralize=True,
risk_exp=self.risk_exp,
pre_process=[winsorize_normal, standardize],
post_process=[standardize])
er = self.x_w @ factor_processing(self.x,
[winsorize_normal, standardize],
self.risk_exp,
[standardize]).T
expected = er_quantile_analysis(er, self.n_bins, self.r)
np.testing.assert_array_almost_equal(calculated, expected)
if __name__ == '__main__':
unittest.main()
|
[
"scrappedprince.li@gmail.com"
] |
scrappedprince.li@gmail.com
|
e23182c952b026b3073f17d87b07c6f0b5a98c59
|
e3ffd1f17819ab8a7b95b63f4a30cbbe85d7c44d
|
/week_4/contacts_short_41.py
|
6a866c20b264e1243963c70d3d82ae7fb3e9efe3
|
[] |
no_license
|
jamesoneill54/programming_2
|
77d105f0eb3be7c1af3fe6ca89cf291aca4e0c95
|
8c8d7e4551894b773f52ee1e4785fe324b974ac9
|
refs/heads/master
| 2021-01-22T04:18:22.654154
| 2017-05-25T23:46:31
| 2017-05-25T23:46:31
| 92,451,317
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 365
|
py
|
import sys
with open(sys.argv[1], 'r') as f:
contacts = {}
for line in f:
[name, phone] = line.strip().split()
contacts[name] = phone
for line in sys.stdin:
if line.strip() in contacts:
print('Name:', line.strip())
print('Phone:', contacts[line.strip()])
else:
print('Name:', line.strip())
print('No such contact')
|
[
"noreply@github.com"
] |
jamesoneill54.noreply@github.com
|
57790357a11b82f693fabb647bd204391e2a5368
|
a9e3f3ad54ade49c19973707d2beb49f64490efd
|
/Part-03-Understanding-Software-Crafting-Your-Own-Tools/models/edx-platform/lms/djangoapps/commerce/api/v1/views.py
|
c85719eccde259580a7818f846c5cf0d12d2b7f5
|
[
"AGPL-3.0-only",
"AGPL-3.0-or-later",
"MIT"
] |
permissive
|
luque/better-ways-of-thinking-about-software
|
8c3dda94e119f0f96edbfe5ba60ca6ec3f5f625d
|
5809eaca7079a15ee56b0b7fcfea425337046c97
|
refs/heads/master
| 2021-11-24T15:10:09.785252
| 2021-11-22T12:14:34
| 2021-11-22T12:14:34
| 163,850,454
| 3
| 1
|
MIT
| 2021-11-22T12:12:31
| 2019-01-02T14:21:30
|
JavaScript
|
UTF-8
|
Python
| false
| false
| 3,600
|
py
|
"""
Commerce views
"""
import logging
from django.contrib.auth.models import User # lint-amnesty, pylint: disable=imported-auth-user
from django.http import Http404
from edx_rest_api_client import exceptions
from edx_rest_framework_extensions.auth.jwt.authentication import JwtAuthentication
from rest_framework.authentication import SessionAuthentication
from rest_framework.generics import ListAPIView, RetrieveUpdateAPIView
from rest_framework.permissions import IsAuthenticated
from rest_framework.views import APIView
from common.djangoapps.course_modes.models import CourseMode
from common.djangoapps.util.json_request import JsonResponse
from openedx.core.djangoapps.commerce.utils import ecommerce_api_client
from openedx.core.lib.api.authentication import BearerAuthentication
from openedx.core.lib.api.mixins import PutAsCreateMixin
from ...utils import is_account_activation_requirement_disabled
from .models import Course
from .permissions import ApiKeyOrModelPermission, IsAuthenticatedOrActivationOverridden
from .serializers import CourseSerializer
log = logging.getLogger(__name__)
class CourseListView(ListAPIView):
""" List courses and modes. """
authentication_classes = (JwtAuthentication, BearerAuthentication, SessionAuthentication,)
permission_classes = (IsAuthenticated,)
serializer_class = CourseSerializer
pagination_class = None
def get_queryset(self):
return list(Course.iterator())
class CourseRetrieveUpdateView(PutAsCreateMixin, RetrieveUpdateAPIView):
""" Retrieve, update, or create courses/modes. """
lookup_field = 'id'
lookup_url_kwarg = 'course_id'
model = CourseMode
authentication_classes = (JwtAuthentication, BearerAuthentication, SessionAuthentication,)
permission_classes = (ApiKeyOrModelPermission,)
serializer_class = CourseSerializer
# Django Rest Framework v3 requires that we provide a queryset.
# Note that we're overriding `get_object()` below to return a `Course`
# rather than a CourseMode, so this isn't really used.
queryset = CourseMode.objects.all()
def get_object(self, queryset=None): # lint-amnesty, pylint: disable=arguments-differ, unused-argument
course_id = self.kwargs.get(self.lookup_url_kwarg)
course = Course.get(course_id)
if course:
return course
raise Http404
def pre_save(self, obj):
# There is nothing to pre-save. The default behavior changes the Course.id attribute from
# a CourseKey to a string, which is not desired.
pass
class OrderView(APIView):
""" Retrieve order details. """
authentication_classes = (JwtAuthentication, SessionAuthentication,)
permission_classes = (IsAuthenticatedOrActivationOverridden,)
def get(self, request, number):
""" HTTP handler. """
# If the account activation requirement is disabled for this installation, override the
# anonymous user object attached to the request with the actual user object (if it exists)
if not request.user.is_authenticated and is_account_activation_requirement_disabled():
try:
request.user = User.objects.get(id=request.session._session_cache['_auth_user_id']) # lint-amnesty, pylint: disable=protected-access
except User.DoesNotExist:
return JsonResponse(status=403)
try:
order = ecommerce_api_client(request.user).orders(number).get()
return JsonResponse(order)
except exceptions.HttpNotFoundError:
return JsonResponse(status=404)
|
[
"rafael.luque@osoco.es"
] |
rafael.luque@osoco.es
|
a8b587769f572f0e5f80a0878307423b27a05b90
|
28aed3120411fd7558fc08b47274f5ced5d5069c
|
/UIAutomation/tests/Utils/test_envsettingreader.py
|
c9533f87b7fe7e555380cf537766a0f96e5fc602
|
[
"MIT",
"Apache-2.0"
] |
permissive
|
SirCYong/long_long_ago
|
8e181310267836774b50824e873adb7959f80080
|
6dfe9a9eb9d0f25a55bccd22b66878bde1a2fd6b
|
refs/heads/master
| 2020-03-16T02:58:18.161981
| 2018-05-07T15:17:54
| 2018-05-07T15:17:54
| 132,477,192
| 0
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 663
|
py
|
import unittest
from nose.tools import assert_equal
from UIAutomation.Utils import get_setting_configuration, get_env_script_runs_on
class TestEnvSettingReader(unittest.TestCase):
def test_get_setting_configuration(self):
assert_equal('Android', get_setting_configuration('android', 'platformName'))
assert_equal('.activity.base.LauncherActivity', get_setting_configuration('android', 'appActivity'))
assert_equal('com.iscs.SmallAnimal', get_setting_configuration('ios', 'bundleId'))
def test_get_env_script_runs_on(self):
assert get_env_script_runs_on().lower() == 'cit' or get_env_script_runs_on().lower() == 'sit'
|
[
"649803977@qq.com"
] |
649803977@qq.com
|
c90939194ffd9a0ae033eda433c4412b90e34be4
|
2bcc6c45a28251dcde72bb8b003b5592350dc208
|
/exams/models.py
|
5909bafe70117024d9487d86d13b9a60bbab0dba
|
[] |
no_license
|
amanjhurani/university_dost
|
153d1a245df4338be60df3e9980e0238408e40ad
|
41f6119c88d36f0153fbf1a5be1913e2c45d9751
|
refs/heads/master
| 2021-10-08T22:23:42.252577
| 2018-12-18T11:22:10
| 2018-12-18T11:22:10
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 5,291
|
py
|
from django.urls import reverse
from config.settings.base import AUTH_USER_MODEL
from django.db import models
from universities.models import Subject
from config.utils import random_string_generator
from markdownx.models import MarkdownxField
class Exam(models.Model):
# Choices
MONTH_CHOICES = (
('january', 'January'),
('february', 'February'),
('march', 'March'),
('april', 'April'),
('may', 'May'),
('june', 'June'),
('july', 'July'),
('august', 'August'),
('september', 'September'),
('october', 'October'),
('november', 'November'),
('december', 'December')
)
TERM_CHOICES = (
('summer', 'Summer'),
('winter', 'Winter')
)
# Fields
month = models.CharField(max_length=128, choices=MONTH_CHOICES)
year = models.CharField(max_length=4)
term = models.CharField(max_length=12, choices=TERM_CHOICES)
date = models.DateField()
total_time = models.CharField(max_length=12)
total_marks = models.IntegerField()
exam_code = models.CharField(max_length=128, blank=True, null=True)
exam_complete = models.BooleanField(default=False)
# Relationship Fields
subject = models.ForeignKey(
Subject, on_delete=models.CASCADE
)
class Meta:
ordering = ('-pk',)
def save(self, *args, **kwargs):
if self.exam_code and len(self.exam_code.split('-')) > 3:
self.exam_code = self.exam_code.split('-')[3]
self.exam_code = '{}-{}'.format(
self.subject.subject_code,
random_string_generator(size=5)
)
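        # If the generated code collides with an existing exam, regenerate it once with a new random suffix.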
qs_exists = Exam.objects.filter(exam_code=self.exam_code).exists()
if qs_exists:
self.exam_code = '{}-{}'.format(self.subject.subject_code,
random_string_generator(size=5))
super(Exam, self).save(*args, **kwargs)
def __str__(self):
return self.subject.name + " " + self.term + "-" + self.year
def get_absolute_url(self):
return reverse('exams_exam_detail', args=(self.pk,))
def get_update_url(self):
return reverse('exams_exam_update', args=(self.pk,))
class Question(models.Model):
# Choices
QUESTION_TYPE_CHOICES = (
('mcq', 'MCQ'),
('short_question', 'Short Question'),
('descriptive_question', 'Descriptive Question'),
)
# Fields
question_code = models.CharField(max_length=128, blank=True, null=True)
question_number = models.CharField(max_length=128)
question_body = MarkdownxField()
question_type = models.CharField(
max_length=12, choices=QUESTION_TYPE_CHOICES)
answer = MarkdownxField(blank=True, null=True)
explanation = MarkdownxField(blank=True, null=True)
marks = models.IntegerField()
upvote = models.IntegerField(default=0)
downvote = models.IntegerField(default=0)
# Relationship Fields
exam = models.ForeignKey(
Exam, on_delete=models.CASCADE
)
author = models.ForeignKey(
AUTH_USER_MODEL, blank=True, null=True, on_delete=models.SET_NULL
)
class Meta:
ordering = ('-pk',)
def save(self, *args, **kwargs):
if len(self.question_code.split('-')) > 4:
self.question_code = self.question_code.split('-')[4]
self.question_code = '{}-{}'.format(
self.exam.exam_code,
self.question_code
)
super(Question, self).save(*args, **kwargs)
def __str__(self):
return (self.question_number +
" | " + self.exam.term +
"-" + self.exam.year
)
def get_absolute_url(self):
return reverse('exams_question_detail', args=(self.pk,))
def get_update_url(self):
return reverse('exams_question_update', args=(self.pk,))
class AnswerFeedback(models.Model):
# Choices
FEEDBACK_TYPE_CHOICES = (
('wrong_answer', 'Wrong Answer'),
('improvement', 'Improvement'),
)
FEEDBACK_STATUS_CHOICES = (
('received', 'Received'),
('reviewing', 'Reviewing'),
('reviewed', 'Reviewed'),
('resolved', 'Resolved')
)
# Fields
feedback_title = models.CharField(max_length=256, blank=True, null=True)
feedback_body = models.TextField()
feedback_type = models.CharField(
max_length=128, choices=FEEDBACK_TYPE_CHOICES)
feedback_status = models.CharField(
max_length=128,
choices=FEEDBACK_STATUS_CHOICES,
default='received')
user_email = models.EmailField(max_length=256, blank=True, null=True)
created = models.DateTimeField(auto_now_add=True)
updated = models.DateTimeField(auto_now=True)
# Relationship Fields
question = models.ForeignKey(
Question, on_delete=models.CASCADE
)
user = models.ForeignKey(
AUTH_USER_MODEL, blank=True, null=True, on_delete=models.SET_NULL
)
class Meta:
ordering = ('-pk',)
def __str__(self):
return self.feedback_title
def get_absolute_url(self):
return reverse('exams_answerfeedback_detail', args=(self.pk,))
def get_update_url(self):
return reverse('exams_answerfeedback_update', args=(self.pk,))
|
[
"dhaval.savalia6@gmail.com"
] |
dhaval.savalia6@gmail.com
|
49f552812ae2ebc0e2cb7111c0f72e6044fd22b7
|
d96f75610758fd6e193d575a2c5ba72c420d90e8
|
/blog/migrations/0001_initial.py
|
ae82676f9c48d490aaed6c97ff64cd7594a38393
|
[] |
no_license
|
barlapelican/my-first-blog
|
4a20ef5f3723209225510436513321b10b5d9fcf
|
1594006935765d288434d4542502deb3e954f974
|
refs/heads/master
| 2020-06-17T06:06:07.116828
| 2019-07-08T13:53:25
| 2019-07-08T13:53:25
| 195,823,812
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 987
|
py
|
# Generated by Django 2.0.13 on 2019-07-08 13:06
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
import django.utils.timezone
class Migration(migrations.Migration):
initial = True
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='Post',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('title', models.CharField(max_length=200)),
('text', models.TextField()),
('created_date', models.DateTimeField(default=django.utils.timezone.now)),
('published_date', models.DateTimeField(blank=True, null=True)),
('author', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
],
),
]
|
[
"du@example.com"
] |
du@example.com
|
9e1d8e2de437c05c4bfb0801655ea47bebb855fb
|
9743d5fd24822f79c156ad112229e25adb9ed6f6
|
/xai/brain/wordbase/otherforms/_palls.py
|
dbff538f21fd450bb0e9c7a702715178521cdfad
|
[
"MIT"
] |
permissive
|
cash2one/xai
|
de7adad1758f50dd6786bf0111e71a903f039b64
|
e76f12c9f4dcf3ac1c7c08b0cc8844c0b0a104b6
|
refs/heads/master
| 2021-01-19T12:33:54.964379
| 2017-01-28T02:00:50
| 2017-01-28T02:00:50
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 214
|
py
|
#class header
class _PALLS():
def __init__(self,):
self.name = "PALLS"
		self.definitions = "pall"
self.parents = []
self.childen = []
self.properties = []
self.jsondata = {}
self.basic = ['pall']
|
[
"xingwang1991@gmail.com"
] |
xingwang1991@gmail.com
|
3f395841d95506c7c4b11095f96b073ae8043dea
|
b5a7c9ae13c81d655c176ceb0b8a73b4399cbf7a
|
/practico_02/ejercicio_01.py
|
847b669e2c61394631d2e994bc0d7f2731006427
|
[
"MIT"
] |
permissive
|
ELC/TUPPython
|
7459f4af5eb0306da1a61fd1e175ca4a68e5ac46
|
0115ece1dfdd599626f1cdeb410245fbee2aa4f8
|
refs/heads/master
| 2023-06-07T21:03:12.151001
| 2021-03-17T22:31:51
| 2021-03-17T22:31:51
| 338,466,855
| 1
| 1
|
MIT
| 2021-02-28T02:48:20
| 2021-02-13T00:36:50
|
Python
|
UTF-8
|
Python
| false
| false
| 856
|
py
|
"""Módulos
Antes de realizar este TP, se recomienda ver el siguiente video:
https://youtu.be/A47sszUdTsM
En este archivo se deben importar los módulos:
- main.py as main
- source/util.py as util
- source/controller/controller.py as controller
Los imports deben hacerse de forma tal que funcionen con TODAS las formas
posibles de invocación (estando parados en la carpeta practico_02):
$PATH$/practico_02> python ejercicio_01.py
$PATH$/practico_02> python -m ejercicio_01
Referencia: https://docs.python.org/3/reference/import.html#the-import-system
"""
import main
import source.util as util
import source.controller.controller as controller
# DO NOT MODIFY - START
assert main.name == "main"
assert util.name == "util"
assert controller.name == "controller"
# DO NOT MODIFY - END
# The next exercise is in source/ejercicio_02.py
|
[
"elcg@gmx.com"
] |
elcg@gmx.com
|
1719fe5138986b05ab207d3c7b30490116f74c96
|
e980e13bd0d264b3880705fb53a795a89fb5cfe6
|
/sales_order/repair_order.py
|
f6ba01e2762bf878cdbc8480aa7f1e1f9f8ddf80
|
[] |
no_license
|
qianpeng-shen/u8
|
8e430ccab5390254b6660cbd047fc2ac495a04ca
|
7fd28399dbf921826c1ef024800994412ab1b623
|
refs/heads/master
| 2020-05-13T20:28:16.764800
| 2019-04-16T09:29:58
| 2019-04-16T09:29:58
| 181,657,315
| 2
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,972
|
py
|
# -*- coding:utf-8 -*-
# Parse repair-type work order data
def analysis_repair(data):
repair_list = []
for repair in range(1,data.nrows):
repair_dict = {}
repair_dict['Status'] = data.cell(repair,1).value
repair_dict['UserID_ZDR'] = data.cell(repair,7).value
repair_dict['AdgoupID'] = data.cell(repair,8).value
repair_dict['UserID_KF'] = data.cell(repair,10).value
        if data.cell(repair,11).value:
if data.cell(repair,11).value == '网页表单':
repair_dict['CreateOrderChannel'] = '线上'
else:
repair_dict['CreateOrderChannel'] = '电话'
repair_dict['DetailContentMore'] = data.cell(repair,12).value
repair_dict['CallerNumber'] = data.cell(repair,13).value
repair_dict['UserName'] = data.cell(repair,20).value
repair_dict['ProvinceID'] = data.cell(repair,22).value
repair_dict['MunicipalityID'] = data.cell(repair,23).value
repair_dict['PrefectureID'] = data.cell(repair,24).value
repair_dict['Address_Detail'] = data.cell(repair,34).value
repair_dict['SN_GDXX'] = data.cell(repair,38).value
        repair_dict['AppointmentTime'] = (data.cell(repair,40).value).replace('/','-') + 'T00:00:00Z'
repair_dict['WarrantyPeriod'] = data.cell(repair,41).value
if data.cell(repair,43).value:
malfunction = data.cell(repair,43).value
if ',' in malfunction:
repair_dict['Breakdown_PhenomenonID'] = malfunction.split(',')[1]
repair_dict['RelatedServiceProvider'] = data.cell(repair,46).value
repair_dict['ServiceProvidersNumber'] = data.cell(repair,48).value
repair_dict['ActivityContent'] = data.cell(repair,49).value
repair_dict['Reason'] = data.cell(repair,50).value
repair_dict['MailingAddress'] = data.cell(repair,52).value
repair_dict['ReturnToLogistics'] = data.cell(repair,53).value
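        # Assumed completion: collect each parsed dict (repair_list is otherwise unused) and return it.
        repair_list.append(repair_dict)
    return repair_list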
|
[
"shenqianpeng@chengfayun.com"
] |
shenqianpeng@chengfayun.com
|
fbe657576a5f4817faee93631a0c29bd41fef7fd
|
43461f999228079c9bfee03f0e4043f08426051f
|
/python_zero/飞机大战/hm_11_监听退出事件.py
|
ac165dcc7445e849fe8d415791911b793b4875e3
|
[] |
no_license
|
MapleStoryBoy/spider
|
f9af844ae9812fe21141060213ac2677e719ac73
|
b014d81d52805f9317e85b66024d047e73d59053
|
refs/heads/master
| 2020-05-21T18:27:50.585790
| 2019-07-12T10:11:58
| 2019-07-12T10:11:58
| 186,132,575
| 6
| 2
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,376
|
py
|
import pygame
# Initialize the game
pygame.init()
# Create the game window, 480 * 700
screen = pygame.display.set_mode((480, 700))
# Draw the background image
bg = pygame.image.load("./images/background.png")
screen.blit(bg, (0, 0))
# pygame.display.update()
# Draw the hero's plane
hero = pygame.image.load("./images/me1.png")
screen.blit(hero, (150, 300))
# Once all drawing is done, call update once to refresh the display
pygame.display.update()
# Create a clock object
clock = pygame.time.Clock()
# 1. Define a rect to record the plane's initial position
hero_rect = pygame.Rect(150, 300, 102, 126)
# Game loop -> the game officially starts!
while True:
    # Control how often the loop body runs
    clock.tick(60)
    # Listen for events
    for event in pygame.event.get():
        # Check whether the event type is the quit event
        if event.type == pygame.QUIT:
            print("游戏退出...")
            # quit unloads all pygame modules
            pygame.quit()
            # exit() terminates the currently running program immediately
            exit()
    # 2. Update the plane's position
    hero_rect.y -= 1
    # Check the plane's position
    if hero_rect.y <= 0:
        hero_rect.y = 700
    # 3. Call blit to draw the images
    screen.blit(bg, (0, 0))
    screen.blit(hero, hero_rect)
    # 4. Call update to refresh the display
    pygame.display.update()
pygame.quit()
|
[
"MapleStoryBoy@163.com"
] |
MapleStoryBoy@163.com
|
db4bc9a1b1db2b8a2081ccbfc57774f296f255e9
|
1f9e643698f683e77ed5f253cafda776b204f5d2
|
/backend/franchise_guru_4345/urls.py
|
5f0e09df865079b377bc64581a60b6f4c1dedb06
|
[] |
no_license
|
crowdbotics-apps/franchise-guru-4345
|
ca907aaed1618abd5828ce20c108a90f01f09af7
|
53712fe881aa94579121e1c7384ab3e039fccd9d
|
refs/heads/master
| 2022-12-12T06:50:11.577025
| 2019-06-06T20:54:46
| 2019-06-06T20:54:46
| 190,648,540
| 0
| 0
| null | 2022-12-06T16:07:16
| 2019-06-06T20:54:42
|
JavaScript
|
UTF-8
|
Python
| false
| false
| 1,074
|
py
|
"""franchise_guru_4345 URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.11/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
    1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import url, include
from django.contrib import admin
urlpatterns = [
url('', include('home.urls')),
url(r'^accounts/', include('allauth.urls')),
url(r'^api/v1/', include('home.api.v1.urls')),
url(r'^admin/', admin.site.urls),
]
admin.site.site_header = 'Franchise Guru'
admin.site.site_title = 'Franchise Guru Admin Portal'
admin.site.index_title = 'Franchise Guru Admin'
|
[
"team@crowdbotics.com"
] |
team@crowdbotics.com
|
4fed6cdfbfd5f2a72de1850169ab92171fcb5f2b
|
ded46c3a86c2a70328a63d779ac038d636ae5906
|
/_WSpython/Pandas06_04_GroupByChkPop_최임정.py
|
25b7e2d0123dcbc8d13c20046ed655462fdfe522
|
[] |
no_license
|
imjoung/hongik_univ
|
82d0e7ea31763713f51bbde9d45e4aae5cb73849
|
82a3a77605d74d13eb76b915b215f6e245968180
|
refs/heads/main
| 2023-06-24T12:49:46.087083
| 2021-07-15T06:31:57
| 2021-07-15T06:31:57
| 379,128,178
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 233
|
py
|
# coding: utf-8
# In[19]:
import pandas as pd
df=pd.read_csv('../data/gapminder.tsv','\t')
# In[20]:
uniqueList=df['year'].unique()
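# For each distinct year, print the mean population across all rows for that year.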
for idx in uniqueList:
yearList=df[df['year'] == idx]
print(yearList['pop'].mean())
|
[
"noreply@github.com"
] |
imjoung.noreply@github.com
|
5ec23bfac0f13f11f9935fea8caf3b8b1e956401
|
c4af67db4c523d20f2d55aef90ba77db1fb53c38
|
/CMFCalendar/testing.py
|
0918f04c86d1a5fb9cdbaaf7709a9ab261d013b7
|
[] |
no_license
|
dtgit/dtedu
|
e59b16612d7d9ea064026bf80a44657082ef45a3
|
d787885fe7ed0de6f9e40e9b05d852a0e9d60677
|
refs/heads/master
| 2020-04-06T05:22:50.025074
| 2009-04-08T20:13:20
| 2009-04-08T20:13:20
| 171,351
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,008
|
py
|
##############################################################################
#
# Copyright (c) 2006 Zope Corporation and Contributors. All Rights Reserved.
#
# This software is subject to the provisions of the Zope Public License,
# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution.
# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
# FOR A PARTICULAR PURPOSE.
#
##############################################################################
""" Unit test layers.
$Id: testing.py 73064 2007-03-08 14:03:20Z yuppie $
"""
from Testing import ZopeTestCase
ZopeTestCase.installProduct('ZCTextIndex', 1)
ZopeTestCase.installProduct('CMFCore', 1)
import transaction
from Products.Five import zcml
from Products.CMFCore.testing import FunctionalZCMLLayer
from Products.CMFDefault.factory import addConfiguredSite
class FunctionalLayer(FunctionalZCMLLayer):
@classmethod
def setUp(cls):
import Products.CMFCalendar
import Products.CMFDefault
import Products.CMFTopic
import Products.DCWorkflow
zcml.load_config('configure.zcml', Products.CMFCalendar)
zcml.load_config('configure.zcml', Products.CMFDefault)
zcml.load_config('configure.zcml', Products.CMFTopic)
zcml.load_config('configure.zcml', Products.DCWorkflow)
app = ZopeTestCase.app()
addConfiguredSite(app, 'site', 'Products.CMFDefault:default',
snapshot=False,
extension_ids=('Products.CMFCalendar:default',
'Products.CMFCalendar:skins_support'))
transaction.commit()
ZopeTestCase.close(app)
@classmethod
def tearDown(cls):
app = ZopeTestCase.app()
app._delObject('site')
transaction.commit()
ZopeTestCase.close(app)
|
[
"ron@domU-12-31-39-02-65-03.compute-1.internal"
] |
ron@domU-12-31-39-02-65-03.compute-1.internal
|
6485d44a97f1ed29a4fa36480a7c390436e0aa7f
|
2b25aae9266437b657e748f3d6fea4db9e9d7f15
|
/CMU/6lab/coroutines1.py
|
a44559612b62e023d62943e13b80f94a5246447f
|
[] |
no_license
|
Zilby/Stuy-Stuff
|
b1c3bc23abf40092a8a7a80e406e7c412bd22ae0
|
5c5e375304952f62667d3b34b36f0056c1a8e753
|
refs/heads/master
| 2020-05-18T03:03:48.210196
| 2018-11-15T04:50:03
| 2018-11-15T04:50:03
| 24,191,397
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 202
|
py
|
s="The quick brown fox jumped over the lazy old dog."
def capitalize():
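    # A simple coroutine: each word sent in via send() is printed in upper case.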
while True:
value=yield
print value.upper()
c=capitalize()
c.next()
for word in s.split():
c.send(word)
|
[
"azilby@gmail.com"
] |
azilby@gmail.com
|
0aefadcd0195f3d016d2f2e73d810a3fa481c9bf
|
42c48f3178a48b4a2a0aded547770027bf976350
|
/google/ads/google_ads/v3/proto/errors/keyword_plan_keyword_error_pb2.py
|
72f053c63c315f0883e175fbdc4ef1c9f7dc2a89
|
[
"Apache-2.0"
] |
permissive
|
fiboknacky/google-ads-python
|
e989464a85f28baca1f28d133994c73759e8b4d6
|
a5b6cede64f4d9912ae6ad26927a54e40448c9fe
|
refs/heads/master
| 2021-08-07T20:18:48.618563
| 2020-12-11T09:21:29
| 2020-12-11T09:21:29
| 229,712,514
| 0
| 0
|
Apache-2.0
| 2019-12-23T08:44:49
| 2019-12-23T08:44:49
| null |
UTF-8
|
Python
| false
| true
| 5,055
|
py
|
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: google/ads/googleads_v3/proto/errors/keyword_plan_keyword_error.proto
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2
DESCRIPTOR = _descriptor.FileDescriptor(
name='google/ads/googleads_v3/proto/errors/keyword_plan_keyword_error.proto',
package='google.ads.googleads.v3.errors',
syntax='proto3',
serialized_options=_b('\n\"com.google.ads.googleads.v3.errorsB\034KeywordPlanKeywordErrorProtoP\001ZDgoogle.golang.org/genproto/googleapis/ads/googleads/v3/errors;errors\242\002\003GAA\252\002\036Google.Ads.GoogleAds.V3.Errors\312\002\036Google\\Ads\\GoogleAds\\V3\\Errors\352\002\"Google::Ads::GoogleAds::V3::Errors'),
serialized_pb=_b('\nEgoogle/ads/googleads_v3/proto/errors/keyword_plan_keyword_error.proto\x12\x1egoogle.ads.googleads.v3.errors\x1a\x1cgoogle/api/annotations.proto\"\x82\x02\n\x1bKeywordPlanKeywordErrorEnum\"\xe2\x01\n\x17KeywordPlanKeywordError\x12\x0f\n\x0bUNSPECIFIED\x10\x00\x12\x0b\n\x07UNKNOWN\x10\x01\x12\x1e\n\x1aINVALID_KEYWORD_MATCH_TYPE\x10\x02\x12\x15\n\x11\x44UPLICATE_KEYWORD\x10\x03\x12\x19\n\x15KEYWORD_TEXT_TOO_LONG\x10\x04\x12\x1d\n\x19KEYWORD_HAS_INVALID_CHARS\x10\x05\x12\x1e\n\x1aKEYWORD_HAS_TOO_MANY_WORDS\x10\x06\x12\x18\n\x14INVALID_KEYWORD_TEXT\x10\x07\x42\xf7\x01\n\"com.google.ads.googleads.v3.errorsB\x1cKeywordPlanKeywordErrorProtoP\x01ZDgoogle.golang.org/genproto/googleapis/ads/googleads/v3/errors;errors\xa2\x02\x03GAA\xaa\x02\x1eGoogle.Ads.GoogleAds.V3.Errors\xca\x02\x1eGoogle\\Ads\\GoogleAds\\V3\\Errors\xea\x02\"Google::Ads::GoogleAds::V3::Errorsb\x06proto3')
,
dependencies=[google_dot_api_dot_annotations__pb2.DESCRIPTOR,])
_KEYWORDPLANKEYWORDERRORENUM_KEYWORDPLANKEYWORDERROR = _descriptor.EnumDescriptor(
name='KeywordPlanKeywordError',
full_name='google.ads.googleads.v3.errors.KeywordPlanKeywordErrorEnum.KeywordPlanKeywordError',
filename=None,
file=DESCRIPTOR,
values=[
_descriptor.EnumValueDescriptor(
name='UNSPECIFIED', index=0, number=0,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='UNKNOWN', index=1, number=1,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='INVALID_KEYWORD_MATCH_TYPE', index=2, number=2,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='DUPLICATE_KEYWORD', index=3, number=3,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='KEYWORD_TEXT_TOO_LONG', index=4, number=4,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='KEYWORD_HAS_INVALID_CHARS', index=5, number=5,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='KEYWORD_HAS_TOO_MANY_WORDS', index=6, number=6,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='INVALID_KEYWORD_TEXT', index=7, number=7,
serialized_options=None,
type=None),
],
containing_type=None,
serialized_options=None,
serialized_start=168,
serialized_end=394,
)
_sym_db.RegisterEnumDescriptor(_KEYWORDPLANKEYWORDERRORENUM_KEYWORDPLANKEYWORDERROR)
_KEYWORDPLANKEYWORDERRORENUM = _descriptor.Descriptor(
name='KeywordPlanKeywordErrorEnum',
full_name='google.ads.googleads.v3.errors.KeywordPlanKeywordErrorEnum',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
],
extensions=[
],
nested_types=[],
enum_types=[
_KEYWORDPLANKEYWORDERRORENUM_KEYWORDPLANKEYWORDERROR,
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=136,
serialized_end=394,
)
_KEYWORDPLANKEYWORDERRORENUM_KEYWORDPLANKEYWORDERROR.containing_type = _KEYWORDPLANKEYWORDERRORENUM
DESCRIPTOR.message_types_by_name['KeywordPlanKeywordErrorEnum'] = _KEYWORDPLANKEYWORDERRORENUM
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
KeywordPlanKeywordErrorEnum = _reflection.GeneratedProtocolMessageType('KeywordPlanKeywordErrorEnum', (_message.Message,), dict(
DESCRIPTOR = _KEYWORDPLANKEYWORDERRORENUM,
__module__ = 'google.ads.googleads_v3.proto.errors.keyword_plan_keyword_error_pb2'
,
__doc__ = """Container for enum describing possible errors from applying a keyword or
a negative keyword from a keyword plan.
""",
# @@protoc_insertion_point(class_scope:google.ads.googleads.v3.errors.KeywordPlanKeywordErrorEnum)
))
_sym_db.RegisterMessage(KeywordPlanKeywordErrorEnum)
DESCRIPTOR._options = None
# @@protoc_insertion_point(module_scope)
|
[
"noreply@github.com"
] |
fiboknacky.noreply@github.com
|
f69a5854f33bc30eb43daf851f9e43ceb207ec1a
|
b7948d60834c4c6fe58d8d665177511cb6db53e2
|
/Outpass Webapp + Api's - Django/student/migrations/0008_auto_20190815_0023.py
|
08405388f8bb2400e3756da54e002813b1d1e8b2
|
[] |
no_license
|
abhinavsharma629/Outpass-Generator
|
4a2ebc2c7d0fc678b2afd10a36c6cbcbc6583d60
|
f363d49c47543c70e2c114ab7d48ffaef83b5de4
|
refs/heads/master
| 2022-02-24T15:07:58.171462
| 2019-10-05T16:05:09
| 2019-10-05T16:05:09
| 205,933,273
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,289
|
py
|
# Generated by Django 2.2.4 on 2019-08-14 18:53
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('student', '0007_registeredcolleges_logo'),
]
operations = [
migrations.AlterField(
model_name='student',
name='bed_no',
field=models.PositiveIntegerField(blank=True, null=True),
),
migrations.AlterField(
model_name='student',
name='branch',
field=models.CharField(blank=True, max_length=50, null=True),
),
migrations.AlterField(
model_name='student',
name='er_no',
field=models.CharField(blank=True, max_length=7, null=True),
),
migrations.AlterField(
model_name='student',
name='hostel',
field=models.CharField(blank=True, max_length=50, null=True),
),
migrations.AlterField(
model_name='student',
name='room_no',
field=models.CharField(blank=True, max_length=50, null=True),
),
migrations.AlterField(
model_name='student',
name='year',
field=models.PositiveIntegerField(blank=True, null=True),
),
]
|
[
"abhinavsharma629@gmail.com"
] |
abhinavsharma629@gmail.com
|
7d5a0bdd30acb51aa2d53b292d0cadc6076e129e
|
ca7aa979e7059467e158830b76673f5b77a0f5a3
|
/Python_codes/p03068/s875123408.py
|
7ebd0a3af325dded24435da6029b158873c69c05
|
[] |
no_license
|
Aasthaengg/IBMdataset
|
7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901
|
f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8
|
refs/heads/main
| 2023-04-22T10:22:44.763102
| 2021-05-13T17:27:22
| 2021-05-13T17:27:22
| 367,112,348
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 166
|
py
|
n = int(input())
s = list(input())
k = int(input())
ans = ''
for i in range(n):
if s[i] != s[k-1]:
s[i] = '*'
else:
continue
print(*s, sep='')
|
[
"66529651+Aastha2104@users.noreply.github.com"
] |
66529651+Aastha2104@users.noreply.github.com
|
f3bb6fb019a485fe0bec264817b74915c0530643
|
7323b8039f47c0457ae90173c963549b7d1e6823
|
/sandbox/src1/histdemo.py
|
a8514133c69af80b7c5f510d812d969b0da96add
|
[
"LicenseRef-scancode-public-domain",
"BSD-2-Clause"
] |
permissive
|
sniemi/SamPy
|
abce0fb941f011a3264a8d74c25b522d6732173d
|
e048756feca67197cf5f995afd7d75d8286e017b
|
refs/heads/master
| 2020-05-27T18:04:27.156194
| 2018-12-13T21:19:55
| 2018-12-13T21:19:55
| 31,713,784
| 5
| 2
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 361
|
py
|
from matplotlib import rcParams
from pylab import *
mu, sigma = 100, 15
x = mu + sigma*randn(10000)
# the histogram of the data
n, bins, patches = hist(x, 100, normed=1)
# add a 'best fit' line
y = normpdf(bins, mu, sigma)
l = plot(bins, y, 'r--', linewidth=2)
xlim(40, 160)
xlabel('Smarts')
ylabel('P')
title(r'$\rm{IQ:}\/ \mu=100,\/ \sigma=15$')
show()
|
[
"niemi@stsci.edu"
] |
niemi@stsci.edu
|
d8b1f7b564f9c8a9889d070590faa58b2928a4d8
|
c6d22cf128819af1d48d02972bb9296a1687b9bb
|
/venv/Lib/site-packages/pyface/ui/wx/image_widget.py
|
2e50ff64cb90378c94caab22abc79b27e902d0f7
|
[
"BSD-3-Clause"
] |
permissive
|
GenomePhD/Bio1-HIV
|
92808a1e7e6339da6d07190ba3e1a2071f3e8428
|
b5059e7f121e4abb6888893f91f95dd79aed9ca4
|
refs/heads/master
| 2022-10-28T21:55:42.998205
| 2018-04-16T18:52:32
| 2018-04-16T18:52:32
| 129,792,081
| 0
| 1
| null | 2022-10-05T18:36:22
| 2018-04-16T19:03:26
|
Python
|
UTF-8
|
Python
| false
| false
| 7,447
|
py
|
#------------------------------------------------------------------------------
# Copyright (c) 2005, Enthought, Inc.
# All rights reserved.
#
# This software is provided without warranty under the terms of the BSD
# license included in enthought/LICENSE.txt and may be redistributed only
# under the conditions described in the aforementioned license. The license
# is also available online at http://www.enthought.com/licenses/BSD.txt
# Thanks for using Enthought open source!
#
# Author: Enthought, Inc.
# Description: <Enthought pyface package component>
#------------------------------------------------------------------------------
""" A clickable/draggable widget containing an image. """
# Major package imports.
import wx
# Enthought library imports.
from traits.api import Any, Bool, Event
# Local imports.
from .widget import Widget
class ImageWidget(Widget):
""" A clickable/draggable widget containing an image. """
#### 'ImageWidget' interface ##############################################
# The bitmap.
bitmap = Any
# Is the widget selected?
selected = Bool(False)
#### Events ####
# A key was pressed while the tree is in focus.
key_pressed = Event
# A node has been activated (ie. double-clicked).
node_activated = Event
# A drag operation was started on a node.
node_begin_drag = Event
# A (non-leaf) node has been collapsed.
node_collapsed = Event
# A (non-leaf) node has been expanded.
node_expanded = Event
# A left-click occurred on a node.
node_left_clicked = Event
# A right-click occurred on a node.
node_right_clicked = Event
#### Private interface ####################################################
_selected = Any
###########################################################################
# 'object' interface.
###########################################################################
def __init__ (self, parent, **traits):
""" Creates a new widget. """
# Base class constructors.
super(ImageWidget, self).__init__(**traits)
# Add some padding around the image.
size = (self.bitmap.GetWidth() + 10, self.bitmap.GetHeight() + 10)
# Create the toolkit-specific control.
self.control = wx.Window(parent, -1, size=size)
self.control.__tag__ = 'hack'
self._mouse_over = False
self._button_down = False
# Set up mouse event handlers:
self.control.Bind(wx.EVT_ENTER_WINDOW, self._on_enter_window)
self.control.Bind(wx.EVT_LEAVE_WINDOW, self._on_leave_window)
self.control.Bind(wx.EVT_LEFT_DCLICK, self._on_left_dclick)
self.control.Bind(wx.EVT_LEFT_DOWN, self._on_left_down)
self.control.Bind(wx.EVT_LEFT_UP, self._on_left_up)
self.control.Bind(wx.EVT_PAINT, self._on_paint)
# Pens used to draw the 'selection' marker:
# ZZZ: Make these class instances when moved to the wx toolkit code.
self._selectedPenDark = wx.Pen(
wx.SystemSettings.GetColour(wx.SYS_COLOUR_3DSHADOW), 1,
wx.PENSTYLE_SOLID
)
self._selectedPenLight = wx.Pen(
wx.SystemSettings.GetColour(wx.SYS_COLOUR_3DHIGHLIGHT), 1,
wx.PENSTYLE_SOLID
)
return
###########################################################################
# Private interface.
###########################################################################
#### Trait event handlers #################################################
def _bitmap_changed(self, bitmap):
""" Called when the widget's bitmap is changed. """
if self.control is not None:
self.control.Refresh()
return
def _selected_changed(self, selected):
""" Called when the selected state of the widget is changed. """
if selected:
for control in self.GetParent().GetChildren():
if hasattr(control, '__tag__'):
if control.Selected():
control.Selected(False)
break
self.Refresh()
return
#### wx event handlers ####################################################
def _on_enter_window(self, event):
""" Called when the mouse enters the widget. """
if self._selected is not None:
self._mouse_over = True
self.Refresh()
return
def _on_leave_window(self, event):
""" Called when the mouse leaves the widget. """
if self._mouse_over:
self._mouse_over = False
self.Refresh()
return
def _on_left_dclick(self, event):
""" Called when the left mouse button is double-clicked. """
#print 'left dclick'
event.Skip()
return
def _on_left_down ( self, event = None ):
""" Called when the left mouse button goes down on the widget. """
#print 'left down'
if self._selected is not None:
self.CaptureMouse()
self._button_down = True
self.Refresh()
event.Skip()
return
def _on_left_up ( self, event = None ):
""" Called when the left mouse button goes up on the widget. """
#print 'left up'
need_refresh = self._button_down
if need_refresh:
self.ReleaseMouse()
self._button_down = False
if self._selected is not None:
wdx, wdy = self.GetClientSizeTuple()
x = event.GetX()
y = event.GetY()
if (0 <= x < wdx) and (0 <= y < wdy):
if self._selected != -1:
self.Selected( True )
elif need_refresh:
self.Refresh()
return
if need_refresh:
self.Refresh()
event.Skip()
return
def _on_paint ( self, event = None ):
""" Called when the widget needs repainting. """
wdc = wx.PaintDC( self.control )
wdx, wdy = self.control.GetClientSizeTuple()
bitmap = self.bitmap
bdx = bitmap.GetWidth()
bdy = bitmap.GetHeight()
wdc.DrawBitmap( bitmap, (wdx - bdx) / 2, (wdy - bdy) / 2, True )
pens = [ self._selectedPenLight, self._selectedPenDark ]
bd = self._button_down
if self._mouse_over:
wdc.SetBrush( wx.TRANSPARENT_BRUSH )
wdc.SetPen( pens[ bd ] )
wdc.DrawLine( 0, 0, wdx, 0 )
wdc.DrawLine( 0, 1, 0, wdy )
wdc.SetPen( pens[ 1 - bd ] )
wdc.DrawLine( wdx - 1, 1, wdx - 1, wdy )
wdc.DrawLine( 1, wdy - 1, wdx - 1, wdy - 1 )
if self._selected == True:
wdc.SetBrush( wx.TRANSPARENT_BRUSH )
wdc.SetPen( pens[ bd ] )
wdc.DrawLine( 1, 1, wdx - 1, 1 )
wdc.DrawLine( 1, 1, 1, wdy - 1 )
wdc.DrawLine( 2, 2, wdx - 2, 2 )
wdc.DrawLine( 2, 2, 2, wdy - 2 )
wdc.SetPen( pens[ 1 - bd ] )
wdc.DrawLine( wdx - 2, 2, wdx - 2, wdy - 1 )
wdc.DrawLine( 2, wdy - 2, wdx - 2, wdy - 2 )
wdc.DrawLine( wdx - 3, 3, wdx - 3, wdy - 2 )
wdc.DrawLine( 3, wdy - 3, wdx - 3, wdy - 3 )
return
#### EOF ######################################################################
|
[
"stevetmat@users.noreply.github.com"
] |
stevetmat@users.noreply.github.com
|
dd23f81f0523a7ea828de9f8aa5f5cc5ce00d2d7
|
c9952dcac5658940508ddc139344a7243a591c87
|
/tests/lab18/test_ch018_t01_why_use_classes.py
|
89a74ccf8952816be596452943f8010beaf6bc90
|
[] |
no_license
|
wongcyrus/ite3101_introduction_to_programming
|
5da1c15212528423b3df91997327fe148abef4de
|
7cd76d0861d5355db5a6e2e171735bee2e78f829
|
refs/heads/master
| 2023-08-31T17:27:06.193049
| 2023-08-21T08:30:26
| 2023-08-21T08:30:26
| 136,574,036
| 3
| 2
| null | 2023-08-21T08:30:28
| 2018-06-08T06:06:49
|
Python
|
UTF-8
|
Python
| false
| false
| 1,037
|
py
|
import unittest
from tests.unit_test_helper import is_answer
class TestOutput(unittest.TestCase):
def setUp(self):
if is_answer:
from lab.lab18.ch018_t01_why_use_classes_ans import Fruit
else:
from lab.lab18.ch018_t01_why_use_classes import Fruit
self.fruit = Fruit("lemon", "yellow", "sour", False)
def test_member(self):
self.assertEqual("lemon", self.fruit.name)
self.assertEqual("yellow", self.fruit.color)
self.assertEqual("sour", self.fruit.flavor)
self.assertFalse(self.fruit.poisonous)
def test_create_instance(self):
if is_answer:
from lab.lab18.ch018_t01_why_use_classes_ans import lemon
else:
from lab.lab18.ch018_t01_why_use_classes import lemon
self.assertEqual("lemon", lemon.name)
self.assertEqual("yellow", lemon.color)
self.assertEqual("sour", lemon.flavor)
self.assertFalse(lemon.poisonous)
if __name__ == '__main__':
unittest.main()
|
[
"cywong@vtc.edu.hk"
] |
cywong@vtc.edu.hk
|
537dd076c49ad2ccafc435e3f66ed76126ba6de6
|
b1ddcf4bac9ca603a7a2333912eb29da8bf2cb7b
|
/modelViewset/api/views.py
|
7a9e893c1dd960e695c5c68e31de55adba80160d
|
[] |
no_license
|
sankethalake/django_practice
|
e9477ae0beee4923cd6758cc6d37517ea5979610
|
9877304f0c6415ae8979e5cc13a49559155fdd9d
|
refs/heads/main
| 2023-07-07T07:07:35.598657
| 2021-08-14T06:26:23
| 2021-08-14T06:26:23
| 389,917,128
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 243
|
py
|
from .models import Student
from .serializers import StudentSerializer
from rest_framework import viewsets
class StudentModelViewset(viewsets.ModelViewSet):
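    # ModelViewSet provides the list/retrieve/create/update/partial_update/destroy actions for Student.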
queryset = Student.objects.all()
serializer_class = StudentSerializer
|
[
"sankethalake@gmail.com"
] |
sankethalake@gmail.com
|
cc47947af7cebae7fdc2b5543d4508a2c820c757
|
cf74a48db45d0fa8c9ae58931a9368672d07fa19
|
/utils/zgrep.py
|
b587690671719faa84d712598846558189a92885
|
[
"MIT",
"LicenseRef-scancode-unknown-license-reference"
] |
permissive
|
razikallayi/NewsBlur
|
fdb7549d73dfd6765e2cf2e4007f1b9cfb39002f
|
a266d9f585400c506fa9727796a5dddba0e69ffb
|
refs/heads/master
| 2021-01-18T08:12:02.738442
| 2015-05-27T00:58:45
| 2015-05-27T00:58:45
| 36,501,810
| 1
| 0
| null | 2015-05-29T12:01:58
| 2015-05-29T12:01:56
| null |
UTF-8
|
Python
| false
| false
| 3,581
|
py
|
#!/usr/bin/env python
import os
import time
import select
import subprocess
import sys
from optparse import OptionParser
from requests.exceptions import ConnectionError
sys.path.insert(0, '/srv/newsblur')
os.environ['DJANGO_SETTINGS_MODULE'] = 'settings'
import fabfile
NEWSBLUR_USERNAME = 'sclay'
IGNORE_HOSTS = [
'push',
]
def main(role="app", role2="work", command=None, path=None):
delay = 1
while True:
try:
streams = create_streams_for_roles(role, role2, command=command, path=path)
print " --- Loading %s App Log Tails ---" % len(streams)
read_streams(streams)
except UnicodeDecodeError: # unexpected end of data
print " --- Lost connections - Retrying... ---"
time.sleep(1)
continue
except ConnectionError:
print " --- Retrying in %s seconds... ---" % delay
time.sleep(delay)
delay += 1
continue
except KeyboardInterrupt:
print " --- End of Logging ---"
break
def create_streams_for_roles(role, role2, command=None, path=None):
streams = list()
hosts = fabfile.do(split=True)
found = set()
if not path:
path = "/srv/newsblur/logs/newsblur.log"
if not command:
command = "tail -f"
for hostname in (hosts[role] + hosts[role2]):
if isinstance(hostname, dict):
address = hostname['address']
hostname = hostname['name']
elif ':' in hostname:
hostname, address = hostname.split(':', 1)
elif isinstance(hostname, tuple):
hostname, address = hostname[0], hostname[1]
else:
address = hostname
if any(h in hostname for h in IGNORE_HOSTS): continue
if hostname in found: continue
if 'ec2' in hostname:
s = subprocess.Popen(["ssh",
"-i", os.path.expanduser(os.path.join(fabfile.env.SECRETS_PATH,
"keys/ec2.pem")),
address, "%s %s" % (command, path)], stdout=subprocess.PIPE)
else:
s = subprocess.Popen(["ssh", "-l", NEWSBLUR_USERNAME,
"-i", os.path.expanduser(os.path.join(fabfile.env.SECRETS_PATH,
"keys/newsblur.key")),
address, "%s %s" % (command, path)], stdout=subprocess.PIPE)
s.name = hostname
streams.append(s)
found.add(hostname)
return streams
def read_streams(streams):
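    # Multiplex output from all ssh subprocesses, prefixing each line with its host name.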
while True:
r, _, _ = select.select(
[stream.stdout.fileno() for stream in streams], [], [])
for fileno in r:
for stream in streams:
if stream.stdout.fileno() != fileno:
continue
data = os.read(fileno, 4096)
if not data:
streams.remove(stream)
break
combination_message = "[%-6s] %s" % (stream.name[:6], data)
sys.stdout.write(combination_message)
break
if __name__ == "__main__":
parser = OptionParser()
parser.add_option("-f", "--find", dest="find")
parser.add_option("-p", "--path", dest="path")
(options, args) = parser.parse_args()
path = options.path
find = options.find
command = "zgrep \"%s\"" % find
main(role="app", role2="dev", command=command, path=path)
|
[
"samuel@ofbrooklyn.com"
] |
samuel@ofbrooklyn.com
|
e2328cbb036a2a53d77b6c6cc430606a33cc18a4
|
c9a4e88111d05cf9db399eba3ae83ddb3b0ad2da
|
/myapp/models.py
|
801590a2819433ea0630630a9b407a91b5cbd1d0
|
[] |
no_license
|
Django-TOPS/07JanPython
|
7861d9a515e9da951b14f8caa5b1bb0578d99557
|
b101e7b2b457250153aedb6a95354e10824ecec5
|
refs/heads/master
| 2023-04-12T17:08:44.644228
| 2021-05-20T03:30:10
| 2021-05-20T03:30:10
| 369,140,333
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 574
|
py
|
from django.db import models
# Create your models here.
class signup(models.Model):
firstname=models.CharField(max_length=20)
lastname=models.CharField(max_length=20)
username=models.EmailField()
password=models.CharField(max_length=20)
city=models.CharField(max_length=20)
state=models.CharField(max_length=20)
zipcode=models.IntegerField()
class notes(models.Model):
title=models.CharField(max_length=100)
category=models.CharField(max_length=100)
myfiles=models.FileField(upload_to="FileUpload")
comments=models.TextField()
|
[
"sanketiosonline@gmail.com"
] |
sanketiosonline@gmail.com
|
a2895c375cdca0634a6d85a52cc1838a3c58292c
|
092f2dd962c9c393904fd9886a726c611f8aa811
|
/palletsprojects_tutorial/tests/test_blog.py
|
dc6550b7ef96d8db0bc0fc4e69758814b26112df
|
[] |
no_license
|
iAnafem/flask_tutorial_projects
|
b5d2b4f7e2eb68ed54de3938d9006889c9fe76fb
|
c0f77fe4056b8f7c4ab16bb9cbc75f4fe90d4bde
|
refs/heads/master
| 2022-12-13T07:13:56.415457
| 2019-08-28T20:14:35
| 2019-08-28T20:16:11
| 200,421,166
| 0
| 0
| null | 2022-12-08T06:00:55
| 2019-08-03T21:09:25
|
Python
|
UTF-8
|
Python
| false
| false
| 2,609
|
py
|
import pytest
from flaskr.db import get_db
def test_index(client, auth):
response = client.get('/')
assert b'Log In' in response.data
assert b'Register' in response.data
auth.login()
response = client.get('/')
assert b'Log Out' in response.data
assert b'test title' in response.data
assert b'by test on 2018-01-01' in response.data
assert b'test\nbody' in response.data
assert b'href="/1/update"' in response.data
@pytest.mark.parametrize('path', (
'/create',
'/1/update',
'/1/delete',
))
def test_login_required(client, path):
response = client.post(path)
assert response.headers['Location'] == 'http://localhost/auth/login'
def test_author_required(app, client, auth):
# change the post author to another user
with app.app_context():
db = get_db()
db.execute('UPDATE post SET author_id = 2 WHERE id = 1')
db.commit()
auth.login()
# current user can't modify other user's post
assert client.post('/1/update').status_code == 403
assert client.post('/1/delete').status_code == 403
# current user doesn't see edit link
assert b'href="/1/update"' not in client.get('/').data
@pytest.mark.parametrize('path', (
'/2/update',
'/2/delete',
))
def test_exists_required(client, auth, path):
auth.login()
assert client.post(path).status_code == 404
def test_create(client, auth, app):
auth.login()
assert client.get('/create').status_code == 200
client.post('create', data={'title': 'created', 'body': ''})
with app.app_context():
db = get_db()
count = db.execute('SELECT COUNT(id) FROM post').fetchone()[0]
assert count == 2
def test_update(client, auth, app):
auth.login()
assert client.get('/1/update').status_code == 200
client.post('/1/update', data={'title': 'updated', 'body': ''})
with app.app_context():
db = get_db()
post = db.execute('SELECT * FROM post WHERE id = 1').fetchone()
assert post['title'] == 'updated'
@pytest.mark.parametrize('path', (
'/create',
'/1/update',
))
def test_create_update_validate(client, auth, path):
auth.login()
response = client.post(path, data={'title': '', 'body': ''})
assert b'Title is required.' in response.data
def test_delete(client, auth, app):
auth.login()
response = client.post('/1/delete')
assert response.headers['Location'] == 'http://localhost/'
with app.app_context():
db = get_db()
post = db.execute('SELECT * FROM post WHERE id = 1').fetchone()
assert post is None
|
[
"DPronkin@mostro.ru"
] |
DPronkin@mostro.ru
|
d7f6722540037a29c7c6722f0fca5d042b7c0552
|
45d6b7739ef7e61779d778b16e2d2cb9b92a08c0
|
/test/run_in_parallel-200PU-grow/SUB-Analyzer-44.py
|
153296b77ea28b2b6bcea87c4d79a9a5af001630
|
[] |
no_license
|
isobelojalvo/phase2L1TauAnalyzer
|
40b545baec97bf287a8d8ab26bea70546bf9f6f8
|
98ef6d31a523698ba0de48763cadee1d5b2ce695
|
refs/heads/master
| 2021-01-22T08:38:17.965156
| 2019-07-25T17:25:51
| 2019-07-25T17:25:51
| 92,623,686
| 0
| 1
| null | 2019-07-23T19:43:55
| 2017-05-27T20:56:25
|
Python
|
UTF-8
|
Python
| false
| false
| 6,667
|
py
|
# Auto generated configuration file
# using:
# Revision: 1.19
# Source: /local/reps/CMSSW/CMSSW/Configuration/Applications/python/ConfigBuilder.py,v
# with command line options: step2 --python_filename=rerun_step2_L1_onMCL1_FEVTHLTDEBUG.py --no_exec -s L1 --datatier GEN-SIM-DIGI-RAW -n 1 --era Phase2_timing --eventcontent FEVTDEBUGHLT --filein file:/afs/cern.ch/user/r/rekovic/release/CMSSW_9_3_2/src/step2_DIGI_PU200_10ev.root --conditions 93X_upgrade2023_realistic_v2 --beamspot HLLHC14TeV --geometry Extended2023D17 --fileout file:step2_ZEE_PU200_1ev_rerun-L1-L1Ntuple.root --customise=L1Trigger/L1TNtuples/customiseL1Ntuple.L1NtupleEMU
import FWCore.ParameterSet.Config as cms
from Configuration.StandardSequences.Eras import eras
process = cms.Process('L1',eras.Phase2_trigger)
# import of standard configurations
process.load('Configuration.StandardSequences.Services_cff')
process.load('SimGeneral.HepPDTESSource.pythiapdt_cfi')
process.load('FWCore.MessageService.MessageLogger_cfi')
process.load('Configuration.EventContent.EventContent_cff')
process.load('SimGeneral.MixingModule.mixNoPU_cfi')
process.load('Configuration.Geometry.GeometryExtended2023D17Reco_cff')
process.load('Configuration.StandardSequences.MagneticField_cff')
process.load('Configuration.StandardSequences.SimL1Emulator_cff')
process.load('Configuration.StandardSequences.EndOfProcess_cff')
process.load('Configuration.StandardSequences.FrontierConditions_GlobalTag_cff')
process.load('L1Trigger.TrackFindingTracklet.L1TrackletTracks_cff')
process.maxEvents = cms.untracked.PSet(
input = cms.untracked.int32(4000)
)
# Input source
process.source = cms.Source("PoolSource",
fileNames = cms.untracked.vstring(
"root://cmsxrootd.fnal.gov///store/relval/CMSSW_9_3_7/RelValZTT_14TeV/MINIAODSIM/PU25ns_93X_upgrade2023_realistic_v5_2023D17PU200-v1/10000/6CE39BE9-EA2D-E811-8FDA-0242AC130002.root"
),
inputCommands = cms.untracked.vstring("keep *",
"drop l1tHGCalTowerMapBXVector_hgcalTriggerPrimitiveDigiProducer_towerMap_HLT",
"drop l1tEMTFHit2016Extras_simEmtfDigis_CSC_HLT",
"drop l1tEMTFHit2016Extras_simEmtfDigis_RPC_HLT",
"drop l1tEMTFHit2016s_simEmtfDigis__HLT",
"drop l1tEMTFTrack2016Extras_simEmtfDigis__HLT",
"drop l1tEMTFTrack2016s_simEmtfDigis__HLT")
#skipEvents = cms.untracked.uint32(80)
)
process.source.secondaryFileNames = cms.untracked.vstring(
"/store/relval/CMSSW_9_3_7/RelValZTT_14TeV/GEN-SIM-DIGI-RAW/PU25ns_93X_upgrade2023_realistic_v5_2023D17PU200-v1/10000/FC056F35-2E2D-E811-BE3A-0242AC130002.root")
process.source.lumisToProcess = cms.untracked.VLuminosityBlockRange("1:46")
process.options = cms.untracked.PSet(
)
# Production Info
process.configurationMetadata = cms.untracked.PSet(
annotation = cms.untracked.string('step2 nevts:1'),
name = cms.untracked.string('Applications'),
version = cms.untracked.string('$Revision: 1.19 $')
)
# Output definition
process.FEVTDEBUGHLToutput = cms.OutputModule("PoolOutputModule",
dataset = cms.untracked.PSet(
dataTier = cms.untracked.string('GEN-SIM-DIGI-RAW'),
filterName = cms.untracked.string('')
),
fileName = cms.untracked.string('file:test_reprocess.root'),
splitLevel = cms.untracked.int32(0)
)
# Additional output definition
# Other statements
from Configuration.AlCa.GlobalTag import GlobalTag
process.GlobalTag = GlobalTag(process.GlobalTag, '100X_upgrade2023_realistic_v1', '')
process.load('SimCalorimetry.HcalTrigPrimProducers.hcaltpdigi_cff')
process.load('CalibCalorimetry.CaloTPG.CaloTPGTranscoder_cfi')
process.load('L1Trigger.L1THGCal.hgcalTriggerPrimitives_cff')
process.hgcl1tpg_step = cms.Path(process.hgcalTriggerPrimitives)
process.load('SimCalorimetry.EcalEBTrigPrimProducers.ecalEBTriggerPrimitiveDigis_cff')
process.EcalEBtp_step = cms.Path(process.simEcalEBTriggerPrimitiveDigis)
process.L1TrackTrigger_step = cms.Path(process.L1TrackletTracksWithAssociators)
process.VertexProducer.l1TracksInputTag = cms.InputTag("TTTracksFromTracklet", "Level1TTTracks")
# Path and EndPath definitions
process.L1simulation_step = cms.Path(process.SimL1Emulator)
process.endjob_step = cms.EndPath(process.endOfProcess)
process.FEVTDEBUGHLToutput_step = cms.EndPath(process.FEVTDEBUGHLToutput)
############################################################
# L1 pf object
###########################################################
process.load("L1Trigger.Phase2L1ParticleFlow.pfTracksFromL1Tracks_cfi")
from L1Trigger.Phase2L1ParticleFlow.l1ParticleFlow_cff import *
process.l1pf = cms.Path(process.pfTracksFromL1Tracks+process.l1ParticleFlow)
############################################################
# L1 Tau object
############################################################
process.load("L1Trigger.Phase2L1Taus.L1PFTauProducer_cff")
process.L1PFTauProducer.min_pi0pt = cms.double(2.5);
process.L1PFTauProducer.L1PFObjects = cms.InputTag("l1pfProducer","PF")
process.L1PFTauProducer.L1Neutrals = cms.InputTag("l1pfProducer")
process.L1PFTauProducer.L1Clusters = cms.InputTag("l1pfProducer","PF")
process.L1PFTaus = cms.Path(process.L1PFTauProducer)
# L1 Tau Analyzer
process.load("L1Trigger.phase2L1TauAnalyzer.phase2L1TauAnalyzer_cfi")
process.analyzer = cms.Path(process.L1TauAnalyzer)
process.TFileService = cms.Service("TFileService",
fileName = cms.string("analyzer-grow-l1t.root"),
closeFileFast = cms.untracked.bool(True)
)
# Schedule definition
process.schedule = cms.Schedule(process.EcalEBtp_step,process.L1TrackTrigger_step,process.L1simulation_step,process.l1pf,process.L1PFTaus,process.analyzer,process.endjob_step)
from PhysicsTools.PatAlgos.tools.helpers import associatePatAlgosToolsTask
associatePatAlgosToolsTask(process)
# Add early deletion of temporary data products to reduce peak memory need
from Configuration.StandardSequences.earlyDeleteSettings_cff import customiseEarlyDelete
process = customiseEarlyDelete(process)
# End adding early deletion
#dump_file = open('dump.py','w')
#dump_file.write(process.dumpPython())
process.source.secondaryFileNames = cms.untracked.vstring(
"/store/relval/CMSSW_9_3_7/RelValZTT_14TeV/GEN-SIM-DIGI-RAW/PU25ns_93X_upgrade2023_realistic_v5_2023D17PU200-v1/10000/76A6C136-2E2D-E811-AA99-0242AC130002.root")
process.source.lumisToProcess = cms.untracked.VLuminosityBlockRange("1:48")
process.maxEvents = cms.untracked.PSet(input = cms.untracked.int32(100))
# Input source
process.source.fileNames = cms.untracked.vstring($inputFileNames)
process.TFileService = cms.Service("TFileService",
fileName = cms.string("$outputFileName")
)
|
[
"ojalvo@wisc.edu"
] |
ojalvo@wisc.edu
|
4069cf696c9532eef3b0edf6220845339f5f76ec
|
786027545626c24486753351d6e19093b261cd7d
|
/ghidra9.2.1_pyi/ghidra/app/util/bin/format/dwarf4/next/DWARFRegisterMappingsManager.pyi
|
ba1a1bbc88343e1b810e395de5dbfa5da91fb33f
|
[
"MIT"
] |
permissive
|
kohnakagawa/ghidra_scripts
|
51cede1874ef2b1fed901b802316449b4bf25661
|
5afed1234a7266c0624ec445133280993077c376
|
refs/heads/main
| 2023-03-25T08:25:16.842142
| 2021-03-18T13:31:40
| 2021-03-18T13:31:40
| 338,577,905
| 14
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 4,592
|
pyi
|
import generic.jar
import ghidra.app.util.bin.format.dwarf4.next
import ghidra.program.model.lang
import java.lang
import org.jdom
class DWARFRegisterMappingsManager(object):
"""
Factory class to instantiate and cache DWARFRegisterMappings objects.
"""
def __init__(self): ...
def equals(self, __a0: object) -> bool: ...
def getClass(self) -> java.lang.Class: ...
@staticmethod
def getDWARFRegisterMappingFileFor(lang: ghidra.program.model.lang.Language) -> generic.jar.ResourceFile:
"""
Returns {@link ResourceFile} that should contain the specified language's
DWARF register mapping, never null.
@param lang {@link Language} to find the mapping file for.
@return {@link ResourceFile} of where the mapping file should be, never
null.
@throws IOException if not a Sleigh language or no mapping specified or
multiple mappings specified.
"""
...
@staticmethod
def getMappingForLang(lang: ghidra.program.model.lang.Language) -> ghidra.app.util.bin.format.dwarf4.next.DWARFRegisterMappings:
"""
Returns a possibly cached {@link DWARFRegisterMappings} object for the
specified language,
<p>
@param lang {@link Language} to get the matching DWARF register mappings
for
@return {@link DWARFRegisterMappings} instance, never null
@throws IOException if mapping not found or invalid
"""
...
@overload
@staticmethod
def hasDWARFRegisterMapping(lang: ghidra.program.model.lang.Language) -> bool:
"""
Returns true if the specified {@link Language} has DWARF register
mappings.
@param lang The {@link Language} to test
@return true if the language has a DWARF register mapping specified
@throws IOException if there was an error in the language LDEF file.
"""
...
@overload
@staticmethod
def hasDWARFRegisterMapping(langDesc: ghidra.program.model.lang.LanguageDescription) -> bool:
"""
Returns true if the specified {@link LanguageDescription} has DWARF
register mappings.
@param langDesc The {@link LanguageDescription} to test
@return true if the language has a DWARF register mapping specified
@throws IOException if there was an error in the language LDEF file.
"""
...
def hashCode(self) -> int: ...
def notify(self) -> None: ...
def notifyAll(self) -> None: ...
@staticmethod
def readMappingForLang(lang: ghidra.program.model.lang.Language) -> ghidra.app.util.bin.format.dwarf4.next.DWARFRegisterMappings:
"""
Finds the DWARF register mapping information file specified in the
specified language's LDEF file and returns a new
{@link DWARFRegisterMappings} object containing the data read from that
file.
<p>
Throws {@link IOException} if the lang does not have a mapping or it is
invalid.
<p>
@param lang {@link Language} to read the matching DWARF register mappings
for
@return a new {@link DWARFRegisterMappings} instance, created from
information read from the {@link #DWARF_REGISTER_MAPPING_NAME}
xml file referenced in the language's LDEF, never null.
@throws IOException if there is no DWARF register mapping file associated
with the specified {@link Language} or if there was an error
in the register mapping data.
"""
...
@staticmethod
def readMappingFrom(rootElem: org.jdom.Element, lang: ghidra.program.model.lang.Language) -> ghidra.app.util.bin.format.dwarf4.next.DWARFRegisterMappings:
"""
Creates a new {@link DWARFRegisterMappings} from the data present in the
xml element.
<p>
@param rootElem JDom XML element containing the <dwarf> root
element of the mapping file.
@param lang The Ghidra {@link Language} that the DWARF register mapping
applies to
@return a new {@link DWARFRegisterMappings} instance, never null.
@throws IOException if missing or invalid data found in xml
"""
...
def toString(self) -> unicode: ...
@overload
def wait(self) -> None: ...
@overload
def wait(self, __a0: long) -> None: ...
@overload
def wait(self, __a0: long, __a1: int) -> None: ...
|
[
"tsunekou1019@gmail.com"
] |
tsunekou1019@gmail.com
|
2e1ce2b1f525c0e9d47d6fbdb67c819a692334fb
|
245b92f4140f30e26313bfb3b2e47ed1871a5b83
|
/airflow/providers/google_vendor/googleads/v12/services/services/customer_feed_service/transports/base.py
|
023d30df6c5c442ea4db3635f9ad24a98bee2a10
|
[
"Apache-2.0",
"BSD-3-Clause",
"MIT"
] |
permissive
|
ephraimbuddy/airflow
|
238d6170a0e4f76456f00423124a260527960710
|
3193857376bc2c8cd2eb133017be1e8cbcaa8405
|
refs/heads/main
| 2023-05-29T05:37:44.992278
| 2023-05-13T19:49:43
| 2023-05-13T19:49:43
| 245,751,695
| 2
| 1
|
Apache-2.0
| 2021-05-20T08:10:14
| 2020-03-08T04:28:27
| null |
UTF-8
|
Python
| false
| false
| 5,984
|
py
|
# -*- coding: utf-8 -*-
# Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import abc
from typing import Awaitable, Callable, Optional, Sequence, Union
import pkg_resources
import google.auth # type: ignore
import google.api_core # type: ignore
from google.api_core import exceptions as core_exceptions # type: ignore
from google.api_core import gapic_v1 # type: ignore
from google.api_core import retry as retries # type: ignore
from google.auth import credentials as ga_credentials # type: ignore
from google.oauth2 import service_account # type: ignore
from airflow.providers.google_vendor.googleads.v12.services.types import customer_feed_service
try:
DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
gapic_version=pkg_resources.get_distribution("google-ads",).version,
)
except pkg_resources.DistributionNotFound:
DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo()
class CustomerFeedServiceTransport(abc.ABC):
"""Abstract transport class for CustomerFeedService."""
AUTH_SCOPES = ("https://www.googleapis.com/auth/adwords",)
DEFAULT_HOST: str = "googleads.googleapis.com"
def __init__(
self,
*,
host: str = DEFAULT_HOST,
credentials: ga_credentials.Credentials = None,
credentials_file: Optional[str] = None,
scopes: Optional[Sequence[str]] = None,
quota_project_id: Optional[str] = None,
client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
always_use_jwt_access: Optional[bool] = False,
**kwargs,
) -> None:
"""Instantiate the transport.
Args:
host (Optional[str]):
The hostname to connect to.
credentials (Optional[google.auth.credentials.Credentials]): The
authorization credentials to attach to requests. These
credentials identify the application to the service; if none
are specified, the client will attempt to ascertain the
credentials from the environment.
credentials_file (Optional[str]): A file with credentials that can
be loaded with :func:`google.auth.load_credentials_from_file`.
This argument is mutually exclusive with credentials.
scopes (Optional[Sequence[str]]): A list of scopes.
quota_project_id (Optional[str]): An optional project to use for billing
and quota.
client_info (google.api_core.gapic_v1.client_info.ClientInfo):
The client info used to send a user-agent string along with
API requests. If ``None``, then default info will be used.
Generally, you only need to set this if you're developing
your own client library.
always_use_jwt_access (Optional[bool]): Whether self signed JWT should
be used for service account credentials.
"""
# Save the hostname. Default to port 443 (HTTPS) if none is specified.
if ":" not in host:
host += ":443"
self._host = host
scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES}
# Save the scopes.
self._scopes = scopes
# If no credentials are provided, then determine the appropriate
# defaults.
if credentials and credentials_file:
raise core_exceptions.DuplicateCredentialArgs(
"'credentials_file' and 'credentials' are mutually exclusive"
)
if credentials_file is not None:
credentials, _ = google.auth.load_credentials_from_file(
credentials_file,
**scopes_kwargs,
quota_project_id=quota_project_id,
)
elif credentials is None:
credentials, _ = google.auth.default(
**scopes_kwargs, quota_project_id=quota_project_id
)
# If the credentials are service account credentials, then always try to use self signed JWT.
if (
always_use_jwt_access
and isinstance(credentials, service_account.Credentials)
and hasattr(
service_account.Credentials, "with_always_use_jwt_access"
)
):
credentials = credentials.with_always_use_jwt_access(True)
# Save the credentials.
self._credentials = credentials
def _prep_wrapped_messages(self, client_info):
# Precompute the wrapped methods.
self._wrapped_methods = {
self.mutate_customer_feeds: gapic_v1.method.wrap_method(
self.mutate_customer_feeds,
default_timeout=None,
client_info=client_info,
),
}
def close(self):
"""Closes resources associated with the transport.
.. warning::
Only call this method if the transport is NOT shared
with other clients - this may cause errors in other clients!
"""
raise NotImplementedError()
@property
def mutate_customer_feeds(
self,
) -> Callable[
[customer_feed_service.MutateCustomerFeedsRequest],
Union[
customer_feed_service.MutateCustomerFeedsResponse,
Awaitable[customer_feed_service.MutateCustomerFeedsResponse],
],
]:
raise NotImplementedError()
__all__ = ("CustomerFeedServiceTransport",)
|
[
"noreply@github.com"
] |
ephraimbuddy.noreply@github.com
|
d2be3ec81f8f049e8a70a3c02bca4c7f5d207554
|
96e38b89fa057fa0c1cf34e498b4624041dfc6e2
|
/BOJ/String/Python/4583.py
|
a995a37188226e83d4452414ace1a0952986cac9
|
[] |
no_license
|
malkoG/polyglot-cp
|
66059246b01766da3c359dbd16f04348d3c7ecd2
|
584763144afe40d73e72dd55f90ee1206029ca8f
|
refs/heads/master
| 2021-11-24T13:33:49.625237
| 2019-10-06T07:42:49
| 2019-10-06T07:42:49
| 176,255,722
| 2
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 409
|
py
|
mirror=dict()
mirror['b'] = 'd'
mirror['d'] = 'b'
mirror['q'] = 'p'
mirror['p'] = 'q'
for ch in 'iovwx':
mirror[ch] = ch
while True:
s=input()
if s =="#":
break
result = ''
flag = True
for ch in s:
try:
            result += mirror[ch]
except:
flag = False
break
if flag:
print(result)
else:
print("INVALID")
|
[
"rijgndqw012@gmail.com"
] |
rijgndqw012@gmail.com
|
e3d149b7b7cf48fd12d2013aefb000ecade6610f
|
5d32d0e65aa3bfa677fd1b8c92569e07e9b82af1
|
/Section 5 - Programming Logic/Guess game v3 - multiple if.py
|
1136fb613ad7c5b24b6249a57be9343f93a90ebf
|
[
"CC0-1.0"
] |
permissive
|
pdst-lccs/lccs-python
|
b74ef2a02ac8ad2637f713fff5559f4e56c9827d
|
95cb7ece05716521e9951d7a40de8fb20a88021f
|
refs/heads/master
| 2023-05-28T00:46:57.313972
| 2023-05-22T10:16:43
| 2023-05-22T10:16:43
| 240,501,524
| 21
| 18
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 687
|
py
|
# Event: LCCS Python Fundamental Skills Workshop
# Date: May 2018
# Author: Joe English, PDST
# eMail: computerscience@pdst.ie
# Name: Guessing Game v3
# Purpose: A program to demonstrate the multiple if statement
import random
number = random.randint(1, 10)
# The next line can be commented out later ...
print(number) # have a sneak peek at the number to guess!
guess = int(input("Enter a number between 1 and 10: "))
# Evaluate the condition
if guess == number:
print("Correct")
print("Well done!")
elif guess < number:
print("Hard luck!")
print("Too low")
else:
print("Hard luck!")
print("Too high")
print("Goodbye")
|
[
"noreply@github.com"
] |
pdst-lccs.noreply@github.com
|
85c8a05dbc3ccd700e56696411f9f0987cab48a8
|
8e8ea9e41032398fa8b1c54d73475a54aa11d293
|
/page/quarter/quarter_statistic/quarter_statistic.py
|
3df8bda70c25e22d603ec3c1cedd4f084dcf02b2
|
[] |
no_license
|
xmaimiao/wmPC_quarter
|
6b69812b42039101d89076923c329d8e5952308b
|
255666ccb5d2cac38b6975c0ae1ab055caabe41f
|
refs/heads/master
| 2023-03-28T02:41:21.347163
| 2021-03-29T07:12:44
| 2021-03-29T07:12:44
| 352,538,443
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 4,686
|
py
|
from common.contants import quarter_statistic_dir
from page.base.basepage import BasePage
from page.quarter.quarter_statistic.quarter_statistic_detail import Quarter_Statistic_Detail
class Quarter_Statistic(BasePage):
def wait_sleep(self,sleeps):
self.sleep(sleeps)
return self
def simple_search(self,keys):
'''
        Simple search. Takes a dict such as {quarter_type:"All",keywords:xxxx}
'''
self._params["quarter_name"] = keys["quarter_name"]
self.step(quarter_statistic_dir,"search_input")
return self
def advanced_search(self,keys):
'''
        Advanced search. Takes a dict of the form:
{startTime:{switch:False,value:2020/01/05},endStartTime:{switch:False,value:2020/01/05},
startPlanExpireTime:{switch:False,value:2020/01/05},endPlanExpireTime:{switch:False,value:2020/01/05},
startFinishedNumber:{switch:False,value:0},endFinishedNumber:{switch:False,value:0},
startPushNumber:{switch:False,value:0},endPushNumber:{switch:False,value:0},
        frequency:{switch:False,value:daily},
        peopleOriented:{switch:False,value:targeted group},
        status:{switch:False,value:ongoing},
'''
self.step(quarter_statistic_dir, "click_advanced_search")
# if keys["startTime"]["switch"] == True:
# self._params["startTime"] = keys["startTime"]["value"]
# self.step(quarter_statistic_dir,"startTime")
# if keys["endStartTime"]["switch"] == True:
# self._params["endStartTime"] = keys["endStartTime"]["value"]
# self.step(quarter_statistic_dir,"endStartTime")
# if keys["startPlanExpireTime"]["switch"] == True:
# self._params["startPlanExpireTime"] = keys["startPlanExpireTime"]["value"]
# self.step(quarter_statistic_dir,"startPlanExpireTime")
# if keys["endPlanExpireTime"]["switch"] == True:
# self._params["endPlanExpireTime"] = keys["endPlanExpireTime"]["value"]
# self.step(quarter_statistic_dir,"endPlanExpireTime")
        # Collected count - lower bound
if keys["startFinishedNumber"]["switch"] == True:
self._params["startFinishedNumber"] = keys["startFinishedNumber"]["value"]
self.step(quarter_statistic_dir,"startFinishedNumber")
        # Collected count - upper bound
if keys["endFinishedNumber"]["switch"] == True:
self._params["endFinishedNumber"] = keys["endFinishedNumber"]["value"]
self.step(quarter_statistic_dir,"endFinishedNumber")
# if keys["startPushNumber"]["switch"] == True:
# self._params["startPushNumber"] = keys["startPushNumber"]["value"]
# self.step(quarter_statistic_dir,"startPushNumber")
# if keys["endPushNumber"]["switch"] == True:
# self._params["endPushNumber"] = keys["endPushNumber"]["value"]
# self.step(quarter_statistic_dir,"endPushNumber")
        # Filter by questionnaire name
if keys["title"]["switch"] == True:
self._params["title"] = keys["title"]["value"]
self.step(quarter_statistic_dir,"title")
        # # Filter by push frequency
# if keys["frequency"]["switch"] == True:
# self._params["frequency"] = keys["frequency"]["value"]
# self.step(quarter_statistic_dir, "frequency")
        # # Filter by questionnaire type
# if keys["peopleOriented"]["switch"] == True:
# self._params["frequency"] = keys["peopleOriented"]["value"]
# self.step(quarter_statistic_dir, "peopleOriented")
        # Filter by questionnaire status
if keys["status"]["switch"] == True:
self._params["status"] = keys["status"]["value"]
self.step(quarter_statistic_dir, "status")
self.step(quarter_statistic_dir,"click_search")
return self
def view_the_fir(self,quarter_name):
'''
        Click the "View" button on the first data row.
'''
self._params["quarter_name"] = quarter_name
self.step(quarter_statistic_dir,"view_the_fir")
return Quarter_Statistic_Detail(self._driver)
def get_quarter_name_the_fir(self):
'''
        Get the questionnaire name of the first row.
'''
try:
return self.step(quarter_statistic_dir,"get_quarter_name_the_fir")
except Exception as e:
print("暫無數據!")
raise e
def get_quarter_status_the_fir(self):
'''
        Get the questionnaire status of the first row.
'''
try:
return self.step(quarter_statistic_dir,"get_quarter_status_the_fir")
except Exception as e:
print("暫無數據!")
raise e
|
[
"765120214@qq.com"
] |
765120214@qq.com
|
1f6ecc9a87a9cf8415c9d78c3fb3778a97bf9c3f
|
255e7b37e9ce28bbafba5a3bcb046de97589f21c
|
/leetcode_everyday/pastqing_491.py
|
d6d078bb2db9b5627bf0299def84a620217e9fd1
|
[] |
no_license
|
dog2humen/ForTheCoffee
|
697d2dc8366921aa18da2fa3311390061bab4b6f
|
2f940aa9dd6ce35588de18db08bf35a2d04a54f4
|
refs/heads/master
| 2023-04-15T09:53:54.711659
| 2021-04-28T13:49:13
| 2021-04-28T13:49:13
| 276,009,709
| 2
| 2
| null | 2020-07-01T08:29:33
| 2020-06-30T05:50:01
|
Python
|
UTF-8
|
Python
| false
| false
| 849
|
py
|
# coding:utf8
from typing import List
class Solution:
def findSubsequences(self, nums: List[int]) -> List[List[int]]:
return self.findSubsequences_v1(nums)
def findSubsequences_v1(self, nums: List[int]) -> List[List[int]]:
res = []
self.helper(nums, 0, [], res)
return res
def helper(self, nums, start, cur, res):
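        # Every prefix 'cur' longer than 1 is a valid non-decreasing subsequence;
        # 'memo' skips duplicate values at the same recursion depth so each
        # distinct subsequence is emitted only once.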
if len(cur) > 1:
res.append(cur[:])
memo = set()
for i in range(start, len(nums)):
if nums[i] in memo:
continue
if len(cur) == 0 or cur[-1] <= nums[i]:
memo.add(nums[i])
self.helper(nums, i + 1, cur + [nums[i]], res)
if __name__ == '__main__':
obj = Solution()
nums = [4, 6, 7, 7]
#nums = [4, 3, 2, 1]
res = obj.findSubsequences(nums)
print(res)
|
[
"116676671@qq.com"
] |
116676671@qq.com
|
921f2814602574d17fbd234becf4865925f0b64f
|
488e0934b8cd97e202ae05368c855a57b299bfd1
|
/Django/advanced/test_Blog/test_bookblog/book_app/migrations/0004_auto_20190408_1955.py
|
b4880c98ea624c5db12cdd69a946c614d2cc847a
|
[] |
no_license
|
didemertens/udemy_webdev
|
4d96a5e7abeec1848ecedb97f0c440cd50eb27ac
|
306215571be8e4dcb939e79b18ff6b302b75c952
|
refs/heads/master
| 2020-04-25T00:24:45.654136
| 2019-04-13T16:00:47
| 2019-04-13T16:00:47
| 172,377,429
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 363
|
py
|
# Generated by Django 2.1.7 on 2019-04-08 19:55
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('book_app', '0003_blog_intro'),
]
operations = [
migrations.AlterField(
model_name='blog',
name='intro',
field=models.TextField(),
),
]
|
[
"d.g.j.mertens@gmail.com"
] |
d.g.j.mertens@gmail.com
|
6a7cc2d596827c9bde48ed3927efac4efb6ee38c
|
1ffbdfff2c9632fa8ecd6288578e1d02f740ee23
|
/2020_/07/LeetCodeBitManipulation/03E_1356. Sort Integers by The Number of 1 Bits.py
|
dc2f4d00cc3f90c25830cf14864e295c482b40d1
|
[] |
no_license
|
taesookim0412/Python-Algorithms
|
c167c130adbe04100d42506c86402e729f95266c
|
c6272bbcab442ef32f327f877a53ee6e66d9fb00
|
refs/heads/main
| 2023-05-01T09:40:44.957618
| 2021-05-12T10:52:30
| 2021-05-12T10:52:30
| 366,682,846
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 780
|
py
|
import collections
import numpy as np
from typing import List
#Runtime: 72 ms, faster than 68.71% of Python3 online submissions for Sort Integers by The Number of 1 Bits.
#Memory Usage: 13.8 MB, less than 86.25% of Python3 online submissions for Sort Integers by The Number of 1 Bits.
class Solution:
def sortByBits(self, arr: List[int]) -> List[int]:
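        # Sort numerically first, then bucket by popcount; emitting the buckets
        # in ascending popcount order keeps ties ordered by value.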
arr.sort()
data = collections.defaultdict(list)
res = []
for i in range(len(arr)):
numberOfOnes = str(bin(arr[i])).count('1')
data[numberOfOnes] += arr[i],
for key, val in sorted(data.items()):
print(key,val)
res += val
return res
s = Solution()
print(s.sortByBits([0,1,2,3,4,5,6,7,8]))
print(s.sortByBits([10,100,1000,10000]))
|
[
"taesoo.kim0412@gmail.com"
] |
taesoo.kim0412@gmail.com
|
faa0ab004c18bd45116e831d5433c6c545aaedb2
|
2f418a0f2fcca40f84ec0863b31ff974b574350c
|
/scripts/addons_extern/nodes_io/attributes.py
|
5763cb541cfaa72d6cdbe4733176fefca15e8fb1
|
[] |
no_license
|
JT-a/blenderpython279
|
57a81b55564218f3b1417c2ffa97f5161897ec79
|
04846c82f794c22f87d677d9eb8cec1d05c48cda
|
refs/heads/master
| 2021-06-25T06:58:07.670613
| 2017-09-11T11:14:36
| 2017-09-11T11:14:36
| 103,723,697
| 4
| 2
| null | 2017-09-16T04:09:31
| 2017-09-16T04:09:31
| null |
UTF-8
|
Python
| false
| false
| 3,621
|
py
|
#!/usr/bin/python3
# --------------------------------------------------
# ATTRIBUTES
# --------------------------------------------------
defaults = [
"bl_idname",
# "type", # read-only
"name",
"label",
# "parent",
"select",
"location",
# "dimensions", # read-only
"width",
"height",
# "width_hidden",
"use_custom_color",
"color",
"hide",
"mute",
"show_options",
"show_preview",
"show_texture",
# "inputs",
# "outputs",
]
specials = [
"attribute_name", # ["ATTRIBUTE"]
"axis", # ["TANGENT"]
"blend_type", # ["MIX_RGB"]
"bytecode", # ["SCRIPT"]
"bytecode_hash", # ["SCRIPT"]
"color_mapping", # ["TEX_IMAGE", "TEX_ENVIRONMENT", "TEX_NOISE", "TEX_GRADIENT", "TEX_MUSGRAVE", "TEX_MAGIC", "TEX_WAVE", "TEX_SKY", "TEX_VORONOI", "TEX_CHECKER", "TEX_BRICK"]
"color_ramp", # ["VALTORGB"]
"color_space", # ["TEX_IMAGE", "TEX_ENVIRONMENT"]
"coloring", # ["TEX_VORONOI"]
"component", # ["BSDF_HAIR", "BSDF_TOON"]
"convert_from", # ["VECT_TRANSFORM"]
"convert_to", # ["VECT_TRANSFORM"]
"direction_type", # ["TANGENT"]
"distribution", # ["BSDF_GLOSSY", "BSDF_REFRACTION", "BSDF_ANISOTROPIC", "BSDF_GLASS"]
"falloff", # ["SUBSURFACE_SCATTERING"]
"filepath", # ["SCRIPT"]
"from_dupli", # ["UVMAP", "TEX_COORD"]
"gradient_type", # ["TEX_GRADIENT"]
"ground_albedo", # ["TEX_SKY"]
"image", # ["TEX_IMAGE", "TEX_ENVIRONMENT"]
"interpolation", # ["TEX_IMAGE"]
"invert", # ["BUMP"]
"is_active_output", # ["OUTPUT_MATERIAL", "OUTPUT_LAMP"]
"label_size", # ["FRAME"]
"mapping", # ["CURVE_RGB", "CURVE_VEC"]
"max", # ["MAPPING"]
"min", # ["MAPPING"]
"mode", # ["SCRIPT"]
"musgrave_type", # ["TEX_MUSGRAVE"]
"node_tree", # ["GROUP"]
"object", # ["TEX_COORD"]
"offset", # ["TEX_BRICK"]
"offset_frequency", # ["TEX_BRICK"]
"operation", # ["VECT_MATH", "MATH"]
"projection", # ["TEX_IMAGE", "TEX_ENVIRONMENT"]
"projection_blend", # ["TEX_IMAGE"]
"rotation", # ["MAPPING"]
"scale", # ["MAPPING"]
"script", # ["SCRIPT"]
"shrink", # ["FRAME"]
"sky_type", # ["TEX_SKY"]
"space", # ["NORMAL_MAP"]
"squash", # ["TEX_BRICK"]
"squash_frequency", # ["TEX_BRICK"]
"sun_direction", # ["TEX_SKY"]
"text", # ["FRAME"]
"texture_mapping", # ["TEX_IMAGE", "TEX_ENVIRONMENT", "TEX_NOISE", "TEX_GRADIENT", "TEX_MUSGRAVE", "TEX_MAGIC", "TEX_WAVE", "TEX_SKY", "TEX_VORONOI", "TEX_CHECKER", "TEX_BRICK"]
"translation", # ["MAPPING"]
"turbidity", # ["TEX_SKY"]
"turbulence_depth", # ["TEX_MAGIC"]
"use_alpha", # ["MIX_RGB"]
"use_auto_update", # ["SCRIPT"]
"use_clamp", # ["MIX_RGB", "MATH"]
"use_max", # ["MAPPING"]
"use_min", # ["MAPPING"]
"use_pixel_size", # ["WIREFRAME"]
"uv_map", # ["TANGENT", "UVMAP", "NORMAL_MAP"]
"vector_type", # ["MAPPING", "VECT_TRANSFORM"]
"wave_type", # ["TEX_WAVE"]
]
# --------------------------------------------------
# INPUTS / OUTPUTS TYPES
# --------------------------------------------------
sock_vectors = [
"RGBA",
"VECTOR",
]
sock_values = [
"CUSTOM",
"VALUE",
"INT",
"BOOLEAN",
"STRING",
]
|
[
"meta.androcto1@gmail.com"
] |
meta.androcto1@gmail.com
|
9da6da5fba9daedf9b2e92c80aa332916e18eeae
|
659653ebdff3d70e0c04cd0292e489dc537b4112
|
/setup.py
|
709ece69ddc3e6068b1178071932256215b94e81
|
[] |
no_license
|
vangheem/clouddrive
|
48de3dd21f66c4ea207e6cbfefff630825349bb3
|
1b0e74dd4e9cd3dc838a3c13866ccef8a3920b63
|
refs/heads/master
| 2021-01-10T02:11:52.997944
| 2016-09-20T02:30:51
| 2016-09-20T02:30:51
| 48,830,570
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 863
|
py
|
# -*- coding: utf-8 -*-
import os
from setuptools import setup
from setuptools import find_packages
def read(*rnames):
return open(os.path.join(os.path.dirname(__file__), *rnames)).read()
setup(
name='clouddrive',
version='0.1.dev0',
description='',
long_description='',
classifiers=[
"Programming Language :: Python",
],
author='Nathan Van Gheem',
author_email='nathan@vangheem.us',
url='',
license='BSD',
packages=find_packages(exclude=['ez_setup']),
install_requires=[
'requests',
'flask',
'ZEO',
'ZODB',
'python-dateutil'
],
entry_points="""
# -*- Entry points: -*-
[console_scripts]
run-server = clouddrive:run_server
run-monitor = clouddrive.monitor:run
""",
include_package_data=True,
zip_safe=False,
)
|
[
"vangheem@gmail.com"
] |
vangheem@gmail.com
|
0f907498ef454193d80fa1dbac7f4ef357cb0485
|
f9308d5a8efe2dbb48e9cc87cd06405b60a9dc7b
|
/samples/python/apidocs/ee_featurecollection_aggregate_total_var.py
|
6f681ac3fa4a89f5f1bdf70d9c5cf99b8aef7c56
|
[
"Apache-2.0",
"CC-BY-4.0"
] |
permissive
|
google/earthengine-community
|
4e054b421f66f03507d58668084aee981062fc24
|
ce931040c518860f8788b4888c0acfdebd2952fc
|
refs/heads/master
| 2023-09-01T14:47:54.812703
| 2023-08-31T23:01:00
| 2023-08-31T23:01:39
| 200,732,820
| 428
| 552
|
Apache-2.0
| 2023-09-13T21:46:51
| 2019-08-05T21:42:11
|
Jupyter Notebook
|
UTF-8
|
Python
| false
| false
| 1,021
|
py
|
# Copyright 2023 The Google Earth Engine Community Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# [START earthengine__apidocs__ee_featurecollection_aggregate_total_var]
# FeatureCollection of power plants in Belgium.
fc = ee.FeatureCollection('WRI/GPPD/power_plants').filter(
'country_lg == "Belgium"')
print('Total variance of power plant capacities (MW):',
fc.aggregate_total_var('capacitymw').getInfo()) # 214307.38335169878
# [END earthengine__apidocs__ee_featurecollection_aggregate_total_var]
|
[
"copybara-worker@google.com"
] |
copybara-worker@google.com
|
d6447c5e8113bc3dfba69e31df59d4e3c714b954
|
5257652fc34ec87fe45d390ba49b15b238860104
|
/single_cell_atacseq_preprocessing/pseudorep_peaks_supported_by_30percent_of_bioreps_in_same_region/get_number_of_samples_that_support_each_regional_peak.py
|
47f15d19a533484f6a223eea9579cbdf122b1557
|
[] |
no_license
|
thekingofall/alzheimers_parkinsons
|
cd247fa2520c989e8dd853ed22b58a9bff564391
|
4ceae6ea3eb4c58919ff41aed8803855bca240c8
|
refs/heads/master
| 2022-11-30T22:36:37.201334
| 2020-08-12T01:23:55
| 2020-08-12T01:23:55
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 4,190
|
py
|
#using IDR optimal peaks from the pseudoreplicate set, calculate the number of biological replicates (based on biorep IDR optimal peak sets) that support each peak
import argparse
import pybedtools
import gzip
def parse_args():
parser=argparse.ArgumentParser(description="using IDR optimal peaks from the pseudoreplicate set, calculate the number of biological replicates (based on biorep IDR optimal peak sets) that support each peak")
parser.add_argument("--pseudorep_idr_optimal_peaks",help="file containing full paths to the pseudorep IDR peak sets")
parser.add_argument("--biorep_idr_optimal_peaks",help="file containing full paths to the biorep IDR peak sets")
parser.add_argument("--samples",help="file containing list of samples to annotate")
parser.add_argument("--thresh",default=0.3,type=float,help="percent of bioreps for a given condition/region that must contain a peak for it to be included in the finalized set")
parser.add_argument("--out_suffix",default=".idr.optimal_peaks.support30%.bed.gz",help="file suffix for the sample output peak file prefix")
return parser.parse_args()
def get_sample_to_pseudorep_peak_map(samples,pseudorep_idr_optimal_peaks):
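    # Map each sample name to the pseudoreplicate IDR peak set (as a BedTool)
    # whose file path contains that sample name.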
sample_to_pseudorep_peaks=dict()
for pseudorep_peakset in pseudorep_idr_optimal_peaks:
for sample in samples:
if sample in pseudorep_peakset:
sample_to_pseudorep_peaks[sample]=pybedtools.bedtool.BedTool(pseudorep_peakset)
break
return sample_to_pseudorep_peaks
def get_sample_to_biorep_peak_map(samples,biorep_idr_optimal_peaks):
sample_to_biorep_peaks=dict()
for sample in samples:
sample_to_biorep_peaks[sample]=[]
for biorep_peakset in biorep_idr_optimal_peaks:
renamed=biorep_peakset.replace('/','_')
for sample in samples:
if sample in renamed:
sample_to_biorep_peaks[sample].append(pybedtools.bedtool.BedTool(biorep_peakset) )
break
return sample_to_biorep_peaks
def main():
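    # For each sample, count how many biological-replicate peak sets overlap each
    # pseudoreplicate peak, then split peaks into supported / unsupported outputs.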
args=parse_args()
pseudorep_idr_optimal_peaks=open(args.pseudorep_idr_optimal_peaks,'r').read().strip().split('\n')
biorep_idr_optimal_peaks=open(args.biorep_idr_optimal_peaks,'r').read().strip().split('\n')
samples=open(args.samples,'r').read().strip().split('\n')
sample_to_pseudorep_peaks=get_sample_to_pseudorep_peak_map(samples,pseudorep_idr_optimal_peaks)
sample_to_biorep_peaks=get_sample_to_biorep_peak_map(samples,biorep_idr_optimal_peaks)
for sample in samples:
print(sample)
pseudorep_peaks=sample_to_pseudorep_peaks[sample]
support_histogram=dict()
for entry in pseudorep_peaks:
support_histogram[tuple(entry[0:3])]=[0,entry]
for biorep_peaks in sample_to_biorep_peaks[sample]:
#intersect them
try:
intersection=pseudorep_peaks.intersect(biorep_peaks,u=True,f=0.4,F=0.4,e=True)
except:
print("could not intersect,skipping")
continue
intersection=list(set([tuple(i[0:3]) for i in intersection]))
print(str(len(intersection))+"/"+str(len(pseudorep_peaks)))
for intersection_entry in intersection:
support_histogram[intersection_entry][0]+=1
outf=gzip.open(sample+args.out_suffix,'wt')
outf_bad=gzip.open(sample+".unsupported"+args.out_suffix,'wt')
min_support_count=args.thresh*len(sample_to_biorep_peaks[sample])
print("min_support_count:"+str(min_support_count))
out_good=[]
out_bad=[]
for entry in support_histogram:
cur_entry_support=support_histogram[entry][0]
if cur_entry_support >= min_support_count:
out_good.append(str(support_histogram[entry][1]).rstrip('\n')+'\t'+str(cur_entry_support))
else:
out_bad.append(str(support_histogram[entry][1]).rstrip('\n')+'\t'+str(cur_entry_support))
outf.write('\n'.join(out_good))
outf_bad.write('\n'.join(out_bad)+'\n')
outf.close()
outf_bad.close()
if __name__=="__main__":
main()
|
[
"annashcherbina@gmail.com"
] |
annashcherbina@gmail.com
|
6793a7e2ed84de8b67e05f62dbff2c37d60be349
|
6c0beed4cd8719bf48982a853183121cea35cadf
|
/thread_syn_scanr_final.py
|
a312b10f43247c3ca188d52d82f490d1d797ed92
|
[] |
no_license
|
vicky288/pythonScripts
|
c90406dd2addc7a72275be0526daae7eba7d8c29
|
2b31c726b5924314b31a37f3a4eb86c132816859
|
refs/heads/master
| 2021-04-30T16:35:30.739238
| 2017-02-09T00:06:16
| 2017-02-09T00:06:16
| 80,077,402
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,356
|
py
|
#!/usr/bin/env python
import threading
import Queue
import sys
import time
from scapy.all import *
class WorkerThread(threading.Thread) :
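    """Worker that pulls port numbers from the shared queue and SYN-scans the target host given on the command line."""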
def __init__(self, queue, tid) :
threading.Thread.__init__(self)
self.queue = queue
self.tid = tid
print "Worker %d Reporting for Service Sir!" %self.tid
def run(self) :
total_ports = 0
while True :
port = 0
try :
port = self.queue.get(timeout=1)
except Queue.Empty :
print "Worker %d exiting. Scanned %d ports ..." % (self.tid, total_ports)
return
# port scanning to begin
# we rely on scapy to do this
ip = sys.argv[1]
response = sr1(IP(dst=ip)/TCP(dport=port, flags="S"), verbose=False, timeout=.2)
# only checking for SYN-ACK == flags = 18
# filtererd ports etc. is another story altogether
if response :
if response[TCP].flags == 18 :
print "ThreadId %d: Received port number %d Status: OPEN" %(self.tid, port)
self.queue.task_done()
total_ports += 1
queue = Queue.Queue()
threads = []
for i in range(1, 10) :
print "Creating WorkerThread : %d"%i
worker = WorkerThread(queue, i)
worker.setDaemon(True)
worker.start()
threads.append(worker)
print "WorkerThread %d Created!"%i
for j in range (1,1000) :
queue.put(j)
queue.join()
# wait for all threads to exit
for item in threads :
item.join()
print "Scanning Complete!"
|
[
"root@localhost.localdomain"
] |
root@localhost.localdomain
|
3822dc71dbe9d74b56a67f934b2b21851a2d04bd
|
19da1a56f137a08772c347cf974be54e9c23c053
|
/lib/adafruit_boardtest/boardtest_voltage_monitor.py
|
bcdd23695e093d6d8bed54eddee6c02a1518167c
|
[] |
no_license
|
mk53202/mk53202-timeclock-pyportal
|
d94f45a9d186190a4bc6130077baa6743a816ef3
|
230a858d429f8197c00cab3e67dcfd3b295ffbe0
|
refs/heads/master
| 2021-02-04T05:38:25.533292
| 2020-02-27T22:45:56
| 2020-02-27T22:45:56
| 243,626,362
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 4,216
|
py
|
# The MIT License (MIT)
#
# Copyright (c) 2018 Shawn Hymel for Adafruit Industries
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
"""
`adafruit_boardtest.boardtest_voltage_monitor`
====================================================
Prints out the measured voltage on any onboard voltage/battery monitor pins.
Note that some boards have an onboard voltage divider to decrease the voltage
to these pins.
Run this script as its own main.py to individually run the test, or compile
with mpy-cross and call from separate test script.
* Author(s): Shawn Hymel for Adafruit Industries
Implementation Notes
--------------------
**Hardware:**
* `Multimeter <https://www.adafruit.com/product/2034>`_
**Software and Dependencies:**
* Adafruit CircuitPython firmware for the supported boards:
https://github.com/adafruit/circuitpython/releases
"""
import board
import analogio
__version__ = "1.0.1"
__repo__ = "https://github.com/adafruit/Adafruit_CircuitPython_BoardTest.git"
# Constants
VOLTAGE_MONITOR_PIN_NAMES = ['VOLTAGE_MONITOR', 'BATTERY']
ANALOG_REF = 3.3 # Reference analog voltage
ANALOGIN_BITS = 16 # ADC resolution (bits) for CircuitPython
# Test result strings
PASS = "PASS"
FAIL = "FAIL"
NA = "N/A"
def run_test(pins):
"""
Prints out voltage on the battery monitor or voltage monitor pin.
:param list[str] pins: list of pins to run the test on
:return: tuple(str, list[str]): test result followed by list of pins tested
"""
# Look for pins with battery monitoring names
monitor_pins = list(set(pins).intersection(set(VOLTAGE_MONITOR_PIN_NAMES)))
# Print out voltage found on these pins
if monitor_pins:
# Print out the monitor pins found
print("Voltage monitor pins found:", end=' ')
for pin in monitor_pins:
print(pin, end=' ')
print('\n')
# Print out the voltage found on each pin
for pin in monitor_pins:
monitor = analogio.AnalogIn(getattr(board, pin))
voltage = (monitor.value * ANALOG_REF) / (2**ANALOGIN_BITS)
print(pin + ": {:.2f}".format(voltage) + " V")
monitor.deinit()
print()
# Ask the user to check these voltages
print("Use a multimeter to verify these voltages.")
print("Note that some battery monitor pins might have onboard " +
"voltage dividers.")
print("Do the values look reasonable? [y/n]")
if input() == 'y':
return PASS, monitor_pins
return FAIL, monitor_pins
# Else (no pins found)
print("No battery monitor pins found")
return NA, []
def _main():
# List out all the pins available to us
pins = [p for p in dir(board)]
print()
print("All pins found:", end=' ')
# Print pins
for pin in pins:
print(pin, end=' ')
print('\n')
# Run test
result = run_test(pins)
print()
print(result[0])
print("Pins tested: " + str(result[1]))
# Execute only if run as main.py or code.py
if __name__ == "__main__":
_main()
|
[
"mkoster@stack41.com"
] |
mkoster@stack41.com
|
a0322b2f81ed8ef731da2cc2a758f162c0d92b65
|
9b36652dafb58888b7a584806ee69a33fcb609d5
|
/objutils/pickleif.py
|
fd210c74a9e0aa035425e908bbf6ad39a83c3423
|
[] |
no_license
|
pySART/objutils
|
db33e4576cf68111cb4debbafec06a0204844938
|
5ba4631b2245caae80d4dbe0053db0f2706ba53f
|
refs/heads/master
| 2020-06-29T03:35:24.485977
| 2016-11-21T14:21:56
| 2016-11-21T14:21:56
| 74,451,500
| 5
| 2
| null | 2016-11-22T08:36:10
| 2016-11-22T08:36:10
| null |
UTF-8
|
Python
| false
| false
| 1,385
|
py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
__version__ = "0.1.0"
__copyright__ = """
pyObjUtils - Object file library for Python.
(C) 2010-2013 by Christoph Schueler <github.com/Christoph2,
cpu12.gems@googlemail.com>
All Rights Reserved
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License along
with this program; if not, write to the Free Software Foundation, Inc.,
51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
"""
import abc
DUMMY_PROTOCOL = None
class PickleIF(object):
__metaclass__ = abc.ABCMeta
@abc.abstractmethod
def dump(self, obj, file_, protocol = DUMMY_PROTOCOL): pass
@abc.abstractmethod
def dumps(self, obj, protocol = DUMMY_PROTOCOL): pass
@abc.abstractmethod
def load(self, file_): pass
@abc.abstractmethod
def loads(self, string_): pass
|
[
"cpu12.gems@googlemail.com"
] |
cpu12.gems@googlemail.com
|
34c69a2e6e5163f82c16f2066cc150e2915edd2e
|
13a416a2694d1f6aa1a68cd47610236bf61cafbc
|
/CodePractice/Turtle/circleturtle.py
|
a465b8fe30527faaecef0725052e7c92e49ae8e1
|
[] |
no_license
|
Highjune/Python
|
c637f7d0f9e5d1ac9d6ad87b4e54833b8ff4ae11
|
1be43816d22f5f3b8679cf0cd3939e9d9f54497a
|
refs/heads/master
| 2022-11-24T01:20:54.470172
| 2020-07-27T18:01:47
| 2020-07-27T18:01:47
| 263,271,337
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 170
|
py
|
import turtle as t
t.shape("turtle")
num = int(input('num : '))
go = int((num-1)/2)
for i in range(1,num):
for j in range(1,i):
t.forward(1)
t.right(90)
|
[
"highjune37@gmail.com"
] |
highjune37@gmail.com
|
f546e9e52b380e7d078d7a83b9522f48799bf1fb
|
6e172edee44d5985d19327cf61865d861395d595
|
/2020/11/y2020_d11_p01.py
|
14c61f5a7ba331bb0fdd890b3ea3476e58d1cce5
|
[] |
no_license
|
rHermes/adventofcode
|
bbac479ec1c84c55484effa2cd94889d621b3718
|
4cbe7a952678c5f09438702562b7f6f673a1cf83
|
refs/heads/master
| 2023-01-14T07:05:35.769426
| 2023-01-01T09:53:11
| 2023-01-01T10:13:29
| 225,170,580
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,699
|
py
|
import fileinput
import itertools as it
# Let's build a jump table
def jumptbl(M, ROWS, COLS, x, y):
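    # Collect the flat indices of the eight neighbouring cells that are inside
    # the grid and are seats (floor cells are stored as None and skipped).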
arounds = []
for dy, dx in [(-1,-1), (-1, 0), (-1, 1), (0,-1), (0, 1), (1,-1), (1,0), (1,1)]:
zx = x + dx
zy = y + dy
idx = zy*COLS + zx
if 0 <= zx < COLS and 0 <= zy < ROWS and M[idx] != None:
arounds.append(idx)
return arounds
# Creates a compressed version of a jump array
def compress(M, ROWS, COLS):
comp = []
# translate from full to sparse
trans = {}
# Build spare index
    for y in range(ROWS):
        for x in range(COLS):
idx = y*COLS + x
if M[idx] == None:
continue
trans[idx] = len(comp)
comp.append(M[idx])
# Build jump table
jmp = {}
for oidx, nidx in trans.items():
y = oidx // COLS
x = oidx % COLS
# Second pass, now to create jump table
adj = frozenset(trans[k] for k in jumptbl(M, ROWS, COLS, x, y))
if len(adj) < 4:
comp[nidx] = True
else:
jmp[nidx] = adj
return (comp, jmp)
# Step from M to N using jmp; returns True if any seat changed
def step(M, N, jmp):
changed = False
for idx, adj in jmp.items():
t = sum(M[x] for x in adj)
N[idx] = (M[idx] and t < 4) or ((not M[idx]) and t == 0)
changed |= N[idx] != M[idx]
return changed
lines = [line.rstrip() for line in fileinput.input()]
ROWS = len(lines)
COLS = len(lines[0])
# None marks floor ('.'); False is an empty seat, True an occupied seat
M = [{'L': False, '#': True, '.': None}[x] for x in it.chain(*lines)]
comp, jmp = compress(M, ROWS, COLS)
A = comp
B = A.copy()
while step(A, B, jmp):
B, A = A, B
print(sum(A))
|
[
"teodor@spaeren.no"
] |
teodor@spaeren.no
|
8eabc5915442c74698de459405acdb8a6cb90fa6
|
18b3ad3b0e1f7f10969738251e1201d01dfbc6bf
|
/backup_files/practice/rect.py
|
00e007de1004f6dc31ae22f14c65ace2161a43fa
|
[] |
no_license
|
sahthi/backup2
|
11d509b980e731c73733b1399a8143780779e75a
|
16bed38f0867fd7c766c2a008c8d43b0660f0cb0
|
refs/heads/master
| 2020-03-21T12:39:56.890129
| 2018-07-09T08:12:46
| 2018-07-09T08:12:46
| 138,565,151
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 352
|
py
|
#!/usr/bin/python
class rectangle:
def __init__(self,length,breadth):
self.length=length
self.breadth=breadth
def area(self):
return self.breadth*self.length
a=input("enter the length of rectangle:")
b=input("enter the breadth of rectangle:")
obj=rectangle(a,b)
print("area of rectangle:",obj.area())
|
[
"siddamsetty.sahithi@votarytech.com"
] |
siddamsetty.sahithi@votarytech.com
|
70252ccd8d751ddb991b9baf48cccda96d0787ae
|
00758be070825c33d9178c8a50d1a59ee2c3c790
|
/ppci/format/pefile/pefile.py
|
9828d6c93963c5f9a97d85c328a03344460d59d7
|
[
"BSD-2-Clause"
] |
permissive
|
jsdelivrbot/ppci-mirror
|
d2a87f21a735a9495ad1130959b599ab317a62f6
|
67195d628275e2332ceaf44c9e13fc58d0877157
|
refs/heads/master
| 2020-04-10T06:23:38.964744
| 2018-12-07T17:05:05
| 2018-12-07T17:05:05
| 160,853,011
| 0
| 0
|
BSD-2-Clause
| 2018-12-07T17:07:00
| 2018-12-07T17:07:00
| null |
UTF-8
|
Python
| false
| false
| 312
|
py
|
from .headers import DosHeader, CoffHeader, PeOptionalHeader64
from .headers import ImageSectionHeader, PeHeader, DataDirectoryHeader
from .headers import ImportDirectoryTable
class PeFile:
""" Pe (exe) file """
def __init__(self):
self.pe_header = PeHeader()
class ExeFile(PeFile):
pass
|
[
"windel@windel.nl"
] |
windel@windel.nl
|
79dc973cef96d3e5eef6f7cd3552f8a91bf78cb4
|
b71f656374293c5f1238fcb449aa4dde78632861
|
/eudplib/utils/blockstru.py
|
c3e9a5159a3ca9e5b4b729aab409a5ff123314d1
|
[
"MIT"
] |
permissive
|
tobeinged/eudplib
|
ce1cdc15f7ec6af857b4b64b5c826b3dd95d3e48
|
066c0faa200dc19e70cdb6979daf8f008b8ae957
|
refs/heads/master
| 2023-05-04T08:49:01.180147
| 2019-03-18T14:30:29
| 2019-03-18T14:30:29
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,549
|
py
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
'''
Copyright (c) 2014 trgk
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
'''
from .eperror import ep_assert
class BlockStruManager:
def __init__(self):
self._blockstru = []
self._lastblockdict = {}
def empty(self):
return not self._blockstru
_current_bsm = BlockStruManager() # Default one
def SetCurrentBlockStruManager(bsm):
global _current_bsm
old_bsm = _current_bsm
_current_bsm = bsm
return old_bsm
def EUDCreateBlock(name, userdata):
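    # Push a (name, userdata) block onto the current block stack and register
    # it under its name so it can be looked up later.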
_blockstru = _current_bsm._blockstru
_lastblockdict = _current_bsm._lastblockdict
block = (name, userdata)
_blockstru.append(block)
if name not in _lastblockdict:
_lastblockdict[name] = []
_lastblockdict[name].append(block)
def EUDGetLastBlock():
_blockstru = _current_bsm._blockstru
return _blockstru[-1]
def EUDGetLastBlockOfName(name):
_lastblockdict = _current_bsm._lastblockdict
return _lastblockdict[name][-1]
def EUDPeekBlock(name):
lastblock = EUDGetLastBlock()
ep_assert(lastblock[0] == name, 'Block starting/ending mismatch')
return lastblock
def EUDPopBlock(name):
_blockstru = _current_bsm._blockstru
_lastblockdict = _current_bsm._lastblockdict
lastblock = _blockstru.pop()
ep_assert(lastblock[0] == name, """\
Block starting/ending mismatch:
- Started with %s
- Ended with %s\
""" % (lastblock[0], name))
_lastblockdict[name].pop()
return lastblock
def EUDGetBlockList():
return _current_bsm._blockstru
|
[
"phu54321@naver.com"
] |
phu54321@naver.com
|
a4056e610f35a5a1bfbe93990398a2a61a725fde
|
b7620d0f1a90390224c8ab71774b9c906ab3e8e9
|
/aliyun-python-sdk-imm/aliyunsdkimm/request/v20200930/CreateFigureClusteringTaskRequest.py
|
cfa9780027b8c39cc94abaae16dbba18b98bda90
|
[
"Apache-2.0"
] |
permissive
|
YaoYinYing/aliyun-openapi-python-sdk
|
e9c62940baee1a35b9ec4a9fbd1e4eb0aaf93b2f
|
e9a93cc94bd8290d1b1a391a9cb0fad2e6c64627
|
refs/heads/master
| 2022-10-17T16:39:04.515562
| 2022-10-10T15:18:34
| 2022-10-10T15:18:34
| 117,057,304
| 0
| 0
| null | 2018-01-11T06:03:02
| 2018-01-11T06:03:01
| null |
UTF-8
|
Python
| false
| false
| 2,476
|
py
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
from aliyunsdkimm.endpoint import endpoint_data
import json
class CreateFigureClusteringTaskRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'imm', '2020-09-30', 'CreateFigureClusteringTask','imm')
self.set_method('POST')
if hasattr(self, "endpoint_map"):
setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
if hasattr(self, "endpoint_regional"):
setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())
def get_UserData(self): # String
return self.get_query_params().get('UserData')
def set_UserData(self, UserData): # String
self.add_query_param('UserData', UserData)
def get_ProjectName(self): # String
return self.get_query_params().get('ProjectName')
def set_ProjectName(self, ProjectName): # String
self.add_query_param('ProjectName', ProjectName)
def get_NotifyTopicName(self): # String
return self.get_query_params().get('NotifyTopicName')
def set_NotifyTopicName(self, NotifyTopicName): # String
self.add_query_param('NotifyTopicName', NotifyTopicName)
def get_NotifyEndpoint(self): # String
return self.get_query_params().get('NotifyEndpoint')
def set_NotifyEndpoint(self, NotifyEndpoint): # String
self.add_query_param('NotifyEndpoint', NotifyEndpoint)
def get_DatasetName(self): # String
return self.get_query_params().get('DatasetName')
def set_DatasetName(self, DatasetName): # String
self.add_query_param('DatasetName', DatasetName)
def get_Tags(self): # Map
return self.get_query_params().get('Tags')
def set_Tags(self, Tags): # Map
self.add_query_param("Tags", json.dumps(Tags))
|
[
"sdk-team@alibabacloud.com"
] |
sdk-team@alibabacloud.com
|
7d99e26a6d7d4b0a7f916ad07f46105c644061c7
|
ac2f43c8e0d9649a7f063c59b3dffdfed9fd7ed7
|
/tests2/common/base_slaac_test.py
|
459b17fe8aebffa9efbf641b36e553aada1068c0
|
[] |
no_license
|
facebook/openbmc
|
bef10604ced226288600f55248b7f1be9945aea4
|
32777c66a8410d767eae15baabf71c61a0bef13c
|
refs/heads/helium
| 2023-08-17T03:13:54.729494
| 2023-08-16T23:24:18
| 2023-08-16T23:24:18
| 31,917,712
| 684
| 331
| null | 2023-07-25T21:19:08
| 2015-03-09T19:18:35
|
C
|
UTF-8
|
Python
| false
| false
| 2,578
|
py
|
#!/usr/bin/env python3
#
# Copyright 2018-present Facebook. All Rights Reserved.
#
# This program file is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by the
# Free Software Foundation; version 2 of the License.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
# for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program in a file named COPYING; if not, write to the
# Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor,
# Boston, MA 02110-1301 USA
#
import subprocess
from common.base_interface_test import BaseInterfaceTest
from utils.cit_logger import Logger
class BaseSlaacTest(BaseInterfaceTest):
def get_ipv6_address(self):
"""
        Get the longest inet6 address of a given interface.
        Overrides the BaseInterfaceTest method because we want
        the inet6 address with the greatest length.
"""
out = self.get_ip_addr_output_inet6()
# trying to find inet6 address with highest length
ipv6 = ""
for value in out[1:]:
if len(value.split("/")[0]) > len(ipv6):
ipv6 = value.split("/")[0]
Logger.debug("Got ip address for " + str(self.ifname))
return ipv6.lower()
def get_mac_address(self):
"""
Get Ethernet MAC address
"""
f = subprocess.Popen(
["fw_printenv", "-n", "ethaddr"],
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
)
out, err = f.communicate()
self.assertEqual(
f.returncode,
0,
"fw_printenv -n ethaddr "
+ "exited with returncode: "
+ str(f.returncode)
+ ", err: "
+ str(err),
)
if out:
out = out.decode("utf-8").rstrip()
return out.lower()
else:
raise Exception("Couldn't find MAC address [FAILED]")
def generate_modified_eui_64_mac_address(self):
"""
Get Modified EUI-64 Mac Address
"""
mac_address = self.get_mac_address().split(":")
        # flip the 7th (universal/local) bit of the first octet
mac_address[0] = hex(int(mac_address[0], 16) ^ 2)[2:]
mac_address[2] = mac_address[2] + "fffe"
return "".join(mac_address)
|
[
"facebook-github-bot@users.noreply.github.com"
] |
facebook-github-bot@users.noreply.github.com
|
6b6b8ed46de995cb4125b9f3eae5ad6f987cb563
|
1ff9adfdb9d559e6f81ed9470467bab25e93b5ab
|
/src/ta_lib/_vendor/tigerml/core/reports/lib.py
|
e68184e54ad7fe935d6209b79f71931a46f2af5f
|
[] |
no_license
|
Seemant-tiger/housing-price-prediction
|
a39dbefcb11bc460edeeee92e6becf77d35ff3a8
|
be5d8cca769c7e267cfee1932eb82b70c2855bc1
|
refs/heads/main
| 2023-06-24T00:25:49.776720
| 2021-07-18T16:44:28
| 2021-07-18T16:44:28
| 387,222,852
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 594
|
py
|
def create_report(
contents, name="", path="", format=".html", split_sheets=True, tiger_template=False
):
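    # Dispatch to the Excel, PowerPoint, or HTML report writer based on the
    # requested output format.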
if format == ".xlsx":
from .excel import create_excel_report
create_excel_report(contents, name=name, path=path, split_sheets=split_sheets)
elif format == ".pptx":
from .ppt.lib import create_ppt_report
create_ppt_report(contents, name=name, path=path, tiger_template=tiger_template)
if format == ".html":
from .html import create_html_report
create_html_report(contents, name=name, path=path, split_sheets=split_sheets)
|
[
"seemantsingh1199@gmail.com"
] |
seemantsingh1199@gmail.com
|
23e3ad4e01e0f76661ea461347891416a38d216c
|
a71a756203a07ccaece6db440410493b3b7ff77f
|
/helios/plugins/builtin/rpc_websocket_proxy_through_ipc_socket/plugin.py
|
f25a485fbaf58be719639402aa3e72f7562385ca
|
[
"MIT"
] |
permissive
|
Helios-Protocol/py-helios-node
|
73735dc24cd4c816d55649ed2f5df822efabfdce
|
691b378938f0a36bf8774dc1ee4e4370b6cf7c63
|
refs/heads/master
| 2021-08-19T23:05:18.841604
| 2020-01-18T19:38:33
| 2020-01-18T19:38:33
| 134,452,574
| 21
| 10
|
MIT
| 2019-06-09T04:43:14
| 2018-05-22T17:39:10
|
Python
|
UTF-8
|
Python
| false
| false
| 1,344
|
py
|
from argparse import (
ArgumentParser,
_SubParsersAction,
)
from helios.extensibility import (
BaseIsolatedPlugin,
)
from .websocket_proxy_server import Proxy as rpc_websocket_proxy
###
# This one is not used anymore because it is synchronous. There is a new asynchronous one in the json_rpc folder
# This one connects through IPC as well, so it won't be stopped by admin_stopRPC.
###
class RpcWebsocketProxyPlugin(BaseIsolatedPlugin):
@property
def name(self) -> str:
return "RPC Websocket Proxy"
def should_start(self) -> bool:
return (not self.context.args.disable_rpc_websocket_proxy) and self.context.chain_config.is_main_instance
def configure_parser(self, arg_parser: ArgumentParser, subparser: _SubParsersAction) -> None:
arg_parser.add_argument(
'--disable_rpc_websocket_proxy-NOT_USED',
action="store_true",
help="Should we disable the RPC websocket proxy server?",
)
def start(self) -> None:
self.logger.info('RPC Websocket proxy started')
self.context.event_bus.connect()
proxy_url = "ws://0.0.0.0:" + str(self.context.chain_config.rpc_port)
rpc_websocket_proxy_service = rpc_websocket_proxy(proxy_url, self.context.chain_config.jsonrpc_ipc_path)
rpc_websocket_proxy_service.run()
|
[
"admin@hyperevo.com"
] |
admin@hyperevo.com
|
6d4d1d60f2c789f78d8d5f3257764908e635553d
|
809f263b77b525549cd945c39c4c9cf2b8e6a167
|
/pqcrypto/sign/sphincs_shake256_192s_simple.py
|
6a45aa9c023c21f67bb3b6b83ca198236bb3e8f7
|
[
"BSD-3-Clause"
] |
permissive
|
Kayuii/pqcrypto
|
bdf5014b7590dfe363baedbf47171f4b4cb25349
|
dd8c56fd876a397caef06a00d35537a4f9c1db28
|
refs/heads/master
| 2022-12-14T00:34:36.632689
| 2020-09-08T10:40:26
| 2020-09-08T10:40:26
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 447
|
py
|
from .._sign.sphincs_shake256_192s_simple import ffi as __ffi, lib as __lib
from .common import _sign_generate_keypair_factory, _sign_sign_factory, _sign_verify_factory
PUBLIC_KEY_SIZE = __lib.CRYPTO_PUBLICKEYBYTES
SECRET_KEY_SIZE = __lib.CRYPTO_SECRETKEYBYTES
SIGNATURE_SIZE = __lib.CRYPTO_BYTES
generate_keypair = _sign_generate_keypair_factory(__ffi, __lib)
sign = _sign_sign_factory(__ffi, __lib)
verify = _sign_verify_factory(__ffi, __lib)
|
[
"inbox@philonas.net"
] |
inbox@philonas.net
|
13afaec093ca5dbb37ccc72918e13c91b3555344
|
2bb90b620f86d0d49f19f01593e1a4cc3c2e7ba8
|
/pardus/tags/2011/util/shell/command-not-found/actions.py
|
fd6ae5ea60b235f2996161b9d5463089b352de0a
|
[] |
no_license
|
aligulle1/kuller
|
bda0d59ce8400aa3c7ba9c7e19589f27313492f7
|
7f98de19be27d7a517fe19a37c814748f7e18ba6
|
refs/heads/master
| 2021-01-20T02:22:09.451356
| 2013-07-23T17:57:58
| 2013-07-23T17:57:58
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 647
|
py
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright 2008-2010 TUBITAK/UEKAE
# Licensed under the GNU General Public License, version 2.
# See the file http://www.gnu.org/licenses/old-licenses/gpl-2.0.txt
from pisi.actionsapi import pisitools
from pisi.actionsapi import get
def install():
pisitools.dobin("src/command-not-found")
pisitools.insinto("/var/db/command-not-found", "data/packages-%s.db" % get.ARCH(), "packages.db")
for lang in ["da", "de", "es", "fr", "hu", "it", "nl", "ru", "sv", "tr"]:
pisitools.domo("po/%s.po" % lang, lang, "command-not-found.mo")
pisitools.dodoc("AUTHORS", "COPYING", "README")
|
[
"yusuf.aydemir@istanbul.com"
] |
yusuf.aydemir@istanbul.com
|
57f0473df75e076251d0ff6afe0e60431dd1b124
|
5259532bb41382bc05c7f311fdee65c67f67990e
|
/Tools/SampleTool/UI_SampleMainForm.py
|
233a453909cfe5c2f121227d1c0c5bfe19a1f080
|
[] |
no_license
|
csjy309450/MLTools_PyQt4
|
57905cc78284d87349eda511fc78c43f3527bbeb
|
d1af57c279fd12428cda303d22e7a732db3ff257
|
refs/heads/master
| 2021-04-29T10:36:54.792400
| 2018-02-28T17:03:08
| 2018-02-28T17:03:08
| 77,835,494
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 8,513
|
py
|
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'SampleToolWidget.ui'
#
# Created by: PyQt4 UI code generator 4.11.4
#
# WARNING! All changes made in this file will be lost!
from PyQt4 import QtCore, QtGui
import CopyForm as cf
try:
_fromUtf8 = QtCore.QString.fromUtf8
except AttributeError:
def _fromUtf8(s):
return s
try:
_encoding = QtGui.QApplication.UnicodeUTF8
def _translate(context, text, disambig):
return QtGui.QApplication.translate(context, text, disambig, _encoding)
except AttributeError:
def _translate(context, text, disambig):
return QtGui.QApplication.translate(context, text, disambig)
class UI_SampleMainForm(object):
"""
UI in Sample Tool Main Widget
"""
def setupUi(self, Form):
"""
        Initialize the window UI.
:param Form:
:return:
"""
self.mainForm = Form
Form.setObjectName(_fromUtf8("Form"))
Form.setWindowModality(QtCore.Qt.NonModal)
Form.resize(705, 579)
        ## Object member variables
        # Image displayed in the QLabel widget
self.qImg = QtGui.QPixmap()
self.currentFrameNum = -1
self.filePathsList = QtCore.QStringList()
        # Get the window size
self.widRect = Form.frameGeometry()
        ## Widget layout
        # QWidget panel holding the entire vertical layout
self.VLayoutWidget = QtGui.QWidget(Form)
self.VLayoutWidget.setGeometry(self.widRect)
self.VLayoutWidget.setObjectName(_fromUtf8("VLayoutWidget"))
        # Two QSlider widgets on the first row
        # HSlider_copyWidScale controls the size of the copyWidget window
self.HSlider_copyWidScale = QtGui.QSlider(self.VLayoutWidget)
self.HSlider_copyWidScale.setCursor(QtGui.QCursor(QtCore.Qt.SizeHorCursor))
self.HSlider_copyWidScale.setOrientation(QtCore.Qt.Horizontal)
self.HSlider_copyWidScale.setObjectName(_fromUtf8("HSlider_copyWidScale"))
        # Controls the image resolution
self.HSlider_imgScale = QtGui.QSlider(self.VLayoutWidget)
self.HSlider_imgScale.setCursor(QtGui.QCursor(QtCore.Qt.SizeHorCursor))
self.HSlider_imgScale.setOrientation(QtCore.Qt.Horizontal)
self.HSlider_imgScale.setObjectName(_fromUtf8("HSlider_imgScale"))
        # Widget panel inside the scroll area
self.scrollAreaWidgetContents = QtGui.QWidget()
# self.scrollAreaWidgetContents.setGeometry(QtCore.QRect(0, 0, 100, 100))
self.scrollAreaWidgetContents.setObjectName(_fromUtf8("scrollAreaWidgetContents"))
self.scrollAreaWidgetContents.setMinimumSize(1200, 1200)
        # QLabel inside the scroll-area panel
self.label = QtGui.QLabel(self.scrollAreaWidgetContents)
# self.label.setGeometry(QtCore.QRect(0, 0, 500, 500))
# self.label.setPixmap(self.img)
# self.label.setGeometry(self.img.rect())
# self.label.setObjectName(_fromUtf8("label"))
# self.scrollAreaWidgetContents.setMinimumSize(self.img.size())
        # Scroll-area window
self.scrollArea = QtGui.QScrollArea(self.VLayoutWidget)
self.scrollArea.setWidgetResizable(True)
self.scrollArea.setObjectName(_fromUtf8("scrollArea"))
self.scrollArea.setAutoFillBackground(True)
self.scrollArea.setWidgetResizable(True)
self.scrollArea.setGeometry(QtCore.QRect(0, 0, 80, 80))
self.scrollArea.setWidget(self.scrollAreaWidgetContents)
##layout
        # Inner horizontal grid layout
self.horizontalLayout = QtGui.QHBoxLayout()
self.horizontalLayout.setObjectName(_fromUtf8("horizontalLayout"))
        # Add the sliders defined above
self.horizontalLayout.addWidget(self.HSlider_copyWidScale)
self.horizontalLayout.addWidget(self.HSlider_imgScale)
        # Outer vertical grid layout
self.verticalLayout = QtGui.QVBoxLayout(self.VLayoutWidget)
self.verticalLayout.setObjectName(_fromUtf8("verticalLayout"))
        # Add the horizontal grid and the scroll area in order
self.verticalLayout.addLayout(self.horizontalLayout)
self.verticalLayout.addWidget(self.scrollArea)
self.retranslateUi(Form)
QtCore.QMetaObject.connectSlotsByName(Form)
self.__InitMenubar()
def __InitMenubar(self):
action_exit = QtGui.QAction(QtGui.QIcon(), '&exit', self.mainForm)
action_exit.triggered.connect(self.mainForm.close)
action_load = QtGui.QAction(QtGui.QIcon(), '&load', self.mainForm)
action_load.triggered.connect(self.On_Action_Load)
action_next = QtGui.QAction(QtGui.QIcon(), '&next', self.mainForm)
action_next.triggered.connect(self.On_Action_Next)
action_previous = QtGui.QAction(QtGui.QIcon(), '&previous', self.mainForm)
action_previous.triggered.connect(self.On_Action_Previous)
action_screenShot = QtGui.QAction(QtGui.QIcon(), '&screen shot', self.mainForm)
action_screenShot.triggered.connect(self.On_Action_ScreenShot)
menubar = self.mainForm.menuBar()
fileMenu = menubar.addMenu('&file')
fileMenu.addAction(action_load)
fileMenu.addAction(action_next)
fileMenu.addAction(action_previous)
fileMenu.addAction(action_screenShot)
fileMenu.addAction(action_exit)
def On_Action_Load(self, event):
self.filePathsList = QtGui.QFileDialog.getOpenFileNames(self.mainForm, 'Open file', '/home')
for filePath in self.filePathsList:
print filePath
print self.filePathsList.count()
self.currentFrameNum = -1
self.On_Action_Next(None)
def __getParentClientSize(self):
return self.scrollArea.size() - QtCore.QSize(
self.scrollArea.verticalScrollBar().width(),
self.scrollArea.horizontalScrollBar().height())
def showImage(self):
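        # Load the current frame from disk, show it in the QLabel, and resize
        # the scroll area and main window to fit the image.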
dis = (abs(self.horizontalLayout.geometry().left() - 0),
abs(self.horizontalLayout.geometry().right() - self.mainForm.width()),
abs(self.horizontalLayout.geometry().top() - 0),
abs(self.mainForm.height() - self.scrollArea.geometry().bottom()))
        # Load the image from disk
self.qImg.load(self.filePathsList[self.currentFrameNum])
        # Show it in the QLabel and resize the QLabel to the image size
self.label.setPixmap(self.qImg)
self.label.setGeometry(self.qImg.rect())
        # # Set the size of the QWidget area inside the QScrollArea
self.scrollAreaWidgetContents.setMinimumSize(self.qImg.size())
        # # # Resize the scrollArea according to the image size
self.scrollArea.setMaximumSize(self.qImg.size() + QtCore.QSize(self.scrollArea.verticalScrollBar().width(),
self.scrollArea.horizontalScrollBar().height()))
        # Compute the maximum window size allowed for the current image
# print self.horizontalLayout.geometry()
# print self.mainForm.size()
# print self.scrollArea.geometry()
# print dis
self.mainForm.setMaximumSize(self.scrollArea.maximumSize() + QtCore.QSize(
dis[0]+dis[1], self.HSlider_imgScale.height()+dis[2]+dis[3]))
def On_Action_Next(self, event):
if self.currentFrameNum + 1 < self.filePathsList.count():
self.currentFrameNum += 1
self.showImage()
self.mainForm.repaint()
try:
self.copyForm.UpdateImg()
except Exception, e:
pass
def On_Action_Previous(self, event):
if self.currentFrameNum - 1 >= 0:
self.currentFrameNum -= 1
self.showImage()
self.mainForm.repaint()
try:
self.copyForm.UpdateImg()
except Exception, e:
pass
def On_Action_ScreenShot(self, event):
self.copyForm = cf.CopyForm(self.qImg, self.scrollArea)
# self.mainForm.connect(self.copyForm._sinal, QtCore.SIGNAL('Signal_Key(PyQt_PyObject)'),
# self.mainForm, QtCore.SLOT("On_Key_CopyForm(PyQt_PyObject)"))
self.mainForm.connect(self.copyForm, QtCore.SIGNAL('Signal_Key(PyQt_PyObject)'),
self.mainForm, QtCore.SLOT("On_Key_CopyForm(PyQt_PyObject)"))
def retranslateUi(self, Form):
"""
:param Form:
:return:
"""
Form.setWindowTitle(_translate("Form", "Sample Tool", None))
|
[
"="
] |
=
|
d615b3f87e95f821b1ad96c4a961165d3dcfb242
|
1924da60fa3298e386acc6dac9bd390784a9b5bb
|
/test18.py
|
2eaca7b55757608822e1ea3f6eebcce199ba5a68
|
[] |
no_license
|
yukitomo/NLP100DrillExercises
|
c8a177b56f798cef225ace540e965809a1fc1fbc
|
ea2ceb366de1fa1f27d084e3b9328cc6f34ac1dd
|
refs/heads/master
| 2020-06-01T02:55:11.423238
| 2015-06-10T15:39:03
| 2015-06-10T15:39:03
| 37,205,750
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 425
|
py
|
#!/usr/bin/python
#-*-coding:utf-8-*-
# (18) Design your own regular expression that matches address-like expressions for Sendai City, and extract them.
#python test18.py tweet.txt
import sys
import re
pattern = re.compile(u'(仙台市)([^\s\w\d ]{1,20}[\d0-9〇一-九十上下東西]+)*')
for line in sys.stdin:
line = line.decode("utf-8")
match=pattern.search(line)
if match:
print match.group(0).encode("utf-8")
|
[
"over.the.tr0ouble@gmail.com"
] |
over.the.tr0ouble@gmail.com
|
742a110bb63077d24dc9f3b001ade6455c465a66
|
0b85fbdd58eab30cf2ed5676a9c331c1ab6152f6
|
/cdp_viz/handlers/services/dl.py
|
a59f386a88dc13816e7f7bb9a1accba49a601a15
|
[] |
no_license
|
pymonger/cdp-viz-pyramid
|
82ddac3552a0da9c1a831959ff28fdb3b21c126f
|
32c5f3d6f1d63c1e7e6131876da9a19ab3d25e93
|
refs/heads/master
| 2020-03-28T23:46:17.564043
| 2013-02-06T17:48:29
| 2013-02-06T17:48:29
| 149,307,796
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,392
|
py
|
import logging, simplejson, pprint, re, os, sys
from urllib2 import urlopen
from urllib import urlencode
from datetime import datetime
from string import Template
from Levenshtein import ratio, median
from pyramid.httpexceptions import HTTPFound
from pyramid_handlers import action
from beaker.cache import CacheManager
import cdp_viz.handlers.base as base
import cdp_viz.models as model
from cdp_viz.lib.timeUtils import getDatetimeFromString, getISODateTimeString
from cdp_viz.lib.sparql import MD5_SPARQL_TMPL, MANIFEST_SPARQL_TMPL, sparqlQuery
from cdp_viz.lib.sessionGraph import rdf2sessionGraph
log = logging.getLogger(__name__)
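# Shell-script template: the generated script downloads every file listed in
# session_manifest.txt from the LDOS server and restores its original permissions.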
CDE_PACKAGE_TMPL = Template('''#!/bin/sh
wget "${LDOS_BASE_URL}/data/download?hash=${hash}" -O $file
for i in `cat session_manifest.txt`; do
file=`echo $$i | awk 'BEGIN{FS=","}{print $$1}'`
filebase=`basename $$file`
dir=`dirname $$file`
md5=`echo $$i | awk 'BEGIN{FS=","}{print $$2}'`
oct_perms=`echo $$i | awk 'BEGIN{FS=","}{print $$6}'`
perms=`python -c "print oct(int($$oct_perms))[-3:]"`
mkdir -p $$dir
wget -q "${LDOS_BASE_URL}/data/download?file=$${filebase}&hash=$${md5}" -O $$file
chmod $$perms $$file
echo "downloaded: $$file"
done
''')
class Download(base.Handler):
@action(renderer="string")
def sessionEntities(self):
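        # Run the MD5 SPARQL query for this session and return a shell script of wget commands for its entities.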
sessionId = self.request.params.get('sessionId')
#log.debug("sessionId: %s" % sessionId)
d = simplejson.loads(sparqlQuery(MD5_SPARQL_TMPL.substitute(uri=sessionId)))
#log.debug(pprint.pformat(d))
wgetLines = []
for res in d['results']['bindings']:
entity = res['entity']['value']
hash = res['md5']['value']
match = re.search(r'http://provenance\.jpl\.nasa\.gov/cdp#(.*?)/\d{4}-\d{2}-\d{2}T\d{2}_\d{2}_\d{2}.*?$', entity)
if match:
file = os.path.basename(match.group(1))
wgetLines.append("wget %s/data/download?hash=%s -O %s" %
(self.request.registry.settings['ldos.url'], hash, file))
return "#!/bin/sh\n%s\n" % "\n".join(wgetLines)
@action(renderer="string")
def cde(self):
self.request.response.content_disposition = 'attachment; filename="wget_cde_package.sh"'
sessionId = self.request.params.get('sessionId')
#log.debug("sessionId: %s" % sessionId)
d = simplejson.loads(sparqlQuery(MANIFEST_SPARQL_TMPL.substitute(uri=sessionId)))
#log.debug(pprint.pformat(d))
wgetLines = []
for res in d['results']['bindings']:
loc = res['loc']['value']
hash = res['md5']['value']
match = re.search(r'(.*?)(?:/\d{4}-\d{2}-\d{2}T\d{2}_\d{2}_\d{2}.*?)?$', loc)
if match:
file = os.path.basename(match.group(1))
return CDE_PACKAGE_TMPL.substitute(LDOS_BASE_URL=self.request.registry.settings['ldos.url'],
hash=hash,
file=file)
return "No CDE package for session %s." % sessionId
def download(self):
filename = self.request.params.get('filename')
md5 = self.request.params.get('hash')
return HTTPFound(location="%s/data/download?filename=%s&hash=%s" % (
self.request.registry.settings['ldos.url'],
filename, md5))
|
[
"pymonger@gmail.com"
] |
pymonger@gmail.com
|
5c970dffe7023ba46848e3b65f0ad476cbb2b53e
|
29145db13229d311269f317bf2819af6cba7d356
|
/april circuits/shifts.py
|
bb24c3d034ba66dfbb7a8eba41e99923e3127ea4
|
[] |
no_license
|
rocket3989/hackerEarth2019
|
802d1ca6fd03e80657cbe07a3f123e087679af4d
|
42c0a7005e52c3762496220136cc5c1ee93571bb
|
refs/heads/master
| 2021-07-05T01:32:42.203964
| 2020-12-22T03:40:20
| 2020-12-22T03:40:20
| 211,607,143
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 252
|
py
|
for tc in range(int(input())):
N, M, c = input().split()
N = int(N)
M = int(M)
N = bin(N)[2:].zfill(16)
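    # rotate the 16-bit binary string by M positions: 'L' rotates left, anything else rotates right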
if c == 'L':
print(int(N[M:] + N[:M], 2))
else:
print(int(N[16 - M:] + N[:16 - M], 2))
|
[
"rocket3989@gmail.com"
] |
rocket3989@gmail.com
|
37918bdb0d4e31428108d8434477b8686f64c19d
|
f75609812d20d46a9f94ee0cfdb91c321d26b63d
|
/flask/flask_fundamentals/Number_Game/server.py
|
6830ce31939d2a6ef2ce63d2e02eb346853fbccf
|
[] |
no_license
|
IanAranha/Python2021
|
eff47a20451f61b144b17f48321a7b06308aadca
|
d9769b8b387b77753b77f6efe3a9a270a1f158d3
|
refs/heads/main
| 2023-04-02T08:20:24.382913
| 2021-04-10T22:27:10
| 2021-04-10T22:27:10
| 345,918,060
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 865
|
py
|
from flask import Flask, redirect, render_template, session, request
import random
app = Flask(__name__)
app.secret_key = "0004ThisIsASecretKey"
@app.route("/")
def index():
if "random_number" not in session:
session["random_number"] = random.randrange(0, 101)
return render_template("index.html")
@app.route("/guess", methods=["post"])
def guess():
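    # reject an empty guess; otherwise record whether it is low, high or correct in the session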
if request.form["input"] == "":
print("Cannot be blank")
return redirect("/")
session["guessed_num"] = int(request.form["input"])
if session["guessed_num"] < session["random_number"]:
session["state"] = "low"
elif session["guessed_num"] > session["random_number"]:
session["state"] = "high"
else:
session["state"] = "correct"
return redirect("/")
@app.route("/reset", methods=["post"])
def reset():
session.clear()
return redirect('/')
if __name__ == "__main__":
app.run(debug=True)
|
[
"ianorama@gmail.com"
] |
ianorama@gmail.com
|
243c193623591d29bb3fa6344bb1b2d31f4adb6f
|
2753757e2d13f5dd0d1faf1264031d476e162975
|
/others/assignment/temp.py
|
ae6d46f2f1fb03391ed5c73d858f9a215d0d38a0
|
[] |
no_license
|
florije1988/Suggestions
|
c8846dd089eab816051ecc1fc43a7fcc07580194
|
23718968acc16fa243c248a6ac3d4715c53daaa1
|
refs/heads/master
| 2020-05-20T07:01:54.292081
| 2014-08-11T07:52:02
| 2014-08-11T07:52:02
| 21,298,258
| 1
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,279
|
py
|
# -*- coding: utf-8 -*-
__author__ = 'florije'
import time
def reverse_str(str_arg):
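    # recursively reverse the string: last character followed by the reversed remainder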
if len(str_arg) == 1:
return str_arg
else:
return str_arg[-1] + reverse_str(str_arg[:-1])
if __name__ == '__main__':
# s_arg = input('list:')
# print s_arg
# print type(s_arg)
# for i in range(1, 20):
# print '%02d' % i
#
# print "Age:%02d" % 1
# title = ''
# f =file("%s.html" % title, "a")
u = u'汉'
print repr(u)
s = u.encode('UTF-8')
print repr(s)
u2 = s.decode('UTF-8')
print u2
print repr(u2) # u'\u6c49'
    # decoding a unicode object would be an error
# s2 = u.decode('UTF-8')
    # likewise, encoding a str object would be an error
# u2 = s.encode('UTF-8')
a = ['anhui:0', 'shtel1:0', 'shtel2:0', 'weinan3:0', 'weinan1:0', 'weinan2:0', 'luckyhost:100', 'crh:99']
a.sort(key=lambda item: int(item.split(':')[1]))
print a
print reverse_str('fuboqing')
t = time.clock()
a = [1, 2, 3, 4, 5, 6, 7, 8, 9]
# print [item * 3 if tx.index(item) < 3 else item for item in tx]
# tx[:3] = [i*3 for i in tx[:3]]
# print tx
def aa(a):
a[0] = a[0] * 3
a[1] = a[1] * 3
a[2] = a[2] * 3
return a
print aa(a)
print time.clock() - t
|
[
"florije1988@gmail.com"
] |
florije1988@gmail.com
|
7f44ed7c492048c7a2268982590b8ef20b58f77e
|
75dcb56e318688499bdab789262839e7f58bd4f6
|
/_algorithms_challenges/practicepython/python-exercises-master/07-list-comprehension/exercise.py
|
894ad5cd07e85383178aea3f7a25e85196b75242
|
[] |
no_license
|
syurskyi/Algorithms_and_Data_Structure
|
9a1f358577e51e89c862d0f93f373b7f20ddd261
|
929dde1723fb2f54870c8a9badc80fc23e8400d3
|
refs/heads/master
| 2023-02-22T17:55:55.453535
| 2022-12-23T03:15:00
| 2022-12-23T03:15:00
| 226,243,987
| 4
| 1
| null | 2023-02-07T21:01:45
| 2019-12-06T04:14:10
|
Jupyter Notebook
|
UTF-8
|
Python
| false
| false
| 270
|
py
|
#!/usr/bin/env python
if __name__ == '__main__':
all = [1, 4, 9, 16, 25, 36, 49, 64, 81, 100]
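    # list comprehensions split the squares into odd and even values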
odd = [number for number in all if number % 2 == 1]
even = [number for number in all if number % 2 == 0]
print("All: " + str(all) + '\nOdd: ' + str(odd))
|
[
"sergejyurskyj@yahoo.com"
] |
sergejyurskyj@yahoo.com
|
b5fc5c27bf55103c13421385e42b252a54f84749
|
0c1d6b8dff8bedfffa8703015949b6ca6cc83f86
|
/lib/worklists/operator/CT/v4.0/business/GPON_2+1/QoS_DSCP/script.py
|
8027050e03e5cef79e0d59b75c244127b0de19af
|
[] |
no_license
|
samwei8/TR069
|
6b87252bd53f23c37186c9433ce4d79507b8c7dd
|
7f6b8d598359c6049a4e6cb1eb1db0899bce7f5c
|
refs/heads/master
| 2021-06-21T11:07:47.345271
| 2017-08-08T07:14:55
| 2017-08-08T07:14:55
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 4,812
|
py
|
#coding:utf-8
# -----------------------------rpc --------------------------
import os
import sys
#debug
DEBUG_UNIT = False
if (DEBUG_UNIT):
g_prj_dir = os.path.dirname(__file__)
parent1 = os.path.dirname(g_prj_dir)
parent2 = os.path.dirname(parent1)
parent3 = os.path.dirname(parent2)
parent4 = os.path.dirname(parent3) # tr069v3\lib
parent5 = os.path.dirname(parent4) # tr069v3\
sys.path.insert(0, parent4)
sys.path.insert(0, os.path.join(parent4, 'common'))
sys.path.insert(0, os.path.join(parent4, 'worklist'))
sys.path.insert(0, os.path.join(parent4, 'usercmd'))
sys.path.insert(0, os.path.join(parent5, 'vendor'))
from TR069.lib.common.event import *
from TR069.lib.common.error import *
from time import sleep
import TR069.lib.common.logs.log as log
g_prj_dir = os.path.dirname(__file__)
parent1 = os.path.dirname(g_prj_dir)
parent2 = os.path.dirname(parent1) # dir is system
try:
i = sys.path.index(parent2)
if (i !=0):
        # strategy: move this path to the front of sys.path
sys.path.pop(i)
sys.path.insert(0, parent2)
except Exception,e:
sys.path.insert(0, parent2)
import _Common
reload(_Common)
from _Common import *
import _QoS
reload(_QoS)
from _QoS import QoS
def test_script(obj):
"""
"""
    sn = obj.sn  # get the device serial number (SN)
    DeviceType = "GPON"  # TR069 template type to bind; only ADSL, LAN and EPON are supported
    rollbacklist = []  # instances to delete on rollback when the worklist fails; rollback is disabled by default
    # initialize the log output
obj.dict_ret.update(str_result=u"开始执行工单:%s........\n" %
os.path.basename(os.path.dirname(__file__)))
    # parameters passed in via obj.dict_data
Max = obj.dict_data.get("Max")[0]
Min = obj.dict_data.get("Min")[0]
ClassQueue = obj.dict_data.get("ClassQueue")[0]
DSCPMarkValue = obj.dict_data.get("DSCPMarkValue")[0]
M802_1_P_Value = obj.dict_data.get("M802_1_P_Value")[0]
    # parameters under the X_CT-COM_UplinkQoS node
dict_root = {'Mode':[0, 'Null'],
'Enable':[1, '1'],
'Bandwidth':[0, 'Null'],
'Plan':[1, 'priority'],
'EnableForceWeight':[0, 'Null'],
'EnableDSCPMark':[1, '1'],
'Enable802-1_P':[1, '2']}
    # parameters under the X_CT-COM_UplinkQoS.App.{i}. node
dict_app = {'AppName':[0, 'Null'],
'ClassQueue':[0, 'Null']}
    # parameters under the X_CT-COM_UplinkQoS.Classification.{i}.type.{i}. node
    # note: a list of dicts is used because the service-based QoS guarantee test (UDP) needs multiple instances
list_value_type = [{'Type':[1, 'DSCP'],
'Max':[1, Max],
'Min':[1, Min],
'ProtocolList':[1, 'TCP,UDP']}]
    # parameters under the X_CT-COM_UplinkQoS.Classification.{i}. node
dict_classification = {'ClassQueue':[1, ClassQueue],
'DSCPMarkValue':[1, DSCPMarkValue],
'802-1_P_Value':[1, M802_1_P_Value]}
    # parameters under the X_CT-COM_UplinkQoS.PriorityQueue.{i}. node
dict_priorityQueue = {'Enable':[1, '1'],
'Priority':[1, '1'],
'Weight':[0, 'Null']}
    # start executing the QoS worklist
ret, ret_data = QoS(obj, sn, DeviceType, dict_root,
dict_app, list_value_type,
dict_classification, dict_priorityQueue,
change_account=1,
rollbacklist=rollbacklist)
    # write the worklist script result back into obj's result
obj.dict_ret.update(str_result=obj.dict_ret["str_result"] + ret_data)
    # on failure, invoke the unified rollback mechanism (disabled by default)
if ret == ERR_FAIL:
ret_rollback, ret_data_rollback = rollback(sn, rollbacklist, obj)
obj.dict_ret.update(str_result=obj.dict_ret["str_result"] + ret_data_rollback)
info = u"工单:%s执行结束\n" % os.path.basename(os.path.dirname(__file__))
obj.dict_ret.update(str_result=obj.dict_ret["str_result"] + info)
return ret
if __name__ == '__main__':
log_dir = g_prj_dir
log.start(name="nwf", directory=log_dir, level="DebugWarn")
log.set_file_id(testcase_name="tr069")
obj = MsgWorklistExecute(id_="1")
obj.sn = "3F3001880F5CAD80F"
dict_data= {"Min":("10","1"),"Max":("10","2"),
"DSCPMarkValue":("1","3"),"M802_1_P_Value":("1","4"),
"ClassQueue":("1","5")}
obj.dict_data = dict_data
try:
ret = test_script(obj)
if ret == ERR_SUCCESS:
print u"测试成功"
else:
print u"测试失败"
print "****************************************"
print obj.dict_ret["str_result"]
except Exception, e:
print u"测试异常"
|
[
"zhaojunhhu@gmail.com"
] |
zhaojunhhu@gmail.com
|
06e91545546c5d5f9f8c5ae573bbd5682f098d9e
|
e7b7cc34f77c71e61aa0fa05bcc62f54fc2fc0e1
|
/Array/test_q056_merge_intervals.py
|
144e68cff13f68e05cc835a31a46718e9c0dfad5
|
[] |
no_license
|
sevenhe716/LeetCode
|
41d2ef18f5cb317858c9b69d00bcccb743cbdf48
|
4a1747b6497305f3821612d9c358a6795b1690da
|
refs/heads/master
| 2020-03-16T16:12:27.461172
| 2019-04-22T13:27:54
| 2019-04-22T13:27:54
| 130,221,784
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 498
|
py
|
import unittest
from Array.q056_merge_intervals import Solution
from common import Interval
class TestMergeIntervals(unittest.TestCase):
"""Test q056_merge_intervals.py"""
def test_merge_intervals(self):
s = Solution()
self.assertEqual([[1, 6], [8, 10], [15, 18]], s.merge([Interval(1, 3), Interval(2, 6), Interval(8, 10), Interval(15, 18)]))
self.assertEqual([[1, 5]], s.merge([Interval(1, 4), Interval(4, 5)]))
if __name__ == '__main__':
unittest.main()
|
[
"429134862@qq.com"
] |
429134862@qq.com
|
2b9e1a91205de5663111b9f61c7cc6a51b919853
|
53faa0ef3496997412eb5e697bc85eb09a28f8c9
|
/supervised_learning/0x06-keras/5-main.py
|
4c36d29b9b95d170647282429ea17053b98b29ca
|
[] |
no_license
|
oran2527/holbertonschool-machine_learning
|
aaec2ffe762b959573f98a5f4e002272a5d643a3
|
8761eb876046ad3c0c3f85d98dbdca4007d93cd1
|
refs/heads/master
| 2023-08-14T00:37:31.163130
| 2021-09-20T13:34:33
| 2021-09-20T13:34:33
| 330,999,053
| 0
| 2
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,440
|
py
|
#!/usr/bin/env python3
"""
Main file
"""
# Force Seed - fix for Keras
SEED = 0
import os
os.environ['PYTHONHASHSEED'] = str(SEED)
import random
random.seed(SEED)
import numpy as np
np.random.seed(SEED)
import tensorflow as tf
tf.set_random_seed(SEED)
import tensorflow.keras as K
session_conf = tf.ConfigProto(intra_op_parallelism_threads=1, inter_op_parallelism_threads=1)
sess = tf.Session(graph=tf.get_default_graph(), config=session_conf)
K.backend.set_session(sess)
# Imports
build_model = __import__('1-input').build_model
optimize_model = __import__('2-optimize').optimize_model
one_hot = __import__('3-one_hot').one_hot
train_model = __import__('5-train').train_model
if __name__ == '__main__':
datasets = np.load('../data/MNIST.npz')
X_train = datasets['X_train']
X_train = X_train.reshape(X_train.shape[0], -1)
Y_train = datasets['Y_train']
Y_train_oh = one_hot(Y_train)
X_valid = datasets['X_valid']
X_valid = X_valid.reshape(X_valid.shape[0], -1)
Y_valid = datasets['Y_valid']
Y_valid_oh = one_hot(Y_valid)
lambtha = 0.0001
keep_prob = 0.95
network = build_model(784, [256, 256, 10], ['relu', 'relu', 'softmax'], lambtha, keep_prob)
alpha = 0.001
beta1 = 0.9
beta2 = 0.999
optimize_model(network, alpha, beta1, beta2)
batch_size = 64
epochs = 5
train_model(network, X_train, Y_train_oh, batch_size, epochs, validation_data=(X_valid, Y_valid_oh))
|
[
"orlago250183@gmail.com"
] |
orlago250183@gmail.com
|
a2d10d6ff44f902b929f0b62b703589f1f7756f7
|
19d43b8c175bb5304393cf9c259eacb7110dd4fc
|
/objectModel/Python/cdm/resolvedmodel/resolved_attribute.py
|
77b50937eb6d60885c0362dca92be9f242d7eb5e
|
[
"CC-BY-4.0",
"MIT"
] |
permissive
|
bissont/CDM
|
3fd814566ea1bf9d19e300cd5b438b384ce4bcba
|
0cffb140e0b41e526be072b547cae91a03c4cd6f
|
refs/heads/master
| 2020-12-29T12:55:23.822187
| 2020-02-05T02:19:27
| 2020-02-05T02:19:27
| 238,614,156
| 1
| 0
| null | 2020-02-06T05:21:51
| 2020-02-06T05:21:50
| null |
UTF-8
|
Python
| false
| false
| 5,092
|
py
|
# ----------------------------------------------------------------------
# Copyright (c) Microsoft Corporation.
# All rights reserved.
# ----------------------------------------------------------------------
from typing import Any, cast, Optional, Union, TYPE_CHECKING
from cdm.resolvedmodel.resolved_trait_set import ResolvedTraitSet
if TYPE_CHECKING:
from cdm.objectmodel import CdmAttribute, CdmAttributeContext, CdmObject, SpewCatcher
from cdm.resolvedmodel import AttributeResolutionContext, ResolvedAttributeSet
from cdm.utilities import ApplierState, ResolveOptions, TraitToPropertyMap
ResolutionTarget = Union[CdmAttribute, ResolvedAttributeSet]
class ResolvedAttribute():
def __init__(self, res_opt: 'ResolveOptions', target: 'ResolutionTarget', default_name: str, att_ctx: 'CdmAttributeContext') -> None:
self.applier_state = None # type: Optional[ApplierState]
self.arc = None # type: Optional[AttributeResolutionContext]
self.att_ctx = att_ctx # type: CdmAttributeContext
self.insert_order = 0 # type: int
self.previous_resolved_name = default_name # type: str
self.resolved_traits = ResolvedTraitSet(res_opt) # type: ResolvedTraitSet
self.target = target # type: ResolutionTarget
self._resolved_name = default_name # type: str
self._ttpm = None # type: Optional[TraitToPropertyMap]
@property
def resolved_name(self) -> str:
return self._resolved_name
@resolved_name.setter
def resolved_name(self, value: str) -> None:
self._resolved_name = value
if self.previous_resolved_name is None:
self.previous_resolved_name = value
@property
def is_primary_key(self) -> Optional[bool]:
return self._trait_to_property_map.fetch_property_value('isPrimaryKey')
@property
def is_read_only(self) -> Optional[bool]:
return self._trait_to_property_map.fetch_property_value('isReadOnly')
@property
def is_nullable(self) -> Optional[bool]:
return self._trait_to_property_map.fetch_property_value('isNullable')
@property
def data_format(self) -> str:
return self._trait_to_property_map.fetch_property_value('dataFormat')
@property
def source_name(self) -> str:
return self._trait_to_property_map.fetch_property_value('sourceName')
@property
def source_ordering(self) -> Optional[int]:
return self._trait_to_property_map.fetch_property_value('sourceOrdering')
@property
def display_name(self) -> str:
return self._trait_to_property_map.fetch_property_value('displayName')
@property
def description(self) -> str:
return self._trait_to_property_map.fetch_property_value('description')
@property
def maximum_value(self) -> str:
return self._trait_to_property_map.fetch_property_value('maximumValue')
@property
def minimum_value(self) -> str:
return self._trait_to_property_map.fetch_property_value('minimumValue')
@property
def maximum_length(self) -> Optional[int]:
return self._trait_to_property_map.fetch_property_value('maximumLength')
@property
def value_constrained_to_list(self) -> Optional[bool]:
return self._trait_to_property_map.fetch_property_value('valueConstrainedToList')
@property
def default_value(self) -> Any:
return self._trait_to_property_map.fetch_property_value('defaultValue')
@property
def creation_sequence(self) -> int:
return self.insert_order
@property
def _trait_to_property_map(self) -> 'TraitToPropertyMap':
from cdm.utilities import TraitToPropertyMap
if self._ttpm is not None:
return self._ttpm
self._ttpm = TraitToPropertyMap(cast('CdmObject', self.target))
return self._ttpm
def copy(self) -> 'ResolvedAttribute':
# Use the options from the traits.
copy = ResolvedAttribute(self.resolved_traits.res_opt, self.target, self._resolved_name, self.att_ctx)
copy.resolved_traits = self.resolved_traits.shallow_copy()
copy.insert_order = self.insert_order
copy.arc = self.arc
if self.applier_state is not None:
copy.applier_state = self.applier_state.copy()
return copy
def spew(self, res_opt: 'ResolveOptions', to: 'SpewCatcher', indent: str, name_sort: bool) -> None:
to.spew_line('{}[{}]'.format(indent, self._resolved_name))
self.resolved_traits.spew(res_opt, to, indent + '-', name_sort)
def complete_context(self, res_opt: 'ResolveOptions') -> None:
from cdm.objectmodel import CdmAttribute
if self.att_ctx is None or self.att_ctx.name is not None:
return
self.att_ctx.name = self._resolved_name
if isinstance(self.target, CdmAttribute):
self.att_ctx.definition = self.target.create_simple_reference(res_opt)
self.att_ctx.at_corpus_path = str(self.att_ctx.parent.fetch_object_definition(res_opt).at_corpus_path) + '/' + self._resolved_name
|
[
"nebanfic@microsoft.com"
] |
nebanfic@microsoft.com
|
990db47ec28843c8eb2d8542de7e375dbb43c859
|
9c37742bdd09ccfb02da09be79e20b7333694d9b
|
/pyswagger/tests/v1_2/test_app.py
|
d65c18e48ea45b37e6f89ececb380e1a155dc7f9
|
[
"MIT"
] |
permissive
|
simudream/pyswagger
|
72eea9a24140d3dfbb4f6a4537e10a9b07c4d09f
|
1dcf7ab291d9535dfdb705e0cb0e2c6f2b0fb474
|
refs/heads/master
| 2020-12-11T05:32:38.335378
| 2015-01-22T11:39:10
| 2015-01-22T11:39:10
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 8,178
|
py
|
from pyswagger import SwaggerApp, errs
from ..utils import get_test_data_folder
from pyswagger.spec.v2_0.objects import (
Schema,
Operation,
)
import unittest
import httpretty
import os
import six
class HTTPGetterTestCase(unittest.TestCase):
""" test HTTPGetter """
@httpretty.activate
def test_http_getter(self):
""" make sure HTTPGetter works """
folder = get_test_data_folder(version='1.2', which='wordnik')
resource_list = user = pet = store = None
with open(os.path.join(folder, 'resource_list.json')) as f:
resource_list = f.read()
with open(os.path.join(folder, 'user.json')) as f:
user = f.read()
with open(os.path.join(folder, 'pet.json')) as f:
pet = f.read()
with open(os.path.join(folder, 'store.json')) as f:
store = f.read()
httpretty.register_uri(
httpretty.GET, 'http://petstore.swagger.wordnik.com/api/api-docs',
status=200,
body=resource_list
)
httpretty.register_uri(
httpretty.GET, 'http://petstore.swagger.wordnik.com/api/api-docs/user',
status=200,
body=user
)
httpretty.register_uri(
httpretty.GET, 'http://petstore.swagger.wordnik.com/api/api-docs/pet',
status=200,
body=pet
)
httpretty.register_uri(
httpretty.GET, 'http://petstore.swagger.wordnik.com/api/api-docs/store',
status=200,
body=store
)
local_app = SwaggerApp._create_('http://petstore.swagger.wordnik.com/api/api-docs')
self.assertEqual(sorted(local_app.raw._field_names_), sorted(['info', 'authorizations', 'apiVersion', 'swaggerVersion', 'apis']))
op = local_app.raw.apis['pet'].apis['updatePet']
self.assertEqual(sorted(op._field_names_), sorted([
'authorizations',
'consumes',
'defaultValue',
'deprecated',
'enum',
'format',
'items',
'maximum',
'method',
'minimum',
'nickname',
'parameters',
'path',
'produces',
'$ref',
'responseMessages',
'type',
'uniqueItems'
]))
class ValidationTestCase(unittest.TestCase):
""" test case for validation """
def setUp(self):
self.app = SwaggerApp.load(get_test_data_folder(version='1.2', which='err'))
def test_errs(self):
"""
"""
errs = self.app.validate(strict=False)
self.maxDiff = None
self.assertEqual(sorted(errs), sorted([
(('#/info', 'Info'), 'requirement description not meet.'),
(('#/info', 'Info'), 'requirement title not meet.'),
(('#/authorizations/oauth2', 'Authorization'), 'requirement type not meet.'),
(('#/authorizations/oauth2/grantTypes/implicit/loginEndpoint', 'LoginEndpoint'), 'requirement url not meet.'),
(('#/authorizations/oauth2/scopes/0', 'Scope'), 'requirement scope not meet.'),
(('#/authorizations/oauth2/grantTypes/authorization_code/tokenRequestEndpoint', 'TokenRequestEndpoint'), 'requirement url not meet.'),
(('#/apis/pet/apis/getPetById', 'Operation'), 'requirement method not meet.'),
(('#/apis/pet/apis/getPetById/parameters/0', 'Parameter'), 'requirement name not meet.'),
(('#/apis/pet/apis/getPetById/responseMessages/0', 'ResponseMessage'), 'requirement code not meet.'),
(('#/apis/pet/apis', 'Operation'), 'requirement nickname not meet.'),
(('#/apis/pet/models/Pet/properties/tags', 'Property'), 'array should be existed along with items'),
(('#/apis/pet/apis/getPetById/parameters/0', 'Parameter'), 'allowMultiple should be applied on path, header, or query parameters'),
(('#/apis/pet/apis/partialUpdate/parameters/1', 'Parameter'), 'body parameter with invalid name: qqq'),
(('#/apis/pet/apis/partialUpdate/parameters/0', 'Parameter'), 'void is only allowed in Operation object.')
]))
def test_raise_exception(self):
""" raise exceptions in strict mode """
self.assertRaises(errs.ValidationError, self.app.validate)
class SwaggerAppTestCase(unittest.TestCase):
""" test case for SwaggerApp """
def setUp(self):
folder = get_test_data_folder(
version='1.2',
)
def _hook(url):
p = six.moves.urllib.parse.urlparse(url)
if p.scheme != 'file':
return url
path = os.path.join(folder, p.path if not p.path.startswith('/') else p.path[1:])
return six.moves.urllib.parse.urlunparse(p[:2]+(path,)+p[3:])
self.app = SwaggerApp.load('wordnik', url_load_hook=_hook)
self.app.prepare()
def test_ref(self):
""" test ref function """
self.assertRaises(ValueError, self.app.resolve, None)
self.assertRaises(ValueError, self.app.resolve, '')
self.assertTrue(isinstance(self.app.resolve('#/definitions/user!##!User'), Schema))
self.assertTrue(isinstance(self.app.resolve('#/paths/~1api~1user~1{username}/put'), Operation))
self.assertEqual(self.app.resolve('#/paths/~1api~1store~1order/post/produces'), ['application/json'])
self.assertEqual(self.app.resolve('#/host'), 'petstore.swagger.wordnik.com')
# resolve with URL part
# refer to
# http://stackoverflow.com/questions/10246116/python-dereferencing-weakproxy
# for how to dereferencing weakref
self.assertEqual(
self.app.resolve('#/definitions/user!##!User').__repr__(),
self.app.resolve('file:///wordnik#/definitions/user!##!User').__repr__()
)
self.assertEqual(
self.app.resolve('#/paths/~1api~1user~1{username}/put').__repr__(),
self.app.resolve('file:///wordnik#/paths/~1api~1user~1{username}/put').__repr__()
)
def test_scope_dict(self):
""" ScopeDict is a syntactic suger
to access scoped named object, ex. Operation, Model
"""
# Operation
self.assertTrue(self.app.op['user', 'getUserByName'], Operation)
self.assertTrue(self.app.op['user', 'getUserByName'] is self.app.op['user!##!getUserByName'])
self.assertTrue(self.app.op['getUserByName'] is self.app.op['user!##!getUserByName'])
def test_shortcut(self):
""" a short cut to Resource, Operation, Model from SwaggerApp """
# Resource
# TODO: resource is now replaced by tags
#self.assertTrue(isinstance(app.rs['pet'], Resource))
#self.assertTrue(isinstance(app.rs['user'], Resource))
#self.assertTrue(isinstance(app.rs['store'], Resource))
# Operation
self.assertEqual(len(self.app.op.values()), 20)
self.assertEqual(sorted(self.app.op.keys()), sorted([
'pet!##!addPet',
'pet!##!deletePet',
'pet!##!findPetsByStatus',
'pet!##!findPetsByTags',
'pet!##!getPetById',
'pet!##!partialUpdate',
'pet!##!updatePet',
'pet!##!updatePetWithForm',
'pet!##!uploadFile',
'store!##!deleteOrder',
'store!##!getOrderById',
'store!##!placeOrder',
'user!##!createUser',
'user!##!createUsersWithArrayInput',
'user!##!createUsersWithListInput',
'user!##!deleteUser',
'user!##!getUserByName',
'user!##!loginUser',
'user!##!logoutUser',
'user!##!updateUser'
]))
self.assertTrue(self.app.op['user!##!getUserByName'], Operation)
# Model
d = self.app.resolve('#/definitions')
self.assertEqual(len(d.values()), 5)
self.assertEqual(sorted(d.keys()), sorted([
'pet!##!Category',
'pet!##!Pet',
'pet!##!Tag',
'store!##!Order',
'user!##!User'
]))
|
[
"missionaryliao@gmail.com"
] |
missionaryliao@gmail.com
|
4403e503e127c23cb397fe72eb4aca8267bc9fc4
|
a2d36e471988e0fae32e9a9d559204ebb065ab7f
|
/huaweicloud-sdk-cloudrtc/huaweicloudsdkcloudrtc/v2/model/update_url_auth_request.py
|
8044913f81326b931d750d21e5b323e8a54d90bf
|
[
"Apache-2.0"
] |
permissive
|
zhouxy666/huaweicloud-sdk-python-v3
|
4d878a90b8e003875fc803a61414788e5e4c2c34
|
cc6f10a53205be4cb111d3ecfef8135ea804fa15
|
refs/heads/master
| 2023-09-02T07:41:12.605394
| 2021-11-12T03:20:11
| 2021-11-12T03:20:11
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 6,802
|
py
|
# coding: utf-8
import re
import six
from huaweicloudsdkcore.utils.http_utils import sanitize_for_serialization
class UpdateUrlAuthRequest:
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
sensitive_list = []
openapi_types = {
'content_type': 'str',
'authorization': 'str',
'x_sdk_date': 'str',
'x_project_id': 'str',
'app_id': 'str',
'body': 'AppAuthReq'
}
attribute_map = {
'content_type': 'Content-Type',
'authorization': 'Authorization',
'x_sdk_date': 'X-Sdk-Date',
'x_project_id': 'X-Project-Id',
'app_id': 'app_id',
'body': 'body'
}
def __init__(self, content_type=None, authorization=None, x_sdk_date=None, x_project_id=None, app_id=None, body=None):
"""UpdateUrlAuthRequest - a model defined in huaweicloud sdk"""
self._content_type = None
self._authorization = None
self._x_sdk_date = None
self._x_project_id = None
self._app_id = None
self._body = None
self.discriminator = None
self.content_type = content_type
if authorization is not None:
self.authorization = authorization
if x_sdk_date is not None:
self.x_sdk_date = x_sdk_date
if x_project_id is not None:
self.x_project_id = x_project_id
self.app_id = app_id
if body is not None:
self.body = body
@property
def content_type(self):
"""Gets the content_type of this UpdateUrlAuthRequest.
        Content type.
:return: The content_type of this UpdateUrlAuthRequest.
:rtype: str
"""
return self._content_type
@content_type.setter
def content_type(self, content_type):
"""Sets the content_type of this UpdateUrlAuthRequest.
        Content type.
:param content_type: The content_type of this UpdateUrlAuthRequest.
:type: str
"""
self._content_type = content_type
@property
def authorization(self):
"""Gets the authorization of this UpdateUrlAuthRequest.
        Required when using AK/SK authentication; carries the authentication information.
:return: The authorization of this UpdateUrlAuthRequest.
:rtype: str
"""
return self._authorization
@authorization.setter
def authorization(self, authorization):
"""Sets the authorization of this UpdateUrlAuthRequest.
        Required when using AK/SK authentication; carries the authentication information.
:param authorization: The authorization of this UpdateUrlAuthRequest.
:type: str
"""
self._authorization = authorization
@property
def x_sdk_date(self):
"""Gets the x_sdk_date of this UpdateUrlAuthRequest.
        Required when using AK/SK authentication; the time at which the request was made.
:return: The x_sdk_date of this UpdateUrlAuthRequest.
:rtype: str
"""
return self._x_sdk_date
@x_sdk_date.setter
def x_sdk_date(self, x_sdk_date):
"""Sets the x_sdk_date of this UpdateUrlAuthRequest.
        Required when using AK/SK authentication; the time at which the request was made.
:param x_sdk_date: The x_sdk_date of this UpdateUrlAuthRequest.
:type: str
"""
self._x_sdk_date = x_sdk_date
@property
def x_project_id(self):
"""Gets the x_project_id of this UpdateUrlAuthRequest.
        Required when using AK/SK authentication; carries the project ID.
:return: The x_project_id of this UpdateUrlAuthRequest.
:rtype: str
"""
return self._x_project_id
@x_project_id.setter
def x_project_id(self, x_project_id):
"""Sets the x_project_id of this UpdateUrlAuthRequest.
        Required when using AK/SK authentication; carries the project ID.
:param x_project_id: The x_project_id of this UpdateUrlAuthRequest.
:type: str
"""
self._x_project_id = x_project_id
@property
def app_id(self):
"""Gets the app_id of this UpdateUrlAuthRequest.
        Application ID.
:return: The app_id of this UpdateUrlAuthRequest.
:rtype: str
"""
return self._app_id
@app_id.setter
def app_id(self, app_id):
"""Sets the app_id of this UpdateUrlAuthRequest.
        Application ID.
:param app_id: The app_id of this UpdateUrlAuthRequest.
:type: str
"""
self._app_id = app_id
@property
def body(self):
"""Gets the body of this UpdateUrlAuthRequest.
:return: The body of this UpdateUrlAuthRequest.
:rtype: AppAuthReq
"""
return self._body
@body.setter
def body(self, body):
"""Sets the body of this UpdateUrlAuthRequest.
:param body: The body of this UpdateUrlAuthRequest.
:type: AppAuthReq
"""
self._body = body
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
if attr in self.sensitive_list:
result[attr] = "****"
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
import simplejson as json
if six.PY2:
import sys
reload(sys)
sys.setdefaultencoding("utf-8")
return json.dumps(sanitize_for_serialization(self), ensure_ascii=False)
def __repr__(self):
"""For `print`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, UpdateUrlAuthRequest):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
|
[
"hwcloudsdk@huawei.com"
] |
hwcloudsdk@huawei.com
|
a448c9227d0b822d8e2f908cfc10bd93e53162b2
|
eacfc1c0b2acd991ec2cc7021664d8e79c9e58f6
|
/ccpnmr2.4/python/memops/gui/DataEntry.py
|
e60cff844461888a85150c46163e540f8db69eb0
|
[] |
no_license
|
edbrooksbank/ccpnmr2.4
|
cfecb0896dcf8978d796e6327f7e05a3f233a921
|
f279ca9bb2d972b1ce075dad5fcc16e6f4a9496c
|
refs/heads/master
| 2021-06-30T22:29:44.043951
| 2019-03-20T15:01:09
| 2019-03-20T15:01:09
| 176,757,815
| 0
| 1
| null | 2020-07-24T14:40:26
| 2019-03-20T14:59:23
|
HTML
|
UTF-8
|
Python
| false
| false
| 5,930
|
py
|
"""
======================COPYRIGHT/LICENSE START==========================
DataEntry.py: <write function here>
Copyright (C) 2005 Wayne Boucher, Rasmus Fogh, Tim Stevens and Wim Vranken (University of Cambridge and EBI/MSD)
=======================================================================
This library is free software; you can redistribute it and/or
modify it under the terms of the GNU Lesser General Public
License as published by the Free Software Foundation; either
version 2.1 of the License, or (at your option) any later version.
A copy of this license can be found in ../../../license/LGPL.license
This library is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public
License along with this library; if not, write to the Free Software
Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
======================COPYRIGHT/LICENSE END============================
for further information, please contact :
- CCPN website (http://www.ccpn.ac.uk/)
- PDBe website (http://www.ebi.ac.uk/pdbe/)
=======================================================================
If you are using this software for academic purposes, we suggest
quoting the following references:
===========================REFERENCE START=============================
R. Fogh, J. Ionides, E. Ulrich, W. Boucher, W. Vranken, J.P. Linge, M.
Habeck, W. Rieping, T.N. Bhat, J. Westbrook, K. Henrick, G. Gilliland,
H. Berman, J. Thornton, M. Nilges, J. Markley and E. Laue (2002). The
CCPN project: An interim report on a data model for the NMR community
(Progress report). Nature Struct. Biol. 9, 416-418.
Wim F. Vranken, Wayne Boucher, Tim J. Stevens, Rasmus
H. Fogh, Anne Pajon, Miguel Llinas, Eldon L. Ulrich, John L. Markley, John
Ionides and Ernest D. Laue (2005). The CCPN Data Model for NMR Spectroscopy:
Development of a Software Pipeline. Proteins 59, 687 - 696.
===========================REFERENCE END===============================
"""
import memops.gui.QueryDialogBox as QueryDialogBox
from memops.gui.FileSelectPopup import FileSelectPopup
def askPassword(title, prompt, parent = None):
return QueryDialogBox.askPassword(title, prompt, parent=parent)
def askString(title, prompt, initial_value = '', parent = None):
return QueryDialogBox.askString(title, prompt,initialvalue=initial_value,
parent=parent)
def askInteger(title, prompt, initial_value = '', min_value = None,
max_value = None, parent = None):
return QueryDialogBox.askInteger(title, prompt, initialvalue=initial_value,
minvalue=min_value, maxvalue=max_value, parent=parent)
def askFloat(title, prompt, initial_value = '', min_value = None,
max_value = None, parent = None):
return QueryDialogBox.askFloat(title, prompt, initialvalue=initial_value,
minvalue=min_value, maxvalue=max_value, parent=parent)
def askFile(title, prompt, initial_value = '', parent = None,
dismiss_text='Cancel', extra_dismiss_text = ''):
if (parent):
popup = FileSelectPopup(parent, title=title, prompt=prompt, show_file=True,
dismiss_text=dismiss_text,
extra_dismiss_text=extra_dismiss_text,
file=initial_value)
file = popup.getFile()
popup.destroy()
return file
else:
return askString(title, prompt, initial_value)
def askDir(title, prompt, initial_value = '', parent = None,
dismiss_text='Cancel', extra_dismiss_text = '', default_dir = None):
if (parent):
popup = FileSelectPopup(parent, title=title, prompt=prompt, show_file=False,
dismiss_text=dismiss_text,
extra_dismiss_text=extra_dismiss_text,
file=initial_value, default_dir = default_dir)
dir = popup.getDirectory()
popup.destroy()
return dir
else:
return askString(title, prompt, initial_value)
class DataEntry:
def askPassword(self, title, prompt, initial_value = '', parent = None, *args, **kw):
return askPassword(title, prompt, initial_value, parent)
def askString(self, title, prompt, initial_value = '', parent = None, *args, **kw):
return askString(title, prompt, initial_value, parent)
def askInteger(self, title, prompt, initial_value = '', min_value = None,
max_value = None, parent = None, *args, **kw):
return askInteger(title, prompt, initial_value, min_value, max_value, parent)
def askFloat(self, title, prompt, initial_value = '', min_value = None,
max_value = None, parent = None, *args, **kw):
return askFloat(title, prompt, initial_value, min_value, max_value, parent)
def askFile(self, title, prompt, initial_value = '', parent = None,
dismiss_text='Cancel', extra_dismiss_text = '', *args, **kw):
return askFile(title, prompt, initial_value, parent)
def askDir(self, title, prompt, initial_value = '', parent = None,
dismiss_text='Cancel', extra_dismiss_text = '', default_dir = None, *args, **kw):
return askDir(title, prompt, initial_value, parent, default_dir = default_dir)
dataEntry = DataEntry()
if (__name__ == '__main__'):
import Tkinter
r = Tkinter.Tk()
print dataEntry.askString('ask string title', 'ask string prompt')
print dataEntry.askInteger('ask integer title', 'ask integer prompt')
print dataEntry.askFloat('ask float title', 'ask float prompt')
print dataEntry.askFile('ask file title', 'ask file prompt', parent=r)
print dataEntry.askDir('ask dir title', 'ask dir prompt', parent=r)
|
[
"ejb66@le.ac.uk"
] |
ejb66@le.ac.uk
|
a605dfcfc2f4d00faa17e9fbac69fb61a709b560
|
b35469b3a3ef3ecb8da35a178ba0994bae2989b3
|
/kubevirt/models/v1_pci_host_device.py
|
65d45e6884a716731c600aef51e52b927476c143
|
[
"Apache-2.0"
] |
permissive
|
CHsixnine/client-python
|
4802d76bbe3761a1311038665d931349298bcd81
|
315335602923dacbc3b73b23339002d69a5a41cc
|
refs/heads/master
| 2023-03-20T22:45:25.578704
| 2021-03-17T07:34:18
| 2021-03-17T07:34:18
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 5,120
|
py
|
# coding: utf-8
"""
KubeVirt API
This is KubeVirt API an add-on for Kubernetes.
OpenAPI spec version: 1.0.0
Contact: kubevirt-dev@googlegroups.com
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from pprint import pformat
from six import iteritems
import re
class V1PciHostDevice(object):
"""
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'external_resource_provider': 'bool',
'pci_vendor_selector': 'str',
'resource_name': 'str'
}
attribute_map = {
'external_resource_provider': 'externalResourceProvider',
'pci_vendor_selector': 'pciVendorSelector',
'resource_name': 'resourceName'
}
def __init__(self, external_resource_provider=None, pci_vendor_selector=None, resource_name=None):
"""
V1PciHostDevice - a model defined in Swagger
"""
self._external_resource_provider = None
self._pci_vendor_selector = None
self._resource_name = None
if external_resource_provider is not None:
self.external_resource_provider = external_resource_provider
self.pci_vendor_selector = pci_vendor_selector
self.resource_name = resource_name
@property
def external_resource_provider(self):
"""
Gets the external_resource_provider of this V1PciHostDevice.
:return: The external_resource_provider of this V1PciHostDevice.
:rtype: bool
"""
return self._external_resource_provider
@external_resource_provider.setter
def external_resource_provider(self, external_resource_provider):
"""
Sets the external_resource_provider of this V1PciHostDevice.
:param external_resource_provider: The external_resource_provider of this V1PciHostDevice.
:type: bool
"""
self._external_resource_provider = external_resource_provider
@property
def pci_vendor_selector(self):
"""
Gets the pci_vendor_selector of this V1PciHostDevice.
:return: The pci_vendor_selector of this V1PciHostDevice.
:rtype: str
"""
return self._pci_vendor_selector
@pci_vendor_selector.setter
def pci_vendor_selector(self, pci_vendor_selector):
"""
Sets the pci_vendor_selector of this V1PciHostDevice.
:param pci_vendor_selector: The pci_vendor_selector of this V1PciHostDevice.
:type: str
"""
if pci_vendor_selector is None:
raise ValueError("Invalid value for `pci_vendor_selector`, must not be `None`")
self._pci_vendor_selector = pci_vendor_selector
@property
def resource_name(self):
"""
Gets the resource_name of this V1PciHostDevice.
:return: The resource_name of this V1PciHostDevice.
:rtype: str
"""
return self._resource_name
@resource_name.setter
def resource_name(self, resource_name):
"""
Sets the resource_name of this V1PciHostDevice.
:param resource_name: The resource_name of this V1PciHostDevice.
:type: str
"""
if resource_name is None:
raise ValueError("Invalid value for `resource_name`, must not be `None`")
self._resource_name = resource_name
def to_dict(self):
"""
Returns the model properties as a dict
"""
result = {}
for attr, _ in iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""
Returns the string representation of the model
"""
return pformat(self.to_dict())
def __repr__(self):
"""
For `print` and `pprint`
"""
return self.to_str()
def __eq__(self, other):
"""
Returns true if both objects are equal
"""
if not isinstance(other, V1PciHostDevice):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""
Returns true if both objects are not equal
"""
return not self == other
|
[
"travis@travis-ci.org"
] |
travis@travis-ci.org
|
374fb7f9548ddb214ed23c9f91baa6f51c6ecd9a
|
eb722922339781fa6bd9937e69383fcd06256738
|
/day1/kapua-python-client/swagger_client/models/user_query.py
|
f40b1b68ccf746a9a9f1ae2d1ffd2154a5689df1
|
[
"MIT"
] |
permissive
|
mrsrinivas/diec
|
6a0c5da26ff23170b71217bfbc810bb98a897a83
|
ae9a5203b506d5cc18cb381666351bf9ce6b9b6c
|
refs/heads/master
| 2021-01-05T05:41:19.394898
| 2020-01-15T06:24:33
| 2020-01-15T06:24:33
| 240,901,175
| 1
| 0
|
MIT
| 2020-02-16T13:59:53
| 2020-02-16T13:59:52
| null |
UTF-8
|
Python
| false
| false
| 6,757
|
py
|
# coding: utf-8
"""
Eclipse Kapua REST API
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen) # noqa: E501
OpenAPI spec version: 1.0
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
import pprint
import re # noqa: F401
import six
from swagger_client.models.kapua_sort_criteria import KapuaSortCriteria # noqa: F401,E501
from swagger_client.models.query_predicate import QueryPredicate # noqa: F401,E501
class UserQuery(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'limit': 'int',
'scope_id': 'str',
'fetch_attributes': 'list[str]',
'predicate': 'QueryPredicate',
'sort_criteria': 'KapuaSortCriteria',
'offset': 'int'
}
attribute_map = {
'limit': 'limit',
'scope_id': 'scopeId',
'fetch_attributes': 'fetchAttributes',
'predicate': 'predicate',
'sort_criteria': 'sortCriteria',
'offset': 'offset'
}
def __init__(self, limit=None, scope_id=None, fetch_attributes=None, predicate=None, sort_criteria=None, offset=None): # noqa: E501
"""UserQuery - a model defined in Swagger""" # noqa: E501
self._limit = None
self._scope_id = None
self._fetch_attributes = None
self._predicate = None
self._sort_criteria = None
self._offset = None
self.discriminator = None
if limit is not None:
self.limit = limit
if scope_id is not None:
self.scope_id = scope_id
if fetch_attributes is not None:
self.fetch_attributes = fetch_attributes
if predicate is not None:
self.predicate = predicate
if sort_criteria is not None:
self.sort_criteria = sort_criteria
if offset is not None:
self.offset = offset
@property
def limit(self):
"""Gets the limit of this UserQuery. # noqa: E501
:return: The limit of this UserQuery. # noqa: E501
:rtype: int
"""
return self._limit
@limit.setter
def limit(self, limit):
"""Sets the limit of this UserQuery.
:param limit: The limit of this UserQuery. # noqa: E501
:type: int
"""
self._limit = limit
@property
def scope_id(self):
"""Gets the scope_id of this UserQuery. # noqa: E501
:return: The scope_id of this UserQuery. # noqa: E501
:rtype: str
"""
return self._scope_id
@scope_id.setter
def scope_id(self, scope_id):
"""Sets the scope_id of this UserQuery.
:param scope_id: The scope_id of this UserQuery. # noqa: E501
:type: str
"""
self._scope_id = scope_id
@property
def fetch_attributes(self):
"""Gets the fetch_attributes of this UserQuery. # noqa: E501
:return: The fetch_attributes of this UserQuery. # noqa: E501
:rtype: list[str]
"""
return self._fetch_attributes
@fetch_attributes.setter
def fetch_attributes(self, fetch_attributes):
"""Sets the fetch_attributes of this UserQuery.
:param fetch_attributes: The fetch_attributes of this UserQuery. # noqa: E501
:type: list[str]
"""
self._fetch_attributes = fetch_attributes
@property
def predicate(self):
"""Gets the predicate of this UserQuery. # noqa: E501
:return: The predicate of this UserQuery. # noqa: E501
:rtype: QueryPredicate
"""
return self._predicate
@predicate.setter
def predicate(self, predicate):
"""Sets the predicate of this UserQuery.
:param predicate: The predicate of this UserQuery. # noqa: E501
:type: QueryPredicate
"""
self._predicate = predicate
@property
def sort_criteria(self):
"""Gets the sort_criteria of this UserQuery. # noqa: E501
:return: The sort_criteria of this UserQuery. # noqa: E501
:rtype: KapuaSortCriteria
"""
return self._sort_criteria
@sort_criteria.setter
def sort_criteria(self, sort_criteria):
"""Sets the sort_criteria of this UserQuery.
:param sort_criteria: The sort_criteria of this UserQuery. # noqa: E501
:type: KapuaSortCriteria
"""
self._sort_criteria = sort_criteria
@property
def offset(self):
"""Gets the offset of this UserQuery. # noqa: E501
:return: The offset of this UserQuery. # noqa: E501
:rtype: int
"""
return self._offset
@offset.setter
def offset(self, offset):
"""Sets the offset of this UserQuery.
:param offset: The offset of this UserQuery. # noqa: E501
:type: int
"""
self._offset = offset
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
if issubclass(UserQuery, dict):
for key, value in self.items():
result[key] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, UserQuery):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
|
[
"noreply@github.com"
] |
mrsrinivas.noreply@github.com
|
a65ad9748193a80ca6ea3a3b9948f43ba7938fbe
|
60a831fb3c92a9d2a2b52ff7f5a0f665d4692a24
|
/IronPythonStubs/release/stubs.min/System/ComponentModel/__init___parts/DataObjectAttribute.py
|
fa31cc9ac8a0bb5e911aa72b7329df96aa63c06d
|
[
"MIT"
] |
permissive
|
shnlmn/Rhino-Grasshopper-Scripts
|
a9411098c5d1bbc55feb782def565d535b27b709
|
0e43c3c1d09fb12cdbd86a3c4e2ba49982e0f823
|
refs/heads/master
| 2020-04-10T18:59:43.518140
| 2020-04-08T02:49:07
| 2020-04-08T02:49:07
| 161,219,695
| 11
| 2
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,065
|
py
|
class DataObjectAttribute(Attribute,_Attribute):
"""
Identifies a type as an object suitable for binding to an System.Web.UI.WebControls.ObjectDataSource object. This class cannot be inherited.
DataObjectAttribute()
DataObjectAttribute(isDataObject: bool)
"""
def Equals(self,obj):
"""
Equals(self: DataObjectAttribute,obj: object) -> bool
Determines whether this instance of System.ComponentModel.DataObjectAttribute fits the pattern
of another object.
obj: An object to compare with this instance of System.ComponentModel.DataObjectAttribute.
Returns: true if this instance is the same as the instance specified by the obj parameter; otherwise,
false.
"""
pass
def GetHashCode(self):
"""
GetHashCode(self: DataObjectAttribute) -> int
Returns the hash code for this instance.
Returns: A 32-bit signed integer hash code.
"""
pass
def IsDefaultAttribute(self):
"""
IsDefaultAttribute(self: DataObjectAttribute) -> bool
Gets a value indicating whether the current value of the attribute is the default value for the
attribute.
Returns: true if the current value of the attribute is the default; otherwise,false.
"""
pass
def __eq__(self,*args):
""" x.__eq__(y) <==> x==y """
pass
def __init__(self,*args):
""" x.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signature """
pass
@staticmethod
def __new__(self,isDataObject=None):
"""
__new__(cls: type)
__new__(cls: type,isDataObject: bool)
"""
pass
def __ne__(self,*args):
pass
IsDataObject=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets a value indicating whether an object should be considered suitable for binding to an System.Web.UI.WebControls.ObjectDataSource object at design time.
Get: IsDataObject(self: DataObjectAttribute) -> bool
"""
DataObject=None
Default=None
NonDataObject=None
|
[
"magnetscoil@gmail.com"
] |
magnetscoil@gmail.com
|
960c42bb96022428399d3f6c90afa9aafe47ae6a
|
09e8c92187ff8d7a726727041e2dd80850dcce3d
|
/leetcode/965_univalued_binary_tree.py
|
e0da41251305a9165e61a661dbd73b04870d0e67
|
[] |
no_license
|
kakru/puzzles
|
6dd72bd0585f526e75d026f3ba2446b0c14f60e0
|
b91bdf0e68605f7e517446f8a00b1e0f1897c24d
|
refs/heads/master
| 2020-04-09T09:47:31.341475
| 2019-05-03T21:24:41
| 2019-05-03T21:24:41
| 160,246,660
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 970
|
py
|
#!/usr/bin/env python3
# Definition for a binary tree node.
class TreeNode(object):
def __init__(self, x, left=None, right=None):
self.val = x
self.left = left
self.right = right
class Solution(object):
def isUnivalTree(self, root):
"""
:type root: TreeNode
:rtype: bool
"""
val = root.val
stack = [root]
while stack:
p = stack.pop()
if p.val != val: return False
if p.left: stack.append(p.left)
if p.right: stack.append(p.right)
return True
t = TreeNode(1,
TreeNode(1,
TreeNode(1),
TreeNode(1)
),
TreeNode(1,
None,
TreeNode(1)
)
)
print(Solution().isUnivalTree(t), True)
t = TreeNode(2,
TreeNode(2,
TreeNode(5),
TreeNode(2)
),
TreeNode(2)
)
print(Solution().isUnivalTree(t), False)
|
[
"karol@kruzelecki.com"
] |
karol@kruzelecki.com
|
cc1b9a672163c2594baee1485636929c3ba41bf0
|
3955c3f367a3a60f8602dcb4609faec9898438bb
|
/graylog/apis/systemshutdown_api.py
|
255334f55b57fe4bd65f0fe6abe937ca07111116
|
[
"Apache-2.0"
] |
permissive
|
MinhKMA/graylog.py
|
e89c34defa5422d59d0a501355058f5eb2dfe68c
|
3118f4a49c91c2cbbd660523b0ab99e56fbfd861
|
refs/heads/master
| 2021-05-06T21:03:06.946509
| 2016-09-23T04:31:13
| 2016-09-23T04:31:13
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 5,316
|
py
|
# coding: utf-8
"""
    No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen)
OpenAPI spec version: 2.1.1+01d50e5
Generated by: https://github.com/swagger-api/swagger-codegen.git
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from __future__ import absolute_import
import sys
import os
import re
# python 2 and python 3 compatibility library
from six import iteritems
from ..configuration import Configuration
from ..api_client import ApiClient
class SystemshutdownApi(object):
"""
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client=None):
config = Configuration()
if api_client:
self.api_client = api_client
else:
if not config.api_client:
config.api_client = ApiClient()
self.api_client = config.api_client
def shutdown(self, **kwargs):
"""
Shutdown this node gracefully.
Attempts to process all buffered and cached messages before exiting, shuts down inputs first to make sure that no new messages are accepted.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.shutdown(callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.shutdown_with_http_info(**kwargs)
else:
(data) = self.shutdown_with_http_info(**kwargs)
return data
def shutdown_with_http_info(self, **kwargs):
"""
Shutdown this node gracefully.
Attempts to process all buffered and cached messages before exiting, shuts down inputs first to make sure that no new messages are accepted.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.shutdown_with_http_info(callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = []
all_params.append('callback')
all_params.append('_return_http_data_only')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method shutdown" % key
)
params[key] = val
del params['kwargs']
resource_path = '/system/shutdown/shutdown'.replace('{format}', 'json')
path_params = {}
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept([])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type([])
# Authentication setting
auth_settings = []
return self.api_client.call_api(resource_path, 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None,
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'))
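# Editorial usage sketch (not generated code): the host below is a placeholder and
# the exact ApiClient constructor arguments are an assumption; shutdown() blocks by
# default, and the callback form returns the request thread as described above.
# client = ApiClient(host="http://graylog.example.org:9000/api")   # hypothetical host
# api = SystemshutdownApi(api_client=client)
# api.shutdown()                                                    # synchronous request
# thread = api.shutdown(callback=lambda response: None)             # asynchronous request
# thread.join()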
|
[
"on99@users.noreply.github.com"
] |
on99@users.noreply.github.com
|
77cfd36b65d609b44e3fa1b960fbfb54748bfadd
|
e9172452ed3777653ec7a4c7ef6d2269a2309a4c
|
/pandasRollingStats.py
|
30244426f797b5c72c4db22f9f4bba209db9fc6a
|
[] |
no_license
|
aiporre/QuinoaMarketForecast
|
ec7163ea52e7c63c34448c302d4539b96270a3dd
|
b76bf5380b930859392a7c6c46eade2464a94143
|
refs/heads/master
| 2021-09-24T09:20:13.704502
| 2016-10-03T06:51:14
| 2016-10-03T06:51:14
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,524
|
py
|
import pandas as pd
import quandl
import matplotlib.pyplot as plt
from matplotlib import style
style.use('fivethirtyeight')
def get_zinc_price():
    api_key = open('data/myApiKey.txt', 'r').read()
    zinc = pd.DataFrame(quandl.get('ODA/PZINC_USD', authtoken=api_key))
    zinc.to_pickle('data/zinc.pickle')
    return zinc
def get_wheat_price():
    api_key = open('data/myApiKey.txt', 'r').read()
    wheat = pd.DataFrame(quandl.get('ODA/PWHEAMT_USD', authtoken=api_key))
    wheat.to_pickle('data/wheat.pickle')
    return wheat
fig = plt.figure()
ax1 = plt.subplot2grid((4,1),(0,0))
ax2 = plt.subplot2grid((4,1),(1,0))
ax3 = plt.subplot2grid((4,1),(2,0))
ax4 = plt.subplot2grid((4,1),(3,0))
# read prices of zinc
try:
zinc = pd.read_pickle('data/zinc.pickle')
except:
zinc = get_zinc_price()
# read prices of wheat
try:
wheat = pd.read_pickle('data/wheat.pickle')
except:
wheat = get_wheat_price()
# calculating rolling statistics
zinc.columns = ['price_z']
wheat.columns = ['price_w']
zw = zinc.join(wheat)
zinc['priceRA'] = zinc['price_z'].rolling(12).mean()
zinc['priceRS'] = zinc['price_z'].rolling(12).std()
print(zw.head(10))
zinc_wheat_corr = zw['price_z'].rolling(12).corr(zw['price_w'])
print(zinc.head(15))
print(zinc_wheat_corr.head(15))
# zinc.dropna(inplace=True)  # possible to use dropna here
zinc[['price_z','priceRA']].plot(ax = ax1)
zinc['priceRS'].plot(ax = ax2)
zw.plot(ax = ax3)
zinc_wheat_corr.plot(ax = ax4)
plt.show()
# The rolling standard deviation helps to filter data points that don't fit
# and to understand the volatility of the series.
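# A minimal sketch of that filtering idea (editorial addition, not part of the
# original analysis): keep only the months where the zinc price stays within two
# rolling standard deviations of the rolling average computed above.
within_band = (zinc['price_z'] - zinc['priceRA']).abs() <= 2 * zinc['priceRS']
zinc_filtered = zinc[within_band]
print(zinc_filtered.tail())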
|
[
"ariel.iporre.rivas@gmail.com"
] |
ariel.iporre.rivas@gmail.com
|
039b5a5d6166730f71fa8dbae29bca022fb667b1
|
a3cc7286d4a319cb76f3a44a593c4a18e5ddc104
|
/lib/surface/logging/metrics/delete.py
|
0fce2d106ce17d119936f16b3a3b14351d2e6cd1
|
[
"LicenseRef-scancode-unknown-license-reference",
"Apache-2.0"
] |
permissive
|
jordanistan/Google-Cloud-SDK
|
f2c6bb7abc2f33b9dfaec5de792aa1be91154099
|
42b9d7914c36a30d1e4b84ae2925df7edeca9962
|
refs/heads/master
| 2023-09-01T01:24:53.495537
| 2023-08-22T01:12:23
| 2023-08-22T01:12:23
| 127,072,491
| 0
| 1
|
NOASSERTION
| 2023-08-22T01:12:24
| 2018-03-28T02:31:19
|
Python
|
UTF-8
|
Python
| false
| false
| 1,918
|
py
|
# Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""'logging metrics delete' command."""
from googlecloudsdk.api_lib.logging import util
from googlecloudsdk.calliope import base
from googlecloudsdk.core import log
from googlecloudsdk.core.console import console_io
class Delete(base.DeleteCommand):
"""Deletes a logs-based metric."""
@staticmethod
def Args(parser):
"""Register flags for this command."""
parser.add_argument(
'metric_name', help='The name of the metric to delete.')
def Run(self, args):
"""This is what gets called when the user runs this command.
Args:
args: an argparse namespace. All the arguments that were provided to this
command invocation.
"""
console_io.PromptContinue(
'Really delete metric [%s]?' % args.metric_name, cancel_on_no=True)
util.GetClient().projects_metrics.Delete(
util.GetMessages().LoggingProjectsMetricsDeleteRequest(
metricName=util.CreateResourceName(
util.GetCurrentProjectParent(), 'metrics', args.metric_name)))
log.DeletedResource(args.metric_name)
Delete.detailed_help = {
'DESCRIPTION': """\
          Deletes a logs-based metric.
""",
'EXAMPLES': """\
To delete a metric called high_severity_count, run:
$ {command} high_severity_count
""",
}
|
[
"jordan.robison@gmail.com"
] |
jordan.robison@gmail.com
|
70e4497255159185bbd2c4946a1eb958f6f1520f
|
4178f2916d2da72cbb45454fbed941dcfe8f6460
|
/POM_test/TestCase/Detail_Profile/TC_005.py
|
a8f7d88a1374eb3444ef223474fdf03a291f71c2
|
[] |
no_license
|
maxcrup007/Selenium_Webdriver_Python
|
15196cb04ba5cafdc5b776c26d167f0b48fb0e14
|
6be7f0b9f53df1ba592957029e8a4d22e409d1c4
|
refs/heads/main
| 2023-03-24T21:04:31.976451
| 2021-03-22T09:16:04
| 2021-03-22T09:16:04
| 349,379,454
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,008
|
py
|
# Test logging in and accessing the "Personal information" (profile) page
import time
import unittest
import sys
from selenium import webdriver
from selenium.webdriver import ActionChains
from POM_test.login import *
from POM_test.profilePage import *
from POM_test.scrollbar import *
import os
sys.path.append(os.path.join(os.path.dirname(__file__), "..", ".."))
class TestProfile_5(unittest.TestCase):
@classmethod
def setUpClass(self):
self.driver = webdriver.Chrome(executable_path="C:/Users/voraw/Downloads/Compressed/webdriver/chromedriver/chromedriver")
self.driver.implicitly_wait(10)
self.driver.maximize_window()
def test_login_valid(self):
driver = self.driver
self.driver.get("https://top-upstream-client.mulberrysoft.com/#/older/activity")
login = LoginPage(driver)
scroll = ScrollbarPage(driver)
login.enter_username("demo005")
login.enter_password("123456")
login.click_login()
time.sleep(2)
profile = ProfilePage(driver)
profile.into_profilePage()
time.sleep(5)
profile.profile_name_input("vatcharapong mahachot")
time.sleep(2)
profile.profile_email_input("vatcharapong11@hotmail.com")
time.sleep(2)
profile.profile_phone_number("086799315")
time.sleep(2)
scroll.profile_scrolling()
time.sleep(2)
profile.profile_address_text("555 หมู่17")
time.sleep(2)
scroll.profile_scrolling2()
time.sleep(2)
profile.profile_submit_confirm()
time.sleep(2)
@classmethod
def tearDownClass(cls):
cls.driver.close()
cls.driver.quit()
print("Test Completed")
if __name__ == '__main__':
unittest.main()
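# Editorial sketch (not part of the original test): the fixed time.sleep() calls
# above could be replaced with explicit waits so the test only pauses as long as
# needed. The locator below is hypothetical and would need to match the real page.
# from selenium.webdriver.common.by import By
# from selenium.webdriver.support.ui import WebDriverWait
# from selenium.webdriver.support import expected_conditions as EC
# WebDriverWait(driver, 10).until(
#     EC.visibility_of_element_located((By.CSS_SELECTOR, "form.profile-form")))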
|
[
"36732487+maxcrup007@users.noreply.github.com"
] |
36732487+maxcrup007@users.noreply.github.com
|
2a1e29bb5786850365a0cf5fca0e7f577085fec3
|
36e593943be060ca5ea74a3d45923aba422ad2c9
|
/ThinkBayes/code/dungeons.py
|
0df9ed07edd9dfe1089ba8c63d598987c192d448
|
[] |
no_license
|
xjr7670/book_practice
|
a73f79437262bb5e3b299933b7b1f7f662a157b5
|
5a562d76830faf78feec81bc11190b71eae3a799
|
refs/heads/master
| 2023-08-28T19:08:52.329127
| 2023-08-24T09:06:00
| 2023-08-24T09:06:00
| 101,477,574
| 3
| 1
| null | 2021-06-10T18:38:54
| 2017-08-26T09:56:02
|
Python
|
UTF-8
|
Python
| false
| false
| 2,831
|
py
|
"""This file contains code for use with "Think Bayes",
by Allen B. Downey, available from greenteapress.com
Copyright 2012 Allen B. Downey
License: GNU GPLv3 http://www.gnu.org/licenses/gpl.html
"""
import random
import thinkbayes
import thinkplot
FORMATS = ['pdf', 'eps', 'png']
class Die(thinkbayes.Pmf):
"""Represents the PMF of outcomes for a die."""
def __init__(self, sides, name=''):
"""Initializes the die.
sides: int number of sides
name: string
"""
thinkbayes.Pmf.__init__(self, name=name)
        for x in range(1, sides+1):
self.Set(x, 1)
self.Normalize()
def PmfMax(pmf1, pmf2):
"""Computes the distribution of the max of values drawn from two Pmfs.
pmf1, pmf2: Pmf objects
returns: new Pmf
"""
res = thinkbayes.Pmf()
for v1, p1 in pmf1.Items():
for v2, p2 in pmf2.Items():
res.Incr(max(v1, v2), p1*p2)
return res
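# Worked example (editorial note, values not from the original): for two fair
# four-sided dice, PmfMax assigns P(max = 1) = 1/16, P(max = 2) = 3/16,
# P(max = 3) = 5/16 and P(max = 4) = 7/16, since max(v1, v2) = k for exactly
# the 2k - 1 ordered pairs whose larger value is k.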
def main():
pmf_dice = thinkbayes.Pmf()
pmf_dice.Set(Die(4), 5)
pmf_dice.Set(Die(6), 4)
pmf_dice.Set(Die(8), 3)
pmf_dice.Set(Die(12), 2)
pmf_dice.Set(Die(20), 1)
pmf_dice.Normalize()
mix = thinkbayes.Pmf()
for die, weight in pmf_dice.Items():
for outcome, prob in die.Items():
mix.Incr(outcome, weight*prob)
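    # MakeMixture builds the same mixture distribution directly, replacing the manual loop above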
mix = thinkbayes.MakeMixture(pmf_dice)
thinkplot.Hist(mix, width=0.9)
thinkplot.Save(root='dungeons3',
xlabel='Outcome',
ylabel='Probability',
formats=FORMATS)
random.seed(17)
d6 = Die(6, 'd6')
dice = [d6] * 3
three = thinkbayes.SampleSum(dice, 1000)
three.name = 'sample'
three.Print()
three_exact = d6 + d6 + d6
three_exact.name = 'exact'
three_exact.Print()
thinkplot.PrePlot(num=2)
thinkplot.Pmf(three)
thinkplot.Pmf(three_exact, linestyle='dashed')
thinkplot.Save(root='dungeons1',
xlabel='Sum of three d6',
ylabel='Probability',
axis=[2, 19, 0, 0.15],
formats=FORMATS)
thinkplot.Clf()
thinkplot.PrePlot(num=1)
# compute the distribution of the best attribute the hard way
best_attr2 = PmfMax(three_exact, three_exact)
best_attr4 = PmfMax(best_attr2, best_attr2)
best_attr6 = PmfMax(best_attr4, best_attr2)
# thinkplot.Pmf(best_attr6)
# and the easy way
best_attr_cdf = three_exact.Max(6)
best_attr_cdf.name = ''
best_attr_pmf = thinkbayes.MakePmfFromCdf(best_attr_cdf)
best_attr_pmf.Print()
thinkplot.Pmf(best_attr_pmf)
thinkplot.Save(root='dungeons2',
xlabel='Best of three d6',
ylabel='Probability',
axis=[2, 19, 0, 0.23],
formats=FORMATS)
if __name__ == '__main__':
main()
|
[
"xjr30226@126.com"
] |
xjr30226@126.com
|
9cdb76e81612b5b87a3078f6f2c985f285dbbe6e
|
be7a79f3c590f0923f1e793c6a36cfebd9ca4d01
|
/brocolli/converter/onnx_layers/concat_func.py
|
32b54e65699b614aff8c4d4dff1d7f195fd89e83
|
[
"MIT"
] |
permissive
|
inisis/brocolli
|
f255d44dc9148fd2b3bc82f6a21e429a579399b4
|
46a3d8c5e19e481746a9c8a85c5e9a71a49b846c
|
refs/heads/master
| 2023-07-22T09:37:19.480983
| 2023-07-17T14:25:35
| 2023-07-17T14:25:35
| 168,733,444
| 326
| 72
|
MIT
| 2023-06-04T17:03:43
| 2019-02-01T17:17:22
|
Python
|
UTF-8
|
Python
| false
| false
| 905
|
py
|
from loguru import logger
from onnx import helper
from onnx import TensorProto as tp
from .base_layer import BaseLayer
class ConcatFunc(BaseLayer):
def __init__(self, source_node, module=None, auto_gen=True):
super(ConcatFunc, self).__init__(source_node, module, auto_gen)
def get_concat_attr(self):
attr_dict = {"axis": []}
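        # torch.cat's "dim" argument carries over directly as the ONNX Concat "axis"
        # attribute (e.g. torch.cat(xs, dim=1) becomes Concat with axis=1), with 0 as the fallback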
dim = self.get_value_by_key_or_index("dim", 1, 0)
attr_dict["axis"] = dim
return attr_dict
def generate_node(self, name=None, params=None, attr_dict=None):
if name is not None:
self._name = name
if attr_dict is None:
attr_dict = self.get_concat_attr()
node = helper.make_node(
"Concat", self._in_names, self._out_names, self._name, **attr_dict
)
logger.info(f"{self.__class__.__name__}: {self._name} created")
self._node.append(node)
|
[
"desmond.yao@buaa.edu.cn"
] |
desmond.yao@buaa.edu.cn
|
a0a83028a3a6053fbf17d8665c12eeb4ad4e51ef
|
c47340ae6bcac6002961cc2c6d2fecb353c1e502
|
/test/test_passwords_object.py
|
f302c422a7a0449e34de00837f7cdeffa116807f
|
[
"MIT"
] |
permissive
|
rafaeldelrey/controlm_py
|
6d9f56b8b6e72750f329d85b932ace6c41002cbd
|
ed1eb648d1d23e587321227217cbfcc5065535ab
|
refs/heads/main
| 2023-04-23T09:01:32.024725
| 2021-05-19T00:25:53
| 2021-05-19T00:25:53
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 911
|
py
|
# coding: utf-8
"""
Control-M Services
Provides access to BMC Control-M Services # noqa: E501
OpenAPI spec version: 9.20.115
Contact: customer_support@bmc.com
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import unittest
import controlm_py
from controlm_py.models.passwords_object import PasswordsObject # noqa: E501
from controlm_py.rest import ApiException
class TestPasswordsObject(unittest.TestCase):
"""PasswordsObject unit test stubs"""
def setUp(self):
pass
def tearDown(self):
pass
def testPasswordsObject(self):
"""Test PasswordsObject"""
# FIXME: construct object with mandatory attributes with example values
# model = controlm_py.models.passwords_object.PasswordsObject() # noqa: E501
pass
if __name__ == '__main__':
unittest.main()
|
[
"dcompane@gmail.com"
] |
dcompane@gmail.com
|
5231b1176e669d8aa95fff862a57be460421e78e
|
ae2695f60480aa9dbe2acf68309d7918b67c6954
|
/alembic/versions/aca6937e73_committee_summary_te.py
|
d71879fcec60efa04167e13acbb5c7c1a6ddbb46
|
[
"MIT"
] |
permissive
|
mgax/mptracker
|
0853dd11a7b15bce8d535eb86f65c1e37596a4e6
|
e8d3c489aed36c70f81e89626f02e735e5890435
|
refs/heads/master
| 2023-02-22T03:53:26.481927
| 2020-08-19T09:25:58
| 2020-08-19T09:25:58
| 11,983,896
| 4
| 6
|
MIT
| 2023-02-02T07:16:26
| 2013-08-08T18:52:45
|
Python
|
UTF-8
|
Python
| false
| false
| 278
|
py
|
revision = 'aca6937e73'
down_revision = '58f2cb9046f'
from alembic import op
import sqlalchemy as sa
def upgrade():
op.add_column('committee_summary',
sa.Column('text', sa.Text(), nullable=True))
def downgrade():
op.drop_column('committee_summary', 'text')
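# Editorial note (not part of the original migration): this revision is applied and
# rolled back with the standard Alembic CLI, for example:
#   alembic upgrade aca6937e73
#   alembic downgrade 58f2cb9046f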
|
[
"alex@grep.ro"
] |
alex@grep.ro
|
f510be9f877cf397ceb2bf6817365f456d8d5106
|
6490638f15a2dfbe0cec9725186f9784d57c92f0
|
/SCOS/__init__.py
|
a6e179ad6b3b0a273223cde1aa960d4a7e93d834
|
[
"MIT"
] |
permissive
|
khawatkom/SpacePyLibrary
|
af9c490ef796b9d37a13298c41df1fb5bf6b3cee
|
c94415e9d85519f345fc56938198ac2537c0c6d0
|
refs/heads/master
| 2020-05-14T21:52:39.388979
| 2019-04-17T17:06:04
| 2019-04-17T17:06:04
| 181,970,668
| 1
| 0
| null | 2019-04-17T21:26:44
| 2019-04-17T21:26:44
| null |
UTF-8
|
Python
| false
| false
| 1,145
|
py
|
#******************************************************************************
# (C) 2018, Stefan Korner, Austria *
# *
# The Space Python Library is free software; you can redistribute it and/or *
# modify it under under the terms of the MIT License as published by the *
# Massachusetts Institute of Technology. *
# *
# The Space Python Library is distributed in the hope that it will be useful, *
# but WITHOUT ANY WARRANTY; without even the implied warranty of *
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the MIT License *
# for more details. *
#******************************************************************************
# SCOS-2000 Functionality *
#******************************************************************************
__all__ = ["ENV", "MIB"]
|
[
"korner-hajek@gmx.at"
] |
korner-hajek@gmx.at
|
e4ce7e967120ec413c360cfb38e9419d4965a57c
|
5abf069ff84cb7ea465069c258c144460649da35
|
/desktop/toolkit/qscintilla2/actions.py
|
813c39c4e18bf77a3179ae66180e0b5080e6f9a0
|
[] |
no_license
|
poyraz76/Packages-Systemd
|
7628cf6f6a8808f8766735551956e3dd8da9a2a9
|
a515ea0275dc0d8ec38fb6eaacc85904dde9f286
|
refs/heads/master
| 2021-01-09T05:51:48.542336
| 2017-02-04T10:25:22
| 2017-02-04T10:25:22
| 80,849,530
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,161
|
py
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Licensed under the GNU General Public License, version 3.
# See the file http://www.gnu.org/licenses/gpl.txt
from pisi.actionsapi import autotools
from pisi.actionsapi import pisitools
from pisi.actionsapi import shelltools
from pisi.actionsapi import pythonmodules
from pisi.actionsapi import get
from pisi.actionsapi import qt5
WorkDir = "QScintilla-gpl-%s" % get.srcVERSION()
NoStrip = ["/usr/share/doc"]
def setup():
shelltools.cd("Qt4Qt5")
shelltools.system("qmake qscintilla.pro")
# Change C/XXFLAGS
pisitools.dosed("Makefile", "^CFLAGS.*\\$\\(DEFINES\\)", "CFLAGS = %s -fPIC $(DEFINES)" % get.CFLAGS())
pisitools.dosed("Makefile", "^CXXFLAGS.*\\$\\(DEFINES\\)", "CXXFLAGS = %s -fPIC $(DEFINES)" % get.CXXFLAGS())
# Get designer plugin's Makefile
shelltools.cd("../designer-Qt4Qt5/")
shelltools.system("qmake designer.pro INCLUDEPATH+=../Qt4Qt5 QMAKE_LIBDIR+=../Qt4Qt5")
# Change C/XXFLAGS of designer plugin's makefile
pisitools.dosed("Makefile", "^CFLAGS.*\\$\\(DEFINES\\)", "CFLAGS = %s -fPIC $(DEFINES)" % get.CFLAGS())
pisitools.dosed("Makefile", "^CXXFLAGS.*\\$\\(DEFINES\\)", "CXXFLAGS = %s -fPIC $(DEFINES)" % get.CXXFLAGS())
def build():
shelltools.system("cp -rf Python Python3")
shelltools.cd("Qt4Qt5")
qt5.make()
shelltools.cd("../designer-Qt4Qt5/")
qt5.make()
# Get Makefile of qscintilla-python via sip
shelltools.cd("../Python")
pythonmodules.run("configure.py -n ../Qt4Qt5 -o ../Qt4Qt5 -c --pyqt=PyQt5 --pyqt-sipdir=/usr/share/sip/Py2Qt5 --qsci-sipdir=/usr/share/sip/Py2Qt5 --sip-incdir=/usr/lib/python2.7/site-packages --qmake /usr/bin/qmake")
pisitools.dosed("Makefile", "/usr/include/qt/QtPrintSupport", "/usr/include/qt5/QtPrintSupport")
pisitools.dosed("Makefile", "/usr/include/qt/QtWidgets", "/usr/include/qt5/QtWidgets")
autotools.make()
shelltools.cd("../Python3")
pythonmodules.run("configure.py -n ../Qt4Qt5 -o ../Qt4Qt5 -c --pyqt=PyQt5 --qmake /usr/bin/qmake", pyVer = "3")
pisitools.dosed("Makefile", "/usr/include/qt/QtPrintSupport", "/usr/include/qt5/QtPrintSupport")
pisitools.dosed("Makefile", "/usr/include/qt/QtWidgets", "/usr/include/qt5/QtWidgets")
autotools.make()
def install():
shelltools.cd("Qt4Qt5")
qt5.install("INSTALL_ROOT=%s" % get.installDIR())
shelltools.cd("../designer-Qt4Qt5/")
qt5.install("INSTALL_ROOT=%s" % get.installDIR())
#build and install qscintilla-python
shelltools.cd("../Python3")
#autotools.rawInstall("DESTDIR=%s" % get.installDIR())
qt5.install("INSTALL_ROOT=%s" % get.installDIR())
pisitools.insinto("/usr/lib/python3.4/site-packages/PyQt5", "Qsci.so")
shelltools.cd("../Python")
#autotools.rawInstall("DESTDIR=%s" % get.installDIR())
qt5.install("INSTALL_ROOT=%s" % get.installDIR())
pisitools.insinto("/usr/lib/python2.7/site-packages/PyQt5", "Qsci.so")
shelltools.cd("..")
pisitools.dohtml("doc/html-Qt4Qt5/")
pisitools.insinto("/usr/share/doc/%s/Scintilla" % get.srcNAME(), "doc/Scintilla/*")
pisitools.dodoc("LICENSE*", "NEWS", "README")
|
[
"ergunsalman@hotmail.com"
] |
ergunsalman@hotmail.com
|
a46e60ebdf24c5dc1a7a082a563e503deea9c428
|
9977e4a5cb94760b380bd0de0faab9c04a3d94db
|
/examples/plot_simulation2d.py
|
84f7ae9dcd7f6be0d65082772180cb538387b9ec
|
[] |
no_license
|
vishalbelsare/mtw
|
82e76826f1382b9602eadad835a9b6355923505c
|
e15e918774bb5b1e020c5b87572004a552eb571e
|
refs/heads/master
| 2022-11-28T18:57:55.045921
| 2019-08-07T21:55:16
| 2019-08-07T21:55:16
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,779
|
py
|
"""
====================
MTW synthetic images
====================
This example generates a few synthetic sparse images (one per task, used as
regression coefficients) which are fed to random Gaussian design matrices X.
Increasing the Wasserstein
hyperparameter increases consistency across regression coefficients.
"""
import numpy as np
import matplotlib.pyplot as plt
from matplotlib import cm
from mtw import MTW, utils
from mtw.examples_utils import (generate_dirac_images, gaussian_design,
contour_coefs)
print(__doc__)
print("Generating data...")
seed = 42
width, n_tasks = 32, 4
nnz = 3 # number of non zero elements per image
overlap = 0.
positive = True
n_features = width ** 2
n_samples = n_features // 2
"""Generate Coefs and X, Y data..."""
coefs = generate_dirac_images(width, n_tasks, nnz=nnz, positive=positive,
seed=seed, overlap=overlap)
coefs_flat = coefs.reshape(-1, n_tasks)
std = 0.25
X, Y = gaussian_design(n_samples, coefs_flat, corr=0.95, sigma=std,
scaled=True, seed=seed)
###############################################################################
# set ot params
epsilon = 2.5 / n_features
M = utils.groundmetric2d(n_features, p=2, normed=True)
gamma = utils.compute_gamma(0.8, M)
###############################################################################
# set hyperparameters and fit MTW
betamax = np.array([x.T.dot(y) for x, y in zip(X, Y)]).max() / n_samples
alpha = 10. / n_samples
beta_fr = 0.35
beta = beta_fr * betamax
callback_options = {'callback': True,
'x_real': coefs.reshape(- 1, n_tasks),
'verbose': True, 'rate': 1}
print("Fitting MTW model...")
mtw = MTW(M=M, alpha=alpha, beta=beta, sigma0=0., positive=positive,
epsilon=epsilon, gamma=gamma, stable=False, tol_ot=1e-6, tol=1e-4,
maxiter_ot=10, maxiter=2000, n_jobs=n_tasks,
gpu=False, **callback_options)
mtw.fit(X, Y)
###############################################################################
# Now we plot the task images on top of each other (True), the MTW fitted
# coefficients and their latent Wasserstein barycenter
f, axes = plt.subplots(1, 3, figsize=(12, 4))
coefs = coefs.reshape(width, width, -1)
coefs_mtw = mtw.coefs_.reshape(width, width, -1)
thetabar = mtw.barycenter_.reshape(width, width)[:, :, None]
contours = [coefs, coefs_mtw, thetabar]
titles = ["True", "Recovered", "Barycenter"]
cmaps = [cm.Reds, cm.Blues, cm.Greens, cm.Oranges, cm.Greys, cm.Purples]
for ax, data_, t in zip(axes.ravel(), contours, titles):
contour_coefs(data_, ax, cmaps=cmaps, title=t)
axes[-1].clear()
contour_coefs(thetabar, ax=axes[-1], cmaps=cmaps,
title="barycenter Contours")
plt.tight_layout()
plt.show()
|
[
"hicham.janati@inria.fr"
] |
hicham.janati@inria.fr
|
8e75b88201a1a9c29a76c8dbb9c96749e65847cc
|
5a52ccea88f90dd4f1acc2819997fce0dd5ffb7d
|
/alipay/aop/api/response/AlipayOpenAppOpenidBatchqueryResponse.py
|
852a2cc617b5cca9294234c0928fbc32c01da61e
|
[
"Apache-2.0"
] |
permissive
|
alipay/alipay-sdk-python-all
|
8bd20882852ffeb70a6e929038bf88ff1d1eff1c
|
1fad300587c9e7e099747305ba9077d4cd7afde9
|
refs/heads/master
| 2023-08-27T21:35:01.778771
| 2023-08-23T07:12:26
| 2023-08-23T07:12:26
| 133,338,689
| 247
| 70
|
Apache-2.0
| 2023-04-25T04:54:02
| 2018-05-14T09:40:54
|
Python
|
UTF-8
|
Python
| false
| false
| 1,574
|
py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import json
from alipay.aop.api.response.AlipayResponse import AlipayResponse
from alipay.aop.api.domain.OpenIdValue import OpenIdValue
class AlipayOpenAppOpenidBatchqueryResponse(AlipayResponse):
def __init__(self):
super(AlipayOpenAppOpenidBatchqueryResponse, self).__init__()
self._illegal_user_id_list = None
self._open_id_list = None
@property
def illegal_user_id_list(self):
return self._illegal_user_id_list
@illegal_user_id_list.setter
def illegal_user_id_list(self, value):
if isinstance(value, list):
self._illegal_user_id_list = list()
for i in value:
self._illegal_user_id_list.append(i)
@property
def open_id_list(self):
return self._open_id_list
@open_id_list.setter
def open_id_list(self, value):
if isinstance(value, list):
self._open_id_list = list()
for i in value:
if isinstance(i, OpenIdValue):
self._open_id_list.append(i)
else:
self._open_id_list.append(OpenIdValue.from_alipay_dict(i))
def parse_response_content(self, response_content):
response = super(AlipayOpenAppOpenidBatchqueryResponse, self).parse_response_content(response_content)
if 'illegal_user_id_list' in response:
self.illegal_user_id_list = response['illegal_user_id_list']
if 'open_id_list' in response:
self.open_id_list = response['open_id_list']
|
[
"jishupei.jsp@alibaba-inc.com"
] |
jishupei.jsp@alibaba-inc.com
|