blob_id stringlengths 40 40 | directory_id stringlengths 40 40 | path stringlengths 3 616 | content_id stringlengths 40 40 | detected_licenses listlengths 0 112 | license_type stringclasses 2 values | repo_name stringlengths 5 115 | snapshot_id stringlengths 40 40 | revision_id stringlengths 40 40 | branch_name stringclasses 777 values | visit_date timestamp[us]date 2015-08-06 10:31:46 2023-09-06 10:44:38 | revision_date timestamp[us]date 1970-01-01 02:38:32 2037-05-03 13:00:00 | committer_date timestamp[us]date 1970-01-01 02:38:32 2023-09-06 01:08:06 | github_id int64 4.92k 681M ⌀ | star_events_count int64 0 209k | fork_events_count int64 0 110k | gha_license_id stringclasses 22 values | gha_event_created_at timestamp[us]date 2012-06-04 01:52:49 2023-09-14 21:59:50 ⌀ | gha_created_at timestamp[us]date 2008-05-22 07:58:19 2023-08-21 12:35:19 ⌀ | gha_language stringclasses 149 values | src_encoding stringclasses 26 values | language stringclasses 1 value | is_vendor bool 2 classes | is_generated bool 2 classes | length_bytes int64 3 10.2M | extension stringclasses 188 values | content stringlengths 3 10.2M | authors listlengths 1 1 | author_id stringlengths 1 132 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
f8d1fea9546441f77be588dc74e9b5e244b85d87 | 612d763da9594102993a61cb9337f5f5fd8da15e | /Simple/SimpleNeuroNet_Binary.py | beea5b3545e25e9b9fa94c0ceff35d4680559f23 | [] | no_license | sathayas/DeepLearningTestground | 484abd623256045a7b5dc5e435acabe8b87415b9 | 2ad496c73a3be883119702522ffcb217c2b5eb55 | refs/heads/master | 2020-03-17T12:08:19.293404 | 2020-01-19T04:09:18 | 2020-01-19T04:09:18 | 133,575,858 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,040 | py | import autograd.numpy as np
import autograd.numpy.random as npr
from autograd import grad
import sklearn.metrics
import pylab
# Generate Dataset
# 1000 examples of 100 random scalar features; labels are independent coin
# flips, so the features carry no real signal about Y (the net can only fit noise).
examples = 1000
features = 100
X = npr.randn(examples, features) # scalar features
Y = (npr.randn(examples)>0).astype(int) # binary labels
D = (X, Y)
# Specify the network
# One hidden layer of 10 units feeding a single sigmoid output unit.
# Weights/biases are initialised uniformly in [0, 1).
layer1_units = 10
layer2_units = 1
w1 = npr.rand(features, layer1_units)
b1 = npr.rand(layer1_units)
w2 = npr.rand(layer1_units, layer2_units)
b2 = npr.rand(layer2_units)
theta = (w1, b1, w2, b2)  # all trainable parameters, as one tuple
def binary_cross_entropy(y, y_hat):
    """Summed binary cross-entropy loss.

    :param y: array of 0/1 labels.
    :param y_hat: array of predicted probabilities.
    :return: scalar sum of -[y*log(y_hat) + (1-y)*log(1-y_hat)].

    Predictions are clipped away from exactly 0 and 1 so the log terms
    stay finite when the network saturates (the original returned inf/NaN).
    """
    eps = 1e-12  # small enough not to measurably change the loss
    y_hat = np.clip(y_hat, eps, 1.0 - eps)
    return np.sum(-((y * np.log(y_hat)) + ((1 - y) * np.log(1 - y_hat))))
def sigmoid(x):
    """Logistic function: squashes any real-valued input into (0, 1)."""
    one = 1.0
    return one / (one + np.exp(-x))
# Wraper around the Neural Network
def neural_network(x, theta):
    """Forward pass of the two-layer net: sigmoid(sigmoid(x.w1 + b1).w2 + b2)."""
    w1, b1, w2, b2 = theta
    hidden = sigmoid(np.dot(x, w1) + b1)
    return sigmoid(np.dot(hidden, w2) + b2)
# Wrapper around the objective function to be optimised
def objective(theta, idx):
    """Per-example loss: cross-entropy between label idx and the net's prediction."""
    x_i = D[0][idx]
    y_i = D[1][idx]
    return binary_cross_entropy(y_i, neural_network(x_i, theta))
# Update
def update_theta(theta, delta, alpha):
    """One gradient-descent step: theta' = theta - alpha * delta, component-wise.

    theta and delta are parallel 4-tuples (w1, b1, w2, b2); the learning
    rate alpha scales each gradient component.
    """
    w1, b1, w2, b2 = theta
    g_w1, g_b1, g_w2, g_b2 = delta
    return (
        w1 - alpha * g_w1,
        b1 - alpha * g_b1,
        w2 - alpha * g_w2,
        b2 - alpha * g_b2,
    )
# Compute Gradient
# autograd differentiates `objective` with respect to its first argument (theta).
grad_objective = grad(objective)
# Train the Neural Network
epochs = 10
Y_pred = (neural_network(D[0],theta)>0.5).astype(int)
print("Accuracy score before training:",
      sklearn.metrics.accuracy_score(D[1],Y_pred))
accuScore = []
for i in range(0, epochs):
    for j in range(0, examples):
        # Stochastic gradient descent: one example per parameter update.
        delta = grad_objective(theta, j)
        theta = update_theta(theta,delta, 0.1)
    # Accuracy recorded once per epoch (assumed from the epoch-level plot
    # below — original indentation was lost; confirm against upstream).
    Y_pred = (neural_network(D[0],theta)>0.5).astype(int)
    accuScore.append(sklearn.metrics.accuracy_score(D[1],Y_pred))
print("Accuracy score after training:",
      sklearn.metrics.accuracy_score(D[1],Y_pred))
pylab.plot(accuScore)
pylab.show()
| [
"hayasaka@utexas.edu"
] | hayasaka@utexas.edu |
90ca2e895d9d77031144a857d40d49e108401fa4 | 37c243e2f0aab70cbf38013d1d91bfc3a83f7972 | /pp7TeV/HeavyIonsAnalysis/JetAnalysis/python/jets/akPu6PFJetSequence_pp_jec_cff.py | d3ab48c908bd41a4600658e58932bbd4da9d71df | [] | no_license | maoyx/CMSWork | 82f37256833cbe4c60cb8df0b4eb68ceb12b65e7 | 501456f3f3e0f11e2f628b40e4d91e29668766d5 | refs/heads/master | 2021-01-01T18:47:55.157534 | 2015-03-12T03:47:15 | 2015-03-12T03:47:15 | 10,951,799 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,497 | py |
import FWCore.ParameterSet.Config as cms
from PhysicsTools.PatAlgos.patHeavyIonSequences_cff import *
from HeavyIonsAnalysis.JetAnalysis.inclusiveJetAnalyzer_cff import *
# Gen-jet matching: associate each PU-subtracted anti-kT R=0.6 PF jet with an
# ak6 heavy-ion generator jet (MC truth for response/JEC studies).
akPu6PFmatch = patJetGenJetMatch.clone(
    src = cms.InputTag("akPu6PFJets"),
    matched = cms.InputTag("ak6HiGenJets")
)
# Parton matching for jet-flavour assignment.
akPu6PFparton = patJetPartonMatch.clone(
    src = cms.InputTag("akPu6PFJets"),
    matched = cms.InputTag("genParticles")
)
# Jet energy correction factors: L2Relative + L3Absolute only.
akPu6PFcorr = patJetCorrFactors.clone(
    useNPV = False,
    # primaryVertices = cms.InputTag("hiSelectedVertex"),
    levels = cms.vstring('L2Relative','L3Absolute'),
    src = cms.InputTag("akPu6PFJets"),
    payload = "AKPu6PF_generalTracks"
)
# PAT jet production: attach the correction factors and gen/parton matches.
# All b-tagging, jet-ID, charge and embedding extras are switched off to keep
# the jets lightweight; gen matches are kept and embedded for MC.
akPu6PFpatJets = patJets.clone(jetSource = cms.InputTag("akPu6PFJets"),
    jetCorrFactorsSource = cms.VInputTag(cms.InputTag("akPu6PFcorr")),
    genJetMatch = cms.InputTag("akPu6PFmatch"),
    genPartonMatch = cms.InputTag("akPu6PFparton"),
    jetIDMap = cms.InputTag("akPu6PFJetID"),
    addBTagInfo = False,
    addTagInfos = False,
    addDiscriminators = False,
    addAssociatedTracks = False,
    addJetCharge = False,
    addJetID = False,
    getJetMCFlavour = False,
    addGenPartonMatch = True,
    addGenJetMatch = True,
    embedGenJetMatch = True,
    embedGenPartonMatch = True,
    embedCaloTowers = False,
    embedPFCandidates = False
)
# Ntuple producer for the PAT jets; fills gen-jet branches (isMC = True).
akPu6PFJetAnalyzer = inclusiveJetAnalyzer.clone(jetTag = cms.InputTag("akPu6PFpatJets"),
    genjetTag = 'ak6HiGenJets',
    rParam = 0.6,
    matchJets = cms.untracked.bool(False),
    matchTag = 'patJets',
    pfCandidateLabel = cms.untracked.InputTag('particleFlow'),
    trackTag = cms.InputTag("generalTracks"),
    fillGenJets = True,
    isMC = True,
    genParticles = cms.untracked.InputTag("genParticles"),
    eventInfoTag = cms.InputTag("generator")
)
# MC sequence: gen match -> parton match -> JEC -> PAT jets -> analyzer.
akPu6PFJetSequence_mc = cms.Sequence(
    akPu6PFmatch
    *
    akPu6PFparton
    *
    akPu6PFcorr
    *
    akPu6PFpatJets
    *
    akPu6PFJetAnalyzer
)
# Data sequence: same chain without the gen-level matching steps.
akPu6PFJetSequence_data = cms.Sequence(akPu6PFcorr
    *
    akPu6PFpatJets
    *
    akPu6PFJetAnalyzer
)
# JEC-derivation and mixed-sample workflows reuse the MC sequence unchanged.
akPu6PFJetSequence_jec = akPu6PFJetSequence_mc
akPu6PFJetSequence_mix = akPu6PFJetSequence_mc
akPu6PFJetSequence = cms.Sequence(akPu6PFJetSequence_jec)
# Lower the gen-jet pT threshold to 1 GeV for the JEC workflow.
akPu6PFJetAnalyzer.genPtMin = cms.untracked.double(1)
| [
"yaxian.mao@cern.ch"
] | yaxian.mao@cern.ch |
95fc4ecbd21d1eb95f4455306dc5dbf5f3b81498 | fb5c5d50d87a6861393d31911b9fae39bdc3cc62 | /Scripts/sims4communitylib/dialogs/custom_dialogs/picker_dialogs/common_ui_object_category_picker.py | 406e9c564ab5c122dee6554869bee97de57477a2 | [
"CC-BY-4.0"
] | permissive | ColonolNutty/Sims4CommunityLibrary | ee26126375f2f59e5567b72f6eb4fe9737a61df3 | 58e7beb30b9c818b294d35abd2436a0192cd3e82 | refs/heads/master | 2023-08-31T06:04:09.223005 | 2023-08-22T19:57:42 | 2023-08-22T19:57:42 | 205,197,959 | 183 | 38 | null | 2023-05-28T16:17:53 | 2019-08-29T15:48:35 | Python | UTF-8 | Python | false | false | 2,435 | py | """
The Sims 4 Community Library is licensed under the Creative Commons Attribution 4.0 International public license (CC BY 4.0).
https://creativecommons.org/licenses/by/4.0/
https://creativecommons.org/licenses/by/4.0/legalcode
Copyright (c) COLONOLNUTTY
"""
from distributor.rollback import ProtocolBufferRollback
from interactions.utils.tunable_icon import TunableIconFactory
from sims4.localization import TunableLocalizedString
from sims4.tuning.tunable import TunableTuple, TunableList, Tunable
from ui.ui_dialog_picker import UiObjectPicker
from distributor.shared_messages import build_icon_info_msg
class CommonUiObjectCategoryPicker(UiObjectPicker):
    """An ObjectPicker with categories listed in a drop down.
    """
    FACTORY_TUNABLES = {
        # Each entry supplies an internal category key, an icon factory, and a
        # localized display name for the drop-down row.
        'object_categories': TunableList(
            description='\n The categories to display in the drop down for this picker.\n ',
            tunable=TunableTuple(
                object_category=Tunable(
                    tunable_type=str,
                    default='ALL'
                ),
                icon=TunableIconFactory(),
                category_name=TunableLocalizedString()
            )
        )
    }

    def _build_customize_picker(self, picker_data) -> None:
        # Fill the picker's filter_data protobuf with one row per category,
        # then let the base picker finish building.
        # noinspection PyBroadException
        try:
            # Preferred layout: nested per-category filter rows plus the
            # drop-down flag.
            with ProtocolBufferRollback(picker_data.filter_data) as filter_data_list:
                for category in self.object_categories:
                    with ProtocolBufferRollback(filter_data_list.filter_data) as category_data:
                        # Stable 8-digit pseudo-id derived from the category key.
                        category_data.tag_type = abs(hash(category.object_category)) % (10 ** 8)
                        build_icon_info_msg(category.icon(None), None, category_data.icon_info)
                        category_data.description = category.category_name
                filter_data_list.use_dropdown_filter = self.use_dropdown_filter
            super()._build_customize_picker(picker_data)
        except:
            # Bare-except fallback (kept as-is): on any failure — presumably a
            # game-version difference in the protobuf layout — write the
            # categories flat without nesting or the drop-down flag.
            with ProtocolBufferRollback(picker_data.filter_data) as category_data:
                for category in self.object_categories:
                    category_data.tag_type = abs(hash(category.object_category)) % (10 ** 8)
                    build_icon_info_msg(category.icon(None), None, category_data.icon_info)
                    category_data.description = category.category_name
            super()._build_customize_picker(picker_data)
| [
"ColonolNutty@hotmail.com"
] | ColonolNutty@hotmail.com |
942a63ed2268cec19d5f3a0790d7570a508c5463 | 1bdb0da31d14102ca03ee2df44f0ec522b0701a4 | /EmiliaRomagna/EmiliAmbiente/5-LabelsPublishing.py | 9ecbca232ba7c06cfe066148d873a8d4ffcce2a6 | [] | no_license | figuriamoci/Acqua | dc073d90c3c5e5899b22005685847916de1dfd95 | aef22fcd0c80c92441e0e3df2468d7a2f23a848a | refs/heads/master | 2020-12-15T04:00:26.855139 | 2020-06-08T21:17:55 | 2020-06-08T21:17:55 | 234,986,179 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 303 | py | import acqua.labelCollection as lc
import logging
import acqua.aqueduct as aq
# Water-utility operator whose labels are being (re)published.
gestore = "EmiliAmbiente"
aq.setEnv('EmiliaRomagna//'+gestore)
geoJsonFile = gestore+'.geojson'
# Replace the operator's label set: drop the existing records, then
# bulk-insert the ones parsed from the operator's GeoJSON file.
ll = lc.removeEtichette(gestore)
ll = lc.to_mongoDBInsertMany(geoJsonFile)
logging.info("Safe %s record(s) to MongoDB.",len(ll))
| [
"an.fantini@gmail.com"
] | an.fantini@gmail.com |
67e82f1a3eced602d9fbdf7d700faba6612cfb3e | 76799ea50d7b0b9cf8dc38f52e2516b0684fa010 | /py2win/testdata/sampleproject/setup.py | 679189dc9682ffd2832e8b5b456177fd6a753b5e | [
"MIT"
] | permissive | trollfred/py2win | 67fc6cc78e5453c46258aff6ca28b1b91b5bd8ea | 82158e7f5530b65adfc7b3d434b037c592a5913f | refs/heads/master | 2020-07-01T17:32:49.746098 | 2019-11-03T16:17:00 | 2019-11-03T16:17:00 | 201,240,871 | 0 | 0 | MIT | 2019-08-08T11:06:14 | 2019-08-08T11:06:13 | null | UTF-8 | Python | false | false | 1,606 | py | """A setuptools based setup module.
See:
https://packaging.python.org/en/latest/distributing.html
https://github.com/pypa/sampleproject
"""
from setuptools import setup, find_packages
from codecs import open
from os import path
here = path.abspath(path.dirname(__file__))
# Get the long description from the README file
with open(path.join(here, 'README.rst'), encoding='utf-8') as f:
    long_description = f.read()

setup(
    name='sample',
    # Version follows PEP 440.
    version='1.2.0',
    description='A sample Python project',
    long_description=long_description,
    url='https://github.com/pypa/sampleproject',
    author='The Python Packaging Authority',
    author_email='pypa-dev@googlegroups.com',
    license='MIT',
    # Trove classifiers; see https://pypi.org/classifiers/
    classifiers=[
        'Development Status :: 3 - Alpha',
        'Intended Audience :: Developers',
        'Topic :: Software Development :: Build Tools',
        'License :: OSI Approved :: MIT License',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.3',
        'Programming Language :: Python :: 3.4',
        'Programming Language :: Python :: 3.5',
    ],
    keywords='sample setuptools development',
    # Ship every package except docs/tests/contrib.
    packages=find_packages(exclude=['contrib', 'docs', 'tests']),
    install_requires=['PyQt5'],
    # Optional extras: `pip install sample[dev]` / `sample[test]`.
    extras_require={
        'dev': ['check-manifest'],
        'test': ['coverage'],
    },
    package_data={
        'sample': ['package_data.dat'],
    },
    data_files=[],
    # Console and GUI launchers generated at install time.
    entry_points={
        'gui_scripts': ['sample-gui=sample.gui:main'],
        'console_scripts': ['sample-console=sample.console:main'],
    },
)
| [
"philippe.pinard@gmail.com"
] | philippe.pinard@gmail.com |
4dedca42a28c6d0fdbb66223664cf42233f210a5 | 9099ed0407521ac40b88f3b92872307f66c57bf9 | /codes/contest/topcoder/SRM 734/TheSquareCityDiv2.py | f71d4afa51b229fa8ca462816c4f94ee8fd89523 | [] | no_license | jiluhu/dirtysalt.github.io | 0cea3f52d2c4adf2bbf5c23b74f4cb1070025816 | c026f2969c784827fac702b34b07a9268b70b62a | refs/heads/master | 2020-08-31T09:32:05.273168 | 2019-10-29T01:53:45 | 2019-10-29T01:53:45 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,217 | py | # -*- coding: utf-8 -*-
import collections
class TheSquareCityDiv2:
def find(self, r, t):
n = int(round(len(t) ** 0.5))
moves = []
for i in range(n):
for j in range(n):
t0 = t[i * n + j]
move = None
warmest = -(1 << 30)
for k0 in range(n):
for k1 in range(n):
dist = abs(k0 - i) + abs(k1 - j)
k = k0 * n + k1
if (dist <= r) and t[k] > warmest:
warmest = t[k]
move = k
moves.append(move)
states = [i * n + j for i in range(n) for j in range(n)]
while True:
changed = False
next_states = []
for s in states:
ns = moves[s]
if s != ns:
changed = True
next_states.append(ns)
if not changed:
break
states = next_states
ra = len(set(states))
group = collections.Counter()
for s in states:
group[s] += 1
rb = group.most_common(1)[0][1]
return ra, rb
# CUT begin
# TEST CODE FOR PYTHON {{{
import sys, time, math
def tc_equal(expected, received):
    """Compare a received answer to the expected one, TopCoder-style.

    The received value is first coerced to the expected value's type.
    Lists/tuples compare element-wise (recursively); floats compare with a
    1e-9 relative/absolute tolerance and NaN never matches.  Any failure
    (coercion or comparison) counts as unequal.
    """
    try:
        kind = type(expected)
        received = kind(received)
        if kind in (list, tuple):
            if len(expected) != len(received):
                return False
            return all(tc_equal(e, r) for e, r in zip(expected, received))
        if kind == float:
            eps = 1e-9
            diff = abs(received - expected)
            both_numbers = not math.isnan(received) and not math.isnan(expected)
            return both_numbers and diff <= eps * max(1.0, abs(expected))
        return expected == received
    except:
        return False
def pretty_str(x):
    """Render x the way TopCoder prints values: strings quoted, tuples as (a,b,c)."""
    kind = type(x)
    if kind == str:
        return '"%s"' % x
    if kind == tuple:
        return '(%s)' % ','.join(pretty_str(item) for item in x)
    return str(x)
def do_test(r, t, __expected):
    """Run one testcase: call TheSquareCityDiv2.find(r, t), compare against
    __expected with tc_equal, print PASSED/FAILED with timing, and return
    1 on pass, 0 on failure or runtime error."""
    startTime = time.time()
    instance = TheSquareCityDiv2()
    exception = None
    try:
        __result = instance.find(r, t);
    except:
        # Capture the traceback text so it can be printed after timing.
        import traceback
        exception = traceback.format_exc()
    elapsed = time.time() - startTime # in sec
    if exception is not None:
        sys.stdout.write("RUNTIME ERROR: \n")
        sys.stdout.write(exception + "\n")
        return 0
    if tc_equal(__expected, __result):
        sys.stdout.write("PASSED! " + ("(%.3f seconds)" % elapsed) + "\n")
        return 1
    else:
        sys.stdout.write("FAILED! " + ("(%.3f seconds)" % elapsed) + "\n")
        sys.stdout.write(" Expected: " + pretty_str(__expected) + "\n")
        sys.stdout.write(" Received: " + pretty_str(__result) + "\n")
        return 0
def run_tests():
    """Parse TheSquareCityDiv2.sample and run every testcase through do_test,
    then print a pass count and a TopCoder-style score based on wall time."""
    sys.stdout.write("TheSquareCityDiv2 (500 Points)\n\n")
    passed = cases = 0
    # Testcase indices passed on the command line; cases whose index is in
    # this set are skipped below.
    case_set = set()
    for arg in sys.argv[1:]:
        case_set.add(int(arg))
    with open("TheSquareCityDiv2.sample", "r") as f:
        while True:
            # Each case starts with a "--" separator line.
            label = f.readline()
            if not label.startswith("--"): break
            # Input: r, then a length-prefixed list of temperatures.
            r = int(f.readline().rstrip())
            t = []
            for i in range(0, int(f.readline())):
                t.append(int(f.readline().rstrip()))
            t = tuple(t)
            f.readline()
            # Expected output: a length-prefixed tuple of ints.
            __answer = []
            for i in range(0, int(f.readline())):
                __answer.append(int(f.readline().rstrip()))
            __answer = tuple(__answer)
            cases += 1
            if len(case_set) > 0 and (cases - 1) in case_set: continue
            sys.stdout.write(" Testcase #%d ... " % (cases - 1))
            passed += do_test(r, t, __answer)
    sys.stdout.write("\nPassed : %d / %d cases\n" % (passed, cases))
    # Score decays with elapsed minutes since the hard-coded contest start
    # timestamp (TopCoder scoring formula).
    T = time.time() - 1526551561
    PT, TT = (T / 60.0, 75.0)
    points = 500 * (0.3 + (0.7 * TT * TT) / (10.0 * PT * PT + TT * TT))
    sys.stdout.write("Time : %d minutes %d secs\n" % (int(T / 60), T % 60))
    sys.stdout.write("Score : %.2f points\n" % points)
if __name__ == '__main__':
    run_tests()
# }}}
# CUT end
| [
"dirtysalt1987@gmail.com"
] | dirtysalt1987@gmail.com |
05496927557001dd893bc06afe133f220458c9de | 661d3fb1f4880ff6efb7638bf066f63397c26ef0 | /Final Implementation/client.py | 8bc748c34af443547c201831f4ace27e17d1538e | [] | no_license | jamesfallon99/CA314 | 6048020bccf8f119b9eb847a8b610aef090130b0 | fb78429fe206ae2fd6cd413e4b119e84e1bae1ea | refs/heads/main | 2023-07-05T11:03:13.470699 | 2021-08-24T21:31:42 | 2021-08-24T21:31:42 | 399,609,567 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,126 | py | from game import Board, Tile
from player import Player
import threading
import socket, pickle
import copy
import json
from json import JSONEncoder
# NOTE: in our presentation we can mention that the changes we have made i.e: adding threading was necessary in order to making it run locally for the demo.
# NOTE: because user input and GUI are yet to be implemented we are testing using threads and the terminal.
class Client(threading.Thread):
    """Game client: owns the socket connection to the server and the local
    Player state; as a Thread, run() pumps incoming pickled server messages."""

    def __init__(self, socket, data_out=None, data_in=None, player=None):
        """
        Initializes an instance of the client class.
        :param socket: socket object for the client.
        :param data_out: JSON object
        :param data_in: JSON object
        :player: Player object
        """
        super().__init__()
        self.socket = socket
        self.data_out = data_out
        self.data_in = data_in
        self.player = player

    def con(self, host , port):
        # Blocking TCP connect to the game server.
        self.socket.connect((host, port))

    def run(self):
        # Receive loop: the server sends pickled objects; the Python type of
        # the payload encodes the message kind.
        while True:
            received = pickle.loads(self.socket.recv(8192))
            if type(received) == Board:
                # Updated board state from the server.
                self.player.board = received
            elif type(received) == int:
                # Points awarded for the last accepted move.
                self.player.score += received
            elif type(received) == str:
                if received == 'invalid word':
                    print(received)
                else:
                    # Any other string is presumably the numeric client/host
                    # id assigned by the server — TODO confirm protocol.
                    self.player.client_host_id = int(received)
            elif type(tuple):
                # NOTE(review): `type(tuple)` is always truthy, so this branch
                # catches every remaining message type, not just tuples —
                # `type(received) == tuple` was likely intended.
                print(received)

    def send_game_data(self, data, target):
        """
        Sends updated and encoded game state data to the server,'
        packaged/structured correctly for being
        interpreted by the Server.
        :param data: JSON object
        :param target: server socket object
        """
        return # void

    def encode(self, data):
        """
        :param data: Encoding data to be sent via
        socket to server.
        """
        # TODO: unimplemented stub — `encoded_data` is never assigned, so
        # calling this raises NameError.
        return encoded_data

    def decode(self, data):
        """
        :param data: Converts data into the correct
        format/structure to be parsed and then utilised.
        """
        # TODO: unimplemented stub — `decoded_data` is never assigned, so
        # calling this raises NameError.
        return decoded_data

    def input_name(self, name):
        """
        Updates player object's name
        :param name: player inputted name
        """
        return #void

    def input_game_code(self, code):
        """
        :param code: player inputted code
        """
        return code

    def display(self):
        """
        Render board object to users screen
        """
        return #void

    def join_game(self, code, target):
        """
        Passes player name and code to the server
        :param code: player inputted code
        :param target: object for code to be sent to
        """
        return #void

    def start_game(self):
        """
        Send game creation request to server
        """
        return #void

    def create_game(self, name):
        """
        Creates a game
        :param name: player name to be set
        """
        return #void
# substitution for GUI due to time constraint
# function runs inside a thread
# use the commands to test various tasks that would normally require an GUI
def handle_input(client):
    """Terminal stand-in for the GUI (runs inside a thread): reads `!`-prefixed
    commands from stdin and drives the given client/player accordingly."""
    commands = [
        '!help',
        '!place_tile',
        '!display',
        '!end_turn',
    ]
    print(f'For a list of commands use !help')
    while True:
        player_input = input('')
        if player_input == '!help':
            print('Here are all the commands available:')
            print(commands)
        elif player_input == '!place_tile':
            # Parse "letter y x", find the matching tile in the player's rack,
            # and place it on the local board.
            print(f'Format for inserting is => letter y x e.g: a 4 1')
            print('Please type the move you wish to make using the above format')
            tile_placement_input = input('Move: ')
            letter, y, x = tile_placement_input.split(' ')
            corresponding_tile = [tile for tile in client.player.tiles if tile.letter == letter]
            # NOTE(review): raises IndexError if the player holds no such tile.
            client.player.board.place_tile(corresponding_tile[0], int(y), int(x))
            print(f'You inserted {letter} into position [{y}][{x}] on the board')
        elif player_input == '!display':
            client.player.board.printing()
        elif player_input == '!end_turn':
            # Ship the whole board to the server for validation/scoring.
            client.socket.send(pickle.dumps(client.player.board))
        elif player_input == '!send_test':
            client.socket.send(pickle.dumps(client.socket.getsockname()))
        elif player_input == '!see_players':
            client.socket.send(pickle.dumps('see_players'))
        elif player_input == '!see_score':
            print(client.player.score)
        elif player_input == '!join_game':
            # Two-message handshake: the command string, then the Player object.
            client.socket.send(pickle.dumps('join_game'))
            client.socket.send(pickle.dumps(client.player))
        elif player_input == '!see_tiles':
            print([tile.letter for tile in client.player.tiles])
def main():
    """Build a client with a hand-crafted test player, connect to a local
    server on 127.0.0.1:8000, and start the receive and terminal-input threads."""
    # client socket
    # make socket use TCP for reliable communication
    sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
    # player board
    player_board = Board()
    # player_board.board[4][1] = Tile('c', 2, (None, 0))
    # player_board.board[4][2] = Tile('a', 4, ('L', 2))
    # player_board.board[4][3] = Tile('t', 5, ('W', 3))
    # tiles
    player_tiles = [
        Tile("c", 2, (None, 0)),
        Tile("a", 4, ("L", 2)),
        Tile("t", 5, ("W", 3)),
        Tile("s", 9, ("L", 2)),
        Tile("d", 2, (None, 0)),
        Tile("o", 7, ("L", 2)),
        Tile("g", 3, ("W", 3)),
    ]
    client = Client(sock)
    # NOTE(review): the trailing comment documents Player as
    # (self, id, name, tiles, score, client_socket, board) but only four
    # arguments are passed — verify against Player's actual definition.
    ligma = Player("ligma", player_tiles, 0, client.socket.getsockname()) #(self, id, name, tiles, score, client_socket, board):
    client.player = ligma
    client.con('127.0.0.1', 8000)
    client.start()
    terminal_input = threading.Thread(target=handle_input, args=(client, )) # please note player_board is the "server" board at the moment ofr testing purposes
    terminal_input.start()

if __name__ == "__main__":
    main()
"noreply@github.com"
] | jamesfallon99.noreply@github.com |
3432242cb6953a1399e20569611c0388973804c9 | 268b22da698310c1fd0471f94d61e02782cbaf37 | /Week6/week6work/test/users.py | 0107e56820ae81040fc0462445cadda5f71ba73f | [] | no_license | jayquake/DI-Excersises | 0c1147863753fb29a6f688bd73bdd9acc047c180 | 02cb0ee9baed7fd7736273e8fc68317ba4356e39 | refs/heads/master | 2020-12-10T11:38:12.225341 | 2020-05-06T08:34:35 | 2020-05-06T08:34:35 | 233,582,928 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 749 | py | import json
def create_database(dst_file='my_file.json'):
    """Seed dst_file with three hard-coded user records (username/password/status)."""
    seed_users = [
        {'username': 'Jason', 'password': 'horse', 'status': True},
        {'username': 'Mac', 'password': 'candy', 'status': False},
        {'username': 'Apple', 'password': 'monkeydog', 'status': True},
    ]
    with open(dst_file, 'w') as out:
        json.dump(seed_users, out)
def load_database(src_file='my_file.json'):
    """Read src_file and return its JSON-decoded contents."""
    with open(src_file, 'r') as src:
        return json.load(src)
def write_database(src_file='my_file.json', data=None):
    """Overwrite src_file with `data` serialised as JSON.

    :param src_file: path of the file to rewrite.
    :param data: JSON-serialisable payload; defaults to an empty list so the
        file always ends up containing valid JSON.
    :return: confirmation string 'database rewritten'.

    The original body called json.dump(f) with no data argument and returned
    only after the dump, so it always raised TypeError; the optional `data`
    parameter keeps the signature backward-compatible while making the
    function usable.
    """
    if data is None:
        data = []
    with open(src_file, 'w') as f:
        json.dump(data, f)
    return ('database rewritten')
"jayquake@gmail.com"
] | jayquake@gmail.com |
5427ff9cd2a1e264c5d688fda77bdd78a405547b | 711756b796d68035dc6a39060515200d1d37a274 | /output_cog/optimized_16488.py | c2ba652af47fd94690bf6a2c85b6db77daeba442 | [] | no_license | batxes/exocyst_scripts | 8b109c279c93dd68c1d55ed64ad3cca93e3c95ca | a6c487d5053b9b67db22c59865e4ef2417e53030 | refs/heads/master | 2020-06-16T20:16:24.840725 | 2016-11-30T16:23:16 | 2016-11-30T16:23:16 | 75,075,164 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 10,832 | py | import _surface
import chimera
try:
import chimera.runCommand
except:
pass
from VolumePath import markerset as ms
try:
from VolumePath import Marker_Set, Link
new_marker_set=Marker_Set
except:
from VolumePath import volume_path_dialog
d= volume_path_dialog(True)
new_marker_set= d.new_marker_set
marker_sets={}
surf_sets={}
# Marker placements, rewritten data-driven: one row per marker —
# (set name, xyz position, rgb colour, radius).  As in the original unrolled
# code, a marker set is created only on first use and reused afterwards, and
# markers are placed in the same order with the same values.
_marker_rows = [
    ("Cog2_GFPN", (434.286, 510.479, 590.705), (0.89, 0.1, 0.1), 18.4716),
    ("Cog2_0", (489.253, 472.319, 587.953), (0.89, 0.1, 0.1), 17.1475),
    ("Cog2_1", (560.891, 435.793, 576.377), (0.89, 0.1, 0.1), 17.1475),
    ("Cog2_GFPC", (554.045, 577.413, 580.255), (0.89, 0.1, 0.1), 18.4716),
    ("Cog2_Anch", (714.157, 316.637, 549.834), (0.89, 0.1, 0.1), 18.4716),
    ("Cog3_GFPN", (471.241, 486.164, 576.761), (1, 1, 0), 18.4716),
    ("Cog3_0", (470.299, 486.748, 576.181), (1, 1, 0.2), 17.1475),
    ("Cog3_1", (457.738, 472.108, 556), (1, 1, 0.2), 17.1475),
    ("Cog3_2", (442.789, 492.542, 543.626), (1, 1, 0.2), 17.1475),
    ("Cog3_3", (426.846, 498.278, 521.044), (1, 1, 0.2), 17.1475),
    ("Cog3_4", (425.818, 512.398, 496.679), (1, 1, 0.2), 17.1475),
    ("Cog3_5", (450.723, 503.06, 487.601), (1, 1, 0.2), 17.1475),
    ("Cog3_GFPC", (448.133, 487.645, 593.394), (1, 1, 0.4), 18.4716),
    ("Cog3_Anch", (450.504, 511.825, 381.59), (1, 1, 0.4), 18.4716),
    ("Cog4_GFPN", (595.459, 376.403, 420.34), (0, 0, 0.8), 18.4716),
    ("Cog4_0", (595.459, 376.403, 420.34), (0, 0, 0.8), 17.1475),
    ("Cog4_1", (576.811, 379.844, 441.592), (0, 0, 0.8), 17.1475),
    ("Cog4_2", (559.138, 385.806, 463.163), (0, 0, 0.8), 17.1475),
    ("Cog4_3", (543.713, 395.785, 484.931), (0, 0, 0.8), 17.1475),
    ("Cog4_4", (530.719, 410.621, 505.42), (0, 0, 0.8), 17.1475),
    ("Cog4_5", (515.678, 427.017, 523.046), (0, 0, 0.8), 17.1475),
    ("Cog4_6", (502.543, 441.294, 543.726), (0, 0, 0.8), 17.1475),
    ("Cog4_GFPC", (582.475, 475.558, 300.223), (0, 0, 0.8), 18.4716),
    ("Cog4_Anch", (419.885, 409.304, 787.388), (0, 0, 0.8), 18.4716),
    ("Cog5_GFPN", (527.007, 411.214, 564.07), (0.3, 0.3, 0.3), 18.4716),
    ("Cog5_0", (527.007, 411.214, 564.07), (0.3, 0.3, 0.3), 17.1475),
    ("Cog5_1", (528.857, 440.167, 567.072), (0.3, 0.3, 0.3), 17.1475),
    ("Cog5_2", (545.341, 464.112, 569.995), (0.3, 0.3, 0.3), 17.1475),
    ("Cog5_3", (569.39, 471.36, 584.728), (0.3, 0.3, 0.3), 17.1475),
    ("Cog5_GFPC", (475.564, 546.293, 611.29), (0.3, 0.3, 0.3), 18.4716),
    ("Cog5_Anch", (668.984, 398.362, 555.969), (0.3, 0.3, 0.3), 18.4716),
    ("Cog6_GFPN", (495.215, 499.979, 592.992), (0.21, 0.49, 0.72), 18.4716),
    ("Cog6_0", (495.388, 500.326, 592.998), (0.21, 0.49, 0.72), 17.1475),
    ("Cog6_1", (502.934, 501.293, 565.603), (0.21, 0.49, 0.72), 17.1475),
    ("Cog6_2", (516.487, 505.433, 541.126), (0.21, 0.49, 0.72), 17.1475),
    ("Cog6_3", (512.666, 489.654, 518.211), (0.21, 0.49, 0.72), 17.1475),
    ("Cog6_4", (485.026, 487.983, 516.641), (0.21, 0.49, 0.72), 17.1475),
    ("Cog6_5", (460.354, 476.654, 510.093), (0.21, 0.49, 0.72), 17.1475),
    ("Cog6_6", (436.6, 474.318, 495.279), (0.21, 0.49, 0.72), 17.1475),
    ("Cog6_GFPC", (447.844, 417.81, 558.715), (0.21, 0.49, 0.72), 18.4716),
    ("Cog6_Anch", (427.822, 534.266, 432.197), (0.21, 0.49, 0.72), 18.4716),
    ("Cog7_GFPN", (472.638, 414.79, 597.555), (0.7, 0.7, 0.7), 18.4716),
    ("Cog7_0", (490.88, 433.677, 597.751), (0.7, 0.7, 0.7), 17.1475),
    ("Cog7_1", (531.262, 474.497, 598.055), (0.7, 0.7, 0.7), 17.1475),
    ("Cog7_2", (574.35, 512.99, 593.6), (0.7, 0.7, 0.7), 17.1475),
    ("Cog7_GFPC", (528.826, 549.799, 647.16), (0.7, 0.7, 0.7), 18.4716),
    ("Cog7_Anch", (666.69, 536.734, 548.685), (0.7, 0.7, 0.7), 18.4716),
    ("Cog8_0", (544.138, 496.351, 508.608), (1, 0.5, 0), 17.1475),
    ("Cog8_1", (550.949, 481.977, 532.556), (1, 0.5, 0), 17.1475),
    ("Cog8_2", (571.062, 467.716, 547.82), (1, 0.5, 0), 17.1475),
    ("Cog8_3", (587.682, 451.778, 565.773), (1, 0.5, 0), 17.1475),
    ("Cog8_4", (588.398, 442.101, 593.745), (1, 0.5, 0), 17.1475),
    ("Cog8_5", (576.237, 426.91, 616.395), (1, 0.5, 0), 17.1475),
    ("Cog8_GFPC", (500.524, 456.01, 605.628), (1, 0.6, 0.1), 18.4716),
    ("Cog8_Anch", (645.779, 390.855, 639.026), (1, 0.6, 0.1), 18.4716),
]
for _name, _xyz, _rgb, _radius in _marker_rows:
    if _name not in marker_sets:
        marker_sets[_name] = new_marker_set(_name)
    s = marker_sets[_name]
    mark = s.place_marker(_xyz, _rgb, _radius)
# Register every accumulated surface model with Chimera's open-model list.
for surf_model in surf_sets.values():
    chimera.openModels.add([surf_model])
| [
"batxes@gmail.com"
] | batxes@gmail.com |
db5193711d8806da6294980ffafb537e5461f000 | fec622bc34957dd4d99f1ef0f23608eeb40ed609 | /internal/notes/builtin-SAVE/packages/xcompmgr/package.py | dc8aa398b1d218fc22d5bea149cd1d6e2eec658c | [] | no_license | scottkwarren/hpctest | 4d5ff18d00c5eb9b7da481c9aa0824aa7082062f | a8bb99b5f601a5d088ae56ab9886ab8079c081ba | refs/heads/master | 2022-09-07T19:36:18.544795 | 2022-08-18T20:26:42 | 2022-08-18T20:26:42 | 100,518,800 | 2 | 2 | null | null | null | null | UTF-8 | Python | false | false | 1,908 | py | ##############################################################################
# Copyright (c) 2013-2017, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/spack/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
class Xcompmgr(AutotoolsPackage):
    """xcompmgr is a sample compositing manager for X servers supporting the
    XFIXES, DAMAGE, RENDER, and COMPOSITE extensions. It enables basic
    eye-candy effects."""
    # Upstream project page and release-tarball download location.
    homepage = "http://cgit.freedesktop.org/xorg/app/xcompmgr"
    url = "https://www.x.org/archive/individual/app/xcompmgr-1.1.7.tar.gz"
    # Known release, identified by its MD5 checksum.
    version('1.1.7', '4992895c8934bbc99bb2447dfe5081f2')
    # X11 client libraries required at build/link time.
    depends_on('libxcomposite')
    depends_on('libxfixes')
    depends_on('libxdamage')
    depends_on('libxrender')
    depends_on('libxext')
    # Build-only tools used by the autotools configure step.
    depends_on('pkg-config@0.9.0:', type='build')
    depends_on('util-macros', type='build')
| [
"scott@rice.edu"
] | scott@rice.edu |
36e40d84e3ec98b642358afd8af1a9a989c1fbdf | 565409a77f506cf834abe5ed2bdd83d221ab0c2d | /web_soluciones/migrations/0009_itemsolucionimagen.py | 9c13069e4d939910896769095ec06d1ab933fc01 | [] | no_license | odecsarrollo/04_odeco_web | 9f3c840fb03afb9bf25792a78829b611e1d67d2a | c87593e24be23bb6ef759a0eafac95e5a0649fe4 | refs/heads/master | 2023-01-22T23:46:17.209001 | 2022-04-13T03:40:38 | 2022-04-13T03:40:38 | 189,261,683 | 0 | 0 | null | 2022-12-27T16:44:24 | 2019-05-29T16:29:29 | JavaScript | UTF-8 | Python | false | false | 1,114 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.11.5 on 2017-09-26 16:55
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
import imagekit.models.fields
import web_soluciones.models
class Migration(migrations.Migration):
    # Auto-generated migration: adds the ItemSolucionImagen model — an
    # ordered, captioned image attached to an ItemSolucion.
    dependencies = [
        ('web_soluciones', '0008_auto_20170926_1126'),
    ]
    operations = [
        migrations.CreateModel(
            name='ItemSolucionImagen',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('orden', models.PositiveIntegerField(default=0)),
                ('descripcion', models.TextField()),
                ('imagen', imagekit.models.fields.ProcessedImageField(blank=True, null=True, upload_to=web_soluciones.models.ItemSolucionImagen.imagen_upload_to, verbose_name='Imagen Item Solución')),
                ('item_solucion', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, related_name='mis_imagenes', to='web_soluciones.ItemSolucion')),
            ],
        ),
    ]
| [
"fabio.garcia.sanchez@gmail.com"
] | fabio.garcia.sanchez@gmail.com |
6c5ceced731c19353e60b513d1631f8c919e755a | 40074020ae89350cbb012212fa1f66549167fb13 | /ch1_image_encryption_image.py | a276c338bfe85fef09d9d7755c2589da8edfdf5b | [] | no_license | jasonbrackman/classic_computer_science_problems | 8009acb8111118eb88b4affc3de153853ed0f81d | fee4e1f9796d9029a2cfd2253cfad863d7beb290 | refs/heads/master | 2020-05-25T14:46:26.711564 | 2019-05-27T02:27:53 | 2019-05-27T02:27:53 | 187,853,790 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,444 | py | import base64
from PIL import Image
from ch1_encryption import encrypt, decrypt
def introspection(obj):
    """Print every public attribute of *obj* as: name, type, value."""
    public_names = (name for name in dir(obj) if not name.startswith("_"))
    for name in public_names:
        value = getattr(obj, name)
        print(name, type(value), value)
def load_image_as_string(path):
    """Open the image at *path* and return (mode, size, raw_pixel_bytes).

    Also echoes the mode and size to stdout, matching the original demo
    behaviour.
    """
    with Image.open(path) as image:
        raw = image.tobytes()
        mode, size = image.mode, image.size
        print(mode, size)
    return mode, size, raw
if __name__ == "__main__":
path = "example.jpg"
mode, size, data = load_image_as_string(path)
idata = base64.encodebytes(data)
key, encrypted = encrypt(idata)
data = decrypt(key, encrypted)
data = base64.decodebytes(data)
s = Image.frombytes(mode=mode, size=size, data=data)
s.show()
# print(type(im))
# print(type(im_b.encode()))
#
# print("Length IM: ", len(im))
# print("Length IM_B: ", len(im_b))
#
# assert im == im_b
# with open('output.bin', 'wb') as file:
# file.write(encrypted.to_bytes((encrypted.bit_length() + 7) // 8, "big"))
#
# with open('output.bin', 'rb') as file:
# bytes = file.read()
# num = int.from_bytes(bytes, byteorder='big')
# # print(num.bit_length())
#
# new_image = decrypt(key, num)
# with open('decrypted.jpg', 'wb') as x:
# x.write(new_image.encode())
#
# x = Image.open("decrypted.jpg")
# x.show()
| [
"brackman@gmail.com"
] | brackman@gmail.com |
a4ac2a1811dd7146877d5c706339874779260aa5 | dc3b25768cdc6c0c31a294a40796b51b185bc5ee | /BIP/Bayes/lhs.py | 8f40be560db3e2383c50a99048f012314340920d | [] | no_license | fccoelho/bayesian-inference | 5fa32936422aea8afca8d89272928e7f1aa0f74b | c274f398ea5dad760b7783f3eb1d343dacc6e591 | refs/heads/master | 2021-01-23T19:41:30.112936 | 2020-07-26T19:19:59 | 2020-07-26T19:19:59 | 32,230,235 | 6 | 3 | null | 2015-07-07T12:50:09 | 2015-03-14T20:19:26 | HTML | UTF-8 | Python | false | false | 6,475 | py | # !/usr/bin/python
# -*- coding:utf-8 -*-
# -----------------------------------------------------------------------------
# Name: lhs.py
# Project: Bayesian-Inference
# Purpose:
#
# Author: Flávio Codeço Coelho<fccoelho@gmail.com>
#
# Created: 2008-11-26
# Copyright: (c) 2008 by the Author
# Licence: GPL
# -----------------------------------------------------------------------------
from __future__ import absolute_import
from __future__ import print_function
from six.moves import map
from six.moves import range
import scipy.stats as stats
import numpy
from numpy.linalg import cholesky, inv
from numpy.random import uniform, shuffle
import theano as T
def lhsFromSample(sample, siz=100):
    """
    Draw a Latin Hypercube Sample from an empirical set of values
    (univariate distributions only).

    :Parameters:
        - `sample`: list, tuple or numpy array of observed values
        - `siz`: number of draws, or a shape tuple for the output sample
    """
    # TODO: add support to correlation restricted multivariate samples
    if not isinstance(sample, (list, tuple, numpy.ndarray)):
        raise TypeError('sample is not a list, tuple or numpy vector')
    wants_shape = isinstance(siz, (tuple, list))
    n = numpy.product(siz) if wants_shape else siz
    # Partition the percentile axis [0, 100) into n equal strata, visit the
    # strata in random order, and draw one uniform percentile per stratum.
    lower_bounds = numpy.arange(0, 100., 100. / n)
    shuffle(lower_bounds)
    points = [stats.uniform(low, 100. / n).rvs() for low in lower_bounds]
    # Map each sampled percentile back onto the empirical distribution.
    out = numpy.array([stats.scoreatpercentile(sample, p) for p in points])
    if wants_shape:
        out.shape = siz
    return out
def lhsFromDensity(kde, siz=100):
    '''
    LHS sampling from a variable's Kernel density estimate.

    :Parameters:
        - `kde`: scipy.stats.gaussian_kde object
        - `siz`: Number or shape tuple for the output sample
    '''
    # BUG FIX: the module never imports ``scipy`` itself (only
    # ``scipy.stats as stats``), so the original
    # ``scipy.stats.kde.gaussian_kde`` reference raised NameError.
    # Go through the ``stats`` alias instead (same class).
    if not isinstance(kde, stats.gaussian_kde):
        raise TypeError("kde is not a density object")
    # BUG FIX: ``n`` was only assigned in the tuple/list branch, so an
    # integer ``siz`` (the default!) crashed with NameError below.
    n = siz
    if isinstance(siz, (tuple, list)):
        n = numpy.product(siz)
    # Resample from the KDE, then re-stratify the draws with an LHS pass.
    s = kde.resample(n)
    v = lhsFromSample(s, n)
    if isinstance(siz, (tuple, list)):
        v.shape = siz
    return v
def lhs(dist, parms, siz=100, noCorrRestr=False, corrmat=None):
    '''
    Latin Hypercube sampling of any distribution.
    dist is a scipy.stats random number generator
    such as stats.norm, stats.beta, etc
    parms is a tuple with the parameters needed for
    the specified distribution.

    :Parameters:
        - `dist`: random number generator from scipy.stats module or a list of them.
        - `parms`: tuple of parameters as required for dist, or a list of them.
        - `siz`: number or shape tuple for the output sample
        - `noCorrRestr`: if true, does not enforce correlation structure on the sample.
        - `corrmat`: Correlation matrix
    '''
    # Normalize the single-distribution case to the list form used below.
    if not isinstance(dist, (list, tuple)):
        dists = [dist]
        parms = [parms]
    else:
        assert len(dist) == len(parms)
        dists = dist
    # Per-variable rank orderings imposing the requested correlation
    # structure (or none) on the joint sample.
    indices = rank_restr(nvars=len(dists), smp=siz, noCorrRestr=noCorrRestr, Corrmat=corrmat)
    smplist = []
    for j, d in enumerate(dists):
        if not isinstance(d, (stats.rv_discrete, stats.rv_continuous)):
            raise TypeError('dist is not a scipy.stats distribution object')
        n = siz
        if isinstance(siz, (tuple, list)):
            n = numpy.product(siz)
        # force type to float for sage compatibility
        pars = tuple([float(k) for k in parms[j]])
        # perc = numpy.arange(1.,n+1)/(n+1)
        step = 1. / (n)
        perc = numpy.arange(0, 1, step)  # class boundaries
        # One uniform draw inside each of the n equal-probability strata,
        # mapped through the distribution's inverse CDF (ppf).
        s_pos = [uniform(i, i + step) for i in perc[:]]  # [i+ step/2. for i in perc[:]]
        v = d(*pars).ppf(s_pos)
        # print len(v), step, perc
        # Reorder the stratified draws according to the rank pattern so the
        # joint sample carries the desired correlation.
        index = list(map(int, indices[j] - 1))
        v = v[index]
        if isinstance(siz, (tuple, list)):
            v.shape = siz
        smplist.append(v)
    # A single distribution returns a bare array, not a one-element list.
    if len(dists) == 1:
        return smplist[0]
    return smplist
def rank_restr(nvars=4, smp=100, noCorrRestr=False, Corrmat=None):
    """
    Returns the indices for sampling variables with
    the desired correlation structure.

    :Parameters:
        - `nvars`: number of variables
        - `smp`: number of samples
        - `noCorrRestr`: No correlation restriction if True
        - `Corrmat`: Correlation matrix. If None, assure uncorrelated samples.
    """
    if isinstance(smp, (tuple, list)):
        smp = numpy.product(smp)
    def shuf(s):
        """
        Shuffle a vector, making sure to make a copy of the original.

        :param s: A vector of values
        :return: a list of arrays (one independently shuffled copy per variable)
        """
        s1 = []
        for i in range(nvars):
            shuffle(s)
            s1.append(s.copy())
        return s1
    if noCorrRestr or nvars == 1:
        # No correlation structure requested: independent random orderings.
        inds = numpy.arange(smp)
        x = shuf(inds)
    else:
        if Corrmat is None:
            # Identity correlation: force the variables to be uncorrelated.
            C = numpy.core.numeric.identity(nvars)
        else:
            if Corrmat.shape[0] != nvars:
                raise TypeError('Correlation matrix must be of rank %s' % nvars)
            C = numpy.matrix(Corrmat)
        # Normal scores for the smp sample positions, shuffled per variable.
        s0 = numpy.arange(1., smp + 1) / (smp + 1.)
        s = stats.norm().ppf(s0)
        s1 = shuf(s)
        S = numpy.matrix(s1)
        # Re-correlate the scores through Cholesky factors and return each
        # variable's rank ordering of the transformed scores.  (This looks
        # like the Iman-Conover rank-correlation approach — confirm.)
        P = cholesky(C)
        Q = cholesky(numpy.corrcoef(S))
        Final = S.transpose() * inv(Q).transpose() * P.transpose()
        x = [stats.stats.rankdata(Final.transpose()[i,]) for i in range(nvars)]
    return x
if __name__ == '__main__':
    # Ad-hoc demo / visual check of the sampler (requires pylab/matplotlib).
    dist = stats.uniform, stats.uniform
    parms = (0, 1.), (0, 1.)
    print(lhs(dist, parms, siz=4))
    import pylab as P
    # dist = stats.norm
    dist = stats.beta
    # pars = (50,2)
    pars = (1, 5)  # beta
    b = lhs(dist, pars, 1000)
    # Correlated bivariate sample with target correlation 0.8.
    cm = numpy.array([[1, .8], [.8, 1]])
    c = lhs([dist, dist], [pars, pars], 2000, False, cm)
    # print stats.pearsonr(c[0],c[1]), stats.spearmanr(c[0],c[1])
    # P.hist(c[0],normed=1)#, label='c0 sample')
    P.scatter(c[0], c[1])
    # P.hist(c[1],normed=1)#, label='c1 sample')
    # print c[0].shape,c[1].shape
    n = dist(*pars).rvs(size=20)
    # hist(n.ravel(),facecolor='r',alpha =0.3,normed=1, label='Regular sample')
    # plot(numpy.arange(min(min(c),min(n)),max(max(c),max(n)),.1),dist(*pars).pdf(numpy.arange(min(min(c),min(n)),max(max(c),max(n)),.1)),label='PDF')
    # legend()
    # savefig('lhs.png',dpi=400)
    # lhs([stats.norm]*19,[(0,1)]*19,17,False,numpy.identity(19))
    P.show()
# TODO: Extend lhsFromSample to allow multivariate correlated sampling
| [
"fccoelho@gmail.com"
] | fccoelho@gmail.com |
bb932d4a65358821de6831577fc352f0bf812986 | de24f83a5e3768a2638ebcf13cbe717e75740168 | /moodledata/vpl_data/380/usersdata/316/78467/submittedfiles/testes.py | 5fe8a3fc8753623af659b42b84c7c6f583922ff6 | [] | no_license | rafaelperazzo/programacao-web | 95643423a35c44613b0f64bed05bd34780fe2436 | 170dd5440afb9ee68a973f3de13a99aa4c735d79 | refs/heads/master | 2021-01-12T14:06:25.773146 | 2017-12-22T16:05:45 | 2017-12-22T16:05:45 | 69,566,344 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 88 | py | # -*- coding: utf-8 -*-
#COMECE AQUI ABAIXO
a=int(input("qual a sua altura em metros?")) | [
"rafael.mota@ufca.edu.br"
] | rafael.mota@ufca.edu.br |
dfea2d32f870bfc8e2edd99da132c2ff2f27ed08 | 1dacbf90eeb384455ab84a8cf63d16e2c9680a90 | /pkgs/pyopenssl-0.15.1-py27_2/lib/python2.7/site-packages/OpenSSL/rand.py | 3adf69369a4a58b01926fe58b7e3bf322ddaedb9 | [
"Apache-2.0",
"BSD-3-Clause",
"LicenseRef-scancode-unknown"
] | permissive | wangyum/Anaconda | ac7229b21815dd92b0bd1c8b7ec4e85c013b8994 | 2c9002f16bb5c265e0d14f4a2314c86eeaa35cb6 | refs/heads/master | 2022-10-21T15:14:23.464126 | 2022-10-05T12:10:31 | 2022-10-05T12:10:31 | 76,526,728 | 11 | 10 | Apache-2.0 | 2022-10-05T12:10:32 | 2016-12-15T05:26:12 | Python | UTF-8 | Python | false | false | 4,528 | py | """
PRNG management routines, thin wrappers.
See the file RATIONALE for a short explanation of why this module was written.
"""
from functools import partial
from six import integer_types as _integer_types
from OpenSSL._util import (
ffi as _ffi,
lib as _lib,
exception_from_error_queue as _exception_from_error_queue,
path_string as _path_string)
class Error(Exception):
    """
    An error occurred in an `OpenSSL.rand` API.

    Raised via ``_raise_current_error`` with details pulled from the
    OpenSSL error queue.
    """
# Raise ``Error`` populated from OpenSSL's thread-local error queue.
_raise_current_error = partial(_exception_from_error_queue, Error)
# Sentinel meaning "argument not supplied" (``None`` could be a real value).
_unspecified = object()
# Keep a handle on the builtin before ``bytes()`` below shadows the name.
_builtin_bytes = bytes
def bytes(num_bytes):
    """
    Get some random bytes as a string.

    :param num_bytes: The number of bytes to fetch
    :return: A string of random bytes
    """
    if not isinstance(num_bytes, _integer_types):
        raise TypeError("num_bytes must be an integer")
    if num_bytes < 0:
        raise ValueError("num_bytes must not be negative")
    buf = _ffi.new("char[]", num_bytes)
    if _lib.RAND_bytes(buf, num_bytes) == -1:
        # RAND_bytes signals failure with -1 (e.g. a custom ENGINE);
        # surface whatever is on the OpenSSL error queue.
        # TODO: No tests for this code path. Triggering a RAND_bytes failure
        # might involve supplying a custom ENGINE? That's hard.
        _raise_current_error()
    return _ffi.buffer(buf)[:]
def add(buffer, entropy):
    """
    Add data with a given entropy to the PRNG

    :param buffer: Buffer with random data (must be a byte string)
    :param entropy: The entropy (in bytes) measurement of the buffer
    :return: None
    """
    if not isinstance(buffer, _builtin_bytes):
        raise TypeError("buffer must be a byte string")
    if not isinstance(entropy, int):
        raise TypeError("entropy must be an integer")
    # TODO Nothing tests this call actually being made, or made properly.
    _lib.RAND_add(buffer, len(buffer), entropy)
def seed(buffer):
    """
    Alias for rand_add, with entropy equal to length

    :param buffer: Buffer with random data (must be a byte string)
    :return: None
    """
    if not isinstance(buffer, _builtin_bytes):
        raise TypeError("buffer must be a byte string")
    # TODO Nothing tests this call actually being made, or made properly.
    _lib.RAND_seed(buffer, len(buffer))
def status():
    """
    Retrieve the status of the PRNG

    :return: True if the PRNG is seeded enough, false otherwise
    """
    # NOTE(review): the underlying C call returns an int (1/0) rather than a
    # Python bool; callers rely on its truthiness, which is equivalent.
    return _lib.RAND_status()
def egd(path, bytes=_unspecified):
    """
    Query an entropy gathering daemon (EGD) for random data and add it to
    the PRNG.  A missing socket is not treated as an error; the call simply
    reports zero bytes read.

    :param path: The path to the EGD socket
    :param bytes: (optional) The number of bytes to read, default is 255
    :returns: The number of bytes read (NB: a value of 0 isn't necessarily an
        error, check rand.status())
    """
    if not isinstance(path, _builtin_bytes):
        raise TypeError("path must be a byte string")
    # Resolve the "unspecified means 255" sentinel before type-checking.
    num_bytes = 255 if bytes is _unspecified else bytes
    if not isinstance(num_bytes, int):
        raise TypeError("bytes must be an integer")
    return _lib.RAND_egd_bytes(path, num_bytes)
def cleanup():
    """
    Erase the memory used by the PRNG.

    :return: None
    """
    # TODO Nothing tests this call actually being made, or made properly.
    _lib.RAND_cleanup()
def load_file(filename, maxbytes=_unspecified):
    """
    Seed the PRNG with data from a file.

    :param filename: The file to read data from (``bytes`` or ``unicode``).
    :param maxbytes: (optional) The number of bytes to read; by default the
        entire file is read.
    :return: The number of bytes read
    """
    filename = _path_string(filename)
    # -1 tells RAND_load_file to consume the whole file.
    limit = -1 if maxbytes is _unspecified else maxbytes
    if not isinstance(limit, int):
        raise TypeError("maxbytes must be an integer")
    return _lib.RAND_load_file(filename, limit)
def write_file(filename):
    """
    Save PRNG state to a file

    :param filename: The file to write data to (``bytes`` or ``unicode``).
    :return: The number of bytes written
    """
    # Normalize the path to the byte representation OpenSSL expects.
    filename = _path_string(filename)
    return _lib.RAND_write_file(filename)
# TODO There are no tests for screen at all
def screen():
    """
    Add the current contents of the screen to the PRNG state. Availability:
    Windows.

    :return: None
    """
    _lib.RAND_screen()
# RAND_screen only exists in Windows builds of OpenSSL; drop the wrapper
# entirely on platforms where the symbol is absent.
if getattr(_lib, 'RAND_screen', None) is None:
    del screen
# TODO There are no tests for the RAND strings being loaded, whatever that
# means.
_lib.ERR_load_RAND_strings()
| [
"wgyumg@mgail.com"
] | wgyumg@mgail.com |
b17cfdd4845f8a9f7a97c78226a985f349aa6873 | 53fab060fa262e5d5026e0807d93c75fb81e67b9 | /backup/user_118/ch40_2020_04_06_20_39_17_175977.py | 7aa703ae260935ad312f4b2d6475a51ee8d6f2d0 | [] | no_license | gabriellaec/desoft-analise-exercicios | b77c6999424c5ce7e44086a12589a0ad43d6adca | 01940ab0897aa6005764fc220b900e4d6161d36b | refs/heads/main | 2023-01-31T17:19:42.050628 | 2020-12-16T05:21:31 | 2020-12-16T05:21:31 | 306,735,108 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 103 | py | def soma_valores(lista):
    # BUG: the parameter is immediately rebound to an empty list, so the
    # loop below iterates over nothing and the function always returns [].
    # The name ("soma_valores" = sum values) suggests it was meant to
    # accumulate a total over the caller's list instead.
    lista=[]
    for i in lista:
        lista.append(i)
return lista | [
"you@example.com"
] | you@example.com |
6034e2a2decc71a88d32f71f2ab803e15de3f3c5 | 62e5a238f0b4c22bbeecb59f3ad2c96b92da2ae8 | /strings/stripping_names.py | 493c9cd1d9cfd8b18317ab6d415f2021ab8b9fe9 | [] | no_license | MrBrunotte/python-crash-course-notes | f57bd87a8f891e46565a10c45a82ddb4a99f1dcf | c7957d059fa1cbc5f3d9852bd6137311e6c1c14a | refs/heads/master | 2022-11-29T11:24:51.786457 | 2020-08-03T21:11:33 | 2020-08-03T21:11:33 | 281,511,877 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 102 | py | name = ' stefan '
# Demonstrates that str.lstrip()/str.strip() return NEW strings and leave
# the original (re-printed between calls) unchanged.
print(name)
print(name.lstrip())
print(name)
print(name.strip())
print(name)
| [
"mrbrunotte@gmail.com"
] | mrbrunotte@gmail.com |
c15621eb6087d07b6fdc7af9d27ff60cd29a03e5 | a8289cb7273245e7ec1e6079c7f266db4d38c03f | /Django_Attendance_mongo/mongos/migrations/0004_listentry.py | 5047c29886d29cadce0fc9b56b4c4e47af18ea51 | [] | no_license | palmarytech/Python_Snippet | 6acbd572d939bc9d5d765800f35a0204bc044708 | 41b4ebe15509d166c82edd23b713a1f3bf0458c5 | refs/heads/master | 2022-10-06T22:51:00.469383 | 2020-03-13T08:32:11 | 2020-03-13T08:32:11 | 272,350,189 | 1 | 0 | null | 2020-06-15T05:30:44 | 2020-06-15T05:30:44 | null | UTF-8 | Python | false | false | 600 | py | # Generated by Django 2.1.3 on 2018-12-10 07:13
from django.db import migrations, models
import djongo.models.fields
class Migration(migrations.Migration):
dependencies = [
('mongos', '0003_contact'),
]
operations = [
migrations.CreateModel(
name='ListEntry',
fields=[
('_id', djongo.models.fields.ObjectIdField(auto_created=True, primary_key=True, serialize=False)),
('headline', models.CharField(max_length=255)),
('authors', djongo.models.fields.ListField()),
],
),
]
| [
"leamon.lee13@gmail.com"
] | leamon.lee13@gmail.com |
481e1c44102267c893fdaa4e3adb75a0817ecad1 | 43f3b7e4a5b7a1210ffa72c5a855d7542d68290d | /Results/Python/Series/20.py | 06a2816df4f06bbe10f7f611c865493301a6582a | [] | no_license | bar2104y/Abramyan_1000_tasks | 38e86e119245db4bac0483583cc16d8793d5689c | e0bf9f5e73d90b8eca3fe5ba7913ed12f18d989a | refs/heads/master | 2021-06-05T18:05:09.788453 | 2020-06-30T19:52:31 | 2020-06-30T19:52:31 | 150,898,700 | 5 | 2 | null | 2018-10-02T17:16:28 | 2018-09-29T20:01:33 | Python | UTF-8 | Python | false | false | 206 | py | n = int(input("N: "))
# Scans N values pairwise (``n`` is read on the preceding line) and collects
# every element that is strictly smaller than the element right after it;
# ``k`` counts how many such elements were found.
a = int(input())
k = 0
m = []
for i in range(1, n):
    tmp = int(input())
    if a < tmp:
        # Previous value ``a`` is followed by a larger one: record it.
        m.append(a)
        k += 1
    a = tmp
print("K:", k)
for tmp in m:
    print(tmp)
| [
"bar2104y@yandex.ru"
] | bar2104y@yandex.ru |
74d3e5746f18ab0ddc2199e6b991a399e16ae4e0 | 6bce631b869a8717eed29eae186688a7fdb7f5c8 | /venv/Lib/site-packages/test/test_municipality_financial.py | c547f63265265c80b1143a8fdf6dbd8c2d205aa5 | [] | no_license | singhd3101/CS5100-Stock-Market-Prediction | 6d43bd39633dd80bb1141dc550302874a5bc0939 | 2804a6270a05155e168d0f2518bcd97f1c9bcb3e | refs/heads/master | 2020-11-26T03:56:02.613630 | 2019-12-19T02:22:13 | 2019-12-19T02:22:13 | 228,958,401 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,374 | py | # coding: utf-8
"""
Intrinio API
Welcome to the Intrinio API! Through our Financial Data Marketplace, we offer a wide selection of financial data feed APIs sourced by our own proprietary processes as well as from many data vendors. For a complete API request / response reference please view the [Intrinio API documentation](https://intrinio.com/documentation/api_v2). If you need additional help in using the API, please visit the [Intrinio website](https://intrinio.com) and click on the chat icon in the lower right corner. # noqa: E501
OpenAPI spec version: 2.2.0
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import unittest
import intrinio_sdk
from intrinio_sdk.models.municipality_financial import MunicipalityFinancial # noqa: E501
from intrinio_sdk.rest import ApiException
class TestMunicipalityFinancial(unittest.TestCase):
    """MunicipalityFinancial unit test stubs (generated by swagger-codegen)."""
    def setUp(self):
        pass
    def tearDown(self):
        pass
    def testMunicipalityFinancial(self):
        """Test MunicipalityFinancial"""
        # FIXME: construct object with mandatory attributes with example values
        # model = intrinio_sdk.models.municipality_financial.MunicipalityFinancial()  # noqa: E501
        pass
if __name__ == '__main__':
unittest.main()
| [
"singh3101div@gmail.com"
] | singh3101div@gmail.com |
2c86d8433b2281fa239c52a7f91f0908b32756d8 | 759f52976ad2cd9236da561ca254e11e08003487 | /part7/ex45/v2-replace-config/replacement_matcher.py | ff35c60a384c597782c1732e5777c6ea8757cb0e | [] | no_license | mbaeumer/fiftyseven | 57b571c3e09640a2ab0ed41e5d06643c12b48001 | d79b603d5b37bf1f4127d9253f8526ea3897dc08 | refs/heads/master | 2020-06-10T20:52:25.311992 | 2017-11-15T18:28:38 | 2017-11-15T18:28:38 | 75,877,462 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 195 | py | #!/usr/bin/python
class Replacement:
    """A single replacement rule: swap *to_replace* for *replaced_by* at the
    given occurrence."""
    def __init__(self, to_replace, replaced_by, occurence):
        # NOTE(review): 'occurence' is a misspelling of 'occurrence', but it
        # is part of the public attribute/parameter name, so it is kept for
        # backward compatibility.
        self.to_replace = to_replace
        self.replaced_by = replaced_by
        self.occurence = occurence
    def __repr__(self):
        # Debug-friendly representation (backward-compatible addition).
        return "Replacement(to_replace=%r, replaced_by=%r, occurence=%r)" % (
            self.to_replace, self.replaced_by, self.occurence)
| [
"martin.baeumer@gmail.com"
] | martin.baeumer@gmail.com |
3bc809c70b62cdfeff7724102903e37193402733 | 2d923980f8c3a5d450cd2435dcb96fff27e407bf | /unittests/test_search.py | 62c0e1d5efb91437da88b333d1dbbd806866b579 | [] | no_license | SHAKOTN/songs_service | 2f0cc9bfdee5138042ea82477ec0fa40e8a4c2f7 | be0a5875ee6106b35966daef4337d56ec6cf2f10 | refs/heads/master | 2021-06-24T00:27:58.876953 | 2017-08-26T13:45:25 | 2017-08-26T13:45:25 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,878 | py | import json
from unittest import TestCase
from app import app, mongo
from bson import ObjectId
class TestSearch(TestCase):
    """Integration test for the /songs/search endpoint.

    Uses the real Flask test client and the songs MongoDB collection
    configured on the app, so a reachable database is required.
    """
    def setUp(self):
        self.app = app.test_client()
        # The collection handle must be grabbed inside an app context.
        with app.app_context():
            self.songs = mongo.db.songs
        # Set by the test once a song is created so tearDown can remove it.
        self.song_id = None
    def test_search(self):
        payload = dict(
            artist='Despised Icon',
            title='Furtive Monologue',
            difficulty=10,
            level=1,
            released='2017-05-01'
        )
        response = self.app.put(
            '/songs/',
            data=payload
        )
        self.song_id = json.loads(response.get_data().decode())['_id']
        expected_song_data = payload.copy()
        expected_song_data['_id'] = self.song_id
        # Search should match on the artist, case-sensitively ...
        response_search = self.app.get(
            '/songs/search',
            query_string={
                'message': 'Despised'
            }
        )
        matched_songs = json.loads(response_search.get_data().decode())
        assert expected_song_data == matched_songs[0]
        # ... case-insensitively ...
        response_search = self.app.get(
            '/songs/search',
            query_string={
                'message': 'dESpIsEd'
            }
        )
        matched_songs = json.loads(response_search.get_data().decode())
        assert expected_song_data == matched_songs[0]
        # ... and on the title as well.
        response_search = self.app.get(
            '/songs/search',
            query_string={
                'message': 'Monologue'
            }
        )
        matched_songs = json.loads(response_search.get_data().decode())
        assert expected_song_data == matched_songs[0]
    def tearDown(self):
        # BUG FIX: if the test failed before a song was created,
        # ObjectId(None) raised inside tearDown and masked the original
        # failure; only delete by id when one was recorded.
        if self.song_id is not None:
            self.songs.delete_one({
                '_id': ObjectId(self.song_id)
            })
        self.songs.delete_many({
            'artist': 'Despised Icon'
        })
| [
"jadecoresky@gmail.com"
] | jadecoresky@gmail.com |
521615db251668aff2124d314c0e6e40af1e94cb | 04e5b6df2ee3bcfb7005d8ec91aab8e380333ac4 | /Lib/objc/_WebCore.py | 152496dd1dfac3e08030c3233439e39cab19406a | [
"MIT"
] | permissive | ColdGrub1384/Pyto | 64e2a593957fd640907f0e4698d430ea7754a73e | 7557485a733dd7e17ba0366b92794931bdb39975 | refs/heads/main | 2023-08-01T03:48:35.694832 | 2022-07-20T14:38:45 | 2022-07-20T14:38:45 | 148,944,721 | 884 | 157 | MIT | 2023-02-26T21:34:04 | 2018-09-15T22:29:07 | C | UTF-8 | Python | false | false | 6,080 | py | """
Classes from the 'WebCore' framework.
"""
try:
    from rubicon.objc import ObjCClass
except ValueError:
    # rubicon raises ValueError when no Objective-C runtime is available;
    # fall back to a stub so this module can still be imported anywhere.
    def ObjCClass(name):
        return None
def _Class(name):
try:
return ObjCClass(name)
except NameError:
return None
WebVideoFullscreenController = _Class("WebVideoFullscreenController")
WebUndefined = _Class("WebUndefined")
WebItemProviderPasteboard = _Class("WebItemProviderPasteboard")
WebItemProviderLoadResult = _Class("WebItemProviderLoadResult")
WebItemProviderRegistrationInfoList = _Class("WebItemProviderRegistrationInfoList")
WebItemProviderPromisedFileRegistrar = _Class("WebItemProviderPromisedFileRegistrar")
WebItemProviderWritableObjectRegistrar = _Class(
"WebItemProviderWritableObjectRegistrar"
)
WebItemProviderDataRegistrar = _Class("WebItemProviderDataRegistrar")
WebCoreResourceHandleAsOperationQueueDelegate = _Class(
"WebCoreResourceHandleAsOperationQueueDelegate"
)
WebCoreResourceHandleWithCredentialStorageAsOperationQueueDelegate = _Class(
"WebCoreResourceHandleWithCredentialStorageAsOperationQueueDelegate"
)
WebCoreNSURLSessionDataTask = _Class("WebCoreNSURLSessionDataTask")
WebCoreNSURLSession = _Class("WebCoreNSURLSession")
WebCoreNSURLSessionTaskMetrics = _Class("WebCoreNSURLSessionTaskMetrics")
WebCoreNSURLSessionTaskTransactionMetrics = _Class(
"WebCoreNSURLSessionTaskTransactionMetrics"
)
WebAVPlayerViewController = _Class("WebAVPlayerViewController")
WebAVPlayerViewControllerDelegate = _Class("WebAVPlayerViewControllerDelegate")
WebCoreRenderThemeBundle = _Class("WebCoreRenderThemeBundle")
WebCoreAuthenticationClientAsChallengeSender = _Class(
"WebCoreAuthenticationClientAsChallengeSender"
)
WebCookieObserverAdapter = _Class("WebCookieObserverAdapter")
WebNSHTTPCookieStorageDummyForInternalAccess = _Class(
"WebNSHTTPCookieStorageDummyForInternalAccess"
)
WebAVAssetWriterDelegate = _Class("WebAVAssetWriterDelegate")
WebDatabaseTransactionBackgroundTaskController = _Class(
"WebDatabaseTransactionBackgroundTaskController"
)
WebCoreMotionManager = _Class("WebCoreMotionManager")
WebAVMediaSelectionOption = _Class("WebAVMediaSelectionOption")
WebAVPlayerController = _Class("WebAVPlayerController")
WebValidationBubbleDelegate = _Class("WebValidationBubbleDelegate")
WebValidationBubbleTapRecognizer = _Class("WebValidationBubbleTapRecognizer")
WebPreviewConverterDelegate = _Class("WebPreviewConverterDelegate")
LegacyTileCacheTombstone = _Class("LegacyTileCacheTombstone")
WebCoreBundleFinder = _Class("WebCoreBundleFinder")
WebDisplayLinkHandler = _Class("WebDisplayLinkHandler")
WebCoreTextTrackRepresentationCocoaHelper = _Class(
"WebCoreTextTrackRepresentationCocoaHelper"
)
WebAnimationDelegate = _Class("WebAnimationDelegate")
WebCoreAudioBundleClass = _Class("WebCoreAudioBundleClass")
WebEventRegion = _Class("WebEventRegion")
WebArchiveResourceWebResourceHandler = _Class("WebArchiveResourceWebResourceHandler")
WebArchiveResourceFromNSAttributedString = _Class(
"WebArchiveResourceFromNSAttributedString"
)
WebAccessibilityObjectWrapperBase = _Class("WebAccessibilityObjectWrapperBase")
WebAccessibilityObjectWrapper = _Class("WebAccessibilityObjectWrapper")
WebAccessibilityTextMarker = _Class("WebAccessibilityTextMarker")
WebAVSampleBufferErrorListener = _Class("WebAVSampleBufferErrorListener")
WebAVStreamDataParserListener = _Class("WebAVStreamDataParserListener")
WebSpeechSynthesisWrapper = _Class("WebSpeechSynthesisWrapper")
WebMediaSessionHelper = _Class("WebMediaSessionHelper")
WebRootSampleBufferBoundsChangeListener = _Class(
"WebRootSampleBufferBoundsChangeListener"
)
WebCoreAVFPullDelegate = _Class("WebCoreAVFPullDelegate")
WebCoreAVFLoaderDelegate = _Class("WebCoreAVFLoaderDelegate")
WebCoreAVFMovieObserver = _Class("WebCoreAVFMovieObserver")
WebAVSampleBufferStatusChangeListener = _Class("WebAVSampleBufferStatusChangeListener")
WebCoreSharedBufferResourceLoaderDelegate = _Class(
"WebCoreSharedBufferResourceLoaderDelegate"
)
WebCoreAudioCaptureSourceIOSListener = _Class("WebCoreAudioCaptureSourceIOSListener")
WebCDMSessionAVContentKeySessionDelegate = _Class(
"WebCDMSessionAVContentKeySessionDelegate"
)
WebCoreFPSContentKeySessionDelegate = _Class("WebCoreFPSContentKeySessionDelegate")
WebCoreAVVideoCaptureSourceObserver = _Class("WebCoreAVVideoCaptureSourceObserver")
WebCoreAVCaptureDeviceManagerObserver = _Class("WebCoreAVCaptureDeviceManagerObserver")
WebAVAudioSessionAvailableInputsListener = _Class(
"WebAVAudioSessionAvailableInputsListener"
)
WebActionDisablingCALayerDelegate = _Class("WebActionDisablingCALayerDelegate")
WebScriptObjectPrivate = _Class("WebScriptObjectPrivate")
WebInterruptionObserverHelper = _Class("WebInterruptionObserverHelper")
WebNetworkStateObserver = _Class("WebNetworkStateObserver")
WebLowPowerModeObserver = _Class("WebLowPowerModeObserver")
WebBackgroundTaskController = _Class("WebBackgroundTaskController")
WAKResponder = _Class("WAKResponder")
WAKWindow = _Class("WAKWindow")
WAKView = _Class("WAKView")
WAKClipView = _Class("WAKClipView")
WAKScrollView = _Class("WAKScrollView")
WebViewVisualIdentificationOverlay = _Class("WebViewVisualIdentificationOverlay")
WebEvent = _Class("WebEvent")
WebScriptObject = _Class("WebScriptObject")
WebAVPlayerLayer = _Class("WebAVPlayerLayer")
LegacyTileLayer = _Class("LegacyTileLayer")
LegacyTileHostLayer = _Class("LegacyTileHostLayer")
WebSimpleLayer = _Class("WebSimpleLayer")
WebLayer = _Class("WebLayer")
WebGLLayer = _Class("WebGLLayer")
WebVideoContainerLayer = _Class("WebVideoContainerLayer")
WebTiledBackingLayer = _Class("WebTiledBackingLayer")
WebSystemBackdropLayer = _Class("WebSystemBackdropLayer")
WebDarkSystemBackdropLayer = _Class("WebDarkSystemBackdropLayer")
WebLightSystemBackdropLayer = _Class("WebLightSystemBackdropLayer")
WebResourceUsageOverlayLayer = _Class("WebResourceUsageOverlayLayer")
WebGPULayer = _Class("WebGPULayer")
WebSwapLayer = _Class("WebSwapLayer")
WebCustomNSURLError = _Class("WebCustomNSURLError")
WebCoreSharedBufferData = _Class("WebCoreSharedBufferData")
| [
"adrilabbelol@gmail.com"
] | adrilabbelol@gmail.com |
dd42d3085377f8cc1ae75a67ff9d0dd9b8e968a9 | 5b711d9d1c71eb8a7c253a17b2a7f319163d2fdc | /tests/providers/amazon/aws/operators/test_emr_containers.py | 3a7dd400d8fef5fa90ab2fa896a6fb1d7ba56364 | [
"Apache-2.0",
"BSD-3-Clause",
"MIT"
] | permissive | waleedsamy/airflow | 8289465af0ef8199bf82e0696115bb5f83f9b667 | b19ccf8ead027d9eaf53b33305be5873f2711699 | refs/heads/main | 2023-03-17T06:29:20.695168 | 2022-08-29T16:59:13 | 2022-08-29T16:59:13 | 251,581,666 | 0 | 0 | Apache-2.0 | 2020-03-31T11:21:23 | 2020-03-31T11:21:22 | null | UTF-8 | Python | false | false | 7,407 | py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import unittest
from unittest import mock
from unittest.mock import MagicMock, patch
import pytest
from airflow import configuration
from airflow.exceptions import AirflowException
from airflow.providers.amazon.aws.hooks.emr import EmrContainerHook
from airflow.providers.amazon.aws.operators.emr import EmrContainerOperator, EmrEksCreateClusterOperator
SUBMIT_JOB_SUCCESS_RETURN = {
'ResponseMetadata': {'HTTPStatusCode': 200},
'id': 'job123456',
'virtualClusterId': 'vc1234',
}
CREATE_EMR_ON_EKS_CLUSTER_RETURN = {'ResponseMetadata': {'HTTPStatusCode': 200}, 'id': 'vc1234'}
GENERATED_UUID = '800647a9-adda-4237-94e6-f542c85fa55b'
class TestEmrContainerOperator(unittest.TestCase):
    @mock.patch('airflow.providers.amazon.aws.hooks.emr.EmrContainerHook')
    def setUp(self, emr_hook_mock):
        # Build an operator against a fully mocked EmrContainerHook so no
        # AWS calls are made; poll_interval=0 keeps the polling tests fast.
        configuration.load_test_config()
        self.emr_hook_mock = emr_hook_mock
        self.emr_container = EmrContainerOperator(
            task_id='start_job',
            name='test_emr_job',
            virtual_cluster_id='vzw123456',
            execution_role_arn='arn:aws:somerole',
            release_label='6.3.0-latest',
            job_driver={},
            configuration_overrides={},
            poll_interval=0,
            client_request_token=GENERATED_UUID,
            tags={},
        )
@mock.patch.object(EmrContainerHook, 'submit_job')
@mock.patch.object(EmrContainerHook, 'check_query_status')
def test_execute_without_failure(
self,
mock_check_query_status,
mock_submit_job,
):
mock_submit_job.return_value = "jobid_123456"
mock_check_query_status.return_value = 'COMPLETED'
self.emr_container.execute(None)
mock_submit_job.assert_called_once_with(
'test_emr_job', 'arn:aws:somerole', '6.3.0-latest', {}, {}, GENERATED_UUID, {}
)
mock_check_query_status.assert_called_once_with('jobid_123456')
assert self.emr_container.release_label == '6.3.0-latest'
@mock.patch.object(
EmrContainerHook,
'check_query_status',
side_effect=['PENDING', 'PENDING', 'SUBMITTED', 'RUNNING', 'COMPLETED'],
)
def test_execute_with_polling(self, mock_check_query_status):
# Mock out the emr_client creator
emr_client_mock = MagicMock()
emr_client_mock.start_job_run.return_value = SUBMIT_JOB_SUCCESS_RETURN
emr_session_mock = MagicMock()
emr_session_mock.client.return_value = emr_client_mock
boto3_session_mock = MagicMock(return_value=emr_session_mock)
with patch('boto3.session.Session', boto3_session_mock):
assert self.emr_container.execute(None) == 'job123456'
assert mock_check_query_status.call_count == 5
@mock.patch.object(EmrContainerHook, 'submit_job')
@mock.patch.object(EmrContainerHook, 'check_query_status')
@mock.patch.object(EmrContainerHook, 'get_job_failure_reason')
def test_execute_with_failure(
self, mock_get_job_failure_reason, mock_check_query_status, mock_submit_job
):
mock_submit_job.return_value = "jobid_123456"
mock_check_query_status.return_value = 'FAILED'
mock_get_job_failure_reason.return_value = (
"CLUSTER_UNAVAILABLE - Cluster EKS eks123456 does not exist."
)
with pytest.raises(AirflowException) as ctx:
self.emr_container.execute(None)
assert 'EMR Containers job failed' in str(ctx.value)
assert 'Error: CLUSTER_UNAVAILABLE - Cluster EKS eks123456 does not exist.' in str(ctx.value)
@mock.patch.object(
EmrContainerHook,
'check_query_status',
side_effect=['PENDING', 'PENDING', 'SUBMITTED', 'RUNNING', 'COMPLETED'],
)
def test_execute_with_polling_timeout(self, mock_check_query_status):
# Mock out the emr_client creator
emr_client_mock = MagicMock()
emr_client_mock.start_job_run.return_value = SUBMIT_JOB_SUCCESS_RETURN
emr_session_mock = MagicMock()
emr_session_mock.client.return_value = emr_client_mock
boto3_session_mock = MagicMock(return_value=emr_session_mock)
timeout_container = EmrContainerOperator(
task_id='start_job',
name='test_emr_job',
virtual_cluster_id='vzw123456',
execution_role_arn='arn:aws:somerole',
release_label='6.3.0-latest',
job_driver={},
configuration_overrides={},
poll_interval=0,
max_tries=3,
)
with patch('boto3.session.Session', boto3_session_mock):
with pytest.raises(AirflowException) as ctx:
timeout_container.execute(None)
assert mock_check_query_status.call_count == 3
assert 'Final state of EMR Containers job is SUBMITTED' in str(ctx.value)
assert 'Max tries of poll status exceeded' in str(ctx.value)
class TestEmrEksCreateClusterOperator(unittest.TestCase):
@mock.patch('airflow.providers.amazon.aws.hooks.emr.EmrContainerHook')
def setUp(self, emr_hook_mock):
configuration.load_test_config()
self.emr_hook_mock = emr_hook_mock
self.emr_container = EmrEksCreateClusterOperator(
task_id='start_cluster',
virtual_cluster_name="test_virtual_cluster",
eks_cluster_name="test_eks_cluster",
eks_namespace="test_eks_namespace",
tags={},
)
@mock.patch.object(EmrContainerHook, 'create_emr_on_eks_cluster')
def test_emr_on_eks_execute_without_failure(self, mock_create_emr_on_eks_cluster):
mock_create_emr_on_eks_cluster.return_value = "vc1234"
self.emr_container.execute(None)
mock_create_emr_on_eks_cluster.assert_called_once_with(
'test_virtual_cluster', 'test_eks_cluster', 'test_eks_namespace', {}
)
assert self.emr_container.virtual_cluster_name == 'test_virtual_cluster'
@mock.patch.object(EmrContainerHook, 'create_emr_on_eks_cluster')
def test_emr_on_eks_execute_with_failure(self, mock_create_emr_on_eks_cluster):
expected_exception_msg = (
"An error occurred (ValidationException) when calling the "
"CreateVirtualCluster "
"operation:"
"A virtual cluster already exists in the given namespace"
)
mock_create_emr_on_eks_cluster.side_effect = AirflowException(expected_exception_msg)
with pytest.raises(AirflowException) as ctx:
self.emr_container.execute(None)
assert expected_exception_msg in str(ctx.value)
| [
"noreply@github.com"
] | waleedsamy.noreply@github.com |
288a820b0ef15cd603354387b1bf9118f4bbac0c | 7a6fd34ad06e73a8ef4c1f77df344b79fc3125a8 | /zeus/datasets/common/cityscapes.py | d15c14a688e58244905b9fbed1b117da01c8f46d | [
"MIT"
] | permissive | lulilulilalala/vega | 3e105b499f921f07176f0230afdbd6a45209c242 | 977054e12dd3bc1c96bbe35f18d5db4bc82d0522 | refs/heads/master | 2023-05-14T12:59:35.125859 | 2021-06-07T12:27:16 | 2021-06-07T12:27:16 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,609 | py | # -*- coding: utf-8 -*-
# Copyright (C) 2020. Huawei Technologies Co., Ltd. All rights reserved.
# This program is free software; you can redistribute it and/or modify
# it under the terms of the MIT License.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# MIT License for more details.
"""This is the class of Cityscapes dataset."""
import os.path as osp
import cv2
import numpy as np
import glob
import pickle
from .utils.dataset import Dataset
from zeus.common import ClassFactory, ClassType
from zeus.common import FileOps
from zeus.datasets.conf.city_scapes import CityscapesConfig
@ClassFactory.register(ClassType.DATASET)
class Cityscapes(Dataset):
"""Class of Cityscapes dataset, which is subclass of Dateset.
Two types of data are supported:
1) Image with extensions in 'jpg', 'JPG', 'jpeg', 'JPEG', 'png', 'PNG', 'ppm', 'PPM', 'bmp', 'BMP'
2) pkl with extensions in 'pkl', 'pt', 'pth'. Image pkl should be in format of HWC, with bgr as the channels
To use this dataset, provide either: 1) data_dir and label_dir; or 2) data_path and list_file
:param train: if the mdoe is train or false, defaults to True
:type train: bool, optional
:param cfg: the config the dataset need, defaults to None, and if the cfg is None,
the default config will be used, the default config file is a yml file with the same name of the class
:type cfg: yml, py or dict
"""
config = CityscapesConfig()
def __init__(self, **kwargs):
"""Construct the Cityscapes class."""
super(Cityscapes, self).__init__(**kwargs)
self.dataset_init()
def _init_transforms(self):
"""Initialize transforms."""
result = list()
if "Rescale" in self.args:
import logging
logging.info(str(dict(**self.args.Rescale)))
result.append(self._get_cls("Rescale_pair")(**self.args.Rescale))
if "RandomMirror" in self.args and self.args.RandomMirror:
result.append(self._get_cls("RandomHorizontalFlip_pair")())
if "RandomColor" in self.args:
result.append(self._get_cls("RandomColor_pair")(**self.args.RandomColor))
if "RandomGaussianBlur" in self.args:
result.append(self._get_cls("RandomGaussianBlur_pair")(**self.args.RandomGaussianBlur))
if "RandomRotation" in self.args:
result.append(self._get_cls("RandomRotate_pair")(**self.args.RandomRotation))
if "Normalization" in self.args:
result.append(self._get_cls("Normalize_pair")(**self.args.Normalization))
if "RandomCrop" in self.args:
result.append(self._get_cls("RandomCrop_pair")(**self.args.RandomCrop))
return result
def _get_cls(self, _name):
return ClassFactory.get_cls(ClassType.TRANSFORM, _name)
def dataset_init(self):
"""Construct method.
If both data_dir and label_dir are provided, then use data_dir and label_dir
Otherwise use data_path and list_file.
"""
if "data_dir" in self.args and "label_dir" in self.args:
self.args.data_dir = FileOps.download_dataset(self.args.data_dir)
self.args.label_dir = FileOps.download_dataset(self.args.label_dir)
self.data_files = sorted(glob.glob(osp.join(self.args.data_dir, "*")))
self.label_files = sorted(glob.glob(osp.join(self.args.label_dir, "*")))
else:
if "data_path" not in self.args or "list_file" not in self.args:
raise Exception("You must provide a data_path and a list_file!")
self.args.data_path = FileOps.download_dataset(self.args.data_path)
with open(osp.join(self.args.data_path, self.args.list_file)) as f:
lines = f.readlines()
self.data_files = [None] * len(lines)
self.label_files = [None] * len(lines)
for i, line in enumerate(lines):
data_file_name, label_file_name = line.strip().split()
self.data_files[i] = osp.join(self.args.data_path, data_file_name)
self.label_files[i] = osp.join(self.args.data_path, label_file_name)
datatype = self._get_datatype()
if datatype == "image":
self.read_fn = self._read_item_image
else:
self.read_fn = self._read_item_pickle
def __len__(self):
"""Get the length of the dataset.
:return: the length of the dataset
:rtype: int
"""
return len(self.data_files)
def __getitem__(self, index):
"""Get an item of the dataset according to the index.
:param index: index
:type index: int
:return: an item of the dataset according to the index
:rtype: dict, {'data': xx, 'mask': xx, 'name': name}
"""
image, label = self.read_fn(index)
# image_name = self.data_files[index].split("/")[-1].split(".")[0]
image, label = self.transforms(image, label)
image = np.transpose(image, [2, 0, 1]).astype(np.float32)
mask = label.astype(np.int64)
return image, mask
@staticmethod
def _get_datatype_files(file_paths):
"""Check file extensions in file_paths to decide whether they are images or pkl.
:param file_paths: a list of file names
:type file_paths: list of str
:return image, pkl or None according to the type of files
:rtype: str
"""
IMG_EXTENSIONS = {'jpg', 'JPG', 'jpeg', 'JPEG',
'png', 'PNG', 'ppm', 'PPM', 'bmp', 'BMP'}
PKL_EXTENSIONS = {'pkl', 'pt', 'pth'}
file_extensions = set(data_file.split('.')[-1] for data_file in file_paths)
if file_extensions.issubset(IMG_EXTENSIONS):
return "image"
elif file_extensions.issubset(PKL_EXTENSIONS):
return "pkl"
else:
raise Exception("Invalid file extension")
def _get_datatype(self):
"""Check the datatype of all data.
:return image, pkl or None
:rtype: str
"""
type_data = self._get_datatype_files(self.data_files)
type_labels = self._get_datatype_files(self.label_files)
if type_data == type_labels:
return type_data
else:
raise Exception("Images and masks must be both image or pkl!")
def _read_item_image(self, index):
"""Read image and label in "image" format.
:param index: index
:type index: int
:return: image in np.array, HWC, bgr; label in np.array, HW
:rtype: tuple of np.array
"""
image = cv2.imread(self.data_files[index], cv2.IMREAD_COLOR)
label = cv2.imread(self.label_files[index], cv2.IMREAD_GRAYSCALE)
return image, label
def _read_item_pickle(self, index):
"""Read image and label in "pkl" format.
:param index: index
:type index: int
:return: image in np.array, HWC, bgr; label in np.array, HW
:rtype: tuple of np.array
"""
with open(self.data_files[index], "rb") as file:
image = pickle.load(file)
with open(self.label_files[index], "rb") as file:
label = pickle.load(file)
return image, label
@property
def input_size(self):
"""Input size of Cityspace.
:return: the input size
:rtype: int
"""
_shape = self.data.shape
return _shape[1]
| [
"zhangjiajin@huawei.com"
] | zhangjiajin@huawei.com |
940ebcb6548fcf7493b3b0290cbd312e45cf65fd | ac6f3ab88c67b09e187c92652e29fabd5bf5ffd5 | /code/15_solution.py | 0d5e9f19093d8753b86315eb89915727f382747f | [] | no_license | YanjiaSun/leetcode-3 | f3c87ef6961220c39d48094ef65db921f34d070f | 59d323161dba8d250d6dd7f31c40731845356f21 | refs/heads/master | 2022-12-05T17:15:55.601506 | 2020-08-03T11:38:39 | 2020-08-03T11:38:39 | 284,863,415 | 1 | 0 | null | 2020-08-04T03:05:05 | 2020-08-04T03:05:04 | null | UTF-8 | Python | false | false | 920 | py | class Solution:
def threeSum(self, nums: List[int]) -> List[List[int]]:
if not nums or len(nums) < 3: return []
n = len(nums)
nums.sort() # O(NlogN)
triplets = []
def find_two_sum(j, k, target):
while j < k:
b, c = nums[j], nums[k]
if b + c > target:
while j < k and nums[k] == c: k -= 1
elif b + c < target:
while j < k and nums[j] == b: j += 1
else:
triplets.append([-target, b, c])
while j < k and nums[k] == c: k -= 1
while j < k and nums[j] == b: j += 1
i = 0
while i < n - 2 and nums[i] <= 0:
a, target = nums[i], -nums[i]
find_two_sum(i+1, n-1, target)
while i < n - 2 and nums[i] == a: i += 1
return triplets | [
"ryanzjlib@gmail.com"
] | ryanzjlib@gmail.com |
6c9f386ce93fb1dc4fc4c65979aa9eb7fa14a8e5 | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p03251/s200090667.py | 58f7f34828a49a6e5e378cbfc2fff95c05950530 | [] | no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 139 | py | n,m,X,Y=map(int,input().split())
x=max(X,max(map(int,input().split())))
y=min(Y,min(map(int,input().split())))
print(["War","No War"][x<y]) | [
"66529651+Aastha2104@users.noreply.github.com"
] | 66529651+Aastha2104@users.noreply.github.com |
4f589e682dfc192a5f76f0736435484407adf7b0 | 6081557aa021c9e3080e2010b4396154c6a15fd9 | /app/cli.py | cd3c06b2d883d3c1352ded88f49b6b7e0bf3e304 | [] | no_license | boberstarosta/flask-tutorial | 7dfe700db143bb003a3a0c934751c78cf623815b | 10dcacc4c0dcbf6a1607dc0ae96ecb5cbb45423f | refs/heads/master | 2022-12-11T09:24:43.123961 | 2018-11-25T11:23:12 | 2018-11-25T11:23:12 | 158,431,045 | 0 | 0 | null | 2022-12-08T01:18:40 | 2018-11-20T18:01:42 | Python | UTF-8 | Python | false | false | 1,177 | py | import os
import click
def register(app):
@app.cli.group()
def translate():
"""Translation and localization commands."""
pass
@translate.command()
def update():
"""Update all languages."""
if os.system('pybabel extract -F babel.cfg -k _l -o messages.pot .'):
raise RuntimeError('extract command failed')
if os.system('pybabel update -i messages.pot -d app/translations'):
raise RuntimeError('update command failed')
os.remove('messages.pot')
@translate.command()
def compile():
"""Compile all languages."""
if os.system('pybabel compile -d app/translations'):
raise RuntimeError('compile command failed')
@translate.command()
@click.argument('lang')
def init(lang):
"""Initialize a new language."""
if os.system('pybabel extract -F babel.cfg -k _l -o messages.pot .'):
raise RuntimeError('extract command failed')
if os.system(
'pybabel init -i messages.pot -d app/translations -l ' + lang):
raise RuntimeError('init command failed')
os.remove('messages.pot')
| [
"boberstarosta@gmail.com"
] | boberstarosta@gmail.com |
89ac6f3943f40a5c1a885e3e939a3a50779f3310 | 566ac3f150d5cec30e0926f4c369296f54b93503 | /src/contaminate.py | 640f9403909b876426a4ee6f7a22690699b56646 | [
"MIT"
] | permissive | mehdirezaie/SYSNet | 1e325bbedbe3e69b315028afd22a351bd2ee6d01 | 8da75f54177e460e6e446bfc2207dd82a76ac4cc | refs/heads/master | 2021-11-30T04:48:51.118259 | 2021-11-18T16:44:56 | 2021-11-18T16:44:56 | 171,679,858 | 6 | 1 | MIT | 2019-08-16T20:56:31 | 2019-02-20T13:38:55 | Jupyter Notebook | UTF-8 | Python | false | false | 3,388 | py |
import fitsio as ft
import numpy as np
import healpy as hp
import os
import sys
class mock(object):
def __init__(self, featsfile, paramsfile, func='lin', sf=1207432.7901):
# read inputs
feats = ft.read(featsfile)
params = np.load(paramsfile).item()
# attrs
self.hpix = feats['hpind']
self.feats = feats['features']
self.axfit = params['axfit']
self.xstats = params['xstats']
#print('Will scale the covariance by %.4f'%sf)
bfp_raw = params['params'][func]
self.bfp = (bfp_raw[0], sf*bfp_raw[1])
#
# prepare
self.n = self.feats.shape[0]
x = (self.feats - self.xstats[0])/self.xstats[1] # select axis
x_scaled = x[:, self.axfit]
if func == 'lin':
x_vector = np.column_stack([np.ones(self.n), x_scaled])
elif func == 'quad':
x_vector = np.column_stack([np.ones(self.n), x_scaled, x_scaled*x_scaled])
else:
exit(f"func:{func} is not defined")
#
#
self.x_vector = x_vector
def simulate(self, kind='truth', seed=12345):
if kind not in ['fixed', 'random', 'truth']:
exit(f"kind : {kind} is not defined")
np.random.seed(seed) # set the seed
if kind == 'truth':
thetas = self.bfp[0]
elif kind == 'fixed':
thetas = np.random.multivariate_normal(*self.bfp)
elif kind == 'random':
thetas = np.random.multivariate_normal(*self.bfp, size=self.n)
else:
exit(f"kind : {kind} is not defined")
tx = (thetas * self.x_vector)
self.txs = np.sum(tx, axis=1)
def project(self, hpin, tag):
hpmin = hp.read_map(hpin, verbose=False)
fpath = '/'.join((hpin.split('/')[:-1] + [tag]))
mname = '_'.join((tag, 'mask',hpin.split('/')[-1]))
fname = '_'.join((tag, hpin.split('/')[-1]))
if not os.path.exists(fpath):
os.makedirs(fpath)
ngalcont = self.txs * hpmin[self.hpix]
fou = '/'.join((fpath, fname))
mou = '/'.join((fpath, mname))
ngal_neg = ngalcont < 0.0
hpix_neg = self.hpix[ngal_neg]
hpix_noneg = self.hpix[~ngal_neg]
ngal_noneg = ngalcont[~ngal_neg]
#
#
ngalm = np.zeros_like(hpmin)
ngalm[hpix_noneg] = np.random.poisson(ngal_noneg)
#
#
negm = np.zeros_like(hpmin)
negm[hpix_neg] = 1.0
hp.write_map(mou, negm, fits_IDL=False, overwrite=True, dtype=np.float64)
hp.write_map(fou, ngalm, fits_IDL=False, overwrite=True, dtype=np.float64)
print('%s is written'%fou)
if __name__ == '__main__':
np.random.seed(123456) # set the global seed
seeds = np.random.randint(0, 4294967295, size=1000)
feats = sys.argv[1]
regp = sys.argv[2]
files = sys.argv[3:]
print('feats', feats)
print('regp', regp)
print('files[:2]', files[:2])
for i,mock_i in enumerate(files):
mymock = mock(feats,
regp,
func='lin', sf=23765.2929*0.05) # 0.1XtotalfracXvarngal = 2376.52929
mymock.simulate(kind='random', seed=seeds[i])
mymock.project(mock_i, 'cp2p')
| [
"medirz90@icloud.com"
] | medirz90@icloud.com |
16e67ef722cc276cdfc9de755d0783c8fa00e985 | 881a30f13880944d903fb304af0e4bdebb9bd9fb | /RL/algorithms/safe_sac.py | 66667ae9133c6e465c4146271e7379122abef6d2 | [] | no_license | bhatiaabhinav/RL | 5d32b4502e1f15a9d6b8b1ba5627b7c0dd1b8202 | 2bbfa05d5b56b1ea65b65f27b80c243d0888e6d8 | refs/heads/master | 2022-12-11T04:45:18.832027 | 2021-10-13T00:22:15 | 2021-10-13T00:22:15 | 152,107,235 | 1 | 0 | null | 2022-12-08T06:59:28 | 2018-10-08T15:55:28 | Python | UTF-8 | Python | false | false | 6,000 | py | import gym
import safety_gym # noqa
import RL
import RL.envs
from RL.agents import BasicStatsRecordingAgent
from RL.agents import (EnvRenderingAgent, ExperienceBufferAgent, # noqa
ForceExploitControlAgent, MatplotlibPlotAgent,
ModelLoaderSaverAgent, ParamsCopyAgent, PygletLoopAgent,
RandomPlayAgent, RewardScalingAgent, SafeSACActAgent,
SafeSACTrainAgent, SeedingAgent, StatsLoggingAgent,
TensorboardAgent, TensorFlowAgent)
from RL.common.wrappers import wrap_standard
from RL.contexts import SACContext
c = SACContext()
c.set_env(wrap_standard(gym.make(c.env_id), c))
r = RL.Runner(c, "runner")
# basics:
r.register_agent(TensorFlowAgent(c, "TensorFlowAgent"))
r.register_agent(SeedingAgent(c, "SeedingAgent"))
r.register_agent(RewardScalingAgent(c, "RewardScalingAgent"))
# core algo
r.register_agent(ForceExploitControlAgent(c, "ExploitControlAgent"))
r.register_agent(RandomPlayAgent(c, "MinimumExperienceAgent", play_for_steps=c.minimum_experience))
safe_sac_act_agent = r.register_agent(SafeSACActAgent(c, "SafeSACActAgent"))
r.register_agent(ModelLoaderSaverAgent(c, "LoaderSaverAgent", safe_sac_act_agent.model.get_vars()))
if not c.eval_mode:
exp_buff_agent = r.register_agent(ExperienceBufferAgent(c, "ExperienceBufferAgent"))
safe_sac_train_agent = r.register_agent(SafeSACTrainAgent(c, "SafeSACTrainAgent", safe_sac_act_agent, exp_buff_agent))
r.register_agent(ParamsCopyAgent(c, "TargetNetUpdateAgent", safe_sac_act_agent.model.get_vars('valuefn0', 'valuefn1', 'running_stats'), safe_sac_train_agent.target_model.get_vars('valuefn0', 'valuefn1', 'running_stats'), c.target_network_update_every, c.target_network_update_tau))
# rendering and visualizations:
if c.render:
r.register_agent(EnvRenderingAgent(c, "RenderingAgent"))
r.register_agent(PygletLoopAgent(c, "PygletLoopAgent"))
# stats record:
r.register_agent(BasicStatsRecordingAgent(c, "StatsRecordingAgent"))
# stats log:
keys = list(filter(lambda k: k.startswith('Env-0'), RL.stats.stats_dict.keys()))
misc_keys = ['ValueFn Loss', "Safety ValueFn Loss", 'Critic Loss', "Safety Critic Loss", 'Actor Loss', 'Total Updates', "Average Actor Critic Q", "Average Actor Critic Safety Q", "Average Action LogStd", "Average Action LogPi"]
r.register_agent(StatsLoggingAgent(c, "Env-0-StatsLoggingAgent", keys + misc_keys, poll_every_episode=1))
# stats plot:
r.register_agent(TensorboardAgent(c, "Env-0-TensorboardAgent", keys, 'Env-0 Total Frames'))
r.register_agent(TensorboardAgent(c, 'Misc-TensorboardAgent', misc_keys, 'Env-0 Total Frames', log_every_episode=-1, log_every_step=100))
# r.register_agent(MatplotlibPlotAgent(c, 'RPE', [(RL.stats.get('Env-0 Episode ID'), RL.stats.get('Env-0 Episode Reward'))], ['b-'], xlabel='Episode ID', ylabel='Reward', legend='RPE', auto_save=True, smoothing=c.matplotlib_smoothing))
# r.register_agent(MatplotlibPlotAgent(c, 'CPE', [(RL.stats.get('Env-0 Episode ID'), RL.stats.get('Env-0 Episode Cost'))], ['b-'], xlabel='Episode ID', ylabel='Cost', legend='CPE', auto_save=True, smoothing=c.matplotlib_smoothing))
r.run()
"""
python -m RL.algorithms.safe_sac --env_id=MyPointCircleFinite-v0 --experiment_name=safesac_ln_vs_cpo --num_steps_to_run=150000 --normalize_observations=False --alpha=0.2 --actor_learning_rate=0.0001 --learning_rate=0.001 --target_network_update_tau=0.005 --exploit_every=8 --minimum_experience=10000 --logstd_min=-20 --logstd_max=2 --num_critics=2 --init_scale=None --l2_reg=0 --train_every=1 --experience_buffer_length=1000000 --minibatch_size=256 --hidden_layers=[64,32] --gamma=0.995 --cost_gamma=0.995 --layer_norm=True --cost_threshold=5 --beta=0.2 --safe_sac_penalty_max_grad=1000 --clip_gradients=1 --ignore_done_on_timelimit=False --reward_scaling=2 --cost_scaling=2 --record_returns=True
"""
"""
python -m RL.algorithms.safe_sac --env_id=MyPointCircleFinite-v0 --experiment_name=safesac --num_steps_to_run=150000 --normalize_observations=False --alpha=0.2 --actor_learning_rate=0.0001 --learning_rate=0.001 --target_network_update_tau=0.005 --exploit_every=8 --minimum_experience=10000 --logstd_min=-20 --logstd_max=2 --num_critics=2 --init_scale=None --l2_reg=0 --train_every=1 --experience_buffer_length=1000000 --minibatch_size=100 --hidden_layers=[256,256] --gamma=0.99 --cost_gamma=1 --layer_norm=False --cost_threshold=5 --beta=0.2 --safe_sac_penalty_max_grad=1000 --clip_gradients=1 --ignore_done_on_timelimit=False --reward_scaling=2 --cost_scaling=2 --record_returns=False
"""
"""
python -m RL.algorithms.safe_sac --env_id=MyPointCircleFinite-v0 --experiment_name=safesac --num_steps_to_run=150000 --normalize_observations=False --alpha=0.2 --actor_learning_rate=0.0001 --learning_rate=0.001 --target_network_update_tau=0.005 --exploit_every=8 --minimum_experience=10000 --logstd_min=-20 --logstd_max=2 --num_critics=2 --init_scale=None --l2_reg=0 --train_every=1 --experience_buffer_length=1000000 --minibatch_size=100 --hidden_layers=[256,256] --gamma=0.99 --cost_gamma=1 --layer_norm=False --cost_threshold=5 --beta=0.2 --safe_sac_penalty_max_grad=1000 --clip_gradients=1 --ignore_done_on_timelimit=False --reward_scaling=2 --cost_scaling=2 --record_returns=False
"""
"""
python -m RL.algorithms.safe_sac --env_id=Safexp-PointGoal1-v0 --experiment_name=safesac_R20C0.4 --num_steps_to_run=10000000 --normalize_observations=False --alpha=0.2 --actor_learning_rate=0.0001 --learning_rate=0.001 --target_network_update_tau=0.005 --exploit_every=8 --minimum_experience=10000 --logstd_min=-20 --logstd_max=2 --num_critics=2 --init_scale=None --l2_reg=0 --train_every=1 --experience_buffer_length=1000000 --minibatch_size=100 --hidden_layers=[256,256] --gamma=0.99 --cost_gamma=1 --layer_norm=False --cost_threshold=25 --beta=0.2 --safe_sac_penalty_max_grad=1000 --clip_gradients=1 --ignore_done_on_timelimit=False --reward_scaling=20 --cost_scaling=0.4 --record_returns=False --adam_epsilon=0.0001
"""
| [
"bhatiaabhinav93@gmail.com"
] | bhatiaabhinav93@gmail.com |
1e977918ab91ec15b37a6d3250f6f39b55d3b970 | d3cb2f94c30a21d766feb408d626f20d7b574762 | /proxy/proxy.py | da4111a674af7891a1ab2b56d0b1e85e6d562fd8 | [] | no_license | sonlia/rqspider | 3942404f7336ad24b0858f58a9b52c1e2d3648ab | e5ae5f1ff5bfa0cf51d7d3c90bcf81fda399945d | refs/heads/master | 2022-12-30T19:41:24.225104 | 2020-10-22T04:03:19 | 2020-10-22T04:03:19 | 302,540,374 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,431 | py | #-*- coding:utf-8 -*-
#!/usr/bin/python
from __future__ import absolute_import
import sys
from getproxy import GetProxy
import datetime ,requests
import time
import uuid
import hashlib
import random ,numpy
import random
from tinydb import TinyDB, Query
from utils.log import log as _log
log = _log(__name__)
class grab_proxy(GetProxy):
def _validate_proxy(self, proxy, scheme='http'):
country = proxy.get('country')
host = proxy.get('host')
port = proxy.get('port')
_proxy_hash = '%s://%s:%s' % (scheme, host, port)
proxy_hash = hashlib.sha1(_proxy_hash).hexdigest()
if proxy_hash in self.proxies_hash:
return
self.proxies_hash[proxy_hash] = True
request_proxies = {
scheme: "%s:%s" % (host, port)
}
time_list = []
num =1
for i in range(num):
request_begin = time.time()
try:
response_json = requests.get(
"%s://httpbin.org/get?show_env=1&cur=%s" % (scheme, request_begin),
proxies=request_proxies,
timeout=5
).json()
except:
break
if str(request_begin) != response_json.get('args', {}).get('cur', ''):
break
request_end = time.time()
_time = round(request_end - request_begin, 4)
time_list.append(_time)
time.sleep(random.uniform(1, 5))
if len(time_list):
na = numpy.array(time_list)
var = na.var()
mean = na.mean()
succes_ratio = float(num)/len(time_list)
anonymity = self._check_proxy_anonymity(response_json)
country = country or self.geoip_reader.country(host).country.iso_code
export_address = self._check_export_address(response_json)
return {
"id": str(uuid.uuid1()),
"host": host,
"port": port,
"anonymity": anonymity,
"country": country,
"response_time": round(mean,4),
"var" : var,
"succes_ratio":succes_ratio,
"hash":proxy_hash,
"update_time": str(datetime.datetime.now()),
"flag":0,
"type":scheme,
}
def save_proxies(self):
d = db()
d.insert_many(data =self.valid_proxies,table="proxy_ip")
# for i in self.valid_proxies:
def data(self):
return self.valid_proxies
# d.insert(i)
def start(self):
self.init()
self.load_input_proxies()
self.validate_input_proxies()
self.load_plugins()
self.grab_web_proxies()
self.validate_web_proxies()
# self.save_proxies()
class db:
#ip 数量 低于50 启动爬虫
#ip 数量大于20 的时候 每次使用 flag 增加1 超过2就删除ip
#ip 数量低于 20 的时候 则更新所有 flag 非 0 的 ip
def __init__(self,path="/home/ubuntu/workspace/spider/proxy/db.json"):
self.db = TinyDB(path)
def table(self,table,cache_size=None):
return self.db.table(table,cache_size=None)
def insert(self,table,data):
self.table(table).insert(data)
def get_all(self,table="proxy_ip"):
return self.table(table).all()
def insert_many(self,data,table="proxy_ip"):
self.table(table).insert_multiple(data)
def get_ip(self,table="proxy_ip"):
Qu = Query()
da = self.table(table).search(Qu.flag==0)
all_count = self.get_all()
log.debug("total ip count : %s " % len(all_count))
if len(all_count)<50:
log.debug("ip count is not engough")
g =grab_proxy() #此处应该更新ip池
g.start()
time.sleep(10)
print "sleep 10s restart crawl"
self.get_ip(table)
if len(da)>20:
log.debug("left useful ip : %s " % len(da))
i = random.choice(da)
proxy_hash = i["hash"]
self.add_flag(proxy_hash)
log.debug("get ip: %s " % i["host"])
return i
else :
log.debug("left %s " % len(da))
self.update_flag()
def get_random_list(self,table="proxy_ip",num=10):
#从 未爬取的page 列表里面随机选取num
Qu = Query()
da = self.table(table).search(Qu.flag!=0)
a = random.sample(da,num)
return a
def grab_list(self,num):
pass
def remove_ip(self,ip,table="proxy_ip"):
pass
def add_flag(self,proxy_hash,table = "proxy_ip"):
Qu = Query()
da = self.table(table).search(Qu.hash==proxy_hash)[0]
num = da.get("flag")
if num==2:
a = self.table(table).get(Qu.hash==proxy_hash)
# print dir(a)
e = a.eid
log.debug("removing ip")
self.table(table).remove(eids=[e])
else :
self.table(table).update({"flag":num+1},Qu.hash==proxy_hash)
def update_flag(self,table="proxy_ip"):
Qu = Query()
log.debug("update flag")
da = self.table(table).update({"flag":0},Qu.flag!=0)
def start():
grab = grab_proxy()
grab.start()
if __name__ == "__main__":
start() | [
"root@localhost.localdomain"
] | root@localhost.localdomain |
2de32e8e7c167a0f3bd6c91965e31cfc2d4d382a | 19af2e1dfe389afc71e26bebaadf7008251e04e2 | /android_test/tensorflow-master/tensorflow/python/keras/layers/recurrent_v2.py | 903de3d1a0916b6fcb0df43e09de0b58d66845c1 | [
"Apache-2.0"
] | permissive | simi48/Ef-If_Jassen | 6c4975216bb4ae4514fe94a8395a5da5c8e8fb2d | 6076839492bff591cf9b457e949999e9167903e6 | refs/heads/master | 2022-10-15T15:36:35.023506 | 2020-12-02T10:38:13 | 2020-12-02T10:38:13 | 173,759,247 | 4 | 0 | Apache-2.0 | 2022-10-04T23:51:35 | 2019-03-04T14:22:28 | PureBasic | UTF-8 | Python | false | false | 50,842 | py | # Copyright 2019 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Recurrent layers for TF 2.0.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import uuid
from tensorflow.python.eager import context
from tensorflow.python.eager import function
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import device
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.keras import backend as K
from tensorflow.python.keras.engine.input_spec import InputSpec
from tensorflow.python.keras.layers import recurrent
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import gen_cudnn_rnn_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import state_ops
from tensorflow.python.util.tf_export import keras_export
# String constants used as function-attribute keys by the Defun-based unified
# backend for LSTM and GRU. Tagging the standard (CPU) and CuDNN (GPU) defun
# implementations with the same `api_implements` value lets the TensorFlow
# grappler pass swap one for the other at runtime based on device placement;
# `api_preferred_device` records which device each variant is optimized for.
_DEFUN_API_NAME_ATTRIBUTE = 'api_implements'
_DEFUN_DEVICE_ATTRIBUTE = 'api_preferred_device'
# Device-name values for the `api_preferred_device` attribute above.
_CPU_DEVICE_NAME = 'CPU'
_GPU_DEVICE_NAME = 'GPU'

# Integer codes returned by the defun backend functions to report which
# implementation actually ran. The CPU and GPU implementations are
# mathematically identical, so this out-of-band signal is the only way to
# tell them apart; it is used in tests to verify that backend swapping
# selected the expected kernel.
_RUNTIME_UNKNOWN = 0
_RUNTIME_CPU = 1
_RUNTIME_GPU = 2
@keras_export('keras.layers.GRUCell', v1=[])
class GRUCell(recurrent.GRUCell):
  """Cell class for the GRU layer.

  This is the TF 2.0 flavor of the GRU cell: it forwards every constructor
  argument verbatim to `recurrent.GRUCell`, and only pins the 2.0 defaults
  (`recurrent_activation='sigmoid'`, `reset_after=True`, `implementation=2`).

  Arguments:
    units: Positive integer, dimensionality of the output space.
    activation: Activation function for the output. Default: hyperbolic
      tangent (`tanh`). Passing `None` means linear activation (`a(x) = x`).
    recurrent_activation: Activation function for the recurrent step.
      Default: sigmoid (`sigmoid`). Passing `None` means linear activation.
    use_bias: Boolean, whether the layer uses a bias vector.
    kernel_initializer: Initializer for the `kernel` weights matrix, used for
      the linear transformation of the inputs.
    recurrent_initializer: Initializer for the `recurrent_kernel` weights
      matrix, used for the linear transformation of the recurrent state.
    bias_initializer: Initializer for the bias vector.
    kernel_regularizer: Regularizer function applied to the `kernel` weights
      matrix.
    recurrent_regularizer: Regularizer function applied to the
      `recurrent_kernel` weights matrix.
    bias_regularizer: Regularizer function applied to the bias vector.
    kernel_constraint: Constraint function applied to the `kernel` weights
      matrix.
    recurrent_constraint: Constraint function applied to the
      `recurrent_kernel` weights matrix.
    bias_constraint: Constraint function applied to the bias vector.
    dropout: Float between 0 and 1. Fraction of the units to drop for the
      linear transformation of the inputs.
    recurrent_dropout: Float between 0 and 1. Fraction of the units to drop
      for the linear transformation of the recurrent state.
    implementation: Implementation mode, either 1 or 2. Mode 1 structures its
      operations as a larger number of smaller dot products and additions,
      whereas mode 2 (default) batches them into fewer, larger operations.
      These modes have different performance profiles on different hardware
      and for different applications.
    reset_after: GRU convention (whether to apply the reset gate after or
      before the matrix multiplication). False = "before",
      True = "after" (default and CuDNN compatible).

  Call arguments:
    inputs: A 2D tensor.
    states: List of state tensors corresponding to the previous timestep.
    training: Python boolean indicating whether the layer should behave in
      training mode or in inference mode. Only relevant when `dropout` or
      `recurrent_dropout` is used.
  """

  def __init__(self,
               units,
               activation='tanh',
               recurrent_activation='sigmoid',
               use_bias=True,
               kernel_initializer='glorot_uniform',
               recurrent_initializer='orthogonal',
               bias_initializer='zeros',
               kernel_regularizer=None,
               recurrent_regularizer=None,
               bias_regularizer=None,
               kernel_constraint=None,
               recurrent_constraint=None,
               bias_constraint=None,
               dropout=0.,
               recurrent_dropout=0.,
               implementation=2,
               reset_after=True,
               **kwargs):
    # Everything is delegated to the base cell; this class only exists to
    # change the default argument values for TF 2.0.
    super(GRUCell, self).__init__(
        units,
        activation=activation,
        recurrent_activation=recurrent_activation,
        use_bias=use_bias,
        dropout=dropout,
        recurrent_dropout=recurrent_dropout,
        implementation=implementation,
        reset_after=reset_after,
        kernel_initializer=kernel_initializer,
        recurrent_initializer=recurrent_initializer,
        bias_initializer=bias_initializer,
        kernel_regularizer=kernel_regularizer,
        recurrent_regularizer=recurrent_regularizer,
        bias_regularizer=bias_regularizer,
        kernel_constraint=kernel_constraint,
        recurrent_constraint=recurrent_constraint,
        bias_constraint=bias_constraint,
        **kwargs)
@keras_export('keras.layers.GRU', v1=[])
class GRU(recurrent.DropoutRNNCellMixin, recurrent.GRU):
  """Gated Recurrent Unit - Cho et al. 2014.

  Based on available runtime hardware and constraints, this layer
  will choose different implementations (cuDNN-based or pure-TensorFlow)
  to maximize the performance. If a GPU is available and all
  the arguments to the layer meet the requirement of the CuDNN kernel
  (see below for details), the layer will use a fast cuDNN implementation.

  The requirements to use the cuDNN implementation are:
  1. `activation` == 'tanh'
  2. `recurrent_activation` == 'sigmoid'
  3. `recurrent_dropout` == 0
  4. `unroll` is False
  5. `use_bias` is True
  6. `reset_after` is True
  7. Inputs are not masked or strictly right padded.

  There are two variants of the GRU implementation. The default one is based on
  [v3](https://arxiv.org/abs/1406.1078v3) and has reset gate applied to hidden
  state before matrix multiplication. The other one is based on
  [original](https://arxiv.org/abs/1406.1078v1) and has the order reversed.

  The second variant is compatible with CuDNNGRU (GPU-only) and allows
  inference on CPU. Thus it has separate biases for `kernel` and
  `recurrent_kernel`. To use this variant, set `'reset_after'=True` and
  `recurrent_activation='sigmoid'`.

  Arguments:
    units: Positive integer, dimensionality of the output space.
    activation: Activation function to use.
      Default: hyperbolic tangent (`tanh`).
      If you pass `None`, no activation is applied
      (ie. "linear" activation: `a(x) = x`).
    recurrent_activation: Activation function to use
      for the recurrent step.
      Default: sigmoid (`sigmoid`).
      If you pass `None`, no activation is applied
      (ie. "linear" activation: `a(x) = x`).
    use_bias: Boolean, whether the layer uses a bias vector.
    kernel_initializer: Initializer for the `kernel` weights matrix,
      used for the linear transformation of the inputs.
    recurrent_initializer: Initializer for the `recurrent_kernel`
      weights matrix,
      used for the linear transformation of the recurrent state.
    bias_initializer: Initializer for the bias vector.
    kernel_regularizer: Regularizer function applied to
      the `kernel` weights matrix.
    recurrent_regularizer: Regularizer function applied to
      the `recurrent_kernel` weights matrix.
    bias_regularizer: Regularizer function applied to the bias vector.
    activity_regularizer: Regularizer function applied to
      the output of the layer (its "activation").
    kernel_constraint: Constraint function applied to
      the `kernel` weights matrix.
    recurrent_constraint: Constraint function applied to
      the `recurrent_kernel` weights matrix.
    bias_constraint: Constraint function applied to the bias vector.
    dropout: Float between 0 and 1.
      Fraction of the units to drop for the linear transformation of the inputs.
    recurrent_dropout: Float between 0 and 1.
      Fraction of the units to drop for
      the linear transformation of the recurrent state.
    implementation: Implementation mode, either 1 or 2.
      Mode 1 will structure its operations as a larger number of
      smaller dot products and additions, whereas mode 2 will
      batch them into fewer, larger operations. These modes will
      have different performance profiles on different hardware and
      for different applications.
    return_sequences: Boolean. Whether to return the last output
      in the output sequence, or the full sequence.
    return_state: Boolean. Whether to return the last state
      in addition to the output.
    go_backwards: Boolean (default False).
      If True, process the input sequence backwards and return the
      reversed sequence.
    stateful: Boolean (default False). If True, the last state
      for each sample at index i in a batch will be used as initial
      state for the sample of index i in the following batch.
    unroll: Boolean (default False).
      If True, the network will be unrolled,
      else a symbolic loop will be used.
      Unrolling can speed-up a RNN,
      although it tends to be more memory-intensive.
      Unrolling is only suitable for short sequences.
    reset_after: GRU convention (whether to apply reset gate after or
      before matrix multiplication). False = "before",
      True = "after" (default and CuDNN compatible).

  Call arguments:
    inputs: A 3D tensor.
    mask: Binary tensor of shape `(samples, timesteps)` indicating whether
      a given timestep should be masked.
    training: Python boolean indicating whether the layer should behave in
      training mode or in inference mode. This argument is passed to the cell
      when calling it. This is only relevant if `dropout` or
      `recurrent_dropout` is used.
    initial_state: List of initial state tensors to be passed to the first
      call of the cell.
  """

  def __init__(self,
               units,
               activation='tanh',
               recurrent_activation='sigmoid',
               use_bias=True,
               kernel_initializer='glorot_uniform',
               recurrent_initializer='orthogonal',
               bias_initializer='zeros',
               kernel_regularizer=None,
               recurrent_regularizer=None,
               bias_regularizer=None,
               activity_regularizer=None,
               kernel_constraint=None,
               recurrent_constraint=None,
               bias_constraint=None,
               dropout=0.,
               recurrent_dropout=0.,
               implementation=2,
               return_sequences=False,
               return_state=False,
               go_backwards=False,
               stateful=False,
               unroll=False,
               time_major=False,
               reset_after=True,
               **kwargs):
    # return_runtime is a flag for testing, which shows the real backend
    # implementation chosen by grappler in graph mode.
    self._return_runtime = kwargs.pop('return_runtime', False)

    super(GRU, self).__init__(
        units,
        activation=activation,
        recurrent_activation=recurrent_activation,
        use_bias=use_bias,
        kernel_initializer=kernel_initializer,
        recurrent_initializer=recurrent_initializer,
        bias_initializer=bias_initializer,
        kernel_regularizer=kernel_regularizer,
        recurrent_regularizer=recurrent_regularizer,
        bias_regularizer=bias_regularizer,
        activity_regularizer=activity_regularizer,
        kernel_constraint=kernel_constraint,
        recurrent_constraint=recurrent_constraint,
        bias_constraint=bias_constraint,
        dropout=dropout,
        recurrent_dropout=recurrent_dropout,
        implementation=implementation,
        return_sequences=return_sequences,
        return_state=return_state,
        go_backwards=go_backwards,
        stateful=stateful,
        unroll=unroll,
        time_major=time_major,
        reset_after=reset_after,
        **kwargs)
    # CuDNN uses the following settings by default and they are not
    # configurable. This records, at construction time, whether the CuDNN
    # kernel is even a candidate; masking is checked later, per call.
    self.could_use_cudnn = (
        activation == 'tanh' and recurrent_activation == 'sigmoid' and
        recurrent_dropout == 0 and not unroll and use_bias and
        reset_after)

  def call(self, inputs, mask=None, training=None, initial_state=None):
    """Runs the layer, dispatching to the CuDNN or standard GRU backend."""
    # GRU does not support constants. Ignore it during process.
    inputs, initial_state, _ = self._process_inputs(inputs, initial_state, None)

    if isinstance(mask, list):
      mask = mask[0]

    input_shape = K.int_shape(inputs)
    timesteps = input_shape[0] if self.time_major else input_shape[1]

    if not self.could_use_cudnn:
      # CuDNN does not support masking, fall back to use the normal GRU.
      kwargs = {'training': training}

      def step(cell_inputs, cell_states):
        return self.cell.call(cell_inputs, cell_states, **kwargs)

      last_output, outputs, states = K.rnn(
          step,
          inputs,
          initial_state,
          constants=None,
          go_backwards=self.go_backwards,
          mask=mask,
          unroll=self.unroll,
          input_length=timesteps,
          time_major=self.time_major,
          zero_output_for_mask=self.zero_output_for_mask)
      # This is a dummy tensor for testing purpose.
      runtime = _runtime(_RUNTIME_UNKNOWN)
    else:
      last_output, outputs, runtime, states = self._defun_gru_call(
          inputs, initial_state, training, mask)

    if self.stateful:
      # Persist the final state so the next batch starts from it.
      updates = [state_ops.assign(self.states[0], states[0])]
      self.add_update(updates, inputs)

    if self.return_sequences:
      output = outputs
    else:
      output = last_output

    if self.return_state:
      return [output] + list(states)
    elif self._return_runtime:
      return output, runtime
    else:
      return output

  def _defun_gru_call(self, inputs, initial_state, training, mask):
    """Dispatches between the CuDNN and standard GRU implementations.

    Returns (last_output, outputs, runtime, states); `runtime` is the
    testing-only marker tensor identifying which backend actually ran.
    """
    # Use the new defun approach for backend implementation swap.
    # Note that different implementations need to have same function
    # signature, eg, the tensor parameters need to have same shape and dtypes.
    self.reset_dropout_mask()
    dropout_mask = self.get_dropout_mask_for_cell(inputs, training, count=3)
    if dropout_mask is not None:
      # Only the first of the three masks is applied to the inputs here; the
      # CuDNN kernel has no per-gate dropout.
      inputs *= dropout_mask[0]
    cudnn_gru_kwargs = {
        'inputs': inputs,
        'init_h': initial_state[0],
        'kernel': self.cell.kernel,
        'recurrent_kernel': self.cell.recurrent_kernel,
        'bias': self.cell.bias,
        'mask': mask,
        'time_major': self.time_major,
        'go_backwards': self.go_backwards
    }
    # The standard implementation takes the same tensors plus the activations.
    normal_gru_kwargs = cudnn_gru_kwargs.copy()
    normal_gru_kwargs.update({
        'activation': self.activation,
        'recurrent_activation': self.recurrent_activation
    })

    if context.executing_eagerly():
      device_type = _get_context_device_type()
      can_use_gpu = (
          # Either user specified GPU or unspecified but GPU is available.
          (device_type == _GPU_DEVICE_NAME
           or (device_type is None and context.num_gpus() > 0))
          and
          (mask is None or is_sequence_right_padded(mask, self.time_major)))
      # Under eager context, check the device placement and prefer the
      # GPU implementation when GPU is available.
      if can_use_gpu:
        last_output, outputs, new_h, runtime = cudnn_gru(**cudnn_gru_kwargs)
      else:
        last_output, outputs, new_h, runtime = standard_gru(**normal_gru_kwargs)
    else:
      # Graph mode: register both implementations under one unique API name so
      # Grappler can swap in the CuDNN version at session-execution time.
      api_name = 'gru_' + str(uuid.uuid4())
      defun_standard_gru = _generate_defun_backend(
          api_name, _CPU_DEVICE_NAME, standard_gru)
      defun_cudnn_gru = _generate_defun_backend(
          api_name, _GPU_DEVICE_NAME, cudnn_gru)
      # Call the normal GRU impl and register the CuDNN impl function. The
      # grappler will kick in during session execution to optimize the graph.
      last_output, outputs, new_h, runtime = defun_standard_gru(
          **normal_gru_kwargs)

      def register_cudnn_defun():
        function.register(defun_cudnn_gru, **cudnn_gru_kwargs)
        # return some dummy value since the tf.cond require some return value.
        return 0
      if mask is None:
        register_cudnn_defun()
      else:
        # Only when seq_right_padded=True, CuDNN kernel can support that
        # properly.
        control_flow_ops.cond(is_sequence_right_padded(mask, self.time_major),
                              true_fn=register_cudnn_defun,
                              false_fn=lambda: 0)
    states = [new_h]
    return last_output, outputs, runtime, states
def standard_gru(inputs, init_h, kernel, recurrent_kernel, bias, activation,
                 recurrent_activation, mask, time_major, go_backwards):
  """GRU with the standard (non-CuDNN) kernel implementation.

  Runs on any hardware. All layer weights are lifted out into function
  parameters so that this function has the same tensor signature as the CuDNN
  counterpart; dropout and other cell-level features are intentionally absent
  since the CuDNN implementation does not support them.

  Arguments:
    inputs: Input tensor of the GRU layer.
    init_h: Initial state tensor for the cell output.
    kernel: Weights for the cell kernel.
    recurrent_kernel: Weights for the cell recurrent kernel.
    bias: Stacked cell kernel bias and recurrent bias (the combined
      input_bias and recurrent_bias).
    activation: Activation function for the candidate/output.
    recurrent_activation: Activation function for the update/reset gates.
    mask: Binary tensor of shape `(samples, timesteps)` indicating whether a
      given timestep should be masked.
    time_major: Boolean, whether inputs are [time, batch, feature] or
      [batch, time, feature].
    go_backwards: Boolean (default False). If True, process the input
      sequence backwards and return the reversed sequence.

  Returns:
    last_output: Output tensor for the last timestep, shape [batch, units].
    outputs: Output tensor for all timesteps, shape [batch, time, units].
    state_0: The cell output; same shape as init_h.
    runtime: Constant string tensor indicating the real runtime hardware.
      This value is for testing purposes and should not be used by users.
  """
  shape = K.int_shape(inputs)
  timesteps = shape[0] if time_major else shape[1]
  input_bias, recurrent_bias = array_ops.unstack(bias)

  def gru_step(cell_inputs, cell_states):
    """Single GRU timestep driven by the Keras RNN backend."""
    h_tm1 = cell_states[0]

    # All three gate projections of the inputs in one matmul, then split.
    x_proj = K.bias_add(K.dot(cell_inputs, kernel), input_bias)
    x_z, x_r, x_h = array_ops.split(x_proj, 3, axis=1)

    # Same for the previous hidden state.
    h_proj = K.bias_add(K.dot(h_tm1, recurrent_kernel), recurrent_bias)
    h_z, h_r, h_h = array_ops.split(h_proj, 3, axis=1)

    update_gate = recurrent_activation(x_z + h_z)
    reset_gate = recurrent_activation(x_r + h_r)
    candidate = activation(x_h + reset_gate * h_h)

    # Mix the previous state and the candidate via the update gate.
    new_h = update_gate * h_tm1 + (1 - update_gate) * candidate
    return new_h, [new_h]

  last_output, outputs, new_states = K.rnn(
      gru_step,
      inputs, [init_h],
      constants=None,
      unroll=False,
      time_major=time_major,
      mask=mask,
      go_backwards=go_backwards,
      input_length=timesteps)
  return last_output, outputs, new_states[0], _runtime(_RUNTIME_CPU)
def cudnn_gru(inputs, init_h, kernel, recurrent_kernel, bias, mask, time_major,
              go_backwards):
  """GRU with CuDNN implementation which is only available for GPU.

  Args:
    inputs: Input tensor of the GRU layer.
    init_h: Initial state tensor for the cell output.
    kernel: Weights for the cell kernel.
    recurrent_kernel: Weights for the cell recurrent kernel.
    bias: Stacked input and recurrent biases, shape (2, 3 * units)
      (flattened below before being packed into the CuDNN params).
    mask: Binary tensor of shape `(samples, timesteps)` indicating whether a
      given timestep should be masked, or None.
    time_major: Boolean, whether inputs are [time, batch, feature] or
      [batch, time, feature].
    go_backwards: Boolean. If True, process the input sequence backwards.

  Returns:
    last_output, outputs, h, runtime — same contract as `standard_gru`, with
    runtime marking the GPU backend.
  """
  if not time_major:
    # The CuDNN op consumes time-major input: [time, batch, feature].
    inputs = array_ops.transpose(inputs, perm=(1, 0, 2))
  # CuDNN expects a leading num_layers/num_dirs axis on the state.
  init_h = array_ops.expand_dims(init_h, axis=0)
  weights = array_ops.split(kernel, 3, axis=1)
  weights += array_ops.split(recurrent_kernel, 3, axis=1)
  # Note that the bias was initialized as shape (2, 3 * units), flat it into
  # (6 * units)
  bias = array_ops.split(K.flatten(bias), 6)
  # Note that the gate order for CuDNN is different from the canonical format.
  # canonical format is [z, r, h], whereas CuDNN is [r, z, h]. The swap need to
  # be done for kernel, recurrent_kernel, input_bias, recurrent_bias.
  # z is update gate weights.
  # r is reset gate weights.
  # h is output gate weights.
  weights[0], weights[1] = weights[1], weights[0]
  weights[3], weights[4] = weights[4], weights[3]
  bias[0], bias[1] = bias[1], bias[0]
  bias[3], bias[4] = bias[4], bias[3]

  params = _canonical_to_params(
      weights=weights,
      biases=bias,
      shape=constant_op.constant([-1]),
      transpose_weights=True)

  if mask is not None:
    sequence_length = calculate_sequence_by_mask(mask, time_major)
  else:
    # Fill the array with shape [batch] with value of max timesteps.
    sequence_length = array_ops.fill([array_ops.shape(inputs)[1]],
                                     array_ops.shape(inputs)[0])
  if go_backwards:
    # Reverse each sequence (up to its own length) before the forward-only
    # CuDNN kernel; inputs are time-major here, hence seq_axis=0.
    inputs = array_ops.reverse_sequence_v2(inputs, sequence_length, seq_axis=0,
                                           batch_axis=1)
  # input_c is unused for GRU (only LSTM has a cell state); 0 is a placeholder.
  outputs, h, _, _, _ = gen_cudnn_rnn_ops.cudnn_rnnv3(
      inputs,
      input_h=init_h,
      input_c=0,
      params=params,
      is_training=True,
      rnn_mode='gru',
      sequence_lengths=sequence_length)
  last_output = outputs[-1]
  if not time_major:
    outputs = array_ops.transpose(outputs, perm=[1, 0, 2])
  # Drop the leading num_layers/num_dirs axis added above.
  h = h[0]

  # In the case of variable length input, the cudnn kernel will fill zeros for
  # the output, whereas the default keras behavior is to bring over the previous
  # output for t-1, so that in the return_sequence=False case, user can quickly
  # get the final effect output instead just 0s at the last timestep.
  # In order to mimic the default keras behavior, we copy the final h state as
  # the last_output, since it is numerically same as the output.
  if mask is not None:
    last_output = h

  return last_output, outputs, h, _runtime(_RUNTIME_GPU)
@keras_export('keras.layers.LSTMCell', v1=[])
class LSTMCell(recurrent.LSTMCell):
  """Cell class for the LSTM layer.

  This is the TF 2.0 flavor of the LSTM cell: it forwards every constructor
  argument verbatim to `recurrent.LSTMCell`, pinning the 2.0 defaults
  (`recurrent_activation='sigmoid'`, `implementation=2`).

  Arguments:
    units: Positive integer, dimensionality of the output space.
    activation: Activation function for the output. Default: hyperbolic
      tangent (`tanh`). Passing `None` means linear activation (`a(x) = x`).
    recurrent_activation: Activation function for the recurrent step.
      Default: sigmoid (`sigmoid`). Passing `None` means linear activation.
    use_bias: Boolean, whether the layer uses a bias vector.
    kernel_initializer: Initializer for the `kernel` weights matrix, used for
      the linear transformation of the inputs.
    recurrent_initializer: Initializer for the `recurrent_kernel` weights
      matrix, used for the linear transformation of the recurrent state.
    bias_initializer: Initializer for the bias vector.
    unit_forget_bias: Boolean. If True, add 1 to the bias of the forget gate
      at initialization. Setting it to true will also force
      `bias_initializer="zeros"`. This is recommended in [Jozefowicz et
      al.](http://www.jmlr.org/proceedings/papers/v37/jozefowicz15.pdf)
    kernel_regularizer: Regularizer function applied to the `kernel` weights
      matrix.
    recurrent_regularizer: Regularizer function applied to the
      `recurrent_kernel` weights matrix.
    bias_regularizer: Regularizer function applied to the bias vector.
    kernel_constraint: Constraint function applied to the `kernel` weights
      matrix.
    recurrent_constraint: Constraint function applied to the
      `recurrent_kernel` weights matrix.
    bias_constraint: Constraint function applied to the bias vector.
    dropout: Float between 0 and 1. Fraction of the units to drop for the
      linear transformation of the inputs.
    recurrent_dropout: Float between 0 and 1. Fraction of the units to drop
      for the linear transformation of the recurrent state.
    implementation: Implementation mode, either 1 or 2. Mode 1 structures its
      operations as a larger number of smaller dot products and additions,
      whereas mode 2 (default) batches them into fewer, larger operations.
      These modes have different performance profiles on different hardware
      and for different applications.

  Call arguments:
    inputs: A 2D tensor.
    states: List of state tensors corresponding to the previous timestep.
    training: Python boolean indicating whether the layer should behave in
      training mode or in inference mode. Only relevant when `dropout` or
      `recurrent_dropout` is used.
  """

  def __init__(self,
               units,
               activation='tanh',
               recurrent_activation='sigmoid',
               use_bias=True,
               kernel_initializer='glorot_uniform',
               recurrent_initializer='orthogonal',
               bias_initializer='zeros',
               unit_forget_bias=True,
               kernel_regularizer=None,
               recurrent_regularizer=None,
               bias_regularizer=None,
               kernel_constraint=None,
               recurrent_constraint=None,
               bias_constraint=None,
               dropout=0.,
               recurrent_dropout=0.,
               implementation=2,
               **kwargs):
    # Everything is delegated to the base cell; this class only exists to
    # change the default argument values for TF 2.0.
    super(LSTMCell, self).__init__(
        units,
        activation=activation,
        recurrent_activation=recurrent_activation,
        use_bias=use_bias,
        unit_forget_bias=unit_forget_bias,
        dropout=dropout,
        recurrent_dropout=recurrent_dropout,
        implementation=implementation,
        kernel_initializer=kernel_initializer,
        recurrent_initializer=recurrent_initializer,
        bias_initializer=bias_initializer,
        kernel_regularizer=kernel_regularizer,
        recurrent_regularizer=recurrent_regularizer,
        bias_regularizer=bias_regularizer,
        kernel_constraint=kernel_constraint,
        recurrent_constraint=recurrent_constraint,
        bias_constraint=bias_constraint,
        **kwargs)
@keras_export('keras.layers.LSTM', v1=[])
class LSTM(recurrent.DropoutRNNCellMixin, recurrent.LSTM):
  """Long Short-Term Memory layer - Hochreiter 1997.

  Based on available runtime hardware and constraints, this layer
  will choose different implementations (cuDNN-based or pure-TensorFlow)
  to maximize the performance. If a GPU is available and all
  the arguments to the layer meet the requirement of the CuDNN kernel
  (see below for details), the layer will use a fast cuDNN implementation.

  The requirements to use the cuDNN implementation are:
  1. `activation` == 'tanh'
  2. `recurrent_activation` == 'sigmoid'
  3. `recurrent_dropout` == 0
  4. `unroll` is False
  5. `use_bias` is True
  6. Inputs are not masked or strictly right padded.

  Arguments:
    units: Positive integer, dimensionality of the output space.
    activation: Activation function to use.
      Default: hyperbolic tangent (`tanh`). If you pass `None`, no activation
      is applied (ie. "linear" activation: `a(x) = x`).
    recurrent_activation: Activation function to use for the recurrent step.
      Default: sigmoid (`sigmoid`). If you pass `None`, no activation is
      applied (ie. "linear" activation: `a(x) = x`).
    use_bias: Boolean, whether the layer uses a bias vector.
    kernel_initializer: Initializer for the `kernel` weights matrix, used for
      the linear transformation of the inputs.
    recurrent_initializer: Initializer for the `recurrent_kernel` weights
      matrix, used for the linear transformation of the recurrent state.
    bias_initializer: Initializer for the bias vector.
    unit_forget_bias: Boolean. If True, add 1 to the bias of the forget gate at
      initialization. Setting it to true will also force
      `bias_initializer="zeros"`. This is recommended in [Jozefowicz et
      al.](http://www.jmlr.org/proceedings/papers/v37/jozefowicz15.pdf).
    kernel_regularizer: Regularizer function applied to the `kernel` weights
      matrix.
    recurrent_regularizer: Regularizer function applied to the
      `recurrent_kernel` weights matrix.
    bias_regularizer: Regularizer function applied to the bias vector.
    activity_regularizer: Regularizer function applied to the output of the
      layer (its "activation").
    kernel_constraint: Constraint function applied to the `kernel` weights
      matrix.
    recurrent_constraint: Constraint function applied to the `recurrent_kernel`
      weights matrix.
    bias_constraint: Constraint function applied to the bias vector.
    dropout: Float between 0 and 1. Fraction of the units to drop for the linear
      transformation of the inputs.
    recurrent_dropout: Float between 0 and 1. Fraction of the units to drop for
      the linear transformation of the recurrent state.
    implementation: Implementation mode, either 1 or 2. Mode 1 will structure
      its operations as a larger number of smaller dot products and additions,
      whereas mode 2 will batch them into fewer, larger operations. These modes
      will have different performance profiles on different hardware and for
      different applications.
    return_sequences: Boolean. Whether to return the last output in the output
      sequence, or the full sequence.
    return_state: Boolean. Whether to return the last state in addition to the
      output.
    go_backwards: Boolean (default False). If True, process the input sequence
      backwards and return the reversed sequence.
    stateful: Boolean (default False). If True, the last state for each sample
      at index i in a batch will be used as initial state for the sample of
      index i in the following batch.
    unroll: Boolean (default False). If True, the network will be unrolled, else
      a symbolic loop will be used. Unrolling can speed-up a RNN, although it
      tends to be more memory-intensive. Unrolling is only suitable for short
      sequences.

  Call arguments:
    inputs: A 3D tensor.
    mask: Binary tensor of shape `(samples, timesteps)` indicating whether
      a given timestep should be masked.
    training: Python boolean indicating whether the layer should behave in
      training mode or in inference mode. This argument is passed to the cell
      when calling it. This is only relevant if `dropout` or
      `recurrent_dropout` is used.
    initial_state: List of initial state tensors to be passed to the first
      call of the cell.
  """

  def __init__(self,
               units,
               activation='tanh',
               recurrent_activation='sigmoid',
               use_bias=True,
               kernel_initializer='glorot_uniform',
               recurrent_initializer='orthogonal',
               bias_initializer='zeros',
               unit_forget_bias=True,
               kernel_regularizer=None,
               recurrent_regularizer=None,
               bias_regularizer=None,
               activity_regularizer=None,
               kernel_constraint=None,
               recurrent_constraint=None,
               bias_constraint=None,
               dropout=0.,
               recurrent_dropout=0.,
               implementation=2,
               return_sequences=False,
               return_state=False,
               go_backwards=False,
               stateful=False,
               time_major=False,
               unroll=False,
               **kwargs):
    # return_runtime is a flag for testing, which shows the real backend
    # implementation chosen by grappler in graph mode.
    self.return_runtime = kwargs.pop('return_runtime', False)

    super(LSTM, self).__init__(
        units,
        activation=activation,
        recurrent_activation=recurrent_activation,
        use_bias=use_bias,
        kernel_initializer=kernel_initializer,
        recurrent_initializer=recurrent_initializer,
        bias_initializer=bias_initializer,
        unit_forget_bias=unit_forget_bias,
        kernel_regularizer=kernel_regularizer,
        recurrent_regularizer=recurrent_regularizer,
        bias_regularizer=bias_regularizer,
        activity_regularizer=activity_regularizer,
        kernel_constraint=kernel_constraint,
        recurrent_constraint=recurrent_constraint,
        bias_constraint=bias_constraint,
        dropout=dropout,
        recurrent_dropout=recurrent_dropout,
        implementation=implementation,
        return_sequences=return_sequences,
        return_state=return_state,
        go_backwards=go_backwards,
        stateful=stateful,
        time_major=time_major,
        unroll=unroll,
        **kwargs)
    # Two states of identical shape: [h, c].
    self.state_spec = [
        InputSpec(shape=(None, dim)) for dim in (self.units, self.units)
    ]
    # Whether the CuDNN kernel is a candidate given the constructor args;
    # masking is checked later, per call.
    self.could_use_cudnn = (
        activation == 'tanh' and recurrent_activation == 'sigmoid' and
        recurrent_dropout == 0 and not unroll and use_bias)

  def call(self, inputs, mask=None, training=None, initial_state=None):
    """Runs the layer, dispatching to the CuDNN or standard LSTM backend."""
    # LSTM does not support constants. Ignore it during process.
    inputs, initial_state, _ = self._process_inputs(inputs, initial_state, None)

    if isinstance(mask, list):
      mask = mask[0]

    input_shape = K.int_shape(inputs)
    timesteps = input_shape[0] if self.time_major else input_shape[1]

    if not self.could_use_cudnn:
      # Fall back to use the normal LSTM.
      kwargs = {'training': training}

      def step(inputs, states):
        return self.cell.call(inputs, states, **kwargs)

      last_output, outputs, states = K.rnn(
          step,
          inputs,
          initial_state,
          constants=None,
          go_backwards=self.go_backwards,
          mask=mask,
          unroll=self.unroll,
          input_length=timesteps,
          time_major=self.time_major,
          zero_output_for_mask=self.zero_output_for_mask)
      # Dummy tensor for testing purposes (real backend unknown here).
      runtime = _runtime(_RUNTIME_UNKNOWN)
    else:
      # Use the new defun approach for backend implementation swap.
      # Note that different implementations need to have same function
      # signature, eg, the tensor parameters need to have same shape and dtypes.
      # Since the CuDNN has an extra set of bias, those bias will be passed to
      # both normal and CuDNN implementations.
      self.reset_dropout_mask()
      dropout_mask = self.get_dropout_mask_for_cell(inputs, training, count=4)
      if dropout_mask is not None:
        # Only the first of the four masks is applied to the inputs here; the
        # CuDNN kernel has no per-gate dropout.
        inputs *= dropout_mask[0]
      cudnn_lstm_kwargs = {
          'inputs': inputs,
          'init_h': initial_state[0],
          'init_c': initial_state[1],
          'kernel': self.cell.kernel,
          'recurrent_kernel': self.cell.recurrent_kernel,
          'bias': self.cell.bias,
          'mask': mask,
          'time_major': self.time_major,
          'go_backwards': self.go_backwards
      }
      # The standard implementation takes the same tensors plus activations.
      normal_lstm_kwargs = cudnn_lstm_kwargs.copy()
      normal_lstm_kwargs.update({
          'activation': self.activation,
          'recurrent_activation': self.recurrent_activation
      })

      if context.executing_eagerly():
        device_type = _get_context_device_type()
        can_use_gpu = (
            # Either user specified GPU or unspecified but GPU is available.
            (device_type == _GPU_DEVICE_NAME
             or (device_type is None and context.num_gpus() > 0))
            and
            (mask is None or is_sequence_right_padded(mask, self.time_major)))
        # Under eager context, check the device placement and prefer the
        # GPU implementation when GPU is available.
        if can_use_gpu:
          last_output, outputs, new_h, new_c, runtime = cudnn_lstm(
              **cudnn_lstm_kwargs)
        else:
          last_output, outputs, new_h, new_c, runtime = standard_lstm(
              **normal_lstm_kwargs)
      else:
        # Each time a `tf.function` is called, we will give it a unique
        # identifiable API name, so that Grappler won't get confused when it
        # sees multiple LSTM layers added into same graph, and it will be able
        # to pair up the different implementations across them.
        api_name = 'lstm_' + str(uuid.uuid4())
        defun_standard_lstm = _generate_defun_backend(
            api_name, _CPU_DEVICE_NAME, standard_lstm)
        defun_cudnn_lstm = _generate_defun_backend(
            api_name, _GPU_DEVICE_NAME, cudnn_lstm)

        # Call the normal LSTM impl and register the CuDNN impl function. The
        # grappler will kick in during session execution to optimize the graph.
        last_output, outputs, new_h, new_c, runtime = defun_standard_lstm(
            **normal_lstm_kwargs)

        def register_cudnn_defun():
          function.register(defun_cudnn_lstm, **cudnn_lstm_kwargs)
          # return some dummy value since the tf.cond require some return value.
          return 0
        if mask is None:
          register_cudnn_defun()
        else:
          # Only when seq_right_padded=True, CuDNN kernel can support that
          # properly.
          control_flow_ops.cond(is_sequence_right_padded(mask, self.time_major),
                                true_fn=register_cudnn_defun,
                                false_fn=lambda: 0)
      states = [new_h, new_c]

    if self.stateful:
      # Persist both final states ([h, c]) so the next batch starts from them.
      updates = []
      for i in range(len(states)):
        updates.append(state_ops.assign(self.states[i], states[i]))
      self.add_update(updates, inputs)

    if self.return_sequences:
      output = outputs
    else:
      output = last_output

    if self.return_state:
      return [output] + list(states)
    elif self.return_runtime:
      return output, runtime
    else:
      return output
def _canonical_to_params(weights, biases, shape, transpose_weights=False):
  """Utility function to convert variables to CuDNN-compatible parameters.

  Note that Keras weights for kernels are different from the CuDNN format. Eg.:

  ```
    Keras                 CuDNN
    [[0, 1, 2],  <--->  [[0, 2, 4],
     [3, 4, 5]]          [1, 3, 5]]
  ```

  If the input weights need to be in a unified format, then set
  `transpose_weights=True` to convert the weights.

  Args:
    weights: list of weights for the individual kernels and recurrent kernels.
    biases: list of biases for individual gate.
    shape: the shape for the converted variables that will be fed to CuDNN.
    transpose_weights: boolean, whether to transpose the weights.

  Returns:
    The converted weights that can be fed to CuDNN ops as param.
  """
  if transpose_weights:
    flat_weights = [
        array_ops.reshape(array_ops.transpose(w), shape) for w in weights
    ]
  else:
    flat_weights = [array_ops.reshape(w, shape) for w in weights]
  flat_biases = [array_ops.reshape(b, shape) for b in biases]
  # Single flat buffer: all kernels first, then all biases.
  return array_ops.concat(flat_weights + flat_biases, axis=0)
def standard_lstm(inputs, init_h, init_c, kernel, recurrent_kernel, bias,
                  activation, recurrent_activation, mask, time_major,
                  go_backwards):
  """LSTM with standard (non-CuDNN) kernel implementation.

  This implementation can run on all types of hardware. All layer weights are
  lifted out into function parameters so that this function has the same
  tensor-input signature as the CuDNN counterpart, allowing the grappler
  pass to swap one implementation for the other. The RNN step logic is
  simplified relative to the full Keras LSTM cell: dropout is removed since
  the CuDNN implementation does not support it.

  Note that the first half of `bias` is ignored by this implementation; the
  CuDNN implementation needs an extra set of input-gate biases, and the extra
  set is passed here only so both functions take parameters of the same shape.

  Args:
    inputs: input tensor of LSTM layer.
    init_h: initial state tensor for the cell output.
    init_c: initial state tensor for the cell hidden state.
    kernel: weights for cell kernel.
    recurrent_kernel: weights for cell recurrent kernel.
    bias: weights for cell kernel bias and recurrent bias. Only recurrent bias
      is used in this case.
    activation: Activation function to use for output.
    recurrent_activation: Activation function to use for hidden recurrent
      state.
    mask: Boolean tensor for masking out steps within each sequence.
    time_major: boolean, whether the inputs are in the format of
      [time, batch, feature] or [batch, time, feature].
    go_backwards: Boolean (default False). If True, process the input sequence
      backwards and return the reversed sequence.

  Returns:
    last_output: output tensor for the last timestep, with shape
      [batch, units].
    outputs: output tensor for all timesteps, with shape [batch, time, units].
    state_0: the cell output, same shape as init_h.
    state_1: the cell hidden state, same shape as init_c.
    runtime: constant tensor indicating the real runtime hardware. This value
      is for testing purposes and should be used by user.
  """
  input_shape = K.int_shape(inputs)
  # May be None for dynamic shapes; K.rnn accepts that for input_length.
  timesteps = input_shape[0] if time_major else input_shape[1]

  def step(cell_inputs, cell_states):
    """Single-timestep LSTM cell used by the Keras RNN backend."""
    h_tm1 = cell_states[0]  # previous memory state
    c_tm1 = cell_states[1]  # previous carry state

    # Fused projection: one matmul per input/state, then split into the four
    # gate pre-activations (input, forget, cell, output).
    z = K.dot(cell_inputs, kernel)
    z += K.dot(h_tm1, recurrent_kernel)
    z = K.bias_add(z, bias)

    z0, z1, z2, z3 = array_ops.split(z, 4, axis=1)

    i = recurrent_activation(z0)
    f = recurrent_activation(z1)
    c = f * c_tm1 + i * activation(z2)
    o = recurrent_activation(z3)

    h = o * activation(c)
    return h, [h, c]

  last_output, outputs, new_states = K.rnn(
      step,
      inputs, [init_h, init_c],
      constants=None,
      unroll=False,
      time_major=time_major,
      mask=mask,
      go_backwards=go_backwards,
      input_length=timesteps)
  return (last_output, outputs, new_states[0], new_states[1],
          _runtime(_RUNTIME_CPU))
def cudnn_lstm(inputs, init_h, init_c, kernel, recurrent_kernel, bias, mask,
               time_major, go_backwards):
  """LSTM with CuDNN implementation, only available on GPU.

  Note that currently only right-padded data is supported; otherwise the
  result is polluted by the unmasked data, which should have been filtered.

  Args:
    inputs: Input tensor of LSTM layer.
    init_h: Initial state tensor for the cell output.
    init_c: Initial state tensor for the cell hidden state.
    kernel: Weights for cell kernel.
    recurrent_kernel: Weights for cell recurrent kernel.
    bias: Weights for cell kernel bias and recurrent bias. Only recurrent bias
      is used in this case.
    mask: Boolean tensor for masking out steps within each sequence.
    time_major: Boolean, whether the inputs are in the format of
      [time, batch, feature] or [batch, time, feature].
    go_backwards: Boolean (default False). If True, process the input sequence
      backwards and return the reversed sequence.

  Returns:
    last_output: Output tensor for the last timestep, with shape
      [batch, units].
    outputs: Output tensor for all timesteps, with shape [batch, time, units].
    state_0: The cell output, same shape as init_h.
    state_1: The cell hidden state, same shape as init_c.
    runtime: Constant tensor indicating the real runtime hardware. This value
      is for testing purposes and should not be used by user.
  """
  if not time_major:
    # Cudnn kernel prefers the input to be time major.
    inputs = array_ops.transpose(inputs, perm=(1, 0, 2))
  # CuDNN expects states with a leading num_layers/num_dirs axis of 1.
  init_h = array_ops.expand_dims(init_h, axis=0)
  init_c = array_ops.expand_dims(init_c, axis=0)
  weights = array_ops.split(kernel, 4, axis=1)
  weights += array_ops.split(recurrent_kernel, 4, axis=1)
  # CuDNN has an extra set of bias for inputs, we disable them (setting to 0),
  # so that mathematically it is same as the canonical LSTM implementation.
  full_bias = array_ops.concat((array_ops.zeros_like(bias), bias), 0)

  if mask is not None:
    sequence_length = calculate_sequence_by_mask(mask, time_major)
  else:
    # Fill the array with shape [batch] with value of max timesteps.
    # At this point `inputs` is time major, so axis 1 is batch and axis 0 is
    # the timestep count.
    sequence_length = array_ops.fill([array_ops.shape(inputs)[1]],
                                     array_ops.shape(inputs)[0])
  if go_backwards:
    # Reverse only the valid (unpadded) portion of each sequence.
    inputs = array_ops.reverse_sequence_v2(inputs, sequence_length, seq_axis=0,
                                           batch_axis=1)
  params = _canonical_to_params(
      weights=weights,
      biases=array_ops.split(full_bias, 8),
      shape=constant_op.constant([-1]),
      transpose_weights=True)

  outputs, h, c, _, _ = gen_cudnn_rnn_ops.cudnn_rnnv3(
      inputs, input_h=init_h, input_c=init_c, params=params, is_training=True,
      rnn_mode='lstm', sequence_lengths=sequence_length)
  last_output = outputs[-1]
  if not time_major:
    outputs = array_ops.transpose(outputs, perm=[1, 0, 2])
  # Drop the leading num_layers/num_dirs axis added above.
  h = h[0]
  c = c[0]

  # In the case of variable length input, the cudnn kernel will fill zeros for
  # the output, whereas the default keras behavior is to bring over the previous
  # output for t-1, so that in the return_sequence=False case, user can quickly
  # get the final effect output instead just 0s at the last timestep.
  # In order to mimic the default keras behavior, we copy the final h state as
  # the last_output, since it is numerically same as the output.
  if mask is not None:
    last_output = h

  return last_output, outputs, h, c, _runtime(_RUNTIME_GPU)
def is_sequence_right_padded(mask, time_major):
  """Return a scalar bool tensor: is the mask strictly right padded?

  The CuDNN kernel relies on a per-batch sequence-length parameter to skip
  trailing timesteps. If the data is left padded, or not strictly right
  padded (masked values in the middle of a sequence), the CuDNN kernel will
  not work properly for those cases.

  Left padded data: [[False, False, True, True, True]].
  Right padded data: [[True, True, True, False, False]].
  Mixture of mask/unmasked data: [[True, False, True, False, False]].

  For the mixed example above, the RNN should only see the two True steps
  (index 0 and 2); the False at index 1 must be ignored and must not pollute
  the internal states.

  Args:
    mask: the Boolean tensor with shape [batch, timestep] or [timestep, batch]
      when time_major is True.
    time_major: Boolean, whether the input mask is time major or batch major.

  Returns:
    boolean scalar tensor, whether the mask is strictly right padded.
  """
  time_axis = 0 if time_major else 1
  max_timesteps = array_ops.shape(mask)[time_axis]
  flipped = math_ops.cast(
      array_ops.reverse(mask, axis=[time_axis]), dtypes.int32)
  # argmax over the flipped mask finds the leading 1, i.e. the distance from
  # the end of the original mask to its last True value.
  last_true_from_end = math_ops.argmax(
      flipped, axis=time_axis, output_type=dtypes.int32)
  num_true = math_ops.reduce_sum(flipped, axis=time_axis)
  # Strict right padding holds exactly when, for every batch entry, the
  # trailing False run has length max_timesteps - num_true.
  return math_ops.reduce_all(
      math_ops.equal(last_true_from_end, max_timesteps - num_true))
def calculate_sequence_by_mask(mask, time_major):
  """Derive a 1-D sequence-length tensor from a 2-D boolean mask.

  The mask has shape [batch, timestep] (or [timestep, batch] when
  time_major=True); masked-out timesteps are False. For example:

    a = [[True, True, False, False],
         [True, False, True, False]]

  yields the 1-D result [2, 3]: the length runs up to and including the last
  True value in each row, so the second row's length is 3 even though index 1
  is masked.

  Args:
    mask: Boolean tensor with shape [batch, timestep] or [timestep, batch] if
      time_major=True.
    time_major: Boolean, which indicates whether the mask is time major or
      batch major.

  Returns:
    sequence_length: 1D int32 tensor.
  """
  time_axis = 0 if time_major else 1
  max_timesteps = array_ops.shape(mask)[time_axis]
  flipped = math_ops.cast(
      array_ops.reverse(mask, axis=[time_axis]), dtypes.int32)
  # argmax over the flipped mask finds the leading 1, i.e. the offset from
  # the end of the original mask to its last True value.
  offset_from_end = math_ops.argmax(
      flipped, axis=time_axis, output_type=dtypes.int32)
  return max_timesteps - offset_from_end
def _generate_defun_backend(unique_api_name, preferred_device, func):
  """Wrap `func` in a defun tagged with an API name and a preferred device.

  The attributes let the grappler implementation-selector pass pair up the
  CPU and GPU variants registered under the same API name.
  """
  attrs = {
      _DEFUN_API_NAME_ATTRIBUTE: unique_api_name,
      _DEFUN_DEVICE_ATTRIBUTE: preferred_device,
  }
  return function.defun_with_attributes(func=func, attributes=attrs)
def _get_context_device_type():
  """Parse the current context and return the device type, eg CPU/GPU."""
  device_name = context.context().device_name
  # No explicit device placement in the current context.
  return (None if device_name is None
          else device.DeviceSpec.from_string(device_name).device_type)
def _runtime(runtime_name):
  # Constant tensor recording which kernel (CPU/GPU) actually ran; pinned to
  # CPU so reading it never forces a device transfer.
  # NOTE(review): dtype is float32, so `runtime_name` is presumably a numeric
  # constant (_RUNTIME_CPU/_RUNTIME_GPU) rather than a string -- confirm.
  with ops.device('/cpu:0'):
    return constant_op.constant(
        runtime_name, dtype=dtypes.float32, name='runtime')
| [
"TheSiebi@users.noreply.github.com"
] | TheSiebi@users.noreply.github.com |
51140ea0cc978e5ed4e0f1464b9a9073bf1dce48 | 7789f4c84a250ce45accdecbf73630519bfc4aa1 | /devel/lib/python2.7/dist-packages/rail_manipulation_msgs/msg/_GripperActionResult.py | 1a365d9f54227a9deb6cbfccf4f3ecdbd87f872a | [] | no_license | JerryHu1994/NRI-authoring-Backend | 33610913692c3ba8ac2e7fd47b735d193771526c | d130201224deb8696ae4b2dbc451c251693040d3 | refs/heads/master | 2021-09-15T23:15:01.061392 | 2018-06-12T15:56:40 | 2018-06-12T15:56:40 | 113,100,927 | 0 | 2 | null | 2018-06-12T15:56:41 | 2017-12-04T22:11:56 | HTML | UTF-8 | Python | false | false | 12,554 | py | # This Python file uses the following encoding: utf-8
"""autogenerated by genpy from rail_manipulation_msgs/GripperActionResult.msg. Do not edit."""
import sys
python3 = True if sys.hexversion > 0x03000000 else False
import genpy
import struct
import rail_manipulation_msgs.msg
import genpy
import actionlib_msgs.msg
import std_msgs.msg
class GripperActionResult(genpy.Message):
_md5sum = "0698ce25b2d595b82357c010557e935f"
_type = "rail_manipulation_msgs/GripperActionResult"
_has_header = True #flag to mark the presence of a Header object
_full_text = """# ====== DO NOT MODIFY! AUTOGENERATED FROM AN ACTION DEFINITION ======
Header header
actionlib_msgs/GoalStatus status
GripperResult result
================================================================================
MSG: std_msgs/Header
# Standard metadata for higher-level stamped data types.
# This is generally used to communicate timestamped data
# in a particular coordinate frame.
#
# sequence ID: consecutively increasing ID
uint32 seq
#Two-integer timestamp that is expressed as:
# * stamp.sec: seconds (stamp_secs) since epoch (in Python the variable is called 'secs')
# * stamp.nsec: nanoseconds since stamp_secs (in Python the variable is called 'nsecs')
# time-handling sugar is provided by the client library
time stamp
#Frame this data is associated with
# 0: no frame
# 1: global frame
string frame_id
================================================================================
MSG: actionlib_msgs/GoalStatus
GoalID goal_id
uint8 status
uint8 PENDING = 0 # The goal has yet to be processed by the action server
uint8 ACTIVE = 1 # The goal is currently being processed by the action server
uint8 PREEMPTED = 2 # The goal received a cancel request after it started executing
# and has since completed its execution (Terminal State)
uint8 SUCCEEDED = 3 # The goal was achieved successfully by the action server (Terminal State)
uint8 ABORTED = 4 # The goal was aborted during execution by the action server due
# to some failure (Terminal State)
uint8 REJECTED = 5 # The goal was rejected by the action server without being processed,
# because the goal was unattainable or invalid (Terminal State)
uint8 PREEMPTING = 6 # The goal received a cancel request after it started executing
# and has not yet completed execution
uint8 RECALLING = 7 # The goal received a cancel request before it started executing,
# but the action server has not yet confirmed that the goal is canceled
uint8 RECALLED = 8 # The goal received a cancel request before it started executing
# and was successfully cancelled (Terminal State)
uint8 LOST = 9 # An action client can determine that a goal is LOST. This should not be
# sent over the wire by an action server
#Allow for the user to associate a string with GoalStatus for debugging
string text
================================================================================
MSG: actionlib_msgs/GoalID
# The stamp should store the time at which this goal was requested.
# It is used by an action server when it tries to preempt all
# goals that were requested before a certain time
time stamp
# The id provides a way to associate feedback and
# result message with specific goal requests. The id
# specified must be unique.
string id
================================================================================
MSG: rail_manipulation_msgs/GripperResult
# ====== DO NOT MODIFY! AUTOGENERATED FROM AN ACTION DEFINITION ======
# Define the result
bool success # If the gripper action was successful
"""
__slots__ = ['header','status','result']
_slot_types = ['std_msgs/Header','actionlib_msgs/GoalStatus','rail_manipulation_msgs/GripperResult']
def __init__(self, *args, **kwds):
"""
Constructor. Any message fields that are implicitly/explicitly
set to None will be assigned a default value. The recommend
use is keyword arguments as this is more robust to future message
changes. You cannot mix in-order arguments and keyword arguments.
The available fields are:
header,status,result
:param args: complete set of field values, in .msg order
:param kwds: use keyword arguments corresponding to message field names
to set specific fields.
"""
if args or kwds:
super(GripperActionResult, self).__init__(*args, **kwds)
#message fields cannot be None, assign default values for those that are
if self.header is None:
self.header = std_msgs.msg.Header()
if self.status is None:
self.status = actionlib_msgs.msg.GoalStatus()
if self.result is None:
self.result = rail_manipulation_msgs.msg.GripperResult()
else:
self.header = std_msgs.msg.Header()
self.status = actionlib_msgs.msg.GoalStatus()
self.result = rail_manipulation_msgs.msg.GripperResult()
def _get_types(self):
"""
internal API method
"""
return self._slot_types
def serialize(self, buff):
"""
serialize message into buffer
:param buff: buffer, ``StringIO``
"""
try:
_x = self
buff.write(_get_struct_3I().pack(_x.header.seq, _x.header.stamp.secs, _x.header.stamp.nsecs))
_x = self.header.frame_id
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.pack('<I%ss'%length, length, _x))
_x = self
buff.write(_get_struct_2I().pack(_x.status.goal_id.stamp.secs, _x.status.goal_id.stamp.nsecs))
_x = self.status.goal_id.id
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.pack('<I%ss'%length, length, _x))
buff.write(_get_struct_B().pack(self.status.status))
_x = self.status.text
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.pack('<I%ss'%length, length, _x))
buff.write(_get_struct_B().pack(self.result.success))
except struct.error as se: self._check_types(struct.error("%s: '%s' when writing '%s'" % (type(se), str(se), str(locals().get('_x', self)))))
except TypeError as te: self._check_types(ValueError("%s: '%s' when writing '%s'" % (type(te), str(te), str(locals().get('_x', self)))))
def deserialize(self, str):
"""
unpack serialized message in str into this message instance
:param str: byte array of serialized message, ``str``
"""
try:
if self.header is None:
self.header = std_msgs.msg.Header()
if self.status is None:
self.status = actionlib_msgs.msg.GoalStatus()
if self.result is None:
self.result = rail_manipulation_msgs.msg.GripperResult()
end = 0
_x = self
start = end
end += 12
(_x.header.seq, _x.header.stamp.secs, _x.header.stamp.nsecs,) = _get_struct_3I().unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
self.header.frame_id = str[start:end].decode('utf-8')
else:
self.header.frame_id = str[start:end]
_x = self
start = end
end += 8
(_x.status.goal_id.stamp.secs, _x.status.goal_id.stamp.nsecs,) = _get_struct_2I().unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
self.status.goal_id.id = str[start:end].decode('utf-8')
else:
self.status.goal_id.id = str[start:end]
start = end
end += 1
(self.status.status,) = _get_struct_B().unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
self.status.text = str[start:end].decode('utf-8')
else:
self.status.text = str[start:end]
start = end
end += 1
(self.result.success,) = _get_struct_B().unpack(str[start:end])
self.result.success = bool(self.result.success)
return self
except struct.error as e:
raise genpy.DeserializationError(e) #most likely buffer underfill
def serialize_numpy(self, buff, numpy):
"""
serialize message with numpy array types into buffer
:param buff: buffer, ``StringIO``
:param numpy: numpy python module
"""
try:
_x = self
buff.write(_get_struct_3I().pack(_x.header.seq, _x.header.stamp.secs, _x.header.stamp.nsecs))
_x = self.header.frame_id
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.pack('<I%ss'%length, length, _x))
_x = self
buff.write(_get_struct_2I().pack(_x.status.goal_id.stamp.secs, _x.status.goal_id.stamp.nsecs))
_x = self.status.goal_id.id
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.pack('<I%ss'%length, length, _x))
buff.write(_get_struct_B().pack(self.status.status))
_x = self.status.text
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.pack('<I%ss'%length, length, _x))
buff.write(_get_struct_B().pack(self.result.success))
except struct.error as se: self._check_types(struct.error("%s: '%s' when writing '%s'" % (type(se), str(se), str(locals().get('_x', self)))))
except TypeError as te: self._check_types(ValueError("%s: '%s' when writing '%s'" % (type(te), str(te), str(locals().get('_x', self)))))
def deserialize_numpy(self, str, numpy):
"""
unpack serialized message in str into this message instance using numpy for array types
:param str: byte array of serialized message, ``str``
:param numpy: numpy python module
"""
try:
if self.header is None:
self.header = std_msgs.msg.Header()
if self.status is None:
self.status = actionlib_msgs.msg.GoalStatus()
if self.result is None:
self.result = rail_manipulation_msgs.msg.GripperResult()
end = 0
_x = self
start = end
end += 12
(_x.header.seq, _x.header.stamp.secs, _x.header.stamp.nsecs,) = _get_struct_3I().unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
self.header.frame_id = str[start:end].decode('utf-8')
else:
self.header.frame_id = str[start:end]
_x = self
start = end
end += 8
(_x.status.goal_id.stamp.secs, _x.status.goal_id.stamp.nsecs,) = _get_struct_2I().unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
self.status.goal_id.id = str[start:end].decode('utf-8')
else:
self.status.goal_id.id = str[start:end]
start = end
end += 1
(self.status.status,) = _get_struct_B().unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
self.status.text = str[start:end].decode('utf-8')
else:
self.status.text = str[start:end]
start = end
end += 1
(self.result.success,) = _get_struct_B().unpack(str[start:end])
self.result.success = bool(self.result.success)
return self
except struct.error as e:
raise genpy.DeserializationError(e) #most likely buffer underfill
# Lazily-created, module-level caches for the struct.Struct packers used by
# the (de)serialization methods above. Autogenerated by genpy.
_struct_I = genpy.struct_I
def _get_struct_I():
  global _struct_I
  return _struct_I
_struct_3I = None
def _get_struct_3I():
  # Little-endian 3x uint32 (header seq + stamp secs/nsecs).
  global _struct_3I
  if _struct_3I is None:
      _struct_3I = struct.Struct("<3I")
  return _struct_3I
_struct_B = None
def _get_struct_B():
  # Single unsigned byte (status code / bool success flag).
  global _struct_B
  if _struct_B is None:
      _struct_B = struct.Struct("<B")
  return _struct_B
_struct_2I = None
def _get_struct_2I():
  # Little-endian 2x uint32 (goal_id stamp secs/nsecs).
  global _struct_2I
  if _struct_2I is None:
      _struct_2I = struct.Struct("<2I")
  return _struct_2I
| [
"hjr01211@gmail.com"
] | hjr01211@gmail.com |
532d9ca34f1c0b575e1c9c921622da06dc7220ff | 3c40c7ca9ab7e59b8bf34a91acffa5850b9b797c | /dotfiles/bash/setup.py | c6a9474c4f1de1fcae51fd3b7befbe054d0b7ca9 | [] | no_license | dkudrow/dotfiles | df0737c0ad31c1db5fd380e3241548b07ff324db | 89285b9a65e0ea52a9cc20254a7a635500eb643a | refs/heads/master | 2021-01-10T20:26:30.225595 | 2018-04-09T22:40:56 | 2018-04-09T22:40:56 | 20,342,657 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 298 | py | # dotfiles/bash/setup.py
from dotfiles.common import *
import os
def setup(params, cfgdir):
    """Link the bash dotfiles from *cfgdir* into $HOME and ensure a
    machine-local rc file exists."""
    for name in ('.bashrc', '.bash_aliases'):
        ln(os.path.join(cfgdir, name), os.path.join(HOME, name), params)
    # Local overrides live in .bashrc.local, which is never symlinked.
    touch(os.path.join(HOME, '.bashrc.local'), params)
| [
"you@example.com"
] | you@example.com |
6c95a7bd09b068813b9c0a5829973bb8d0395ab0 | 623f977c05e559451b5131c225388d99737fca5b | /clustering.py | 4cb4f7826d1068fc6301b83fdbf6be644b973325 | [] | no_license | kmgr10/clustering | e874066ef9fc6204b1080bf9c1d6d31018d9567c | 8bf2a588dc2c210f280f5deecdee0920bc41c70a | refs/heads/master | 2022-11-29T14:32:04.342212 | 2020-08-16T16:48:30 | 2020-08-16T16:48:30 | 284,081,447 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 507 | py | from sklearn.cluster import KMeans
from sklearn import preprocessing
from sklearn.cluster import AgglomerativeClustering
import numpy as np
def generate_cluster_labels(data, clus_method, clus_n):
    """Min-max scale `data` and fit the requested clustering model.

    Args:
        data: 2-D array-like of shape (n_samples, n_features).
        clus_method: 'K-means' for k-means, or 'Agglo' for complete-linkage
            agglomerative clustering with an l2 affinity.
        clus_n: number of clusters to form.

    Returns:
        The fitted clustering estimator; assignments are on its `labels_`
        attribute.

    Raises:
        ValueError: if `clus_method` is not a supported method name.
    """
    # Min-max scaling keeps every feature in [0, 1] so no single feature
    # dominates the distance computations.
    scaler = preprocessing.MinMaxScaler().fit(data)
    X_scaled = scaler.transform(data)
    if clus_method == 'K-means':
        clus = KMeans(n_clusters=clus_n).fit(X_scaled)
    elif clus_method == 'Agglo':
        clus = AgglomerativeClustering(
            n_clusters=clus_n, affinity='l2', linkage='complete').fit(X_scaled)
    else:
        # Previously an unknown method fell through to an UnboundLocalError
        # at `return clus`; fail fast with a clear message instead.
        raise ValueError(
            "clus_method must be 'K-means' or 'Agglo', got %r" % (clus_method,))
    return clus
"me@me.com"
] | me@me.com |
8771ba438ade98f28b96a9fb88b3121bc0f9cbac | c09e5bb87702a0828b0ec1c2eb221566529f2432 | /chap14_exe.py | deff1d79024b0f13c756c294c70cedf8042f2d7b | [] | no_license | anubhav-shukla/Learnpyhton | 396a1c8748c6cf640726e3609f7399b161c00858 | 7b53b30a0a6829d480f1d3c7d5a914de1c57fa73 | refs/heads/master | 2023-07-05T03:04:22.117259 | 2021-09-01T12:46:01 | 2021-09-01T12:46:01 | 346,263,181 | 0 | 0 | null | 2021-09-01T12:46:02 | 2021-03-10T07:02:27 | Python | UTF-8 | Python | false | false | 575 | py | # exercise
from functools import wraps
import time
def calculate_time(func):
    """Decorator that prints how long each call to `func` takes.

    The wrapped function's return value is passed through unchanged.
    """
    @wraps(func)
    def wrap(*args, **kwargs):
        print(f'Executing ......{func.__name__}')
        t1 = time.time()
        # Bug fix: the original called `func` twice per invocation (once for
        # timing, once for the return value), doubling any side effects and
        # misreporting the elapsed time. Call it exactly once.
        returned = func(*args, **kwargs)
        t2 = time.time()
        total = t2 - t1
        print(f'This function takes{total} sec to run')
        return returned
    return wrap
@calculate_time
def funcl():
    """Demo function used to exercise the `calculate_time` decorator."""
    print("this is function")
funcl()
"noreply@github.com"
] | anubhav-shukla.noreply@github.com |
b4e8562b690e03fd42d7b8893605994cdf54a829 | 95d9f80d130e9ce030f24f4a2d5a5dc8f3b9140d | /sp/migrations/0004_auto_20200401_1328.py | 4cdb3298ee86441b8d28ba51f9717900e8b0a548 | [
"BSD-3-Clause"
] | permissive | mnasiri/django-saml-sp | b75cb225cdc0db11571d312d9ab54e10560118d8 | 8e112faecc7a5866a44a95b16c6f694fce5cecd1 | refs/heads/master | 2022-04-12T15:37:13.631042 | 2020-04-01T13:33:40 | 2020-04-01T13:33:40 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 843 | py | # Generated by Django 3.0.4 on 2020-04-01 13:28
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("sp", "0003_auto_20200331_1934"),
]
operations = [
migrations.AlterField(
model_name="idp", name="authenticate_method", field=models.CharField(blank=True, max_length=200),
),
migrations.AlterField(
model_name="idp",
name="base_url",
field=models.CharField(
help_text="Root URL for the site, including http/https, no trailing slash.",
max_length=200,
verbose_name="Base URL",
),
),
migrations.AlterField(
model_name="idp", name="login_method", field=models.CharField(blank=True, max_length=200),
),
]
| [
"dcwatson@gmail.com"
] | dcwatson@gmail.com |
5e04be56a5ddda1b05ae34fe3d07ad9b5e3e211f | a2006f6aae3c36b1b9fe40b7b47941b515b49d5c | /llin/urls.py | d8829c7c0660cb42b81f327e88f7734d8e210d42 | [] | no_license | hakimkal/llin | f32832e599752edc1e7ff4f23d10f94bc11e47f3 | 56b5f449375153b6ec786994d564a721339a2b26 | refs/heads/master | 2016-09-09T21:06:58.224071 | 2014-12-15T10:19:39 | 2014-12-15T10:19:39 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,076 | py | from django.conf.urls import patterns, include, url
from django.contrib import admin
from states import views as state_view
from django.contrib.auth.views import login,logout
admin.autodiscover()
urlpatterns = patterns('',
# Examples:
# url(r'^$', 'llin.views.home', name='home'),
# url(r'^blog/', include('blog.urls')),
url(r'^$', include('states.urls')),
url(r'^maps/$',state_view.MapsStatesListing.as_view()),
url(r'^stateslist/$', state_view.StateListFrame.as_view()),
url(r'^states/',include('states.urls')),
url(r'^grappelli/', include('grappelli.urls')),
url(r'^login/$',login,{'template_name':'accounts/login.html'},name='login_link'),
url(r'^logout/$',logout,{'next_page':'/'}, name = 'logout'),
url(r'^accounts/',include('accounts.urls')),
url(r'^admin/', include(admin.site.urls)),
)
#for serving media files
from llin import settings
urlpatterns += patterns(
'django.views.static',
(r'media/(?P<path>.*)',
'serve',
{'document_root': settings.MEDIA_ROOT}), ) | [
"hakimkal@gmail.com"
] | hakimkal@gmail.com |
760e1b121ec13bc07ed91ee24f10171fa98d51f6 | 946a9dcf4e644f0d3f806f016a23ae8d96095082 | /LeetCode/Two Pointers/763_PartitionLabels.py | e9a910bd636450f8af37e9418ab39efb51dc76ad | [] | no_license | HzCeee/Algorithms | 3dea898f071f4103ca3eb038f63b01ba4ba95383 | e05f29071d0badd081535e773f43ebc303aa12c4 | refs/heads/master | 2018-10-20T21:54:37.154631 | 2018-10-11T20:46:34 | 2018-10-11T20:46:34 | 116,142,856 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 557 | py | def partitionLabels(self, S):
"""
:type S: str
:rtype: List[int]
"""
# leftPtr and righrPtr denotes the start and end of the current partition
# curPtr denotes the current checked index
lastOccurence = {char: curPtr for curPtr, char in enumerate(S)}
rightPtr = leftPtr = 0
ans = []
for curPtr, char in enumerate(S):
rightPtr = max(rightPtr, lastOccurence[char])
if curPtr == rightPtr:
ans.append(curPtr - leftPtr + 1)
leftPtr = curPtr + 1
return ans | [
"huangzixihzx@gmail.com"
] | huangzixihzx@gmail.com |
8d32d9b895aea36e66f915ee29f009e5d94b6c93 | 8f90e2899978bb81d283fe76a5b287c0e42bd7ea | /oscrypto/_win/_advapi32_ctypes.py | 36690d1f8657a198f929cf66a1d26bb08d88d91a | [
"MIT"
] | permissive | haowanxing/oscrypto | 24c98f9858f11feb55540e66224239c099abb756 | 14b5bd07dfc0e3fe7eee1048f56d5f1af53aee51 | refs/heads/master | 2021-01-17T19:45:53.095897 | 2016-08-29T17:03:17 | 2016-08-29T17:06:05 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,466 | py | # coding: utf-8
from __future__ import unicode_literals, division, absolute_import, print_function
import ctypes
from ctypes import windll, wintypes, POINTER, Structure, c_void_p, c_char_p, c_uint
from ctypes.wintypes import BOOL, DWORD
from .._ffi import FFIEngineError, LibraryNotFoundError
from .._types import str_cls
__all__ = [
'advapi32',
'get_error',
]
# Load advapi32.dll, translating a missing-DLL OSError into the library's own
# LibraryNotFoundError; any other load failure propagates unchanged.
try:
    advapi32 = windll.advapi32
except (OSError) as e:
    if str_cls(e).find('The specified module could not be found') != -1:
        raise LibraryNotFoundError('advapi32.dll could not be found')
    raise

# Windows CryptoAPI handle and parameter type aliases used throughout the
# ctypes signatures below.
HCRYPTPROV = wintypes.HANDLE
HCRYPTKEY = wintypes.HANDLE
HCRYPTHASH = wintypes.HANDLE
PBYTE = c_char_p
ALG_ID = c_uint
try:
class CRYPTOAPI_BLOB(Structure): # noqa
_fields_ = [
("cbData", DWORD),
("pbData", POINTER(ctypes.c_byte)),
]
CRYPT_INTEGER_BLOB = CRYPTOAPI_BLOB
CRYPT_OBJID_BLOB = CRYPTOAPI_BLOB
CRYPT_DER_BLOB = CRYPTOAPI_BLOB
CRYPT_ATTR_BLOB = CRYPTOAPI_BLOB
class CRYPT_ALGORITHM_IDENTIFIER(Structure):
_fields = [
('pszObjId', wintypes.LPSTR),
('Parameters', CRYPT_OBJID_BLOB),
]
class CRYPT_BIT_BLOB(Structure):
    """Bit string: byte buffer plus count of unused trailing bits."""
    _fields_ = [
        ('cbData', DWORD),
        ('pbData', PBYTE),
        ('cUnusedBits', DWORD),
    ]

class CERT_PUBLIC_KEY_INFO(Structure):
    """Public key: algorithm identifier plus the encoded key bits."""
    _fields_ = [
        ('Algorithm', CRYPT_ALGORITHM_IDENTIFIER),
        ('PublicKey', CRYPT_BIT_BLOB),
    ]

class CRYPT_ATTRIBUTE(Structure):
    """A single OID-identified attribute with one or more values."""
    _fields_ = [
        ('pszObjId', wintypes.LPSTR),
        ('cValue', DWORD),
        ('rgValue', POINTER(CRYPT_ATTR_BLOB)),
    ]

class CRYPT_ATTRIBUTES(Structure):
    """Counted array of CRYPT_ATTRIBUTE entries."""
    _fields_ = [
        ('cAttr', DWORD),
        ('rgAttr', POINTER(CRYPT_ATTRIBUTE)),
    ]

class CRYPT_PRIVATE_KEY_INFO(Structure):
    """PKCS#8-style private key info: version, algorithm, key, attributes."""
    _fields_ = [
        ('Version', DWORD),
        ('Algorithm', CRYPT_ALGORITHM_IDENTIFIER),
        ('PrivateKey', CRYPT_DER_BLOB),
        ('pAttributes', POINTER(CRYPT_ATTRIBUTES)),
    ]

class PUBLICKEYSTRUC(Structure):
    """Common header for CryptoAPI key blobs (a.k.a. BLOBHEADER)."""
    _fields_ = [
        ('bType', wintypes.BYTE),
        ('bVersion', wintypes.BYTE),
        ('reserved', wintypes.WORD),
        ('aiKeyAlg', ALG_ID),
    ]
BLOBHEADER = PUBLICKEYSTRUC

class DSSPUBKEY(Structure):
    """DSS public key blob header fields (magic + bit length)."""
    _fields_ = [
        ('magic', DWORD),
        ('bitlen', DWORD),
    ]

class DSSBLOBHEADER(Structure):
    """Blob header followed by the DSS public key header."""
    _fields_ = [
        ('publickeystruc', PUBLICKEYSTRUC),
        ('dsspubkey', DSSPUBKEY),
    ]

class RSAPUBKEY(Structure):
    """RSA public key blob header fields (magic, bit length, exponent)."""
    _fields_ = [
        ('magic', DWORD),
        ('bitlen', DWORD),
        ('pubexp', DWORD),
    ]

class RSABLOBHEADER(Structure):
    """Blob header followed by the RSA public key header."""
    _fields_ = [
        ('publickeystruc', PUBLICKEYSTRUC),
        ('rsapubkey', RSAPUBKEY),
    ]

class PLAINTEXTKEYBLOB(Structure):
    """Header for a plaintext symmetric key blob; key bytes follow it."""
    _fields_ = [
        ('hdr', BLOBHEADER),
        ('dwKeySize', DWORD),
        # rgbKeyData omitted since it is a flexible array member
    ]

class DSSSEED(Structure):
    """DSS parameter-generation seed (counter + 20 seed bytes)."""
    _fields_ = [
        ('counter', DWORD),
        ('seed', wintypes.BYTE * 20),
    ]
# Declare argtypes/restype for every advapi32 function used, so ctypes
# marshals arguments correctly on both 32- and 64-bit processes (handles and
# pointers must not be truncated to the default C int).

# Provider/context lifecycle.
advapi32.CryptAcquireContextW.argtypes = [
    POINTER(HCRYPTPROV),
    wintypes.LPCWSTR,
    wintypes.LPCWSTR,
    DWORD,
    DWORD
]
advapi32.CryptAcquireContextW.restype = wintypes.BOOL

advapi32.CryptReleaseContext.argtypes = [
    HCRYPTPROV,
    DWORD
]
advapi32.CryptReleaseContext.restype = wintypes.BOOL

# Key import/generation/inspection/export.
advapi32.CryptImportKey.argtypes = [
    HCRYPTPROV,
    PBYTE,
    DWORD,
    HCRYPTKEY,
    DWORD,
    POINTER(HCRYPTKEY)
]
advapi32.CryptImportKey.restype = BOOL

advapi32.CryptGenKey.argtypes = [
    HCRYPTPROV,
    ALG_ID,
    DWORD,
    POINTER(HCRYPTKEY)
]
advapi32.CryptGenKey.restype = wintypes.BOOL

advapi32.CryptGetKeyParam.argtypes = [
    HCRYPTKEY,
    DWORD,
    PBYTE,
    POINTER(DWORD),
    DWORD
]
advapi32.CryptGetKeyParam.restype = wintypes.BOOL

advapi32.CryptSetKeyParam.argtypes = [
    HCRYPTKEY,
    DWORD,
    c_void_p,
    DWORD
]
advapi32.CryptSetKeyParam.restype = wintypes.BOOL

advapi32.CryptExportKey.argtypes = [
    HCRYPTKEY,
    HCRYPTKEY,
    DWORD,
    DWORD,
    PBYTE,
    POINTER(DWORD)
]
advapi32.CryptExportKey.restype = BOOL

advapi32.CryptDestroyKey.argtypes = [
    HCRYPTKEY
]
advapi32.CryptDestroyKey.restype = wintypes.BOOL

# Hashing and signatures.
advapi32.CryptCreateHash.argtypes = [
    HCRYPTPROV,
    ALG_ID,
    HCRYPTKEY,
    DWORD,
    POINTER(HCRYPTHASH)
]
advapi32.CryptCreateHash.restype = BOOL

advapi32.CryptHashData.argtypes = [
    HCRYPTHASH,
    PBYTE,
    DWORD,
    DWORD
]
advapi32.CryptHashData.restype = BOOL

advapi32.CryptSetHashParam.argtypes = [
    HCRYPTHASH,
    DWORD,
    PBYTE,
    DWORD
]
advapi32.CryptSetHashParam.restype = BOOL

advapi32.CryptSignHashW.argtypes = [
    HCRYPTHASH,
    DWORD,
    wintypes.LPCWSTR,
    DWORD,
    PBYTE,
    POINTER(DWORD)
]
advapi32.CryptSignHashW.restype = BOOL

advapi32.CryptVerifySignatureW.argtypes = [
    HCRYPTHASH,
    PBYTE,
    DWORD,
    HCRYPTKEY,
    wintypes.LPCWSTR,
    DWORD
]
advapi32.CryptVerifySignatureW.restype = BOOL

advapi32.CryptDestroyHash.argtypes = [
    HCRYPTHASH
]
advapi32.CryptDestroyHash.restype = wintypes.BOOL

# Symmetric/asymmetric encryption primitives.
advapi32.CryptEncrypt.argtypes = [
    HCRYPTKEY,
    HCRYPTHASH,
    BOOL,
    DWORD,
    PBYTE,
    POINTER(DWORD),
    DWORD
]
advapi32.CryptEncrypt.restype = BOOL

advapi32.CryptDecrypt.argtypes = [
    HCRYPTKEY,
    HCRYPTHASH,
    BOOL,
    DWORD,
    PBYTE,
    POINTER(DWORD)
]
advapi32.CryptDecrypt.restype = BOOL
# ctypes raises AttributeError when a function is missing from the loaded
# DLL; surface that as the library's FFI setup error instead.
except (AttributeError):
    raise FFIEngineError('Error initializing ctypes')
# Re-export the handle aliases and structure classes as attributes of the
# loaded advapi32 library object, so callers can reach them through the same
# object they use for the functions.
setattr(advapi32, 'HCRYPTPROV', HCRYPTPROV)
setattr(advapi32, 'HCRYPTKEY', HCRYPTKEY)
setattr(advapi32, 'HCRYPTHASH', HCRYPTHASH)
setattr(advapi32, 'CRYPT_INTEGER_BLOB', CRYPT_INTEGER_BLOB)
setattr(advapi32, 'CRYPT_OBJID_BLOB', CRYPT_OBJID_BLOB)
setattr(advapi32, 'CRYPT_DER_BLOB', CRYPT_DER_BLOB)
setattr(advapi32, 'CRYPT_ATTR_BLOB', CRYPT_ATTR_BLOB)
setattr(advapi32, 'CRYPT_ALGORITHM_IDENTIFIER', CRYPT_ALGORITHM_IDENTIFIER)
setattr(advapi32, 'CRYPT_BIT_BLOB', CRYPT_BIT_BLOB)
setattr(advapi32, 'CERT_PUBLIC_KEY_INFO', CERT_PUBLIC_KEY_INFO)
setattr(advapi32, 'CRYPT_PRIVATE_KEY_INFO', CRYPT_PRIVATE_KEY_INFO)
setattr(advapi32, 'CRYPT_ATTRIBUTE', CRYPT_ATTRIBUTE)
setattr(advapi32, 'CRYPT_ATTRIBUTES', CRYPT_ATTRIBUTES)
setattr(advapi32, 'PUBLICKEYSTRUC', PUBLICKEYSTRUC)
setattr(advapi32, 'DSSPUBKEY', DSSPUBKEY)
setattr(advapi32, 'DSSBLOBHEADER', DSSBLOBHEADER)
setattr(advapi32, 'RSAPUBKEY', RSAPUBKEY)
setattr(advapi32, 'RSABLOBHEADER', RSABLOBHEADER)
setattr(advapi32, 'BLOBHEADER', BLOBHEADER)
setattr(advapi32, 'PLAINTEXTKEYBLOB', PLAINTEXTKEYBLOB)
setattr(advapi32, 'DSSSEED', DSSSEED)
def get_error():
    """Return the calling thread's last Windows error as ``(code, message)``."""
    code = ctypes.GetLastError()
    return code, ctypes.FormatError(code)
| [
"will@wbond.net"
] | will@wbond.net |
90126f6e51fa927ffe22c0d1c6e12a7c5d277fe5 | 0993569fced8fcdc9222f15563cca145e48fe297 | /appengine/swarming/ts_mon_metrics.py | 718ebaef49d91660f27d4789251e8997aa3655cb | [
"Apache-2.0",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | DalavanCloud/luci-py | 0cc112f4a5acb4f2aea115b1859c90623848ae4c | e0a8a5640c4104e5c90781d833168aa8a8d1f24d | refs/heads/master | 2020-03-25T16:03:52.327328 | 2018-06-01T23:29:36 | 2018-06-01T23:42:56 | 143,913,142 | 0 | 1 | null | 2018-08-07T18:38:16 | 2018-08-07T18:38:15 | null | UTF-8 | Python | false | false | 17,913 | py | # Copyright 2016 The LUCI Authors. All rights reserved.
# Use of this source code is governed under the Apache License, Version 2.0
# that can be found in the LICENSE file.
"""Timeseries metrics."""
from collections import defaultdict
import datetime
import json
import logging
from google.appengine.datastore.datastore_query import Cursor
from components import utils
import gae_ts_mon
from server import bot_management
from server import task_result
# - android_devices is a side effect of the health of each Android devices
# connected to the bot.
# - caches has an unbounded matrix.
# - server_version is the current server version. It'd be good to have but the
# current monitoring pipeline is not adapted for this.
# - id is unique for each bot.
# - temp_band is android specific.
# Keep in sync with ../swarming_bot/bot_code/bot_main.py
_IGNORED_DIMENSIONS = (
'android_devices', 'caches', 'id', 'server_version', 'temp_band')
# Real timeout is 60s, keep it slightly under to bail out early.
_REQUEST_TIMEOUT_SEC = 50
# Cap the max number of items per taskqueue task, to keep the total
# number of collected streams managable within each instance.
_EXECUTORS_PER_SHARD = 500
_JOBS_PER_SHARD = 500
# Override default target fields for app-global metrics.
_TARGET_FIELDS = {
'job_name': '', # module name
'hostname': '', # version
'task_num': 0, # instance ID
}
### All the metrics.
# Custom bucketer with 12% resolution in the range of 1..10**5. Used for job
# cycle times.
_bucketer = gae_ts_mon.GeometricBucketer(growth_factor=10**0.05,
num_finite_buckets=100)
# Regular (instance-local) metrics: jobs/completed and jobs/durations.
# Both have the following metric fields:
# - project_id: e.g. 'chromium'
# - subproject_id: e.g. 'blink'. Set to empty string if not used.
# - pool: e.g. 'Chrome'
# - spec_name: name of a job specification, e.g. '<master>:<builder>'
# for buildbot jobs.
# - result: one of 'success', 'failure', or 'infra-failure'.
_jobs_completed = gae_ts_mon.CounterMetric(
'jobs/completed',
'Number of completed jobs.', [
gae_ts_mon.StringField('spec_name'),
gae_ts_mon.StringField('project_id'),
gae_ts_mon.StringField('subproject_id'),
gae_ts_mon.StringField('pool'),
gae_ts_mon.StringField('result'),
])
_jobs_durations = gae_ts_mon.CumulativeDistributionMetric(
'jobs/durations',
'Cycle times of completed jobs, in seconds.', [
gae_ts_mon.StringField('spec_name'),
gae_ts_mon.StringField('project_id'),
gae_ts_mon.StringField('subproject_id'),
gae_ts_mon.StringField('pool'),
gae_ts_mon.StringField('result'),
],
bucketer=_bucketer)
# Similar to jobs/completed and jobs/duration, but with a dedup field.
# - project_id: e.g. 'chromium'
# - subproject_id: e.g. 'blink'. Set to empty string if not used.
# - pool: e.g. 'Chrome'
# - spec_name: name of a job specification, e.g. '<master>:<builder>'
# for buildbot jobs.
# - deduped: boolean describing whether the job was deduped or not.
_jobs_requested = gae_ts_mon.CounterMetric(
'jobs/requested',
'Number of requested jobs over time.', [
gae_ts_mon.StringField('spec_name'),
gae_ts_mon.StringField('project_id'),
gae_ts_mon.StringField('subproject_id'),
gae_ts_mon.StringField('pool'),
gae_ts_mon.BooleanField('deduped'),
])
# Swarming-specific metric. Metric fields:
# - project_id: e.g. 'chromium'
# - subproject_id: e.g. 'blink'. Set to empty string if not used.
# - pool: e.g. 'Chrome'
# - spec_name: name of a job specification, e.g. '<master>:<builder>'
# for buildbot jobs.
_tasks_expired = gae_ts_mon.CounterMetric(
'swarming/tasks/expired',
'Number of expired tasks', [
gae_ts_mon.StringField('spec_name'),
gae_ts_mon.StringField('project_id'),
gae_ts_mon.StringField('subproject_id'),
gae_ts_mon.StringField('pool'),
])
_task_bots_runnable = gae_ts_mon.CumulativeDistributionMetric(
'swarming/tasks/bots_runnable',
'Number of bots available to run tasks.', [
gae_ts_mon.StringField('pool'),
],
)
# Global metric. Metric fields:
# - project_id: e.g. 'chromium'
# - subproject_id: e.g. 'blink'. Set to empty string if not used.
# - pool: e.g. 'Chrome'
# - spec_name: name of a job specification, e.g. '<master>:<builder>'
# for buildbot jobs.
# Override target field:
# - hostname: 'autogen:<executor_id>': name of the bot that executed a job,
# or an empty string. e.g. 'autogen:swarm42-m4'.
# Value should be 'pending' or 'running'. Completed / canceled jobs should not
# send this metric.
_jobs_running = gae_ts_mon.BooleanMetric(
'jobs/running',
'Presence metric for a running job.', [
gae_ts_mon.StringField('spec_name'),
gae_ts_mon.StringField('project_id'),
gae_ts_mon.StringField('subproject_id'),
gae_ts_mon.StringField('pool'),
])
# Global metric. Metric fields:
# - project_id: e.g. 'chromium'
# - subproject_id: e.g. 'blink'. Set to empty string if not used.
# - pool: e.g. 'Chrome'
# - spec_name: name of a job specification, e.g. '<master>:<builder>'
# for buildbot jobs.
# - status: 'pending' or 'running'.
_jobs_active = gae_ts_mon.GaugeMetric(
'jobs/active',
'Number of running, pending or otherwise active jobs.', [
gae_ts_mon.StringField('spec_name'),
gae_ts_mon.StringField('project_id'),
gae_ts_mon.StringField('subproject_id'),
gae_ts_mon.StringField('pool'),
gae_ts_mon.StringField('status'),
])
# Global metric. Target field: hostname = 'autogen:<executor_id>' (bot id).
_executors_pool = gae_ts_mon.StringMetric(
'executors/pool',
'Pool name for a given job executor.',
None)
# Global metric. Target fields:
# - hostname = 'autogen:<executor_id>' (bot id).
# Status value must be 'ready', 'running', or anything else, possibly
# swarming-specific, when it cannot run a job. E.g. 'quarantined' or
# 'dead'.
_executors_status = gae_ts_mon.StringMetric(
'executors/status',
'Status of a job executor.',
None)
# Global metric. Target fields:
# - hostname = 'autogen:<executor_id>' (bot id).
# Status value must be 'ready', 'running', or anything else, possibly
# swarming-specific, when it cannot run a job. E.g. 'quarantined' or
# 'dead'.
# Note that 'running' will report data as long as the job is running,
# so it is best to restrict data to status == 'pending.'
_jobs_pending_durations = gae_ts_mon.NonCumulativeDistributionMetric(
'jobs/pending_durations',
'Pending times of active jobs, in seconds.', [
gae_ts_mon.StringField('spec_name'),
gae_ts_mon.StringField('project_id'),
gae_ts_mon.StringField('subproject_id'),
gae_ts_mon.StringField('pool'),
gae_ts_mon.StringField('status'),
],
bucketer=_bucketer)
# Global metric. Target fields:
# - hostname = 'autogen:<executor_id>' (bot id).
# Status value must be 'ready', 'running', or anything else, possibly
# swarming-specific, when it cannot run a job. E.g. 'quarantined' or
# 'dead'.
# Note that 'running' will report data as long as the job is running,
# so it is best to restrict data to status == 'pending.'
_jobs_max_pending_duration = gae_ts_mon.FloatMetric(
'jobs/max_pending_duration',
'Maximum pending seconds of pending jobs.', [
gae_ts_mon.StringField('spec_name'),
gae_ts_mon.StringField('project_id'),
gae_ts_mon.StringField('subproject_id'),
gae_ts_mon.StringField('pool'),
gae_ts_mon.StringField('status'),
])
# Global metric. Metric fields:
# - busy = Whether or not the count is for machines that are busy.
# - machine_type = server.lease_management.MachineType.key.id().
_machine_types_actual_size = gae_ts_mon.GaugeMetric(
'swarming/machine_types/actual_size',
'Actual number of Machine Provider bots per MachineType.', [
gae_ts_mon.BooleanField('busy'),
gae_ts_mon.StringField('machine_type'),
])
# Global metric. Metric fields:
# - machine_type = server.lease_management.MachineType.key.id().
# - enabled = server.lease_management.MachineType.enabled.
_machine_types_target_size = gae_ts_mon.GaugeMetric(
'swarming/machine_types/target_size',
'Target number of Machine Provider bots per MachineType.', [
gae_ts_mon.BooleanField('enabled'),
gae_ts_mon.StringField('machine_type'),
])
# Instance metric. Metric fields:
# - machine_type = server.lease_managment.MachineType.key.id().
_machine_types_connection_time = gae_ts_mon.CumulativeDistributionMetric(
'swarming/machine_types/connection_time',
'Time between bot_leased and bot_connected events.', [
gae_ts_mon.StringField('machine_type'),
])
### Private stuff.
def _pool_from_dimensions(dimensions):
    """Return a canonical string of flattened dimensions."""
    flattened = []
    for key, values in dimensions.iteritems():
        if key in _IGNORED_DIMENSIONS:
            continue
        # Drop any value that is a prefix of a later one; values is already
        # sorted, so only subsequent entries can extend it.
        flattened.extend(
            u'%s:%s' % (key, value)
            for i, value in enumerate(values)
            if not any(v.startswith(value) for v in values[i + 1:]))
    return u'|'.join(sorted(flattened))
def _set_jobs_metrics(payload):
    """Scans pending/running TaskResultSummary entities and reports job gauges.

    Runs as one link of a taskqueue chain: processes up to _JOBS_PER_SHARD
    summaries (or until the request deadline nears), then enqueues a
    continuation task carrying the datastore cursor and running totals.

    Args:
      payload: JSON-encoded _ShardParams from the previous link, or falsy for
        the first link of the chain.
    """
    params = _ShardParams(payload)

    # Only these two states are reported; anything else maps to ''.
    state_map = {task_result.State.RUNNING: 'running',
                 task_result.State.PENDING: 'pending'}
    jobs_counts = defaultdict(lambda: 0)
    jobs_total = 0
    jobs_pending_distributions = defaultdict(
        lambda: gae_ts_mon.Distribution(_bucketer))
    jobs_max_pending_durations = defaultdict(
        lambda: 0.0)

    query_iter = task_result.get_result_summaries_query(
        None, None, 'created_ts', 'pending_running', None).iter(
        produce_cursors=True, start_cursor=params.cursor)

    while query_iter.has_next():
        runtime = (utils.utcnow() - params.start_time).total_seconds()
        # Hand off to the next shard before hitting the quota or deadline.
        if jobs_total >= _JOBS_PER_SHARD or runtime > _REQUEST_TIMEOUT_SEC:
            params.cursor = query_iter.cursor_after()
            params.task_count += 1
            utils.enqueue_task(url='/internal/taskqueue/tsmon/jobs',
                               queue_name='tsmon',
                               payload=params.json())
            params.task_count -= 1  # For accurate logging below.
            break

        params.count += 1
        jobs_total += 1
        summary = query_iter.next()
        status = state_map.get(summary.state, '')
        fields = _extract_job_fields(summary.tags)
        target_fields = dict(_TARGET_FIELDS)
        if summary.bot_id:
            target_fields['hostname'] = 'autogen:' + summary.bot_id
        if summary.bot_id and status == 'running':
            _jobs_running.set(True, target_fields=target_fields, fields=fields)
        fields['status'] = status

        # Aggregate per unique field combination; the sorted tuple is hashable.
        key = tuple(sorted(fields.iteritems()))
        jobs_counts[key] += 1

        pending_duration = summary.pending_now(utils.utcnow())
        if pending_duration is not None:
            jobs_pending_distributions[key].add(pending_duration.total_seconds())
            jobs_max_pending_durations[key] = max(
                jobs_max_pending_durations[key],
                pending_duration.total_seconds())

    logging.debug(
        '_set_jobs_metrics: task %d started at %s, processed %d jobs (%d total)',
        params.task_count, params.task_start, jobs_total, params.count)

    # Global counts are sharded by task_num and aggregated in queries.
    target_fields = dict(_TARGET_FIELDS)
    target_fields['task_num'] = params.task_count

    for key, count in jobs_counts.iteritems():
        _jobs_active.set(count, target_fields=target_fields, fields=dict(key))
    for key, distribution in jobs_pending_distributions.iteritems():
        _jobs_pending_durations.set(
            distribution, target_fields=target_fields, fields=dict(key))
    for key, val in jobs_max_pending_durations.iteritems():
        _jobs_max_pending_duration.set(
            val, target_fields=target_fields, fields=dict(key))
def _set_executors_metrics(payload):
    """Scans BotInfo entities and reports per-executor status/pool metrics.

    Runs as one link of a taskqueue chain: processes up to
    _EXECUTORS_PER_SHARD bots (or until the request deadline nears), then
    enqueues a continuation task carrying the datastore cursor and totals.

    Args:
      payload: JSON-encoded _ShardParams from the previous link, or falsy for
        the first link of the chain.
    """
    params = _ShardParams(payload)
    query_iter = bot_management.BotInfo.query().iter(
        produce_cursors=True, start_cursor=params.cursor)

    executors_count = 0
    while query_iter.has_next():
        runtime = (utils.utcnow() - params.start_time).total_seconds()
        # Hand off to the next shard before hitting the quota or deadline.
        if (executors_count >= _EXECUTORS_PER_SHARD or
            runtime > _REQUEST_TIMEOUT_SEC):
            params.cursor = query_iter.cursor_after()
            params.task_count += 1
            utils.enqueue_task(url='/internal/taskqueue/tsmon/executors',
                               queue_name='tsmon',
                               payload=params.json())
            params.task_count -= 1  # For accurate logging below.
            break

        params.count += 1
        executors_count += 1
        bot_info = query_iter.next()
        # Precedence: a bot running a task reports 'running' even if it is
        # also quarantined/dead per the checks below.
        status = 'ready'
        if bot_info.task_id:
            status = 'running'
        elif bot_info.quarantined:
            status = 'quarantined'
        elif bot_info.is_dead:
            status = 'dead'
        elif bot_info.state and bot_info.state.get('maintenance', False):
            status = 'maintenance'

        target_fields = dict(_TARGET_FIELDS)
        target_fields['hostname'] = 'autogen:' + bot_info.id

        _executors_status.set(status, target_fields=target_fields)
        _executors_pool.set(
            _pool_from_dimensions(bot_info.dimensions),
            target_fields=target_fields)

    logging.debug(
        '%s: task %d started at %s, processed %d bots (%d total)',
        '_set_executors_metrics', params.task_count, params.task_start,
        executors_count, params.count)
def _set_mp_metrics(payload):
    """Set global Machine Provider-related ts_mon metrics.

    Args:
      payload: dict mapping MachineType name -> stats dict with keys
        'target_size' and 'enabled', plus optional 'busy'/'idle' counts.
    """
    # NOTE(review): a stale comment about ignoring utilization metrics older
    # than 2 minutes was removed — no such logic exists in this function.
    for name, data in sorted(payload.iteritems()):
        _machine_types_target_size.set(
            data['target_size'],
            fields={'enabled': data['enabled'], 'machine_type': name},
            target_fields=_TARGET_FIELDS)
        # Utilization is optional; only report actual sizes when present.
        if data.get('busy') is not None:
            _machine_types_actual_size.set(
                data['busy'],
                fields={'busy': True, 'machine_type': name},
                target_fields=_TARGET_FIELDS)
            _machine_types_actual_size.set(
                data['idle'],
                fields={'busy': False, 'machine_type': name},
                target_fields=_TARGET_FIELDS)
def _set_global_metrics():
    """Starts the taskqueue chains that compute the global gauge metrics."""
    for kind in ('jobs', 'executors', 'machine_types'):
        utils.enqueue_task(
            url='/internal/taskqueue/tsmon/%s' % kind, queue_name='tsmon')
class _ShardParams(object):
    """Parameters for a chain of taskqueue tasks.

    Carries the datastore cursor and running totals between the links of a
    sharded metrics scan; serialized to JSON via json() and parsed back by
    __init__.
    """

    def __init__(self, payload):
        # Fresh state for the first link of the chain.
        self.start_time = utils.utcnow()  # start of *this* link
        self.cursor = None  # datastore cursor to resume the query from
        self.task_start = self.start_time  # start of the whole chain
        self.task_count = 0  # index of this link in the chain
        self.count = 0  # entities processed by the whole chain so far
        if not payload:
            return
        try:
            params = json.loads(payload)
            if params['cursor']:
                self.cursor = Cursor(urlsafe=params['cursor'])
            self.task_start = datetime.datetime.strptime(
                params['task_start'], utils.DATETIME_FORMAT)
            self.task_count = params['task_count']
            self.count = params['count']
        except (ValueError, KeyError) as e:
            logging.error('_ShardParams: bad JSON: %s: %s', payload, e)
            # Stop the task chain and let the request fail.
            raise

    def json(self):
        """Serializes the chain state for the next taskqueue task's payload."""
        return utils.encode_to_json({
            'cursor': self.cursor.urlsafe() if self.cursor else None,
            'task_start': self.task_start,
            'task_count': self.task_count,
            'count': self.count,
        })
def _extract_job_fields(tags):
"""Extracts common job's metric fields from TaskResultSummary.
Args:
tags (list of str): list of 'key:value' strings.
"""
tags_dict = {}
for tag in tags:
try:
key, value = tag.split(':', 1)
tags_dict[key] = value
except ValueError:
pass
spec_name = tags_dict.get('spec_name')
if not spec_name:
spec_name = '%s:%s' % (
tags_dict.get('master', ''),
tags_dict.get('buildername', ''))
if tags_dict.get('build_is_experimental') == 'true':
spec_name += ':experimental'
fields = {
'project_id': tags_dict.get('project', ''),
'subproject_id': tags_dict.get('subproject', ''),
'pool': tags_dict.get('pool', ''),
'spec_name': spec_name,
}
return fields
### Public API.
def on_task_requested(summary, deduped):
    """When a task is created."""
    fields = dict(_extract_job_fields(summary.tags), deduped=deduped)
    _jobs_requested.increment(fields=fields)
def on_task_completed(summary):
    """When a task is stopped from being processed."""
    fields = _extract_job_fields(summary.tags)
    # Expired tasks feed a dedicated counter and nothing else.
    if summary.state == task_result.State.EXPIRED:
        _tasks_expired.increment(fields=fields)
        return

    if summary.internal_failure:
        outcome = 'infra-failure'
    else:
        outcome = 'failure' if summary.failure else 'success'
    fields['result'] = outcome

    _jobs_completed.increment(fields=fields)
    if summary.duration is not None:
        _jobs_durations.add(summary.duration, fields=fields)
def on_machine_connected_time(seconds, fields):
    """Records the bot_leased -> bot_connected latency for an MP machine."""
    _machine_types_connection_time.add(seconds, fields=fields)
def set_global_metrics(kind, payload=None):
    """Dispatches one shard of a global-metrics scan to its handler."""
    handlers = {
        'jobs': _set_jobs_metrics,
        'executors': _set_executors_metrics,
        'mp': _set_mp_metrics,
    }
    handler = handlers.get(kind)
    if handler is None:
        logging.error('set_global_metrics(kind=%s): unknown kind.', kind)
        return
    handler(payload)
def initialize():
    """Registers the global metrics and the flush callback with gae_ts_mon."""
    # These metrics are the ones that are reset every time they are flushed.
    gae_ts_mon.register_global_metrics([
        _executors_pool,
        _executors_status,
        _jobs_active,
        _jobs_max_pending_duration,
        _jobs_pending_durations,
        _jobs_running,
        _machine_types_actual_size,
        _machine_types_target_size,
    ])
    gae_ts_mon.register_global_metrics_callback('callback', _set_global_metrics)
| [
"commit-bot@chromium.org"
] | commit-bot@chromium.org |
b80f05965f60e88b838d72c5eef3000d3ee55c51 | 1925c535d439d2d47e27ace779f08be0b2a75750 | /leetcode/minimum_time_to_collect_all_apples_in_a_tree.py | 9a06ab10892810967e0fae8964db764325709cde | [] | no_license | arthurDz/algorithm-studies | ee77d716041671c4b8bb757d8d96f3d10b6589f7 | 1e4d23dd0c40df34f58d71c7ca3e6491be732075 | refs/heads/master | 2023-04-27T12:17:06.209278 | 2021-04-30T20:16:18 | 2021-04-30T20:16:18 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,944 | py | # Given an undirected tree consisting of n vertices numbered from 0 to n-1, which has some apples in their vertices. You spend 1 second to walk over one edge of the tree. Return the minimum time in seconds you have to spend in order to collect all apples in the tree starting at vertex 0 and coming back to this vertex.
# The edges of the undirected tree are given in the array edges, where edges[i] = [fromi, toi] means that exists an edge connecting the vertices fromi and toi. Additionally, there is a boolean array hasApple, where hasApple[i] = true means that vertex i has an apple, otherwise, it does not have any apple.
# Example 1:
# Input: n = 7, edges = [[0,1],[0,2],[1,4],[1,5],[2,3],[2,6]], hasApple = [false,false,true,false,true,true,false]
# Output: 8
# Explanation: The figure above represents the given tree where red vertices have an apple. One optimal path to collect all apples is shown by the green arrows.
# Example 2:
# Input: n = 7, edges = [[0,1],[0,2],[1,4],[1,5],[2,3],[2,6]], hasApple = [false,false,true,false,false,true,false]
# Output: 6
# Explanation: The figure above represents the given tree where red vertices have an apple. One optimal path to collect all apples is shown by the green arrows.
# Example 3:
# Input: n = 7, edges = [[0,1],[0,2],[1,4],[1,5],[2,3],[2,6]], hasApple = [false,false,false,false,false,false,false]
# Output: 0
# Constraints:
# 1 <= n <= 10^5
# edges.length == n-1
# edges[i].length == 2
# 0 <= fromi, toi <= n-1
# fromi < toi
# hasApple.length == n
def minTime(self, n: int, edges: List[List[int]], hasApple: List[bool]) -> int:
    """Minimum seconds to collect all apples starting and ending at vertex 0.

    Bug fix: the original built a *directed* graph (parent assumed to be the
    first endpoint of each edge). The problem only guarantees fromi < toi,
    not parent-first ordering, so e.g. edges=[[0,3],[1,3],[1,2]] made the
    subtree under 3 unreachable and returned 0 instead of 6. The graph is
    now undirected, with the DFS tracking its parent to avoid going back up.
    """
    graph = collections.defaultdict(list)
    for a, b in edges:
        graph[a].append(b)
        graph[b].append(a)

    def traverse(node: int, parent: int) -> int:
        # Cost (in seconds) of visiting every apple in this subtree and
        # returning to `node`'s parent; includes the 2s edge to the parent.
        total = 0
        for nxt in graph[node]:
            if nxt != parent:
                total += traverse(nxt, node)
        if total > 0 or hasApple[node]:
            total += 2
        return total

    # The root has no parent edge, so subtract the 2 seconds added for it.
    return max(0, traverse(0, -1) - 2)
"yunfan.yang@minerva.kgi.edu"
] | yunfan.yang@minerva.kgi.edu |
9a6f34d658bd34bb557f9e550a143540abcf2add | f77d97840915ff2318c8f3841096019337c58689 | /_admin/migrations/env.py | b7f89639043932bb7f95a869797ac2f2fc201484 | [] | no_license | rrader/events-service | f35d7e237e0ef5e3598b90878713539960153895 | 5933a6ba83aacb63832dd6efa806409bb37812aa | refs/heads/master | 2021-01-10T04:25:45.875103 | 2015-11-20T16:21:32 | 2015-11-20T16:21:32 | 44,528,882 | 4 | 1 | null | 2015-11-01T19:28:47 | 2015-10-19T11:02:48 | Python | UTF-8 | Python | false | false | 2,541 | py | from alembic import context
from sqlalchemy import engine_from_config, pool
from logging.config import fileConfig
# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
config = context.config
# Interpret the config file for Python logging.
# This line sets up loggers basically.
fileConfig(config.config_file_name)
# add your model's MetaData object here
# for 'autogenerate' support
# from myapp import mymodel
# target_metadata = mymodel.Base.metadata
from flask import current_app
config.set_main_option('sqlalchemy.url', current_app.config.get('SQLALCHEMY_DATABASE_URI'))
target_metadata = current_app.extensions['migrate'].db.metadata
# other values from the config, defined by the needs of env.py,
# can be acquired:
# my_important_option = config.get_main_option("my_important_option")
# ... etc.
def exclude_tables_from_config(config_):
    """Return the table names listed in the section's comma-separated
    ``tables`` option.

    Bug fix: this used to return ``None`` when the option was absent, which
    made ``name in exclude_tables`` in ``include_object`` raise TypeError.
    An empty list keeps membership tests safe and means "exclude nothing".
    """
    tables_ = config_.get("tables", None)
    if tables_ is None:
        return []
    return tables_.split(",")
exclude_tables = exclude_tables_from_config(config.get_section('alembic:exclude'))
def include_object(object, name, type_, reflected, compare_to):
    """Alembic hook: keep everything except tables listed for exclusion."""
    return not (type_ == "table" and name in exclude_tables)
def run_migrations_offline():
    """Run migrations in 'offline' mode.

    This configures the context with just a URL
    and not an Engine, though an Engine is acceptable
    here as well. By skipping the Engine creation
    we don't even need a DBAPI to be available.

    Calls to context.execute() here emit the given string to the
    script output.
    """
    url = config.get_main_option("sqlalchemy.url")
    # NOTE(review): unlike run_migrations_online, neither target_metadata nor
    # the include_object hook is passed here — confirm offline runs never
    # need them before relying on this script for autogenerate.
    context.configure(url=url)

    with context.begin_transaction():
        context.run_migrations()
def run_migrations_online():
    """Run migrations in 'online' mode.

    In this scenario we need to create an Engine
    and associate a connection with the context.
    """
    engine = engine_from_config(
        config.get_section(config.config_ini_section),
        prefix='sqlalchemy.',
        poolclass=pool.NullPool)

    connection = engine.connect()
    context.configure(
        connection=connection,
        target_metadata=target_metadata,
        # Fix: wire in the exclusion hook defined above. It was previously
        # defined (along with the [alembic:exclude] parsing) but never passed
        # to the context, so the listed tables were not actually excluded
        # from autogenerate comparisons.
        include_object=include_object
    )

    try:
        with context.begin_transaction():
            context.run_migrations()
    finally:
        # Always release the connection, even if a migration fails.
        connection.close()
if context.is_offline_mode():
run_migrations_offline()
else:
run_migrations_online()
| [
"roman.rader@gmail.com"
] | roman.rader@gmail.com |
39bcecf7096e30317938833bbcbad285c393e708 | 134ff3c0719d4c0022eb0fb7c859bdbff5ca34b2 | /desktop/libs/hadoop/setup.py | 45c29110199a12209f2f58f95c8a774324b992a0 | [
"Apache-2.0"
] | permissive | civascu/hue | 22637f13a4cfc557716557661523131b6ac16da4 | 82f2de44789ff5a981ed725175bae7944832d1e9 | refs/heads/master | 2020-03-31T01:50:39.449966 | 2010-07-21T01:05:50 | 2010-07-21T01:07:15 | 788,284 | 0 | 0 | Apache-2.0 | 2019-02-04T07:03:12 | 2010-07-21T07:34:27 | Python | UTF-8 | Python | false | false | 1,344 | py | # Licensed to Cloudera, Inc. under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. Cloudera, Inc. licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from setuptools import setup, find_packages
# Packaging metadata for the Hadoop libraries; registered with Desktop via
# the entry point below so its configuration is picked up.
setup(
    name = "hadoop",
    version = "0.9.1",
    url = 'http://github.com/cloudera/hue',
    description = "Hadoop Libraries",
    # Note that we're cheating by installing gen-py
    # in hadoop's __init__.py.
    packages = find_packages('src'),
    package_dir = {'': 'src' },
    install_requires = ['setuptools', 'desktop'],
    # Even libraries need to be registered as desktop_apps,
    # if they have configuration, like this one.
    entry_points = { 'desktop.sdk.lib': 'hadoop=hadoop' },
)
| [
"bcwalrus@cloudera.com"
] | bcwalrus@cloudera.com |
f0196b2a9c3e4a9ca2069fdb9c0d194f6fcb2024 | 80d8b35c2199817ca69f36d61f228110b5eacae2 | /bravo/tests/test_packets.py | 7d123fabcf7e67485c290ed2ec8721fb81b144eb | [] | no_license | Estevo-Aleixo/bravo | b1d309d1215250f63bba6fade3e81e2f248dd8f4 | 2c0b07dd4385114c870123643ddc57d3770b2e91 | refs/heads/master | 2021-01-16T21:35:06.195078 | 2011-02-02T09:08:59 | 2011-02-02T09:08:59 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,973 | py | # vim: set fileencoding=utf8 :
import unittest
from construct import Container
from construct import MappingError
import bravo.packets
class TestPacketDataStructures(unittest.TestCase):
    """Checks that the packet table and the packet-name table agree."""

    def test_named_packets_exist(self):
        table = bravo.packets.packets
        for name, slot in bravo.packets.packets_by_name.iteritems():
            self.assertTrue(slot in table, "%d is missing" % slot)

    def test_packet_names_exist(self):
        named_slots = bravo.packets.packets_by_name.values()
        for slot in bravo.packets.packets.iterkeys():
            self.assertTrue(slot in named_slots, "%d is missing" % slot)

    def test_packet_names_match(self):
        table = bravo.packets.packets
        for name, slot in bravo.packets.packets_by_name.iteritems():
            self.assertEqual(name, table[slot].name)
class TestPacketParsing(unittest.TestCase):
    """Parses fixed byte fixtures through individual packet parsers.

    Packet slots used below: 0 (ping), 2 (handshake), 3 (chat), 4 (time),
    12 (orientation), 15 (build), 18 (animate).
    """

    def test_ping(self):
        # Ping has an empty payload; parsing must still succeed.
        packet = ""
        parsed = bravo.packets.packets[0].parse(packet)
        self.assertTrue(parsed)

    def test_handshake(self):
        packet = "\x00\x01a"
        parsed = bravo.packets.packets[2].parse(packet)
        self.assertEqual(parsed.username, "a")

    def test_handshake_unicode(self):
        # \xc2\xa7 is UTF-8 for the section sign; it must decode to u"§".
        packet = "\x00\x02\xc2\xa7"
        parsed = bravo.packets.packets[2].parse(packet)
        self.assertEqual(parsed.username, u"§")

    def test_chat_color(self):
        # Color codes (§f) embedded in a chat message must survive decoding.
        packet = "\x00\x15<\xc2\xa7fMrZunz\xc2\xa7f> Alrite"
        parsed = bravo.packets.packets[3].parse(packet)
        self.assertEqual(parsed.message, u"<§fMrZunz§f> Alrite")

    def test_time(self):
        packet = "\x00\x00\x00\x00\x00\x00\x00\x2a"
        parsed = bravo.packets.packets[4].parse(packet)
        self.assertEqual(parsed.timestamp, 42)

    def test_orientation(self):
        packet = "\x45\xc5\x66\x76\x42\x2d\xff\xfc\x01"
        parsed = bravo.packets.packets[12].parse(packet)
        self.assertEqual(parsed.look.pitch, 43.49998474121094)
        self.assertEqual(parsed.look.rotation, 6316.8076171875)

    def test_build(self):
        packet = "\x00\x00\x00\x19@\x00\x00\x00@\x05\x00\x04@\x12"
        parsed = bravo.packets.packets[15].parse(packet)
        self.assertEqual(parsed.x, 25)
        self.assertEqual(parsed.y, 64)
        self.assertEqual(parsed.z, 64)
        self.assertEqual(parsed.face, "+x")
        self.assertEqual(parsed.id, 4)
        self.assertEqual(parsed.count, 64)
        self.assertEqual(parsed.damage, 18)

    def test_build_bad_face(self):
        # Face byte 0x06 is outside the mapping and must raise, not parse.
        packet = "\x00\x00\x00\x19@\x00\x00\x00@\x06\x00\x04@\x12"
        self.assertRaises(MappingError, bravo.packets.packets[15].parse,
            packet)

    def test_animate(self):
        packet = "\x00\x00\x00\x03\x01"
        parsed = bravo.packets.packets[18].parse(packet)
        self.assertEqual(parsed.eid, 3)
        self.assertEqual(parsed.animation, "arm")

    def test_animate_bad_animation(self):
        # Animation byte 0x05 is outside the mapping and must raise.
        packet = "\x00\x00\x00\x03\x05"
        self.assertRaises(MappingError, bravo.packets.packets[18].parse,
            packet)
class TestPacketAssembly(unittest.TestCase):
    """Builds packets from Containers and checks the emitted bytes."""

    def test_ping(self):
        container = Container()
        assembled = bravo.packets.packets[0].build(container)
        self.assertEqual(assembled, "")

    def test_time(self):
        container = Container(timestamp=42)
        assembled = bravo.packets.packets[4].build(container)
        self.assertEqual(assembled, "\x00\x00\x00\x00\x00\x00\x00\x2a")

    def test_build(self):
        container = Container(x=25, y=64, z=64, face="+x", id=4, count=64,
            damage=18)
        assembled = bravo.packets.packets[15].build(container)
        self.assertEqual(assembled,
            "\x00\x00\x00\x19@\x00\x00\x00@\x05\x00\x04@\x12")

    def test_build_bad_face(self):
        # "+q" is not a valid face value, so assembly must raise.
        container = Container(x=25, y=64, z=64, face="+q", id=4, count=64,
            damage=18)
        self.assertRaises(MappingError, bravo.packets.packets[15].build,
            container)
class TestPacketHelpers(unittest.TestCase):
    """Coverage for the convenience wrappers in bravo.packets."""

    def test_make_packet(self):
        assembled = bravo.packets.make_packet("ping")
        self.assertEqual(assembled, "\x00")
class TestPacketIntegration(unittest.TestCase):
    """Round-trips a full wire packet through parse and re-assembly."""

    def test_location_round_trip(self):
        # Header byte 0x0d (13) is the location packet.
        packet = "\x0d@\x1a\x00\x00\x00\x00\x00\x00@P\xcf\\)\x00\x00\x00@Pg\xae\x14\x80\x00\x00@\x1e\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00"
        header, payload = bravo.packets.parse_packets(packet)[0][0]
        self.assertEqual(header, 13)
        self.assertEqual(payload.position.x, 6.5)
        self.assertEqual(payload.position.y, 67.24000000953674)
        self.assertEqual(payload.position.stance, 65.62000000476837)
        self.assertEqual(payload.position.z, 7.5)
        self.assertEqual(payload.look.rotation, 0.0)
        self.assertEqual(payload.look.pitch, 0.0)
        self.assertEqual(payload.flying.flying, 0)
        # Re-assembling the parsed payload must reproduce the input bytes.
        reconstructed = bravo.packets.make_packet("location", payload)
        self.assertEqual(packet, reconstructed)
| [
"leif.theden@gmail.com"
] | leif.theden@gmail.com |
106fce316f2b8daf10ebed6e56ebc77eae52f5fb | 348a921b2ec58cf8d9e018af5d7e93f15def6263 | /mimiron/models/scale_app.py | f3630ecac9490cac0a5aae5ae97bb70350b7d5cb | [] | no_license | tonicbupt/mimiron | 9c6ed99f8838f9180d7593bfaf8e5766e01c2911 | e51c8a59b7babbac44c0579797d5e255ede8a646 | refs/heads/master | 2016-09-06T01:55:37.508366 | 2015-07-28T08:52:59 | 2015-07-28T08:52:59 | 33,178,886 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,583 | py | # coding: utf-8
import sqlalchemy.exc
from mimiron.ext import db
from mimiron.models.base import Base
from mimiron.models.record import Record
class ScaleApp(Base):
    """Scaling rule bound to one (appname, version, entrypoint, env) tuple.

    Owns its condition groups and scaling records; both are deleted/listed
    through the helpers below.
    """
    __tablename__ = 'scale_app'
    # The four identifying columns together must be unique.
    __table_args__ = (db.UniqueConstraint('appname', 'version', 'entrypoint', 'env'),)

    appname = db.Column(db.String(30), nullable=False)
    version = db.Column(db.String(50), nullable=False)
    entrypoint = db.Column(db.CHAR(20), nullable=False)
    env = db.Column(db.CHAR(20), nullable=False)

    condition_groups = db.relationship('ConditionGroup', backref='scale_app', lazy='dynamic')
    records = db.relationship('Record', backref='scale_app', lazy='dynamic')

    def __init__(self, appname, version, entrypoint, env):
        self.appname = appname
        self.version = version
        self.entrypoint = entrypoint
        self.env = env

    @classmethod
    def list_all(cls, start=0, limit=None):
        """Return all apps, optionally paginated with offset/limit."""
        query = cls.query.offset(start)
        if limit is None:
            return query.all()
        return query.limit(limit).all()

    @classmethod
    def get_or_create(cls, appname, version, entrypoint, env):
        """Fetch the matching app or create it; None on a concurrent-insert race."""
        existing = cls.query.filter_by(appname=appname, version=version,
                                       entrypoint=entrypoint, env=env).first()
        if existing:
            return existing
        try:
            created = cls(appname, version, entrypoint, env)
            db.session.add(created)
            db.session.commit()
        except sqlalchemy.exc.IntegrityError:
            # Someone inserted the same unique tuple first.
            db.session.rollback()
            return None
        return created

    @classmethod
    def get_by_appname(cls, appname, start=0, limit=None):
        """Return all versions/entrypoints of one app, optionally paginated."""
        query = cls.query.filter_by(appname=appname).offset(start)
        if limit is None:
            return query.all()
        return query.limit(limit).all()

    def add_record(self, container_id):
        """Attach a new scaling Record for *container_id*; None on commit failure."""
        try:
            record = Record(container_id)
            db.session.add(record)
            self.records.append(record)
            db.session.commit()
        except sqlalchemy.exc.IntegrityError:
            db.session.rollback()
            return None
        return record

    def list_records(self, start=0, limit=None):
        """Return this app's records, newest first, optionally paginated."""
        query = self.records.order_by(Record.id.desc()).offset(start)
        if limit is not None:
            query = query.limit(limit)
        return query.all()

    def delete(self):
        """Delete the app, cascading through its condition groups by hand."""
        for group in self.condition_groups.all():
            group.delete()
        db.session.delete(self)
        db.session.commit()

    def to_dict(self):
        data = super(ScaleApp, self).to_dict()
        data.update(condition_groups=self.condition_groups.all())
        return data
| [
"tonicbupt@gmail.com"
] | tonicbupt@gmail.com |
69fed5544d7ca3480815312b408af42df3de3ffc | bb150497a05203a718fb3630941231be9e3b6a32 | /tools/test/test-tools/tool-test-op-correctness/ttorch/test_linear.py | 90783638ec2e59efa8de2f83f0c73913751df932 | [] | no_license | PaddlePaddle/PaddleTest | 4fb3dec677f0f13f7f1003fd30df748bf0b5940d | bd3790ce72a2a26611b5eda3901651b5a809348f | refs/heads/develop | 2023-09-06T04:23:39.181903 | 2023-09-04T11:17:50 | 2023-09-04T11:17:50 | 383,138,186 | 42 | 312 | null | 2023-09-13T11:13:35 | 2021-07-05T12:44:59 | Python | UTF-8 | Python | false | false | 454 | py | #!/bin/env python
# -*- coding: utf-8 -*-
# @author DDDivano
# encoding=utf-8 vi:ts=4:sw=4:expandtab:ft=python
import torch
import numpy as np
def test_linear():
    """Check torch.nn.functional.linear == x @ W.T + b on all-ones inputs.

    Each output element is 5 * (1 * 3) + 1 = 16 for a (3, 3) result.
    """
    inputs = torch.from_numpy(np.ones(shape=[3, 5]))
    weights = torch.from_numpy(np.ones(shape=[3, 5]) * 3)
    offsets = torch.from_numpy(np.ones(shape=[3]))
    actual = torch.nn.functional.linear(inputs, weights, offsets)
    np.testing.assert_allclose(actual, np.full((3, 3), 16.0))
| [
"noreply@github.com"
] | PaddlePaddle.noreply@github.com |
4cc340abd79f9612f362da49ea27672eba51f166 | acd41dc7e684eb2e58b6bef2b3e86950b8064945 | /res/packages/scripts/scripts/client/messenger/formatters/service_channel.py | 758f9e20a19a5f7c3455c8c3f3808f768ab4825b | [] | no_license | webiumsk/WoT-0.9.18.0 | e07acd08b33bfe7c73c910f5cb2a054a58a9beea | 89979c1ad547f1a1bbb2189f5ee3b10685e9a216 | refs/heads/master | 2021-01-20T09:37:10.323406 | 2017-05-04T13:51:43 | 2017-05-04T13:51:43 | 90,268,530 | 0 | 0 | null | null | null | null | WINDOWS-1250 | Python | false | false | 102,168 | py | # 2017.05.04 15:26:55 Střední Evropa (letní čas)
# Embedded file name: scripts/client/messenger/formatters/service_channel.py
import time
import types
import operator
from Queue import Queue
import constants
import account_helpers
import ArenaType
import BigWorld
import potapov_quests
from FortifiedRegionBase import FORT_ATTACK_RESULT, NOT_ACTIVATED
from adisp import async, process
from chat_shared import decompressSysMessage
from debug_utils import LOG_ERROR, LOG_WARNING, LOG_CURRENT_EXCEPTION, LOG_DEBUG
from gui.Scaleform.locale.ITEM_TYPES import ITEM_TYPES
from gui.Scaleform.locale.MENU import MENU
from gui.Scaleform.locale.SYSTEM_MESSAGES import SYSTEM_MESSAGES
from helpers import dependency
from shared_utils import BoundMethodWeakref, findFirst
from gui.goodies import g_goodiesCache
from gui.shared.formatters import text_styles
from gui import GUI_SETTINGS
from gui.LobbyContext import g_lobbyContext
from gui.Scaleform.locale.MESSENGER import MESSENGER
from gui.clans.formatters import getClanAbbrevString, getClanFullName
from gui.shared import formatters as shared_fmts, g_itemsCache
from gui.shared.fortifications import formatters as fort_fmts
from gui.shared.fortifications.FortBuilding import FortBuilding
from gui.shared.gui_items.Tankman import Tankman, calculateRoleLevel
from gui.shared.gui_items.dossier.factories import getAchievementFactory
from gui.shared.notifications import NotificationPriorityLevel, NotificationGuiSettings, MsgCustomEvents
from gui.shared.utils import getPlayerDatabaseID, getPlayerName
from gui.shared.utils.transport import z_loads
from gui.shared.gui_items.Vehicle import getUserName, getShortUserName, getTypeShortUserName
from gui.shared.money import Money, ZERO_MONEY, Currency
from gui.shared.formatters.currency import getBWFormatter
from gui.prb_control.formatters import getPrebattleFullDescription
from gui.prb_control import prbInvitesProperty
from helpers import i18n, html, getClientLanguage, getLocalizedData
from helpers import time_utils
from items import getTypeInfoByIndex, getTypeInfoByName, vehicles as vehicles_core
from account_helpers import rare_achievements
from dossiers2.custom.records import DB_ID_TO_RECORD
from dossiers2.ui.achievements import ACHIEVEMENT_BLOCK
from dossiers2.ui.layouts import IGNORED_BY_BATTLE_RESULTS
from messenger import g_settings
from messenger.ext import passCensor
from messenger.m_constants import MESSENGER_I18N_FILE
from predefined_hosts import g_preDefinedHosts
from constants import INVOICE_ASSET, AUTO_MAINTENANCE_TYPE, PREBATTLE_INVITE_STATE, AUTO_MAINTENANCE_RESULT, PREBATTLE_TYPE, FINISH_REASON, KICK_REASON_NAMES, KICK_REASON, NC_MESSAGE_TYPE, NC_MESSAGE_PRIORITY, SYS_MESSAGE_CLAN_EVENT, SYS_MESSAGE_CLAN_EVENT_NAMES, SYS_MESSAGE_FORT_EVENT, SYS_MESSAGE_FORT_EVENT_NAMES, FORT_BUILDING_TYPE, FORT_ORDER_TYPE, FORT_BUILDING_TYPE_NAMES, ARENA_GUI_TYPE, EVENT_TYPE
from messenger.formatters import TimeFormatter, NCContextItemFormatter
from skeletons.gui.server_events import IEventsCache
def _getTimeStamp(message):
    """Return the message creation time as a server timestamp.

    Falls back to "now" when the message carries no creation datetime.
    """
    createdAt = message.createdAt
    if createdAt is None:
        return time_utils.getCurrentTimestamp()
    return time_utils.getTimestampFromUTC(createdAt.timetuple())
def _extendCustomizationData(newData, extendable):
    """Append a localized add/remove line to *extendable* for each customization item.

    No-op when *extendable* is None. Negative values mean the item was removed;
    permanent items with a count > 1 get the "...Value" message with the count.
    """
    if extendable is None:
        return
    for item in newData.get('customizations', []):
        custType = item['custType']
        custValue = item['value']
        if custValue < 0:
            extendable.append(i18n.makeString('#system_messages:customization/removed/%s' % custType))
        elif item['isPermanent'] and custValue > 1:
            extendable.append(i18n.makeString('#system_messages:customization/added/%sValue' % custType, custValue))
        else:
            extendable.append(i18n.makeString('#system_messages:customization/added/%s' % custType))
@async
def _getRareTitle(rareID, callback):
    # Asynchronously resolves the localized title of a rare achievement.
    # The inner lambda forwards only text['title'] to the caller, so callback
    # receives None when the fetched text has no 'title' entry.
    rare_achievements.getRareAchievementText(getClientLanguage(), rareID, lambda rID, text: callback(text.get('title')))
@async
@process
def _processRareAchievements(rares, callback):
    # Resolves titles for a list of rare-achievement IDs (adisp coroutine).
    # Unresolvable IDs are collapsed into one generic "action achievement(s)"
    # line so the user still sees that something was earned.
    unknownAchieves = 0
    achievements = []
    for rareID in rares:
        title = yield _getRareTitle(rareID)
        if title is None:
            unknownAchieves += 1
        else:
            achievements.append(title)
    if unknownAchieves:
        # Singular vs plural key depending on how many titles were missing.
        achievements.append(i18n.makeString('#system_messages:%s/title' % ('actionAchievements' if unknownAchieves > 1 else 'actionAchievement')))
    callback(achievements)
    return
class ServiceChannelFormatter(object):
    """Base class for all service-channel message formatters.

    Subclasses override format() to turn raw server message data into
    (formattedMessage, NotificationGuiSettings) pairs.
    """

    def format(self, data, *args):
        """Base implementation formats nothing."""
        return (None, None)

    def isNotify(self):
        # Formatted messages raise a notification by default.
        return True

    def isAsync(self):
        # Synchronous by default; async subclasses override this.
        return False

    def _getGuiSettings(self, data, key=None, priorityLevel=None):
        """Build GUI settings for the message; priority defaults to the template's."""
        try:
            alert = data.isHighImportance and data.active
        except AttributeError:
            alert = False
        level = priorityLevel if priorityLevel is not None else g_settings.msgTemplates.priority(key)
        return NotificationGuiSettings(self.isNotify(), level, alert)
class WaitItemsSyncFormatter(ServiceChannelFormatter):
def __init__(self):
self.__callbackQueue = None
return
def isAsync(self):
return True
@async
def _waitForSyncItems(self, callback):
if g_itemsCache.isSynced():
callback(True)
else:
self.__registerHandler(callback)
def __registerHandler(self, callback):
if not self.__callbackQueue:
self.__callbackQueue = Queue()
self.__callbackQueue.put(callback)
g_itemsCache.onSyncCompleted += self.__onSyncCompleted
def __unregisterHandler(self):
raise self.__callbackQueue.empty() or AssertionError
self.__callbackQueue = None
g_itemsCache.onSyncCompleted -= self.__onSyncCompleted
return
def __onSyncCompleted(self, *args):
while not self.__callbackQueue.empty():
self.__callbackQueue.get_nowait()(g_itemsCache.isSynced())
self.__unregisterHandler()
class ServerRebootFormatter(ServiceChannelFormatter):
    """Formats the "server is going to reboot" service message."""

    def format(self, message, *args):
        # message.data carries the reboot datetime (UTC); nothing to show otherwise.
        if not message.data:
            return (None, None)
        rebootLocalTime = time_utils.utcToLocalDatetime(message.data)
        formatted = g_settings.msgTemplates.format('serverReboot', ctx={'date': rebootLocalTime.strftime('%c')})
        return (formatted, self._getGuiSettings(message, 'serverReboot'))
class ServerRebootCancelledFormatter(ServiceChannelFormatter):
    """Formats the "server reboot was cancelled" service message."""

    def format(self, message, *args):
        # message.data carries the cancelled reboot datetime (UTC).
        if not message.data:
            return (None, None)
        cancelledLocalTime = time_utils.utcToLocalDatetime(message.data)
        formatted = g_settings.msgTemplates.format('serverRebootCancelled', ctx={'date': cancelledLocalTime.strftime('%c')})
        return (formatted, self._getGuiSettings(message, 'serverRebootCancelled'))
class BattleResultsFormatter(WaitItemsSyncFormatter):
    """Formats the post-battle results service message (victory/draw/defeat).

    Waits for the items cache, then builds a template context with arena name,
    vehicles, xp/credits/gold deltas, fort resource, achievements, vehicle
    locks and quest rewards.
    """
    # Template key per battle outcome (-1 defeat, 0 draw, 1 victory).
    __battleResultKeys = {-1: 'battleDefeatResult',
     0: 'battleDrawGameResult',
     1: 'battleVictoryResult'}
    # Fallout/event battles collapse defeat and draw into one "ended" template.
    __eventBattleResultKeys = {-1: 'battleEndedGameResult',
     0: 'battleEndedGameResult',
     1: 'battleVictoryResult'}
    __goldTemplateKey = 'battleResultGold'
    __questsTemplateKey = 'battleQuests'
    __i18n_penalty = i18n.makeString('#%s:serviceChannelMessages/battleResults/penaltyForDamageAllies' % MESSENGER_I18N_FILE)
    __i18n_contribution = i18n.makeString('#%s:serviceChannelMessages/battleResults/contributionForDamageAllies' % MESSENGER_I18N_FILE)

    def isNotify(self):
        return True

    @async
    @process
    def format(self, message, callback):
        # adisp coroutine: first yield primes the generator, second waits
        # for the items cache before reading vehicle data from it.
        yield lambda callback: callback(True)
        isSynced = yield self._waitForSyncItems()
        if message.data and isSynced:
            battleResults = message.data
            arenaTypeID = battleResults.get('arenaTypeID', 0)
            if arenaTypeID > 0 and arenaTypeID in ArenaType.g_cache:
                arenaType = ArenaType.g_cache[arenaTypeID]
            else:
                arenaType = None
            arenaCreateTime = battleResults.get('arenaCreateTime', None)
            if arenaCreateTime and arenaType:
                # Context for the message template; defaults cover missing data.
                ctx = {'arenaName': i18n.makeString(arenaType.name),
                 'vehicleNames': 'N/A',
                 'xp': '0',
                 'credits': '0'}
                vehicleNames = {}
                popUpRecords = []
                marksOfMastery = []
                vehs = []
                # Collect per-vehicle names, achievement pop-ups and mastery marks.
                for vehIntCD, vehBattleResults in battleResults.get('playerVehicles', {}).iteritems():
                    v = g_itemsCache.items.getItemByCD(vehIntCD)
                    vehs.append(v)
                    vehicleNames[vehIntCD] = v.userName
                    popUpRecords.extend(vehBattleResults.get('popUpRecords', []))
                    if 'markOfMastery' in vehBattleResults and vehBattleResults['markOfMastery'] > 0:
                        marksOfMastery.append(vehBattleResults['markOfMastery'])
                ctx['vehicleNames'] = ', '.join(map(operator.attrgetter('userName'), sorted(vehs)))
                xp = battleResults.get('xp')
                if xp:
                    ctx['xp'] = BigWorld.wg_getIntegralFormat(xp)
                battleResKey = battleResults.get('isWinner', 0)
                ctx['xpEx'] = self.__makeXpExString(xp, battleResKey, battleResults.get('xpPenalty', 0), battleResults)
                ctx['gold'] = self.__makeGoldString(battleResults.get('gold', 0))
                # creditsToDraw is subtracted: it was credited only to be drawn back.
                accCredits = battleResults.get('credits') - battleResults.get('creditsToDraw', 0)
                if accCredits:
                    ctx['credits'] = BigWorld.wg_getIntegralFormat(accCredits)
                ctx['creditsEx'] = self.__makeCreditsExString(accCredits, battleResults.get('creditsPenalty', 0), battleResults.get('creditsContributionIn', 0), battleResults.get('creditsContributionOut', 0))
                ctx['fortResource'] = self.__makeFortResourceString(battleResults)
                guiType = battleResults.get('guiType', 0)
                # NOTE: the assignment above is immediately overwritten here
                # (present as-is in the decompiled source); fort resource is
                # only shown for sorties.
                ctx['fortResource'] = ''
                if guiType == ARENA_GUI_TYPE.SORTIE:
                    ctx['fortResource'] = self.__makeFortResourceString(battleResults)
                ctx['achieves'] = self.__makeAchievementsString(popUpRecords, marksOfMastery)
                ctx['lock'] = self.__makeVehicleLockString(vehicleNames, battleResults)
                ctx['quests'] = self.__makeQuestsAchieve(message)
                team = battleResults.get('team', 0)
                ctx['fortBuilding'] = ''
                if guiType == ARENA_GUI_TYPE.FORT_BATTLE:
                    fortBuilding = battleResults.get('fortBuilding')
                    if fortBuilding is not None:
                        buildTypeID, buildTeam = fortBuilding.get('buildTypeID'), fortBuilding.get('buildTeam')
                        if buildTypeID:
                            ctx['fortBuilding'] = g_settings.htmlTemplates.format('battleResultFortBuilding', ctx={'fortBuilding': FortBuilding(typeID=buildTypeID).userName,
                             'clanAbbrev': ''})
                        # A draw in a fort battle is resolved in favour of the
                        # building's owning team.
                        if battleResKey == 0:
                            battleResKey = 1 if buildTeam == team else -1
                if guiType == ARENA_GUI_TYPE.FORT_BATTLE_2 or guiType == ARENA_GUI_TYPE.SORTIE_2:
                    # New fort modes resolve draws via an explicit winner hint.
                    if battleResKey == 0:
                        winnerIfDraw = battleResults.get('winnerIfDraw')
                        if winnerIfDraw:
                            battleResKey = 1 if winnerIfDraw == team else -1
                if guiType == ARENA_GUI_TYPE.FALLOUT_MULTITEAM:
                    templateName = self.__eventBattleResultKeys[battleResKey]
                else:
                    templateName = self.__battleResultKeys[battleResKey]
                bgIconSource = None
                arenaUniqueID = battleResults.get('arenaUniqueID', 0)
                if guiType == ARENA_GUI_TYPE.FORT_BATTLE:
                    bgIconSource = 'FORT_BATTLE'
                # arenaUniqueID is saved with the message so the battle-results
                # window can be reopened from the notification.
                formatted = g_settings.msgTemplates.format(templateName, ctx=ctx, data={'timestamp': arenaCreateTime,
                 'savedData': arenaUniqueID}, bgIconSource=bgIconSource)
                settings = self._getGuiSettings(message, templateName)
                settings.showAt = BigWorld.time()
                callback((formatted, settings))
            else:
                callback((None, None))
        else:
            callback((None, None))
        return

    def __makeFortResourceString(self, battleResult):
        # Legionaries earn no fort resource, shown as '-'.
        fortResource = battleResult.get('fortResource', None)
        if fortResource is None:
            return ''
        else:
            fortResourceStr = BigWorld.wg_getIntegralFormat(fortResource) if not battleResult['isLegionary'] else '-'
            return g_settings.htmlTemplates.format('battleResultFortResource', ctx={'fortResource': fortResourceStr})

    def __makeQuestsAchieve(self, message):
        # Delegates quest-reward formatting to the token-quest formatter.
        fmtMsg = TokenQuestsFormatter(asBattleFormatter=True)._formatQuestAchieves(message)
        if fmtMsg is not None:
            return g_settings.htmlTemplates.format('battleQuests', {'achieves': fmtMsg})
        else:
            return ''

    def __makeVehicleLockString(self, vehicleNames, battleResults):
        # One "locked until <time>" entry per vehicle with a non-zero unlock time.
        locks = []
        for vehIntCD, battleResult in battleResults.get('playerVehicles', {}).iteritems():
            expireTime = battleResult.get('vehTypeUnlockTime', 0)
            if not expireTime:
                continue
            vehicleName = vehicleNames.get(vehIntCD)
            if vehicleName is None:
                continue
            locks.append(g_settings.htmlTemplates.format('battleResultLocks', ctx={'vehicleName': vehicleName,
             'expireTime': TimeFormatter.getLongDatetimeFormat(expireTime)}))
        return ', '.join(locks)

    def __makeXpExString(self, xp, battleResKey, xpPenalty, battleResults):
        # Builds the parenthesised xp details: team-damage penalty and, on
        # victory, the daily first-win multiplier.
        if not xp:
            return ''
        exStrings = []
        if xpPenalty > 0:
            exStrings.append(self.__i18n_penalty % BigWorld.wg_getIntegralFormat(xpPenalty))
        if battleResKey == 1:
            xpFactorStrings = []
            xpFactor = battleResults.get('dailyXPFactor', 1)
            if xpFactor > 1:
                xpFactorStrings.append(i18n.makeString('#%s:serviceChannelMessages/battleResults/doubleXpFactor' % MESSENGER_I18N_FILE) % xpFactor)
            if xpFactorStrings:
                exStrings.append(', '.join(xpFactorStrings))
        if len(exStrings):
            return ' ({0:s})'.format('; '.join(exStrings))
        return ''

    def __makeCreditsExString(self, accCredits, creditsPenalty, creditsContributionIn, creditsContributionOut):
        # Builds the parenthesised credits details: combined penalty/outgoing
        # compensation, and incoming compensation from teammates.
        if not accCredits:
            return ''
        exStrings = []
        penalty = sum([creditsPenalty, creditsContributionOut])
        if penalty > 0:
            exStrings.append(self.__i18n_penalty % BigWorld.wg_getIntegralFormat(penalty))
        if creditsContributionIn > 0:
            exStrings.append(self.__i18n_contribution % BigWorld.wg_getIntegralFormat(creditsContributionIn))
        if len(exStrings):
            return ' ({0:s})'.format('; '.join(exStrings))
        return ''

    def __makeGoldString(self, gold):
        if not gold:
            return ''
        return g_settings.htmlTemplates.format(self.__goldTemplateKey, {'gold': BigWorld.wg_getGoldFormat(gold)})

    @classmethod
    def __makeAchievementsString(cls, popUpRecords, marksOfMastery):
        # Converts dossier pop-up records plus mastery marks into a single
        # localized, sorted, comma-separated achievements line.
        result = []
        for recordIdx, value in popUpRecords:
            recordName = DB_ID_TO_RECORD[recordIdx]
            if recordName in IGNORED_BY_BATTLE_RESULTS:
                continue
            achieve = getAchievementFactory(recordName).create(value=value)
            if achieve is not None and not achieve.isApproachable() and achieve not in result:
                result.append(achieve)
        for markOfMastery in marksOfMastery:
            achieve = getAchievementFactory((ACHIEVEMENT_BLOCK.TOTAL, 'markOfMastery')).create(value=markOfMastery)
            if achieve is not None:
                result.append(achieve)
        res = ''
        if len(result):
            res = g_settings.htmlTemplates.format('battleResultAchieves', {'achieves': ', '.join(map(lambda a: a.getUserName(), sorted(result)))})
        return res
class AutoMaintenanceFormatter(ServiceChannelFormatter):
    """Formats auto-repair / auto-resupply / auto-equip result messages.

    The message key is looked up by (result, maintenance type); the system
    message template depends on the result and the currency spent.
    """
    # i18n key per (maintenance result, maintenance type) pair.
    __messages = {AUTO_MAINTENANCE_RESULT.NOT_ENOUGH_ASSETS: {AUTO_MAINTENANCE_TYPE.REPAIR: '#messenger:serviceChannelMessages/autoRepairError',
                                    AUTO_MAINTENANCE_TYPE.LOAD_AMMO: '#messenger:serviceChannelMessages/autoLoadError',
                                    AUTO_MAINTENANCE_TYPE.EQUIP: '#messenger:serviceChannelMessages/autoEquipError'},
     AUTO_MAINTENANCE_RESULT.OK: {AUTO_MAINTENANCE_TYPE.REPAIR: '#messenger:serviceChannelMessages/autoRepairSuccess',
                                  AUTO_MAINTENANCE_TYPE.LOAD_AMMO: '#messenger:serviceChannelMessages/autoLoadSuccess',
                                  AUTO_MAINTENANCE_TYPE.EQUIP: '#messenger:serviceChannelMessages/autoEquipSuccess'},
     AUTO_MAINTENANCE_RESULT.NOT_PERFORMED: {AUTO_MAINTENANCE_TYPE.REPAIR: '#messenger:serviceChannelMessages/autoRepairSkipped',
                                             AUTO_MAINTENANCE_TYPE.LOAD_AMMO: '#messenger:serviceChannelMessages/autoLoadSkipped',
                                             AUTO_MAINTENANCE_TYPE.EQUIP: '#messenger:serviceChannelMessages/autoEquipSkipped'},
     AUTO_MAINTENANCE_RESULT.DISABLED_OPTION: {AUTO_MAINTENANCE_TYPE.REPAIR: '#messenger:serviceChannelMessages/autoRepairDisabledOption',
                                               AUTO_MAINTENANCE_TYPE.LOAD_AMMO: '#messenger:serviceChannelMessages/autoLoadDisabledOption',
                                               AUTO_MAINTENANCE_TYPE.EQUIP: '#messenger:serviceChannelMessages/autoEquipDisabledOption'},
     AUTO_MAINTENANCE_RESULT.NO_WALLET_SESSION: {AUTO_MAINTENANCE_TYPE.REPAIR: '#messenger:serviceChannelMessages/autoRepairErrorNoWallet',
                                                 AUTO_MAINTENANCE_TYPE.LOAD_AMMO: '#messenger:serviceChannelMessages/autoLoadErrorNoWallet',
                                                 AUTO_MAINTENANCE_TYPE.EQUIP: '#messenger:serviceChannelMessages/autoEquipErrorNoWallet'}}
    # Template per spend currency, used for successful resupply/equip.
    __currencyTemplates = {Currency.CREDITS: 'PurchaseForCreditsSysMessage',
     Currency.GOLD: 'PurchaseForGoldSysMessage'}

    def isNotify(self):
        return True

    def format(self, message, *args):
        vehicleCompDescr = message.data.get('vehTypeCD', None)
        result = message.data.get('result', None)
        typeID = message.data.get('typeID', None)
        cost = Money(*message.data.get('cost', ()))
        if vehicleCompDescr is not None and result is not None and typeID is not None:
            vt = vehicles_core.getVehicleType(vehicleCompDescr)
            # Repairs have a dedicated template; purchases pick one by currency.
            if typeID == AUTO_MAINTENANCE_TYPE.REPAIR:
                formatMsgType = 'RepairSysMessage'
            else:
                formatMsgType = self._getTemplateByCurrency(cost.getCurrency())
            msg = i18n.makeString(self.__messages[result][typeID]) % getUserName(vt)
            # Success is low priority; failures/warnings stay at medium.
            priorityLevel = NotificationPriorityLevel.MEDIUM
            if result == AUTO_MAINTENANCE_RESULT.OK:
                priorityLevel = NotificationPriorityLevel.LOW
                templateName = formatMsgType
            elif result == AUTO_MAINTENANCE_RESULT.NOT_ENOUGH_ASSETS:
                templateName = 'ErrorSysMessage'
            else:
                templateName = 'WarningSysMessage'
            if result == AUTO_MAINTENANCE_RESULT.OK:
                # Append the price actually paid on success.
                msg += shared_fmts.formatPrice(cost.toAbs())
            formatted = g_settings.msgTemplates.format(templateName, {'text': msg})
            return (formatted, self._getGuiSettings(message, priorityLevel=priorityLevel))
        else:
            return (None, None)
        return

    def _getTemplateByCurrency(self, currency):
        # Unknown currencies fall back to the credits template.
        return self.__currencyTemplates.get(currency, 'PurchaseForCreditsSysMessage')
class AchievementFormatter(ServiceChannelFormatter):
    """Formats the "achievement received" message, including rare achievements
    whose titles are resolved asynchronously."""

    def isNotify(self):
        return True

    def isAsync(self):
        return True

    @async
    @process
    def format(self, message, callback):
        # adisp coroutine; the first yield primes the generator.
        yield lambda callback: callback(True)
        achievesList = list()
        achieves = message.data.get('popUpRecords')
        if achieves is not None:
            # popUpRecords entries are (block, name) pairs; only the name is localized.
            achievesList.extend([ i18n.makeString('#achievements:{0[1]:s}'.format(name)) for name in achieves ])
        rares = [ rareID for rareID in message.data.get('rareAchievements', []) if rareID > 0 ]
        raresList = yield _processRareAchievements(rares)
        achievesList.extend(raresList)
        if not len(achievesList):
            callback((None, None))
            return
        else:
            formatted = g_settings.msgTemplates.format('achievementReceived', {'achieves': ', '.join(achievesList)})
            callback((formatted, self._getGuiSettings(message, 'achievementReceived')))
            return
class GoldReceivedFormatter(ServiceChannelFormatter):
    """Formats the notification about gold arriving on the account."""

    def format(self, message, *args):
        data = message.data
        gold = data.get('gold', None)
        transactionTime = data.get('date', None)
        # Both the amount and the transaction date are required.
        if not gold or not transactionTime:
            return (None, None)
        ctx = {'date': TimeFormatter.getLongDatetimeFormat(transactionTime),
               'gold': BigWorld.wg_getGoldFormat(account_helpers.convertGold(gold))}
        formatted = g_settings.msgTemplates.format('goldReceived', ctx)
        return (formatted, self._getGuiSettings(message, 'goldReceived'))
class GiftReceivedFormatter(ServiceChannelFormatter):
    """Formats "gift received" messages for money, xp, premium time, items
    and vehicles.

    Dispatch is table-driven: __handlers maps the gift type to the
    name-mangled handler method name plus its template key(s), and format()
    resolves the handler with getattr.
    """
    # gift type -> (mangled handler method name, template key or key map).
    __handlers = {'money': ('_GiftReceivedFormatter__formatMoneyGiftMsg', {1: 'creditsReceivedAsGift',
                2: 'goldReceivedAsGift',
                3: 'creditsAndGoldReceivedAsGift'}),
     'xp': ('_GiftReceivedFormatter__formatXPGiftMsg', 'xpReceivedAsGift'),
     'premium': ('_GiftReceivedFormatter__formatPremiumGiftMsg', 'premiumReceivedAsGift'),
     'item': ('_GiftReceivedFormatter__formatItemGiftMsg', 'itemReceivedAsGift'),
     'vehicle': ('_GiftReceivedFormatter__formatVehicleGiftMsg', 'vehicleReceivedAsGift')}

    def format(self, message, *args):
        data = message.data
        giftType = data.get('type')
        if giftType is not None:
            handlerName, templateKey = self.__handlers.get(giftType, (None, None))
            if handlerName is not None:
                # The handler may refine templateKey (money picks by bitmask).
                formatted, templateKey = getattr(self, handlerName)(templateKey, data)
                return (formatted, self._getGuiSettings(message, templateKey))
        return (None, None)

    def __formatMoneyGiftMsg(self, keys, data):
        # Bitmask: 1 = credits present, 2 = gold present, 3 = both;
        # the mask selects the template from the keys map.
        accCredits = data.get('credits', 0)
        gold = data.get('gold', 0)
        result = (None, '')
        ctx = {}
        idx = 0
        if accCredits > 0:
            idx |= 1
            ctx['credits'] = BigWorld.wg_getIntegralFormat(accCredits)
        if gold > 0:
            idx |= 2
            ctx['gold'] = BigWorld.wg_getGoldFormat(gold)
        if idx in keys:
            key = keys[idx]
            result = (g_settings.msgTemplates.format(keys[idx], ctx), key)
        return result

    def __formatXPGiftMsg(self, key, data):
        xp = data.get('amount', 0)
        result = None
        if xp > 0:
            result = g_settings.msgTemplates.format(key, ctx={'freeXP': BigWorld.wg_getIntegralFormat(xp)})
        return (result, key)

    def __formatPremiumGiftMsg(self, key, data):
        # 'amount' is the number of premium days gifted.
        days = data.get('amount', 0)
        result = None
        if days > 0:
            result = g_settings.msgTemplates.format(key, ctx={'days': days})
        return (result, key)

    def __formatItemGiftMsg(self, key, data):
        # Requires a positive amount plus the item type index and compact descriptor.
        amount = data.get('amount', 0)
        result = None
        itemTypeIdx = data.get('itemTypeIdx')
        itemCompactDesc = data.get('itemCD')
        if amount > 0 and itemTypeIdx is not None and itemCompactDesc is not None:
            result = g_settings.msgTemplates.format(key, ctx={'typeName': getTypeInfoByIndex(itemTypeIdx)['userString'],
             'itemName': vehicles_core.getDictDescr(itemCompactDesc)['userString'],
             'amount': amount})
        return (result, key)

    def __formatVehicleGiftMsg(self, key, data):
        vCompDesc = data.get('typeCD', None)
        result = None
        if vCompDesc is not None:
            result = g_settings.msgTemplates.format(key, ctx={'vehicleName': getUserName(vehicles_core.getVehicleType(vCompDesc))})
        return (result, key)
class InvoiceReceivedFormatter(WaitItemsSyncFormatter):
__assetHandlers = {INVOICE_ASSET.GOLD: '_formatAmount',
INVOICE_ASSET.CREDITS: '_formatAmount',
INVOICE_ASSET.PREMIUM: '_formatAmount',
INVOICE_ASSET.FREE_XP: '_formatAmount',
INVOICE_ASSET.DATA: '_formatData'}
__operationTemplateKeys = {INVOICE_ASSET.GOLD: 'goldAccruedInvoiceReceived',
INVOICE_ASSET.CREDITS: 'creditsAccruedInvoiceReceived',
INVOICE_ASSET.PREMIUM: 'premiumAccruedInvoiceReceived',
INVOICE_ASSET.FREE_XP: 'freeXpAccruedInvoiceReceived',
INVOICE_ASSET.GOLD | 16: 'goldDebitedInvoiceReceived',
INVOICE_ASSET.CREDITS | 16: 'creditsDebitedInvoiceReceived',
INVOICE_ASSET.PREMIUM | 16: 'premiumDebitedInvoiceReceived',
INVOICE_ASSET.FREE_XP | 16: 'freeXpDebitedInvoiceReceived'}
__messageTemplateKeys = {INVOICE_ASSET.GOLD: 'goldInvoiceReceived',
INVOICE_ASSET.CREDITS: 'creditsInvoiceReceived',
INVOICE_ASSET.PREMIUM: 'premiumInvoiceReceived',
INVOICE_ASSET.FREE_XP: 'freeXpInvoiceReceived',
INVOICE_ASSET.DATA: 'dataInvoiceReceived'}
__i18nPiecesString = i18n.makeString('#{0:s}:serviceChannelMessages/invoiceReceived/pieces'.format(MESSENGER_I18N_FILE))
__i18nCrewString = i18n.makeString('#{0:s}:serviceChannelMessages/invoiceReceived/crewOnVehicle'.format(MESSENGER_I18N_FILE))
__i18nCrewWithLvlDroppedString = i18n.makeString('#{0:s}:serviceChannelMessages/invoiceReceived/crewWithLvlDroppedToBarracks'.format(MESSENGER_I18N_FILE))
__i18nCrewDroppedString = i18n.makeString('#{0:s}:serviceChannelMessages/invoiceReceived/crewDroppedToBarracks'.format(MESSENGER_I18N_FILE))
__i18nCrewWithdrawnString = i18n.makeString('#{0:s}:serviceChannelMessages/invoiceReceived/crewWithdrawn'.format(MESSENGER_I18N_FILE))
def _getMessageTemplateKey(self, assetType):
return self.__messageTemplateKeys[assetType]
def _getOperationTimeString(self, data):
operationTime = data.get('at', None)
if operationTime:
fDatetime = TimeFormatter.getLongDatetimeFormat(time_utils.makeLocalServerTime(operationTime))
else:
fDatetime = 'N/A'
return fDatetime
def __getFinOperationString(self, assetType, amount):
templateKey = 0 if amount > 0 else 16
templateKey |= assetType
ctx = {}
if assetType == INVOICE_ASSET.GOLD:
ctx['amount'] = BigWorld.wg_getGoldFormat(abs(amount))
else:
ctx['amount'] = BigWorld.wg_getIntegralFormat(abs(amount))
return g_settings.htmlTemplates.format(self.__operationTemplateKeys[templateKey], ctx=ctx)
def __getItemsString(self, items):
accrued = []
debited = []
for itemCompactDescr, count in items.iteritems():
if count:
try:
item = vehicles_core.getDictDescr(itemCompactDescr)
itemString = '{0:s} "{1:s}" - {2:d} {3:s}'.format(getTypeInfoByName(item['itemTypeName'])['userString'], item['userString'], abs(count), self.__i18nPiecesString)
if count > 0:
accrued.append(itemString)
else:
debited.append(itemString)
except:
LOG_ERROR('itemCompactDescr can not parse ', itemCompactDescr)
LOG_CURRENT_EXCEPTION()
result = ''
if len(accrued):
result = g_settings.htmlTemplates.format('itemsAccruedInvoiceReceived', ctx={'items': ', '.join(accrued)})
if len(debited):
if len(result):
result += '<br/>'
result += g_settings.htmlTemplates.format('itemsDebitedInvoiceReceived', ctx={'items': ', '.join(debited)})
return result
@classmethod
def __getVehicleInfo(cls, vehData, isWithdrawn):
vInfo = []
if isWithdrawn:
toBarracks = not vehData.get('dismissCrew', False)
action = cls.__i18nCrewDroppedString if toBarracks else cls.__i18nCrewWithdrawnString
vInfo.append(action)
else:
if 'rent' in vehData:
time = vehData['rent'].get('time', None)
rentDays = None
if time:
if time == float('inf'):
pass
elif time <= time_utils.DAYS_IN_YEAR:
rentDays = time
if rentDays:
rentDays = g_settings.htmlTemplates.format('rentDays', {'value': str(rentDays)})
vInfo.append(rentDays)
crewLevel = calculateRoleLevel(vehData.get('crewLvl', 50), vehData.get('crewFreeXP', 0))
if crewLevel > 50:
if not vehData.get('dismissCrew', False) and ('crewFreeXP' in vehData or 'crewLvl' in vehData or 'tankmen' in vehData):
crewWithLevelString = cls.__i18nCrewWithLvlDroppedString % crewLevel
else:
crewWithLevelString = cls.__i18nCrewString % crewLevel
vInfo.append(crewWithLevelString)
return '; '.join(vInfo)
@classmethod
def __getVehicleName(cls, vehCompDescr):
vehicleName = None
try:
if vehCompDescr < 0:
vehCompDescr = abs(vehCompDescr)
vehicleName = getUserName(vehicles_core.getVehicleType(vehCompDescr))
except:
LOG_ERROR('Wrong vehicle compact descriptor', vehCompDescr)
LOG_CURRENT_EXCEPTION()
return vehicleName
@classmethod
def _getVehicleNames(cls, vehicles):
addVehNames = []
removeVehNames = []
rentedVehNames = []
for vehCompDescr, vehData in vehicles.iteritems():
if 'customCompensation' in vehData:
continue
isNegative = False
if type(vehCompDescr) is types.IntType:
isNegative = vehCompDescr < 0
isRented = 'rent' in vehData
vehicleName = cls.__getVehicleName(vehCompDescr)
if vehicleName is None:
continue
vehicleInfo = cls.__getVehicleInfo(vehData, isNegative)
vehicleInfoString = ' ({0:s})'.format(vehicleInfo) if vehicleInfo else ''
vehUserString = '{0:s}{1:s}'.format(vehicleName, vehicleInfoString)
if isNegative:
removeVehNames.append(vehUserString)
elif isRented:
rentedVehNames.append(vehUserString)
else:
addVehNames.append(vehUserString)
return (addVehNames, removeVehNames, rentedVehNames)
@classmethod
def _getVehiclesString(cls, vehicles, htmlTplPostfix = 'InvoiceReceived'):
addVehNames, removeVehNames, rentedVehNames = cls._getVehicleNames(vehicles)
result = []
if len(addVehNames):
result.append(g_settings.htmlTemplates.format('vehiclesAccrued' + htmlTplPostfix, ctx={'vehicles': ', '.join(addVehNames)}))
if len(removeVehNames):
result.append(g_settings.htmlTemplates.format('vehiclesDebited' + htmlTplPostfix, ctx={'vehicles': ', '.join(removeVehNames)}))
if len(rentedVehNames):
result.append(g_settings.htmlTemplates.format('vehiclesRented' + htmlTplPostfix, ctx={'vehicles': ', '.join(rentedVehNames)}))
return '<br/>'.join(result)
@classmethod
def _getComptnString(cls, vehicles, htmlTplPostfix = 'InvoiceReceived'):
html = g_settings.htmlTemplates
result = []
for vehCompDescr, vehData in vehicles.iteritems():
vehicleName = cls.__getVehicleName(vehCompDescr)
if vehicleName is None:
continue
if 'rentCompensation' in vehData:
comp = Money(*vehData['rentCompensation'])
currency = comp.getCurrency(byWeight=True)
formatter = getBWFormatter(currency)
key = '{}RentCompensationReceived'.format(currency)
ctx = {currency: formatter(comp.get(currency)),
'vehicleName': vehicleName}
result.append(html.format(key, ctx=ctx))
if 'customCompensation' in vehData:
itemNames = [vehicleName]
comp = Money(*vehData['customCompensation'])
values = []
currencies = comp.getSetCurrencies(byWeight=True)
for currency in currencies:
formatter = getBWFormatter(currency)
key = '{}Compensation'.format(currency)
values.append(html.format(key + htmlTplPostfix, ctx={'amount': formatter(comp.get(currency))}))
if len(values):
result.append(html.format('compensationFor' + htmlTplPostfix, ctx={'items': ', '.join(itemNames),
'compensation': ', '.join(values)}))
return '<br/>'.join(result)
@classmethod
def _getTankmenString(cls, tmen):
tmanUserStrings = []
for tmanData in tmen:
try:
if isinstance(tmanData, dict):
tankman = Tankman(tmanData['tmanCompDescr'])
else:
tankman = Tankman(tmanData)
tmanUserStrings.append('{0:s} {1:s} ({2:s}, {3:s}, {4:d}%)'.format(tankman.rankUserName, tankman.lastUserName, tankman.roleUserName, getUserName(tankman.vehicleNativeDescr.type), tankman.roleLevel))
except:
LOG_ERROR('Wrong tankman data', tmanData)
LOG_CURRENT_EXCEPTION()
result = ''
if len(tmanUserStrings):
result = g_settings.htmlTemplates.format('tankmenInvoiceReceived', ctx={'tankman': ', '.join(tmanUserStrings)})
return result
    @classmethod
    def _getGoodiesString(cls, goodies):
        """Build the '; '-joined HTML description of received goodies.

        goodies: mapping of goodie id -> info.  Ids present in the shop
        boosters list are rendered as boosters; all others as discounts.
        Disabled or unknown goodies are silently omitted.
        """
        result = []
        boostersStrings = []
        discountsStrings = []
        for goodieID, ginfo in goodies.iteritems():
            if goodieID in g_itemsCache.items.shop.boosters:
                booster = g_goodiesCache.getBooster(goodieID)
                if booster is not None and booster.enabled:
                    boostersStrings.append(booster.userName)
            else:
                discount = g_goodiesCache.getDiscount(goodieID)
                if discount is not None and discount.enabled:
                    discountsStrings.append(discount.description)
        if len(boostersStrings):
            result.append(g_settings.htmlTemplates.format('boostersInvoiceReceived', ctx={'boosters': ', '.join(boostersStrings)}))
        if len(discountsStrings):
            result.append(g_settings.htmlTemplates.format('discountsInvoiceReceived', ctx={'discounts': ', '.join(discountsStrings)}))
        return '; '.join(result)
def __getSlotsString(self, slots):
if slots > 0:
template = 'slotsAccruedInvoiceReceived'
else:
template = 'slotsDebitedInvoiceReceived'
return g_settings.htmlTemplates.format(template, {'amount': BigWorld.wg_getIntegralFormat(abs(slots))})
@classmethod
def __getBerthsString(cls, berths):
if berths > 0:
template = 'berthsAccruedInvoiceReceived'
else:
template = 'berthsDebitedInvoiceReceived'
return g_settings.htmlTemplates.format(template, {'amount': BigWorld.wg_getIntegralFormat(abs(berths))})
    @async
    @process
    def __prerocessRareAchievements(self, data, callback):
        """Asynchronously resolve rare-achievement names for the invoice.

        Fills self.__dossierResult with HTML lines for accrued (value > 0)
        and debited (value < 0) rare achievements, then fires *callback*.
        NOTE(review): the 'prerocess' typo in the name is part of the
        interface -- format() calls it under this exact name.
        """
        yield lambda callback: callback(True)
        dossiers = data.get('data', {}).get('dossier', {})
        if dossiers:
            self.__dossierResult = []
            # Rare-block records carry the achievement id in 'value'.
            rares = [ rec['value'] for d in dossiers.itervalues() for (blck, _), rec in d.iteritems() if blck == ACHIEVEMENT_BLOCK.RARE ]
            addDossierStrings = [ i18n.makeString('#achievements:{0:s}'.format(name)) for rec in dossiers.itervalues() for _, name in rec if name != '' ]
            addDossiers = [ rare for rare in rares if rare > 0 ]
            if addDossiers:
                addDossierStrings += (yield _processRareAchievements(addDossiers))
            if addDossierStrings:
                self.__dossierResult.append(g_settings.htmlTemplates.format('dossiersAccruedInvoiceReceived', ctx={'dossiers': ', '.join(addDossierStrings)}))
            delDossiers = [ abs(rare) for rare in rares if rare < 0 ]
            if delDossiers:
                delDossierStrings = yield _processRareAchievements(delDossiers)
                self.__dossierResult.append(g_settings.htmlTemplates.format('dossiersDebitedInvoiceReceived', ctx={'dossiers': ', '.join(delDossierStrings)}))
        callback(True)
    def __getDossierString(self):
        # Joins the lines prepared earlier by __prerocessRareAchievements.
        return '<br/>'.join(self.__dossierResult)
def __getTankmenFreeXPString(self, data):
freeXP = set()
spec = []
for tankmenDescr, xp in data.iteritems():
freeXP.add(xp)
tankman = Tankman(tankmenDescr)
spec.append('{} {} {}'.format(tankman.fullUserName, tankman.roleUserName, getShortUserName(tankman.vehicleNativeDescr.type)))
specStr = ' ({})'.format(', '.join(spec)) if spec else ''
if not len(freeXP) == 1:
raise AssertionError('Invoice has the same amount of experience to all tankmen')
freeXP = freeXP.pop()
template = freeXP > 0 and 'tankmenFreeXpAccruedInvoiceReceived'
else:
template = 'tankmenFreeXpDebitedInvoiceReceived'
return g_settings.htmlTemplates.format(template, {'tankmenFreeXp': BigWorld.wg_getIntegralFormat(abs(freeXP)),
'spec': specStr})
    def __getL10nDescription(self, data):
        """Return the invoice's localized description, HTML-escaped.

        Returns '' when absent; otherwise the text prefixed with '<br/>'
        so it can be appended directly after the operation summary.
        """
        descr = ''
        lData = getLocalizedData(data.get('data', {}), 'localized_description', defVal=None)
        if lData:
            descr = i18n.encodeUtf8(html.escape(lData.get('description', u'')))
        if len(descr):
            descr = '<br/>' + descr
        return descr
    @classmethod
    def _processCompensations(cls, data):
        """Deduct vehicle compensations from the invoice's money totals.

        Sums 'rentCompensation' and 'customCompensation' over all vehicles
        and subtracts the total from data['gold'] / data['credits'] so the
        compensation is not double-reported; keys that become exactly 0
        are removed from *data* (mutated in place).
        """
        vehicles = data.get('vehicles')
        comp = ZERO_MONEY
        if vehicles is not None:
            for value in vehicles.itervalues():
                if 'rentCompensation' in value:
                    comp += Money(*value['rentCompensation'])
                if 'customCompensation' in value:
                    comp += Money(*value['customCompensation'])
        if 'gold' in data:
            data['gold'] -= comp.gold
            if data['gold'] == 0:
                del data['gold']
        if 'credits' in data:
            data['credits'] -= comp.credits
            if data['credits'] == 0:
                del data['credits']
        return
    def _formatAmount(self, assetType, data):
        """Format a plain single-amount invoice (gold/credits/xp/premium).

        Returns None when *data* carries no 'amount' key.
        """
        amount = data.get('amount', None)
        if amount is None:
            return
        else:
            return g_settings.msgTemplates.format(self._getMessageTemplateKey(assetType), ctx={'at': self._getOperationTimeString(data),
             'desc': self.__getL10nDescription(data),
             'op': self.__getFinOperationString(assetType, amount)})
    def _composeOperations(self, data):
        """Compose the list of per-asset HTML operation lines for an invoice.

        Reads data['data'] and appends one formatted string per present
        asset kind (gold, credits, free XP, premium, items, vehicles and
        their compensations/tankmen, slots, berths, goodies, dossier,
        customizations, tankmen free XP).  Order here defines display
        order.  Returns None when there is no payload.
        """
        dataEx = data.get('data', {})
        if not dataEx:
            return
        else:
            operations = []
            # Compensations must be deducted before the money lines are built.
            self._processCompensations(dataEx)
            gold = dataEx.get('gold')
            if gold is not None:
                operations.append(self.__getFinOperationString(INVOICE_ASSET.GOLD, gold))
            accCredtis = dataEx.get('credits')
            if accCredtis is not None:
                operations.append(self.__getFinOperationString(INVOICE_ASSET.CREDITS, accCredtis))
            freeXp = dataEx.get('freeXP')
            if freeXp is not None:
                operations.append(self.__getFinOperationString(INVOICE_ASSET.FREE_XP, freeXp))
            premium = dataEx.get('premium')
            if premium is not None:
                operations.append(self.__getFinOperationString(INVOICE_ASSET.PREMIUM, premium))
            items = dataEx.get('items', {})
            if items:
                operations.append(self.__getItemsString(items))
            tmen = dataEx.get('tankmen', [])
            vehicles = dataEx.get('vehicles', {})
            if vehicles:
                result = self._getVehiclesString(vehicles)
                if len(result):
                    operations.append(result)
                comptnStr = self._getComptnString(vehicles)
                if len(comptnStr):
                    operations.append(comptnStr)
                # Tankmen riding inside invoiced vehicles join the plain list.
                for v in vehicles.itervalues():
                    tmen.extend(v.get('tankmen', []))
            if tmen:
                operations.append(self._getTankmenString(tmen))
            slots = dataEx.get('slots')
            if slots:
                operations.append(self.__getSlotsString(slots))
            berths = dataEx.get('berths')
            if berths:
                operations.append(self.__getBerthsString(berths))
            goodies = dataEx.get('goodies', {})
            if goodies:
                strGoodies = self._getGoodiesString(goodies)
                if strGoodies:
                    operations.append(strGoodies)
            dossier = dataEx.get('dossier', {})
            if dossier:
                operations.append(self.__getDossierString())
            _extendCustomizationData(dataEx, operations)
            tankmenFreeXP = dataEx.get('tankmenFreeXP', {})
            if tankmenFreeXP:
                operations.append(self.__getTankmenFreeXPString(tankmenFreeXP))
            return operations
    def _formatData(self, assetType, data):
        """Format a multi-asset invoice message; None when it has no payload."""
        operations = self._composeOperations(data)
        if not operations:
            return None
        else:
            return g_settings.msgTemplates.format(self._getMessageTemplateKey(assetType), ctx={'at': self._getOperationTimeString(data),
             'desc': self.__getL10nDescription(data),
             'op': '<br/>'.join(operations)})
    @async
    @process
    def format(self, message, callback):
        """Asynchronously format an invoice message.

        Waits for the items cache to sync, preprocesses rare achievements,
        then dispatches on the invoice asset type via __assetHandlers.
        Fires *callback* with a (formatted, settings) pair; both None when
        the message cannot be formatted.
        """
        yield lambda callback: callback(True)
        isSynced = yield self._waitForSyncItems()
        formatted, settings = (None, None)
        if isSynced:
            data = message.data
            yield self.__prerocessRareAchievements(data)
            assetType = data.get('assetType', -1)
            handler = self.__assetHandlers.get(assetType)
            if handler is not None:
                formatted = getattr(self, handler)(assetType, data)
            if formatted is not None:
                settings = self._getGuiSettings(message, self._getMessageTemplateKey(assetType))
        callback((formatted, settings))
        return
class AdminMessageFormatter(ServiceChannelFormatter):
    """Formats administrator broadcast messages (string or localized dict)."""

    def format(self, message, *args):
        data = decompressSysMessage(message.data)
        if not data:
            return (None, None)
        dataType = type(data)
        text = ''
        if dataType in types.StringTypes:
            text = data
        elif dataType is types.DictType:
            # Localized payload: pick the text for the current language.
            text = getLocalizedData({'value': data}, 'value')
        if not text:
            LOG_ERROR('Text of message not found', message)
            return (None, None)
        formatted = g_settings.msgTemplates.format('adminMessage', {'text': text})
        return (formatted, self._getGuiSettings(message, 'adminMessage'))
class AccountTypeChangedFormatter(ServiceChannelFormatter):
    """Formats 'account type changed' (premium/base) system messages."""

    def format(self, message, *args):
        data = message.data
        isPremium = data.get('isPremium', None)
        expiryTime = data.get('expiryTime', None)
        result = (None, None)
        if isPremium is not None:
            accountTypeName = i18n.makeString('#menu:accountTypes/premium') if isPremium else i18n.makeString('#menu:accountTypes/base')
            expiryDatetime = TimeFormatter.getLongDatetimeFormat(expiryTime) if expiryTime else None
            # Use the template with an expiry line only when a time is known.
            if expiryDatetime:
                templateKey = 'accountTypeChangedWithExpiration'
                ctx = {'accType': accountTypeName,
                 'expiryTime': expiryDatetime}
            else:
                templateKey = 'accountTypeChanged'
                ctx = {'accType': accountTypeName}
            formatted = g_settings.msgTemplates.format(templateKey, ctx=ctx)
            result = (formatted, self._getGuiSettings(message, templateKey))
        return result
class PremiumActionFormatter(ServiceChannelFormatter):
    """Base class for premium bought/extended/expired messages.

    Subclasses set _templateKey and implement _getMessage().
    """
    _templateKey = None

    def _getMessage(self, isPremium, expiryTime):
        # Subclass hook: build the formatted message or return None.
        return None

    def format(self, message, *args):
        data = message.data
        isPremium = data.get('isPremium', None)
        expiryTime = data.get('expiryTime', None)
        if isPremium is None:
            return (None, None)
        return (self._getMessage(isPremium, expiryTime), self._getGuiSettings(message, self._templateKey))
class PremiumBoughtFormatter(PremiumActionFormatter):
    """Formats the 'premium account bought' message with its expiry time."""
    _templateKey = 'premiumBought'

    def _getMessage(self, isPremium, expiryTime):
        result = None
        if isPremium is True and expiryTime > 0:
            result = g_settings.msgTemplates.format(self._templateKey, ctx={'expiryTime': text_styles.titleFont(TimeFormatter.getLongDatetimeFormat(expiryTime))})
        return result
class PremiumExtendedFormatter(PremiumBoughtFormatter):
    """Same as PremiumBoughtFormatter but using the 'premiumExtended' template."""
    _templateKey = 'premiumExtended'
class PremiumExpiredFormatter(PremiumActionFormatter):
    """Formats the 'premium account expired' message."""
    _templateKey = 'premiumExpired'

    def _getMessage(self, isPremium, expiryTime):
        result = None
        if isPremium is False:
            result = g_settings.msgTemplates.format(self._templateKey)
        return result
class PrebattleFormatter(ServiceChannelFormatter):
    """Base formatter for prebattle (tournament/clan) service messages.

    Provides helpers to build battle-type/description strings, opponent
    lines and win/defeat/draw result strings shared by the arena-finish,
    kick and destruction formatters below.
    """
    __battleTypeByPrebattleType = {PREBATTLE_TYPE.TOURNAMENT: 'tournament',
     PREBATTLE_TYPE.CLAN: 'clan'}
    # finishReason -> (i18n key suffix, whether to append a win/defeat result)
    _battleFinishReasonKeys = {}
    _defaultBattleFinishReasonKey = ('base', True)

    def isNotify(self):
        return True

    def _getIconId(self, prbType):
        # Select the notification icon matching the prebattle type.
        iconId = 'BattleResultIcon'
        if prbType == PREBATTLE_TYPE.CLAN:
            iconId = 'ClanBattleResultIcon'
        elif prbType == PREBATTLE_TYPE.TOURNAMENT:
            iconId = 'TournamentBattleResultIcon'
        return iconId

    def _makeBattleTypeString(self, prbType):
        """Return the localized battle-type label for *prbType*."""
        typeString = self.__battleTypeByPrebattleType.get(prbType, 'prebattle')
        key = '#{0:s}:serviceChannelMessages/prebattle/battleType/{1:s}'.format(MESSENGER_I18N_FILE, typeString)
        return i18n.makeString(key)

    def _makeDescriptionString(self, data, showBattlesCount = True):
        """Return the prebattle description, optionally with 'battle N of M'."""
        if data.has_key('localized_data') and len(data['localized_data']):
            description = getPrebattleFullDescription(data, escapeHtml=True)
        else:
            prbType = data.get('type')
            description = self._makeBattleTypeString(prbType)
        battlesLimit = data.get('battlesLimit', 0)
        if showBattlesCount and battlesLimit > 1:
            battlesCount = data.get('battlesCount')
            if battlesCount > 0:
                key = '#{0:s}:serviceChannelMessages/prebattle/numberOfBattle'.format(MESSENGER_I18N_FILE)
                numberOfBattleString = i18n.makeString(key, battlesCount)
                description = '{0:s} {1:s}'.format(description, numberOfBattleString)
            else:
                LOG_WARNING('Invalid value of battlesCount ', battlesCount)
        return description

    def _getOpponentsString(self, opponents):
        """Return the 'first vs second' HTML line; '' if either name is missing."""
        first = i18n.encodeUtf8(opponents.get('1', {}).get('name', ''))
        second = i18n.encodeUtf8(opponents.get('2', {}).get('name', ''))
        result = ''
        if len(first) > 0 and len(second) > 0:
            result = g_settings.htmlTemplates.format('prebattleOpponents', ctx={'first': html.escape(first),
             'second': html.escape(second)})
        return result

    def _getBattleResultString(self, winner, team):
        """Map (winner, own team) to 'win'/'defeat'/'draftGame'/'undefined'.

        winner: 0 = draw, 1/2 = winning team number; team: own team (1 or 2).
        """
        result = 'undefined'
        if 3 > winner > -1 and team in (1, 2):
            if not winner:
                result = 'draftGame'
            else:
                result = 'defeat' if team != winner else 'win'
        return result

    def _makeBattleResultString(self, finishReason, winner, team):
        """Return the localized finish line; includes win/defeat only when
        the finish reason's flag says the result should be shown."""
        finishString, showResult = self._battleFinishReasonKeys.get(finishReason, self._defaultBattleFinishReasonKey)
        if showResult:
            resultString = self._getBattleResultString(winner, team)
            key = '#{0:s}:serviceChannelMessages/prebattle/finish/{1:s}/{2:s}'.format(MESSENGER_I18N_FILE, finishString, resultString)
        else:
            key = '#{0:s}:serviceChannelMessages/prebattle/finish/{1:s}'.format(MESSENGER_I18N_FILE, finishString)
        return i18n.makeString(key)
class PrebattleArenaFinishFormatter(PrebattleFormatter):
    """Formats the end-of-arena message for a prebattle session.

    Shows the single-battle result plus either a running subtotal or, when
    the battles/wins limit has been reached, the overall session result.
    """
    _battleFinishReasonKeys = {FINISH_REASON.TECHNICAL: ('technical', True),
     FINISH_REASON.FAILURE: ('failure', False),
     FINISH_REASON.UNKNOWN: ('failure', False)}

    def format(self, message, *args):
        LOG_DEBUG('prbArenaFinish', message)
        data = message.data
        prbType = data.get('type')
        winner = data.get('winner')
        team = data.get('team')
        wins = data.get('wins')
        finishReason = data.get('finishReason')
        if None in [prbType,
         winner,
         team,
         wins,
         finishReason]:
            return
        else:
            battleResult = self._makeBattleResultString(finishReason, winner, team)
            subtotal = ''
            battlesLimit = data.get('battlesLimit', 0)
            if battlesLimit > 1:
                battlesCount = data.get('battlesCount', -1)
                winsLimit = data.get('winsLimit', -1)
                # Session over: derive the overall winner from the win counts.
                if battlesCount == battlesLimit or winsLimit == wins[1] or winsLimit == wins[2]:
                    playerTeamWins = wins[team]
                    otherTeamWins = wins[2 if team == 1 else 1]
                    if winsLimit > 0 and playerTeamWins < winsLimit and otherTeamWins < winsLimit:
                        winner = None
                    elif playerTeamWins == otherTeamWins:
                        winner = 0
                    else:
                        winner = 1 if wins[1] > wins[2] else 2
                    # -1 finishReason -> default 'base' key with result shown.
                    sessionResult = self._makeBattleResultString(-1, winner, team)
                    subtotal = g_settings.htmlTemplates.format('prebattleTotal', ctx={'result': sessionResult,
                     'first': wins[1],
                     'second': wins[2]})
                else:
                    subtotal = g_settings.htmlTemplates.format('prebattleSubtotal', ctx={'first': wins[1],
                     'second': wins[2]})
            formatted = g_settings.msgTemplates.format('prebattleArenaFinish', ctx={'desc': self._makeDescriptionString(data),
             'opponents': self._getOpponentsString(data.get('opponents', {})),
             'result': battleResult,
             'subtotal': subtotal}, data={'timestamp': _getTimeStamp(message),
             'icon': self._getIconId(prbType)})
            return (formatted, self._getGuiSettings(message, 'prebattleArenaFinish'))
class PrebattleKickFormatter(PrebattleFormatter):
    """Formats the 'kicked from prebattle' message (squad/team/unknown)."""

    def format(self, message, *args):
        data = message.data
        result = (None, None)
        prbType = data.get('type')
        kickReason = data.get('kickReason')
        if prbType > 0 and kickReason > 0:
            ctx = {}
            key = '#system_messages:prebattle/kick/type/unknown'
            if prbType in PREBATTLE_TYPE.SQUAD_PREBATTLES:
                key = '#system_messages:prebattle/kick/type/squad'
            elif prbType == PREBATTLE_TYPE.COMPANY:
                key = '#system_messages:prebattle/kick/type/team'
            ctx['type'] = i18n.makeString(key)
            kickName = KICK_REASON_NAMES[kickReason]
            key = '#system_messages:prebattle/kick/reason/{0:s}'.format(kickName)
            ctx['reason'] = i18n.makeString(key)
            formatted = g_settings.msgTemplates.format('prebattleKick', ctx=ctx)
            result = (formatted, self._getGuiSettings(message, 'prebattleKick'))
        return result
class PrebattleDestructionFormatter(PrebattleFormatter):
    """Formats the 'prebattle dissolved' message with the session total."""
    # kickReason -> (i18n key suffix, whether to append a win/defeat result)
    _battleFinishReasonKeys = {KICK_REASON.ARENA_CREATION_FAILURE: ('failure', False),
     KICK_REASON.AVATAR_CREATION_FAILURE: ('failure', False),
     KICK_REASON.VEHICLE_CREATION_FAILURE: ('failure', False),
     KICK_REASON.PREBATTLE_CREATION_FAILURE: ('failure', False),
     KICK_REASON.BASEAPP_CRASH: ('failure', False),
     KICK_REASON.CELLAPP_CRASH: ('failure', False),
     KICK_REASON.UNKNOWN_FAILURE: ('failure', False),
     KICK_REASON.CREATOR_LEFT: ('creatorLeft', False),
     KICK_REASON.PLAYERKICK: ('playerKick', False),
     KICK_REASON.TIMEOUT: ('timeout', False)}

    def format(self, message, *args):
        LOG_DEBUG('prbDestruction', message)
        data = message.data
        prbType = data.get('type')
        team = data.get('team')
        wins = data.get('wins')
        kickReason = data.get('kickReason')
        if None in [prbType,
         team,
         wins,
         kickReason]:
            return (None, None)
        else:
            # Determine the session winner from the accumulated win counts.
            playerTeamWins = wins[team]
            otherTeamWins = wins[2 if team == 1 else 1]
            winsLimit = data.get('winsLimit')
            if winsLimit > 0 and playerTeamWins < winsLimit and otherTeamWins < winsLimit:
                winner = None
            elif playerTeamWins == otherTeamWins:
                winner = 0
            else:
                winner = 1 if wins[1] > wins[2] else 2
            battleResult = self._makeBattleResultString(kickReason, winner, team)
            total = ''
            if data.get('battlesLimit', 0) > 1:
                total = '({0:d}:{1:d})'.format(wins[1], wins[2])
            formatted = g_settings.msgTemplates.format('prebattleDestruction', ctx={'desc': self._makeDescriptionString(data, showBattlesCount=False),
             'opponents': self._getOpponentsString(data.get('opponents', {})),
             'result': battleResult,
             'total': total}, data={'timestamp': _getTimeStamp(message),
             'icon': self._getIconId(prbType)})
            return (formatted, self._getGuiSettings(message, 'prebattleDestruction'))
class VehCamouflageTimedOutFormatter(ServiceChannelFormatter):
    """Formats the 'vehicle camouflage rental expired' message."""

    def isNotify(self):
        return True

    def format(self, message, *args):
        data = message.data
        formatted = None
        vehTypeCompDescr = data.get('vehTypeCompDescr')
        if vehTypeCompDescr is not None:
            vType = vehicles_core.getVehicleType(vehTypeCompDescr)
            if vType is not None:
                formatted = g_settings.msgTemplates.format('vehCamouflageTimedOut', ctx={'vehicleName': getUserName(vType)})
        return (formatted, self._getGuiSettings(message, 'vehCamouflageTimedOut'))
class VehEmblemTimedOutFormatter(ServiceChannelFormatter):
    """Formats the 'vehicle emblem rental expired' message."""

    def isNotify(self):
        return True

    def format(self, message, *args):
        formatted = None
        vehTypeCompDescr = message.data.get('vehTypeCompDescr')
        if vehTypeCompDescr is not None:
            vType = vehicles_core.getVehicleType(vehTypeCompDescr)
            if vType is not None:
                formatted = g_settings.msgTemplates.format('vehEmblemTimedOut', ctx={'vehicleName': getUserName(vType)})
        return (formatted, self._getGuiSettings(message, 'vehEmblemTimedOut'))
class VehInscriptionTimedOutFormatter(ServiceChannelFormatter):
    """Formats the 'vehicle inscription rental expired' message."""

    def isNotify(self):
        return True

    def format(self, message, *args):
        data = message.data
        formatted = None
        vehTypeCompDescr = data.get('vehTypeCompDescr')
        if vehTypeCompDescr is not None:
            vType = vehicles_core.getVehicleType(vehTypeCompDescr)
            if vType is not None:
                formatted = g_settings.msgTemplates.format('vehInscriptionTimedOut', ctx={'vehicleName': getUserName(vType)})
        return (formatted, self._getGuiSettings(message, 'vehInscriptionTimedOut'))
class ConverterFormatter(ServiceChannelFormatter):
    """Formats the conversion-results message (items/vehicles/money moved
    during a game-data conversion), one '<br/>'-separated line per kind."""

    def __i18nValue(self, key, isReceived, **kwargs):
        # Pick the 'Received'/'Withdrawn' i18n variant and substitute kwargs.
        key = ('%sReceived' if isReceived else '%sWithdrawn') % key
        key = '#messenger:serviceChannelMessages/sysMsg/converter/%s' % key
        return i18n.makeString(key) % kwargs

    def __vehName(self, vehCompDescr):
        # Descriptors are signed: negative means withdrawn, so resolve by abs().
        return getUserName(vehicles_core.getVehicleType(abs(vehCompDescr)))

    def format(self, message, *args):
        data = message.data
        text = []
        if data.get('inscriptions'):
            text.append(i18n.makeString('#messenger:serviceChannelMessages/sysMsg/converter/inscriptions'))
        if data.get('emblems'):
            text.append(i18n.makeString('#messenger:serviceChannelMessages/sysMsg/converter/emblems'))
        if data.get('camouflages'):
            text.append(i18n.makeString('#messenger:serviceChannelMessages/sysMsg/converter/camouflages'))
        vehicles = data.get('vehicles')
        if vehicles:
            # Positive descriptors are received vehicles, negative are withdrawn.
            vehiclesReceived = [ self.__vehName(cd) for cd in vehicles if cd > 0 and g_itemsCache.items.doesVehicleExist(cd) ]
            if len(vehiclesReceived):
                text.append(self.__i18nValue('vehicles', True, vehicles=', '.join(vehiclesReceived)))
            vehiclesWithdrawn = [ self.__vehName(cd) for cd in vehicles if cd < 0 and g_itemsCache.items.doesVehicleExist(abs(cd)) ]
            if len(vehiclesWithdrawn):
                text.append(self.__i18nValue('vehicles', False, vehicles=', '.join(vehiclesWithdrawn)))
        slots = data.get('slots')
        if slots:
            text.append(self.__i18nValue('slots', slots > 0, slots=BigWorld.wg_getIntegralFormat(abs(slots))))
        gold = data.get('gold')
        if gold:
            text.append(self.__i18nValue('gold', gold > 0, gold=BigWorld.wg_getGoldFormat(abs(gold))))
        accCredits = data.get('credits')
        if accCredits:
            text.append(self.__i18nValue('credits', accCredits > 0, credits=BigWorld.wg_getIntegralFormat(abs(accCredits))))
        freeXP = data.get('freeXP')
        if freeXP:
            text.append(self.__i18nValue('freeXP', freeXP > 0, freeXP=BigWorld.wg_getIntegralFormat(abs(freeXP))))
        formatted = g_settings.msgTemplates.format('ConverterNotify', {'text': '<br/>'.join(text)})
        return (formatted, self._getGuiSettings(message, 'ConverterNotify'))
class ClientSysMessageFormatter(ServiceChannelFormatter):
    """Formats client-generated system messages.

    *args carries an optional tuple whose first element is the message type
    ('Error' by default) used to select the '<type>SysMessage' template, and
    whose second element may override the priority level.
    """
    __templateKey = '%sSysMessage'

    def format(self, data, *args):
        if len(args):
            msgType = args[0][0]
        else:
            msgType = 'Error'
        templateKey = self.__templateKey % msgType
        formatted = g_settings.msgTemplates.format(templateKey, ctx={'text': data})
        return (formatted, self._getGuiSettings(args, templateKey))

    def _getGuiSettings(self, data, key = None, priorityLevel = None):
        """Build NotificationGuiSettings from the aux-args tuple; falls back
        to the template's own priority when none is supplied."""
        if type(data) is types.TupleType and len(data):
            auxData = data[0][:]
            if len(data[0]) > 1 and priorityLevel is None:
                priorityLevel = data[0][1]
        else:
            auxData = []
        if priorityLevel is None:
            priorityLevel = g_settings.msgTemplates.priority(key)
        return NotificationGuiSettings(self.isNotify(), priorityLevel=priorityLevel, auxData=auxData)
class PremiumAccountExpiryFormatter(ClientSysMessageFormatter):
    """Formats the 'premium account is about to expire' client message;
    *data* is the expiry timestamp."""

    def format(self, data, *args):
        formatted = g_settings.msgTemplates.format('durationOfPremiumAccountExpires', ctx={'expiryTime': text_styles.titleFont(TimeFormatter.getLongDatetimeFormat(data))})
        return (formatted, self._getGuiSettings(args, 'durationOfPremiumAccountExpires'))
class AOGASNotifyFormatter(ClientSysMessageFormatter):
    """Formats anti-addiction (AOGAS) notifications; *data* is an enum-like
    object whose name() selects the '#AOGAS:' i18n key."""

    def format(self, data, *args):
        formatted = g_settings.msgTemplates.format('AOGASNotify', {'text': i18n.makeString('#AOGAS:{0:s}'.format(data.name()))})
        return (formatted, self._getGuiSettings(args, 'AOGASNotify'))
class VehicleTypeLockExpired(ServiceChannelFormatter):
    """Formats the 'vehicle lock expired' message (single vehicle or all)."""

    def format(self, message, *args):
        result = (None, None)
        if message.data:
            ctx = {}
            vehTypeCompDescr = message.data.get('vehTypeCompDescr')
            if vehTypeCompDescr is None:
                templateKey = 'vehiclesAllLockExpired'
            else:
                templateKey = 'vehicleLockExpired'
                ctx['vehicleName'] = getUserName(vehicles_core.getVehicleType(vehTypeCompDescr))
            formatted = g_settings.msgTemplates.format(templateKey, ctx=ctx)
            # NOTE(review): GUI settings always use the 'vehicleLockExpired'
            # key even for the all-vehicles template -- confirm intentional.
            result = (formatted, self._getGuiSettings(message, 'vehicleLockExpired'))
        return result
class ServerDowntimeCompensation(ServiceChannelFormatter):
    """Formats the server-downtime compensation message, listing every
    truthy subject key from the message data."""
    __templateKey = 'serverDowntimeCompensation'

    def format(self, message, *args):
        data = message.data
        if data is None:
            return (None, None)
        prefix = '#%s:serviceChannelMessages/' % MESSENGER_I18N_FILE
        names = []
        for key, value in data.items():
            if value:
                names.append(i18n.makeString(prefix + self.__templateKey + '/' + key))
        if not names:
            return (None, None)
        subjects = ', '.join(names)
        formatted = g_settings.msgTemplates.format(self.__templateKey, ctx={'text': i18n.makeString(prefix + self.__templateKey) % subjects})
        return (formatted, self._getGuiSettings(message, self.__templateKey))
class ActionNotificationFormatter(ClientSysMessageFormatter):
    """Formats action-state notifications; the template is chosen from the
    message's 'state' and the icon from its 'type'."""
    __templateKey = 'action%s'

    def format(self, message, *args):
        result = (None, None)
        data = message.get('data')
        if data:
            templateKey = self.__templateKey % message.get('state', '')
            formatted = g_settings.msgTemplates.format(templateKey, ctx={'text': data}, data={'icon': message.get('type', '')})
            result = (formatted, self._getGuiSettings(args, templateKey))
        return result
class BattleTutorialResultsFormatter(ClientSysMessageFormatter):
    """Formats battle-tutorial result messages, summing chapter bonuses.

    Uses the with-bonuses template unless every bonus was already received
    and nothing new was earned this time.
    """
    __resultKeyWithBonuses = 'battleTutorialResBonuses'
    __resultKeyWoBonuses = 'battleTutorialResWoBonuses'

    def isNotify(self):
        return True

    def format(self, data, *args):
        LOG_DEBUG('message data', data)
        finishReason = data.get('finishReason', -1)
        resultKey = data.get('resultKey', None)
        finishKey = data.get('finishKey', None)
        if finishReason > -1 and resultKey and finishKey:
            resultString = i18n.makeString('#{0:s}:serviceChannelMessages/battleTutorial/results/{1:s}'.format(MESSENGER_I18N_FILE, resultKey))
            reasonString = i18n.makeString('#{0:s}:serviceChannelMessages/battleTutorial/reasons/{1:s}'.format(MESSENGER_I18N_FILE, finishKey))
            arenaTypeID = data.get('arenaTypeID', 0)
            arenaName = 'N/A'
            if arenaTypeID > 0:
                arenaName = ArenaType.g_cache[arenaTypeID].name
            vTypeCD = data.get('vTypeCD', None)
            vName = 'N/A'
            if vTypeCD is not None:
                vName = getUserName(vehicles_core.getVehicleType(vTypeCD))
            ctx = {'result': resultString,
             'reason': reasonString,
             'arenaName': i18n.makeString(arenaName),
             'vehicleName': vName,
             'freeXP': '0',
             'credits': '0'}
            # Sum the bonuses of every chapter actually received.
            freeXP = 0
            credits_ = 0
            chapters = data.get('chapters', [])
            for chapter in chapters:
                if chapter.get('received', False):
                    bonus = chapter.get('bonus', {})
                    freeXP += bonus.get('freeXP', 0)
                    credits_ += bonus.get('credits', 0)
            if freeXP:
                ctx['freeXP'] = BigWorld.wg_getIntegralFormat(freeXP)
            if credits_:
                ctx['credits'] = BigWorld.wg_getIntegralFormat(credits_)
            all_ = data.get('areAllBonusesReceived', False)
            if all_ and credits_ <= 0 and freeXP <= 0:
                key = self.__resultKeyWoBonuses
            else:
                key = self.__resultKeyWithBonuses
            startedAtTime = data.get('startedAt', time.time())
            formatted = g_settings.msgTemplates.format(key, ctx=ctx, data={'timestamp': startedAtTime,
             'savedData': data.get('arenaUniqueID', 0)})
            return (formatted, self._getGuiSettings(args, key))
        else:
            return (None, None)
        return
class TokenQuestsFormatter(WaitItemsSyncFormatter):
    """Formats token-quest reward messages once the items cache is synced.

    asBattleFormatter suppresses the money/xp/achievement lines that a
    battle-results formatter already reports elsewhere.
    """

    def __init__(self, asBattleFormatter = False):
        super(TokenQuestsFormatter, self).__init__()
        self._asBattleFormatter = asBattleFormatter

    @async
    @process
    def format(self, message, callback):
        """Asynchronously format the quest rewards; fires *callback* with a
        (formatted, settings) pair (both None when nothing to show)."""
        yield lambda callback: callback(True)
        isSynced = yield self._waitForSyncItems()
        formatted, settings = (None, None)
        if isSynced:
            data = message.data or {}
            completedQuestIDs = data.get('completedQuestIDs', set())
            fmt = self._formatQuestAchieves(message)
            if fmt is not None:
                settings = self._getGuiSettings(message, self._getTemplateName(completedQuestIDs))
                formatted = g_settings.msgTemplates.format(self._getTemplateName(completedQuestIDs), {'achieves': fmt})
        callback((formatted, settings))
        return

    def _getTemplateName(self, completedQuestIDs = set()):
        # NOTE: the mutable default set() is only read, never mutated.
        # Potapov quests use their own template.
        if len(completedQuestIDs):
            for qID in completedQuestIDs:
                if potapov_quests.g_cache.isPotapovQuest(qID):
                    return 'potapovQuests'
        return 'tokenQuests'

    def _formatQuestAchieves(self, message):
        """Build the '<br/>'-joined list of reward lines; None when empty."""
        data = message.data
        result = []
        if not self._asBattleFormatter:
            gold = data.get('gold', 0)
            if gold:
                result.append(self.__makeQuestsAchieve('battleQuestsGold', gold=BigWorld.wg_getIntegralFormat(gold)))
        premium = data.get('premium', 0)
        if premium:
            result.append(self.__makeQuestsAchieve('battleQuestsPremium', days=premium))
        if not self._asBattleFormatter:
            freeXP = data.get('freeXP', 0)
            if freeXP:
                result.append(self.__makeQuestsAchieve('battleQuestsFreeXP', freeXP=BigWorld.wg_getIntegralFormat(freeXP)))
        vehiclesList = data.get('vehicles', [])
        for vehiclesData in vehiclesList:
            if vehiclesData is not None and len(vehiclesData) > 0:
                # Reuse the invoice formatter's vehicle/compensation rendering.
                msg = InvoiceReceivedFormatter._getVehiclesString(vehiclesData, htmlTplPostfix='QuestsReceived')
                if len(msg):
                    result.append(msg)
                comptnStr = InvoiceReceivedFormatter._getComptnString(vehiclesData, htmlTplPostfix='QuestsReceived')
                if len(comptnStr):
                    result.append('<br/>' + comptnStr)
        if not self._asBattleFormatter:
            creditsVal = data.get('credits', 0)
            if creditsVal:
                result.append(self.__makeQuestsAchieve('battleQuestsCredits', credits=BigWorld.wg_getIntegralFormat(creditsVal)))
        slots = data.get('slots', 0)
        if slots:
            result.append(self.__makeQuestsAchieve('battleQuestsSlots', slots=BigWorld.wg_getIntegralFormat(slots)))
        items = data.get('items', {})
        itemsNames = []
        for intCD, count in items.iteritems():
            itemDescr = vehicles_core.getDictDescr(intCD)
            itemsNames.append(i18n.makeString('#messenger:serviceChannelMessages/battleResults/quests/items/name', name=itemDescr['userString'], count=BigWorld.wg_getIntegralFormat(count)))
        if len(itemsNames):
            result.append(self.__makeQuestsAchieve('battleQuestsItems', names=', '.join(itemsNames)))
        _extendCustomizationData(data, result)
        berths = data.get('berths', 0)
        if berths:
            result.append(self.__makeQuestsAchieve('battleQuestsBerths', berths=BigWorld.wg_getIntegralFormat(berths)))
        tmen = data.get('tankmen', {})
        if tmen is not None and len(tmen) > 0:
            result.append(InvoiceReceivedFormatter._getTankmenString(tmen))
        goodies = data.get('goodies', {})
        if goodies is not None and len(goodies) > 0:
            strGoodies = InvoiceReceivedFormatter._getGoodiesString(goodies)
            if strGoodies:
                result.append(strGoodies)
        if not self._asBattleFormatter:
            achieves = data.get('popUpRecords', [])
            achievesNames = set()
            for recordIdx, value in achieves:
                factory = getAchievementFactory(DB_ID_TO_RECORD[recordIdx])
                if factory is not None:
                    a = factory.create(value=int(value))
                    if a is not None:
                        achievesNames.add(a.getUserName())
            if len(achievesNames):
                result.append(self.__makeQuestsAchieve('battleQuestsPopUps', achievements=', '.join(achievesNames)))
        if len(result):
            return '<br/>'.join(result)
        else:
            return

    @classmethod
    def __makeQuestsAchieve(cls, key, **kwargs):
        # Thin wrapper over the HTML template table.
        return g_settings.htmlTemplates.format(key, kwargs)
class NCMessageFormatter(ServiceChannelFormatter):
    """Formats messages pushed from the notification center.

    The payload is a compressed dict with 'type', 'priority', 'topic',
    'body', optional 'context' substitutions and, for polls, a 'link'.
    """
    __templateKeyFormat = 'notificationsCenterMessage_{0}'

    def format(self, message, *args):
        LOG_DEBUG('Message has received from notification center', message)
        data = z_loads(message.data)
        if not data:
            return (None, None)
        elif 'body' not in data or not data['body']:
            return (None, None)
        else:
            templateKey = self.__getTemplateKey(data)
            priority = self.__getGuiPriority(data)
            topic = self.__getTopic(data)
            body = self.__getBody(data)
            settings = self._getGuiSettings(message, templateKey, priority)
            msgType = data['type']
            if msgType == NC_MESSAGE_TYPE.POLL:
                # Polls are only shown when enabled and carrying a valid link.
                if not GUI_SETTINGS.isPollEnabled:
                    return (None, None)
                if not self.__fetchPollData(data, settings):
                    return (None, None)
            formatted = g_settings.msgTemplates.format(templateKey, ctx={'topic': topic,
             'body': body})
            return (formatted, settings)

    def __getTemplateKey(self, data):
        # Fall back to INFO for missing or out-of-range message types.
        if 'type' in data:
            msgType = data['type']
            if msgType not in NC_MESSAGE_TYPE.RANGE:
                LOG_WARNING('Type of message is not valid, uses default type', msgType)
                msgType = NC_MESSAGE_TYPE.INFO
        else:
            msgType = NC_MESSAGE_TYPE.INFO
        return self.__templateKeyFormat.format(msgType)

    def __getGuiPriority(self, data):
        # Validate the NC priority and convert it to the GUI priority scale.
        priority = NC_MESSAGE_PRIORITY.DEFAULT
        if 'priority' in data:
            priority = data['priority']
            if priority not in NC_MESSAGE_PRIORITY.ORDER:
                LOG_WARNING('Priority of message is not valid, uses default priority', priority)
                priority = NC_MESSAGE_PRIORITY.DEFAULT
        return NotificationPriorityLevel.convertFromNC(priority)

    def __getTopic(self, data):
        topic = ''
        if 'topic' in data:
            topic = i18n.encodeUtf8(data['topic'])
            if len(topic):
                topic = g_settings.htmlTemplates.format('notificationsCenterTopic', ctx={'topic': topic})
        return topic

    def __getBody(self, data):
        # The body may contain %-placeholders filled from 'context'.
        body = i18n.encodeUtf8(data['body'])
        if 'context' in data:
            body = body % self.__formatContext(data['context'])
        return body

    def __fetchPollData(self, data, settings):
        """Stash the poll link and topic into settings.auxData; returns
        whether a usable link was present."""
        result = False
        if 'link' in data and data['link']:
            if 'topic' in data:
                topic = i18n.encodeUtf8(data['topic'])
            else:
                topic = ''
            settings.auxData = [data['link'], topic]
            result = True
        return result

    def __formatContext(self, ctx):
        """Convert the raw context dict of (type, value) pairs into display
        strings; invalid items are logged and stringified as-is."""
        result = {}
        if type(ctx) is not types.DictType:
            LOG_ERROR('Context is invalid', ctx)
            return result
        getItemFormat = NCContextItemFormatter.getItemFormat
        for key, item in ctx.iteritems():
            if len(item) > 1:
                itemType, itemValue = item[0:2]
                result[key] = getItemFormat(itemType, itemValue)
            else:
                LOG_ERROR('Context item is invalid', item)
                result[key] = str(item)
        return result
class ClanMessageFormatter(ServiceChannelFormatter):
    """Formats clan-event system messages (currently only LEFT_CLAN)."""
    __templates = {SYS_MESSAGE_CLAN_EVENT.LEFT_CLAN: 'clanMessageWarning'}

    def format(self, message, *args):
        LOG_DEBUG('Message has received from clan', message)
        data = message.data
        if data and 'event' in data:
            event = data['event']
            templateKey = self.__templates.get(event)
            fullName = getClanFullName(passCensor(data['clanName']), passCensor(data['clanAbbrev']))
            # FIX: do not shadow the *message* parameter with the localized
            # text -- previously _getGuiSettings received a plain string
            # instead of the original message object.
            text = i18n.makeString('#messenger:serviceChannelMessages/clan/%s' % SYS_MESSAGE_CLAN_EVENT_NAMES[event])
            formatted = g_settings.msgTemplates.format(templateKey, ctx={'message': text,
             'fullClanName': fullName})
            settings = self._getGuiSettings(message, templateKey)
            return (formatted, settings)
        else:
            return (None, None)
class FortMessageFormatter(ServiceChannelFormatter):
    """Formats fortification (stronghold) service-channel messages.

    Each SYS_MESSAGE_FORT_EVENT code is dispatched to a dedicated formatter
    method that builds the localized message text; the text is then wrapped
    into the template chosen for that event.
    """
    # Events rendered with a non-default template; everything else falls
    # back to DEFAULT_WARNING.
    __templates = {SYS_MESSAGE_FORT_EVENT.DEF_HOUR_SHUTDOWN: 'fortHightPriorityMessageWarning',
     SYS_MESSAGE_FORT_EVENT.BASE_DESTROYED: 'fortHightPriorityMessageWarning',
     SYS_MESSAGE_FORT_EVENT.RESERVE_ACTIVATED: 'fortReserveActivatedMessage'}
    DEFAULT_WARNING = 'fortMessageWarning'

    def __init__(self):
        super(FortMessageFormatter, self).__init__()
        # BoundMethodWeakref keeps the dispatch table from holding strong
        # references back to self.
        self.__messagesFormatters = {SYS_MESSAGE_FORT_EVENT.FORT_READY: BoundMethodWeakref(self._simpleMessage),
         SYS_MESSAGE_FORT_EVENT.DEF_HOUR_SHUTDOWN: BoundMethodWeakref(self._simpleMessage),
         SYS_MESSAGE_FORT_EVENT.RESERVE_ACTIVATED: BoundMethodWeakref(self._reserveActivatedMessage),
         SYS_MESSAGE_FORT_EVENT.RESERVE_EXPIRED: BoundMethodWeakref(self._reserveExpiredMessage),
         SYS_MESSAGE_FORT_EVENT.RESERVE_PRODUCED: BoundMethodWeakref(self._reserveProducedMessage),
         SYS_MESSAGE_FORT_EVENT.STORAGE_OVERFLOW: BoundMethodWeakref(self._storageOverflowMessage),
         SYS_MESSAGE_FORT_EVENT.ORDER_CANCELED: BoundMethodWeakref(self._orderCanceledMessage),
         SYS_MESSAGE_FORT_EVENT.REATTACHED_TO_BASE: BoundMethodWeakref(self._reattachedToBaseMessage),
         SYS_MESSAGE_FORT_EVENT.DEF_HOUR_ACTIVATED: BoundMethodWeakref(self._defHourManipulationMessage),
         SYS_MESSAGE_FORT_EVENT.DEF_HOUR_CHANGED: BoundMethodWeakref(self._defHourManipulationMessage),
         SYS_MESSAGE_FORT_EVENT.OFF_DAY_ACTIVATED: BoundMethodWeakref(self._offDayActivatedMessage),
         SYS_MESSAGE_FORT_EVENT.VACATION_STARTED: BoundMethodWeakref(self._vacationActivatedMessage),
         SYS_MESSAGE_FORT_EVENT.VACATION_FINISHED: BoundMethodWeakref(self._vacationFinishedMessage),
         SYS_MESSAGE_FORT_EVENT.PERIPHERY_CHANGED: BoundMethodWeakref(self._peripheryChangedMessage),
         SYS_MESSAGE_FORT_EVENT.BUILDING_DAMAGED: BoundMethodWeakref(self._buildingDamagedMessage),
         SYS_MESSAGE_FORT_EVENT.BASE_DESTROYED: BoundMethodWeakref(self._simpleMessage),
         SYS_MESSAGE_FORT_EVENT.ORDER_COMPENSATED: BoundMethodWeakref(self._orderCompensationMessage),
         SYS_MESSAGE_FORT_EVENT.ATTACK_PLANNED: BoundMethodWeakref(self._attackPlannedMessage),
         SYS_MESSAGE_FORT_EVENT.DEFENCE_PLANNED: BoundMethodWeakref(self._defencePlannedMessage),
         SYS_MESSAGE_FORT_EVENT.BATTLE_DELETED: BoundMethodWeakref(self._battleDeletedMessage),
         SYS_MESSAGE_FORT_EVENT.SPECIAL_ORDER_EXPIRED: BoundMethodWeakref(self._specialReserveExpiredMessage),
         SYS_MESSAGE_FORT_EVENT.RESOURCE_SET: BoundMethodWeakref(self._resourceSetMessage),
         SYS_MESSAGE_FORT_EVENT.RESERVE_SET: BoundMethodWeakref(self._reserveSetMessage),
         SYS_MESSAGE_FORT_EVENT.FORT_GOT_8_LEVEL: BoundMethodWeakref(self._fortGotLvlMessage),
         SYS_MESSAGE_FORT_EVENT.BATTLE_DELETED_LEVEL: BoundMethodWeakref(self._battleDeletedLevelMessage)}

    def format(self, message, *args):
        """Dispatch on data['event']; return (formatted, settings) or (None, None)."""
        LOG_DEBUG('Message has received from fort', message)
        data = message.data
        if data and 'event' in data:
            event = data['event']
            templateKey = self.__templates.get(event, self.DEFAULT_WARNING)
            formatter = self.__messagesFormatters.get(event)
            if formatter is not None:
                messageSting = formatter(data)
                formatted = g_settings.msgTemplates.format(templateKey, ctx={'message': messageSting})
                settings = self._getGuiSettings(message, templateKey)
                return (formatted, settings)
            LOG_WARNING('FortMessageFormatter has no available formatters for given message type: ', event)
        return (None, None)

    def _buildMessage(self, event, ctx = None):
        # Look up the i18n pattern for the event and interpolate ctx into it.
        if ctx is None:
            ctx = {}
        return i18n.makeString(('#messenger:serviceChannelMessages/fort/%s' % SYS_MESSAGE_FORT_EVENT_NAMES[event]), **ctx)

    def _simpleMessage(self, data):
        # Event needs no context at all.
        return self._buildMessage(data['event'])

    def _peripheryChangedMessage(self, data):
        return self._buildMessage(data['event'], {'peripheryName': g_preDefinedHosts.periphery(data['peripheryID']).name})

    def _reserveActivatedMessage(self, data):
        event = data['event']
        orderTypeID = data['orderTypeID']
        expirationTime = data['timeExpiration']
        order = text_styles.neutral(fort_fmts.getOrderUserString(orderTypeID))
        # Permanent reserves get a dedicated string with no expiration time.
        if event == SYS_MESSAGE_FORT_EVENT.RESERVE_ACTIVATED and FORT_ORDER_TYPE.isOrderPermanent(orderTypeID):
            return i18n.makeString(MESSENGER.SERVICECHANNELMESSAGES_FORT_PERMANENT_RESERVE_ACTIVATED, order=order)
        return self._buildMessage(event, {'order': order,
         'timeLeft': time_utils.getTillTimeString(time_utils.getTimeDeltaFromNow(expirationTime), MENU.TIME_TIMEVALUEWITHSECS)})

    def _reserveExpiredMessage(self, data):
        return self._buildMessage(data['event'], {'order': fort_fmts.getOrderUserString(data['orderTypeID'])})

    def _reserveProducedMessage(self, data):
        return self._buildMessage(data['event'], {'order': fort_fmts.getOrderUserString(data['orderTypeID']),
         'count': data['count']})

    def _storageOverflowMessage(self, data):
        return self._buildMessage(data['event'], {'building': fort_fmts.getBuildingUserString(data['buildTypeID'])})

    def _orderCanceledMessage(self, data):
        # Local import mirrors the original: fortified_regions is only
        # needed here, to resolve the order type of the building.
        import fortified_regions
        buildTypeID = data['buildTypeID']
        orderTypeID = fortified_regions.g_cache.buildings[buildTypeID].orderType
        return self._buildMessage(data['event'], {'building': fort_fmts.getBuildingUserString(buildTypeID),
         'order': fort_fmts.getOrderUserString(orderTypeID)})

    def _reattachedToBaseMessage(self, data):
        return self._buildMessage(data['event'], {'building': fort_fmts.getBuildingUserString(FORT_BUILDING_TYPE.MILITARY_BASE)})

    def _defHourManipulationMessage(self, data):
        # Side effect: the first defence-hour activation flags the roster
        # intro window so the defence-start hint is shown.
        if data.get('event') == SYS_MESSAGE_FORT_EVENT.DEF_HOUR_ACTIVATED:
            from gui.shared.fortifications.settings import MUST_SHOW_DEFENCE_START
            from gui.shared.fortifications.fort_helpers import setRosterIntroWindowSetting
            setRosterIntroWindowSetting(MUST_SHOW_DEFENCE_START)
        return self._buildMessage(data['event'], {'defenceHour': fort_fmts.getDefencePeriodString(time_utils.getTimeTodayForUTC(data['defenceHour']))})

    def _fortGotLvlMessage(self, data):
        # Side effect: flags the fort-upgrade intro window on level 8.
        if data.get('event') == SYS_MESSAGE_FORT_EVENT.FORT_GOT_8_LEVEL:
            from gui.shared.fortifications.settings import MUST_SHOW_FORT_UPGRADE
            from gui.shared.fortifications.fort_helpers import setRosterIntroWindowSetting
            setRosterIntroWindowSetting(MUST_SHOW_FORT_UPGRADE)
        return self._simpleMessage(data)

    def _offDayActivatedMessage(self, data):
        offDay = data['offDay']
        if offDay == NOT_ACTIVATED:
            return i18n.makeString(MESSENGER.SERVICECHANNELMESSAGES_FORT_NO_OFF_DAY_ACTIVATED)
        # The off day is stored server-side; convert to the local timezone
        # using the defence hour when it is available.
        if 'defenceHour' in data:
            from gui.shared.fortifications.fort_helpers import adjustOffDayToLocal, adjustDefenceHourToLocal
            offDayLocal = adjustOffDayToLocal(offDay, adjustDefenceHourToLocal(data['defenceHour'])[0])
        else:
            LOG_WARNING('_offDayActivatedMessage: received incorrect data, using offDay without adjustment... ', data)
            offDayLocal = offDay
        return self._buildMessage(data['event'], {'offDay': fort_fmts.getDayOffString(offDayLocal)})

    def _vacationActivatedMessage(self, data):
        return self._buildMessage(data['event'], {'finish': BigWorld.wg_getShortDateFormat(data['timeEnd'])})

    def _vacationFinishedMessage(self, data):
        return self._buildMessage(data['event'])

    def _buildingDamagedMessage(self, data):
        buildTypeID = data['buildTypeID']
        # The military base has its own, building-specific message key.
        if buildTypeID == FORT_BUILDING_TYPE.MILITARY_BASE:
            return i18n.makeString('#messenger:serviceChannelMessages/fort/{0}_{1}'.format(SYS_MESSAGE_FORT_EVENT_NAMES[data['event']], FORT_BUILDING_TYPE_NAMES[FORT_BUILDING_TYPE.MILITARY_BASE]))
        return self._buildMessage(data['event'], {'building': fort_fmts.getBuildingUserString(buildTypeID)})

    def _orderCompensationMessage(self, data):
        return self._buildMessage(data['event'], {'orderTypeName': fort_fmts.getOrderUserString(data['orderTypeID'])})

    def _attackPlannedMessage(self, data):
        return self._buildMessage(data['event'], {'clan': getClanAbbrevString(data['defenderClanAbbrev']),
         'date': BigWorld.wg_getShortDateFormat(data['timeAttack']),
         'time': BigWorld.wg_getShortTimeFormat(data['timeAttack'])})

    def _defencePlannedMessage(self, data):
        return self._buildMessage(data['event'], {'clan': getClanAbbrevString(data['attackerClanAbbrev']),
         'date': BigWorld.wg_getShortDateFormat(data['timeAttack']),
         'time': BigWorld.wg_getShortTimeFormat(data['timeAttack'])})

    def _battleDeletedMessage(self, data):
        return self._buildMessage(data['event'], {'clan': getClanAbbrevString(data['enemyClanAbbrev'])})

    def _battleDeletedLevelMessage(self, data):
        return self._buildMessage(data['event'], {'clan': getClanAbbrevString(data['enemyClanAbbrev']),
         'date': BigWorld.wg_getShortDateFormat(data['timeAttack']),
         'time': BigWorld.wg_getShortTimeFormat(data['timeAttack'])})

    def _specialReserveExpiredMessage(self, data):
        # resBonus is an (earned, deducted) pair; the optional ADDITIONAL
        # fragment spells out both components when something was deducted.
        resInc, resDec = data['resBonus']
        resTotal = resInc - resDec
        orderTypeID = data['orderTypeID']
        messageKey = '#messenger:serviceChannelMessages/fort/SPECIAL_ORDER_EXPIRED_%s' % constants.FORT_ORDER_TYPE_NAMES[orderTypeID]
        additional = ''
        if resDec:
            additional = i18n.makeString('%s_ADDITIONAL' % messageKey, resInc=BigWorld.wg_getIntegralFormat(resInc), resDec=BigWorld.wg_getIntegralFormat(resDec))
        return i18n.makeString(messageKey, additional=additional, resTotal=BigWorld.wg_getIntegralFormat(resTotal))

    def _resourceSetMessage(self, data):
        # Sign of the delta decides earned vs. withdrawn wording.
        try:
            resourceDelta = data['resourceDelta']
            if resourceDelta > 0:
                messageKey = MESSENGER.SERVICECHANNELMESSAGES_FORT_PROM_RESOURCE_EARNED
            else:
                messageKey = MESSENGER.SERVICECHANNELMESSAGES_FORT_PROM_RESOURCE_WITHDRAWN
            return i18n.makeString(messageKey, promresource=abs(resourceDelta))
        except:
            LOG_CURRENT_EXCEPTION()

    def _reserveSetMessage(self, data):
        try:
            reserveDelta = data['reserveDelta']
            if reserveDelta > 0:
                messageKey = MESSENGER.SERVICECHANNELMESSAGES_FORT_RESERVES_EARNED
            else:
                messageKey = MESSENGER.SERVICECHANNELMESSAGES_FORT_RESERVES_WITHDRAWN
            return i18n.makeString(messageKey, reserves=abs(reserveDelta))
        except:
            LOG_CURRENT_EXCEPTION()
class FortBattleResultsFormatter(ServiceChannelFormatter):
    """Formats the end-of-battle result message for fort battles."""
    # isWinner code -> message template name.
    __battleResultKeys = {-1: 'battleFortDefeatResult',
     0: 'battleFortDrawGameResult',
     1: 'battleFortVictoryResult'}

    def isNotify(self):
        return True

    def format(self, message, *args):
        battleResult = message.data
        if battleResult:
            enemyClanAbbrev = battleResult.get('enemyClanName', '')
            winnerCode = battleResult['isWinner']
            # A technical draw is presented (and written back) as a defeat.
            if winnerCode == 0 and battleResult['attackResult'] == FORT_ATTACK_RESULT.TECHNICAL_DRAW:
                winnerCode = -1
                battleResult['isWinner'] = winnerCode
            resourceKey = 'fortResourceCaptureByClan' if winnerCode > 0 else 'fortResourceLostByClan'
            ctx = {'enemyClanAbbrev': getClanAbbrevString(enemyClanAbbrev),
             'resourceClan': BigWorld.wg_getIntegralFormat(battleResult.get(resourceKey, 0)),
             'resourcePlayer': BigWorld.wg_getIntegralFormat(battleResult.get('fortResource', 0))}
            ctx['achieves'] = self._makeAchievementsString(battleResult)
            templateName = self.__battleResultKeys[winnerCode]
            settings = self._getGuiSettings(message, templateName)
            settings.setCustomEvent(MsgCustomEvents.FORT_BATTLE_FINISHED, battleResult.get('battleID'))
            # The raw battle result is saved so the click handler can open
            # the detailed results window.
            formatted = g_settings.msgTemplates.format(templateName, ctx=ctx, data={'savedData': {'battleResult': battleResult}})
            return (formatted, settings)
        else:
            return (None, None)

    @classmethod
    def _makeAchievementsString(cls, battleResult):
        # Build the comma-separated achievements fragment ('' when none).
        result = []
        for recordIdx, value in battleResult.get('popUpRecords', []):
            recordName = DB_ID_TO_RECORD[recordIdx]
            if recordName in IGNORED_BY_BATTLE_RESULTS:
                continue
            achieve = getAchievementFactory(recordName).create(value=value)
            if achieve is not None and not achieve.isApproachable():
                result.append(achieve)
        # Mark of mastery is stored separately from popUpRecords.
        if 'markOfMastery' in battleResult and battleResult['markOfMastery'] > 0:
            achieve = getAchievementFactory((ACHIEVEMENT_BLOCK.TOTAL, 'markOfMastery')).create(value=battleResult['markOfMastery'])
            if achieve is not None:
                result.append(achieve)
        res = ''
        if len(result):
            res = g_settings.htmlTemplates.format('battleResultAchieves', {'achieves': ', '.join(map(lambda a: a.getUserName(), sorted(result)))})
        return res
class FortBattleRoundEndFormatter(ServiceChannelFormatter):
    """Formats the end-of-round (technical result) message for fort combats."""
    # isWinner code -> template; a draw (0) is folded into defeat (-1) below.
    __battleResultKeys = {-1: 'combatFortTechDefeatResult',
     1: 'combatFortTechVictoryResult'}

    def isNotify(self):
        return True

    def format(self, message, *args):
        battleResult = message.data
        if battleResult is not None:
            ctx = {}
            winnerCode = battleResult['isWinner']
            if winnerCode == 0:
                winnerCode = -1
            templateName = self.__battleResultKeys[winnerCode]
            settings = self._getGuiSettings(message, templateName)
            if 'combats' in battleResult:
                # Only the building info (last two fields) is used here.
                _, _, _, isDefendersBuilding, buildTypeID = battleResult['combats']
                # Whose building was fought over decides which clan name
                # is shown next to it.
                if battleResult['isDefence'] is isDefendersBuilding:
                    buildOwnerClanAbbrev = battleResult['ownClanName']
                else:
                    buildOwnerClanAbbrev = battleResult['enemyClanName']
                ctx['fortBuilding'] = g_settings.htmlTemplates.format('battleResultFortBuilding', ctx={'fortBuilding': FortBuilding(typeID=buildTypeID).userName,
                 'clanAbbrev': '[%s]' % buildOwnerClanAbbrev})
            else:
                ctx['fortBuilding'] = ''
            formatted = g_settings.msgTemplates.format(templateName, ctx=ctx)
            return (formatted, settings)
        else:
            return (None, None)
class FortBattleInviteFormatter(ServiceChannelFormatter):
    """Formats a fort-battle invitation as a service-channel notification."""

    def isNotify(self):
        return True

    @prbInvitesProperty
    def prbInvites(self):
        # Injected by the prbInvitesProperty decorator.
        return None

    def format(self, message, *args):
        # Local imports avoid a circular dependency with the notification
        # and prebattle packages.
        from notification.settings import NOTIFICATION_BUTTON_STATE
        from gui.prb_control.formatters.invites import PrbFortBattleInviteHtmlTextFormatter
        battleData = message.data
        if battleData and g_lobbyContext.getServerSettings().isFortsEnabled():
            # Wrap the raw battle data in a fake invite so the regular
            # invite formatter/acceptance machinery can be reused.
            inviteWrapper = self.__toFakeInvite(battleData)
            formatter = PrbFortBattleInviteHtmlTextFormatter()
            submitState = NOTIFICATION_BUTTON_STATE.VISIBLE
            if self.prbInvites.canAcceptInvite(inviteWrapper):
                submitState |= NOTIFICATION_BUTTON_STATE.ENABLED
            msgType = 'fortBattleInvite'
            battleID = battleData.get('battleID')
            formatted = g_settings.msgTemplates.format(msgType, ctx={'text': formatter.getText(inviteWrapper)}, data={'timestamp': _getTimeStamp(message),
             'buttonsStates': {'submit': submitState},
             'savedData': {'battleID': battleID,
              'peripheryID': battleData.get('peripheryID'),
              'battleFinishTime': time_utils.getTimestampFromUTC(message.finishedAt.timetuple())},
             'icon': formatter.getIconName(inviteWrapper)})
            guiSettings = self._getGuiSettings(message, msgType)
            guiSettings.setCustomEvent(MsgCustomEvents.FORT_BATTLE_INVITE, battleID)
            return (formatted, guiSettings)
        else:
            return (None, None)

    @classmethod
    def __toFakeInvite(cls, battleData):
        # Synthesize a PrbInviteWrapper addressed to the current player;
        # clientID/prebattleID of -1 mark it as not a real server invite.
        from gui.shared.ClanCache import g_clanCache
        from gui.prb_control.invites import PrbInviteWrapper
        return PrbInviteWrapper(clientID=-1, receiver=getPlayerName(), state=PREBATTLE_INVITE_STATE.ACTIVE, receiverDBID=getPlayerDatabaseID(), prebattleID=-1, receiverClanAbbrev=g_lobbyContext.getClanAbbrev(g_clanCache.clanInfo), peripheryID=battleData.get('peripheryID'), extraData=battleData, type=PREBATTLE_TYPE.FORT_BATTLE, alwaysAvailable=True)
class VehicleRentedFormatter(ServiceChannelFormatter):
    """Notifies the player that a rented vehicle has been granted."""
    _templateKey = 'vehicleRented'

    def format(self, message, *args):
        payload = message.data
        vehTypeCD = payload.get('vehTypeCD', None)
        if vehTypeCD is None:
            return (None, None)
        expiryTime = payload.get('time', None)
        text = self._getMessage(vehTypeCD, expiryTime)
        return (text, self._getGuiSettings(message, self._templateKey))

    def _getMessage(self, vehTypeCD, expiryTime):
        """Render the rented-vehicle template for the given vehicle and expiry."""
        ctx = {
         'vehicleName': getUserName(vehicles_core.getVehicleType(vehTypeCD)),
         'expiryTime': text_styles.titleFont(TimeFormatter.getLongDatetimeFormat(expiryTime))}
        return g_settings.msgTemplates.format(self._templateKey, ctx=ctx)
class RentalsExpiredFormatter(ServiceChannelFormatter):
    """Notifies the player that a vehicle rental has run out."""
    _templateKey = 'rentalsExpired'

    def format(self, message, *args):
        vehTypeCD = message.data.get('vehTypeCD', None)
        if vehTypeCD is None:
            return (None, None)
        return (self._getMessage(vehTypeCD), self._getGuiSettings(message, self._templateKey))

    def _getMessage(self, vehTypeCD):
        """Render the rental-expired template for the given vehicle."""
        vehicleName = getUserName(vehicles_core.getVehicleType(vehTypeCD))
        return g_settings.msgTemplates.format(self._templateKey, ctx={'vehicleName': vehicleName})
class RefSystemReferralBoughtVehicleFormatter(ServiceChannelFormatter):
    """Notifies the referrer that one of their referrals bought a vehicle."""

    def isNotify(self):
        return True

    def format(self, message, *args):
        settings = self._getGuiSettings(message, 'refSystemBoughtVehicle')
        formatted = g_settings.msgTemplates.format('refSystemBoughtVehicle', {'userName': message.data.get('nickName', '')})
        return (formatted, settings)
class RefSystemReferralContributedXPFormatter(WaitItemsSyncFormatter):
eventsCache = dependency.descriptor(IEventsCache)
def isNotify(self):
return True
@async
@process
def format(self, message, callback):
yield lambda callback: callback(True)
isSynced = yield self._waitForSyncItems()
if message.data and isSynced:
refSystemQuests = self.eventsCache.getHiddenQuests(lambda x: x.getType() == EVENT_TYPE.REF_SYSTEM_QUEST)
notCompleted = findFirst(lambda q: not q.isCompleted(), refSystemQuests.values())
if notCompleted:
data = message.data
settings = self._getGuiSettings(message, 'refSystemContributeXp')
formatted = g_settings.msgTemplates.format('refSystemContributeXp', {'userName': data.get('nickName', ''),
'xp': BigWorld.wg_getIntegralFormat(data.get('xp', 0))})
callback((formatted, settings))
else:
callback((None, None))
else:
callback((None, None))
return None
class RefSystemQuestsFormatter(TokenQuestsFormatter):
    """Token-quest formatter specialized for referral-system quests."""

    def _getTemplateName(self, completedQuestIDs = set()):
        # NOTE(review): the mutable default is never mutated here; kept to
        # match the base-class signature.
        return 'refSystemQuests'
class PotapovQuestsFormatter(TokenQuestsFormatter):
    """Token-quest formatter specialized for personal (Potapov) quests."""

    def _getTemplateName(self, completedQuestIDs = set()):
        # NOTE(review): the mutable default is never mutated here; kept to
        # match the base-class signature.
        return 'potapovQuests'
class GoodieFormatter(WaitItemsSyncFormatter):
    """Base formatter for booster (goodie) notifications.

    Subclasses supply the message template via _getTemplateName(); the
    result is delivered through ``callback``.
    """

    @async
    @process
    def format(self, message, callback):
        yield lambda callback: callback(True)
        isSynced = yield self._waitForSyncItems()
        if message.data and isSynced:
            goodieID = message.data.get('gid', None)
            if goodieID is not None:
                booster = g_goodiesCache.getBooster(goodieID)
                if booster is not None:
                    formatted = g_settings.msgTemplates.format(self._getTemplateName(), ctx={'boosterName': booster.userName})
                    callback((formatted, self._getGuiSettings(message, self._getTemplateName())))
                    return
            # Unknown goodie id or booster not found in the cache.
            callback((None, None))
        else:
            callback((None, None))
        return

    def _getTemplateName(self):
        # Abstract: subclasses must return their template key.
        raise NotImplementedError
class GoodieRemovedFormatter(GoodieFormatter):
    """Shown when a booster expires."""

    def _getTemplateName(self):
        return 'boosterExpired'
class GoodieDisabledFormatter(GoodieFormatter):
    """Shown when a booster is disabled."""

    def _getTemplateName(self):
        return 'boosterDisabled'
class TelecomStatusFormatter(ServiceChannelFormatter):
    """Formats telecom-provider vehicle block/unblock status messages."""

    @staticmethod
    def __getVehicleNames(vehTypeCompDescrs):
        # Resolve every compact descriptor to its user-visible name.
        itemGetter = g_itemsCache.items.getItemByCD
        return ', '.join((itemGetter(vehicleCD).userName for vehicleCD in vehTypeCompDescrs))

    def format(self, message, *args):
        formatted, settings = (None, None)
        # Malformed payloads are logged and swallowed: (None, None) is a
        # valid "do not show" result for the channel.
        try:
            template = 'telecomVehicleStatus'
            ctx = self.__getMessageContext(message.data)
            settings = self._getGuiSettings(message, template)
            formatted = g_settings.msgTemplates.format(template, ctx, data={'timestamp': time.time()})
        except:
            LOG_ERROR("Can't format telecom status message ", message)
            LOG_CURRENT_EXCEPTION()
        return (formatted, settings)

    def __getMessageContext(self, data):
        # orderStatus truthy -> vehicles were unblocked, else blocked.
        key = 'vehicleUnblocked' if data['orderStatus'] else 'vehicleBlocked'
        msgctx = {'vehicles': self.__getVehicleNames(data['vehTypeCompDescrs'])}
        ctx = {}
        # Each i18n text block (title/comment/subcomment) is localized
        # separately with the same vehicle list substituted in.
        for txtBlock in ('title', 'comment', 'subcomment'):
            ctx[txtBlock] = i18n.makeString('#system_messages:telecom/notifications/{0:s}/{1:s}'.format(key, txtBlock), **msgctx)
        return ctx
class TelecomReceivedInvoiceFormatter(InvoiceReceivedFormatter):
    """Formats the invoice shown when telecom-provider vehicles are granted."""

    @staticmethod
    def invoiceHasCrew(data):
        """Return True if any granted vehicle comes with tankmen."""
        dataEx = data.get('data', {})
        vehicles = dataEx.get('vehicles', {})
        for vehicle in vehicles:
            if vehicles[vehicle].get('tankmen', None):
                # Early exit: one crewed vehicle is enough (the original
                # kept scanning after the answer was known).
                return True
        return False

    @staticmethod
    def invoiceHasBrotherhood(data):
        """Return True if any granted tankman has the 'brotherhood' free skill."""
        dataEx = data.get('data', {})
        vehicles = dataEx.get('vehicles', {})
        for vehicle in vehicles:
            tankmens = vehicles[vehicle].get('tankmen', [])
            if not tankmens:
                # 'tankmen' may be present but None/empty; skip safely.
                continue
            for tankmen in tankmens:
                if 'brotherhood' in tankmen.get('freeSkills', []):
                    # Early exit once the skill is found.
                    return True
        return False

    def _getVehicles(self, data):
        # Return the names of rented vehicles from the invoice, or None.
        dataEx = data.get('data', {})
        if not dataEx:
            return
        else:
            vehicles = dataEx.get('vehicles', {})
            rentedVehNames = None
            if vehicles is not None and len(vehicles) > 0:
                _, _, rentedVehNames = self._getVehicleNames(vehicles)
            return rentedVehNames

    def _getMessageTemplateKey(self, data):
        return 'telecomVehicleReceived'

    def _getMessageContext(self, data, vehicleNames):
        # Build the template context: optional crew line (with an optional
        # brotherhood note), vehicle list and operation timestamp.
        ctx = {}
        hasCrew = self.invoiceHasCrew(data)
        if hasCrew:
            if self.invoiceHasBrotherhood(data):
                skills = ' (%s)' % i18n.makeString(ITEM_TYPES.TANKMAN_SKILLS_BROTHERHOOD)
            else:
                skills = ''
            ctx['crew'] = i18n.makeString(SYSTEM_MESSAGES.TELECOM_NOTIFICATIONS_VEHICLERECEIVED_CREW, skills=skills)
        else:
            ctx['crew'] = ''
        ctx['vehicles'] = ', '.join(vehicleNames)
        ctx['datetime'] = self._getOperationTimeString(data)
        return ctx

    def _formatData(self, assetType, data):
        vehicleNames = self._getVehicles(data)
        if not vehicleNames:
            return None
        else:
            return g_settings.msgTemplates.format(self._getMessageTemplateKey(None), ctx=self._getMessageContext(data, vehicleNames), data={'timestamp': time.time()})
class TelecomRemovedInvoiceFormatter(TelecomReceivedInvoiceFormatter):
    """Formats the invoice shown when telecom-provider vehicles are removed."""

    def _getMessageTemplateKey(self, data):
        return 'telecomVehicleRemoved'

    def _getVehicles(self, data):
        # Return the names of removed vehicles from the invoice, or None.
        dataEx = data.get('data', {})
        if not dataEx:
            return
        else:
            vehicles = dataEx.get('vehicles', {})
            removedVehNames = None
            if vehicles:
                _, removedVehNames, _ = self._getVehicleNames(vehicles)
            return removedVehNames

    def _getMessageContext(self, data, vehicleNames):
        # Removed-vehicle messages carry no crew line.
        return {'vehicles': ', '.join(vehicleNames),
         'datetime': self._getOperationTimeString(data)}
class PrbVehicleKickFormatter(ServiceChannelFormatter):
    """
    Message coming from the server when the user is kicked from the squad due to the vehicle
    level doesn't correspond to commander's vehicle level rules.
    """

    def format(self, message, *args):
        formatted = None
        data = message.data
        vehInvID = data.get('vehInvID', None)
        # Removed a stray unconditional g_itemsCache.items.getVehicle(vehInvID)
        # call whose result was discarded (it also ran when vehInvID was None).
        if vehInvID:
            vehicle = g_itemsCache.items.getVehicle(vehInvID)
            if vehicle:
                formatted = g_settings.msgTemplates.format('prbVehicleKick', ctx={'vehName': vehicle.userName})
        return (formatted, self._getGuiSettings(message, 'prbVehicleKick'))
class RotationGroupLockFormatter(ServiceChannelFormatter):
    """Vehicle rotation related message. Comes from the server on rotation group lock."""

    def format(self, message, *args):
        templateKey = self._getMessageTemplateKey()
        # message.data is either a list of group numbers or a single value.
        if isinstance(message.data, list):
            groups = ', '.join(map(str, message.data))
        else:
            groups = message.data
        formatted = g_settings.msgTemplates.format(templateKey, ctx={'groupNum': groups})
        return (formatted, self._getGuiSettings(message, templateKey))

    def _getMessageTemplateKey(self):
        # Overridden by RotationGroupUnlockFormatter.
        return 'RotationGroupLock'
class RotationGroupUnlockFormatter(RotationGroupLockFormatter):
    """Vehicle rotation related message. Comes from the server on rotation group unlock."""

    def _getMessageTemplateKey(self):
        return 'RotationGroupUnlock'
# okay decompyling C:\Users\PC\wotmods\files\originals\res\packages\scripts\scripts\client\messenger\formatters\service_channel.pyc
# decompiled 1 files: 1 okay, 0 failed, 0 verify failed
# 2017.05.04 15:26:57 Střední Evropa (letní čas)
| [
"info@webium.sk"
] | info@webium.sk |
fcce29999db00b57d91a222f54cb5539f9325827 | 0e15ccb1b0ac2b1b246f7f0dbc3874f2f5ed5d72 | /190904/2048.py | cbc5f15246144019ff997ba5f0b5cb1090cd161f | [] | no_license | toohong5/algorithm | fa0eda1c3a28c7bb8c13ae10711a955eccc169ee | a54ae271738927592bd023e93d223a00dc368895 | refs/heads/master | 2020-07-22T16:35:02.021460 | 2019-11-15T08:56:31 | 2019-11-15T08:56:31 | 206,717,672 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 227 | py | import sys
sys.stdin = open('input5.txt', 'r')
T = int(input())
for tc in range(1, T + 1):
N, way = input().split()
N = int(N)
arr = [list(map(int, input().split())) for _ in range(N)]
if way == "up":
| [
"toohong5@gmail.com"
] | toohong5@gmail.com |
83f9c8bad1d536eb9ed04197aca5de98c52cbfa7 | 4a0e3ffff54be178b377a4c18fe0ced2d44b7be6 | /tests/test_simplification.py | ef4d8ef459a90652ce1d7f1345a8374f72c490e6 | [] | no_license | WinVector/data_algebra | 608371904c0fcc99ffab7e0fe57c49dc75fd6b21 | 1e96817919ae891ba108d8d7471b2200b2528271 | refs/heads/main | 2023-04-13T20:11:18.682084 | 2023-04-10T14:09:41 | 2023-04-10T14:09:41 | 203,080,133 | 113 | 5 | null | null | null | null | UTF-8 | Python | false | false | 1,517 | py | import data_algebra
import data_algebra.test_util
from data_algebra.data_ops import *
import pytest
def test_simplification_1():
    # Repeated extends of the same column should be collapsed: only the
    # last assignment to x (5) survives, and the pipeline simplifies to
    # select_columns -> extend -> table.
    ops = (
        TableDescription(table_name="d", column_names=["col1", "col2", "col3"])
        .extend({"sum23": "col2 + col3"})
        .extend({"x": 1})
        .extend({"x": 2})
        .extend({"x": 3})
        .extend({"x": 4})
        .extend({"x": 5})
        .select_columns(["x", "sum23", "col3"])
    )
    d = data_algebra.data_model.default_data_model().pd.DataFrame(
        {"col1": [1, 2], "col2": [3, 4], "col3": [4, 5]}
    )
    res = ops.transform(d)
    expect = data_algebra.data_model.default_data_model().pd.DataFrame(
        {"x": [5, 5], "sum23": [7, 9], "col3": [4, 5],}
    )
    assert data_algebra.test_util.equivalent_frames(res, expect)
    # Structural check: the six extend steps were merged into one ExtendNode.
    assert isinstance(ops, SelectColumnsNode)
    assert isinstance(ops.sources[0], ExtendNode)
    assert isinstance(ops.sources[0].sources[0], TableDescription)
def test_simplification_2():
    # The two select_rows steps stay as separate nodes (not merged) —
    # presumably because the first (col2 > 0) guards the division in the
    # second, so evaluation order matters.
    d2 = data_algebra.data_model.default_data_model().pd.DataFrame({"col1": [0, 1], "col2": [1, 0],})
    ops2 = (
        describe_table(d2, table_name="d2")
        .select_rows("col2 > 0")
        .select_rows("col1 / col2 > 0")
    )
    res = ops2.transform(d2)
    # Only the row with col2 > 0 has col1 == 0, so nothing passes both filters.
    assert set(res.columns) == set(["col1", "col2"])
    assert res.shape[0] == 0
    assert isinstance(ops2, SelectRowsNode)
    assert isinstance(ops2.sources[0], SelectRowsNode)
    assert isinstance(ops2.sources[0].sources[0], TableDescription)
| [
"jmount@win-vector.com"
] | jmount@win-vector.com |
e06f3268b5fbbda6ff380b19dfcec992dfa4102a | 32bbe94e77deced5e58de97eb19e7c6126b001df | /backend/src/conftest.py | 6f9abb7eb37c760dac2ba89491f5762a265968c3 | [] | no_license | 3asyPe/astudy | 16d8adacc3bee9f2667c0a5f1be8228868440c6a | 0643a33a294c410523738f59f95c8d205dd63dc5 | refs/heads/master | 2023-06-25T11:23:39.500361 | 2021-07-28T13:33:48 | 2021-07-28T13:33:48 | 336,819,306 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,074 | py | import random
import string
import pytest
from django.conf import settings
from django.contrib.auth.models import AnonymousUser
from mixer.backend.django import mixer as _mixer
from app.test.api_client import DRFClient
from billing.models import BillingProfile
from carts.models import Wishlist
User = settings.AUTH_USER_MODEL
pytestmark = [pytest.mark.django_db]
@pytest.fixture
def api():
    """Authenticated DRF test client."""
    return DRFClient()
@pytest.fixture
def anon():
    """Anonymous (unauthenticated) DRF test client."""
    return DRFClient(anon=True)
@pytest.fixture
def mixer():
    """Expose the mixer model factory as a fixture."""
    return _mixer
@pytest.fixture
def user(mixer):
    """A regular user with a fixed e-mail address."""
    return mixer.blend(User, email="testemail@gmail.com")
@pytest.fixture
def another_user(mixer):
    """A second, distinct user (different e-mail than ``user``)."""
    return mixer.blend(User, email="testemail2@gmail.com")
@pytest.fixture
def anonymous_user(mixer):
    """Django's AnonymousUser instance (not persisted)."""
    return AnonymousUser()
@pytest.fixture
def course_factory(mixer):
    """Return a callable that blends a Course with random 8-char hex fields."""
    def _random_text(length=8):
        # Deduplicates the random-string expression that was repeated for
        # title/subtitle/description; choices() samples with replacement,
        # matching the original per-character random.choice loop.
        return ''.join(random.choices(string.hexdigits, k=length))

    def course_mixer():
        return mixer.blend(
            "courses.Course",
            title=_random_text(),
            subtitle=_random_text(),
            price=3.33,
            description=_random_text(),
        )
    return course_mixer
@pytest.fixture
def cart(mixer, user):
    """A cart owned by ``user``."""
    return mixer.blend("carts.Cart", user=user)
@pytest.fixture
def wishlist(user):
    """The user's wishlist (created on first use)."""
    return Wishlist.objects.get_or_create(user=user)[0]
@pytest.fixture
def saved_for_later(mixer, user):
    """A saved-for-later list owned by ``user``."""
    return mixer.blend("carts.SavedForLater", user=user)
@pytest.fixture
def billing_profile(mixer, user):
    """The user's active billing profile, created when absent."""
    # Single query instead of the previous exists() + first() pair
    # (two round-trips for the same answer).
    existing = BillingProfile.objects.filter(user=user, active=True).first()
    if existing is not None:
        return existing
    return mixer.blend("billing.BillingProfile", user=user)
@pytest.fixture
def card(mixer, billing_profile):
    """A stored card on the active billing profile.

    Uses a fixed fake Stripe card id and static card details.
    """
    card = mixer.blend(
        "billing.Card",
        billing_profile=billing_profile,
        stripe_id="card_1JD9PPAGKJR9v1iNUvmLh76d",
        brand="VISA",
        country="Belarus",
        postal_code="424242",
        last4="4242",
    )
    return card
| [
"alex.kvasha228@gmail.com"
] | alex.kvasha228@gmail.com |
91482f3078c6a8ffb8b5a646e4292e26b7ee4b5d | c1366a8772664a1076be1f709ec8364ded2cc119 | /Jump_to_python/자료형/문자열 자료형.py | 83a4d4ba86ad49e1d9b20118b1b020238912bfee | [] | no_license | Daboni/Python | 04eecda0ff57a385374f8cfd479a9db98cbf3d60 | e6bd7887a46535b3d5737836c2f8bbb668ef89ec | refs/heads/main | 2023-02-26T16:45:55.553629 | 2021-02-04T06:44:11 | 2021-02-04T06:44:11 | 322,223,972 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,944 | py | #문자열
"Life is too short, You need Python"
"a"
"123"
# ""
"Hello World"
# ''
'Python is fun'
# """ """
"""Life is too short, You need Python"""
# ''' '''
'''Life is too short, You need Python'''
# ' 포함시키기
food = "Python's favorite food is perl"
food #"Python's favorite food is perl"
# " 포함시키기
say = '"Python is very easy." he says.'
say #'"Python is very easy." he says.'
# \이용해서 '," 포함시키기
food = 'Python\'s favorite food is perl'
say = "\"Python is very easy.\" he says."
# \n 삽입으로 줄 바꾸기
multiline = "Life is too short\nYou need python"
# 연속된 ''' 또는 """ 사용하기
multiline = '''
Life is too short
You need python
'''
multiline = """
Life is too short
You need python
"""
# concatenation
head = "Python"
# Leading space added: without it the result was 'Pythonis fun!', which
# contradicted the expected output shown in the comment below.
tail = " is fun!"
head + tail #'Python is fun!'
# 문자열 곱하기
a = "python"
a * 2 #'pythonpython'
# 문자열 길이
a = "Life is too short"
len(a) #17
# Indexing and Slicing
# Indexing
a = "Life is too short, You need Python"
a[3] #'e'
a[-1] #'n'
a[-2] #'o'
a[-0] #'L'
# Slicing
a = "Life is too short, You need Python"
b = a[0] + a[1] + a[2] + a[3]
b #'Life'
a[0:3] #'Lif'
a[0:5] #'Life '
a[0:4] #'Life'
a[19:] #'You need Python'
a[:] #'Life is too short, You need Python'
a[19:-7] #'You need'
# Slicing 으로 문자열 나누기
a = "20201217Rainy"
date = a[:8]
weather = a[8:]
date #'20201217'
weather #'Rainy'
year = a[:4]
day = a[4:8]
year #'2020'
day #'1217'
# 문자열 바꾸기
# python에서 문자열의 요솟값은 바꿀 수 없음(immutable한 자료형)
a = "Pithon"
a[:1] #'P'
a[2:] #'thon'
a[:1] + 'y' + a[2:] #'Pyhon'
# 문자열 formatting
# 숫자 바로 대입
"I eat %d apples." %3
'I eat 3 apples.'
# 문자열 바로 대입 (substitute a string directly)
"I eat %s apples." %"five"
'I eat five apples.'  # fixed typo: was 'fieve'
# 숫자 값을 나타내는 변수로 대입
number = 3
"I eat %d apples." %number
'I eat 3 apples.'
# 2개 이상의 값 넣기
number = 10
day = "three"
"I ate %d apples. so I was sick for %s days." %(number, day)
'I ate 10 apples. so I was sick for three days.'
# %s 는 모든 형태의 값이든 문자열로 자동으로 변환시켜준다.
# %d와 %를 같이 쓸 때 %%를 쓴다.
"Error is %d%%" %98
'Error is 98%'
# 정렬과 공백 (alignment and padding)
"%10s" % "hi"
'        hi'
# '%-10s' left-aligns in a 10-char field; the original string was
# "-10sjane." (missing the '%'), which raised
# "TypeError: not all arguments converted during string formatting".
"%-10sjane." % 'hi'
'hi        jane.'
# 소수점 표현 (decimal places)
"%0.4f" % 3.42134234
'3.4213'
"%10.4f" % 3.42134234
'    3.4213'
# format 함수를 사용한 formatting
# 숫자 바로 대입
"I eat {} apples".format(3)
'I eat 3 apples'
# 문자열 바로 대입
"I eat {} apples".format("five")
'I eat five apples'
# 숫자 값을 가진 변수로 대입
number = 3
"I eat {} apples".format(number)
'I eat 3 apples'
# 2개 이상의 값 넣기
number = 10
day = "three"
"I ate {} apples. so I was sick for {} days.".format(number, day)
'I ate 10 apples. so I was sick for three days.'
# 이름으로 넣기
"I ate {number} apples. so I was sick for {day} days.".format(number=10,day=3)
'I ate 10 apples. so I was sick for 3 days.'
# 왼쪽 정렬
"{0:<10}".format("hi")
'hi '
# 오른쪽 정렬
"{0:>10}".format("hi")
' hi'
# 가운데 정렬
"{0:^10}".format("hi")
' hi '
# 공백 채우기
"{0:=^10}".format("hi")
'====hi===='
"{0:!<10}".format("hi")
'hi!!!!!!!!'
# f문자열 formatting
name = '홍길동'
age = 30
f'나의 이름은 {name}입니다. 나이는 {age}입니다.'
'나의 이름은 홍길동 입니다. 나이는 30입니다.'
f'나는 내년이면 {age+1}살이 된다.'
'나는 내년이면 31살이 된다.'
d = {'name':'홍길동', 'age':30}
f'나의 이름은 {d["name"]}입니다. 나이는 {d["age"]}입니다.'
'나의 이름은 홍길동입니다. 나이는 30입니다.'
# count()
a = "hobby"
a.count('b') #2
# find()
a = "Python is the best choice"
a.find('b') #14
a.find('k') #-1
# index()
a = "Life is too short"
a.index('t') #8
## index()함수는 없는 문자를 찾으면 오류 발생
#join()
",".join('abcd')
'a,b,c,d'
#upper()
a = "hi"
a.upper() #'HI'
#lower()
a = "HI"
a.lower() #'hi'
#lstrip()
a = " hi "
a.lstrip() #'hi '
#rstrip()
a = " hi "
a.rstrip() #' hi'
#strip()
a = " hi "
a.strip() #'hi'
#replace()
a = "Life is too short"
a.replace("Life", "Your leg") #'Your leg too short'
#split()
a = "Life is too short"
a.split() #['Life','is','too','short']
b = "a:b:c:d"
b.split(':') #['a','b','c','d']
| [
"noreply@github.com"
] | Daboni.noreply@github.com |
af11495a69e315fbb854ceeb2216c0a6520f719b | 1ebd997c78b5e31d55ab0d0678d4f20889c7fd01 | /meiduo/apps/carts/views.py | 1a7438919bfe668059fd6c4239b722de2ed3e827 | [
"MIT"
] | permissive | libin-c/Meiduo | f99ba4b105783c55011ebc301ae7068cbf946598 | 58468fd619a8d9f022df442a10a56b1b12ed1dd8 | refs/heads/master | 2022-12-13T10:02:56.715687 | 2019-07-13T11:26:53 | 2019-07-13T11:26:53 | 188,040,862 | 0 | 0 | MIT | 2022-12-10T04:49:02 | 2019-05-22T13:07:59 | HTML | UTF-8 | Python | false | false | 31,206 | py | import base64
import json
import pickle
from decimal import Decimal
from django.contrib.auth.mixins import LoginRequiredMixin
from django.http import HttpResponseForbidden, JsonResponse
from django.shortcuts import render
from django_redis import get_redis_connection
from apps.carts import constants
from apps.contents.models import SKU
# Create your views here.
from django.views import View
from apps.users.models import Address
from utils.cookiesecret import CookieSecret
from utils.response_code import RETCODE
# class CartsView(View):
# def get(self, request):
# """展示购物车"""
# user = request.user
# if user.is_authenticated:
# # 用户已登录,查询redis购物车
# redis_conn = get_redis_connection('carts')
# # 获取redis中的购物车数据
# redis_cart = redis_conn.hgetall('carts_%s' % user.id)
# # 获取redis中的选中状态
# cart_selected = redis_conn.smembers('selected_%s' % user.id)
#
# # 将redis中的数据构造成跟cookie中的格式一致,方便统一查询
# cart_dict = {}
# for sku_id, count in redis_cart.items():
# cart_dict[int(sku_id)] = {
# 'count': int(count),
# 'selected': sku_id in cart_selected
# }
# else:
# # 用户未登录,查询cookies购物车
# cart_str = request.COOKIES.get('carts')
# if cart_str:
# # 将cart_str转成bytes,再将bytes转成base64的bytes,最后将bytes转字典
# cart_dict = CookieSecret.loads(cart_str)
# # cart_dict = pickle.loads(base64.b64decode(cart_str.encode()))
# else:
# cart_dict = {}
# # 构造购物车渲染数据
# sku_ids = cart_dict.keys()
# skus = SKU.objects.filter(id__in=sku_ids)
# cart_skus = []
# for sku in skus:
# cart_skus.append({
# 'id': sku.id,
# 'name': sku.name,
# 'count': cart_dict.get(sku.id).get('count'),
# 'selected': str(cart_dict.get(sku.id).get('selected')), # 将True,转'True',方便json解析
# 'default_image_url': sku.default_image.url,
# 'price': str(sku.price), # 从Decimal('10.2')中取出'10.2',方便json解析
# 'amount': str(sku.price * cart_dict.get(sku.id).get('count')),
# })
#
# context = {
# 'cart_skus': cart_skus,
# }
#
# # 渲染购物车页面
# return render(request, 'cart.html', context)
#
# def post(self, request):
# """添加购物车"""
# # 接收参数
# json_dict = json.loads(request.body.decode())
# sku_id = json_dict.get('sku_id')
# count = json_dict.get('count')
# selected = json_dict.get('selected', True)
#
# # 判断参数是否齐全
# if not all([sku_id, count]):
# return HttpResponseForbidden('缺少必传参数')
# # 判断sku_id是否存在
# try:
# SKU.objects.get(id=sku_id)
# except SKU.DoesNotExist:
# return HttpResponseForbidden('商品不存在')
# # 判断count是否为数字
# try:
# count = int(count)
# except Exception:
# return HttpResponseForbidden('参数count有误')
# # 判断selected是否为bool值
# if selected:
# if not isinstance(selected, bool):
# return HttpResponseForbidden('参数selected有误')
#
# # 判断用户是否登录
# user = request.user
# if user.is_authenticated:
# # 用户已登录,操作redis购物车 采用的是 两张表 carts_use_id 和 selected_use_id
# redis_conn = get_redis_connection('carts')
# pl = redis_conn.pipeline()
# # 新增购物车数据
# pl.hincrby('carts_%s' % user.id, sku_id, count)
# # 新增选中的状态
# if selected:
# pl.sadd('selected_%s' % user.id, sku_id)
# # 执行管道
# pl.execute()
# # 响应结果
# return JsonResponse({'code': RETCODE.OK, 'errmsg': '添加购物车成功'})
# else:
# # 用户未登录,操作cookie购物车
# cart_str = request.COOKIES.get('carts')
# # 如果用户操作过cookie购物车
# if cart_str:
# # 将cart_str转成bytes,再将bytes转成base64的bytes,最后将bytes转字典
# cart_dict = CookieSecret.loads(cart_str)
# # cart_dict = pickle.loads(base64.b64decode(cart_str.encode()))
# else: # 用户从没有操作过cookie购物车
# cart_dict = {}
#
# # 判断要加入购物车的商品是否已经在购物车中,如有相同商品,累加求和,反之,直接赋值
# if sku_id in cart_dict:
# # 累加求和
# origin_count = cart_dict[sku_id]['count']
# count += origin_count
# cart_dict[sku_id] = {
# 'count': count,
# 'selected': selected
# }
# # 将字典转成bytes,再将bytes转成base64的bytes,最后将bytes转字符串
# cookie_cart_str = CookieSecret.dumps(cart_dict)
# # cookie_cart_str = base64.b64encode(pickle.dumps(cart_dict)).decode()
#
# # 创建响应对象
# response = JsonResponse({'code': RETCODE.OK, 'errmsg': '添加购物车成功'})
# # 响应结果并将购物车数据写入到cookie
# response.set_cookie('carts', cookie_cart_str, max_age=constants.CARTS_COOKIE_EXPIRES)
# return response
#
# def put(self, request):
# """修改购物车"""
# # 接收和校验参数
# # 接收参数
# json_dict = json.loads(request.body.decode())
# sku_id = json_dict.get('sku_id')
# count = json_dict.get('count')
# selected = json_dict.get('selected', True)
#
# # 判断参数是否齐全
# if not all([sku_id, count]):
# return HttpResponseForbidden('缺少必传参数')
# # 判断sku_id是否存在
# try:
# sku = SKU.objects.get(id=sku_id)
# except SKU.DoesNotExist:
# return HttpResponseForbidden('商品sku_id不存在')
# # 判断count是否为数字
# try:
# count = int(count)
# except Exception:
# return HttpResponseForbidden('参数count有误')
# # 判断selected是否为bool值
# if selected:
# if not isinstance(selected, bool):
# return HttpResponseForbidden('参数selected有误')
#
# # 判断用户是否登录
# user = request.user
# if user.is_authenticated:
# # 用户已登录,修改redis购物车
# redis_conn = get_redis_connection('carts')
# pl = redis_conn.pipeline()
# # 因为接口设计为幂等的,直接覆盖
# pl.hset('carts_%s' % user.id, sku_id, count)
# # 是否选中
# if selected:
# pl.sadd('selected_%s' % user.id, sku_id)
# else:
# pl.srem('selected_%s' % user.id, sku_id)
# pl.execute()
#
# # 创建响应对象
# cart_sku = {
# 'id': sku_id,
# 'count': count,
# 'selected': selected,
# 'name': sku.name,
# 'default_image_url': sku.default_image.url,
# 'price': sku.price,
# 'amount': sku.price * count,
# }
# return JsonResponse({'code': RETCODE.OK, 'errmsg': '修改购物车成功', 'cart_sku': cart_sku})
# else:
# # 用户未登录,修改cookie购物车
# # 用户未登录,修改cookie购物车
# cart_str = request.COOKIES.get('carts')
# if cart_str:
# # 将cart_str转成bytes,再将bytes转成base64的bytes,最后将bytes转字典
# cart_dict = CookieSecret.loads(cart_str)
# # cart_dict = pickle.loads(base64.b64decode(cart_str.encode()))
# else:
# cart_dict = {}
# # 因为接口设计为幂等的,直接覆盖
# cart_dict[sku_id] = {
# 'count': count,
# 'selected': selected
# }
# # 将字典转成bytes,再将bytes转成base64的bytes,最后将bytes转字符串
# cookie_cart_str = CookieSecret.dumps(cart_dict)
# # cookie_cart_str = base64.b64encode(pickle.dumps(cart_dict)).decode()
#
# # 创建响应对象
# cart_sku = {
# 'id': sku_id,
# 'count': count,
# 'selected': selected,
# 'name': sku.name,
# 'default_image_url': sku.default_image.url,
# 'price': sku.price,
# 'amount': sku.price * count,
# }
# response = JsonResponse({'code': RETCODE.OK, 'errmsg': '修改购物车成功', 'cart_sku': cart_sku})
# # 响应结果并将购物车数据写入到cookie
# response.set_cookie('carts', cookie_cart_str, max_age=constants.CARTS_COOKIE_EXPIRES)
# return response
#
# def delete(self, request):
# """删除购物车"""
# # 接收参数
# json_dict = json.loads(request.body.decode())
# sku_id = json_dict.get('sku_id')
#
# # 判断sku_id是否存在
# try:
# SKU.objects.get(id=sku_id)
# except SKU.DoesNotExist:
# return HttpResponseForbidden('商品不存在')
#
# # 判断用户是否登录
# user = request.user
# if user is not None and user.is_authenticated:
# # 用户未登录,删除redis购物车
# redis_conn = get_redis_connection('carts')
# pl = redis_conn.pipeline()
# # 删除键,就等价于删除了整条记录
# pl.hdel('carts_%s' % user.id, sku_id)
# pl.srem('selected_%s' % user.id, sku_id)
# pl.execute()
#
# # 删除结束后,没有响应的数据,只需要响应状态码即可
# return JsonResponse({'code': RETCODE.OK, 'errmsg': '删除购物车成功'})
# else:
# # 用户未登录,删除cookie购物车
# cart_str = request.COOKIES.get('carts')
# if cart_str:
# # 将cart_str转成bytes,再将bytes转成base64的bytes,最后将bytes转字典
# cart_dict = CookieSecret.loads(cart_str)
# # cart_dict = pickle.loads(base64.b64decode(cart_str.encode()))
# else:
# cart_dict = {}
#
# # 创建响应对象
# response = JsonResponse({'code': RETCODE.OK, 'errmsg': '删除购物车成功'})
# if sku_id in cart_dict:
# del cart_dict[sku_id]
# # 将字典转成bytes,再将bytes转成base64的bytes,最后将bytes转字符串
# cookie_cart_str = CookieSecret.dumps(cart_dict)
# # cookie_cart_str = base64.b64encode(pickle.dumps(cart_dict)).decode()
# # 响应结果并将购物车数据写入到cookie
# response.set_cookie('carts', cookie_cart_str, max_age=constants.CARTS_COOKIE_EXPIRES)
# return response
# class CartsSelectAllView(View):
# """全选购物车"""
#
# def put(self, request):
# # 接收参数
# json_dict = json.loads(request.body.decode())
# selected = json_dict.get('selected', True)
#
# # 校验参数
# if selected:
# if not isinstance(selected, bool):
# return HttpResponseForbidden('参数selected有误')
#
# # 判断用户是否登录
# user = request.user
# if user is not None and user.is_authenticated:
# # 用户已登录,操作redis购物车
# redis_conn = get_redis_connection('carts')
# cart = redis_conn.hgetall('carts_%s' % user.id)
# sku_id_list = cart.keys()
# if selected:
# # 全选
# redis_conn.sadd('selected_%s' % user.id, *sku_id_list)
# else:
# # 取消全选
# redis_conn.srem('selected_%s' % user.id, *sku_id_list)
# return JsonResponse({'code': RETCODE.OK, 'errmsg': '全选购物车成功'})
# else:
# # 用户已登录,操作cookie购物车
# cart = request.COOKIES.get('carts')
# response = JsonResponse({'code': RETCODE.OK, 'errmsg': '全选购物车成功'})
# if cart is not None:
# # cart = pickle.loads(base64.b64decode(cart.encode()))
# cart = CookieSecret.loads(cart)
# for sku_id in cart:
# cart[sku_id]['selected'] = selected
# # cookie_cart = base64.b64encode(pickle.dumps(cart)).decode()
# cookie_cart =CookieSecret.dumps(cart)
# response.set_cookie('carts', cookie_cart, max_age=constants.CARTS_COOKIE_EXPIRES)
#
# return response
#
#
# class CartsSimpleView(View):
# """商品页面右上角购物车"""
#
# def get(self, request):
# # 判断用户是否登录
# user = request.user
# if user.is_authenticated:
# # 用户已登录,查询Redis购物车
# redis_conn = get_redis_connection('carts')
# redis_cart = redis_conn.hgetall('carts_%s' % user.id)
# cart_selected = redis_conn.smembers('selected_%s' % user.id)
# # 将redis中的两个数据统一格式,跟cookie中的格式一致,方便统一查询
# cart_dict = {}
# for sku_id, count in redis_cart.items():
# cart_dict[int(sku_id)] = {
# 'count': int(count),
# 'selected': sku_id in cart_selected
# }
# else:
# # 用户未登录,查询cookie购物车
# cart_str = request.COOKIES.get('carts')
# if cart_str:
# # cart_dict = pickle.loads(base64.b64decode(cart_str.encode()))
# cart_dict =CookieSecret.loads(cart_str)
# else:
# cart_dict = {}
#
# # 构造简单购物车JSON数据
# cart_skus = []
# sku_ids = cart_dict.keys()
# skus = SKU.objects.filter(id__in=sku_ids)
# for sku in skus:
# cart_skus.append({
# 'id': sku.id,
# 'name': sku.name,
# 'count': cart_dict.get(sku.id).get('count'),
# 'default_image_url': sku.default_image.url
# })
#
# # 响应json列表数据
# return JsonResponse({'code': RETCODE.OK, 'errmsg': 'OK', 'cart_skus': cart_skus})
# def get_request(request):
# # 1.0 获取前端的数据
# json_dict = json.loads(request.body.decode())
# sku_id = json_dict.get('sku_id')
# count = json_dict.get('count')
# selected = json_dict.get('selected', True)
# user = request.user
# # 1.用户已登录,查询redis购物车
# carts_redis_client = get_redis_connection('carts')
# # 用户未登录,操作cookie购物车
# cart_str = request.COOKIES.get('carts')
# return {'sku_id': sku_id, 'count': count, 'selected': selected, 'user': user,
# 'carts_redis_client': carts_redis_client, 'cart_str': cart_str}
class CartsView(View):
    """Shopping-cart CRUD endpoints (list, add, update, delete).

    Storage model:
      * Authenticated users: a Redis hash keyed by ``user.id`` whose fields
        are sku ids and whose values are JSON blobs of the form
        ``{"count": int, "selected": bool}``.
      * Anonymous users: the same per-sku mapping, serialised into the
        ``carts`` cookie via ``CookieSecret`` (presumably signed/encrypted --
        see utils.cookiesecret).
    """
    def get(self, request):
        """Render the cart page listing every item in the user's cart."""
        user = request.user
        if user.is_authenticated:
            # Logged in: read the cart from Redis.
            carts_redis_client = get_redis_connection('carts')
            # Fetch every sku entry stored for this user.
            carts_data = carts_redis_client.hgetall(request.user.id)
            # Normalise to the same {sku_id: {...}} dict shape as the cookie
            # cart so the rendering code below treats both sources uniformly.
            carts_dict = {int(data[0].decode()): json.loads(data[1].decode()) for data in carts_data.items()}
        else:
            # Anonymous: read the cart from the cookie.
            cookie_str = request.COOKIES.get('carts')
            if cookie_str:
                carts_dict = CookieSecret.loads(cookie_str)
            else:
                carts_dict = {}
        sku_ids = carts_dict.keys()
        skus = SKU.objects.filter(id__in=sku_ids)
        cart_skus = []
        for sku in skus:
            cart_skus.append({
                'id': sku.id,
                'name': sku.name,
                'count': carts_dict.get(sku.id).get('count'),
                'selected': str(carts_dict.get(sku.id).get('selected')),  # True -> 'True' so the template/JS can parse it
                'default_image_url': sku.default_image.url,
                'price': str(sku.price),  # Decimal -> str for clean serialisation
                'amount': str(sku.price * carts_dict.get(sku.id).get('count')),
            })
        context = {
            'cart_skus': cart_skus,
        }
        # Render the cart page.
        return render(request, 'cart.html', context)
    def post(self, request):
        """Add a sku to the cart, accumulating the count if it already exists."""
        # 1.0 Receive parameters.
        json_dict = json.loads(request.body.decode())
        sku_id = json_dict.get('sku_id')
        count = json_dict.get('count')
        selected = json_dict.get('selected', True)
        # 2.0 Validate parameters.
        # 2.1 Both sku_id and count are required.
        if not all([sku_id, count]):
            return HttpResponseForbidden('缺少必传参数')
        # 2.2 The sku must exist.
        try:
            SKU.objects.get(id=sku_id)
        except SKU.DoesNotExist:
            return HttpResponseForbidden('商品不存在')
        # 2.3 count must be an integer.
        try:
            count = int(count)
        except Exception:
            return HttpResponseForbidden('参数count有误')
        # 2.3 selected must be a bool (only checked when truthy).
        if selected:
            if not isinstance(selected, bool):
                return HttpResponseForbidden('参数selected有误')
        # 3.0 Branch on authentication state.
        user = request.user
        if user.is_authenticated:
            # 3.1 Logged in: store in Redis.
            carts_redis_client = get_redis_connection('carts')
            # 3.2 Read any existing cart entries for this user.
            client_data = carts_redis_client.hgetall(user.id)
            if not client_data:
                # Empty cart: create the entry.
                # NOTE(review): when the cart is empty this hset is repeated
                # by the else-branch below with identical data -- harmless
                # but redundant; an if/elif shape would avoid the double write.
                carts_redis_client.hset(user.id, sku_id, json.dumps({'count': count, 'selected': selected}))
            # If the sku is already in the cart, accumulate its count.
            if str(sku_id).encode() in client_data:
                # Decode the stored entry for this sku.
                child_dict = json.loads(client_data[str(sku_id).encode()])
                # Accumulate the quantity.
                child_dict['count'] += count
                # Write the updated entry back.
                carts_redis_client.hset(user.id, sku_id, json.dumps(child_dict))
            else:
                # New sku: write a fresh entry.
                carts_redis_client.hset(user.id, sku_id, json.dumps({'count': count, 'selected': selected}))
            return JsonResponse({'code': RETCODE.OK, 'errmsg': '添加购物车成功'})
        else:
            # 4.0 Anonymous: store in the cookie cart.
            cart_str = request.COOKIES.get('carts')
            # 4.1 Decode an existing cookie cart, if any.
            if cart_str:
                # 4.1.1 Decrypt into a plain dict.
                cart_dict = CookieSecret.loads(cart_str)
            else:  # 4.1.2 No cookie cart yet.
                cart_dict = {}
            # 4.2 If the sku is already present, accumulate the count;
            #     otherwise the assignment below simply inserts it.
            if sku_id in cart_dict:
                # Accumulate the quantity.
                origin_count = cart_dict[sku_id]['count']
                count += origin_count
            cart_dict[sku_id] = {
                'count': count,
                'selected': selected
            }
            # Re-serialise the cart for the cookie.
            cookie_cart_str = CookieSecret.dumps(cart_dict)
            # Build the response.
            response = JsonResponse({'code': RETCODE.OK, 'errmsg': '添加购物车成功'})
            # Write the cart back into the cookie (30-day lifetime).
            response.set_cookie('carts', cookie_cart_str, max_age=24 * 30 * 3600)
            return response
    def put(self, request):
        """Overwrite one cart entry's count/selected (idempotent update)."""
        # 1.0 Receive parameters.
        json_dict = json.loads(request.body.decode())
        sku_id = json_dict.get('sku_id')
        count = json_dict.get('count')
        selected = json_dict.get('selected', True)
        # 2.0 Validate parameters.
        # 2.1 Required fields.
        if not all([sku_id, count]):
            return HttpResponseForbidden('缺少关键参数')
        # 2.2 The sku must exist (kept for the response payload below).
        try:
            sku = SKU.objects.get(id=sku_id)
        except SKU.DoesNotExist:
            return HttpResponseForbidden('该商品不存在')
        # 2.3 count must be an integer.
        try:
            count = int(count)
        except Exception:
            return HttpResponseForbidden('count 参数不正确')
        # 2.4 selected must be a bool (only checked when truthy).
        if selected:
            if not isinstance(selected, bool):
                return HttpResponseForbidden('selected 参数不正确')
        # 3.0 Branch on authentication state.
        user = request.user
        cookie_cart_str = ""
        if user.is_authenticated:
            # 3.1 Logged in: connect to Redis.
            carts_redis_client = get_redis_connection('carts')
            # 3.2 PUT is idempotent, so overwrite the stored entry outright.
            new_data = {
                'count': count,
                'selected': selected
            }
            carts_redis_client.hset(user.id, sku_id, json.dumps(new_data))
        else:
            # Anonymous: update the cookie cart.
            # 4.0 Read the cookie.
            cart_str = request.COOKIES.get('carts')
            # 4.1 Decrypt it when present.
            if cart_str:
                # Decode the serialised cart into a dict.
                cart_dict = CookieSecret.loads(cart_str)
            # 4.2 Otherwise start from an empty cart.
            else:
                cart_dict = {}
            # Overwrite the previous entry (idempotent).
            cart_dict[sku_id] = {
                'count': count,
                'selected': selected
            }
            # Re-serialise for the cookie.
            cookie_cart_str = CookieSecret.dumps(cart_dict)
        # Build the payload echoed back to the front end.
        cart_sku = {
            'id': sku_id,
            'count': count,
            'selected': selected,
            'name': sku.name,
            'default_image_url': sku.default_image.url,
            'price': sku.price,
            'amount': sku.price * count,
        }
        response = JsonResponse({'code': RETCODE.OK, 'errmsg': '修改购物车成功', 'cart_sku': cart_sku})
        if not user.is_authenticated:
            # Persist the anonymous cart back into the cookie.
            response.set_cookie('carts', cookie_cart_str, max_age=constants.CARTS_COOKIE_EXPIRES)
        return response
    def delete(self, request):
        """Remove one sku from the cart."""
        # 1.0 Receive parameters.
        json_dict = json.loads(request.body.decode())
        sku_id = json_dict.get('sku_id')
        # 2.0 Validate: the sku must exist.
        try:
            SKU.objects.get(id=sku_id)
        except SKU.DoesNotExist:
            return HttpResponseForbidden('商品不存在')
        # 3.0 Branch on authentication state.
        user = request.user
        if user is not None and user.is_authenticated:
            # 3.1 Logged in: delete from Redis.
            # 3.1.1 Connect to Redis.
            carts_redis_client = get_redis_connection('carts')
            # 3.1.2 Deleting the hash field removes the whole entry.
            carts_redis_client.hdel(user.id, sku_id)
            # 3.1.3 Nothing to return beyond the status code.
            return JsonResponse({'code': RETCODE.OK, 'errmsg': '删除购物车成功'})
        else:
            # 3.2 Anonymous: delete from the cookie cart.
            # 3.2.1 Read the cookie.
            cart_str = request.COOKIES.get('carts')
            # 3.2.2 Decrypt it when present.
            if cart_str:
                cart_dict = CookieSecret.loads(cart_str)
            else:
                cart_dict = {}
            # 4.0 Build the response.
            response = JsonResponse({'code': RETCODE.OK, 'errmsg': '删除购物车成功'})
            # 4.1 Drop the entry only if it is actually in the cart.
            if sku_id in cart_dict:
                # 4.2 Remove it.
                del cart_dict[sku_id]
                # 4.3 Re-serialise the remaining cart.
                cookie_cart_str = CookieSecret.dumps(cart_dict)
                # Persist the updated cart into the cookie.
                response.set_cookie('carts', cookie_cart_str, max_age=constants.CARTS_COOKIE_EXPIRES)
            return response
class CartsSelectAllView(View):
    """Select or deselect every item in the shopping cart at once."""
    def put(self, request):
        """Set the ``selected`` flag on all cart entries.

        Body: JSON ``{"selected": bool}`` (defaults to True).  Logged-in
        carts are rewritten in Redis; anonymous carts in the ``carts``
        cookie.
        """
        # 1.0 Receive parameters.
        json_dict = json.loads(request.body.decode())
        selected = json_dict.get('selected', True)
        # 2.0 Validate: selected must be a bool (checked when truthy, matching
        # the sibling CartsView methods).
        # Bug fix: the forbidden response was previously built but never
        # *returned*, so invalid payloads fell through and were processed.
        if selected:
            if not isinstance(selected, bool):
                return HttpResponseForbidden('selected 参数不正确')
        # 3.0 Branch on authentication state.
        user = request.user
        if user.is_authenticated:
            # 3.1 Logged in: rewrite every Redis hash field with the new flag.
            carts_redis_client = get_redis_connection('carts')
            carts_data = carts_redis_client.hgetall(user.id)
            for field, raw in carts_data.items():
                sku_id = field.decode()
                entry = json.loads(raw.decode())
                # One assignment covers both select-all and deselect-all
                # (the original if/else branches executed identical code).
                entry['selected'] = selected
                carts_redis_client.hset(user.id, sku_id, json.dumps(entry))
            return JsonResponse({'code': RETCODE.OK, 'errmsg': '全选购物车成功'})
        else:
            # 3.2 Anonymous: rewrite the cookie cart (no-op without a cookie).
            carts_str = request.COOKIES.get('carts')
            response = JsonResponse({'code': RETCODE.OK, 'errmsg': '全选购物车成功'})
            if carts_str is not None:
                carts_dict = CookieSecret.loads(carts_str)
                for sku_id in carts_dict:
                    carts_dict[sku_id]['selected'] = selected
                cookie_cart = CookieSecret.dumps(carts_dict)
                response.set_cookie('carts', cookie_cart, max_age=constants.CARTS_COOKIE_EXPIRES)
            return response
class CartsSimpleView(View):
    """JSON feed for the mini-cart widget shown in the page header."""
    def get(self, request):
        """Return the current cart as a compact JSON list of skus."""
        # Load the raw cart: Redis for authenticated users, the serialised
        # 'carts' cookie for anonymous visitors.  Both are normalised to
        # {sku_id: {"count": ..., "selected": ...}}.
        if request.user.is_authenticated:
            client = get_redis_connection('carts')
            raw_entries = client.hgetall(request.user.id).items()
            cart_dict = {int(field.decode()): json.loads(value.decode())
                         for field, value in raw_entries}
        else:
            cookie_value = request.COOKIES.get('carts')
            cart_dict = CookieSecret.loads(cookie_value) if cookie_value else {}
        # Project each stored sku into the lightweight payload the header
        # widget expects.
        cart_skus = [
            {
                'id': sku.id,
                'name': sku.name,
                'count': cart_dict[sku.id]['count'],
                'default_image_url': sku.default_image.url
            }
            for sku in SKU.objects.filter(id__in=cart_dict.keys())
        ]
        # Respond with the JSON list.
        return JsonResponse({'code': RETCODE.OK, 'errmsg': 'OK', 'cart_skus': cart_skus})
class OrderSettlementView(LoginRequiredMixin,View):
    """Order settlement (checkout) page for the selected cart items."""
    def get(self, request):
        """Render the place-order page with addresses, items and totals."""
        # Login is guaranteed by LoginRequiredMixin, so only the Redis cart
        # is consulted (anonymous cookie carts never reach this view).
        user = request.user
        # Shipping addresses for the address picker.
        # Bug fix: QuerySet.filter() never raises Address.DoesNotExist (it
        # just returns an empty queryset), so the old try/except was dead
        # code.  The template already handles an empty address list by
        # directing the user to the address editor.
        addresses = Address.objects.filter(user=request.user, is_deleted=False)
        # Pull only the *selected* items out of the Redis cart.
        redis_conn = get_redis_connection('carts')
        redis_data = redis_conn.hgetall(user.id)
        carts_dict = {}
        for field, raw in redis_data.items():
            sku_dict = json.loads(raw.decode())
            if sku_dict['selected']:
                carts_dict[int(field.decode())] = sku_dict
        # Running totals (initial values come from project constants).
        total_count = constants.TOTAL_COUNT
        total_amount = Decimal(constants.TOTAL_AMOUNT)
        # Annotate each sku with its cart quantity and line amount.
        skus = SKU.objects.filter(id__in=carts_dict.keys())
        for sku in skus:
            sku.count = carts_dict[sku.id]['count']
            sku.amount = sku.count * sku.price
            # Accumulate the order-wide totals.
            total_count += sku.count
            total_amount += sku.count * sku.price
        # Flat shipping fee.
        freight = Decimal(constants.FREIGHT)
        # Render the settlement page.
        context = {
            'addresses': addresses,
            'skus': skus,
            'total_count': total_count,
            'total_amount': total_amount,
            'freight': freight,
            'payment_amount': total_amount + freight,
            'default_address_id': user.default_address_id
        }
        return render(request, 'place_order.html', context)
"84045407@qq.com"
] | 84045407@qq.com |
7d48311b4c76492c3f5616ad0a990dbeef8271b4 | 29b1757434a8069fd65bf11303a4422a4a7b8d47 | /grit/command/Remote.py | 316c67fcc4abda133a942fcfcfbb78ab0f5cde86 | [
"Artistic-2.0"
] | permissive | rec/grit | 720310e33aee7b31d26b976e1936e6e390dba2f4 | b5be6d50cb802db9c9510e68688908f3d4d6d162 | refs/heads/master | 2020-05-17T20:03:39.979504 | 2019-02-19T13:38:57 | 2019-02-19T13:38:57 | 21,712,161 | 2 | 1 | null | 2015-03-17T00:56:48 | 2014-07-10T22:14:25 | Python | UTF-8 | Python | false | false | 1,583 | py | from __future__ import absolute_import, division, print_function, unicode_literals
import json
import os
import urllib2
from grit import Call
from grit.Cache import cached
from grit import Project
from grit import Settings
_REMOTE = """
git remote add {nickname} git@github.com:{user}/{project}.git
"""
SAFE = True
HELP = """
grit r[emote] <user> [<nickname>]
Adds a remote branch for <user> named <nickname> (which defaults to <user>
if it's empty.
"""
def existing_remotes(cwd):
    """Return the set of git remote names configured in *cwd*."""
    raw_output = Call.call_raw('git remote', cwd=cwd)
    return set(raw_output.split())
def add_remote(user, nickname, cwd=None, existing=None):
    """Register git remote *nickname* pointing at *user*'s fork, unless a
    remote of that name already exists.

    *existing* may carry a pre-computed set of remote names to avoid an
    extra ``git remote`` call.
    """
    known = existing or existing_remotes(cwd)
    if nickname not in known:
        command = _REMOTE.format(
            user=user, nickname=nickname, project=Settings.PROJECT)
        Call.call(command, cwd=cwd)
def remote(user='all', nickname='', cwd=None):
    """Add git remotes and return the (nickname, user) pairs processed.

    With ``user='all'`` every remote from the project settings is added;
    otherwise a single remote is added for *user*, named *nickname*
    (defaulting to the user name when empty, per the module HELP text).
    """
    if user == 'all':
        assert not nickname
        remotes = Project.settings('remotes').items()
    else:
        # Bug fix: this was `(nickname, user or user)`, which left the
        # nickname empty -- HELP documents that <nickname> defaults to
        # <user> when omitted.
        remotes = [(nickname or user, user)]
    existing = existing_remotes(cwd)
    for nickname, user in remotes:
        if nickname not in existing:
            add_remote(user, nickname, cwd=cwd, existing=existing)
    return remotes
@cached
def remotes():
    """Cached (nickname, user) pairs for every remote configured in settings."""
    return remote()
@cached
def inverse():
    """Cached reverse lookup: user name -> remote nickname."""
    return {user: nick for nick, user in remotes()}
def add_nickname(user):
if user == Settings.USER:
nickname = 'origin'
else:
try:
nickname = inverse()[user]
except KeyError:
add_remote(user, user)
nickname = user
| [
"tom@swirly.com"
] | tom@swirly.com |
203327de968cec0206b4fe3aedf0a89d8e26ff80 | 0dc9bbce77d65a6991f7659c70bf4b81bb319a28 | /artascope/src/model/user_config.py | 65658857748e05322f7ca187a74384a026c9e800 | [
"MIT"
] | permissive | magus0219/icloud-photo-downloader | cca1f3aa0ee93fd3fb195d68d5e02edacea19bc5 | 6334530d971cf61089d031de99a38f204c201837 | refs/heads/master | 2023-05-06T14:51:21.145193 | 2020-08-10T14:10:49 | 2020-08-10T14:10:49 | 259,565,469 | 11 | 0 | MIT | 2021-06-02T01:53:05 | 2020-04-28T07:37:03 | Python | UTF-8 | Python | false | false | 2,212 | py | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# Created by magus0219[magus0219@gmail.com] on 2020/3/30
from artascope.src.model.mixin import JsonDataMixin
class NotifyType:
    # Notification-channel constants, stored as plain ints in UserConfig.
    NONE = 0
    SLACK = 1
    EMAIL = 2
class TargetType:
    # Upload-target constants; SFTP is the only value defined here.
    SFTP = 1
class SchedulerEnable:
    # Int flags controlling whether the per-user scheduler is enabled.
    Disable = 0
    Enable = 1
class ReindexEnable:
    # Int flags controlling whether reindexing is enabled for the user.
    Disable = 0
    Enable = 1
class UserConfig(JsonDataMixin):
    """Per-user configuration bundle.

    Groups the iCloud credentials, the upload target (SFTP), notification
    settings (none / Slack / email) and scheduler options.  All values are
    plain ints/strings so JsonDataMixin can (de)serialise the instance.
    Defaults of ``None`` mean "not configured".
    """
    def __init__(
        self,
        icloud_username: str,
        icloud_password: str,
        target_type: int = TargetType.SFTP,
        sftp_host: str = None,
        sftp_port: int = None,
        sftp_username: str = None,
        sftp_password: str = None,
        sftp_dir: str = None,
        reindex_enable: int = ReindexEnable.Disable,
        sftp_home: str = None,
        admin_url_prefix: str = None,
        notify_type: int = NotifyType.NONE,
        slack_token: str = None,
        slack_channel: str = None,
        smtp_host: str = None,
        smtp_port: int = None,
        smtp_user: str = None,
        smtp_password: str = None,
        msg_from: str = None,
        msg_to: str = None,
        scheduler_enable: int = SchedulerEnable.Disable,
        scheduler_crontab: str = None,
        scheduler_last_day_cnt: int = None,
    ):
        # iCloud account credentials.
        self.icloud_username = icloud_username
        self.icloud_password = icloud_password
        # Upload target (currently SFTP only) and its connection details.
        self.target_type = target_type
        self.sftp_host = sftp_host
        self.sftp_port = sftp_port
        self.sftp_username = sftp_username
        self.sftp_password = sftp_password
        self.sftp_dir = sftp_dir
        self.reindex_enable = reindex_enable
        self.sftp_home = sftp_home
        # Prefix used when building admin-facing URLs.
        self.admin_url_prefix = admin_url_prefix
        # Notification channel and its per-channel settings.
        self.notify_type = notify_type
        self.slack_token = slack_token
        self.slack_channel = slack_channel
        self.smtp_host = smtp_host
        self.smtp_port = smtp_port
        self.smtp_user = smtp_user
        self.smtp_password = smtp_password
        self.msg_from = msg_from
        self.msg_to = msg_to
        # Scheduler flags: whether enabled, its crontab spec, and how many
        # trailing days to fetch on each run.
        self.scheduler_enable = scheduler_enable
        self.scheduler_crontab = scheduler_crontab
        self.scheduler_last_day_cnt = scheduler_last_day_cnt
| [
"magus0219@gmail.com"
] | magus0219@gmail.com |
13f1d9708281dbfba9691ad955cf3dc31d7cedf6 | e1cacf76f531494414c846f61d055b06801052d8 | /sdk/python/kubeflow/katib/configuration.py | 6afecca91b0a56b71205147a7189f5e67a9965e8 | [
"Apache-2.0"
] | permissive | prem0912/katib | 388d6637f2fed8c67aa7bd0ad4419dedccb75ad7 | d19149ddcd5b59054d4c26fb23a141b8adbe9634 | refs/heads/master | 2020-06-17T16:10:55.954875 | 2020-06-03T09:19:53 | 2020-06-03T10:06:01 | 243,764,109 | 0 | 2 | Apache-2.0 | 2020-02-28T13:04:57 | 2020-02-28T13:04:57 | null | UTF-8 | Python | false | false | 7,643 | py | # coding: utf-8
"""
katib
swagger description for katib # noqa: E501
OpenAPI spec version: v0.1
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import copy
import logging
import multiprocessing
import sys
import urllib3
import six
from six.moves import http_client as httplib
class Configuration(object):
    """Runtime configuration for the generated katib API client.

    NOTE: This class is auto generated by the swagger code generator program.
    Ref: https://github.com/swagger-api/swagger-codegen
    Do not edit the class manually.
    """
    # Class-level template instance copied by every new Configuration;
    # registered via set_default().
    _default = None
    def __init__(self):
        """Constructor.

        If a template was registered via ``set_default()``, shallow-copy its
        attributes and return early; otherwise initialise built-in defaults.
        """
        if self._default:
            for key in self._default.__dict__.keys():
                self.__dict__[key] = copy.copy(self._default.__dict__[key])
            return
        # Default Base url
        self.host = "https://localhost"
        # Temp file folder for downloading files
        self.temp_folder_path = None
        # Authentication Settings
        # dict to store API key(s)
        self.api_key = {}
        # dict to store API prefix (e.g. Bearer)
        self.api_key_prefix = {}
        # Username for HTTP basic authentication
        self.username = ""
        # Password for HTTP basic authentication
        self.password = ""
        # Logging Settings
        self.logger = {}
        self.logger["package_logger"] = logging.getLogger("katib")
        self.logger["urllib3_logger"] = logging.getLogger("urllib3")
        # Log format (assignment triggers the logger_format property setter,
        # which also builds self.logger_formatter)
        self.logger_format = '%(asctime)s %(levelname)s %(message)s'
        # Log stream handler
        self.logger_stream_handler = None
        # Log file handler
        self.logger_file_handler = None
        # Debug file location (assignment triggers the logger_file property
        # setter, which wires stream/file handlers onto the loggers)
        self.logger_file = None
        # Debug switch (property setter adjusts log levels)
        self.debug = False
        # SSL/TLS verification
        # Set this to false to skip verifying SSL certificate when calling API
        # from https server.
        self.verify_ssl = True
        # Set this to customize the certificate file to verify the peer.
        self.ssl_ca_cert = None
        # client certificate file
        self.cert_file = None
        # client key file
        self.key_file = None
        # Set this to True/False to enable/disable SSL hostname verification.
        self.assert_hostname = None
        # urllib3 connection pool's maximum number of connections saved
        # per pool. urllib3 uses 1 connection as default value, but this is
        # not the best value when you are making a lot of possibly parallel
        # requests to the same host, which is often the case here.
        # cpu_count * 5 is used as default value to increase performance.
        self.connection_pool_maxsize = multiprocessing.cpu_count() * 5
        # Proxy URL
        self.proxy = None
        # Safe chars for path_param
        self.safe_chars_for_path_param = ''
    @classmethod
    def set_default(cls, default):
        """Register *default* as the template copied by future constructors."""
        cls._default = default
    @property
    def logger_file(self):
        """The logger file path, or None when logging to a stream.

        :return: The logger_file path.
        :rtype: str
        """
        # __logger_file is the name-mangled backing field set by the setter.
        return self.__logger_file
    @logger_file.setter
    def logger_file(self, value):
        """The logger file.
        If the logger_file is None, then add stream handler and remove file
        handler. Otherwise, add file handler and remove stream handler.
        :param value: The logger_file path.
        :type: str
        """
        self.__logger_file = value
        if self.__logger_file:
            # If set logging file,
            # then add file handler and remove stream handler.
            self.logger_file_handler = logging.FileHandler(self.__logger_file)
            self.logger_file_handler.setFormatter(self.logger_formatter)
            for _, logger in six.iteritems(self.logger):
                logger.addHandler(self.logger_file_handler)
                if self.logger_stream_handler:
                    logger.removeHandler(self.logger_stream_handler)
        else:
            # If not set logging file,
            # then add stream handler and remove file handler.
            self.logger_stream_handler = logging.StreamHandler()
            self.logger_stream_handler.setFormatter(self.logger_formatter)
            for _, logger in six.iteritems(self.logger):
                logger.addHandler(self.logger_stream_handler)
                if self.logger_file_handler:
                    logger.removeHandler(self.logger_file_handler)
    @property
    def debug(self):
        """Debug status.

        :return: True when debug logging is enabled.
        :rtype: bool
        """
        return self.__debug
    @debug.setter
    def debug(self, value):
        """Debug status
        :param value: The debug status, True or False.
        :type: bool
        """
        self.__debug = value
        if self.__debug:
            # if debug status is True, turn on debug logging
            for _, logger in six.iteritems(self.logger):
                logger.setLevel(logging.DEBUG)
            # turn on httplib debug
            httplib.HTTPConnection.debuglevel = 1
        else:
            # if debug status is False, turn off debug logging,
            # setting log level to default `logging.WARNING`
            for _, logger in six.iteritems(self.logger):
                logger.setLevel(logging.WARNING)
            # turn off httplib debug
            httplib.HTTPConnection.debuglevel = 0
    @property
    def logger_format(self):
        """The logger format string.

        :return: The format string.
        :rtype: str
        """
        return self.__logger_format
    @logger_format.setter
    def logger_format(self, value):
        """The logger format.
        The logger_formatter will be updated when sets logger_format.
        :param value: The format string.
        :type: str
        """
        self.__logger_format = value
        self.logger_formatter = logging.Formatter(self.__logger_format)
    def get_api_key_with_prefix(self, identifier):
        """Gets API key (with prefix if set).
        :param identifier: The identifier of apiKey.
        :return: The token for api key authentication, or None (implicitly)
            when no key is registered for *identifier*.
        """
        if (self.api_key.get(identifier) and
                self.api_key_prefix.get(identifier)):
            return self.api_key_prefix[identifier] + ' ' + self.api_key[identifier]  # noqa: E501
        elif self.api_key.get(identifier):
            return self.api_key[identifier]
    def get_basic_auth_token(self):
        """Gets HTTP basic authentication header (string).
        :return: The token for basic HTTP authentication.
        """
        return urllib3.util.make_headers(
            basic_auth=self.username + ':' + self.password
        ).get('authorization')
    def auth_settings(self):
        """Gets Auth Settings dict for api client.
        :return: The Auth Settings information dict (empty: the swagger spec
            defines no auth schemes).
        """
        return {
        }
    def to_debug_report(self):
        """Gets the essential information for debugging.
        :return: The report for debugging.
        """
        return "Python SDK Debug Report:\n"\
               "OS: {env}\n"\
               "Python Version: {pyversion}\n"\
               "Version of the API: v0.1\n"\
               "SDK Package Version: 0.0.1".\
               format(env=sys.platform, pyversion=sys.version)
| [
"premnath.vel@gmail.com"
] | premnath.vel@gmail.com |
8f437d715a4f2b8228b57dd1fbf993432c1036ab | 2af94f8a7609d47fdcea28a2132c4f8bacb103e3 | /lib/_idigi_data.py | 793da64bda691d043bc092f2f50eb3cd4c3c53b3 | [] | no_license | bernhara/DigiGateway4Raph | 685527723f0b306f387233c78d27fe9d78717c38 | f36ba29ef883d70f94b8609ff734b5dcde786c66 | refs/heads/master | 2020-07-05T19:56:27.027547 | 2019-08-19T06:10:46 | 2019-08-19T06:10:46 | 202,756,662 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,503 | py | """idigi_data
idigi_data provides an easy way for Digi device based python apps to
push up data files to the Device Cloud server the device belongs to.
See idigi_pc.py to run on a PC.
"""
import sys
import os
sys.path.append("WEB/python/idigi_data.zip")
try:
# Device Cloud is built into Digi devices. You need to be running python on a Digi
# device with firmware support for Device Cloud to use this module.
import cwm as _idigi
except:
# idigi_pc is an optional module that allows using idigi_data (in a limited way)
# on a PC.
print 'WARNING: STUB: using idigi_pc'
import idigi_pc as _idigi
import digi_httplib as httplib
from mimetypes import guess_type
__all__= ["send_idigi_data", "send_idigi_data_with_type", "send_idigi_xml", "get_idigi_values"]
def send_idigi_data (data, filename, collection=None, secure=True):
"""
Send data to the Device Cloud server with the filename specified.
Note the filename must be specified and will be used to store the
document. If the filename already exists in the database the
existing file will be overwritten.
A file extension will be used to guess the content type using mimetypes.
For instance, file1.xml will be stored as XML. file2.jpg will be saved as
a JPEG.
`collection` is an optional paramater specifying any subcollections that
the file should be stored in. None means use the root collection for this
device. collections are specified without leading or trailing slashes ('/')
and must be separated with a slash (directory like).
Example collection: my_collections/sensors/morning_readings
By default, all data is transferred using an encrypted transfer. If
an unencrypted transfer is desired, specify `secure=False`.
Returns (success, error, errmsg):
Success:
True if successful, False if the upload failed.
error:
status of transfer. If HTTP transport, http status is returned.
Errors:
100-510 HTTP errors (see httplib.py).
10000 Data service is not available on this device
errmsg:
text associated with error
"""
this_type, encoding = guess_type(filename)
if this_type == None:
raise ValueError("File extension not recognized")
return _send_to_idigi (data, filename, collection, this_type, secure)
def send_idigi_data_with_type (data, filename, collection, content_type, secure=True):
"""
Send data to the Device Cloud server with the filename specified.
Note the filename must be specified and will be used to store the
document. If the filename already exists in the database the
existing file will be overwritten.
The content type will be used to store the file. The content must
be a valid content type. Example: `text/xml`
`collection` specifies any subcollections that the file should be
stored in. None means use the root collection for this device.
collections are specified without leading or trailing slashes
('/') and must be separated with a slash (directory like).
Example collection: my_collections/sensors/morning_readings
By default, all data is transferred using an encrypted transfer. If
an unencrypted transfer is desired, specify secure=False.
Returns (success, error, errmsg):
Success:
`True` if successful, `False` if the upload failed.
error:
status of transfer. If HTTP transport, http status is returned.
Errors:
100-510 HTTP errors (see httplib.py).
10000 Data service is not available on this device
errmsg:
text associated with error
"""
return _send_to_idigi (data, filename, collection, content_type, secure)
def send_idigi_xml (userXml, filename, collection=None, secure=True):
"""
Send the xml string userXml to the data server with the filename specified.
Note the filename must be specified and will be used to store the
document. If the filename already exists in the database the
existing file will be overwritten.
A file extension of .xml is recommended (for example: my_file.xml)
`collection` is an optional paramater specifying any
subcollections that the file should be stored in. None means use
the root collection for this device. collections are specified
without leading or trailing slashes ('/') and must be separated
with a slash (directory like).
Example collection: my_collections/sensors/morning_readings
By default, all data is transferred using an encrypted transfer.
If an unencrypted transfer is desired, specify secure=False.
Returns (success, error, errmsg):
Success:
`True` if successful, `False` if the upload failed.
error:
status of transfer. If HTTP transport, http status is returned.
Errors:
100-510 HTTP errors (see httplib.py).
10000 Data service is not available on this device
errmsg:
text associated with error
"""
this_type = 'text/xml'
return _send_to_idigi (userXml, filename, collection, this_type, secure)
def _send_to_idigi (data, filename, collection, content_type, secure=True):
if data == None or filename == None:
return False
try:
host, token, path, port, securePort = _idigi._get_ws_parms()
if secure == True:
host = "%s:%d" % (host, securePort)
else:
host = "%s:%d" % (host, port)
except:
host, token, path = _idigi._get_ws_parms()
hostSplit = host.split(":")
port = hostSplit[1]
if host == None or host[0] == ":" or token == None or path == None or \
port == None or port == 0:
err = 10000
msg = "Data Service not available, check Remote Management configuration"
return False, err, msg
if collection == None:
fullPath = path
else:
fullPath = path + "/" + collection
tout = 300 # TODO: Parameterize
if secure == True:
con = httplib.HTTPSConnection(host)
else:
con = httplib.HTTPConnection(host)
con.putrequest('PUT', '%s/%s' % (fullPath, filename))
con.putheader('Content-Type', content_type)
clen = len(data)
con.putheader('Content-Length', `clen`)
con.putheader('Authorization', 'Basic %s' % token)
con.endheaders()
con.send(data)
response = con.getresponse()
errcode = response.status
errmsg = response.reason
headers = response.msg
con.close()
if errcode != 200 and errcode != 201:
return False, errcode, errmsg
else:
return True, errcode, errmsg
def get_idigi_values():
"""\
Used to return the current runtime Device Cloud values and parameters.
"""
return _idigi._get_ws_parms()
| [
"ORBA6563@S-ORBA65630.rd.francetelecom.fr"
] | ORBA6563@S-ORBA65630.rd.francetelecom.fr |
e8637c520683003b8fa83bcc0a7a54244c0cb2aa | ff0a81d12b17a45a214380f4bdbb2360e050ff40 | /src/accounts/migrations/0001_initial.py | ee232af4db0f7eb2fcf0a2240e24198592e30880 | [] | no_license | rcmiskin10/university-marketplace | 45425275f6f7a73ab81441d613c26def2410a8e4 | 726e99153ad36d0aa38141822285f79feb910c06 | refs/heads/master | 2020-04-05T23:42:59.025790 | 2017-01-11T03:15:20 | 2017-01-11T03:15:20 | 68,738,382 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,806 | py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import datetime
from django.conf import settings
class Migration(migrations.Migration):
dependencies = [
]
operations = [
migrations.CreateModel(
name='MyUser',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('password', models.CharField(max_length=128, verbose_name='password')),
('last_login', models.DateTimeField(null=True, verbose_name='last login', blank=True)),
('email', models.EmailField(unique=True, max_length=255, verbose_name=b'email address')),
('username', models.CharField(max_length=120, null=True, blank=True)),
('first_name', models.CharField(max_length=120, null=True, blank=True)),
('last_name', models.CharField(max_length=120, null=True, blank=True)),
('is_active', models.BooleanField(default=False)),
('is_admin', models.BooleanField(default=False)),
],
options={
'abstract': False,
},
),
migrations.CreateModel(
name='UserProfile',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('activation_key', models.CharField(max_length=40, blank=True)),
('key_expires', models.DateTimeField(default=datetime.date(2016, 3, 6))),
('user', models.OneToOneField(to=settings.AUTH_USER_MODEL)),
],
options={
'verbose_name_plural': 'User profiles',
},
),
]
| [
"rcmiskin@gmail.com"
] | rcmiskin@gmail.com |
f4ae5179833a9c7f84ab735605567beee5973043 | 861a3f230b19bb5db1e34750e7c469a5989bf162 | /11.HashTable/2.jewels-and-stones/3_counter.py | d56c1f27ba1e577815a47a43ef8032f167a7adbe | [] | no_license | jana-choi/PythonCodingTest | 6ef3786c9a9c71dc749723a4db614a1833332791 | 294588392cf551a77a28d9153098c3d823f271f7 | refs/heads/master | 2022-12-24T10:49:36.712057 | 2020-09-25T01:59:58 | 2020-09-25T01:59:58 | 285,706,524 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 264 | py | from collections import Counter
def numJewelsInStones(J, S):
freqs = Counter(S)
count = 0
for char in J:
count += freqs[char]
return count
if __name__ == "__main__":
J = "aA"
S = "aAAbbbb"
print(numJewelsInStones(J, S)) | [
"lovej0918@gmail.com"
] | lovej0918@gmail.com |
d224412bcfacaefd9576997b2fd07907fabe1ad6 | 7889f7f0532db6a7f81e6f8630e399c90438b2b9 | /3.7.2/_downloads/edc9e862d9e0d115b20877b32f705afc/pcolor_demo.py | 2e17cd7db90b2f637939f4660fe465fbf44089c0 | [] | no_license | matplotlib/matplotlib.github.com | ef5d23a5bf77cb5af675f1a8273d641e410b2560 | 2a60d39490941a524e5385670d488c86083a032c | refs/heads/main | 2023-08-16T18:46:58.934777 | 2023-08-10T05:07:57 | 2023-08-10T05:08:30 | 1,385,150 | 25 | 59 | null | 2023-08-30T15:59:50 | 2011-02-19T03:27:35 | null | UTF-8 | Python | false | false | 3,626 | py | """
===========
Pcolor demo
===========
Generating images with `~.axes.Axes.pcolor`.
Pcolor allows you to generate 2D image-style plots. Below we will show how
to do so in Matplotlib.
"""
import matplotlib.pyplot as plt
import numpy as np
from matplotlib.colors import LogNorm
# Fixing random state for reproducibility
np.random.seed(19680801)
###############################################################################
# A simple pcolor demo
# --------------------
Z = np.random.rand(6, 10)
fig, (ax0, ax1) = plt.subplots(2, 1)
c = ax0.pcolor(Z)
ax0.set_title('default: no edges')
c = ax1.pcolor(Z, edgecolors='k', linewidths=4)
ax1.set_title('thick edges')
fig.tight_layout()
plt.show()
###############################################################################
# Comparing pcolor with similar functions
# ---------------------------------------
#
# Demonstrates similarities between `~.axes.Axes.pcolor`,
# `~.axes.Axes.pcolormesh`, `~.axes.Axes.imshow` and
# `~.axes.Axes.pcolorfast` for drawing quadrilateral grids.
# Note that we call ``imshow`` with ``aspect="auto"`` so that it doesn't force
# the data pixels to be square (the default is ``aspect="equal"``).
# make these smaller to increase the resolution
dx, dy = 0.15, 0.05
# generate 2 2d grids for the x & y bounds
y, x = np.mgrid[-3:3+dy:dy, -3:3+dx:dx]
z = (1 - x/2 + x**5 + y**3) * np.exp(-x**2 - y**2)
# x and y are bounds, so z should be the value *inside* those bounds.
# Therefore, remove the last value from the z array.
z = z[:-1, :-1]
z_min, z_max = -abs(z).max(), abs(z).max()
fig, axs = plt.subplots(2, 2)
ax = axs[0, 0]
c = ax.pcolor(x, y, z, cmap='RdBu', vmin=z_min, vmax=z_max)
ax.set_title('pcolor')
fig.colorbar(c, ax=ax)
ax = axs[0, 1]
c = ax.pcolormesh(x, y, z, cmap='RdBu', vmin=z_min, vmax=z_max)
ax.set_title('pcolormesh')
fig.colorbar(c, ax=ax)
ax = axs[1, 0]
c = ax.imshow(z, cmap='RdBu', vmin=z_min, vmax=z_max,
extent=[x.min(), x.max(), y.min(), y.max()],
interpolation='nearest', origin='lower', aspect='auto')
ax.set_title('image (nearest, aspect="auto")')
fig.colorbar(c, ax=ax)
ax = axs[1, 1]
c = ax.pcolorfast(x, y, z, cmap='RdBu', vmin=z_min, vmax=z_max)
ax.set_title('pcolorfast')
fig.colorbar(c, ax=ax)
fig.tight_layout()
plt.show()
###############################################################################
# Pcolor with a log scale
# -----------------------
#
# The following shows pcolor plots with a log scale.
N = 100
X, Y = np.meshgrid(np.linspace(-3, 3, N), np.linspace(-2, 2, N))
# A low hump with a spike coming out.
# Needs to have z/colour axis on a log scale, so we see both hump and spike.
# A linear scale only shows the spike.
Z1 = np.exp(-X**2 - Y**2)
Z2 = np.exp(-(X * 10)**2 - (Y * 10)**2)
Z = Z1 + 50 * Z2
fig, (ax0, ax1) = plt.subplots(2, 1)
c = ax0.pcolor(X, Y, Z, shading='auto',
norm=LogNorm(vmin=Z.min(), vmax=Z.max()), cmap='PuBu_r')
fig.colorbar(c, ax=ax0)
c = ax1.pcolor(X, Y, Z, cmap='PuBu_r', shading='auto')
fig.colorbar(c, ax=ax1)
plt.show()
#############################################################################
#
# .. admonition:: References
#
# The use of the following functions, methods, classes and modules is shown
# in this example:
#
# - `matplotlib.axes.Axes.pcolor` / `matplotlib.pyplot.pcolor`
# - `matplotlib.axes.Axes.pcolormesh` / `matplotlib.pyplot.pcolormesh`
# - `matplotlib.axes.Axes.pcolorfast`
# - `matplotlib.axes.Axes.imshow` / `matplotlib.pyplot.imshow`
# - `matplotlib.figure.Figure.colorbar` / `matplotlib.pyplot.colorbar`
# - `matplotlib.colors.LogNorm`
| [
"quantum.analyst@gmail.com"
] | quantum.analyst@gmail.com |
a46bba20a44b6de65d085978136586ecbe0b0a15 | 971e0efcc68b8f7cfb1040c38008426f7bcf9d2e | /tests/periodicities/Hour/Cycle_Hour_50_H_60.py | 3d8f5f121c5580733d8e90bcdcf73b41d655c38b | [
"BSD-3-Clause",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | antoinecarme/pyaf | a105d172c2e7544f8d580d75f28b751351dd83b6 | b12db77cb3fa9292e774b2b33db8ce732647c35e | refs/heads/master | 2023-09-01T09:30:59.967219 | 2023-07-28T20:15:53 | 2023-07-28T20:15:53 | 70,790,978 | 457 | 77 | BSD-3-Clause | 2023-03-08T21:45:40 | 2016-10-13T09:30:30 | Python | UTF-8 | Python | false | false | 81 | py | import tests.periodicities.period_test as per
per.buildModel((60 , 'H' , 50));
| [
"antoine.carme@laposte.net"
] | antoine.carme@laposte.net |
275dfa655f73d8d0dc3ad70f06d43f610209f283 | 2d5cc685fd861c16a44e6578dff659bc197d44f8 | /ioflo/aio/serial/serialing.py | f614559f296086b6d703dba667e5b8f28b1620b4 | [
"Apache-2.0",
"LicenseRef-scancode-warranty-disclaimer",
"MIT",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | dreamerparadise/ioflo | b642e3325760d124c8c608cefd3fb23c408785ff | 177ac656d7c4ff801aebb0d8b401db365a5248ce | refs/heads/master | 2023-04-03T04:05:24.934544 | 2020-11-19T22:07:49 | 2020-11-19T22:07:49 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 15,662 | py | """
Asynchronous (nonblocking) serial io
"""
from __future__ import absolute_import, division, print_function
import sys
import os
import errno
from collections import deque
# Import ioflo libs
from ...aid.sixing import *
from ...aid.consoling import getConsole
console = getConsole()
class ConsoleNb(object):
"""
Class to manage non blocking io on serial console.
Opens non blocking read file descriptor on console
Use instance method close to close file descriptor
Use instance methods getline & put to read & write to console
Needs os module
"""
def __init__(self):
"""Initialization method for instance.
"""
self.fd = None #console file descriptor needs to be opened
def open(self, port='', canonical=True):
"""
Opens fd on terminal console in non blocking mode.
port is the serial port device path name
or if '' then use os.ctermid() which
returns path name of console usually '/dev/tty'
canonical sets the mode for the port. Canonical means no characters
available until a newline
os.O_NONBLOCK makes non blocking io
os.O_RDWR allows both read and write.
os.O_NOCTTY don't make this the controlling terminal of the process
O_NOCTTY is only for cross platform portability BSD never makes it the
controlling terminal
Don't use print at same time since it will mess up non blocking reads.
Default is canonical mode so no characters available until newline
need to add code to enable non canonical mode
It appears that canonical mode only applies to the console. For other
serial ports the characters are available immediately
"""
if not port:
port = os.ctermid() #default to console
try:
self.fd = os.open(port, os.O_NONBLOCK | os.O_RDWR | os.O_NOCTTY)
except OSError as ex:
console.terse("os.error = {0}\n".format(ex))
return False
return True
def close(self):
"""Closes fd.
"""
if self.fd:
os.close(self.fd)
self.fd = None
def getLine(self,bs = 80):
"""Gets nonblocking line from console up to bs characters including newline.
Returns empty string if no characters available else returns line.
In canonical mode no chars available until newline is entered.
"""
line = ''
try:
line = os.read(self.fd, bs)
except OSError as ex1: #if no chars available generates exception
try: #need to catch correct exception
errno = ex1.args[0] #if args not sequence get TypeError
if errno == 35:
pass #No characters available
else:
raise #re raise exception ex1
except TypeError as ex2: #catch args[0] mismatch above
raise ex1 #ignore TypeError, re-raise exception ex1
return line
def put(self, data = '\n'):
"""Writes data string to console.
"""
return(os.write(self.fd, data))
class DeviceNb(object):
"""
Class to manage non blocking IO on serial device port.
Opens non blocking read file descriptor on serial port
Use instance method close to close file descriptor
Use instance methods get & put to read & write to serial device
Needs os module
"""
def __init__(self, port=None, speed=9600, bs=1024):
"""
Initialization method for instance.
port = serial device port path string
speed = serial port speed in bps
bs = buffer size for reads
"""
self.fd = None #serial device port file descriptor, must be opened first
self.port = port or os.ctermid() #default to console
self.speed = speed or 9600
self.bs = bs or 1024
self.opened = False
def open(self, port=None, speed=None, bs=None):
"""
Opens fd on serial port in non blocking mode.
port is the serial port device path name or
if '' then use os.ctermid() which
returns path name of console usually '/dev/tty'
os.O_NONBLOCK makes non blocking io
os.O_RDWR allows both read and write.
os.O_NOCTTY don't make this the controlling terminal of the process
O_NOCTTY is only for cross platform portability BSD never makes it the
controlling terminal
Don't use print and console at same time since it will mess up non blocking reads.
Raw mode
def setraw(fd, when=TCSAFLUSH):
Put terminal into a raw mode.
mode = tcgetattr(fd)
mode[IFLAG] = mode[IFLAG] & ~(BRKINT | ICRNL | INPCK | ISTRIP | IXON)
mode[OFLAG] = mode[OFLAG] & ~(OPOST)
mode[CFLAG] = mode[CFLAG] & ~(CSIZE | PARENB)
mode[CFLAG] = mode[CFLAG] | CS8
mode[LFLAG] = mode[LFLAG] & ~(ECHO | ICANON | IEXTEN | ISIG)
mode[CC][VMIN] = 1
mode[CC][VTIME] = 0
tcsetattr(fd, when, mode)
# set up raw mode / no echo / binary
cflag |= (TERMIOS.CLOCAL|TERMIOS.CREAD)
lflag &= ~(TERMIOS.ICANON|TERMIOS.ECHO|TERMIOS.ECHOE|TERMIOS.ECHOK|TERMIOS.ECHONL|
TERMIOS.ISIG|TERMIOS.IEXTEN) #|TERMIOS.ECHOPRT
for flag in ('ECHOCTL', 'ECHOKE'): # netbsd workaround for Erk
if hasattr(TERMIOS, flag):
lflag &= ~getattr(TERMIOS, flag)
oflag &= ~(TERMIOS.OPOST)
iflag &= ~(TERMIOS.INLCR|TERMIOS.IGNCR|TERMIOS.ICRNL|TERMIOS.IGNBRK)
if hasattr(TERMIOS, 'IUCLC'):
iflag &= ~TERMIOS.IUCLC
if hasattr(TERMIOS, 'PARMRK'):
iflag &= ~TERMIOS.PARMRK
"""
if port is not None:
self.port = port
if speed is not None:
self.speed = speed
if bs is not None:
self.bs = bs
self.fd = os.open(self.port, os.O_NONBLOCK | os.O_RDWR | os.O_NOCTTY)
system = platform.system()
if (system == 'Darwin') or (system == 'Linux'): #use termios to set values
import termios
iflag, oflag, cflag, lflag, ispeed, ospeed, cc = range(7)
settings = termios.tcgetattr(self.fd)
#print(settings)
settings[lflag] = (settings[lflag] & ~termios.ICANON)
settings[lflag] = (settings[lflag] & ~termios.ECHO) # no echo
#ignore carriage returns on input
#settings[iflag] = (settings[iflag] | (termios.IGNCR)) #ignore cr
# 8N1 8bit word no parity one stop bit nohardware handshake ctsrts
# to set size have to mask out(clear) CSIZE bits and or in size
settings[cflag] = ((settings[cflag] & ~termios.CSIZE) | termios.CS8)
# no parity clear PARENB
settings[cflag] = (settings[cflag] & ~termios.PARENB)
#one stop bit clear CSTOPB
settings[cflag] = (settings[cflag] & ~termios.CSTOPB)
#no hardware handshake clear crtscts
settings[cflag] = (settings[cflag] & ~termios.CRTSCTS)
# in linux the speed flag does not equal value so always set it
speedattr = "B{0}".format(self.speed) # convert numeric speed to attribute name string
speed = getattr(termios, speedattr)
settings[ispeed] = speed
settings[ospeed] = speed
termios.tcsetattr(self.fd, termios.TCSANOW, settings)
#print(settings)
self.opened = True
def reopen(self):
"""
Idempotently open serial device port
"""
self.close()
return self.open()
def close(self):
"""Closes fd.
"""
if self.fd:
os.close(self.fd)
self.fd = None
self.opened = False
def receive(self):
"""
Reads nonblocking characters from serial device up to bs characters
Returns empty bytes if no characters available else returns all available.
In canonical mode no chars are available until newline is entered.
"""
data = b''
try:
data = os.read(self.fd, self.bs) #if no chars available generates exception
except OSError as ex1: # ex1 is the target instance of the exception
if ex1.errno == errno.EAGAIN: #BSD 35, Linux 11
pass #No characters available
else:
raise #re raise exception ex1
return data
def send(self, data=b'\n'):
"""
Writes data bytes to serial device port.
Returns number of bytes sent
"""
try:
count = os.write(self.fd, data)
except OSError as ex1: # ex1 is the target instance of the exception
if ex1.errno == errno.EAGAIN: #BSD 35, Linux 11
count = 0 # buffer full can't write
else:
raise #re raise exception ex1
return count
class SerialNb(object):
"""
Class to manage non blocking IO on serial device port using pyserial
Opens non blocking read file descriptor on serial port
Use instance method close to close file descriptor
Use instance methods get & put to read & write to serial device
Needs os module
"""
def __init__(self, port=None, speed=9600, bs=1024):
"""
Initialization method for instance.
port = serial device port path string
speed = serial port speed in bps
bs = buffer size for reads
"""
self.serial = None # Serial instance
self.port = port or os.ctermid() #default to console
self.speed = speed or 9600
self.bs = bs or 1024
self.opened = False
def open(self, port=None, speed=None, bs=None):
"""
Opens fd on serial port in non blocking mode.
port is the serial port device path name or
if None then use os.ctermid() which returns path name of console
usually '/dev/tty'
"""
if port is not None:
self.port = port
if speed is not None:
self.speed = speed
if bs is not None:
self.bs = bs
import serial # import pyserial
self.serial = serial.Serial(port=self.port,
baudrate=self.speed,
timeout=0,
writeTimeout=0)
#self.serial.nonblocking()
self.serial.reset_input_buffer()
self.opened = True
def reopen(self):
"""
Idempotently open serial device port
"""
self.close()
return self.open()
def close(self):
"""
Closes .serial
"""
if self.serial:
self.serial.reset_output_buffer()
self.serial.close()
self.serial = None
self.opened = False
def receive(self):
"""
Reads nonblocking characters from serial device up to bs characters
Returns empty bytes if no characters available else returns all available.
In canonical mode no chars are available until newline is entered.
"""
data = b''
try:
data = self.serial.read(self.bs) #if no chars available generates exception
except OSError as ex1: # ex1 is the target instance of the exception
if ex1.errno == errno.EAGAIN: #BSD 35, Linux 11
pass #No characters available
else:
raise #re raise exception ex1
return data
def send(self, data=b'\n'):
"""
Writes data bytes to serial device port.
Returns number of bytes sent
"""
try:
count = self.serial.write(data)
except OSError as ex1: # ex1 is the target instance of the exception
if ex1.errno == errno.EAGAIN: #BSD 35, Linux 11
count = 0 # buffer full can't write
else:
raise #re raise exception ex1
return count
class Driver(object):
"""
Nonblocking Serial Device Port Driver
"""
def __init__(self,
name=u'',
uid=0,
port=None,
speed=9600,
bs=1024,
server=None):
"""
Initialization method for instance.
Parameters:
name = user friendly name for driver
uid = unique identifier for driver
port = serial device port path string
speed = serial port speed in bps
canonical = canonical mode True or False
bs = buffer size for reads
server = serial port device server if any
Attributes:
name = user friendly name for driver
uid = unique identifier for driver
server = serial device server nonblocking
txes = deque of data bytes to send
rxbs = bytearray of data bytes received
"""
self.name = name
self.uid = uid
if not server:
try:
import serial
self.server = SerialNb(port=port,
speed=speed,
bs=bs)
except ImportError as ex:
console.terse("Error: importing pyserial\n{0}\n".format(ex))
self.server = DeviceNb(port=port,
speed=speed,
bs=bs)
else:
self.server = server
self.txes = deque() # deque of data to send
self.rxbs = bytearray() # byte array of data received
def serviceReceives(self):
"""
Service receives until no more
"""
while self.server.opened:
data = self.server.receive() # bytes
if not data:
break
self.rxbs.extend(data)
def serviceReceiveOnce(self):
'''
Retrieve from server only one reception
'''
if self.server.opened:
data = self.server.receive()
if data:
self.rxbs.extend(data)
def clearRxbs(self):
"""
Clear .rxbs
"""
del self.rxbs[:]
def scan(self, start):
"""
Returns offset of given start byte in self.rxbs
Returns None if start is not given or not found
If strip then remove any bytes before offset
"""
offset = self.rxbs.find(start)
if offset < 0:
return None
return offset
def tx(self, data):
'''
Queue data onto .txes
'''
self.txes.append(data)
def _serviceOneTx(self):
"""
Handle one tx data
"""
data = self.txes.popleft()
count = self.server.send(data)
if count < len(data): # put back unsent portion
self.txes.appendleft(data[count:])
return False # blocked
console.profuse("{0}: Sent: {1}\n".format(self.name, data))
return True # send more
def serviceTxes(self):
"""
Service txes data
"""
while self.txes and self.server.opened:
again = self._serviceOneTx()
if not again:
break # blocked try again later
def serviceTxOnce(self):
'''
Service one data on the .txes deque to send through device
'''
if self.txes and self.server.opened:
self._serviceOneTx()
| [
"smith.samuel.m@gmail.com"
] | smith.samuel.m@gmail.com |
3a9a7eb89471a88ba39f8be086383d95256bb9ae | 8dcd3ee098b4f5b80879c37a62292f42f6b2ae17 | /venv/Lib/site-packages/pandas/tests/indexing/test_floats.py | 6adde65abed574cfb762758f4bab265e6ce2ace4 | [] | no_license | GregVargas1999/InfinityAreaInfo | 53fdfefc11c4af8f5d2b8f511f7461d11a3f7533 | 2e4a7c6a2424514ca0ec58c9153eb08dc8e09a4a | refs/heads/master | 2022-12-01T20:26:05.388878 | 2020-08-11T18:37:05 | 2020-08-11T18:37:05 | 286,821,452 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 30,009 | py | import numpy as np
import pandas._testing as tm
import pytest
from pandas import DataFrame, Float64Index, Index, Int64Index, RangeIndex, Series
class TestFloatIndexers:
def check(self, result, original, indexer, getitem):
"""
comparator for results
we need to take care if we are indexing on a
Series or a frame
"""
if isinstance(original, Series):
expected = original.iloc[indexer]
else:
if getitem:
expected = original.iloc[:, indexer]
else:
expected = original.iloc[indexer]
tm.assert_almost_equal(result, expected)
def test_scalar_error(self):
# GH 4892
# float_indexers should raise exceptions
# on appropriate Index types & accessors
# this duplicates the code below
# but is specifically testing for the error
# message
for index in [
tm.makeStringIndex,
tm.makeUnicodeIndex,
tm.makeCategoricalIndex,
tm.makeDateIndex,
tm.makeTimedeltaIndex,
tm.makePeriodIndex,
tm.makeIntIndex,
tm.makeRangeIndex,
]:
i = index(5)
s = Series(np.arange(len(i)), index=i)
msg = "Cannot index by location index"
with pytest.raises(TypeError, match=msg):
s.iloc[3.0]
msg = (
"cannot do positional indexing on {klass} with these "
r"indexers \[3\.0\] of {kind}".format(klass=type(i), kind=str(float))
)
with pytest.raises(TypeError, match=msg):
s.iloc[3.0] = 0
    def test_scalar_non_numeric(self):
        """
        A scalar float key against a non-numeric index raises on get/iloc,
        while label-based setting coerces the index to object dtype.
        """

        # GH 4892
        # float_indexers should raise exceptions
        # on appropriate Index types & accessors

        for index in [
            tm.makeStringIndex,
            tm.makeUnicodeIndex,
            tm.makeCategoricalIndex,
            tm.makeDateIndex,
            tm.makeTimedeltaIndex,
            tm.makePeriodIndex,
        ]:

            i = index(5)

            # exercise both a Series and a DataFrame built on this index
            for s in [
                Series(np.arange(len(i)), index=i),
                DataFrame(np.random.randn(len(i), len(i)), index=i, columns=i),
            ]:

                # getting with __getitem__ and with iloc
                for idxr, getitem in [(lambda x: x.iloc, False), (lambda x: x, True)]:

                    # getitem on a DataFrame is a KeyError as it is indexing
                    # via labels on the columns
                    if getitem and isinstance(s, DataFrame):
                        error = KeyError
                        msg = r"^3(\.0)?$"
                    else:
                        error = TypeError
                        msg = (
                            r"cannot do (label|index|positional) indexing "
                            r"on {klass} with these indexers \[3\.0\] of "
                            r"{kind}|"
                            "Cannot index by location index with a "
                            "non-integer key".format(klass=type(i), kind=str(float))
                        )
                    with pytest.raises(error, match=msg):
                        idxr(s)[3.0]

                # .loc: label based lookup raises TypeError or KeyError
                # depending on the index's inferred type
                if s.index.inferred_type in {
                    "categorical",
                    "string",
                    "unicode",
                    "mixed",
                }:
                    error = KeyError
                    msg = r"^3$"
                else:
                    error = TypeError
                    msg = (
                        r"cannot do (label|index) indexing "
                        r"on {klass} with these indexers \[3\.0\] of "
                        r"{kind}".format(klass=type(i), kind=str(float))
                    )
                with pytest.raises(error, match=msg):
                    s.loc[3.0]

                # contains: the float key is never a member
                assert 3.0 not in s

                # setting with a float fails with iloc
                msg = (
                    r"cannot do (label|index|positional) indexing"
                    r" on {klass} with these indexers \[3\.0\] of"
                    r" {kind}".format(klass=type(i), kind=str(float))
                )
                with pytest.raises(TypeError, match=msg):
                    s.iloc[3.0] = 0

                # setting with an indexer
                if s.index.inferred_type in ["categorical"]:
                    # Value or Type Error
                    pass
                elif s.index.inferred_type in ["datetime64", "timedelta64", "period"]:

                    # these should prob work
                    # and are inconsistent between series/dataframe ATM
                    # for idxr in [lambda x: x]:
                    #     s2 = s.copy()
                    #
                    #     with pytest.raises(TypeError):
                    #         idxr(s2)[3.0] = 0
                    pass

                else:

                    # setting a new float label enlarges and coerces the
                    # index to object dtype
                    s2 = s.copy()
                    s2.loc[3.0] = 10
                    assert s2.index.is_object()

                    for idxr in [lambda x: x]:
                        s2 = s.copy()
                        idxr(s2)[3.0] = 0
                        assert s2.index.is_object()

            # falls back to position selection, series only: an integer key
            # works positionally but a float key still raises
            s = Series(np.arange(len(i)), index=i)
            s[3]
            msg = (
                r"cannot do (label|index) indexing"
                r" on {klass} with these indexers \[3\.0\] of"
                r" {kind}".format(klass=type(i), kind=str(float))
            )
            with pytest.raises(TypeError, match=msg):
                s[3.0]
def test_scalar_with_mixed(self):
s2 = Series([1, 2, 3], index=["a", "b", "c"])
s3 = Series([1, 2, 3], index=["a", "b", 1.5])
# lookup in a pure stringstr
# with an invalid indexer
for idxr in [lambda x: x, lambda x: x.iloc]:
msg = (
r"cannot do label indexing"
r" on {klass} with these indexers \[1\.0\] of"
r" {kind}|"
"Cannot index by location index with a non-integer key".format(
klass=str(Index), kind=str(float)
)
)
with pytest.raises(TypeError, match=msg):
idxr(s2)[1.0]
with pytest.raises(KeyError, match=r"^1$"):
s2.loc[1.0]
result = s2.loc["b"]
expected = 2
assert result == expected
# mixed index so we have label
# indexing
for idxr in [lambda x: x]:
msg = (
r"cannot do label indexing"
r" on {klass} with these indexers \[1\.0\] of"
r" {kind}".format(klass=str(Index), kind=str(float))
)
with pytest.raises(TypeError, match=msg):
idxr(s3)[1.0]
result = idxr(s3)[1]
expected = 2
assert result == expected
msg = "Cannot index by location index with a non-integer key"
with pytest.raises(TypeError, match=msg):
s3.iloc[1.0]
with pytest.raises(KeyError, match=r"^1$"):
s3.loc[1.0]
result = s3.loc[1.5]
expected = 3
assert result == expected
    def test_scalar_integer(self):
        """Scalar float keys on integer indexes are coerced to the equal int."""

        # test how scalar float indexers work on int indexes

        # integer index
        for i in [Int64Index(range(5)), RangeIndex(5)]:

            for s in [
                Series(np.arange(len(i))),
                DataFrame(np.random.randn(len(i), len(i)), index=i, columns=i),
            ]:

                # getting: 3.0 is coerced to the equal int 3
                for idxr, getitem in [(lambda x: x.loc, False), (lambda x: x, True)]:

                    result = idxr(s)[3.0]
                    self.check(result, s, 3, getitem)

                # setting: 3.0 is likewise coerced to the equal int 3
                for idxr, getitem in [(lambda x: x.loc, False), (lambda x: x, True)]:

                    if isinstance(s, Series):

                        # Series lookup yields a scalar; plain equality suffices
                        def compare(x, y):
                            assert x == y

                        expected = 100
                    else:
                        compare = tm.assert_series_equal
                        if getitem:
                            # __getitem__ assignment keeps integer dtype
                            expected = Series(100, index=range(len(s)), name=3)
                        else:
                            # .loc assignment results in float dtype
                            expected = Series(100.0, index=range(len(s)), name=3)

                    s2 = s.copy()
                    idxr(s2)[3.0] = 100

                    # both the float and the int key retrieve the new value
                    result = idxr(s2)[3.0]
                    compare(result, expected)

                    result = idxr(s2)[3]
                    compare(result, expected)

                # contains
                # coerce to equal int
                assert 3.0 in s
    def test_scalar_float(self):
        """Scalar float indexers against a float index.

        Label-based access (``[]`` / ``.loc``) accepts the float label, while
        ``.iloc`` is strictly positional: it works with an integer and raises
        ``TypeError`` for any float key, on both get and set.
        """
        # scalar float indexers work on a float index
        index = Index(np.arange(5.0))
        for s in [
            Series(np.arange(len(index)), index=index),
            DataFrame(
                np.random.randn(len(index), len(index)), index=index, columns=index
            ),
        ]:
            # assert all operations except for iloc are ok
            indexer = index[3]
            for idxr, getitem in [(lambda x: x.loc, False), (lambda x: x, True)]:
                # getting
                result = idxr(s)[indexer]
                self.check(result, s, 3, getitem)
                # setting
                s2 = s.copy()
                result = idxr(s2)[indexer]
                self.check(result, s, 3, getitem)
                # random integer is a KeyError
                # (3.5 is not a label in the index, so no positional fallback)
                with pytest.raises(KeyError, match=r"^3\.5$"):
                    idxr(s)[3.5]
            # contains
            assert 3.0 in s
            # iloc succeeds with an integer
            expected = s.iloc[3]
            s2 = s.copy()
            s2.iloc[3] = expected
            result = s2.iloc[3]
            self.check(result, s, 3, False)
            # iloc raises with a float
            msg = "Cannot index by location index with a non-integer key"
            with pytest.raises(TypeError, match=msg):
                s.iloc[3.0]
            # Setting via iloc with a float key raises as well.
            msg = (
                r"cannot do positional indexing"
                r" on {klass} with these indexers \[3\.0\] of"
                r" {kind}".format(klass=str(Float64Index), kind=str(float))
            )
            with pytest.raises(TypeError, match=msg):
                s2.iloc[3.0] = 0
def test_slice_non_numeric(self):
# GH 4892
# float_indexers should raise exceptions
# on appropriate Index types & accessors
for index in [
tm.makeStringIndex,
tm.makeUnicodeIndex,
tm.makeDateIndex,
tm.makeTimedeltaIndex,
tm.makePeriodIndex,
]:
index = index(5)
for s in [
Series(range(5), index=index),
DataFrame(np.random.randn(5, 2), index=index),
]:
# getitem
for l in [slice(3.0, 4), slice(3, 4.0), slice(3.0, 4.0)]:
msg = (
"cannot do slice indexing "
r"on {klass} with these indexers \[(3|4)\.0\] of "
"{kind}".format(klass=type(index), kind=str(float))
)
with pytest.raises(TypeError, match=msg):
s.iloc[l]
for idxr in [lambda x: x.loc, lambda x: x.iloc, lambda x: x]:
msg = (
"cannot do slice indexing "
r"on {klass} with these indexers "
r"\[(3|4)(\.0)?\] "
r"of ({kind_float}|{kind_int})".format(
klass=type(index),
kind_float=str(float),
kind_int=str(int),
)
)
with pytest.raises(TypeError, match=msg):
idxr(s)[l]
# setitem
for l in [slice(3.0, 4), slice(3, 4.0), slice(3.0, 4.0)]:
msg = (
"cannot do slice indexing "
r"on {klass} with these indexers \[(3|4)\.0\] of "
"{kind}".format(klass=type(index), kind=str(float))
)
with pytest.raises(TypeError, match=msg):
s.iloc[l] = 0
for idxr in [lambda x: x.loc, lambda x: x.iloc, lambda x: x]:
msg = (
"cannot do slice indexing"
r" on {klass} with these indexers"
r" \[(3|4)(\.0)?\]"
r" of ({kind_float}|{kind_int})".format(
klass=type(index),
kind_float=str(float),
kind_int=str(int),
)
)
with pytest.raises(TypeError, match=msg):
idxr(s)[l] = 0
def test_slice_integer(self):
# same as above, but for Integer based indexes
# these coerce to a like integer
# oob indicates if we are out of bounds
# of positional indexing
for index, oob in [
(Int64Index(range(5)), False),
(RangeIndex(5), False),
(Int64Index(range(5)) + 10, True),
]:
# s is an in-range index
s = Series(range(5), index=index)
# getitem
for l in [slice(3.0, 4), slice(3, 4.0), slice(3.0, 4.0)]:
for idxr in [lambda x: x.loc]:
result = idxr(s)[l]
# these are all label indexing
# except getitem which is positional
# empty
if oob:
indexer = slice(0, 0)
else:
indexer = slice(3, 5)
self.check(result, s, indexer, False)
# positional indexing
msg = (
"cannot do slice indexing "
r"on {klass} with these indexers \[(3|4)\.0\] of "
"{kind}".format(klass=type(index), kind=str(float))
)
with pytest.raises(TypeError, match=msg):
s[l]
# getitem out-of-bounds
for l in [slice(-6, 6), slice(-6.0, 6.0)]:
for idxr in [lambda x: x.loc]:
result = idxr(s)[l]
# these are all label indexing
# except getitem which is positional
# empty
if oob:
indexer = slice(0, 0)
else:
indexer = slice(-6, 6)
self.check(result, s, indexer, False)
# positional indexing
msg = (
"cannot do slice indexing "
r"on {klass} with these indexers \[-6\.0\] of "
"{kind}".format(klass=type(index), kind=str(float))
)
with pytest.raises(TypeError, match=msg):
s[slice(-6.0, 6.0)]
# getitem odd floats
for l, res1 in [
(slice(2.5, 4), slice(3, 5)),
(slice(2, 3.5), slice(2, 4)),
(slice(2.5, 3.5), slice(3, 4)),
]:
for idxr in [lambda x: x.loc]:
result = idxr(s)[l]
if oob:
res = slice(0, 0)
else:
res = res1
self.check(result, s, res, False)
# positional indexing
msg = (
"cannot do slice indexing "
r"on {klass} with these indexers \[(2|3)\.5\] of "
"{kind}".format(klass=type(index), kind=str(float))
)
with pytest.raises(TypeError, match=msg):
s[l]
# setitem
for l in [slice(3.0, 4), slice(3, 4.0), slice(3.0, 4.0)]:
for idxr in [lambda x: x.loc]:
sc = s.copy()
idxr(sc)[l] = 0
result = idxr(sc)[l].values.ravel()
assert (result == 0).all()
# positional indexing
msg = (
"cannot do slice indexing "
r"on {klass} with these indexers \[(3|4)\.0\] of "
"{kind}".format(klass=type(index), kind=str(float))
)
with pytest.raises(TypeError, match=msg):
s[l] = 0
    def test_integer_positional_indexing(self):
        """ make sure that we are raising on positional indexing
        w.r.t. an integer index """
        # Integer slice bounds are positional here and must keep working.
        s = Series(range(2, 6), index=range(2, 6))
        result = s[2:4]
        expected = s.iloc[2:4]
        tm.assert_series_equal(result, expected)
        # Any float bound in a positional slice must raise TypeError,
        # both for plain [] and for .iloc.
        for idxr in [lambda x: x, lambda x: x.iloc]:
            for l in [slice(2, 4.0), slice(2.0, 4), slice(2.0, 4.0)]:
                klass = RangeIndex
                msg = (
                    "cannot do slice indexing "
                    r"on {klass} with these indexers \[(2|4)\.0\] of "
                    "{kind}".format(klass=str(klass), kind=str(float))
                )
                with pytest.raises(TypeError, match=msg):
                    idxr(s)[l]
def test_slice_integer_frame_getitem(self):
# similar to above, but on the getitem dim (of a DataFrame)
for index in [Int64Index(range(5)), RangeIndex(5)]:
s = DataFrame(np.random.randn(5, 2), index=index)
def f(idxr):
# getitem
for l in [slice(0.0, 1), slice(0, 1.0), slice(0.0, 1.0)]:
result = idxr(s)[l]
indexer = slice(0, 2)
self.check(result, s, indexer, False)
# positional indexing
msg = (
"cannot do slice indexing "
r"on {klass} with these indexers \[(0|1)\.0\] of "
"{kind}".format(klass=type(index), kind=str(float))
)
with pytest.raises(TypeError, match=msg):
s[l]
# getitem out-of-bounds
for l in [slice(-10, 10), slice(-10.0, 10.0)]:
result = idxr(s)[l]
self.check(result, s, slice(-10, 10), True)
# positional indexing
msg = (
"cannot do slice indexing "
r"on {klass} with these indexers \[-10\.0\] of "
"{kind}".format(klass=type(index), kind=str(float))
)
with pytest.raises(TypeError, match=msg):
s[slice(-10.0, 10.0)]
# getitem odd floats
for l, res in [
(slice(0.5, 1), slice(1, 2)),
(slice(0, 0.5), slice(0, 1)),
(slice(0.5, 1.5), slice(1, 2)),
]:
result = idxr(s)[l]
self.check(result, s, res, False)
# positional indexing
msg = (
"cannot do slice indexing "
r"on {klass} with these indexers \[0\.5\] of "
"{kind}".format(klass=type(index), kind=str(float))
)
with pytest.raises(TypeError, match=msg):
s[l]
# setitem
for l in [slice(3.0, 4), slice(3, 4.0), slice(3.0, 4.0)]:
sc = s.copy()
idxr(sc)[l] = 0
result = idxr(sc)[l].values.ravel()
assert (result == 0).all()
# positional indexing
msg = (
"cannot do slice indexing "
r"on {klass} with these indexers \[(3|4)\.0\] of "
"{kind}".format(klass=type(index), kind=str(float))
)
with pytest.raises(TypeError, match=msg):
s[l] = 0
f(lambda x: x.loc)
    def test_slice_float(self):
        """Float slices on a float index are label based.

        For both ``[]`` and ``.loc``, any mix of int/float slice bounds that
        refer to the same labels selects (and assigns to) the same rows.
        """
        # same as above, but for floats
        # +0.1 makes every label a non-integer float, so there is no
        # ambiguity with positional indexing.
        index = Index(np.arange(5.0)) + 0.1
        for s in [
            Series(range(5), index=index),
            DataFrame(np.random.randn(5, 2), index=index),
        ]:
            for l in [slice(3.0, 4), slice(3, 4.0), slice(3.0, 4.0)]:
                expected = s.iloc[3:4]
                for idxr in [lambda x: x.loc, lambda x: x]:
                    # getitem
                    result = idxr(s)[l]
                    if isinstance(s, Series):
                        tm.assert_series_equal(result, expected)
                    else:
                        tm.assert_frame_equal(result, expected)
                    # setitem: the same slice must address the same rows.
                    s2 = s.copy()
                    idxr(s2)[l] = 0
                    result = idxr(s2)[l].values.ravel()
                    assert (result == 0).all()
def test_floating_index_doc_example(self):
index = Index([1.5, 2, 3, 4.5, 5])
s = Series(range(5), index=index)
assert s[3] == 2
assert s.loc[3] == 2
assert s.loc[3] == 2
assert s.iloc[3] == 3
def test_floating_misc(self):
# related 236
# scalar/slicing of a float index
s = Series(np.arange(5), index=np.arange(5) * 2.5, dtype=np.int64)
# label based slicing
result1 = s[1.0:3.0]
result2 = s.loc[1.0:3.0]
result3 = s.loc[1.0:3.0]
tm.assert_series_equal(result1, result2)
tm.assert_series_equal(result1, result3)
# exact indexing when found
result1 = s[5.0]
result2 = s.loc[5.0]
result3 = s.loc[5.0]
assert result1 == result2
assert result1 == result3
result1 = s[5]
result2 = s.loc[5]
result3 = s.loc[5]
assert result1 == result2
assert result1 == result3
assert s[5.0] == s[5]
# value not found (and no fallbacking at all)
# scalar integers
with pytest.raises(KeyError, match=r"^4\.0$"):
s.loc[4]
with pytest.raises(KeyError, match=r"^4\.0$"):
s.loc[4]
with pytest.raises(KeyError, match=r"^4\.0$"):
s[4]
# fancy floats/integers create the correct entry (as nan)
# fancy tests
expected = Series([2, 0], index=Float64Index([5.0, 0.0]))
for fancy_idx in [[5.0, 0.0], np.array([5.0, 0.0])]: # float
tm.assert_series_equal(s[fancy_idx], expected)
tm.assert_series_equal(s.loc[fancy_idx], expected)
tm.assert_series_equal(s.loc[fancy_idx], expected)
expected = Series([2, 0], index=Index([5, 0], dtype="int64"))
for fancy_idx in [[5, 0], np.array([5, 0])]: # int
tm.assert_series_equal(s[fancy_idx], expected)
tm.assert_series_equal(s.loc[fancy_idx], expected)
tm.assert_series_equal(s.loc[fancy_idx], expected)
# all should return the same as we are slicing 'the same'
result1 = s.loc[2:5]
result2 = s.loc[2.0:5.0]
result3 = s.loc[2.0:5]
result4 = s.loc[2.1:5]
tm.assert_series_equal(result1, result2)
tm.assert_series_equal(result1, result3)
tm.assert_series_equal(result1, result4)
# previously this did fallback indexing
result1 = s[2:5]
result2 = s[2.0:5.0]
result3 = s[2.0:5]
result4 = s[2.1:5]
tm.assert_series_equal(result1, result2)
tm.assert_series_equal(result1, result3)
tm.assert_series_equal(result1, result4)
result1 = s.loc[2:5]
result2 = s.loc[2.0:5.0]
result3 = s.loc[2.0:5]
result4 = s.loc[2.1:5]
tm.assert_series_equal(result1, result2)
tm.assert_series_equal(result1, result3)
tm.assert_series_equal(result1, result4)
# combined test
result1 = s.loc[2:5]
result2 = s.loc[2:5]
result3 = s[2:5]
tm.assert_series_equal(result1, result2)
tm.assert_series_equal(result1, result3)
# list selection
result1 = s[[0.0, 5, 10]]
result2 = s.loc[[0.0, 5, 10]]
result3 = s.loc[[0.0, 5, 10]]
result4 = s.iloc[[0, 2, 4]]
tm.assert_series_equal(result1, result2)
tm.assert_series_equal(result1, result3)
tm.assert_series_equal(result1, result4)
with pytest.raises(KeyError, match="with any missing labels"):
s[[1.6, 5, 10]]
with pytest.raises(KeyError, match="with any missing labels"):
s.loc[[1.6, 5, 10]]
with pytest.raises(KeyError, match="with any missing labels"):
s[[0, 1, 2]]
with pytest.raises(KeyError, match="with any missing labels"):
s.loc[[0, 1, 2]]
result1 = s.loc[[2.5, 5]]
result2 = s.loc[[2.5, 5]]
tm.assert_series_equal(result1, result2)
tm.assert_series_equal(result1, Series([1, 2], index=[2.5, 5.0]))
result1 = s[[2.5]]
result2 = s.loc[[2.5]]
result3 = s.loc[[2.5]]
tm.assert_series_equal(result1, result2)
tm.assert_series_equal(result1, result3)
tm.assert_series_equal(result1, Series([1], index=[2.5]))
def test_floating_tuples(self):
# see gh-13509
s = Series([(1, 1), (2, 2), (3, 3)], index=[0.0, 0.1, 0.2], name="foo")
result = s[0.0]
assert result == (1, 1)
expected = Series([(1, 1), (2, 2)], index=[0.0, 0.0], name="foo")
s = Series([(1, 1), (2, 2), (3, 3)], index=[0.0, 0.0, 0.2], name="foo")
result = s[0.0]
tm.assert_series_equal(result, expected)
def test_float64index_slicing_bug(self):
# GH 5557, related to slicing a float index
ser = {
256: 2321.0,
1: 78.0,
2: 2716.0,
3: 0.0,
4: 369.0,
5: 0.0,
6: 269.0,
7: 0.0,
8: 0.0,
9: 0.0,
10: 3536.0,
11: 0.0,
12: 24.0,
13: 0.0,
14: 931.0,
15: 0.0,
16: 101.0,
17: 78.0,
18: 9643.0,
19: 0.0,
20: 0.0,
21: 0.0,
22: 63761.0,
23: 0.0,
24: 446.0,
25: 0.0,
26: 34773.0,
27: 0.0,
28: 729.0,
29: 78.0,
30: 0.0,
31: 0.0,
32: 3374.0,
33: 0.0,
34: 1391.0,
35: 0.0,
36: 361.0,
37: 0.0,
38: 61808.0,
39: 0.0,
40: 0.0,
41: 0.0,
42: 6677.0,
43: 0.0,
44: 802.0,
45: 0.0,
46: 2691.0,
47: 0.0,
48: 3582.0,
49: 0.0,
50: 734.0,
51: 0.0,
52: 627.0,
53: 70.0,
54: 2584.0,
55: 0.0,
56: 324.0,
57: 0.0,
58: 605.0,
59: 0.0,
60: 0.0,
61: 0.0,
62: 3989.0,
63: 10.0,
64: 42.0,
65: 0.0,
66: 904.0,
67: 0.0,
68: 88.0,
69: 70.0,
70: 8172.0,
71: 0.0,
72: 0.0,
73: 0.0,
74: 64902.0,
75: 0.0,
76: 347.0,
77: 0.0,
78: 36605.0,
79: 0.0,
80: 379.0,
81: 70.0,
82: 0.0,
83: 0.0,
84: 3001.0,
85: 0.0,
86: 1630.0,
87: 7.0,
88: 364.0,
89: 0.0,
90: 67404.0,
91: 9.0,
92: 0.0,
93: 0.0,
94: 7685.0,
95: 0.0,
96: 1017.0,
97: 0.0,
98: 2831.0,
99: 0.0,
100: 2963.0,
101: 0.0,
102: 854.0,
103: 0.0,
104: 0.0,
105: 0.0,
106: 0.0,
107: 0.0,
108: 0.0,
109: 0.0,
110: 0.0,
111: 0.0,
112: 0.0,
113: 0.0,
114: 0.0,
115: 0.0,
116: 0.0,
117: 0.0,
118: 0.0,
119: 0.0,
120: 0.0,
121: 0.0,
122: 0.0,
123: 0.0,
124: 0.0,
125: 0.0,
126: 67744.0,
127: 22.0,
128: 264.0,
129: 0.0,
260: 197.0,
268: 0.0,
265: 0.0,
269: 0.0,
261: 0.0,
266: 1198.0,
267: 0.0,
262: 2629.0,
258: 775.0,
257: 0.0,
263: 0.0,
259: 0.0,
264: 163.0,
250: 10326.0,
251: 0.0,
252: 1228.0,
253: 0.0,
254: 2769.0,
255: 0.0,
}
# smoke test for the repr
s = Series(ser)
result = s.value_counts()
str(result)
| [
"44142880+GregVargas1999@users.noreply.github.com"
] | 44142880+GregVargas1999@users.noreply.github.com |
78f2cb9e8e6566e1b063e3cf67098480f3c48b68 | 54049cdb26004a52689254c7b3fedff9e2c6d163 | /bloom/commands/patch/remove_cmd.py | c5da59a8c1bd50ca790ca9fa0126e0f09437b43b | [
"BSD-3-Clause"
] | permissive | hershwg/bloom | d6692e8bc63a3e95e8165fb80a75b32271b7a526 | 9fbdd6f6de52cf4263b76a93e37a730725242e93 | refs/heads/master | 2021-01-15T20:19:23.232209 | 2012-10-25T07:00:34 | 2012-10-25T07:00:34 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,525 | py | from __future__ import print_function
import sys
from argparse import ArgumentParser
from bloom.util import add_global_arguments
from bloom.util import execute_command
from bloom.util import handle_global_arguments
from bloom.logging import log_prefix
from bloom.logging import error
from bloom.logging import debug
from bloom.git import branch_exists
from bloom.git import checkout
from bloom.git import get_current_branch
from bloom.git import track_branches
from bloom.commands.patch.common import get_patch_config
@log_prefix('[git-bloom-patch remove]: ')
def remove_patches(directory=None):
# Get the current branch
current_branch = get_current_branch(directory)
# Ensure the current branch is valid
if current_branch is None:
error("Could not determine current branch, are you in a git repo?")
return 1
# Construct the patches branch
patches_branch = 'patches/' + current_branch
try:
# See if the patches branch exists
if branch_exists(patches_branch, False, directory=directory):
if not branch_exists(patches_branch, True, directory=directory):
track_branches(patches_branch, directory)
else:
error("No patches branch (" + patches_branch + ") found, cannot "
"remove patches.")
return 1
# Get the parent branch from the patches branch
config = get_patch_config(patches_branch, directory=directory)
parent, spec = config['parent'], config['base']
if None in [parent, spec]:
error("Could not retrieve patches info.")
return 1
debug("Removing patches from " + current_branch + " back to base "
"commit " + spec)
# Reset this branch using git reset --hard spec
execute_command('git reset --hard ' + spec, cwd=directory)
finally:
if current_branch:
checkout(current_branch, directory=directory)
return 0
def get_parser():
    """Returns a parser.ArgumentParser with all arguments defined"""
    description = """
Removes any applied patches from the working branch, including any un-exported
patches, so use with caution.
"""
    return ArgumentParser(description=description)
def main():
    """Entry point for ``git-bloom-patch remove``.

    Parses everything after the command verb, applies the global argument
    handling, and delegates to :func:`remove_patches`.
    """
    # Assumptions: in a git repo, this command verb was passed, argv has enough
    cli_args = sys.argv[2:]
    parser = add_global_arguments(get_parser())
    parsed = parser.parse_args(cli_args)
    handle_global_arguments(parsed)
    return remove_patches()
| [
"wjwwood@gmail.com"
] | wjwwood@gmail.com |
584b1b85b712de23c8b5e7dada6996900e1969ae | adea9fc9697f5201f4cb215571025b0493e96b25 | /napalm_yang/models/openconfig/network_instances/network_instance/mpls/signaling_protocols/rsvp_te/neighbors/__init__.py | c65e45cc12c61069514b7e373ae41569e428f2c7 | [
"Apache-2.0"
] | permissive | andyjsharp/napalm-yang | d8a8b51896ef7c6490f011fe265db46f63f54248 | ef80ebbfb50e188f09486380c88b058db673c896 | refs/heads/develop | 2021-09-09T02:09:36.151629 | 2018-03-08T22:44:04 | 2018-03-08T22:44:04 | 114,273,455 | 0 | 0 | null | 2018-03-08T22:44:05 | 2017-12-14T16:33:35 | Python | UTF-8 | Python | false | false | 10,766 | py |
from operator import attrgetter
from pyangbind.lib.yangtypes import RestrictedPrecisionDecimalType
from pyangbind.lib.yangtypes import RestrictedClassType
from pyangbind.lib.yangtypes import TypedListType
from pyangbind.lib.yangtypes import YANGBool
from pyangbind.lib.yangtypes import YANGListType
from pyangbind.lib.yangtypes import YANGDynClass
from pyangbind.lib.yangtypes import ReferenceType
from pyangbind.lib.base import PybindBase
from decimal import Decimal
from bitarray import bitarray
import six
# PY3 support of some PY2 keywords (needs improved)
if six.PY3:
import builtins as __builtin__
long = int
unicode = str
elif six.PY2:
import __builtin__
from . import neighbor
class neighbors(PybindBase):
"""
This class was auto-generated by the PythonClass plugin for PYANG
from YANG module openconfig-network-instance - based on the path /network-instances/network-instance/mpls/signaling-protocols/rsvp-te/neighbors. Each member element of
the container is represented as a class variable - with a specific
YANG type.
YANG Description: Configuration and state for RSVP neighbors connecting
to the device
"""
__slots__ = ('_pybind_generated_by', '_path_helper', '_yang_name', '_extmethods', '__neighbor',)
_yang_name = 'neighbors'
_pybind_generated_by = 'container'
def __init__(self, *args, **kwargs):
self._path_helper = False
self._extmethods = False
self.__neighbor = YANGDynClass(base=YANGListType("address",neighbor.neighbor, yang_name="neighbor", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='address', extensions=None), is_container='list', yang_name="neighbor", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='list', is_config=True)
load = kwargs.pop("load", None)
if args:
if len(args) > 1:
raise TypeError("cannot create a YANG container with >1 argument")
all_attr = True
for e in self._pyangbind_elements:
if not hasattr(args[0], e):
all_attr = False
break
if not all_attr:
raise ValueError("Supplied object did not have the correct attributes")
for e in self._pyangbind_elements:
nobj = getattr(args[0], e)
if nobj._changed() is False:
continue
setmethod = getattr(self, "_set_%s" % e)
if load is None:
setmethod(getattr(args[0], e))
else:
setmethod(getattr(args[0], e), load=load)
def _path(self):
if hasattr(self, "_parent"):
return self._parent._path()+[self._yang_name]
else:
return [u'network-instances', u'network-instance', u'mpls', u'signaling-protocols', u'rsvp-te', u'neighbors']
def _get_neighbor(self):
"""
Getter method for neighbor, mapped from YANG variable /network_instances/network_instance/mpls/signaling_protocols/rsvp_te/neighbors/neighbor (list)
YANG Description: List of RSVP neighbors of the local system
"""
return self.__neighbor
def _set_neighbor(self, v, load=False):
"""
Setter method for neighbor, mapped from YANG variable /network_instances/network_instance/mpls/signaling_protocols/rsvp_te/neighbors/neighbor (list)
If this variable is read-only (config: false) in the
source YANG file, then _set_neighbor is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_neighbor() directly.
YANG Description: List of RSVP neighbors of the local system
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=YANGListType("address",neighbor.neighbor, yang_name="neighbor", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='address', extensions=None), is_container='list', yang_name="neighbor", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='list', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """neighbor must be of a type compatible with list""",
'defined-type': "list",
'generated-type': """YANGDynClass(base=YANGListType("address",neighbor.neighbor, yang_name="neighbor", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='address', extensions=None), is_container='list', yang_name="neighbor", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='list', is_config=True)""",
})
self.__neighbor = t
if hasattr(self, '_set'):
self._set()
def _unset_neighbor(self):
self.__neighbor = YANGDynClass(base=YANGListType("address",neighbor.neighbor, yang_name="neighbor", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='address', extensions=None), is_container='list', yang_name="neighbor", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='list', is_config=True)
neighbor = __builtin__.property(_get_neighbor, _set_neighbor)
_pyangbind_elements = {'neighbor': neighbor, }
from . import neighbor
class neighbors(PybindBase):
"""
This class was auto-generated by the PythonClass plugin for PYANG
from YANG module openconfig-network-instance-l2 - based on the path /network-instances/network-instance/mpls/signaling-protocols/rsvp-te/neighbors. Each member element of
the container is represented as a class variable - with a specific
YANG type.
YANG Description: Configuration and state for RSVP neighbors connecting
to the device
"""
__slots__ = ('_pybind_generated_by', '_path_helper', '_yang_name', '_extmethods', '__neighbor',)
_yang_name = 'neighbors'
_pybind_generated_by = 'container'
def __init__(self, *args, **kwargs):
self._path_helper = False
self._extmethods = False
self.__neighbor = YANGDynClass(base=YANGListType("address",neighbor.neighbor, yang_name="neighbor", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='address', extensions=None), is_container='list', yang_name="neighbor", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='list', is_config=True)
load = kwargs.pop("load", None)
if args:
if len(args) > 1:
raise TypeError("cannot create a YANG container with >1 argument")
all_attr = True
for e in self._pyangbind_elements:
if not hasattr(args[0], e):
all_attr = False
break
if not all_attr:
raise ValueError("Supplied object did not have the correct attributes")
for e in self._pyangbind_elements:
nobj = getattr(args[0], e)
if nobj._changed() is False:
continue
setmethod = getattr(self, "_set_%s" % e)
if load is None:
setmethod(getattr(args[0], e))
else:
setmethod(getattr(args[0], e), load=load)
def _path(self):
if hasattr(self, "_parent"):
return self._parent._path()+[self._yang_name]
else:
return [u'network-instances', u'network-instance', u'mpls', u'signaling-protocols', u'rsvp-te', u'neighbors']
def _get_neighbor(self):
"""
Getter method for neighbor, mapped from YANG variable /network_instances/network_instance/mpls/signaling_protocols/rsvp_te/neighbors/neighbor (list)
YANG Description: List of RSVP neighbors of the local system
"""
return self.__neighbor
def _set_neighbor(self, v, load=False):
"""
Setter method for neighbor, mapped from YANG variable /network_instances/network_instance/mpls/signaling_protocols/rsvp_te/neighbors/neighbor (list)
If this variable is read-only (config: false) in the
source YANG file, then _set_neighbor is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_neighbor() directly.
YANG Description: List of RSVP neighbors of the local system
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=YANGListType("address",neighbor.neighbor, yang_name="neighbor", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='address', extensions=None), is_container='list', yang_name="neighbor", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='list', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """neighbor must be of a type compatible with list""",
'defined-type': "list",
'generated-type': """YANGDynClass(base=YANGListType("address",neighbor.neighbor, yang_name="neighbor", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='address', extensions=None), is_container='list', yang_name="neighbor", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='list', is_config=True)""",
})
self.__neighbor = t
if hasattr(self, '_set'):
self._set()
def _unset_neighbor(self):
self.__neighbor = YANGDynClass(base=YANGListType("address",neighbor.neighbor, yang_name="neighbor", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='address', extensions=None), is_container='list', yang_name="neighbor", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='list', is_config=True)
neighbor = __builtin__.property(_get_neighbor, _set_neighbor)
_pyangbind_elements = {'neighbor': neighbor, }
| [
"dbarrosop@dravetech.com"
] | dbarrosop@dravetech.com |
323456b2a796bba44c887fe9bdc4cce4f6cb8c09 | 365c85a280596d88082c1f150436453f96e18c15 | /Python/Daily/2029. stone_game_ix.py | f647f8a361d8cfb7d86d66e00b93c306cc7099bf | [] | no_license | Crisescode/leetcode | 0177c1ebd47b0a63476706562bcf898f35f1c4f2 | c3a60010e016995f06ad4145e174ae19668e15af | refs/heads/master | 2023-06-01T06:29:41.992368 | 2023-05-16T12:32:10 | 2023-05-16T12:32:10 | 243,040,322 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 2,553 | py | #! /usr/bin/env python
# -*- coding: utf-8 -*-
# https://leetcode-cn.com/problems/stone-game-ix/
# Alice and Bob continue their games with stones. There is a row of n stones, and each stone
# has an associated value. You are given an integer array stones, where stones[i] is the value of the ith stone.
#
# Alice and Bob take turns, with Alice starting first. On each turn, the player may remove any
# stone from stones. The player who removes a stone loses if the sum of the values of all removed
# stones is divisible by 3. Bob will win automatically if there are no remaining stones (even if it is Alice's turn).
#
# Assuming both players play optimally, return true if Alice wins and false if Bob wins.
#
"""
Example 1:
Input: stones = [2,1]
Output: true
Explanation: The game will be played as follows:
- Turn 1: Alice can remove either stone.
- Turn 2: Bob removes the remaining stone.
The sum of the removed stones is 1 + 2 = 3 and is divisible by 3. Therefore, Bob loses and Alice wins the game.
Example 2:
Input: stones = [2]
Output: false
Explanation: Alice will remove the only stone, and the sum of the values on the removed stones is 2.
Since all the stones are removed and the sum of values is not divisible by 3, Bob wins the game.
Example 3:
Input: stones = [5,1,2,4,3]
Output: false
Explanation: Bob will always win. One possible way for Bob to win is shown below:
- Turn 1: Alice can remove the second stone with value 1. Sum of removed stones = 1.
- Turn 2: Bob removes the fifth stone with value 3. Sum of removed stones = 1 + 3 = 4.
- Turn 3: Alices removes the fourth stone with value 4. Sum of removed stones = 1 + 3 + 4 = 8.
- Turn 4: Bob removes the third stone with value 2. Sum of removed stones = 1 + 3 + 4 + 2 = 10.
- Turn 5: Alice removes the first stone with value 5. Sum of removed stones = 1 + 3 + 4 + 2 + 5 = 15.
Alice loses the game because the sum of the removed stones (15) is divisible by 3. Bob wins the game.
"""
from typing import List
class Solution:
    """Stone Game IX (LeetCode 2029).

    Only the stone values modulo 3 matter: classify the stones into the
    three residue classes and decide the winner from the counts alone,
    assuming both players play optimally.
    """

    def stoneGameIX(self, stones: List[int]) -> bool:
        # Tally how many stones fall into each residue class mod 3.
        counts = [0, 0, 0]
        for value in stones:
            counts[value % 3] += 1
        zeros, ones, twos = counts
        if zeros % 2 == 0:
            # Multiples of 3 pair off without changing whose turn it is:
            # Alice wins iff both non-zero residue classes are present.
            return ones > 0 and twos > 0
        # An odd count of multiples-of-3 flips the turn parity once, so
        # Alice needs one residue class to outnumber the other by 3+.
        return abs(ones - twos) > 2
if __name__ == "__main__":
print(Solution().stoneGameIX([5, 1, 2, 4, 3]))
| [
"zhaopanp2018@outlook.com"
] | zhaopanp2018@outlook.com |
fcbc0c05426acee364789632ee62a20c419a7bbf | 703441f9d50220a552b4c638d80a20d0e1e28950 | /cmspubstyle/__init__.py | ca61476495b40e3fcaad0d9f42e6814995c1aa89 | [
"MIT"
] | permissive | raggleton/cmspubstyle | 29777241448f4fb6a88be97a3c209589527cc461 | 49a966375bd8e1378bf07b4e7476f3fceb5f43be | refs/heads/master | 2023-01-02T17:54:18.170036 | 2020-10-12T14:04:02 | 2020-10-12T14:04:02 | 137,239,255 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 73 | py | """Check CMS publications against PubComm rules"""
__version__ = "0.1.0" | [
"robin.aggleton@cern.ch"
] | robin.aggleton@cern.ch |
7c37858040a76a9843570a391d8938e7658c7749 | d936bc1d23f3ccee2e87f7a911ad7eada9147d0f | /canvas_mouse_draw_shape.py | 8ca2ae61d3758d5649e80e2bf4c5a5207376efeb | [] | no_license | mesebilisim/python-kod-bankasi | b83982fa962194b43bd3e1fbbbd5bf4ad56a6c11 | 8740cba66fd764ec8cf4174aa5e6cade49ae0af6 | refs/heads/master | 2020-07-27T21:41:35.882996 | 2015-10-13T22:55:23 | 2015-10-13T22:55:23 | 73,424,342 | 1 | 0 | null | 2016-11-10T22:00:23 | 2016-11-10T22:00:23 | null | UTF-8 | Python | false | false | 1,427 | py |
from Tkinter import *
trace = 0
class CanvasEventsDemo:
def __init__(self, parent=None):
canvas = Canvas(width=300, height=300, bg='beige')
canvas.pack()
canvas.bind('<ButtonPress-1>', self.onStart)
canvas.bind('<B1-Motion>', self.onGrow)
canvas.bind('<Double-1>', self.onClear)
canvas.bind('<ButtonPress-3>', self.onMove)
self.canvas = canvas
self.drawn = None
self.kinds = [canvas.create_oval, canvas.create_rectangle]
def onStart(self, event):
self.shape = self.kinds[0]
self.kinds = self.kinds[1:] + self.kinds[:1]
self.start = event
self.drawn = None
def onGrow(self, event):
canvas = event.widget
if self.drawn: canvas.delete(self.drawn)
objectId = self.shape(self.start.x, self.start.y, event.x, event.y)
if trace: print objectId
self.drawn = objectId
def onClear(self, event):
event.widget.delete('all')
def onMove(self, event):
if self.drawn:
if trace: print self.drawn
canvas = event.widget
diffX, diffY = (event.x - self.start.x), (event.y - self.start.y)
canvas.move(self.drawn, diffX, diffY)
self.start = event
if __name__ == '__main__':
CanvasEventsDemo()
mainloop()
| [
"electrocoder@gmail.com"
] | electrocoder@gmail.com |
680f3dc411c72dd3386788bef61685df9c7fcb4f | 3cedc7c1519d3b013aad9ec4e6a6ee7834da7589 | /appium/没整理/lesson2/toutiao1.py | 1ef5799e27844b16d15878ddd160fd41e56ce680 | [] | no_license | hzrg/songqin_course | 53437100669ee93d2ac5ecae5de938b1a4007d7f | 05e422ce34a42fd6d3819722a19252f8005e79ed | refs/heads/master | 2022-02-09T13:27:59.871400 | 2019-06-13T06:08:45 | 2019-06-13T06:08:45 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,148 | py | # coding=utf8
from appium import webdriver
import time,traceback
desired_capabilities = {}
desired_capabilities['platformName'] = 'Android'
desired_capabilities['automationName'] = 'Appium'
desired_capabilities['platformVersion'] = '5.1'
desired_capabilities['deviceName'] = '192.168.56.104:5555'
desired_capabilities['app'] = '/Users/zhoujunjun/Downloads/toutiao.apk'
desired_capabilities['appPackage'] = 'io.manong.developerdaily'
desired_capabilities['appActivity'] = 'io.toutiao.android.ui.activity.LaunchActivity'
desired_capabilities['unicodeKeyboard'] = True
desired_capabilities['noReset'] = True
desired_capabilities['newCommandTimeout'] = 6000
driver = webdriver.Remote('http://localhost:4723/wd/hub', desired_capabilities)
#driver.implicitly_wait(10)
try:
# -----------------
driver.find_element_by_class_name("android.widget.ImageButton")
time.sleep(2)
tvs = driver.find_elements_by_class_name("android.widget.TextView")
for tv in tvs:
print(tv.text)
# -----------------
except:
print(traceback.format_exc())
input('**** Press to quit..')
driver.quit() | [
"1174497735@qq.com"
] | 1174497735@qq.com |
331c3fec668136c0d08a36f50d5718da3362a69b | d170dbc754cca63e712284294b6286065fec6df5 | /fa/jquery/templates/admin/models.pt.py | a3278b8727dcae923aec797de8987774cd997501 | [] | no_license | FormAlchemy/fa.jquery | 3d2ba746a1631db32d2f83fba7c6ae9b75bb312f | 8aa4686c66cf8612fd4a5c2e7fc34e29cb8b606f | refs/heads/master | 2016-09-06T06:03:12.277972 | 2013-10-10T08:46:36 | 2013-10-10T08:46:36 | 1,256,529 | 9 | 12 | null | 2017-12-21T08:05:22 | 2011-01-15T00:55:19 | Python | UTF-8 | Python | false | false | 5,037 | py | registry = dict(version=0)
# Machine-generated Chameleon "compiled template" module (Python 2): the
# pickled strings below rebuild helper callables/attribute dicts, and the
# returned render() emits the admin models listing HTML.
# NOTE(review): the escape-replacement literals (e.g. replace('&', '&'))
# look like HTML entities that were decoded somewhere in this file's
# history (originally likely '&amp;', '&lt;', ...) -- confirm against the
# fa.jquery source before relying on the escaping behavior.
def bind():
    from cPickle import loads as _loads
    _lookup_attr = _loads('cchameleon.core.codegen\nlookup_attr\np1\n.')
    _attrs_4358996304 = _loads('(dp1\nVclass\np2\nVfa_model ui-widget-header ui-corner-all\np3\ns.')
    _init_scope = _loads('cchameleon.core.utils\necontext\np1\n.')
    _re_amp = _loads("cre\n_compile\np1\n(S'&(?!([A-Za-z]+|#[0-9]+);)'\np2\nI0\ntRp3\n.")
    _init_stream = _loads('cchameleon.core.generation\ninitialize_stream\np1\n.')
    _attrs_4358996112 = _loads('(dp1\n.')
    _attrs_4358950800 = _loads('(dp1\n.')
    _init_default = _loads('cchameleon.core.generation\ninitialize_default\np1\n.')
    _init_tal = _loads('cchameleon.core.generation\ninitialize_tal\np1\n.')
    # render() is the template entry point; the bare u'...' expression
    # statements are source annotations emitted by the compiler (no-ops).
    def render(econtext, rcontext=None):
        macros = econtext.get('macros')
        _translate = econtext.get('_translate')
        _slots = econtext.get('_slots')
        target_language = econtext.get('target_language')
        u'_init_stream()'
        (_out, _write, ) = _init_stream()
        u'_init_tal()'
        (_attributes, repeat, ) = _init_tal()
        u'_init_default()'
        _default = _init_default()
        u'None'
        default = None
        u'None'
        _domain = None
        u"main.macros['master']"
        _metal = _lookup_attr(econtext['main'], 'macros')['master']
        # Fills the 'main' METAL slot: one <div><a href=...>label</a></div>
        # per (label, url) pair from econtext['models'].items().
        def _callback_main(econtext, _repeat, _out=_out, _write=_write, _domain=_domain, **_ignored):
            if _repeat:
                repeat.update(_repeat)
            attrs = _attrs_4358950800
            u'models.items()'
            _write(u'<div>\n    ')
            _tmp1 = _lookup_attr(econtext['models'], 'items')()
            item = None
            (_tmp1, _tmp2, ) = repeat.insert('item', _tmp1)
            for item in _tmp1:
                _tmp2 = (_tmp2 - 1)
                attrs = _attrs_4358996112
                u"''"
                _write(u'<div>\n      ')
                _default.value = default = ''
                u'item[0]'
                _content = item[0]
                attrs = _attrs_4358996304
                u'item[1]'
                _write(u'<a class="fa_model ui-widget-header ui-corner-all"')
                _tmp3 = item[1]
                if (_tmp3 is _default):
                    _tmp3 = None
                if ((_tmp3 is not None) and (_tmp3 is not False)):
                    if (_tmp3.__class__ not in (str, unicode, int, float, )):
                        _tmp3 = unicode(_translate(_tmp3, domain=_domain, mapping=None, target_language=target_language, default=None))
                    else:
                        if not isinstance(_tmp3, unicode):
                            _tmp3 = str(_tmp3)
                    if ('&' in _tmp3):
                        if (';' in _tmp3):
                            _tmp3 = _re_amp.sub('&', _tmp3)
                        else:
                            _tmp3 = _tmp3.replace('&', '&')
                    if ('<' in _tmp3):
                        _tmp3 = _tmp3.replace('<', '<')
                    if ('>' in _tmp3):
                        _tmp3 = _tmp3.replace('>', '>')
                    if ('"' in _tmp3):
                        _tmp3 = _tmp3.replace('"', '"')
                    _write(((' href="' + _tmp3) + '"'))
                u'_content'
                _write('>')
                _tmp3 = _content
                _tmp = _tmp3
                if (_tmp.__class__ not in (str, unicode, int, float, )):
                    try:
                        _tmp = _tmp.__html__
                    except:
                        _tmp = _translate(_tmp, domain=_domain, mapping=None, target_language=target_language, default=None)
                    else:
                        _tmp = _tmp()
                    _write(_tmp)
                    _tmp = None
                if (_tmp is not None):
                    if not isinstance(_tmp, unicode):
                        _tmp = str(_tmp)
                    if ('&' in _tmp):
                        if (';' in _tmp):
                            _tmp = _re_amp.sub('&', _tmp)
                        else:
                            _tmp = _tmp.replace('&', '&')
                    if ('<' in _tmp):
                        _tmp = _tmp.replace('<', '<')
                    if ('>' in _tmp):
                        _tmp = _tmp.replace('>', '>')
                    _write(_tmp)
                _write(u'</a>\n    </div>')
                if (_tmp2 == 0):
                    break
                _write(' ')
            _write(u'\n  </div>\n')
        u"{'main': _callback_main}"
        _tmp = {'main': _callback_main, }
        u"main.macros['master']"
        _metal.render(_tmp, _out=_out, _write=_write, _domain=_domain, econtext=econtext)
        return _out.getvalue()
    return render
# Register the compiled template under the cache key used by Chameleon.
__filename__ = '/Users/gawel/py/formalchemy_project/fa.jquery/fa/jquery/templates/admin/models.pt'
registry[(None, True, '1488bdb950901f8f258549439ef6661a49aae984')] = bind()
| [
"gael@gawel.org"
] | gael@gawel.org |
50953bdf36c326c7d5d2cae9869a92215db15261 | a54d5a5ae5ba352963f1166a29e1bb6c867157ab | /python/test/test_good_name.py | 30030f836c9780fecc2b6f39ca94cdcb806270bf | [] | no_license | alephist/edabit-coding-challenges | 06f573e90ffbd13bc54ecbdaa8e6a225aa44f5d8 | 35f1fc84848fc44e184aae1ae231a36319c1c81e | refs/heads/main | 2023-07-30T22:39:37.468756 | 2021-09-18T07:47:02 | 2021-09-18T07:47:02 | 341,467,751 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 800 | py | import unittest
from typing import Tuple
from good_name import name_score
# (candidate name, expected rating) pairs exercised by GoodNameTestCase
# below.  NOTE(review): the rating presumably derives from summing
# per-letter scores -- confirm against good_name.name_score.
test_values: Tuple[Tuple[str, str], ...] = (
    ('MUBASHIR', "THE BEST"),
    ('MATT', "THE BEST"),
    ('PAKISTAN', "THE BEST"),
    ('AIRFORCE', "THE BEST"),
    ('GUV', 'NOT TOO GOOD'),
    ('PUBG', "NOT TOO GOOD"),
    ('ME', "PRETTY GOOD"),
    ('BOB', "PRETTY GOOD"),
    ('JLJ', 'PRETTY GOOD'),
    ('YOU', 'VERY GOOD'),
    ('FABIO', "VERY GOOD"),
    ('ROBBY', 'THE BEST'),
    ('BILL GATES', "THE BEST")
)
class GoodNameTestCase(unittest.TestCase):
    """Table-driven check of name_score() against the expected rating list."""

    def test_add_all_letters_from_name_to_get_score_rating(self):
        # Run every pair as an independent sub-test so one mismatch does
        # not hide failures in the remaining cases.
        for candidate, expected_rating in test_values:
            with self.subTest():
                actual_rating = name_score(candidate)
                self.assertEqual(actual_rating, expected_rating)
# Allow running this test module directly: `python test_good_name.py`.
if __name__ == '__main__':
    unittest.main()
| [
"justin.necesito@gmail.com"
] | justin.necesito@gmail.com |
1905fd29a2ad0d74d4ce302177cb049f49249cd9 | 2f98aa7e5bfc2fc5ef25e4d5cfa1d7802e3a7fae | /python/python_19988.py | f78479142fcdf29382cc31b81425d60aff326ea2 | [] | no_license | AK-1121/code_extraction | cc812b6832b112e3ffcc2bb7eb4237fd85c88c01 | 5297a4a3aab3bb37efa24a89636935da04a1f8b6 | refs/heads/master | 2020-05-23T08:04:11.789141 | 2015-10-22T19:19:40 | 2015-10-22T19:19:40 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 129 | py | # Trouble with aligning two y-axis ticks with matplotlib
lim1 = ax1.get_ylim()
lim2 = (lim1[0]*2, lim1[1] *2)
ax2.set_ylim(lim2)
| [
"ubuntu@ip-172-31-7-228.us-west-2.compute.internal"
] | ubuntu@ip-172-31-7-228.us-west-2.compute.internal |
2e57d9700d3d664bc7705e1dbba720f6395b5865 | c15db2bb1756ee63bab13e583ff70c18e765d575 | /drf_generators/templates/apiview.py | 3972099189a1aef1850173941c99580a581d08f9 | [
"Python-2.0",
"LicenseRef-scancode-unknown-license-reference",
"MIT"
] | permissive | tkliuxing/drf-generators | 89857c6c0d3b301cd32611f90a9d4ef8fab92d77 | 899c3f4efb3c3fe10ee582f3950bb6e48fc03350 | refs/heads/master | 2020-12-29T22:14:13.732920 | 2020-08-10T07:00:51 | 2020-08-10T07:00:51 | 238,751,406 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,324 | py |
__all__ = ['API_VIEW', 'API_URL']
API_URL = """from django.conf.urls import include, url
from . import api
urlpatterns = [
{% for model in models %}
url(r'^{{ model|lower }}/(?P<id>[0-9]+)/$', api.{{ model }}APIView.as_view()),
url(r'^{{ model|lower }}/$', api.{{ model }}APIListView.as_view()),
{% endfor %}
]
"""
API_VIEW = """from rest_framework.pagination import PageNumberPagination
from rest_framework.response import Response
from rest_framework.views import APIView
from . import serializers
from . import models
{% for model in models %}
class {{ model }}APIView(APIView):
def get(self, request, id, format=None):
try:
item = models.{{ model }}.objects.get(pk=id)
serializer = serializers.{{ model }}Serializer(item)
return Response(serializer.data)
except models.{{ model }}.DoesNotExist:
return Response(status=404)
def put(self, request, id, format=None):
try:
item = models.{{ model }}.objects.get(pk=id)
except models.{{ model }}.DoesNotExist:
return Response(status=404)
serializer = serializers.{{ model }}Serializer(item, data=request.data)
if serializer.is_valid():
serializer.save()
return Response(serializer.data)
return Response(serializer.errors, status=400)
def delete(self, request, id, format=None):
try:
item = models.{{ model }}.objects.get(pk=id)
except models.{{ model }}.DoesNotExist:
return Response(status=404)
item.delete()
return Response(status=204)
class {{ model }}APIListView(APIView):
def get(self, request, format=None):
items = models.{{ model }}.objects.order_by('pk')
paginator = PageNumberPagination()
result_page = paginator.paginate_queryset(items, request)
serializer = serializers.{{ model }}Serializer(result_page, many=True)
return paginator.get_paginated_response(serializer.data)
def post(self, request, format=None):
serializer = serializers.{{ model }}Serializer(data=request.data)
if serializer.is_valid():
serializer.save()
return Response(serializer.data, status=201)
return Response(serializer.errors, status=400)
{% endfor %}"""
| [
"ouyanghongyu@gmail.com"
] | ouyanghongyu@gmail.com |
c9adeac271c2119cfa73776e7e87d0a4969b0509 | dd3f5a712dbab0d3c4f4526c64c08ba710f78b81 | /Basic/dataStructure/structure/t06dequeue.py | e9d2f37337664d1f56c4b9a4fbfcb8b579fc4bc1 | [] | no_license | nameusea/pyGreat | 3988ebcce3f80a7e458a20f9b2e3ccba368efcf8 | dde8b6a1348620ffd3b2d65db3d5b4331e5c78be | refs/heads/master | 2023-04-25T09:02:32.831423 | 2021-05-17T11:31:22 | 2021-05-17T11:31:22 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 897 | py | class Dequeue(object):
def __init__(self):
'''初始化一个空队列'''
self.__list = []
def add_front(self, item):
'''从队列首添加'''
self.__list.insert(0, item)
def add_rear(self, item):
'''从队列尾添加'''
self.__list.append(item)
def pop_front(self):
'''从队列首删除'''
return self.__list.pop(0) # 时间复杂度O(n)
def pop_rear(self):
'''从队列尾删除'''
return self.__list.pop()
def is_empty(self):
'''是否空'''
return not self.__list
def size(self):
'''元素数量'''
return len(self.__list)
if __name__ == '__main__':
dq = Dequeue()
dq.add_front(4)
dq.add_front(5)
dq.add_rear(6)
print(dq.size())
print(dq.pop_front())
print(dq.size())
print(dq.pop_rear())
print(dq.size())
| [
"darcyzhang@DarcydeMacBook-Air.local"
] | darcyzhang@DarcydeMacBook-Air.local |
af9153c190844bd9253289db42ce6a05a332e11f | 55b89b8b18c97e71e052c7287b2424f0e8118555 | /tests/testdata/gen_manifest/getbook.py | 7e768be647e87b83342d51605e8a02354691910f | [
"Apache-2.0"
] | permissive | renovate-bot/sample-tester | a0e55bb376fefcff56a91c09454a242adfea6a8d | b3c18b333e8bd1dc2a25c58fc998845adbd6e3f0 | refs/heads/master | 2023-06-08T14:02:18.195945 | 2021-07-12T20:57:30 | 2021-07-12T20:57:30 | 198,908,002 | 0 | 0 | Apache-2.0 | 2019-07-25T22:18:59 | 2019-07-25T22:18:59 | null | UTF-8 | Python | false | false | 118 | py | # This is a mock sample
# [START getbook_sample]
# The interesting part
# [END getbook_sample]
# More boilerplate
| [
"noreply@github.com"
] | renovate-bot.noreply@github.com |
cfb30854f4250f879b33cfc6726d4d9f5f349292 | 2dffc42f8eeb1a28e6fb9f27ffea730afd912e62 | /conf.py | c1c344fc577705fb620342a8616f78139b58d456 | [] | no_license | kylemcdonald/rippleTank | 859a8889ac63741b19eda8f8d74e89aa54bb754c | d45ff5fad80dc87191b8819edda7d08105b42ec7 | refs/heads/master | 2020-09-07T17:21:23.148722 | 2017-07-21T20:35:38 | 2017-07-21T20:35:38 | 220,858,467 | 1 | 0 | null | 2019-11-10T22:15:18 | 2019-11-10T22:15:18 | null | UTF-8 | Python | false | false | 4,813 | py | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# rippletank documentation build configuration file, created by
# sphinx-quickstart on Wed Jul 5 16:35:18 2017.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
# import os
# import sys
# sys.path.insert(0, os.path.abspath('.'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = ['sphinx.ext.mathjax', 'sphinx.ext.autodoc', 'sphinx.ext.githubpages']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
#
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = 'rippletank'
copyright = '2017, Juan Barbosa'
author = 'Juan Barbosa'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '1.0'
# The full version, including alpha/beta/rc tags.
release = '1.0.0'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This patterns also effect to html_static_path and html_extra_path
exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = 'alabaster'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#
# html_theme_options = {}
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# -- Options for HTMLHelp output ------------------------------------------
# Output file base name for HTML help builder.
htmlhelp_basename = 'rippletankdoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#
# 'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#
# 'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#
# 'preamble': '',
# Latex figure (float) alignment
#
# 'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, 'rippletank.tex', 'rippletank Documentation',
'Juan Barbosa', 'manual'),
]
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(master_doc, 'rippletank', 'rippletank Documentation',
[author], 1)
]
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(master_doc, 'rippletank', 'rippletank Documentation',
author, 'rippletank', 'One line description of project.',
'Miscellaneous'),
]
| [
"js.barbosa10@uniandes.edu.co"
] | js.barbosa10@uniandes.edu.co |
216a32e87fa6cda93c871bfcf130c1a61a7e4723 | e38f7b5d46fd8a65c15e49488fc075e5c62943c9 | /pychron/lasers/tasks/panes/co2.py | 1745ef46b0037c5117b26def82274ef321ff93ef | [] | no_license | INGPAN/pychron | 3e13f9d15667e62c347f5b40af366096ee41c051 | 8592f9fc722f037a61b0b783d587633e22f11f2f | refs/heads/master | 2021-08-15T00:50:21.392117 | 2015-01-19T20:07:41 | 2015-01-19T20:07:41 | 111,054,121 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,515 | py | #===============================================================================
# Copyright 2013 Jake Ross
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#===============================================================================
#============= enthought library imports =======================
from traits.api import HasTraits
from traitsui.api import View, Item
from pychron.lasers.tasks.laser_panes import BaseLaserPane, ClientPane, \
StageControlPane, ControlPane, AxesPane
#============= standard library imports ========================
#============= local library imports ==========================
class FusionsCO2Pane(BaseLaserPane):
    """Main laser pane for the Fusions CO2 task; all behavior inherited."""
    pass
class FusionsCO2ClientPane(ClientPane):
    """Client (remote-control) pane for the Fusions CO2 laser."""
    pass
class FusionsCO2StagePane(StageControlPane):
    """Stage-control pane; `id` is the Tasks framework pane identifier."""
    id = 'pychron.fusions.co2.stage'
class FusionsCO2ControlPane(ControlPane):
    """Laser control pane; `id` is the Tasks framework pane identifier."""
    id = 'pychron.fusions.co2.control'
class FusionsCO2AxesPane(AxesPane):
    """Axes pane; `id` is the Tasks framework pane identifier."""
    id = 'pychron.fusions.co2.axes'
#============= EOF =============================================
| [
"jirhiker@gmail.com"
] | jirhiker@gmail.com |
c618ca60468f267f06da2cbd256b606154cc1254 | 6a7e9e0e9c08132166f566bd88ae1c46ff8f9c0a | /azure-cognitiveservices-language-luis/azure/cognitiveservices/language/luis/runtime/models/luis_result.py | 15b9ac3de342daddf2340f74c9e4f7d1970e7d5b | [
"MIT"
] | permissive | ashirey-msft/azure-sdk-for-python | d92381d11c48f194ec9f989f5f803db614fb73f2 | e04778e13306dad2e8fb044970215bad6296afb6 | refs/heads/master | 2020-03-23T06:05:39.283442 | 2018-09-15T00:18:26 | 2018-09-15T00:18:26 | 141,188,192 | 0 | 1 | MIT | 2018-07-16T20:02:52 | 2018-07-16T20:02:52 | null | UTF-8 | Python | false | false | 2,695 | py | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class LuisResult(Model):
    """Prediction for an input utterance: the detected intent(s), entities,
    composite entities and (when enabled) sentiment.

    :param query: The input utterance that was analyzed.
    :type query: str
    :param altered_query: The corrected utterance (when spell checking was
     enabled).
    :type altered_query: str
    :param top_scoring_intent: The highest-scoring intent.
    :type top_scoring_intent:
     ~azure.cognitiveservices.language.luis.runtime.models.IntentModel
    :param intents: All the intents (and their score) detected from the
     utterance.
    :type intents:
     list[~azure.cognitiveservices.language.luis.runtime.models.IntentModel]
    :param entities: The entities extracted from the utterance.
    :type entities:
     list[~azure.cognitiveservices.language.luis.runtime.models.EntityModel]
    :param composite_entities: The composite entities extracted from the
     utterance.
    :type composite_entities:
     list[~azure.cognitiveservices.language.luis.runtime.models.CompositeEntityModel]
    :param sentiment_analysis: Sentiment of the input utterance.
    :type sentiment_analysis:
     ~azure.cognitiveservices.language.luis.runtime.models.Sentiment
    """

    _attribute_map = {
        'query': {'key': 'query', 'type': 'str'},
        'altered_query': {'key': 'alteredQuery', 'type': 'str'},
        'top_scoring_intent': {'key': 'topScoringIntent', 'type': 'IntentModel'},
        'intents': {'key': 'intents', 'type': '[IntentModel]'},
        'entities': {'key': 'entities', 'type': '[EntityModel]'},
        'composite_entities': {'key': 'compositeEntities', 'type': '[CompositeEntityModel]'},
        'sentiment_analysis': {'key': 'sentimentAnalysis', 'type': 'Sentiment'},
    }

    def __init__(self, **kwargs):
        super(LuisResult, self).__init__(**kwargs)
        # Every field is optional; pull each from kwargs, defaulting to None.
        for field_name in ('query', 'altered_query', 'top_scoring_intent',
                           'intents', 'entities', 'composite_entities',
                           'sentiment_analysis'):
            setattr(self, field_name, kwargs.get(field_name, None))
| [
"lmazuel@microsoft.com"
] | lmazuel@microsoft.com |
8978268857118174c16dc40b067aae092d316737 | f59d9f7edacd17af7af2c8a4c900bde4fe18ba1a | /measure_extinction/utils/mock_spectra_data.py | 4ddd463f2a6db111b9657372d0b077a560140350 | [
"BSD-3-Clause"
] | permissive | karllark/measure_extinction | 8a1fead4db8be8dda44b3c4e8687fd9fd3a4d537 | 464fc09a60e0d0ee226b590542404e972b62d2e9 | refs/heads/master | 2023-05-27T06:48:32.434042 | 2023-03-06T13:40:50 | 2023-03-06T13:40:50 | 116,707,319 | 1 | 4 | null | 2023-04-28T12:40:04 | 2018-01-08T17:31:44 | Python | UTF-8 | Python | false | false | 6,858 | py | import pkg_resources
import argparse
import numpy as np
import matplotlib.pyplot as plt
from astropy.table import QTable
import astropy.units as u
__all__ = ["mock_stis_data"]
def mock_stis_single_grating(moddata, gname="G140L", applylsfs=True):
"""
Mock up a single grating STIS low resolution observation using tabulated
line spread functions (LSFs)
Parameters
----------
moddata : astropy.table
Model spectrum at high enough resolution to support "convolution" with
the LSFs
ganme : str
name of the grating to mocked
applylsfs : boolean
allows for mocking with and without the LSFs
Returns
-------
cmoddata : astropy.table
Convolved and cropped model spectrum for the grating requested
"""
if gname == "G140L":
gtags = ["G140L_1200", "G140L_1500"]
gwaves = [1200.0, 1500.0] * u.Angstrom
grange = [1118.7028617667227, 1715.2138336094122] * u.Angstrom
gdelta = 0.5831004076162168 * u.Angstrom # Angstrom/pixel
elif gname == "G230L":
gtags = ["G230L_1700", "G230L_2400"]
gwaves = [1700.0, 2400.0] * u.Angstrom
grange = [1572.0793168982548, 3155.9334544319254] * u.Angstrom
gdelta = 1.548239955089159 * u.Angstrom # Angstrom/pixel
elif gname == "G430L":
gtags = ["G430L_3200", "G430L_5500"]
gwaves = [3200.0, 5500.0] * u.Angstrom
grange = [2894.535384018087, 5704.064392633997] * u.Angstrom
gdelta = 2.7463714795193273 * u.Angstrom # Angstrom/pixel
elif gname == "G750L":
gtags = ["G750L_7000"]
gwaves = [7000.0] * u.Angstrom
grange = [5257.602037433256, 10249.424213346618] * u.Angstrom
gdelta = 4.879600079462369 * u.Angstrom # Angstrom/pixel
else:
raise ValueError(f"Grating {gname} not supported")
nlsfs = len(gtags)
data_path = pkg_resources.resource_filename("measure_extinction", "utils/STIS_LSF/")
lsfs = []
for i, ctag in enumerate(gtags):
a = QTable.read(
f"{data_path}/data/LSF_{ctag}.txt",
format="ascii.commented_header",
header_start=-1,
)
a["DELTWAVE"] = a["Rel_pixel"] * gdelta
if i > 0:
if len(lsfs[0]["DELTWAVE"]) != len(a["DELTWAVE"]):
b = QTable()
b["DELTWAVE"] = lsfs[0]["DELTWAVE"]
b["52x2.0"] = np.interp(
b["DELTWAVE"], a["DELTWAVE"], a["52x2.0"], left=0.0, right=0.0
)
a = b
lsfs.append(a)
minlsfdwave = min(lsfs[0]["DELTWAVE"])
maxlsfdwave = min(lsfs[0]["DELTWAVE"])
# crop wide to include full possible lsf range
gvals = (moddata["WAVELENGTH"] >= (grange[0] - minlsfdwave)) & (
moddata["WAVELENGTH"] <= (grange[1] + maxlsfdwave)
)
incwmoddata = moddata[:][gvals]
# convolve
outcwmoddata = moddata[:][gvals]
if applylsfs:
# for each wavelength, use average weighting with the appropriate LSF
clsfwave = lsfs[0]["DELTWAVE"]
for i, cwave in enumerate(outcwmoddata["WAVELENGTH"]):
# generate LSFs at each wavelength by interpolating/extrapolating
# from the 2 provided LSFs or just replicating a single LSFs
if nlsfs == 1:
clsf = lsfs[0]["52x2.0"]
elif nlsfs == 2:
clsf = lsfs[1]["52x2.0"] + (
(gwaves[1] - cwave) / (gwaves[1] - gwaves[0])
) * (lsfs[1]["52x2.0"] - lsfs[0]["52x2.0"])
clsfwave = lsfs[0]["DELTWAVE"] + cwave
# interpolate onto model wavelength grid
clsf_int = np.interp(
outcwmoddata["WAVELENGTH"], clsfwave, clsf, right=0.0, left=0.0
)
outcwmoddata["FLUX"][i] = np.average(incwmoddata["FLUX"], weights=clsf_int)
# crop tight to only include the expected wavelengths
gvals = (outcwmoddata["WAVELENGTH"] >= grange[0]) & (
outcwmoddata["WAVELENGTH"] <= grange[1]
)
cmoddata = QTable()
cmoddata["WAVELENGTH"] = outcwmoddata["WAVELENGTH"][gvals]
cmoddata["FLUX"] = outcwmoddata["FLUX"][gvals]
cmoddata["STAT-ERROR"] = outcwmoddata["SIGMA"][gvals]
cmoddata["SYS-ERROR"] = outcwmoddata["SIGMA"][gvals]
cmoddata["NPTS"] = outcwmoddata["NPTS"][gvals]
return cmoddata
def mock_stis_data(moddata, applylsfs=True):
    """
    Mock STIS low-resolution grating observations given a model spectrum.

    Parameters
    ----------
    moddata : astropy.table
        Model spectrum at high enough resolution to support "convolution" with
        the LSFs
    applylsfs : boolean
        allows for mocking with and without the LSFs

    Returns
    -------
    tablist : list of astropy.tables
        Each entry appropriate for one of the four low resolution gratings
    """
    # one mocked observation per low-resolution grating, in wavelength order
    gratings = ("G140L", "G230L", "G430L", "G750L")
    return [
        mock_stis_single_grating(moddata, gname=cgrating, applylsfs=applylsfs)
        for cgrating in gratings
    ]
if __name__ == "__main__":
# commandline parser
parser = argparse.ArgumentParser()
parser.add_argument("--png", help="save figure as a png file", action="store_true")
parser.add_argument("--pdf", help="save figure as a pdf file", action="store_true")
args = parser.parse_args()
moddata = QTable.read(
"/home/kgordon/Python_git/extstar_data/Models/tlusty_BT30000g300v10_full.fits"
)
fig, ax = plt.subplots(nrows=4, figsize=(18, 10))
# setup the plots
fontsize = 12
font = {"size": fontsize}
plt.rc("font", **font)
plt.rc("lines", linewidth=2)
plt.rc("axes", linewidth=2)
plt.rc("xtick.major", width=2)
plt.rc("ytick.major", width=2)
mockobs_wolsfs = mock_stis_data(moddata, applylsfs=False)
mockobs = mock_stis_data(moddata)
for i, cmockobs in enumerate(mockobs):
ax[i].plot(mockobs_wolsfs[i]["WAVELENGTH"], mockobs_wolsfs[i]["FLUX"], "k-")
# old way of doing things
# stis_fwhm_pix = 5000.0 / 1000.0
# g = Gaussian1DKernel(stddev=stis_fwhm_pix / 2.355)
# nflux = convolve(mockobs_wolsfs[i]["FLUX"].data, g)
# ax[i].plot(mockobs_wolsfs[i]["WAVELENGTH"], nflux, "r:")
ax[i].plot(cmockobs["WAVELENGTH"], cmockobs["FLUX"], "b-")
ax[i].set_ylabel("Flux")
ax[3].set_xlabel(r"$\lambda$ [$\AA$]")
fig.tight_layout()
if args.png:
fig.savefig("mock_stis_obs.png")
elif args.pdf:
fig.savefig("mock_stis_obs.pdf")
else:
plt.show()
| [
"kgordon@stsci.edu"
] | kgordon@stsci.edu |
5b9d2d32cc0987d491c966fd7fafda8d931a36aa | 09e57dd1374713f06b70d7b37a580130d9bbab0d | /data/p4VQE/R1/benchmark/startQiskit_QC76.py | 7e339b6337cf42dff01c14d045fcf73a39befa91 | [
"BSD-3-Clause"
] | permissive | UCLA-SEAL/QDiff | ad53650034897abb5941e74539e3aee8edb600ab | d968cbc47fe926b7f88b4adf10490f1edd6f8819 | refs/heads/main | 2023-08-05T04:52:24.961998 | 2021-09-19T02:56:16 | 2021-09-19T02:56:16 | 405,159,939 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,545 | py | # qubit number=3
# total number=9
import numpy as np
from qiskit import QuantumCircuit, execute, Aer, QuantumRegister, ClassicalRegister, transpile, BasicAer, IBMQ
import networkx as nx
from qiskit.visualization import plot_histogram
from typing import *
from pprint import pprint
from math import log2
from collections import Counter
from qiskit.test.mock import FakeVigo, FakeYorktown
kernel = 'circuit/bernstein'
def make_circuit(n:int) -> QuantumCircuit:
    """Build the n-qubit QAOA-style benchmark circuit.

    Relies on the module-level globals ``E`` (weighted edge list), ``V``
    (vertex array), and the angles ``gamma``/``beta`` computed in the main
    block below.

    NOTE(review): the back-to-back swap pairs (numbers 5-8) cancel each
    other; this file comes from the QDiff differential-testing benchmark,
    where such mutations appear deliberate -- do not "optimize" them away
    without checking the benchmark generator.
    NOTE(review): cp acts on qubits k-1/l-1 while p acts on k/l directly;
    possibly inconsistent indexing -- confirm against the generator.
    """
    # circuit begin
    input_qubit = QuantumRegister(n,"qc")
    prog = QuantumCircuit(input_qubit)
    # uniform superposition over all qubits
    prog.h(input_qubit[0]) # number=1
    prog.h(input_qubit[1]) # number=2
    prog.h(input_qubit[2]) # number=3
    prog.h(input_qubit[3]) # number=4
    # cost layer: one controlled-phase + single-qubit phases per edge
    for edge in E:
        k = edge[0]
        l = edge[1]
        prog.cp(-2 * gamma, input_qubit[k-1], input_qubit[l-1])
        prog.p(gamma, k)
        prog.p(gamma, l)
    # mixer layer on every qubit
    prog.rx(2 * beta, range(len(V)))
    prog.swap(input_qubit[1],input_qubit[0]) # number=5
    prog.swap(input_qubit[1],input_qubit[0]) # number=6
    prog.swap(input_qubit[2],input_qubit[0]) # number=7
    prog.swap(input_qubit[2],input_qubit[0]) # number=8
    # circuit end
    return prog
# Driver: pick (gamma, beta) maximizing the analytic QAOA objective on a
# dense grid, build the circuit, run it on the real ibmq_5_yorktown
# backend (transpiled against the FakeYorktown model), and dump counts.
if __name__ == '__main__':
    n = 4
    V = np.arange(0, n, 1)
    E = [(0, 1, 1.0), (0, 2, 1.0), (1, 2, 1.0), (3, 2, 1.0), (3, 1, 1.0)]
    G = nx.Graph()
    G.add_nodes_from(V)
    G.add_weighted_edges_from(E)
    step_size = 0.1
    # grid search over the two QAOA angles
    a_gamma = np.arange(0, np.pi, step_size)
    a_beta = np.arange(0, np.pi, step_size)
    a_gamma, a_beta = np.meshgrid(a_gamma, a_beta)
    # closed-form expectation value of the objective on this graph
    F1 = 3 - (np.sin(2 * a_beta) ** 2 * np.sin(2 * a_gamma) ** 2 - 0.5 * np.sin(4 * a_beta) * np.sin(4 * a_gamma)) * (
        1 + np.cos(4 * a_gamma) ** 2)
    # indices of the grid maximum -> optimal angles
    result = np.where(F1 == np.amax(F1))
    a = list(zip(result[0], result[1]))[0]
    gamma = a[0] * step_size
    beta = a[1] * step_size
    prog = make_circuit(4)
    sample_shot =5200
    writefile = open("../data/startQiskit_QC76.csv", "w")
    # prog.draw('mpl', filename=(kernel + '.png'))
    # requires stored IBM Quantum credentials and network access
    IBMQ.load_account()
    provider = IBMQ.get_provider(hub='ibm-q')
    provider.backends()
    backend = provider.get_backend("ibmq_5_yorktown")
    circuit1 = transpile(prog, FakeYorktown())
    circuit1.measure_all()
    prog = circuit1
    info = execute(prog,backend=backend, shots=sample_shot).result().get_counts()
    print(info, file=writefile)
    print("results end", file=writefile)
    print(circuit1.depth(), file=writefile)
    print(circuit1, file=writefile)
    writefile.close()
| [
"wangjiyuan123@yeah.net"
] | wangjiyuan123@yeah.net |
07cb116d8b5a5a623a6fa3730a1a72aecaa55cdc | ba694353a3cb1cfd02a6773b40f693386d0dba39 | /sdk/python/pulumi_google_native/cloudkms/v1/ekm_config_iam_binding.py | 34794a1a7d1dd5e219686f38c800ef77ef350cbd | [
"BSD-3-Clause",
"Apache-2.0"
] | permissive | pulumi/pulumi-google-native | cc57af8bd3d1d6b76f1f48333ed1f1b31d56f92b | 124d255e5b7f5440d1ef63c9a71e4cc1d661cd10 | refs/heads/master | 2023-08-25T00:18:00.300230 | 2023-07-20T04:25:48 | 2023-07-20T04:25:48 | 323,680,373 | 69 | 16 | Apache-2.0 | 2023-09-13T00:28:04 | 2020-12-22T16:39:01 | Python | UTF-8 | Python | false | false | 13,721 | py | # coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import copy
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from ... import _utilities
from ... import iam as _iam
__all__ = ['EkmConfigIamBindingArgs', 'EkmConfigIamBinding']
@pulumi.input_type
class EkmConfigIamBindingArgs:
    # NOTE: generated code (see the file-header warning). @pulumi.input_type
    # introspects the exact __init__ signature and the @pulumi.getter/setter
    # pairs below, so parameter and property names must stay in sync with the
    # resource schema.
    def __init__(__self__, *,
                 members: pulumi.Input[Sequence[pulumi.Input[str]]],
                 name: pulumi.Input[str],
                 role: pulumi.Input[str],
                 condition: Optional[pulumi.Input['_iam.v1.ConditionArgs']] = None):
        """
        The set of arguments for constructing a EkmConfigIamBinding resource.
        :param pulumi.Input[Sequence[pulumi.Input[str]]] members: Identities that will be granted the privilege in role. Each entry can have one of the following values:
         * user:{emailid}: An email address that represents a specific Google account. For example, alice@gmail.com or joe@example.com.
         * serviceAccount:{emailid}: An email address that represents a service account. For example, my-other-app@appspot.gserviceaccount.com.
         * group:{emailid}: An email address that represents a Google group. For example, admins@example.com.
         * domain:{domain}: A G Suite domain (primary, instead of alias) name that represents all the users of that domain. For example, google.com or example.com.
        :param pulumi.Input[str] name: The name of the resource to manage IAM policies for.
        :param pulumi.Input[str] role: The role that should be applied. Only one `IamBinding` can be used per role.
        :param pulumi.Input['_iam.v1.ConditionArgs'] condition: An IAM Condition for a given binding.
        """
        pulumi.set(__self__, "members", members)
        pulumi.set(__self__, "name", name)
        pulumi.set(__self__, "role", role)
        # condition is optional: only stored when supplied, so the engine can
        # distinguish "unset" from an explicit value.
        if condition is not None:
            pulumi.set(__self__, "condition", condition)

    @property
    @pulumi.getter
    def members(self) -> pulumi.Input[Sequence[pulumi.Input[str]]]:
        """
        Identities that will be granted the privilege in role. Each entry can have one of the following values:
         * user:{emailid}: An email address that represents a specific Google account. For example, alice@gmail.com or joe@example.com.
         * serviceAccount:{emailid}: An email address that represents a service account. For example, my-other-app@appspot.gserviceaccount.com.
         * group:{emailid}: An email address that represents a Google group. For example, admins@example.com.
         * domain:{domain}: A G Suite domain (primary, instead of alias) name that represents all the users of that domain. For example, google.com or example.com.
        """
        return pulumi.get(self, "members")

    @members.setter
    def members(self, value: pulumi.Input[Sequence[pulumi.Input[str]]]):
        pulumi.set(self, "members", value)

    @property
    @pulumi.getter
    def name(self) -> pulumi.Input[str]:
        """
        The name of the resource to manage IAM policies for.
        """
        return pulumi.get(self, "name")

    @name.setter
    def name(self, value: pulumi.Input[str]):
        pulumi.set(self, "name", value)

    @property
    @pulumi.getter
    def role(self) -> pulumi.Input[str]:
        """
        The role that should be applied. Only one `IamBinding` can be used per role.
        """
        return pulumi.get(self, "role")

    @role.setter
    def role(self, value: pulumi.Input[str]):
        pulumi.set(self, "role", value)

    @property
    @pulumi.getter
    def condition(self) -> Optional[pulumi.Input['_iam.v1.ConditionArgs']]:
        """
        An IAM Condition for a given binding.
        """
        return pulumi.get(self, "condition")

    @condition.setter
    def condition(self, value: Optional[pulumi.Input['_iam.v1.ConditionArgs']]):
        pulumi.set(self, "condition", value)
class EkmConfigIamBinding(pulumi.CustomResource):
    # NOTE: generated code (see the file-header warning). Do not hand-edit the
    # signatures: Pulumi's SDK machinery depends on the exact overload shapes
    # and on the @pulumi.getter-decorated properties below.
    @overload
    def __init__(__self__,
                 resource_name: str,
                 opts: Optional[pulumi.ResourceOptions] = None,
                 condition: Optional[pulumi.Input[pulumi.InputType['_iam.v1.ConditionArgs']]] = None,
                 members: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                 name: Optional[pulumi.Input[str]] = None,
                 role: Optional[pulumi.Input[str]] = None,
                 __props__=None):
        """
        Sets the access control policy on the specified resource. Replaces any existing policy. Can return `NOT_FOUND`, `INVALID_ARGUMENT`, and `PERMISSION_DENIED` errors.
        :param str resource_name: The name of the resource.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[pulumi.InputType['_iam.v1.ConditionArgs']] condition: An IAM Condition for a given binding.
        :param pulumi.Input[Sequence[pulumi.Input[str]]] members: Identities that will be granted the privilege in role. Each entry can have one of the following values:
         * user:{emailid}: An email address that represents a specific Google account. For example, alice@gmail.com or joe@example.com.
         * serviceAccount:{emailid}: An email address that represents a service account. For example, my-other-app@appspot.gserviceaccount.com.
         * group:{emailid}: An email address that represents a Google group. For example, admins@example.com.
         * domain:{domain}: A G Suite domain (primary, instead of alias) name that represents all the users of that domain. For example, google.com or example.com.
        :param pulumi.Input[str] name: The name of the resource to manage IAM policies for.
        :param pulumi.Input[str] role: The role that should be applied. Only one `IamBinding` can be used per role.
        """
        ...
    @overload
    def __init__(__self__,
                 resource_name: str,
                 args: EkmConfigIamBindingArgs,
                 opts: Optional[pulumi.ResourceOptions] = None):
        """
        Sets the access control policy on the specified resource. Replaces any existing policy. Can return `NOT_FOUND`, `INVALID_ARGUMENT`, and `PERMISSION_DENIED` errors.
        :param str resource_name: The name of the resource.
        :param EkmConfigIamBindingArgs args: The arguments to use to populate this resource's properties.
        :param pulumi.ResourceOptions opts: Options for the resource.
        """
        ...
    def __init__(__self__, resource_name: str, *args, **kwargs):
        # Dispatch between the two overloads above: either an args object or
        # individual keyword properties.
        resource_args, opts = _utilities.get_resource_args_opts(EkmConfigIamBindingArgs, pulumi.ResourceOptions, *args, **kwargs)
        if resource_args is not None:
            __self__._internal_init(resource_name, opts, **resource_args.__dict__)
        else:
            __self__._internal_init(resource_name, *args, **kwargs)
    def _internal_init(__self__,
                 resource_name: str,
                 opts: Optional[pulumi.ResourceOptions] = None,
                 condition: Optional[pulumi.Input[pulumi.InputType['_iam.v1.ConditionArgs']]] = None,
                 members: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                 name: Optional[pulumi.Input[str]] = None,
                 role: Optional[pulumi.Input[str]] = None,
                 __props__=None):
        # Shared constructor body: validates required inputs and registers the
        # resource with the Pulumi engine.
        opts = pulumi.ResourceOptions.merge(_utilities.get_resource_opts_defaults(), opts)
        if not isinstance(opts, pulumi.ResourceOptions):
            raise TypeError('Expected resource options to be a ResourceOptions instance')
        if opts.id is None:
            # Creating a new resource: __props__ must not be supplied and the
            # required properties must be present (unless resolving by URN).
            if __props__ is not None:
                raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
            __props__ = EkmConfigIamBindingArgs.__new__(EkmConfigIamBindingArgs)
            __props__.__dict__["condition"] = condition
            if members is None and not opts.urn:
                raise TypeError("Missing required property 'members'")
            __props__.__dict__["members"] = members
            if name is None and not opts.urn:
                raise TypeError("Missing required property 'name'")
            __props__.__dict__["name"] = name
            if role is None and not opts.urn:
                raise TypeError("Missing required property 'role'")
            __props__.__dict__["role"] = role
            # Output-only properties start unset; the engine fills them in.
            __props__.__dict__["etag"] = None
            __props__.__dict__["project"] = None
        super(EkmConfigIamBinding, __self__).__init__(
            'google-native:cloudkms/v1:EkmConfigIamBinding',
            resource_name,
            __props__,
            opts)
    @staticmethod
    def get(resource_name: str,
            id: pulumi.Input[str],
            opts: Optional[pulumi.ResourceOptions] = None) -> 'EkmConfigIamBinding':
        """
        Get an existing EkmConfigIamBinding resource's state with the given name, id, and optional extra
        properties used to qualify the lookup.
        :param str resource_name: The unique name of the resulting resource.
        :param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
        :param pulumi.ResourceOptions opts: Options for the resource.
        """
        opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
        # All properties are None here; the engine hydrates them from state.
        __props__ = EkmConfigIamBindingArgs.__new__(EkmConfigIamBindingArgs)
        __props__.__dict__["condition"] = None
        __props__.__dict__["etag"] = None
        __props__.__dict__["members"] = None
        __props__.__dict__["name"] = None
        __props__.__dict__["project"] = None
        __props__.__dict__["role"] = None
        return EkmConfigIamBinding(resource_name, opts=opts, __props__=__props__)
    @property
    @pulumi.getter
    def condition(self) -> pulumi.Output[Optional['_iam.v1.outputs.Condition']]:
        """
        An IAM Condition for a given binding. See https://cloud.google.com/iam/docs/conditions-overview for additional details.
        """
        return pulumi.get(self, "condition")
    @property
    @pulumi.getter
    def etag(self) -> pulumi.Output[str]:
        """
        The etag of the resource's IAM policy.
        """
        return pulumi.get(self, "etag")
    @property
    @pulumi.getter
    def members(self) -> pulumi.Output[Sequence[str]]:
        """
        Specifies the principals requesting access for a Google Cloud resource. `members` can have the following values:
        * `allUsers`: A special identifier that represents anyone who is on the internet; with or without a Google account.
        * `allAuthenticatedUsers`: A special identifier that represents anyone who is authenticated with a Google account or a service account. Does not include identities that come from external identity providers (IdPs) through identity federation.
        * `user:{emailid}`: An email address that represents a specific Google account. For example, `alice@example.com` .
        * `serviceAccount:{emailid}`: An email address that represents a Google service account. For example, `my-other-app@appspot.gserviceaccount.com`.
        * `serviceAccount:{projectid}.svc.id.goog[{namespace}/{kubernetes-sa}]`: An identifier for a [Kubernetes service account](https://cloud.google.com/kubernetes-engine/docs/how-to/kubernetes-service-accounts). For example, `my-project.svc.id.goog[my-namespace/my-kubernetes-sa]`.
        * `group:{emailid}`: An email address that represents a Google group. For example, `admins@example.com`.
        * `domain:{domain}`: The G Suite domain (primary) that represents all the users of that domain. For example, `google.com` or `example.com`.
        * `deleted:user:{emailid}?uid={uniqueid}`: An email address (plus unique identifier) representing a user that has been recently deleted. For example, `alice@example.com?uid=123456789012345678901`. If the user is recovered, this value reverts to `user:{emailid}` and the recovered user retains the role in the binding.
        * `deleted:serviceAccount:{emailid}?uid={uniqueid}`: An email address (plus unique identifier) representing a service account that has been recently deleted. For example, `my-other-app@appspot.gserviceaccount.com?uid=123456789012345678901`. If the service account is undeleted, this value reverts to `serviceAccount:{emailid}` and the undeleted service account retains the role in the binding.
        * `deleted:group:{emailid}?uid={uniqueid}`: An email address (plus unique identifier) representing a Google group that has been recently deleted. For example, `admins@example.com?uid=123456789012345678901`. If the group is recovered, this value reverts to `group:{emailid}` and the recovered group retains the role in the binding.
        """
        return pulumi.get(self, "members")
    @property
    @pulumi.getter
    def name(self) -> pulumi.Output[str]:
        """
        The name of the resource to manage IAM policies for.
        """
        return pulumi.get(self, "name")
    @property
    @pulumi.getter
    def project(self) -> pulumi.Output[str]:
        """
        The project in which the resource belongs. If it is not provided, a default will be supplied.
        """
        return pulumi.get(self, "project")
    @property
    @pulumi.getter
    def role(self) -> pulumi.Output[str]:
        """
        Role that is assigned to the list of `members`, or principals. For example, `roles/viewer`, `roles/editor`, or `roles/owner`.
        """
        return pulumi.get(self, "role")
| [
"noreply@github.com"
] | pulumi.noreply@github.com |
9035e78db4008b3c0bde93ae8d77a2fcd26f4761 | 935b9efca392b124d571319568c08ba45446d2a0 | /lino_book/projects/lydia/tests/dumps/18.8.0/courses_line.py | f6c815319873d98bc3450f6d0444c1bee99e56b1 | [
"BSD-2-Clause"
] | permissive | wallento/book | 6efba2baa1e42bb99514a937342000271dfe798b | 8c5a68f30f9ab65479a988608bda66ea6209afd8 | refs/heads/master | 2020-04-06T10:58:01.629671 | 2018-11-07T09:41:54 | 2018-11-07T09:41:54 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 742 | py | # -*- coding: UTF-8 -*-
# Generated Lino dump fixture: loads three rows into the courses_line table.
# NOTE(review): `logger`, `loader` and `create_courses_line` are not defined in
# this file -- presumably they are injected into the execution namespace by the
# Lino dump loader that exec()s these fixture modules; confirm before running
# standalone.
logger.info("Loading 3 objects to table courses_line...")
# fields: id, ref, name, excerpt_title, company, contact_person, contact_role, course_area, topic, description, every_unit, every, event_type, fee, guest_role, options_cat, fees_cat, body_template
loader.save(create_courses_line(1,None,['Individual therapies', '', ''],['', '', ''],None,None,None,'IT',None,['', '', ''],u'W',1,4,2,1,None,1,u''))
loader.save(create_courses_line(2,None,['Life groups', '', ''],['', '', ''],None,None,None,'LG',None,['', '', ''],u'W',1,4,2,1,None,1,u''))
loader.save(create_courses_line(3,None,['Other groups', '', ''],['', '', ''],None,None,None,'OG',None,['', '', ''],u'W',1,5,1,1,None,1,u''))
# Resolve any forward references deferred during the saves above.
loader.flush_deferred_objects()
| [
"luc.saffre@gmail.com"
] | luc.saffre@gmail.com |
b0894045935a4bd7927b3d27d1bcbe9580c0d34e | 683e5c676c02a746ba35ef6045c99983ad4b4b0c | /matomo_sdk_py/matomo_sdk_py.py | be960c84fb4b3446264311e9ee51c37069567f5a | [
"Apache-2.0"
] | permissive | autofitcloud/matomo-sdk-py | f14ec95b6f8c9b94f74dcfa3f81f90612188a0cb | 802434b125da8c9da168a18d82e8f539716df501 | refs/heads/master | 2023-05-24T21:01:30.376179 | 2019-12-04T13:26:46 | 2019-12-04T13:26:46 | 217,569,529 | 0 | 0 | Apache-2.0 | 2023-05-22T22:31:29 | 2019-10-25T16:03:40 | Python | UTF-8 | Python | false | false | 1,773 | py | import requests
SKIP_PING=False
def ping_matomo(action_name, action_base, idsite, uuid_val, matomo_url):
    """
    Gather anonymous usage statistics by POSTing one tracking request to Matomo.

    action_name - same field in matomo
    action_base - website URL in matomo, e.g. https://example.com
    idsite - integer representing ID of website in matomo
    uuid_val - matomo field "uid"
    matomo_url - URL of matomo host, eg https://example.matomo.cloud/piwik.php

    Network failures (connection errors, read timeouts) are swallowed and
    permanently disable further pings for this process, so analytics can
    never obstruct the CLI.
    """
    # If any previous attempt failed, skip pinging completely.
    global SKIP_PING  # http://stackoverflow.com/questions/423379/ddg#423596
    if SKIP_PING:
        return
    from urllib.parse import urljoin, urlencode
    # Build the absolute "page" URL tracked by this action.
    # https://stackoverflow.com/questions/9718541/reconstructing-absolute-urls-from-relative-urls-on-a-page#comment51058834_9718651
    action_url = urljoin(action_base, action_name)
    # https://stackoverflow.com/a/39144239/4126114
    req_i = {
        "idsite": idsite,
        "rec": 1,
        "action_name": action_name,
        "uid": uuid_val,
        # Matomo's visitor ID is the UID truncated to 16 characters as
        # documented. More info at:
        # https://matomo.org/docs/user-id/
        # https://developer.matomo.org/api-reference/tracking-api
        "cid": uuid_val[:16],
        "url": action_url,
    }
    # Bulk-request envelope. Use POST instead of GET to avoid arguments
    # showing up in the clear.
    # https://developer.matomo.org/api-reference/tracking-api
    payload = {"requests": ["?" + urlencode(req_i)]}
    try:
        requests.post(matomo_url, json=payload, timeout=1)  # 1 second
    except (requests.exceptions.ConnectionError,
            requests.exceptions.ReadTimeout):
        # Ignore the failure in order not to obstruct the CLI, but remember
        # it so subsequent calls return immediately.
        SKIP_PING = True
| [
"shadiakiki1986@gmail.com"
] | shadiakiki1986@gmail.com |
6717d03c5d2ec776966afa1f4b7a7c9ab42bf79e | af4f53500502faf8e0b3a97cf8f517fec80bdc5c | /tests/optimize/test_lie_algebra.py | 4b988e782beee11261baf1e81017c7aecc128de8 | [
"Apache-2.0"
] | permissive | quantshah/pennylane | c5590406674384bc6a26bd5dccc6619f9255242d | b904c966d124e66dbf82cc0b42f580db2d9a7cc1 | refs/heads/master | 2022-11-22T19:41:10.892782 | 2022-08-15T12:41:48 | 2022-08-15T12:41:48 | 355,849,206 | 0 | 0 | Apache-2.0 | 2021-04-08T09:48:31 | 2021-04-08T09:48:31 | null | UTF-8 | Python | false | false | 10,907 | py | # Copyright 2018-2021 Xanadu Quantum Technologies Inc.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Unit tests for the ``LieAlgebraOptimizer``.
"""
import pytest
from scipy.sparse.linalg import expm
import numpy as np
import pennylane as qml
from pennylane.optimize import LieAlgebraOptimizer
def circuit_1():
    """Put both qubits into superposition with a Hadamard on each wire."""
    for wire in (0, 1):
        qml.Hadamard(wires=[wire])
def circuit_2():
    """Two-qubit ansatz: single-qubit rotations, an entangling CNOT, then RY."""
    initial_rotations = ((qml.RX, 0.1, 0), (qml.RY, 0.5, 1))
    for gate, angle, wire in initial_rotations:
        gate(angle, wires=[wire])
    qml.CNOT(wires=[0, 1])
    qml.RY(0.6, wires=[0])
def circuit_3():
    """Three-qubit ansatz: an RY layer, a CNOT chain, then an RX layer."""
    for wire, angle in enumerate((0.5, 0.6, 0.7)):
        qml.RY(angle, wires=[wire])
    for control in (0, 1):
        qml.CNOT(wires=[control, control + 1])
    for wire, angle in enumerate((-0.6, -0.3, -0.2)):
        qml.RX(angle, wires=[wire])
# Module-level Hamiltonians shared by the parametrized tests below.
# hamiltonian_1: two qubits, three terms, all with coefficient -1.
hamiltonian_1 = qml.Hamiltonian(
    coeffs=[-1.0] * 3,
    observables=[qml.PauliX(0), qml.PauliZ(1), qml.PauliY(0) @ qml.PauliX(1)],
)
# hamiltonian_2: two qubits, mixed-sign coefficients.
hamiltonian_2 = qml.Hamiltonian(
    coeffs=[-0.2, 0.3, -0.15],
    observables=[
        qml.PauliY(1),
        qml.PauliZ(0) @ qml.PauliZ(1),
        qml.PauliX(0) @ qml.PauliX(1),
    ],
)
# hamiltonian_3: a single three-qubit Y⊗Y⊗Y term.
hamiltonian_3 = qml.Hamiltonian(
    coeffs=[-2.0], observables=[qml.PauliY(0) @ qml.PauliY(1) @ qml.PauliY(2)]
)
@pytest.mark.parametrize(
    "circuit,hamiltonian",
    [
        (circuit_1, hamiltonian_1),
        (circuit_1, hamiltonian_2),
        (circuit_2, hamiltonian_1),
        (circuit_2, hamiltonian_2),
        (circuit_3, hamiltonian_3),
    ],
)
def test_lie_algebra_omegas(circuit, hamiltonian):
    """Test that we calculate the Riemannian gradient coefficients Tr{[rho, H] P_j} correctly."""
    # pylint: disable=no-member
    # Number of qubits = highest wire index touched by the Hamiltonian + 1.
    nqubits = max([max(ps.wires) for ps in hamiltonian.ops]) + 1
    wires = range(nqubits)
    dev = qml.device("default.qubit", wires=nqubits)
    # QNode returning the prepared statevector |phi>.
    @qml.qnode(dev)
    def get_state():
        circuit()
        return qml.state()
    # QNode returning <phi|H|phi>; this is what the optimizer consumes.
    @qml.qnode(dev)
    def lie_circuit():
        circuit()
        return qml.expval(hamiltonian)
    phi = get_state()
    rho = np.outer(phi, phi.conj())  # density matrix |phi><phi|
    hamiltonian_np = qml.utils.sparse_hamiltonian(hamiltonian, wires).toarray()
    # Commutator [H, rho]: the (dense) Riemannian gradient direction.
    lie_algebra_np = hamiltonian_np @ rho - rho @ hamiltonian_np
    opt = LieAlgebraOptimizer(circuit=lie_circuit)
    ops = opt.get_su_n_operators(None)[0]
    # Reference coefficients: omega_j = i * Tr([H, rho] P_j) for each SU(N)
    # basis operator P_j, each expanded to the full wire register.
    omegas_np = []
    for op in ops:
        op = qml.operation.expand_matrix(op.matrix(), op.wires, wires)
        omegas_np.append(1j * np.trace(lie_algebra_np @ op))
    omegas = opt.get_omegas()
    assert np.allclose(omegas, omegas_np)
@pytest.mark.parametrize(
    "circuit,hamiltonian",
    [
        (circuit_1, hamiltonian_1),
        (circuit_1, hamiltonian_2),
        (circuit_2, hamiltonian_1),
        (circuit_2, hamiltonian_2),
        (circuit_3, hamiltonian_3),
    ],
)
def test_lie_algebra_omegas_restricted(circuit, hamiltonian):
    """Test that we calculate the (restricted) Riemannian gradient coefficients correctly."""
    # pylint: disable=no-member
    nqubits = max([max(ps.wires) for ps in hamiltonian.ops]) + 1
    wires = range(nqubits)
    dev = qml.device("default.qubit", wires=nqubits)
    # QNode returning the prepared statevector |phi>.
    @qml.qnode(dev)
    def get_state():
        circuit()
        return qml.state()
    # QNode returning <phi|H|phi>; consumed by the optimizer.
    @qml.qnode(dev)
    def lie_circuit():
        circuit()
        return qml.expval(hamiltonian)
    phi = get_state()
    rho = np.outer(phi, phi.conj())  # density matrix |phi><phi|
    hamiltonian_np = qml.utils.sparse_hamiltonian(hamiltonian, wires).toarray()
    # Commutator [H, rho].
    lie_algebra_np = hamiltonian_np @ rho - rho @ hamiltonian_np
    # Restrict the gradient to a fixed set of directions instead of the
    # full SU(N) basis.
    restriction = qml.Hamiltonian(
        coeffs=[1.0] * 3,
        observables=[qml.PauliX(0), qml.PauliY(1), qml.PauliY(0) @ qml.PauliY(1)],
    )
    opt = LieAlgebraOptimizer(circuit=lie_circuit, restriction=restriction)
    ops = opt.get_su_n_operators(restriction)[0]
    # Reference coefficients over the restricted operator set only.
    omegas_np = []
    for op in ops:
        op = qml.operation.expand_matrix(op.matrix(), op.wires, wires)
        omegas_np.append(1j * np.trace(lie_algebra_np @ op))
    omegas = opt.get_omegas()
    assert np.allclose(omegas, omegas_np)
@pytest.mark.parametrize(
    "circuit,hamiltonian",
    [
        (circuit_1, hamiltonian_1),
        (circuit_1, hamiltonian_2),
        (circuit_2, hamiltonian_1),
        (circuit_3, hamiltonian_3),
    ],
)
def test_lie_algebra_evolution(circuit, hamiltonian):
    """Test that the optimizer produces the correct unitary to append."""
    # pylint: disable=no-member
    nqubits = max([max(ps.wires) for ps in hamiltonian.ops]) + 1
    wires = range(nqubits)
    dev = qml.device("default.qubit", wires=nqubits)
    # QNode returning the prepared statevector |phi>.
    @qml.qnode(dev)
    def get_state():
        circuit()
        return qml.state()
    # QNode returning <phi|H|phi>; consumed by the optimizer.
    @qml.qnode(dev)
    def lie_circuit():
        circuit()
        return qml.expval(hamiltonian)
    phi = get_state()
    rho = np.outer(phi, phi.conj())  # density matrix |phi><phi|
    hamiltonian_np = qml.utils.sparse_hamiltonian(hamiltonian, wires).toarray()
    # Commutator [H, rho].
    lie_algebra_np = hamiltonian_np @ rho - rho @ hamiltonian_np
    # Exact one-step flow: exp(-eta [H, rho]) |phi> with stepsize eta = 0.1.
    # NOTE(review): the 2**nqubits factor presumably matches the optimizer's
    # internal normalization of the gradient -- confirm against
    # LieAlgebraOptimizer's implementation.
    phi_exact = expm(-0.1 * lie_algebra_np * 2**nqubits) @ phi
    rho_exact = np.outer(phi_exact, phi_exact.conj())
    opt = LieAlgebraOptimizer(circuit=lie_circuit, stepsize=0.1, exact=True)
    opt.step_and_cost()
    # Compare the optimizer's post-step cost to the analytic expectation.
    cost_pl = opt.circuit()
    cost_exact = np.trace(rho_exact @ hamiltonian_np)
    assert np.allclose(cost_pl, cost_exact, atol=1e-4)
@pytest.mark.parametrize(
    "circuit,hamiltonian",
    [
        (circuit_1, hamiltonian_1),
        (circuit_1, hamiltonian_2),
        (circuit_2, hamiltonian_1),
        (circuit_2, hamiltonian_2),
        (circuit_3, hamiltonian_3),
    ],
)
def test_lie_algebra_step(circuit, hamiltonian):
    """Taking two consecutive optimizer steps should not raise."""
    num_qubits = max(max(term.wires) for term in hamiltonian.ops) + 1
    device = qml.device("default.qubit", wires=num_qubits)
    @qml.qnode(device)
    def cost_circuit():
        circuit()
        return qml.expval(hamiltonian)
    optimizer = LieAlgebraOptimizer(circuit=cost_circuit)
    for _ in range(2):
        optimizer.step()
@pytest.mark.parametrize(
    "circuit,hamiltonian",
    [
        (circuit_1, hamiltonian_1),
        (circuit_1, hamiltonian_2),
        (circuit_2, hamiltonian_1),
        (circuit_2, hamiltonian_2),
        (circuit_3, hamiltonian_3),
    ],
)
def test_lie_algebra_step_trotterstep(circuit, hamiltonian):
    """Consecutive steps with a Trotterized evolution should not raise."""
    num_qubits = max(max(term.wires) for term in hamiltonian.ops) + 1
    device = qml.device("default.qubit", wires=num_qubits)
    @qml.qnode(device)
    def cost_circuit():
        circuit()
        return qml.expval(hamiltonian)
    optimizer = LieAlgebraOptimizer(circuit=cost_circuit, trottersteps=3)
    for _ in range(2):
        optimizer.step()
def test_lie_algebra_circuit_input_1_check():
    """The optimizer must reject a plain function that is not a QNode."""
    def plain_function():
        qml.RY(0.5, wires=0)
    with pytest.raises(TypeError, match="circuit must be a QNode"):
        LieAlgebraOptimizer(circuit=plain_function, stepsize=0.001)
def test_lie_algebra_hamiltonian_input_1_check():
    """The optimizer must reject a QNode that does not return a Hamiltonian expectation."""
    @qml.qnode(qml.device("default.qubit", wires=3))
    def state_circuit():
        qml.RY(0.5, wires=0)
        return qml.state()
    with pytest.raises(
        TypeError,
        match="circuit must return the expectation value of a Hamiltonian",
    ):
        LieAlgebraOptimizer(circuit=state_circuit, stepsize=0.001)
def test_lie_algebra_nqubits_check():
    """A warning must be emitted when the register is too large for exact gradients."""
    @qml.qnode(qml.device("default.qubit", wires=5))
    def wide_circuit():
        qml.RY(0.5, wires=0)
        return qml.expval(qml.Hamiltonian(coeffs=[-1.0], observables=[qml.PauliX(0)]))
    with pytest.warns(UserWarning, match="The exact Riemannian gradient is exponentially"):
        LieAlgebraOptimizer(circuit=wide_circuit, stepsize=0.001)
def test_lie_algebra_restriction_check():
    """The optimizer must reject a restriction that is not a Hamiltonian."""
    @qml.qnode(qml.device("default.qubit", wires=3))
    def expval_circuit():
        qml.RY(0.5, wires=0)
        return qml.expval(qml.Hamiltonian(coeffs=[-1.0], observables=[qml.PauliX(0)]))
    bad_restriction = "not_a_hamiltonian"
    with pytest.raises(
        TypeError,
        match="restriction must be a Hamiltonian",
    ):
        LieAlgebraOptimizer(circuit=expval_circuit,
                            restriction=bad_restriction,
                            stepsize=0.001)
def test_docstring_example():
    """Reproduce the docstring example (Trotterized evolution) and check it converges."""
    hamiltonian = qml.Hamiltonian(
        coeffs=[-1.0] * 3,
        observables=[qml.PauliX(0), qml.PauliZ(1), qml.PauliY(0) @ qml.PauliX(1)],
    )
    @qml.qnode(qml.device("default.qubit", wires=2))
    def quant_fun():
        qml.RX(0.1, wires=[0])
        qml.RY(0.5, wires=[1])
        qml.CNOT(wires=[0, 1])
        qml.RY(0.6, wires=[0])
        return qml.expval(hamiltonian)
    opt = LieAlgebraOptimizer(circuit=quant_fun, stepsize=0.1)
    cost = None
    for _ in range(12):
        circuit, cost = opt.step_and_cost()
        circuit()
    # 12 steps should reach the ground-state energy (about -sqrt(5)).
    assert np.isclose(cost, -2.236068, atol=1e-3)
def test_docstring_example_exact():
    """Same convergence check as the docstring example, using the matrix exponential."""
    hamiltonian = qml.Hamiltonian(
        coeffs=[-1.0] * 3,
        observables=[qml.PauliX(0), qml.PauliZ(1), qml.PauliY(0) @ qml.PauliX(1)],
    )
    @qml.qnode(qml.device("default.qubit", wires=2))
    def quant_fun():
        qml.RX(0.1, wires=[0])
        qml.RY(0.5, wires=[1])
        qml.CNOT(wires=[0, 1])
        qml.RY(0.6, wires=[0])
        return qml.expval(hamiltonian)
    opt = LieAlgebraOptimizer(circuit=quant_fun, stepsize=0.1, exact=True)
    cost = None
    for _ in range(12):
        circuit, cost = opt.step_and_cost()
        circuit()
    # Exact evolution should reach the same minimum (about -sqrt(5)).
    assert np.isclose(cost, -2.236068, atol=1e-3)
def test_example_shots():
    """The optimizer should run without error on a finite-shot device."""
    hamiltonian = qml.Hamiltonian(
        coeffs=[-1.0] * 3,
        observables=[qml.PauliX(0), qml.PauliZ(1), qml.PauliY(0) @ qml.PauliX(1)],
    )
    @qml.qnode(qml.device("default.qubit", wires=2, shots=1000))
    def quant_fun():
        qml.RX(0.1, wires=[0])
        qml.RY(0.5, wires=[1])
        qml.CNOT(wires=[0, 1])
        qml.RY(0.6, wires=[0])
        return qml.expval(hamiltonian)
    opt = LieAlgebraOptimizer(circuit=quant_fun, stepsize=0.1, exact=False)
    # No cost assertion: shot noise makes the value stochastic; only check
    # that stepping does not raise.
    for _ in range(3):
        opt.step_and_cost()
| [
"noreply@github.com"
] | quantshah.noreply@github.com |
8f6323614670e2e31fd6089d6d719aba5fe21293 | 82b3bcc6467c93c8b84948e7df1ec32fe4c4b004 | /WEEK 4/Day 4/python-lesson-day4/stuff.py | 7a5f26446b1d590d01b86288ca383be2de42f360 | [] | no_license | MrAch26/Developers_Institute | b7868fc94556bfced4cb53537278c3ec17991426 | 840c9710278af033ccdb5f5c3edd7a2a97476aba | refs/heads/master | 2023-03-28T19:31:11.666544 | 2021-04-06T06:54:20 | 2021-04-06T06:54:20 | 298,250,894 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 471 | py |
def test(*args):
    """Demonstrate *args packing: print the tuple type, the fifth element, then each element."""
    print(type(args))
    print(args[4])  # requires at least five positional arguments
    print("printing Args")
    for value in args:
        print(value)
# * packs items into a tuple, and unpacks into a list
# ** packs items into a dict
def kwtest(**kwargs):
    """Demonstrate **kwargs packing: print the dict type, then each key/value pair."""
    print(type(kwargs))
    print("printing KWargs")
    for key, value in kwargs.items():
        print(key, "-", value)
def packitup(*args, **kwargs):
    """Bundle all positional and keyword arguments into an (args, kwargs) pair."""
    packed = (args, kwargs)
    return packed
# Example of what packitup(1, 2, 3, name="jon", surname="spiller") returns:
# positional args packed into a tuple, keyword args packed into a dict.
# Fix: the dict keys must be string literals -- the original bare `name`
# and `surname` were undefined names and raised NameError at import time.
result = ((1, 2, 3), {"name": "jon", "surname": "spiller"})
result[0][2]  # 3
result[1]['surname']  # "spiller"
"MrAch26@users.noreply.github.com"
] | MrAch26@users.noreply.github.com |
40605fa6ce7b50c0bece515198274ad164e27a67 | 9189089752d970ced51a1c50503fce399b93f589 | /create-sw-file-tools/play_with_geonames-v2.py | c8ba486cc37a6e1a1d6455a22448a6bb495b3c04 | [] | no_license | SwannedLakee/Feynman-knowledge-engine | 464b0d187213b28d9e3995c5e27b41e216359046 | 9a09ff8041dda20dadc3368700b718266757cf24 | refs/heads/master | 2023-03-24T05:25:30.320650 | 2020-03-13T12:31:59 | 2020-03-13T12:31:59 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,121 | py | #!/usr/bin/env python3
import sys
from the_semantic_db_code import *
from the_semantic_db_functions import *
from the_semantic_db_processor import *
# Build a semantic-db context from a GeoNames dump and save it as a .sw file.
#C = context_list("geonames AU")
# is new_context() faster than context_list() ??
C = new_context("geonames US")
# NB: may need to filter down to ASCII chars.
#file = "data/ascii-just-adelaide.txt"
#file = "data/ascii-cities15000.txt"
#file = "data/short-play.txt" # yup. code seems to work!
#file = "data/AU.txt" # nope! Bugs out on non-ascii chars.
#file = "data/ascii-AU.txt" # tidied using clean-ascii.sh
#file = "data/ascii-cities15000.txt"
file = "data/ascii-US.txt" # too big for RAM for now.
#file = "data/ascii-cities1000.txt"
with open(file,'r') as f:
    for line in f:
        # print("line:",line)
        # fields = len(line.split("\t"))
        # print("fields:",fields)
        # One tab-separated GeoNames record per line (19 fields; a short line
        # would raise ValueError here). NOTE(review): `id` and `long` shadow
        # builtins -- left unchanged, but worth renaming.
        id,name,asciiname,altname,lat,long,feat_class,feat_code,country,cc2,admin1,admin2,admin3,admin4,population,elevation,dem,tz,mod_date = line.split("\t")
        # print("id: ",id)
        # print("name: ",asciiname)
        # print("lat: ",lat)
        # print("long: ",long)
        # print("country: ",country)
        # print("population:",population)
        # print("dem: ",dem)
        # print("tz: ",tz)
        # print()
        # Learn the record as a ket keyed by its GeoNames id, with one
        # learned rule per attribute of interest.
        x = ket("id: " + id)
        # C.learn("id",x,"geonameid: " + id)
        C.add_learn("id",asciiname,x)
        C.learn("name",x,asciiname)
        C.learn("latitude",x,"latitude: " + lat)
        C.learn("latitude-self",x,x.multiply(float(lat)))
        C.learn("longitude",x,"longitude: " + long)
        C.learn("longitude-self",x,x.multiply(float(long)))
        C.learn("country-code",x,"country-code: " + country)
        # Population and elevation are optional in the dump; skip empty/zero.
        if int(population) > 0:
            C.learn("population",x,"population: " + population)
            C.learn("population-self",x,x.multiply(int(population)))
        if elevation != '':
            C.learn("elevation",x,"m: " + elevation)
        if tz != '':
            C.learn("tz",x,"time-zone: " + tz)
# Persist the whole context to disk in .sw format.
name = "sw-examples/improved-geonames-us.sw"
save_sw(C,name)
# first play with profiler:
#import cProfile
#cProfile.run('save_sw(C,name)')
#print(C.dump_universe())
| [
"garry@semantic-db.org"
] | garry@semantic-db.org |
be45c52e65245d926bc6bd8a07045f441de50ede | 92137962a84e724df31b63367854349a875f1c43 | /tapis_cli/clients/basic/main.py | 24b0135901c881fa43e4e7cdef09ea4e8c6aea53 | [
"BSD-3-Clause"
] | permissive | TACC-Cloud/tapis-cli | e3a26e79a20d1ada4cb2dc9ef204cae3e385bfe7 | d34e8635d3dbacc8276cf52b6bae04caacd655de | refs/heads/main | 2023-04-08T14:47:27.707885 | 2022-02-13T17:43:26 | 2022-02-13T17:43:26 | 203,083,094 | 11 | 3 | BSD-3-Clause | 2022-04-01T20:23:23 | 2019-08-19T02:21:28 | Python | UTF-8 | Python | false | false | 1,122 | py | from ..http import HTTPFormatOne, HTTPFormatMany
__all__ = ['BasicHTTPFormatOne', 'BasicHTTPFormatMany']
def add_common_arguments(parser):
    """Attach the shared HTTP Basic auth options to *parser*.

    Adds ``--username`` and ``--password`` string options, then returns the
    same parser so calls can be chained.
    """
    auth_options = (
        ('--username', 'username', "Username"),
        ('--password', 'password', "Password"),
    )
    for flag, dest, help_text in auth_options:
        parser.add_argument(flag, dest=dest, type=str, help=help_text)
    return parser
class BasicHTTPFormatOne(HTTPFormatOne):
    """HTTP+Basic Record Display
    """
    # NOTE(review): the class docstring above is presumably surfaced as the
    # command's CLI help text by the cliff-style framework -- confirm before
    # rewording it.
    def get_parser(self, prog_name):
        # Extend the base HTTP parser with the shared Basic-auth options
        # (--username / --password).
        parser = super(BasicHTTPFormatOne, self).get_parser(prog_name)
        parser = add_common_arguments(parser)
        return parser
    def take_action(self, parsed_args):
        # Stub implementation: empty column headers and empty values.
        return ((), ())
class BasicHTTPFormatMany(HTTPFormatMany):
    """HTTP+Basic Records Listing
    """
    # NOTE(review): the class docstring above is presumably surfaced as the
    # command's CLI help text by the cliff-style framework -- confirm before
    # rewording it.
    def get_parser(self, prog_name):
        # Extend the base HTTP parser with the shared Basic-auth options
        # (--username / --password).
        parser = super(BasicHTTPFormatMany, self).get_parser(prog_name)
        parser = add_common_arguments(parser)
        return parser
    def take_action(self, parsed_args):
        # Stub implementation: empty column headers and empty rows.
        return ((), ())
| [
"vaughn@tacc.utexas.edu"
] | vaughn@tacc.utexas.edu |
6d13e399ffbaad02f5508a957a4baeefe8099831 | 214ea06bc0967044e0d5cffcc3567decc5685d1e | /rbtools/clients/tests/test_svn.py | f17f6965acd616a1361604a21b72b1817802fb83 | [
"MIT"
] | permissive | Oberon00/rbtools | e0ef888a964f57dea09082f3fcbbaea1d7ed3237 | 6272a27a43bf2e1e1af4c1a87238fccc037f8843 | refs/heads/master | 2020-05-03T23:23:13.565567 | 2019-03-21T23:36:38 | 2019-03-21T23:36:38 | 178,864,567 | 0 | 0 | MIT | 2019-04-01T13:02:35 | 2019-04-01T13:02:34 | null | UTF-8 | Python | false | false | 31,351 | py | """Unit tests for SubversionClient."""
from __future__ import unicode_literals
import json
import os
import sys
from functools import wraps
from hashlib import md5
from kgb import SpyAgency
from nose import SkipTest
from six.moves.urllib.request import urlopen
from rbtools.api.client import RBClient
from rbtools.api.tests import MockResponse
from rbtools.clients.errors import (InvalidRevisionSpecError,
TooManyRevisionsError)
from rbtools.clients.svn import SVNRepositoryInfo, SVNClient
from rbtools.clients.tests import FOO1, FOO2, FOO3, SCMClientTests
from rbtools.utils.checks import is_valid_version
from rbtools.utils.filesystem import is_exe_in_path
from rbtools.utils.process import execute
def svn_version_set_hash(svn16_hash, svn17_hash, svn19_hash):
    """Pass the version-appropriate diff hash to the wrapped test method.

    ``svn diff`` output differs between SVN 1.6, 1.7/1.8, and 1.9+ when
    diffing a working copy. This decorator inspects the installed client
    version (after priming it via ``get_repository_info()``) and forwards
    the matching expected hash as the test's extra argument.
    """
    def decorator(func):
        @wraps(func)
        def wrapper(test_self):
            test_self.client.get_repository_info()
            client_version = test_self.client.subversion_client_version
            if client_version >= (1, 9):
                chosen_hash = svn19_hash
            elif client_version >= (1, 7):
                chosen_hash = svn17_hash
            else:
                chosen_hash = svn16_hash
            return func(test_self, chosen_hash)
        return wrapper
    return decorator
class SVNRepositoryInfoTests(SpyAgency, SCMClientTests):
    """Unit tests for rbtools.clients.svn.SVNRepositoryInfo."""
    # Canned Review Board API responses keyed by request URL. These back
    # the faked urlopen() installed in setUp(), so the tests never touch
    # the network. The repository list is split across two pages to
    # exercise pagination, and repository 1 deliberately omits
    # mirror_path to emulate Review Board 1.6.
    payloads = {
        'http://localhost:8080/api/': {
            'mimetype': 'application/vnd.reviewboard.org.root+json',
            'rsp': {
                'uri_templates': {},
                'links': {
                    'self': {
                        'href': 'http://localhost:8080/api/',
                        'method': 'GET',
                    },
                    'repositories': {
                        'href': 'http://localhost:8080/api/repositories/',
                        'method': 'GET',
                    },
                },
                'stat': 'ok',
            },
        },
        'http://localhost:8080/api/repositories/?tool=Subversion': {
            'mimetype': 'application/vnd.reviewboard.org.repositories+json',
            'rsp': {
                'repositories': [
                    {
                        # This one doesn't have a mirror_path, to emulate
                        # Review Board 1.6.
                        'id': 1,
                        'name': 'SVN Repo 1',
                        'path': 'https://svn1.example.com/',
                        'links': {
                            'info': {
                                'href': ('https://localhost:8080/api/'
                                         'repositories/1/info/'),
                                'method': 'GET',
                            },
                        },
                    },
                    {
                        'id': 2,
                        'name': 'SVN Repo 2',
                        'path': 'https://svn2.example.com/',
                        'mirror_path': 'svn+ssh://svn2.example.com/',
                        'links': {
                            'info': {
                                'href': ('https://localhost:8080/api/'
                                         'repositories/2/info/'),
                                'method': 'GET',
                            },
                        },
                    },
                ],
                'links': {
                    'next': {
                        'href': ('http://localhost:8080/api/repositories/'
                                 '?tool=Subversion&page=2'),
                        'method': 'GET',
                    },
                },
                'total_results': 3,
                'stat': 'ok',
            },
        },
        'http://localhost:8080/api/repositories/?tool=Subversion&page=2': {
            'mimetype': 'application/vnd.reviewboard.org.repositories+json',
            'rsp': {
                'repositories': [
                    {
                        'id': 3,
                        'name': 'SVN Repo 3',
                        'path': 'https://svn3.example.com/',
                        'mirror_path': 'svn+ssh://svn3.example.com/',
                        'links': {
                            'info': {
                                'href': ('https://localhost:8080/api/'
                                         'repositories/3/info/'),
                                'method': 'GET',
                            },
                        },
                    },
                ],
                'total_results': 3,
                'stat': 'ok',
            },
        },
        'https://localhost:8080/api/repositories/1/info/': {
            'mimetype': 'application/vnd.reviewboard.org.repository-info+json',
            'rsp': {
                'info': {
                    'uuid': 'UUID-1',
                    'url': 'https://svn1.example.com/',
                    'root_url': 'https://svn1.example.com/',
                },
                'stat': 'ok',
            },
        },
        'https://localhost:8080/api/repositories/2/info/': {
            'mimetype': 'application/vnd.reviewboard.org.repository-info+json',
            'rsp': {
                'info': {
                    'uuid': 'UUID-2',
                    'url': 'https://svn2.example.com/',
                    'root_url': 'https://svn2.example.com/',
                },
                'stat': 'ok',
            },
        },
        'https://localhost:8080/api/repositories/3/info/': {
            'mimetype': 'application/vnd.reviewboard.org.repository-info+json',
            'rsp': {
                'info': {
                    'uuid': 'UUID-3',
                    'url': 'https://svn3.example.com/',
                    'root_url': 'https://svn3.example.com/',
                },
                'stat': 'ok',
            },
        },
    }
    def setUp(self):
        """Set up an RBClient whose HTTP layer is faked with canned payloads."""
        super(SVNRepositoryInfoTests, self).setUp()
        def _urlopen(url, **kwargs):
            # Serve the canned payload for this URL, or a Review
            # Board-style 404 JSON error for unknown URLs.
            url = url.get_full_url()
            try:
                payload = self.payloads[url]
            except KeyError:
                return MockResponse(404, {}, json.dumps({
                    'rsp': {
                        'stat': 'fail',
                        'err': {
                            'code': 100,
                            'msg': 'Object does not exist',
                        },
                    },
                }))
            return MockResponse(
                200,
                {
                    'Content-Type': payload['mimetype'],
                },
                json.dumps(payload['rsp']))
        # Intercept every HTTP request made through urlopen().
        self.spy_on(urlopen, call_fake=_urlopen)
        self.api_client = RBClient('http://localhost:8080/')
        self.root_resource = self.api_client.get_root()
    def test_find_server_repository_info_with_path_match(self):
        """Testing SVNRepositoryInfo.find_server_repository_info with
        path matching
        """
        info = SVNRepositoryInfo('https://svn1.example.com/', '/', '')
        repo_info = info.find_server_repository_info(self.root_resource)
        self.assertEqual(repo_info, info)
        self.assertEqual(repo_info.repository_id, 1)
    def test_find_server_repository_info_with_mirror_path_match(self):
        """Testing SVNRepositoryInfo.find_server_repository_info with
        mirror_path matching
        """
        info = SVNRepositoryInfo('svn+ssh://svn2.example.com/', '/', '')
        repo_info = info.find_server_repository_info(self.root_resource)
        self.assertEqual(repo_info, info)
        self.assertEqual(repo_info.repository_id, 2)
    def test_find_server_repository_info_with_uuid_match(self):
        """Testing SVNRepositoryInfo.find_server_repository_info with
        UUID matching
        """
        # The path matches nothing, so matching must fall back to the
        # repository UUID; a new info object is returned in that case.
        info = SVNRepositoryInfo('svn+ssh://blargle/', '/', 'UUID-3')
        repo_info = info.find_server_repository_info(self.root_resource)
        self.assertNotEqual(repo_info, info)
        self.assertEqual(repo_info.repository_id, 3)
    def test_relative_paths(self):
        """Testing SVNRepositoryInfo._get_relative_path"""
        info = SVNRepositoryInfo('http://svn.example.com/svn/', '/', '')
        self.assertEqual(info._get_relative_path('/foo', '/bar'), None)
        self.assertEqual(info._get_relative_path('/', '/trunk/myproject'),
                         None)
        self.assertEqual(info._get_relative_path('/trunk/myproject', '/'),
                         '/trunk/myproject')
        self.assertEqual(
            info._get_relative_path('/trunk/myproject', ''),
            '/trunk/myproject')
        self.assertEqual(
            info._get_relative_path('/trunk/myproject', '/trunk'),
            '/myproject')
        self.assertEqual(
            info._get_relative_path('/trunk/myproject', '/trunk/myproject'),
            '/')
class SVNClientTests(SCMClientTests):
    """Unit tests for rbtools.clients.svn.SVNClient.

    Each test runs against a fresh working copy checked out from the
    testdata SVN repository; tests are skipped when no ``svn`` binary is
    available in the path.
    """
    def setUp(self):
        """Check out a fresh working copy of the testdata SVN repository."""
        super(SVNClientTests, self).setUp()
        if not is_exe_in_path('svn'):
            raise SkipTest('svn not found in path')
        self.svn_dir = os.path.join(self.testdata_dir, 'svn-repo')
        self.clone_dir = self.chdir_tmp()
        self.svn_repo_url = 'file://' + self.svn_dir
        self._run_svn(['co', self.svn_repo_url, 'svn-repo'])
        os.chdir(os.path.join(self.clone_dir, 'svn-repo'))
        self.client = SVNClient(options=self.options)
        self.options.svn_show_copies_as_adds = None
    def _run_svn(self, command):
        """Run an svn command, returning its output."""
        return execute(['svn'] + command, env=None, split_lines=False,
                       ignore_errors=False, extra_ignore_errors=())
    def _svn_add_file(self, filename, data, changelist=None):
        """Add a file to the test repo."""
        is_new = not os.path.exists(filename)
        with open(filename, 'wb') as f:
            f.write(data)
        if is_new:
            self._run_svn(['add', filename])
        if changelist:
            self._run_svn(['changelist', changelist, filename])
    def _svn_add_dir(self, dirname):
        """Add a directory to the test repo."""
        if not os.path.exists(dirname):
            os.mkdir(dirname)
        self._run_svn(['add', dirname])
    def test_parse_revision_spec_no_args(self):
        """Testing SVNClient.parse_revision_spec with no specified revisions"""
        revisions = self.client.parse_revision_spec()
        self.assertTrue(isinstance(revisions, dict))
        self.assertTrue('base' in revisions)
        self.assertTrue('tip' in revisions)
        self.assertTrue('parent_base' not in revisions)
        self.assertEqual(revisions['base'], 'BASE')
        self.assertEqual(revisions['tip'], '--rbtools-working-copy')
    def test_parse_revision_spec_one_revision(self):
        """Testing SVNClient.parse_revision_spec with one specified numeric
        revision"""
        revisions = self.client.parse_revision_spec(['3'])
        self.assertTrue(isinstance(revisions, dict))
        self.assertTrue('base' in revisions)
        self.assertTrue('tip' in revisions)
        self.assertTrue('parent_base' not in revisions)
        self.assertEqual(revisions['base'], 2)
        self.assertEqual(revisions['tip'], 3)
    def test_parse_revision_spec_one_revision_changelist(self):
        """Testing SVNClient.parse_revision_spec with one specified changelist
        revision"""
        self._svn_add_file('foo.txt', FOO3, 'my-change')
        revisions = self.client.parse_revision_spec(['my-change'])
        self.assertTrue(isinstance(revisions, dict))
        self.assertTrue('base' in revisions)
        self.assertTrue('tip' in revisions)
        self.assertTrue('parent_base' not in revisions)
        self.assertEqual(revisions['base'], 'BASE')
        self.assertEqual(revisions['tip'],
                         SVNClient.REVISION_CHANGELIST_PREFIX + 'my-change')
    def test_parse_revision_spec_one_revision_nonexistant_changelist(self):
        """Testing SVNClient.parse_revision_spec with one specified invalid
        changelist revision"""
        self._svn_add_file('foo.txt', FOO3, 'my-change')
        self.assertRaises(
            InvalidRevisionSpecError,
            lambda: self.client.parse_revision_spec(['not-my-change']))
    def test_parse_revision_spec_one_arg_two_revisions(self):
        """Testing SVNClient.parse_revision_spec with R1:R2 syntax"""
        revisions = self.client.parse_revision_spec(['1:3'])
        self.assertTrue(isinstance(revisions, dict))
        self.assertTrue('base' in revisions)
        self.assertTrue('tip' in revisions)
        self.assertTrue('parent_base' not in revisions)
        self.assertEqual(revisions['base'], 1)
        self.assertEqual(revisions['tip'], 3)
    def test_parse_revision_spec_two_arguments(self):
        """Testing SVNClient.parse_revision_spec with two revisions"""
        revisions = self.client.parse_revision_spec(['1', '3'])
        self.assertTrue(isinstance(revisions, dict))
        self.assertTrue('base' in revisions)
        self.assertTrue('tip' in revisions)
        self.assertTrue('parent_base' not in revisions)
        self.assertEqual(revisions['base'], 1)
        self.assertEqual(revisions['tip'], 3)
    def test_parse_revision_spec_one_revision_url(self):
        """Testing SVNClient.parse_revision_spec with one revision and a
        repository URL"""
        self.options.repository_url = \
            'http://svn.apache.org/repos/asf/subversion/trunk'
        revisions = self.client.parse_revision_spec(['1549823'])
        self.assertTrue(isinstance(revisions, dict))
        self.assertTrue('base' in revisions)
        self.assertTrue('tip' in revisions)
        self.assertTrue('parent_base' not in revisions)
        self.assertEqual(revisions['base'], 1549822)
        self.assertEqual(revisions['tip'], 1549823)
    def test_parse_revision_spec_two_revisions_url(self):
        """Testing SVNClient.parse_revision_spec with R1:R2 syntax and a
        repository URL"""
        self.options.repository_url = \
            'http://svn.apache.org/repos/asf/subversion/trunk'
        revisions = self.client.parse_revision_spec(['1549823:1550211'])
        self.assertTrue(isinstance(revisions, dict))
        self.assertTrue('base' in revisions)
        self.assertTrue('tip' in revisions)
        self.assertTrue('parent_base' not in revisions)
        self.assertEqual(revisions['base'], 1549823)
        self.assertEqual(revisions['tip'], 1550211)
    def test_parse_revision_spec_invalid_spec(self):
        """Testing SVNClient.parse_revision_spec with invalid specifications"""
        self.assertRaises(InvalidRevisionSpecError,
                          self.client.parse_revision_spec,
                          ['aoeu'])
        self.assertRaises(InvalidRevisionSpecError,
                          self.client.parse_revision_spec,
                          ['aoeu', '1234'])
        self.assertRaises(TooManyRevisionsError,
                          self.client.parse_revision_spec,
                          ['1', '2', '3'])
    def test_parse_revision_spec_non_unicode_log(self):
        """Testing SVNClient.parse_revision_spec with a non-utf8 log entry"""
        # Note: the svn log entry for commit r2 contains one non-utf8 character
        revisions = self.client.parse_revision_spec(['2'])
        self.assertTrue(isinstance(revisions, dict))
        self.assertTrue('base' in revisions)
        self.assertTrue('tip' in revisions)
        self.assertTrue('parent_base' not in revisions)
        self.assertEqual(revisions['base'], 1)
        self.assertEqual(revisions['tip'], 2)
    def test_get_commit_message_working_copy(self):
        """Testing SVNClient.get_commit_message with a working copy change"""
        revisions = self.client.parse_revision_spec()
        message = self.client.get_commit_message(revisions)
        self.assertIsNone(message)
    def test_get_commit_message_committed_revision(self):
        """Testing SVNClient.get_commit_message with a single committed
        revision
        """
        revisions = self.client.parse_revision_spec(['2'])
        message = self.client.get_commit_message(revisions)
        self.assertTrue('summary' in message)
        self.assertTrue('description' in message)
        self.assertEqual(message['summary'],
                         'Commit 2 -- a non-utf8 character: \xe9')
        self.assertEqual(message['description'],
                         'Commit 2 -- a non-utf8 character: \xe9\n')
    def test_get_commit_message_committed_revisions(self):
        """Testing SVNClient.get_commit_message with multiple committed
        revisions
        """
        revisions = self.client.parse_revision_spec(['1:3'])
        message = self.client.get_commit_message(revisions)
        self.assertTrue('summary' in message)
        self.assertTrue('description' in message)
        self.assertEqual(message['summary'],
                         'Commit 2 -- a non-utf8 character: \xe9')
        self.assertEqual(message['description'], 'Commit 3')
    @svn_version_set_hash('6613644d417f7c90f83f3a2d16b1dad5',
                          '7630ea80056a7340d93a556e9af60c63',
                          '6a5339da19e60c7706e44aeebfa4da5f')
    def test_diff_exclude(self, md5sum):
        """Testing SVNClient diff with file exclude patterns"""
        self._svn_add_file('bar.txt', FOO1)
        self._svn_add_file('exclude.txt', FOO2)
        revisions = self.client.parse_revision_spec([])
        result = self.client.diff(revisions,
                                  exclude_patterns=['exclude.txt'])
        self.assertTrue(isinstance(result, dict))
        self.assertTrue('diff' in result)
        self.assertEqual(md5(result['diff']).hexdigest(), md5sum)
    def test_diff_exclude_in_subdir(self):
        """Testing SVNClient diff with exclude patterns in a subdir"""
        self._svn_add_file('foo.txt', FOO1)
        self._svn_add_dir('subdir')
        self._svn_add_file(os.path.join('subdir', 'exclude.txt'), FOO2)
        os.chdir('subdir')
        revisions = self.client.parse_revision_spec([])
        result = self.client.diff(
            revisions,
            exclude_patterns=['exclude.txt'])
        self.assertTrue(isinstance(result, dict))
        self.assertTrue('diff' in result)
        self.assertEqual(result['diff'], b'')
    def test_diff_exclude_root_pattern_in_subdir(self):
        """Testing SVNClient diff with repo exclude patterns in a subdir"""
        self._svn_add_file('exclude.txt', FOO1)
        self._svn_add_dir('subdir')
        os.chdir('subdir')
        revisions = self.client.parse_revision_spec([])
        result = self.client.diff(
            revisions,
            exclude_patterns=[os.path.join(os.path.sep, 'exclude.txt'),
                              '.'])
        self.assertTrue(isinstance(result, dict))
        self.assertTrue('diff' in result)
        self.assertEqual(result['diff'], b'')
    @svn_version_set_hash('043befc507b8177a0f010dc2cecc4205',
                          '1b68063237c584d38a9a3ddbdf1f72a2',
                          '466f7c2092e085354f5b24b91d48dd80')
    def test_same_diff_multiple_methods(self, md5_sum):
        """Testing SVNClient identical diff generated from root, subdirectory,
        and via target"""
        # Test diff generation for a single file, where 'svn diff' is invoked
        # from three different locations.  This should result in an identical
        # diff for all three cases.  Add a new subdirectory and file
        # (dir1/A.txt) which will be the lone change captured in the diff.
        # Cases:
        #  1) Invoke 'svn diff' from checkout root.
        #  2) Invoke 'svn diff' from dir1/ subdirectory.
        #  3) Create dir2/ subdirectory parallel to dir1/.  Invoke 'svn diff'
        #     from dir2/ where '../dir1/A.txt' is provided as a specific
        #     target.
        #
        # This test is inspired by #3749 which broke cases 2 and 3.
        self._svn_add_dir('dir1')
        self._svn_add_file('dir1/A.txt', FOO3)
        # Case 1: Generate diff from checkout root.
        revisions = self.client.parse_revision_spec()
        result = self.client.diff(revisions)
        self.assertTrue(isinstance(result, dict))
        self.assertTrue('diff' in result)
        self.assertEqual(md5(result['diff']).hexdigest(), md5_sum)
        # Case 2: Generate diff from dir1 subdirectory.
        os.chdir('dir1')
        result = self.client.diff(revisions)
        self.assertTrue(isinstance(result, dict))
        self.assertTrue('diff' in result)
        self.assertEqual(md5(result['diff']).hexdigest(), md5_sum)
        # Case 3: Generate diff from dir2 subdirectory, but explicitly target
        # only ../dir1/A.txt.
        os.chdir('..')
        self._svn_add_dir('dir2')
        os.chdir('dir2')
        result = self.client.diff(revisions, ['../dir1/A.txt'])
        self.assertTrue(isinstance(result, dict))
        self.assertTrue('diff' in result)
        self.assertEqual(md5(result['diff']).hexdigest(), md5_sum)
    @svn_version_set_hash('902d662a110400f7470294b2d9e72d36',
                          '13803373ded9af750384a4601d5173ce',
                          'f11dfbe58925871c5f64b6ca647a8d3c')
    def test_diff_non_unicode_characters(self, md5_sum):
        """Testing SVNClient diff with a non-utf8 file"""
        self._svn_add_file('A.txt', '\xe2'.encode('iso-8859-1'))
        self._run_svn(['propset', 'svn:mime-type', 'text/plain', 'A.txt'])
        revisions = self.client.parse_revision_spec()
        result = self.client.diff(revisions)
        self.assertTrue(isinstance(result, dict))
        self.assertTrue('diff' in result)
        self.assertEqual(md5(result['diff']).hexdigest(), md5_sum)
    @svn_version_set_hash('60c4d21f4d414da947f4e7273e6d1326',
                          '60c4d21f4d414da947f4e7273e6d1326',
                          '571e47c456698bad35bca06523473008')
    def test_diff_non_unicode_filename_repository_url(self, md5sum):
        """Testing SVNClient diff with a non-utf8 filename via repository_url
        option"""
        self.options.repository_url = self.svn_repo_url
        # Note: commit r4 adds one file with a non-utf8 character in both its
        # filename and content.
        revisions = self.client.parse_revision_spec(['4'])
        result = self.client.diff(revisions)
        self.assertTrue(isinstance(result, dict))
        self.assertTrue('diff' in result)
        self.assertEqual(md5(result['diff']).hexdigest(), md5sum)
    @svn_version_set_hash('ac1835240ec86ee14ddccf1f2236c442',
                          'ac1835240ec86ee14ddccf1f2236c442',
                          '610f5506e670dc55a2464a6ad9af015c')
    def test_show_copies_as_adds_enabled(self, md5sum):
        """Testing SVNClient with --show-copies-as-adds functionality
        enabled"""
        self.check_show_copies_as_adds('y', md5sum)
    @svn_version_set_hash('d41d8cd98f00b204e9800998ecf8427e',
                          'd41d8cd98f00b204e9800998ecf8427e',
                          'b656e2f9b70ade256c3fe855c13ee52c')
    def test_show_copies_as_adds_disabled(self, md5sum):
        """Testing SVNClient with --show-copies-as-adds functionality
        disabled"""
        self.check_show_copies_as_adds('n', md5sum)
    def check_show_copies_as_adds(self, state, md5sum):
        """Helper function to evaluate --show-copies-as-adds"""
        self.client.get_repository_info()
        # Ensure valid SVN client version.
        if not is_valid_version(self.client.subversion_client_version,
                                self.client.SHOW_COPIES_AS_ADDS_MIN_VERSION):
            raise SkipTest('Subversion client is too old to test '
                           '--show-copies-as-adds.')
        self.options.svn_show_copies_as_adds = state
        self._svn_add_dir('dir1')
        self._svn_add_dir('dir2')
        self._run_svn(['copy', 'foo.txt', 'dir1'])
        # Generate identical diff via several methods:
        #  1) from checkout root
        #  2) via changelist
        #  3) from checkout root when all relevant files belong to a changelist
        #  4) via explicit include target
        revisions = self.client.parse_revision_spec()
        result = self.client.diff(revisions)
        self.assertTrue(isinstance(result, dict))
        self.assertTrue('diff' in result)
        self.assertEqual(md5(result['diff']).hexdigest(), md5sum)
        self._run_svn(['changelist', 'cl1', 'dir1/foo.txt'])
        revisions = self.client.parse_revision_spec(['cl1'])
        result = self.client.diff(revisions)
        self.assertTrue(isinstance(result, dict))
        self.assertTrue('diff' in result)
        self.assertEqual(md5(result['diff']).hexdigest(), md5sum)
        revisions = self.client.parse_revision_spec()
        result = self.client.diff(revisions)
        self.assertTrue(isinstance(result, dict))
        self.assertTrue('diff' in result)
        self.assertEqual(md5(result['diff']).hexdigest(), md5sum)
        self._run_svn(['changelist', '--remove', 'dir1/foo.txt'])
        os.chdir('dir2')
        revisions = self.client.parse_revision_spec()
        result = self.client.diff(revisions, ['../dir1'])
        self.assertTrue(isinstance(result, dict))
        self.assertTrue('diff' in result)
        self.assertEqual(md5(result['diff']).hexdigest(), md5sum)
    def test_history_scheduled_with_commit_nominal(self):
        """Testing SVNClient.history_scheduled_with_commit nominal cases"""
        self.client.get_repository_info()
        # Ensure valid SVN client version.
        if not is_valid_version(self.client.subversion_client_version,
                                self.client.SHOW_COPIES_AS_ADDS_MIN_VERSION):
            raise SkipTest('Subversion client is too old to test '
                           'history_scheduled_with_commit().')
        self._svn_add_dir('dir1')
        self._svn_add_dir('dir2')
        self._run_svn(['copy', 'foo.txt', 'dir1'])
        # Squash stderr to prevent error messages in test output. Restore
        # the real stderr and close the devnull handle when the test ends,
        # so the file descriptor is not leaked and later tests are not
        # silenced. (Previously this assigned a never-closed file and never
        # restored sys.stderr.)
        old_stderr = sys.stderr
        devnull = open(os.devnull, 'w')
        self.addCleanup(devnull.close)
        self.addCleanup(setattr, sys, 'stderr', old_stderr)
        sys.stderr = devnull
        # Ensure SystemExit is raised when attempting to generate diff via
        # several methods:
        #  1) from checkout root
        #  2) via changelist
        #  3) from checkout root when all relevant files belong to a changelist
        #  4) via explicit include target
        revisions = self.client.parse_revision_spec()
        self.assertRaises(SystemExit, self.client.diff, revisions)
        self._run_svn(['changelist', 'cl1', 'dir1/foo.txt'])
        revisions = self.client.parse_revision_spec(['cl1'])
        self.assertRaises(SystemExit, self.client.diff, revisions)
        revisions = self.client.parse_revision_spec()
        self.assertRaises(SystemExit, self.client.diff, revisions)
        self._run_svn(['changelist', '--remove', 'dir1/foo.txt'])
        os.chdir('dir2')
        revisions = self.client.parse_revision_spec()
        self.assertRaises(SystemExit, self.client.diff, revisions, ['../dir1'])
    def test_history_scheduled_with_commit_special_case_non_local_mods(self):
        """Testing SVNClient.history_scheduled_with_commit is bypassed when
        diff is not for local modifications in a working copy"""
        self.client.get_repository_info()
        # Ensure valid SVN client version.
        if not is_valid_version(self.client.subversion_client_version,
                                self.client.SHOW_COPIES_AS_ADDS_MIN_VERSION):
            raise SkipTest('Subversion client is too old to test '
                           'history_scheduled_with_commit().')
        # While within a working copy which contains a scheduled commit with
        # addition-with-history, ensure history_scheduled_with_commit() is not
        # executed when generating a diff between two revisions either
        # 1) locally or 2) via --reposistory-url option.
        self._run_svn(['copy', 'foo.txt', 'foo_copy.txt'])
        revisions = self.client.parse_revision_spec(['1:2'])
        result = self.client.diff(revisions)
        self.assertTrue(isinstance(result, dict))
        self.assertTrue('diff' in result)
        self.assertEqual(md5(result['diff']).hexdigest(),
                         'ed154720a7459c2649cab4d2fa34fa93')
        self.options.repository_url = self.svn_repo_url
        revisions = self.client.parse_revision_spec(['2'])
        result = self.client.diff(revisions)
        self.assertTrue(isinstance(result, dict))
        self.assertTrue('diff' in result)
        self.assertEqual(md5(result['diff']).hexdigest(),
                         'ed154720a7459c2649cab4d2fa34fa93')
    def test_history_scheduled_with_commit_special_case_exclude(self):
        """Testing SVNClient.history_scheduled_with_commit with exclude file"""
        self.client.get_repository_info()
        # Ensure valid SVN client version.
        if not is_valid_version(self.client.subversion_client_version,
                                self.client.SHOW_COPIES_AS_ADDS_MIN_VERSION):
            raise SkipTest('Subversion client is too old to test '
                           'history_scheduled_with_commit().')
        # Lone file with history is also excluded.  In this case there should
        # be no SystemExit raised and an (empty) diff should be produced. Test
        # from checkout root and via changelist.
        self._run_svn(['copy', 'foo.txt', 'foo_copy.txt'])
        revisions = self.client.parse_revision_spec([])
        result = self.client.diff(revisions, [], ['foo_copy.txt'])
        self.assertTrue(isinstance(result, dict))
        self.assertTrue('diff' in result)
        self.assertEqual(md5(result['diff']).hexdigest(),
                         'd41d8cd98f00b204e9800998ecf8427e')
        self._run_svn(['changelist', 'cl1', 'foo_copy.txt'])
        revisions = self.client.parse_revision_spec(['cl1'])
        result = self.client.diff(revisions, [], ['foo_copy.txt'])
        self.assertTrue(isinstance(result, dict))
        self.assertTrue('diff' in result)
        self.assertEqual(md5(result['diff']).hexdigest(),
                         'd41d8cd98f00b204e9800998ecf8427e')
    def test_rename_diff_mangling_bug_4546(self):
        """Test diff with removal of lines that look like headers"""
        # If a file has lines that look like "-- XX (YY)", and one of those
        # files gets removed, our rename handling would filter them out. Test
        # that the bug is fixed.
        with open('bug-4546.txt', 'w') as f:
            f.write('-- test line1\n'
                    '-- test line2\n'
                    '-- test line (test2)\n')
        revisions = self.client.parse_revision_spec()
        result = self.client.diff(revisions)
        self.assertTrue(isinstance(result, dict))
        self.assertTrue('diff' in result)
        self.assertTrue(b'--- test line (test1)' in result['diff'])
| [
"trowbrds@gmail.com"
] | trowbrds@gmail.com |
f807bce27f1dd1c18871c787b6582c62f5b1e254 | 82b946da326148a3c1c1f687f96c0da165bb2c15 | /sdk/python/pulumi_azure_native/web/v20210101/get_web_app_domain_ownership_identifier_slot.py | 3b8508cf4c91fe3a7690d3c8355195664eae3ed8 | [
"BSD-3-Clause",
"Apache-2.0"
] | permissive | morrell/pulumi-azure-native | 3916e978382366607f3df0a669f24cb16293ff5e | cd3ba4b9cb08c5e1df7674c1c71695b80e443f08 | refs/heads/master | 2023-06-20T19:37:05.414924 | 2021-07-19T20:57:53 | 2021-07-19T20:57:53 | 387,815,163 | 0 | 0 | Apache-2.0 | 2021-07-20T14:18:29 | 2021-07-20T14:18:28 | null | UTF-8 | Python | false | false | 4,347 | py | # coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from ... import _utilities
__all__ = [
'GetWebAppDomainOwnershipIdentifierSlotResult',
'AwaitableGetWebAppDomainOwnershipIdentifierSlotResult',
'get_web_app_domain_ownership_identifier_slot',
]
@pulumi.output_type
class GetWebAppDomainOwnershipIdentifierSlotResult:
    """
    A domain specific resource identifier.
    """
    # NOTE: generated by the Pulumi SDK Generator (see file header).
    # Each argument is type-checked and stored through pulumi.set();
    # properties read the values back through pulumi.get().
    def __init__(__self__, id=None, kind=None, name=None, type=None, value=None):
        if id and not isinstance(id, str):
            raise TypeError("Expected argument 'id' to be a str")
        pulumi.set(__self__, "id", id)
        if kind and not isinstance(kind, str):
            raise TypeError("Expected argument 'kind' to be a str")
        pulumi.set(__self__, "kind", kind)
        if name and not isinstance(name, str):
            raise TypeError("Expected argument 'name' to be a str")
        pulumi.set(__self__, "name", name)
        if type and not isinstance(type, str):
            raise TypeError("Expected argument 'type' to be a str")
        pulumi.set(__self__, "type", type)
        if value and not isinstance(value, str):
            raise TypeError("Expected argument 'value' to be a str")
        pulumi.set(__self__, "value", value)
    @property
    @pulumi.getter
    def id(self) -> str:
        """
        Resource Id.
        """
        return pulumi.get(self, "id")
    @property
    @pulumi.getter
    def kind(self) -> Optional[str]:
        """
        Kind of resource.
        """
        return pulumi.get(self, "kind")
    @property
    @pulumi.getter
    def name(self) -> str:
        """
        Resource Name.
        """
        return pulumi.get(self, "name")
    @property
    @pulumi.getter
    def type(self) -> str:
        """
        Resource type.
        """
        return pulumi.get(self, "type")
    @property
    @pulumi.getter
    def value(self) -> Optional[str]:
        """
        String representation of the identity.
        """
        return pulumi.get(self, "value")
class AwaitableGetWebAppDomainOwnershipIdentifierSlotResult(GetWebAppDomainOwnershipIdentifierSlotResult):
    """Awaitable wrapper so the already-resolved result can be awaited."""
    # pylint: disable=using-constant-test
    def __await__(self):
        # The unreachable ``yield`` makes this method a generator, which
        # is what the await protocol requires; it completes immediately
        # with a plain copy of the resolved values.
        if False:
            yield self
        return GetWebAppDomainOwnershipIdentifierSlotResult(
            id=self.id,
            kind=self.kind,
            name=self.name,
            type=self.type,
            value=self.value)
def get_web_app_domain_ownership_identifier_slot(domain_ownership_identifier_name: Optional[str] = None,
                                                 name: Optional[str] = None,
                                                 resource_group_name: Optional[str] = None,
                                                 slot: Optional[str] = None,
                                                 opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetWebAppDomainOwnershipIdentifierSlotResult:
    """
    A domain specific resource identifier.
    :param str domain_ownership_identifier_name: Name of domain ownership identifier.
    :param str name: Name of the app.
    :param str resource_group_name: Name of the resource group to which the resource belongs.
    :param str slot: Name of the deployment slot. If a slot is not specified, the API will delete the binding for the production slot.
    """
    # Marshal the invoke arguments into the shape the provider expects.
    __args__ = dict()
    __args__['domainOwnershipIdentifierName'] = domain_ownership_identifier_name
    __args__['name'] = name
    __args__['resourceGroupName'] = resource_group_name
    __args__['slot'] = slot
    if opts is None:
        opts = pulumi.InvokeOptions()
    if opts.version is None:
        # Default to the SDK's pinned provider version.
        opts.version = _utilities.get_version()
    __ret__ = pulumi.runtime.invoke('azure-native:web/v20210101:getWebAppDomainOwnershipIdentifierSlot', __args__, opts=opts, typ=GetWebAppDomainOwnershipIdentifierSlotResult).value
    # Re-wrap the raw result in the awaitable variant.
    return AwaitableGetWebAppDomainOwnershipIdentifierSlotResult(
        id=__ret__.id,
        kind=__ret__.kind,
        name=__ret__.name,
        type=__ret__.type,
        value=__ret__.value)
| [
"noreply@github.com"
] | morrell.noreply@github.com |
8f2f52a14e7110870ebe759c5b112bb4055bfa0f | 27327e2e0a6844a58a2c7019effabd10f35a652c | /pythonchallenge/crossin_mryk2.py | 4f088c487c0071f4354e39d3957b533f4fca0b84 | [] | no_license | jtr109/AdvancePython | 996b975483502ebfb2c3f03ceb1d1d2b6bbf0b30 | a72926c533d41495d9d1d60a8d020d3fe047d0e2 | refs/heads/master | 2021-01-20T10:59:49.282810 | 2016-08-25T10:09:20 | 2016-08-25T10:09:20 | 66,245,617 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 177 | py | def sort_and_unique(l):
new_l = list(set(l))
new_l.sort()
return new_l
if __name__ == '__main__':
    # Quick demonstration: prints [1, 3, 4, 7, 8, 9].
    sample = [4, 7, 3, 4, 1, 9, 8, 3, 7]
    print(sort_and_unique(sample))
| [
"lyp_login@outlook.com"
] | lyp_login@outlook.com |
23017da9226d9dc9b4476b492e8cdfa7b4fb0f17 | e3098a32e5825c88db0f20938ec4ca89054ec52c | /shaney.py | 78ec8bd87bc43fce563e32825057b5ae9c7f92f4 | [] | no_license | yuvipanda/frailgrey | 9e807c25f3e5e08ee537f3c7ac46a534407f74da | 472973aa7477bca7936fd44bc14b8fad62f4e647 | refs/heads/master | 2020-06-05T01:35:47.935109 | 2012-10-05T06:45:41 | 2012-10-05T06:45:41 | 259,656 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,027 | py | # shaney.py by Greg McFarlane
# some editing by Joe Strout
#
# search for "Mark V. Shaney" on the WWW for more info!
import sys
import random
import string
def generate(text, count):
words = text.split()
end_sentence = []
dict = {}
prev1 = ''
prev2 = ''
for word in words:
if prev1 != '' and prev2 != '':
key = (prev2, prev1)
if dict.has_key(key):
dict[key].append(word)
else:
dict[key] = [word]
if prev1[-1:] == '.':
end_sentence.append(key)
prev2 = prev1
prev1 = word
key = ()
sentence = ""
while 1:
if dict.has_key(key):
word = random.choice(dict[key])
sentence = sentence + word + ' '
key = (key[1], word)
if key in end_sentence:
yield sentence
sentence = ""
count = count - 1
if count <= 0:
break
else:
key = random.choice(end_sentence)
| [
"yuvipanda@gmail.com"
] | yuvipanda@gmail.com |
class Solution:
    def jump(self, nums):
        """Return the minimum number of jumps needed to reach the last index.

        Greedy BFS over index windows: each "level" is the range of indices
        reachable with the current number of jumps; the next level extends
        to the farthest index reachable from anywhere in that window.

        :type nums: List[int]
        :rtype: int
        """
        n = len(nums)
        window_start, window_end = 0, 0
        jumps = 0
        while window_end < n - 1:
            reachable = 0
            for idx in range(window_start, window_end + 1):
                reachable = max(reachable, idx + nums[idx])
            window_start, window_end = window_end, reachable
            jumps += 1
        return jumps
print(Solution().jump([1,2])) | [
"yangxin.nlp@bytedance.com"
] | yangxin.nlp@bytedance.com |
039fd47273a7f03c29529774896c1ad054dd0856 | 5bd8909ecedbc68b23e2bf6d4560a02b4eea3fa1 | /blog/migrations/0008_auto_20201228_0115.py | 8aa17a3ceebb7508b529e2be38c5e11866ee05ee | [] | no_license | HadiGhazali/zoomit | 5c0ca881ed43a1f0baccb90d88309c35ac549e74 | 39a14394e84d86e434f506f270e80084710a4507 | refs/heads/main | 2023-02-19T11:31:18.949557 | 2021-01-20T18:14:55 | 2021-01-20T18:14:55 | 318,006,841 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,010 | py | # Generated by Django 3.1.4 on 2020-12-28 01:15
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Auto-generated Django migration for the ``blog`` app.

    Renames HitCount.date to create_date, adds HitCount.update_date,
    and alters UrlHit's ``post`` relation and ``url`` length.
    """
    dependencies = [
        ('blog', '0007_auto_20201225_1529'),
    ]
    operations = [
        migrations.RenameField(
            model_name='hitcount',
            old_name='date',
            new_name='create_date',
        ),
        migrations.AddField(
            model_name='hitcount',
            name='update_date',
            field=models.DateTimeField(auto_now_add=True, null=True),
        ),
        migrations.AlterField(
            model_name='urlhit',
            name='post',
            field=models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, related_name='url_hit', related_query_name='url_hit', to='blog.post', verbose_name='post'),
        ),
        migrations.AlterField(
            model_name='urlhit',
            name='url',
            field=models.CharField(max_length=150),
        ),
    ]
| [
"hadivardanjani1378@gmail.com"
] | hadivardanjani1378@gmail.com |
a4a7daf902c1db9cb57fbf3b4c0b6878a28e5589 | e21599d08d2df9dac2dee21643001c0f7c73b24f | /Others/Modules/xml/create_xml.py | 943aeadcfe0f48fa947e92b0b54c945162ad1537 | [] | no_license | herolibra/PyCodeComplete | c7bf2fb4ce395737f8c67749148de98a36a71035 | 4ef7d2c3aec6d28a53eed0e649cdeb74df3d783b | refs/heads/master | 2022-07-17T05:39:03.554760 | 2020-05-03T07:00:14 | 2020-05-03T07:00:14 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,350 | py | # coding=utf-8
import xml.dom.minidom
# Build an XML document describing a list of managers with
# xml.dom.minidom and write it to Manager.xml.

# Create an empty DOM document in memory.
doc = xml.dom.minidom.Document()

# Create the root element <Managers> and set its attributes.
root = doc.createElement('Managers')
root.setAttribute('company', 'xx科技')
root.setAttribute('address', '科技软件园')

# Attach the root element to the document.
doc.appendChild(root)

managerList = [
    {'name': 'joy', 'age': 27, 'sex': '女'},
    {'name': 'tom', 'age': 30, 'sex': '男'},
    {'name': 'ruby', 'age': 29, 'sex': '女'},
]

for i in managerList:
    # One <Manager> element per record, one child element per field;
    # each leaf element carries its value as a text node.
    nodeManager = doc.createElement('Manager')
    nodeName = doc.createElement('name')
    nodeName.appendChild(doc.createTextNode(str(i['name'])))
    nodeAge = doc.createElement("age")
    nodeAge.appendChild(doc.createTextNode(str(i["age"])))
    nodeSex = doc.createElement("sex")
    nodeSex.appendChild(doc.createTextNode(str(i["sex"])))
    # Attach the leaves to <Manager>, then <Manager> to the root.
    nodeManager.appendChild(nodeName)
    nodeManager.appendChild(nodeAge)
    nodeManager.appendChild(nodeSex)
    root.appendChild(nodeManager)

# Write the XML document out.
# Fix: the original opened the file without ever closing it and relied on
# the platform default encoding even though the XML prolog declares
# encoding="utf-8".  Use a context manager and an explicit UTF-8 encoding
# so the file is flushed/closed and the bytes match the declaration.
with open('Manager.xml', 'w', encoding='utf-8') as fp:
    doc.writexml(fp, indent='\t', addindent='\t', newl='\n', encoding="utf-8")
| [
"zengyuetian@cloutropy.com"
] | zengyuetian@cloutropy.com |
dd31f3e5ecd70f29f6e610e0bb210939483e3274 | bd4812ba7af196d2e866cbf2935b2e7308d95066 | /python/leetcode/389_find_difference.py | 7133e92ba94094b445c901ba3128895d677dbe32 | [
"Apache-2.0"
] | permissive | yxun/notebook | f507201e15c4376f0655121724254c0d5275c3b1 | 00eb1953d872a9a93a13d7cf23d8e4ed641d1ce7 | refs/heads/master | 2023-09-01T03:50:48.142295 | 2023-08-17T12:11:25 | 2023-08-17T12:11:25 | 207,569,654 | 2 | 2 | Apache-2.0 | 2023-08-17T12:11:26 | 2019-09-10T13:38:49 | Java | UTF-8 | Python | false | false | 1,128 | py | #%%
"""
- Find the Difference
- https://leetcode.com/problems/find-the-difference/
- Easy
Given two strings s and t which consist of only lowercase letters.
String t is generated by random shuffling string s and then add one more letter at a random position.
Find the letter that was added in t.
Example:
Input:
s = "abcd"
t = "abcde"
Output:
e
Explanation:
'e' is the letter that was added.
"""
#%%
##
class S1:
    """Sorting approach to LeetCode 389 (Find the Difference)."""

    def findTheDifference(self, s, t):
        """Return the single character present in t but not in s.

        After sorting both strings, the first position where they
        disagree holds the extra character; if they agree everywhere,
        the extra character is the last one of sorted t.

        :type s: str
        :type t: str
        :rtype: str
        """
        sorted_s = sorted(s)
        sorted_t = sorted(t)
        for a, b in zip(sorted_s, sorted_t):
            if a != b:
                return b
        return sorted_t[-1]
#%%
class S2:
    """Counting approach to LeetCode 389 (Find the Difference)."""

    def findTheDifference(self, s, t):
        """Return the single character present in t but not in s.

        Characters from s are tallied as +1 and characters from t as
        -1; exactly one character ends with a non-zero (+/-1) balance.

        :type s: str
        :type t: str
        :rtype: str
        """
        balance = {}
        for ch in s:
            balance[ch] = balance.get(ch, 0) + 1
        for ch in t:
            balance[ch] = balance.get(ch, 0) - 1
        for ch, count in balance.items():
            if abs(count) == 1:
                return ch
#%%
class S3:
    """Counter-subtraction approach to LeetCode 389."""

    def findTheDifference(self, s, t):
        """Return the single character present in t but not in s.

        Counter(t) - Counter(s) keeps only characters with a positive
        surplus in t, i.e. exactly the one that was added.

        :type s: str
        :type t: str
        :rtype: str
        """
        from collections import Counter
        surplus = Counter(t) - Counter(s)
        return list(surplus)[-1]
| [
"yuanlin.yxu@gmail.com"
] | yuanlin.yxu@gmail.com |
3e03394d28989c705b6f829c76a9c7e4c36a2689 | 1012f61f46ff7aaf37cd3ce0ead64e035ec201dc | /coding-challange/codewars/6kyu/2020-01-19~2020-04-14/does-my-number-look-big-in-this/does-my-number-look-big-in-this.py | ef92ef56a4daa61186ead7f941fbab381a28b528 | [] | no_license | polyglotm/coding-dojo | 89efe22f5a34088e94c9e3a4e25cad510b04172a | 43da9c75e3125f5cb1ac317d275475f1c0ea6727 | refs/heads/develop | 2023-08-17T11:59:30.945061 | 2023-08-16T14:13:45 | 2023-08-16T14:13:45 | 188,733,115 | 2 | 0 | null | 2023-03-04T05:49:21 | 2019-05-26T21:26:25 | JavaScript | UTF-8 | Python | false | false | 597 | py | # does-my-number-look-big-in-this
# https://www.codewars.com/kata/5287e858c6b5a9678200083c
from unittest import TestCase
from functools import reduce
def narcissistic(value):
    """Return True if *value* is a narcissistic (Armstrong) number.

    A narcissistic number equals the sum of its own digits, each raised
    to the power of the number of digits, e.g. 153 = 1**3 + 5**3 + 3**3.

    :param value: positive integer to test
    :return: bool
    """
    digits = str(value)  # convert once; reused for length and iteration
    length = len(digits)
    # Builtin sum() replaces the hand-rolled functools.reduce fold.
    return value == sum(int(d) ** length for d in digits)
# Sanity checks for narcissistic(), expressed as one data-driven loop.
for number, expected, label in [
    (7, True, '7 is narcissistic'),
    (371, True, '371 is narcissistic'),
    (122, False, '122 is not narcissistic'),
    (4887, False, '4887 is not narcissistic'),
]:
    TestCase().assertEqual(narcissistic(number), expected, label)
| [
"polyglot.m@gmail.com"
] | polyglot.m@gmail.com |
a2575af33bb634af6f622eb1520fc75ac98f8c12 | a4586ad2d7c8747c79a0a6fa25a367706f7b5638 | /Chap3/project/api_weather_2.py | 66a2159d6b7e4f82c5ff968372baeee39ff4281a | [] | no_license | AIHackerTest/Bruce-Qiao_Py101-004 | e1ff0272d19b3ff734af2a96fd7fe9de2ef026e7 | 402026e0d2552578de017169ea9a8d17318a8471 | refs/heads/master | 2021-06-25T10:07:19.665410 | 2017-09-12T08:17:58 | 2017-09-12T08:17:58 | 103,240,197 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,199 | py | """ 使用心知天气API """
import requests
import json
import re
from const_value import API, KEY, UNIT, LANGUAGE
from flask import Flask, render_template, request
def fetchWeather(location):
    """Query the Xinzhi (心知天气) weather API for *location*.

    Uses the module-level API/KEY/LANGUAGE/UNIT constants and returns
    the decoded JSON response as a dict.
    """
    query = {
        'key': KEY,
        'location': location,
        'language': LANGUAGE,
        'unit': UNIT,
    }
    response = requests.get(API, params=query, timeout=5)
    return json.loads(response.content)
def change_date_format(raw_date):
    """Extract an ISO-8601 timestamp from *raw_date* and reformat it.

    Example: '2020-12-28T01:15:30+08:00' -> '2020-12-28 01:15'
    (seconds and any timezone suffix are dropped).

    :param raw_date: string containing a 'YYYY-MM-DDTHH:MM:SS' timestamp
    :return: the same moment formatted as 'YYYY-MM-DD HH:MM'
    :raises ValueError: if no such timestamp is present
    """
    expr = (r"\b(?P<year>\d{4})-(?P<month>\d{2})-(?P<date>\d{2})"
            r"T(?P<hour>\d{2}):(?P<minute>\d{2}):(?P<second>\d{2})\b")
    match = re.search(expr, raw_date)
    if match is None:
        # The original crashed with AttributeError on unexpected input;
        # fail with an explicit, diagnosable error instead.
        raise ValueError("no ISO-8601 timestamp found in %r" % (raw_date,))
    return "{year}-{month}-{date} {hour}:{minute}".format(**match.groupdict())
def json_to_dict(weather_json):
    """Flatten the first result of a weather API response.

    Returns a dict with the city name, current condition text,
    temperature, and the reformatted last-update time.
    """
    result = weather_json['results'][0]
    return {
        'city': result['location']['name'],
        'weather_condition': result['now']['text'],
        'temperature': result['now']['temperature'],
        'update_time': change_date_format(result['last_update']),
    }
app = Flask(__name__)  # WSGI application instance
inquiry_list = []  # in-memory history of successful lookups (lives only for the process)
@app.route("/", methods=['POST', 'GET'])
def main():
    """Single-page weather view.

    GET renders the empty form.  POST dispatches on the submitted
    action button: 查询 (query) fetches the weather and records it,
    历史 (history) shows past lookups, anything else shows the help.
    """
    if request.method != "POST":
        return render_template("api_weather.html")

    inquiry_outcome = None
    inquiry_history = None
    help_information = None
    error = None
    action = request.form['action']
    if action == u'查询':
        result = fetchWeather(request.form['location'])
        if "status" in result:
            # The API reports failures via a 'status' field.
            error = result['status']
        else:
            inquiry_outcome = json_to_dict(result)
            inquiry_list.append(inquiry_outcome)
    elif action == u'历史':
        inquiry_history = inquiry_list
    else:
        # Any other action value (the u'帮助' / help button).
        help_information = 1
    return render_template(
        "api_weather.html",
        inquiry_outcome=inquiry_outcome,
        inquiry_history=inquiry_history,
        help_information=help_information,
        error=error,
    )
# Run the Flask development server when executed directly.
# NOTE(review): debug=True is for local development only.
if __name__ == '__main__':
    app.run(debug = True)
| [
"xiaowan5219@gmail.com"
] | xiaowan5219@gmail.com |
eff68dae2504df1a5e60f809aa964bca0e998e02 | 06e34e2dface0b87fa785cab7e65422a5f20ba18 | /Solutions/985-Sum-of-Even-Numbers-After-Queries/python.py | 131d4b9326480a44fbd1879ea6c56af2850dc50c | [] | no_license | JerryHu1994/LeetCode-Practice | c9841b0ce70451c19c8a429a3898c05b6233e1d4 | b0ce69985c51a9a794397cd98a996fca0e91d7d1 | refs/heads/master | 2022-02-10T04:42:28.033364 | 2022-01-02T04:44:22 | 2022-01-02T04:44:22 | 117,118,143 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 469 | py | class Solution(object):
def sumEvenAfterQueries(self, A, queries):
"""
:type A: List[int]
:type queries: List[List[int]]
:rtype: List[int]
"""
ans = []
currsum = sum([i for i in A if i%2 == 0])
for val, ind in queries:
if A[ind]%2 == 0: currsum -= A[ind]
A[ind] = A[ind] + val
if A[ind]%2 == 0: currsum += A[ind]
ans.append(currsum)
return ans | [
"hjr01211@gmail.com"
] | hjr01211@gmail.com |
378a2de6fb2b861c7fca12322550f26bd2b5ec40 | f445450ac693b466ca20b42f1ac82071d32dd991 | /generated_tempdir_2019_09_15_163300/generated_part002299.py | 75cd1f983de3a20c28837508a34522dc6bd7b2fc | [] | no_license | Upabjojr/rubi_generated | 76e43cbafe70b4e1516fb761cabd9e5257691374 | cd35e9e51722b04fb159ada3d5811d62a423e429 | refs/heads/master | 2020-07-25T17:26:19.227918 | 2019-09-15T15:41:48 | 2019-09-15T15:41:48 | 208,357,412 | 4 | 1 | null | null | null | null | UTF-8 | Python | false | false | 3,998 | py | from sympy.abc import *
from matchpy.matching.many_to_one import CommutativeMatcher
from matchpy import *
from matchpy.utils import VariableWithCount
from collections import deque
from multiset import Multiset
from sympy.integrals.rubi.constraints import *
from sympy.integrals.rubi.utility_function import *
from sympy.integrals.rubi.rules.miscellaneous_integration import *
from sympy import *
class CommutativeMatcher95023(CommutativeMatcher):
    """Machine-generated many-to-one matcher for two commutative Add patterns.

    NOTE(review): this class is produced by matchpy code generation for
    sympy's rubi rules — do not edit by hand; regenerate instead.
    """

    # Lazily created singleton, see get().
    _instance = None
    # Pattern table consumed by the CommutativeMatcher base class.
    patterns = {
        0: (0, Multiset({0: 1}), [
            (VariableWithCount('i2.2.1.4.0', 1, 1, S(0)), Add)
        ]),
        1: (1, Multiset({1: 1}), [
            (VariableWithCount('i2.4.0', 1, 1, S(0)), Add)
        ])
    }
    subjects = {}
    subjects_by_id = {}
    bipartite = BipartiteGraph()
    associative = Add
    max_optional_count = 1
    anonymous_patterns = set()

    def __init__(self):
        self.add_subject(None)

    @staticmethod
    def get():
        # Lazily create and reuse the singleton matcher instance.
        if CommutativeMatcher95023._instance is None:
            CommutativeMatcher95023._instance = CommutativeMatcher95023()
        return CommutativeMatcher95023._instance

    @staticmethod
    def get_match_iter(subject):
        # Generated state machine: yields (pattern_index, substitution)
        # pairs; the '# State NNNN' comments are codegen markers.
        subjects = deque([subject]) if subject is not None else deque()
        subst0 = Substitution()
        # State 95022
        subst1 = Substitution(subst0)
        try:
            subst1.try_add_variable('i2.2.1.4.1.0_1', S(1))
        except ValueError:
            pass
        else:
            pass
            # State 95024
            if len(subjects) >= 1:
                tmp2 = subjects.popleft()
                subst2 = Substitution(subst1)
                try:
                    subst2.try_add_variable('i2.2.1.4.1.0', tmp2)
                except ValueError:
                    pass
                else:
                    pass
                    # State 95025
                    if len(subjects) == 0:
                        pass
                        # 0: x*d
                        yield 0, subst2
                subjects.appendleft(tmp2)
        subst1 = Substitution(subst0)
        try:
            subst1.try_add_variable('i2.4.1.0_1', S(1))
        except ValueError:
            pass
        else:
            pass
            # State 97036
            if len(subjects) >= 1:
                tmp5 = subjects.popleft()
                subst2 = Substitution(subst1)
                try:
                    subst2.try_add_variable('i2.4.1.0', tmp5)
                except ValueError:
                    pass
                else:
                    pass
                    # State 97037
                    if len(subjects) == 0:
                        pass
                        # 1: x*f
                        yield 1, subst2
                subjects.appendleft(tmp5)
        if len(subjects) >= 1 and isinstance(subjects[0], Mul):
            tmp7 = subjects.popleft()
            associative1 = tmp7
            associative_type1 = type(tmp7)
            subjects8 = deque(tmp7._args)
            matcher = CommutativeMatcher95027.get()
            tmp9 = subjects8
            subjects8 = []
            for s in tmp9:
                matcher.add_subject(s)
            for pattern_index, subst1 in matcher.match(tmp9, subst0):
                pass
                if pattern_index == 0:
                    pass
                    # State 95028
                    if len(subjects) == 0:
                        pass
                        # 0: x*d
                        yield 0, subst1
                if pattern_index == 1:
                    pass
                    # State 97038
                    if len(subjects) == 0:
                        pass
                        # 1: x*f
                        yield 1, subst1
            subjects.appendleft(tmp7)
        return
        yield
from matchpy.matching.many_to_one import CommutativeMatcher
from collections import deque
from .generated_part002300 import *
from matchpy.utils import VariableWithCount
from multiset import Multiset | [
"franz.bonazzi@gmail.com"
] | franz.bonazzi@gmail.com |
b472f6f5cc57716d39c06e423808c87c7390c6b7 | a34e3d435f48ef87477d3ae13ca8a43015e5052c | /tifffile_test.py | 07af1815e1e30af537c4481aac2250714b5604aa | [] | no_license | haehn/sandbox | 636069372fc7bb7fd72b5fde302f42b815e8e9b0 | e49a0a30a1811adb73577ff697d81db16ca82808 | refs/heads/master | 2021-01-22T03:39:03.415863 | 2015-02-11T23:16:22 | 2015-02-11T23:16:22 | 26,128,048 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 130 | py | import tifffile as tif
import time
start_t = time.clock()
i = tif.imread('test.tif')
print time.clock() - start_t
print i.shape
| [
"haehn@seas.harvard.edu"
] | haehn@seas.harvard.edu |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.