| column | type | range / values |
|---|---|---|
| blob_id | string | length 40 |
| directory_id | string | length 40 |
| path | string | length 2–616 |
| content_id | string | length 40 |
| detected_licenses | list | length 0–69 |
| license_type | string | 2 classes |
| repo_name | string | length 5–118 |
| snapshot_id | string | length 40 |
| revision_id | string | length 40 |
| branch_name | string | length 4–63 |
| visit_date | timestamp[us] | |
| revision_date | timestamp[us] | |
| committer_date | timestamp[us] | |
| github_id | int64 | 2.91k–686M, nullable (⌀) |
| star_events_count | int64 | 0–209k |
| fork_events_count | int64 | 0–110k |
| gha_license_id | string | 23 classes |
| gha_event_created_at | timestamp[us] | |
| gha_created_at | timestamp[us] | |
| gha_language | string | 213 classes |
| src_encoding | string | 30 classes |
| language | string | 1 class |
| is_vendor | bool | 2 classes |
| is_generated | bool | 2 classes |
| length_bytes | int64 | 2–10.3M |
| extension | string | 246 classes |
| content | string | length 2–10.3M |
| authors | list | length 1 |
| author_id | string | length 0–212 |
e2ded373e1f8f469c36f864985b1007db0cdfbeb
|
d314e9f320fbb4c31980efb4292be31c931e8714
|
/harness/determined/ipc.py
|
7cb033aae53e61954b12af5b5a38bd4ab572ec75
|
[
"Apache-2.0"
] |
permissive
|
takabayashi/determined
|
34919124882db2f31eb230645b249d30ff352c96
|
820c7250d8fdc6abba83c106f36eede6fc9f5f3a
|
refs/heads/master
| 2022-05-29T15:07:55.989815
| 2020-04-29T16:39:53
| 2020-04-29T17:26:54
| 260,006,201
| 1
| 0
|
Apache-2.0
| 2020-04-29T18:15:30
| 2020-04-29T18:15:29
| null |
UTF-8
|
Python
| false
| false
| 13,233
|
py
|
import logging
import time
from typing import Any, Callable, Dict, List, Optional, Tuple, cast
import zmq
from zmq.error import ZMQBindError, ZMQError
from determined_common import check
class _OneSidedBarrier:
"""
_OneSidedBarrier is a message from participants (usually workers) to a single process (usually
the chief) indicating to the chief that the workers are ready for the next phase of
computation.
"""
def __init__(self, message: Any) -> None:
self.message = message
class MetricsInfo:
"""
MetricsInfo contains validation metrics and the number of batches used to generate those
metrics. Used to communicate metrics between training processes.
"""
def __init__(self, metrics: Dict[str, Any], num_batches: int):
self.metrics = metrics
self.num_batches = num_batches
class ConnectedMessage:
"""
ConnectedMessage is sent by a ZMQBroadcastClient to a ZMQBroadcastServer as the very first
message. The ZMQBroadcastServer must gather one ConnectedMessage from each client before it is
safe to broadcast.
"""
pass
class ReadyMessage:
"""
ReadyMessage is sent by a SubprocessLauncher to the SubprocessReceiver when it is ready to
start receiving workloads.
"""
pass
class _SerialMessage:
"""
_SerialMessage wraps a payload in a monotonically-increasing serial number, which makes it easy
to confirm that our broadcasting does not get out-of-sync.
"""
def __init__(self, serial: int, payload: Any) -> None:
self.serial = serial
self.payload = payload
class ZMQBroadcastServer:
"""
Similar to ZMQServer except with broadcast/gather semantics on exactly two ports.
Using a constant number of ports allows the SubprocessReceiver to be configured without knowing
any rank information (i.e., before user code is read and horovod can be initialized).
A ConnectedMessage must be observed from each connection before it is safe to broadcast. This
can be accomplished by calling gather_with_polling() and checking that all gathered messages
are ConnectedMessages.
ZMQBroadcastServer uses the ZMQ PUB-SUB pattern to transmit messages to worker processes, and
the PUSH-PULL pattern to collect responses from workers. The reason for this asymmetry is that
PUSH-PULL connections block during send() if the remote end is dead. Therefore, PUSH-PULL
cannot be used for transmitting from server to worker, because if all the workers die, the
server would hang.
Additionally, the server can't receive messages from workers via the PUB-SUB pattern, because
the startup semantics of PUB-SUB connections in ZMQ are slightly odd; the SUB socket must
connect to the PUB socket. Normally this happens when you do sub_socket.connect(), but if the
server creates a SUB socket and does sub_socket.bind(), then when the client creates a PUB
socket and calls pub_socket.connect(), ZMQ still has to create a connection from the SUB to the
PUB (since sub_socket used bind() instead of connect()) and the server's SUB socket will
usually miss the first message sent by the client's PUB socket.
See ZMQ documentation for a related discussion on PUB-SUB sockets:
http://zguide.zeromq.org/page:all#Getting-the-Message-Out (look for "one more important thing")
http://zguide.zeromq.org/page:all#Node-Coordination
"""
def __init__(self, num_connections: int, port_range: Optional[Tuple[int, int]] = None) -> None:
self._num_connections = num_connections
context = zmq.Context()
self._pub_socket = context.socket(zmq.PUB)
self._pull_socket = context.socket(zmq.PULL)
if port_range is None:
self._pub_port = self._pub_socket.bind_to_random_port("tcp://*") # type: int
self._pull_port = self._pull_socket.bind_to_random_port("tcp://*") # type: int
else:
port_min, port_max = port_range
self._pub_port = self._pub_socket.bind_to_random_port("tcp://*", port_min, port_max)
self._pull_port = self._pull_socket.bind_to_random_port("tcp://*", port_min, port_max)
self._send_serial = 0
self._recv_serial = 0
def __enter__(self) -> "ZMQBroadcastServer":
return self
def __exit__(self, *_: Any) -> None:
self.close()
def close(self) -> None:
self._pub_socket.close()
self._pull_socket.close()
def get_pub_port(self) -> int:
return self._pub_port
def get_pull_port(self) -> int:
return self._pull_port
def broadcast(self, obj: Any) -> None:
"""
Broadcast a message object to each connection.
"""
self._pub_socket.send_pyobj(_SerialMessage(self._send_serial, obj))
self._send_serial += 1
def gather_with_polling(self, health_check: Callable[[], None]) -> List[Any]:
"""
Gather a response message from each connection, with a health_check callback that can raise
an error if something goes wrong.
"""
return self._recv_all_with_polling(health_check)
def _recv_all_with_polling(self, health_check: Callable[[], None]) -> List[Any]:
messages = [] # type: List[Any]
while len(messages) < self._num_connections:
if self._pull_socket.poll(1000) == 0:
# Call the polling function (probably check if a subprocess is alive).
health_check()
continue
messages.append(self._recv_one(self._recv_serial))
self._recv_serial += 1
return messages
def _recv_one(self, serial: int) -> Any:
"""
Receive one _SerialMessage from the socket and confirm that it is in-order.
"""
obj = self._pull_socket.recv_pyobj()
if isinstance(obj, _SerialMessage):
check.eq(obj.serial, serial, "Out-of-order client message detected")
return obj.payload
raise AssertionError(f"Unexpected message type encountered: {type(obj)}")
class ZMQBroadcastClient:
def __init__(self, srv_pub_url: str, srv_pull_url: str) -> None:
context = zmq.Context()
self._sub_socket = context.socket(zmq.SUB)
# Subscriber always listens to ALL messages.
self._sub_socket.subscribe(b"")
self._sub_socket.connect(srv_pub_url)
self._push_socket = context.socket(zmq.PUSH)
self._push_socket.connect(srv_pull_url)
self._send_serial = 0
self._recv_serial = 0
def __enter__(self) -> "ZMQBroadcastClient":
return self
def __exit__(self, *_: Any) -> None:
self.close()
def close(self) -> None:
self._sub_socket.close()
self._push_socket.close()
def send(self, obj: Any) -> None:
message = _SerialMessage(self._send_serial, obj)
self._send_serial += 1
self._push_socket.send_pyobj(message)
def recv(self) -> Any:
obj = self._sub_socket.recv_pyobj()
if isinstance(obj, _SerialMessage):
check.eq(obj.serial, self._recv_serial, "Out-of-order server message detected")
self._recv_serial += 1
return obj.payload
raise AssertionError(f"Unexpected message type encountered: {type(obj)}")
class ZMQServer:
"""
ZMQServer connects the trial controller with training subprocesses.
It also synchronizes the chief trial runner with all non-chief trial
runners when using Horovod.
For communicating with training subprocess, we initialize a separate
socket (which binds to a unique port) for each connection.
Clients connecting to the ZMQ server (workers or non-chief trial controllers)
need to send the initial message, and each socket needs to have a strict
send-receive message ordering (a requirement for ZMQ REQ and REP sockets).
ZMQServer takes as input either a list of specific ports, or a range of ports.
If a range of ports is specified, ZMQ will randomly select an available port
within the range.
"""
def __init__(
self,
num_connections: Optional[int] = None,
ports: Optional[List[int]] = None,
port_range: Optional[Tuple[int, int]] = None,
) -> None:
self.context = zmq.Context()
self.sockets = [] # type: List[zmq.Socket]
self.ports = [] # type: List[int]
if ports:
check.is_none(port_range)
self._bind_to_specified_ports(ports=ports)
check.eq(len(self.ports), len(ports))
else:
check.is_not_none(num_connections)
check.is_not_none(port_range)
num_connections = cast(int, num_connections)
port_range = cast(Tuple[int, int], port_range)
self._bind_to_random_ports(port_range=port_range, num_connections=num_connections)
check.eq(len(self.ports), num_connections)
def __enter__(self) -> "ZMQServer":
return self
def __exit__(self, *_: Any) -> None:
self.close()
def _bind_to_specified_ports(self, ports: List[int]) -> None:
for port in ports:
socket = self.context.socket(zmq.REP)
try:
socket.bind(f"tcp://*:{port}")
except ZMQError:
logging.warning(f"Failed to bind to port {port}.")
exit(1)
self.sockets.append(socket)
self.ports.append(port)
def _bind_to_random_ports(self, port_range: Tuple[int, int], num_connections: int) -> None:
check.lt(num_connections, port_range[1] - port_range[0])
for _ in range(num_connections):
socket = self.context.socket(zmq.REP)
try:
selected_port = socket.bind_to_random_port(
addr="tcp://*", min_port=port_range[0], max_port=port_range[1]
)
self.ports.append(selected_port)
except ZMQBindError:
logging.warning(f"Failed to bind to port range {port_range}.")
exit(1)
self.sockets.append(socket)
def get_ports(self) -> List[int]:
return self.ports
def send(self, py_obj: Any) -> None:
for socket in self.sockets:
socket.send_pyobj(py_obj)
def receive_blocking(self, send_rank: int) -> Any:
check.lt(send_rank, len(self.sockets))
message = self.sockets[send_rank].recv_pyobj()
return message
def receive_non_blocking(
self, send_rank: int, deadline: Optional[float] = None
) -> Tuple[bool, Any]:
check.lt(send_rank, len(self.sockets))
timeout = 1000 if not deadline else int(deadline - time.time()) * 1000
timeout = max(timeout, 100)
if self.sockets[send_rank].poll(timeout) == 0:
return False, None
message = self.sockets[send_rank].recv_pyobj()
return True, message
def barrier(
self, num_connections: int, message: Any = None, timeout: Optional[int] = None
) -> List[Any]:
"""
This is a one-sided barrier, where the chief blocks until
all non-chief trial containers have sent a message.
"""
check.eq(len(self.sockets), 1)
messages = [] # type: List[Any]
start_time = time.time()
for _ in range(num_connections):
if timeout:
message_received, barrier_message = self.receive_non_blocking(
send_rank=0, deadline=start_time + timeout
)
if not message_received:
return messages
else:
barrier_message = self.receive_blocking(0)
check.is_instance(barrier_message, _OneSidedBarrier)
messages.append(barrier_message.message)
self.sockets[0].send_pyobj(_OneSidedBarrier(message=message))
return messages
def close(self) -> None:
for socket in self.sockets:
socket.close()
class ZMQClient:
"""
ZMQClient connects training subprocesses with trial-controller.
It also signals the chief trial-controller, when the non-chief
trial controller has successfully started sshd.
"""
def __init__(self, ip_address: str, port: int) -> None:
self.context = zmq.Context()
self.socket = self.context.socket(zmq.REQ)
self.socket.connect(f"tcp://{ip_address}:{port}")
def __enter__(self) -> "ZMQClient":
return self
def __exit__(self, *_: Any) -> None:
self.close()
def send(self, py_obj: Any) -> None:
self.socket.send_pyobj(py_obj)
def receive(self) -> Any:
return self.socket.recv_pyobj()
def barrier(self, message: Any = None) -> Any:
"""
This is a one-sided barrier, where the chief blocks until
all non-chief trial containers have sent a message.
"""
self.socket.send_pyobj(_OneSidedBarrier(message=message))
barrier_message = self.socket.recv_pyobj()
check.is_instance(barrier_message, _OneSidedBarrier)
return barrier_message.message
def close(self) -> None:
self.socket.close()
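# A minimal usage sketch of the broadcast pair above (an illustration under
# assumptions, not part of the original module): it runs server and client in
# one process for brevity, whereas real use puts them in separate processes.
def _broadcast_demo() -> None:
    with ZMQBroadcastServer(num_connections=1) as server:
        pub, pull = server.get_pub_port(), server.get_pull_port()
        with ZMQBroadcastClient(f"tcp://localhost:{pub}", f"tcp://localhost:{pull}") as client:
            # Per the class docstring, a ConnectedMessage must be gathered
            # from each client before broadcasting is safe.
            client.send(ConnectedMessage())
            server.gather_with_polling(lambda: None)
            server.broadcast("workload")
            assert client.recv() == "workload"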
|
[
"hello@determined.ai"
] |
hello@determined.ai
|
622595217199714974e8967515ca307ed7900e52
|
b1bf615bfa1ee2065e3adfe90310814c3b27c61d
|
/2020-12-24/make-sum-divisible-by-p.py
|
829b242c230d12f5a764d05f10434507131476dd
|
[] |
no_license
|
Huajiecheng/leetcode
|
73b09a88e61ea3b16ca3bf440fadd1470652ccf2
|
4becf814a2a06611ee909ec700380ab83ac8ab99
|
refs/heads/main
| 2023-03-19T21:54:20.952909
| 2021-03-06T03:34:52
| 2021-03-06T03:34:52
| 320,959,720
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 579
|
py
|
from typing import List

class Solution:
def minSubarray(self, nums: List[int], p: int) -> int:
total = sum(nums)%p
if total == 0:
return 0
pref = {0:-1}
result = len(nums)
temp = 0
for i in range(len(nums)):
temp = (temp + nums[i])%p
if (temp - total)%p in pref:
if result > (i - pref[(temp - total)%p]):
result = i - pref[(temp - total)%p]
pref[temp] = i
if result == len(nums):
return -1
else:
return result
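# A worked example for the prefix-sum trick above (illustrative, not part of
# the original solution): for nums = [3, 1, 4, 2] and p = 6, total = 10 % 6 = 4.
# Removing the length-1 subarray [4] leaves sum 6, which is divisible by 6; the
# prefix map finds it because pref[i] - pref[j] ≡ total (mod p) exactly when
# dropping nums[j+1..i] fixes the remainder. So minSubarray returns 1.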
|
[
"chenghuajie1998@gmail.com"
] |
chenghuajie1998@gmail.com
|
4a4297cf16a767efcc9ea61ef9ea5aa97d08900e
|
9af167954df1b7f2dda03647403ec10f45ab73c1
|
/example_classification.py
|
b8dd9b24b1a9e73ebc6b75a3031b71c58867c656
|
[] |
no_license
|
alexanderdz27/NeuralGenetic
|
7659cdda5fbdc6257a601cc9da0ea116c89e390c
|
82e73d92dc1a32ad17f982cfa8230d6eb4d17321
|
refs/heads/master
| 2023-06-05T05:40:45.660055
| 2021-06-18T03:33:16
| 2021-06-18T03:33:16
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 6,479
|
py
|
import numpy
import pygad
import pygad.nn
import pygad.gann
def fitness_func(solution, sol_idx):
global GANN_instance, data_inputs, data_outputs
predictions = pygad.nn.predict(last_layer=GANN_instance.population_networks[sol_idx],
data_inputs=data_inputs)
correct_predictions = numpy.where(predictions == data_outputs)[0].size
solution_fitness = (correct_predictions/data_outputs.size)*100
return solution_fitness
def callback_generation(ga_instance):
global GANN_instance, last_fitness
population_matrices = pygad.gann.population_as_matrices(population_networks=GANN_instance.population_networks,
population_vectors=ga_instance.population)
GANN_instance.update_population_trained_weights(population_trained_weights=population_matrices)
print("Generation = {generation}".format(generation=ga_instance.generations_completed))
print("Fitness = {fitness}".format(fitness=ga_instance.best_solution()[1]))
print("Change = {change}".format(change=ga_instance.best_solution()[1] - last_fitness))
last_fitness = ga_instance.best_solution()[1].copy()
# Holds the fitness value of the previous generation.
last_fitness = 0
# Reading the input data.
data_inputs = numpy.load("dataset_features.npy") # Download from https://github.com/ahmedfgad/NumPyANN/blob/master/dataset_features.npy
# Optional step of filtering the input data using the standard deviation.
features_STDs = numpy.std(a=data_inputs, axis=0)
data_inputs = data_inputs[:, features_STDs>50]
# Reading the output data.
data_outputs = numpy.load("outputs.npy") # Download from https://github.com/ahmedfgad/NumPyANN/blob/master/outputs.npy
# The length of the input vector for each sample (i.e. number of neurons in the input layer).
num_inputs = data_inputs.shape[1]
# The number of neurons in the output layer (i.e. number of classes).
num_classes = 4
# Creating an initial population of neural networks. The return of the initial_population() function holds references to the networks, not their weights. Using such references, the weights of all networks can be fetched.
num_solutions = 8 # A solution or a network can be used interchangeably.
GANN_instance = pygad.gann.GANN(num_solutions=num_solutions,
num_neurons_input=num_inputs,
num_neurons_hidden_layers=[150, 50],
num_neurons_output=num_classes,
hidden_activations=["relu", "relu"],
output_activation="softmax")
# The population does not hold the numerical weights of the networks; instead, it holds a list of references to the last layer of each network (i.e. solution) in the population. A solution and a network can be used interchangeably.
# If there is a population with 3 solutions (i.e. networks), then the population is a list with 3 elements. Each element is a reference to the last layer of each network. Using such a reference, all details of the network can be accessed.
population_vectors = pygad.gann.population_as_vectors(population_networks=GANN_instance.population_networks)
# To prepare the initial population, there are 2 ways:
# 1) Prepare it yourself and pass it to the initial_population parameter. This way is useful when the user wants to start the genetic algorithm with a custom initial population.
# 2) Assign valid integer values to the sol_per_pop and num_genes parameters. If the initial_population parameter exists, then the sol_per_pop and num_genes parameters are useless.
initial_population = population_vectors.copy()
num_parents_mating = 4 # Number of solutions to be selected as parents in the mating pool.
num_generations = 500 # Number of generations.
mutation_percent_genes = 10 # Percentage of genes to mutate. This parameter has no action if the parameter mutation_num_genes exists.
parent_selection_type = "sss" # Type of parent selection.
crossover_type = "single_point" # Type of the crossover operator.
mutation_type = "random" # Type of the mutation operator.
keep_parents = -1 # Number of parents to keep in the next population. -1 means keep all parents and 0 means keep nothing.
ga_instance = pygad.GA(num_generations=num_generations,
num_parents_mating=num_parents_mating,
initial_population=initial_population,
fitness_func=fitness_func,
mutation_percent_genes=mutation_percent_genes,
parent_selection_type=parent_selection_type,
crossover_type=crossover_type,
mutation_type=mutation_type,
keep_parents=keep_parents,
on_generation=callback_generation)
ga_instance.run()
# After the generations complete, some plots are shown that summarize how the outputs/fitness values evolve over generations.
ga_instance.plot_fitness()
# Returning the details of the best solution.
solution, solution_fitness, solution_idx = ga_instance.best_solution()
print("Parameters of the best solution : {solution}".format(solution=solution))
print("Fitness value of the best solution = {solution_fitness}".format(solution_fitness=solution_fitness))
print("Index of the best solution : {solution_idx}".format(solution_idx=solution_idx))
if ga_instance.best_solution_generation != -1:
print("Best fitness value reached after {best_solution_generation} generations.".format(best_solution_generation=ga_instance.best_solution_generation))
# Predicting the outputs of the data using the best solution.
predictions = pygad.nn.predict(last_layer=GANN_instance.population_networks[solution_idx],
data_inputs=data_inputs)
print("Predictions of the trained network : {predictions}".format(predictions=predictions))
# Calculating some statistics
num_wrong = numpy.where(predictions != data_outputs)[0]
num_correct = data_outputs.size - num_wrong.size
accuracy = 100 * (num_correct/data_outputs.size)
print("Number of correct classifications : {num_correct}.".format(num_correct=num_correct))
print("Number of wrong classifications : {num_wrong}.".format(num_wrong=num_wrong.size))
print("Classification accuracy : {accuracy}.".format(accuracy=accuracy))
|
[
"noreply@github.com"
] |
alexanderdz27.noreply@github.com
|
4e84132f9e9c8d0f91a156d0d1a7a5ec7f69aa1d
|
d013ace4b5c22895b837d0db42a4059497499430
|
/ch10/ex03_zip.py
|
a0d73bc94b1606c9d36712bf227534118d16091a
|
[] |
no_license
|
jeonghaejun/01.python
|
b83fd317d353dc71a1ac27964e49f694f042ce49
|
770e79391b257b38246e7e912db29e5adf965208
|
refs/heads/master
| 2023-02-25T13:45:29.310438
| 2021-01-31T10:57:21
| 2021-01-31T10:57:21
| 328,698,070
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 850
|
py
|
info = """ 고길동 홍길동 둘리 도우너
30 40 50 60
70 90 60 56 78
80 100 20 90 100
30 40 50 60"""
info_lines = info.splitlines()
students = info_lines[0].split() # key part
scores = info_lines[1:]
print(scores)
# scores = [line.split() for line in scores] # value part
# print(scores)
# result = dict(zip(students, scores))
# print(result)
# {'고길동': ['30', '40', '50', '60'],
# '홍길동': ['70', '90', '60', '56', '78'],
# '둘리': ['80', '100', '20', '90', '100'],
# '도우너': ['30', '40', '50', '60']}
# The scores are still strings; they need to be converted to numbers.
# This can also be done with append: inserting into an empty dict
# using append and a for loop, though that approach takes more looping.
scores = [list(map(int, line.split())) for line in scores]
print(scores)
result = dict(zip(students, scores))
print(result)
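# As the comments above hint, the same dict can be built without zip() by
# filling an empty dict in an explicit loop (a sketch; more looping, same result):
result2 = {}
for i in range(len(students)):
    result2[students[i]] = scores[i]
print(result2 == result)  # True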
|
[
"wjdgownsll@gmail.com"
] |
wjdgownsll@gmail.com
|
8f76bfa8589646fd35727b1844481d3e8b72db0a
|
aa7c04dfce69ebb57049b79c2bca07909037bf21
|
/manage.py
|
fd65ea474f6097185f90cdf09d20c5e7365b4694
|
[] |
no_license
|
bricedurand/imagotv
|
68586456c86002ef148ac3b40dded9df5d604c5f
|
77afa9fb1d1a2de8580503df476d283f992c8d29
|
refs/heads/master
| 2022-12-11T18:03:15.910907
| 2019-10-09T14:30:00
| 2019-10-09T14:30:00
| 247,509,990
| 0
| 0
| null | 2022-12-08T07:25:28
| 2020-03-15T16:54:47
|
Python
|
UTF-8
|
Python
| false
| false
| 627
|
py
|
#!/usr/bin/env python
"""Django's command-line utility for administrative tasks."""
import os
import sys
def main():
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'imagotv.settings')
try:
from django.core.management import execute_from_command_line
except ImportError as exc:
raise ImportError(
"Couldn't import Django. Are you sure it's installed and "
"available on your PYTHONPATH environment variable? Did you "
"forget to activate a virtual environment?"
) from exc
execute_from_command_line(sys.argv)
if __name__ == '__main__':
main()
|
[
"brice.durand2@gmail.com"
] |
brice.durand2@gmail.com
|
e839a3146f26510aa1e0238cb2c25c17901e88ef
|
50008b3b7fb7e14f793e92f5b27bf302112a3cb4
|
/recipes/Python/577680_Multithreaded_Mandelbrot_Fractal/recipe-577680.py
|
cadeff717c7ba504886676411207393bf73ca6fb
|
[
"MIT"
] |
permissive
|
betty29/code-1
|
db56807e19ac9cfe711b41d475a322c168cfdca6
|
d097ca0ad6a6aee2180d32dce6a3322621f655fd
|
refs/heads/master
| 2023-03-14T08:15:47.492844
| 2021-02-24T15:39:59
| 2021-02-24T15:39:59
| 341,878,663
| 0
| 0
|
MIT
| 2021-02-24T15:40:00
| 2021-02-24T11:31:15
|
Python
|
UTF-8
|
Python
| false
| false
| 1,749
|
py
|
# Multi-threaded Mandelbrot Fractal (Do not run using IDLE!)
# FB - 201104306
import threading
from PIL import Image
w = 512 # image width
h = 512 # image height
image = Image.new("RGB", (w, h))
wh = w * h
maxIt = 256 # max number of iterations allowed
# drawing region (xa < xb & ya < yb)
xa = -2.0
xb = 1.0
ya = -1.5
yb = 1.5
xd = xb - xa
yd = yb - ya
numThr = 5 # number of threads to run
# lock = threading.Lock()
class ManFrThread(threading.Thread):
def __init__ (self, k):
self.k = k
threading.Thread.__init__(self)
def run(self):
# each thread only calculates its own share of pixels
        for i in range(self.k, wh, numThr):
kx = i % w
ky = int(i / w)
a = xa + xd * kx / (w - 1.0)
b = ya + yd * ky / (h - 1.0)
x = a
y = b
for kc in range(maxIt):
x0 = x * x - y * y + a
y = 2.0 * x * y + b
x = x0
if x * x + y * y > 4:
# various color palettes can be created here
red = (kc % 8) * 32
green = (16 - kc % 16) * 16
blue = (kc % 16) * 16
# lock.acquire()
global image
image.putpixel((kx, ky), (red, green, blue))
# lock.release()
break
if __name__ == "__main__":
tArr = []
for k in range(numThr): # create all threads
tArr.append(ManFrThread(k))
for k in range(numThr): # start all threads
tArr[k].start()
for k in range(numThr): # wait until all threads finished
tArr[k].join()
image.save("MandelbrotFractal.png", "PNG")
|
[
"betty@qburst.com"
] |
betty@qburst.com
|
3d620860ad59574efaeb995c21fa074d91cd3f78
|
14b48383d46e3acc3c10318d199a7c2855c53c69
|
/diagnose/query_table.py
|
d8ddbbf1068a0649f85b080ea88667c686780aa9
|
[] |
no_license
|
zhongyu211/diagnose
|
afb2ab68df5be0ede4c63229e6b363d8f9a6f245
|
8b0565364a959ea9469e0fcd0de5bffe40640c08
|
refs/heads/master
| 2021-01-23T12:17:56.948637
| 2015-07-30T15:45:41
| 2015-07-30T15:45:41
| 39,955,296
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,056
|
py
|
from diagnose_app.models import *
class query_table:
"""
    This class is used to query a table based on conditions.
"""
def __init__(self,tableName,startDay,startTime,endDay,endTime,counterList):
self.__tableName__ = tableName
self.__startDay__ = startDay
self.__startTime__ = startTime
self.__endDay__ = endDay
self.__endTime__ = endTime
self.__counterList__ = counterList
self.__querylist__ = []
self.returnQueryList = []
def query(self):
"""
        This function queries all records between 'startDay startTime' and 'endDay endTime'.
"""
if self.__startDay__ == self.__endDay__ and self.__startTime__ == self.__endTime__:
self.__querylist__.extend(eval(self.__tableName__+".objects.filter(Day__exact=self.__startDay__).filter(Time__exact=self.__startTime__)"))
elif self.__startDay__ == self.__endDay__ and self.__startTime__ != self.__endTime__:
self.__querylist__.extend(eval(self.__tableName__+".objects.filter(Day__exact=self.__startDay__).filter(Time__gte=self.__startTime__).filter(Time__lte=self.__endTime__)"))
else:
self.__querylist__.extend(eval(self.__tableName__+".objects.filter(Day__gt=self.__startDay__).filter(Day__lt=self.__endDay__)"))
self.__querylist__.extend(eval(self.__tableName__+".objects.filter(Day__exact=self.__startDay__).filter(Time__gte=self.__startTime__)"))
self.__querylist__.extend(eval(self.__tableName__+".objects.filter(Day__exact=self.__endDay__).filter(Time__lte=self.__endTime__)"))
def query_list(self):
"""
        This function stores the related info into returnQueryList, which is structured as below:
        if counterList = [counter1,counter2,counter3]
the returnQueryList =
[
[timeStamp1, counter1value,counter2value,counter3value],
......
]
"""
self.query()
for record in self.__querylist__:
temp_list = []
timeStamp = str(record.Day) + " " + str(record.Time)
temp_list.append(timeStamp)
for counter in self.__counterList__:
temp_list.append(eval("record."+counter))
self.returnQueryList.append(temp_list)
return self.returnQueryList
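# A safer sketch of the dynamic lookups above (an assumption, not part of the
# original class): attribute access avoids eval() on user-supplied strings,
# and the model class could likewise be resolved via a dict of known models
# instead of eval on the table name.
def _values_for(record, counters):
    # Equivalent to eval("record." + counter) for each counter, for any
    # object (e.g. a Django model instance) exposing them as attributes.
    return [getattr(record, c) for c in counters]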
|
[
"zhongyu211@126.com"
] |
zhongyu211@126.com
|
d5149a9a3254804f686937ca02866d417bae3195
|
ca7aa979e7059467e158830b76673f5b77a0f5a3
|
/Python_codes/p03548/s664916381.py
|
4058808e3a1fb64328b5edad5b5303adb72da23f
|
[] |
no_license
|
Aasthaengg/IBMdataset
|
7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901
|
f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8
|
refs/heads/main
| 2023-04-22T10:22:44.763102
| 2021-05-13T17:27:22
| 2021-05-13T17:27:22
| 367,112,348
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 62
|
py
|
x,y,z=map(int,input().split())
xx=x-z
yy=y+z
print(int(xx/yy))
|
[
"66529651+Aastha2104@users.noreply.github.com"
] |
66529651+Aastha2104@users.noreply.github.com
|
236ced02e8996b3a0752c78e789074fc455c672d
|
1844de0e8eb99b70fa4a94ab8e85a8f81ab4c7cc
|
/util_future/util_data/basic_info.py
|
252c6cb403ba041e1fbf0db970435c995beca592
|
[] |
no_license
|
NewLanded/time_friend
|
2e8d901a05cb8b9155b51ed7ede44a9791471c18
|
ff150cf29a1e26067179eb7c59c2a4dff1777afe
|
refs/heads/main
| 2023-06-27T11:39:50.348289
| 2021-08-02T06:48:17
| 2021-08-02T06:48:17
| 380,187,400
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 4,654
|
py
|
import re
from itertools import count
from fastapi import HTTPException, status
from config import TRADE_CODE_LIST
from util_base.db import (get_boolean_value, get_multi_data,
get_single_value)
class BasicInfo:
def __init__(self, db_conn):
self.db_conn = db_conn
def __del__(self):
# self.db_conn.close()
pass
def get_ts_code_by_main_ts_code_with_date(self, main_ts_code, start_date, end_date):
sql = """
select trade_date, mapping_ts_code from future_main_code_data
where (ts_code in (select ts_code from future_main_code_data where mapping_ts_code=%(main_ts_code)s) or ts_code = %(main_ts_code)s) and trade_date between %(start_date)s and %(end_date)s order by trade_date
"""
args = {"main_ts_code": main_ts_code, "start_date": start_date, "end_date": end_date}
result = get_multi_data(self.db_conn, sql, args)
return result
def get_ts_code_by_main_ts_code(self, main_ts_code, data_date):
sql = """
select mapping_ts_code from future_main_code_data
where ts_code = %(main_ts_code)s and trade_date = %(data_date)s
"""
args = {"main_ts_code": main_ts_code, "data_date": data_date}
result = get_single_value(self.db_conn, sql, args)
return result
def get_main_ts_code_by_ts_code(self, ts_code):
sql = """
select ts_code from future_main_code_data
where mapping_ts_code = %(ts_code)s
"""
args = {"ts_code": ts_code}
result = get_single_value(self.db_conn, sql, args)
return result
def get_active_ts_code_info(self, data_date):
ts_code_list = self.get_active_ts_code(data_date)
if not ts_code_list:
raise HTTPException(status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, detail="未获取到有效的ts_code")
sql = """
select ts_code, exchange, name from future_basic_info_data
where ts_code=ANY(%(ts_code_list)s::text[])
"""
args = {"ts_code_list": ts_code_list}
result = get_multi_data(self.db_conn, sql, args)
return result
def get_active_ts_code(self, data_date):
data_date = self.get_active_trade_day(data_date)
sql = """
select ts_code, mapping_ts_code from future_main_code_data
where trade_date = %(data_date)s
"""
args = {"data_date": data_date}
result = get_multi_data(self.db_conn, sql, args)
result_new = []
for row in result:
symbol_main, symbol_ts = row[0].split('.')[0], row[1].split('.')[0]
if re.match(r'[A-Z]*', symbol_ts).group(0) == re.match(r'[A-Z]*', symbol_main).group(0):
result_new.append(row[1])
result = list(set(result_new))
result = [i for i in result if re.match(r"(\D+)([\d|\.]+)(\D*)", i).group(1) in TRADE_CODE_LIST]
result.sort()
return result
def get_future_info_by_symbol(self, symbol_code_list):
        index_dict = dict(zip(symbol_code_list, count()))
sql = """
select ts_code, name from future_basic_info_data
where symbol=ANY(%(symbol_code_list)s::text[])
"""
args = {"symbol_code_list": symbol_code_list}
result = get_multi_data(self.db_conn, sql, args)
result = sorted(result, key=lambda x: index_dict[x[0].split('.')[0]])
return result
def get_active_trade_day(self, data_date):
sql = """
select max(date) from sec_date_info where date <= %(data_date)s and is_workday_flag=true;
"""
args = {"data_date": data_date}
date = get_single_value(self.db_conn, sql, args)
return date
def get_previous_trade_day(self, data_date):
sql = """
select max(date) from sec_date_info where date < %(data_date)s and is_workday_flag=true;
"""
args = {"data_date": data_date}
date = get_single_value(self.db_conn, sql, args)
return date
def get_next_trade_day(self, data_date):
sql = """
select min(date) from sec_date_info where date > %(data_date)s and is_workday_flag=true;
"""
args = {"data_date": data_date}
date = get_single_value(self.db_conn, sql, args)
return date
def is_trade_day(self, data_date):
sql = """
select 1 from sec_date_info where date=%(data_date)s and is_workday_flag=true
"""
args = {"data_date": data_date}
result = get_boolean_value(self.db_conn, sql, args)
result = True if result else False
return result
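# A minimal usage sketch (assumptions: the %(name)s parameter style suggests a
# psycopg2-style DB-API connection, and the referenced tables exist; names
# below are illustrative only):
#
#     import psycopg2
#     conn = psycopg2.connect("dbname=example")  # hypothetical DSN
#     info = BasicInfo(conn)
#     if info.is_trade_day("2021-08-02"):
#         codes = info.get_active_ts_code("2021-08-02")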
|
[
"l1141041@163.com"
] |
l1141041@163.com
|
d652e8d342d4ed592c456d84677d33b5263bc550
|
07ceea42987c134c9073a4115ada928501f9ff41
|
/asg_28.py
|
74f07efbcf3b5c1e3b596b518f6b0e39dde148e2
|
[] |
no_license
|
nirajandata/iwbootcamp_assignments
|
62d10bd7d3ce0c719f69c65a0504a2655fd193e3
|
a898779cff843c54ab60ad2c8c867a8aa6dde7cb
|
refs/heads/main
| 2023-03-07T23:50:54.919365
| 2021-02-20T11:50:32
| 2021-02-20T11:50:32
| 337,075,903
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 42
|
py
|
m1={1:2,4:5}
m={6:8}
m.update(m1)
print(m)
|
[
"nirajan.data@gmail.com"
] |
nirajan.data@gmail.com
|
d735c26448881883d2ba49b2b1bbe561d190b6f6
|
0a3c7011795a704e7908d9d3261f40021c3fa8f7
|
/app/modules/public/mod_home/views.py
|
3ed51529db46e16ae7c933e06fb8937fe17d5a26
|
[] |
no_license
|
assemblio/hct
|
10b08e7173eb2a06fbae510b25bffb6a2d2e4c4b
|
fd526a32045fa1182fa67ea18fc47db967a3c598
|
refs/heads/master
| 2021-01-23T02:53:33.861624
| 2015-08-30T19:14:35
| 2015-08-30T19:14:35
| 31,592,213
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 575
|
py
|
from flask import request, render_template
from flask import Blueprint
# Define the blueprint:
mod_home = Blueprint('mod_home', __name__)
# Set the route and accepted methods
@mod_home.route('/')
def index():
return render_template('home/index.html')
@mod_home.route('/about-us')
def about_us():
return render_template('home/about-us.html')
@mod_home.route('/contacts')
def contacts():
return render_template('home/contacts.html')
@mod_home.route('/terms-and-conditions')
def terms_conditions():
return render_template('home/terms-and-conditions.html')
|
[
"partin.imeri@gmail.com"
] |
partin.imeri@gmail.com
|
0258b0c719764b2c7c5bae595205a21bf97528f7
|
864285315c3a154639355f14ab1ff14633576405
|
/mapclientplugins/segmentationstep/model/node.py
|
3ee013be596ccf7cbfb66abb1198c320b7ed3ebc
|
[] |
no_license
|
hsorby/segmentationstep
|
774dc537967c9643bd0094dc4e64eefa472588b0
|
321505374f9434ac0ae832b0b00398c2d4ac1fbe
|
refs/heads/main
| 2021-09-28T09:06:07.197158
| 2015-08-14T07:59:55
| 2015-08-14T07:59:55
| 21,375,254
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 20,685
|
py
|
'''
MAP Client, a program to generate detailed musculoskeletal models for OpenSim.
Copyright (C) 2012 University of Auckland
This file is part of MAP Client. (http://launchpad.net/mapclient)
MAP Client is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
MAP Client is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with MAP Client. If not, see <http://www.gnu.org/licenses/>.
'''
import json
from opencmiss.zinc.status import OK
from mapclientplugins.segmentationstep.model.abstractmodel import AbstractModel
from mapclientplugins.segmentationstep.zincutils import createFiniteElementField
from mapclientplugins.segmentationstep.segmentpoint import SegmentPointStatus
from mapclientplugins.segmentationstep.model.curve import CurveModel
from mapclientplugins.segmentationstep.plane import PlaneAttitude
class NodeModel(AbstractModel):
def __init__(self, context):
super(NodeModel, self).__init__(context)
self._attributes_that_auto_serialize = [ "_nodes", "_plane_attitudes"]
self._plane = None
self._plane_attitude_store = []
self._plane_attitudes = {}
self._nodes = {}
self._curves = {}
self._on_plane_conditional_field = None
self._on_plane_point_cloud_field = None
self._on_plane_curve_field = None
self._on_plane_interpolation_point_field = None
def setPlane(self, plane):
self._plane = plane
def initialize(self):
self._setupNodeRegion()
self._on_plane_conditional_field = self._createOnPlaneConditionalField()
self._on_plane_point_cloud_field = self._createOnPlanePointCloudField()
self._on_plane_curve_field = self._createOnPlaneCurveField()
self._on_plane_interpolation_point_field = self._createOnPlaneInterpolation()
def getPointCloud(self):
cloud = []
node_nodeset = self._point_cloud_group.getMasterNodeset()
datapoint_nodeset = self._interpolation_point_group.getMasterNodeset()
def _getLocations(nodeset):
locations = []
ni = nodeset.createNodeiterator()
node = ni.next()
while node.isValid():
locations.append(self.getNodeLocation(node))
node = ni.next()
return locations
cloud += _getLocations(node_nodeset)
cloud += _getLocations(datapoint_nodeset)
return cloud
def _serializeNodeset(self, group):
str_rep = ''
ni = group.createNodeiterator()
node = ni.next()
while node.isValid():
str_rep += '"' + str(node.getIdentifier()) + '":' + json.dumps(self.getNodeLocation(node))
node = ni.next()
if node.isValid():
str_rep += ','
return str_rep
def _serializeSelection(self):
node_ids = []
ni = self._group.createNodeiterator()
node = ni.next()
while node.isValid():
node_ids.append(node.getIdentifier())
node = ni.next()
return json.dumps(node_ids)
def serialize(self):
str_rep = '{'
str_rep += '"_basic_points":{' + self._serializeNodeset(self._point_cloud_group)
str_rep += '},'
str_rep += '"_curve_points":{' + self._serializeNodeset(self._curve_group)
str_rep += '},'
str_rep += '"_selection":' + self._serializeSelection()
str_rep += ','
str_rep += '"_plane":' + self._plane.serialize() + ','
str_rep += '"_curves":{ ' # this space after the curly bracket is very important do not remove it.
for curve_index in self._curves:
str_rep += '"' + str(curve_index) + '":' + self._curves[curve_index].serialize() + ','
str_rep = str_rep[:-1] + '},'
str_rep += '"_plane_attitude_store":['
for plane_attitude in self._plane_attitude_store:
str_rep += plane_attitude.serialize()
if plane_attitude != self._plane_attitude_store[-1]:
str_rep += ', '
str_rep += '],'
for attr in self._attributes_that_auto_serialize:
str_rep += '"' + attr + '":' + json.dumps(getattr(self, attr))
if attr != self._attributes_that_auto_serialize[-1]:
str_rep += ','
str_rep += '}'
return str_rep
def _deserializeNodeset(self, group, data):
for node_id in data:
node = self._createNodeAtLocation(data[node_id], group.getName(), int(node_id))
group.addNode(node)
def _deserializeSelection(self, data):
for node_id in data:
node = self.getNodeByIdentifier(node_id)
self._group.addNode(node)
def deserialize(self, str_rep):
scene = self._region.getScene()
scene.beginChange()
master_nodeset = self._point_cloud_group.getMasterNodeset() # removeAllNodes()
master_nodeset.destroyAllNodes()
master_nodeset = self._interpolation_point_group.getMasterNodeset()
master_nodeset.destroyAllNodes()
self.setSelection([])
d = json.loads(str_rep)
self._deserializeNodeset(self._point_cloud_group, d['_basic_points'])
del d['_basic_points']
self._deserializeNodeset(self._curve_group, d['_curve_points'])
del d['_curve_points']
self._plane.deserialize(json.dumps(d['_plane']))
del d['_plane']
self._curves = {}
curves = d['_curves']
for curve_index in curves:
c = CurveModel(self)
c.deserialize(json.dumps(curves[curve_index]))
self.insertCurve(int(curve_index), c)
del d['_curves']
self._plane_attitude_store = []
plane_attitude_store = d['_plane_attitude_store']
for plane_attitude in plane_attitude_store:
p = PlaneAttitude(None, None)
p.deserialize(json.dumps(plane_attitude))
self._plane_attitude_store.append(p)
del d['_plane_attitude_store']
selection = d['_selection']
selection_field = scene.getSelectionField()
if not selection_field.isValid():
scene.setSelectionField(self._selection_group_field)
del d['_selection']
self.__dict__.update(d)
self.setSelection(selection)
scene.endChange()
def _setupNodeRegion(self):
self._region = self._context.getDefaultRegion().createChild('point_cloud')
# scene = self._region.getScene()
self._coordinate_field = createFiniteElementField(self._region)
fieldmodule = self._region.getFieldmodule()
fieldmodule.beginChange()
nodeset = fieldmodule.findNodesetByName('nodes')
self._scale_field = fieldmodule.createFieldConstant([1.0, 1.0, 1.0])
self._scaled_coordinate_field = self._coordinate_field * self._scale_field
# Setup the selection fields
self._selection_group_field = fieldmodule.createFieldGroup()
selectiongroup = self._selection_group_field.createFieldNodeGroup(nodeset)
self._group = selectiongroup.getNodesetGroup()
# Setup the point cloud fields
self._point_cloud_group_field = fieldmodule.createFieldGroup()
pointcloudgroup = self._point_cloud_group_field.createFieldNodeGroup(nodeset)
self._point_cloud_group = pointcloudgroup.getNodesetGroup()
# Setup the curve fields
self._curve_group_field = fieldmodule.createFieldGroup()
curvegroup = self._curve_group_field.createFieldNodeGroup(nodeset)
self._curve_group = curvegroup.getNodesetGroup()
datapointset = fieldmodule.findNodesetByName('datapoints')
self._interpolation_point_group_field = fieldmodule.createFieldGroup()
segmentationpointgroup = self._curve_group_field.createFieldNodeGroup(datapointset)
self._interpolation_point_group = segmentationpointgroup.getNodesetGroup()
fieldmodule.endChange()
def _createOnPlaneConditionalField(self):
fieldmodule = self._region.getFieldmodule()
fieldmodule.beginChange()
alias_normal_field = fieldmodule.createFieldAlias(self._plane.getNormalField())
alias_point_field = fieldmodule.createFieldAlias(self._plane.getRotationPointField())
plane_equation_field = self._createPlaneEquationField(fieldmodule, self._scaled_coordinate_field, alias_normal_field, alias_point_field)
tolerance_field = fieldmodule.createFieldConstant(0.5)
abs_field = fieldmodule.createFieldAbs(plane_equation_field)
on_plane_field = fieldmodule.createFieldLessThan(abs_field, tolerance_field)
fieldmodule.endChange()
return on_plane_field
def _createOnPlanePointCloudField(self):
fieldmodule = self._region.getFieldmodule()
fieldmodule.beginChange()
and_field = fieldmodule.createFieldAnd(self._on_plane_conditional_field, self._point_cloud_group_field)
fieldmodule.endChange()
return and_field
def _createOnPlaneCurveField(self):
fieldmodule = self._region.getFieldmodule()
fieldmodule.beginChange()
and_field = fieldmodule.createFieldAnd(self._on_plane_conditional_field, self._curve_group_field)
fieldmodule.endChange()
return and_field
def _createOnPlaneInterpolation(self):
fieldmodule = self._region.getFieldmodule()
fieldmodule.beginChange()
and_field = fieldmodule.createFieldAnd(self._on_plane_conditional_field, self._interpolation_point_group_field)
fieldmodule.endChange()
return and_field
def _createPlaneEquationField(self, fieldmodule, coordinate_field, plane_normal_field, point_on_plane_field):
d = fieldmodule.createFieldDotProduct(plane_normal_field, point_on_plane_field)
plane_equation_field = fieldmodule.createFieldDotProduct(coordinate_field, plane_normal_field) - d
return plane_equation_field
def setScale(self, scale):
'''
        Don't call this 'setScale' method directly; let the main model do that.
        This way we can ensure that the two scale fields have the same
values.
'''
fieldmodule = self._region.getFieldmodule()
fieldcache = fieldmodule.createFieldcache()
self._scale_field.assignReal(fieldcache, scale)
def getPointCloudGroupField(self):
return self._point_cloud_group_field
def getPointCloudGroup(self):
return self._point_cloud_group
def getCurveGroupField(self):
return self._curve_group_field
def getCurveGroup(self):
return self._curve_group
def getInterpolationPointGroup(self):
return self._interpolation_point_group
def getOnPlanePointCloudField(self):
return self._on_plane_point_cloud_field
def getOnPlaneInterpolationField(self):
return self._on_plane_conditional_field
def getOnPlaneCurveField(self):
return self._on_plane_curve_field
def getOnPlaneSegmentationPointField(self):
return self._on_plane_conditional_field
def getSelectionGroupField(self):
return self._selection_group_field
def getSelectionGroup(self):
return self._group
def isSelected(self, node):
return self._group.containsNode(node)
def getCurrentSelection(self):
selection = []
ni = self._group.createNodeiterator()
node = ni.next()
while node.isValid():
selection.append(node.getIdentifier())
node = ni.next()
return selection
def setSelection(self, selection):
fieldmodule = self._region.getFieldmodule()
nodeset = self._group.getMasterNodeset() # fieldmodule.findNodesetByName('nodes')
fieldmodule.beginChange()
self._selection_group_field.clear()
for node_id in selection:
node = nodeset.findNodeByIdentifier(node_id)
self._group.addNode(node)
if node_id == selection[0]:
plane_attitude = self.getNodePlaneAttitude(node_id)
self._plane.setPlaneEquation(plane_attitude.getNormal(), plane_attitude.getPoint())
fieldmodule.endChange()
def getNodeByIdentifier(self, node_id):
fieldmodule = self._region.getFieldmodule()
nodeset = fieldmodule.findNodesetByName('nodes')
node = nodeset.findNodeByIdentifier(node_id)
return node
def getNodePlaneAttitude(self, node_id):
return self._plane_attitude_store[self._nodes[str(node_id)]]
def getNodeStatus(self, node_id):
node = self.getNodeByIdentifier(node_id)
node_status = SegmentPointStatus(node_id, self.getNodeLocation(node), self.getNodePlaneAttitude(node_id))
return node_status
def _addId(self, plane_attitude, node_id):
if plane_attitude in self._plane_attitude_store:
index = self._plane_attitude_store.index(plane_attitude)
self._plane_attitudes[str(index)].append(node_id)
else:
if None in self._plane_attitude_store:
index = self._plane_attitude_store.index(None)
self._plane_attitude_store[index] = plane_attitude
else:
index = len(self._plane_attitude_store)
self._plane_attitude_store.append(plane_attitude)
self._plane_attitudes[str(index)] = [node_id]
def _removeId(self, plane_attitude, node_id):
plane_attitude_index = self._plane_attitude_store.index(plane_attitude)
index = self._plane_attitudes[str(plane_attitude_index)].index(node_id)
del self._plane_attitudes[str(plane_attitude_index)][index]
if len(self._plane_attitudes[str(plane_attitude_index)]) == 0:
del self._plane_attitudes[str(plane_attitude_index)]
self._plane_attitude_store[plane_attitude_index] = None
def getElementByIdentifier(self, element_id):
fieldmodule = self._region.getFieldmodule()
mesh = fieldmodule.findMeshByDimension(1)
if element_id is None:
element_id = -1
return mesh.findElementByIdentifier(element_id)
def getNextCurveIdentifier(self):
next_identifier = 0
while next_identifier in self._curves:
next_identifier += 1
return next_identifier
def insertCurve(self, curve_identifier, curve):
self._curves[curve_identifier] = curve
def popCurve(self, curve_identifier):
if curve_identifier in self._curves:
curve = self._curves[curve_identifier]
del self._curves[curve_identifier]
node_ids = curve.getNodes()
for node_id in node_ids:
self.removeNode(node_id)
curve.removeAllNodes()
def getCurveIdentifiers(self):
return self._curves.keys()
def getCurveIdentifier(self, curve):
for curve_identifier in self._curves:
if curve == self._curves[curve_identifier]:
return curve_identifier
return None
def getCurveWithIdentifier(self, index):
return self._curves[index]
def getCurveForNode(self, node_id):
for curve_identifier in self._curves:
curve = self._curves[curve_identifier]
if node_id in curve:
return curve
return None
def addNode(self, node_id, location, plane_attitude):
        if node_id == -1:
node = self._createNodeAtLocation(location)
node_id = node.getIdentifier()
self._addId(plane_attitude, node_id)
self._nodes[str(node_id)] = self._plane_attitude_store.index(plane_attitude)
return node_id
def addNodes(self, node_statuses):
fieldmodule = self._region.getFieldmodule()
fieldmodule.beginChange()
node_ids = []
for node_status in node_statuses:
node_id = self.addNode(node_status.getNodeIdentifier(), node_status.getLocation(), node_status.getPlaneAttitude())
node_ids.append(node_id)
        fieldmodule.endChange()
        return node_ids
def modifyNode(self, node_id, location, plane_attitude):
current_plane_attitude = self._plane_attitude_store[self._nodes[str(node_id)]]
node = self.getNodeByIdentifier(node_id)
self.setNodeLocation(node, location)
if current_plane_attitude != plane_attitude:
self._removeId(current_plane_attitude, node_id)
self._addId(plane_attitude, node_id)
self._nodes[str(node_id)] = self._plane_attitude_store.index(plane_attitude)
def setNodeLocation(self, node, location):
fieldmodule = self._region.getFieldmodule()
fieldcache = fieldmodule.createFieldcache()
fieldmodule.beginChange()
fieldcache.setNode(node)
self._coordinate_field.assignReal(fieldcache, location)
fieldmodule.endChange()
def getNodeLocation(self, node):
fieldmodule = self._region.getFieldmodule()
fieldcache = fieldmodule.createFieldcache()
fieldmodule.beginChange()
fieldcache.setNode(node)
result, location = self._coordinate_field.evaluateReal(fieldcache, 3)
fieldmodule.endChange()
if result == OK:
return location
return None
def removeElement(self, element_id):
fieldmodule = self._region.getFieldmodule()
fieldmodule.beginChange()
mesh = fieldmodule.findMeshByDimension(1)
element = mesh.findElementByIdentifier(element_id)
mesh.destroyElement(element)
fieldmodule.endChange()
def createDatapoint(self, location=None):
node = self._createNodeAtLocation(location, 'datapoints')
self._interpolation_point_group.addNode(node)
return node
def removeDatapoint(self, datapoint):
nodeset = datapoint.getNodeset()
nodeset.destroyNode(datapoint)
def removeNodes(self, node_statuses):
fieldmodule = self._region.getFieldmodule()
fieldmodule.beginChange()
for node_status in node_statuses:
self.removeNode(node_status.getNodeIdentifier())
fieldmodule.endChange()
def removeNode(self, node_id):
if str(node_id) in self._nodes:
plane_attitude = self._plane_attitude_store[self._nodes[str(node_id)]]
self._removeId(plane_attitude, node_id)
del self._nodes[str(node_id)]
node = self.getNodeByIdentifier(node_id)
nodeset = node.getNodeset()
nodeset.destroyNode(node)
def createNodes(self, node_statuses, group=None):
node_ids = []
for node_status in node_statuses:
node_id = self.addNode(-1, node_status.getLocation(), node_status.getPlaneAttitude())
if group is not None:
node = self.getNodeByIdentifier(node_id)
group.addNode(node)
node_ids.append(node_id)
return node_ids
def createNode(self):
'''
        Create a node with the model's coordinate field.
'''
fieldmodule = self._region.getFieldmodule()
fieldmodule.beginChange()
nodeset = fieldmodule.findNodesetByName('nodes')
template = nodeset.createNodetemplate()
template.defineField(self._coordinate_field)
scene = self._region.getScene()
selection_field = scene.getSelectionField()
if not selection_field.isValid():
scene.setSelectionField(self._selection_group_field)
self._selection_group_field.clear()
node = nodeset.createNode(-1, template)
self._group.addNode(node)
fieldmodule.endChange()
return node
def _createNodeAtLocation(self, location, dataset='nodes', node_id=-1):
'''
Creates a node at the given location without
adding it to the current selection.
'''
fieldmodule = self._region.getFieldmodule()
fieldmodule.beginChange()
nodeset = fieldmodule.findNodesetByName(dataset)
template = nodeset.createNodetemplate()
template.defineField(self._coordinate_field)
node = nodeset.createNode(node_id, template)
self.setNodeLocation(node, location)
fieldmodule.endChange()
return node
|
[
"h.sorby@auckland.ac.nz"
] |
h.sorby@auckland.ac.nz
|
081385780b3a6973dbe32ec0b3e9133546fce8a1
|
7064db507df92534747556be5e9c71ac629c9446
|
/Analysis/trappist1/plotEvolTrappist1.py
|
fe9561b912a65aeaecc96fd3362beae4125cbf91
|
[] |
no_license
|
dflemin3/EHI
|
766a974625e6d16ae3452cd57710f527564e267c
|
de2394838cf5609d0ec77f365613994aa52ed9e2
|
refs/heads/master
| 2020-04-18T04:29:35.052930
| 2019-05-15T22:24:28
| 2019-05-15T22:24:28
| 167,240,810
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,973
|
py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
"""
import numpy as np
import os
import matplotlib as mpl
import matplotlib.pyplot as plt
from matplotlib.gridspec import GridSpec
#Typical plot parameters that make for pretty plots
mpl.rcParams['figure.figsize'] = (9,8)
mpl.rcParams['font.size'] = 26.0
## for Palatino and other serif fonts use:
mpl.rc('font',**{'family':'serif'})
mpl.rc('text', usetex=True)
# Read in evolutionary tracks
dataName = "trappist1WDelta20EpsBolmont"
dataDir = os.path.join("../../Data", dataName + "Evol.npz")
data = np.load(dataDir)
nsamples = len(data["Luminosity"])
### Plot Lum, LumXUV, radius evolution and compare to observations ###
fig, axes = plt.subplots(ncols=3, figsize=(21,6))
for ii in range(nsamples):
# Left: lum
axes[0].plot(data["time"][ii], data["Luminosity"][ii], alpha=0.1, color="k",
lw=2, zorder=1)
    # Middle: lumXUV
axes[1].plot(data["time"][ii], data["LXUVStellar"][ii], alpha=0.3, color="k",
lw=2)
    # Right: radius
axes[2].plot(data["time"][ii], data["Radius"][ii], alpha=0.1, color="k",
lw=2)
# Plot constraints, format
# Luminosity from Grootel+2018
x = np.linspace(0, 1e10, 100)
axes[0].fill_between(x, 0.000522-0.000019, 0.000522+0.000019, color="C0",
alpha=0.3, zorder=0)
axes[0].axhline(0.000522, color="C0", lw=2, ls="--", zorder=2)
axes[0].set_ylabel("Luminosity [L$_{\odot}$]", fontsize=25)
axes[0].set_xlabel("Time [yr]", fontsize=25)
axes[0].set_ylim(4.0e-4, 1.5e-2)
axes[0].set_yscale("log")
axes[0].set_xscale("log")
# XUV Luminosity from Wheatley+2017
axes[1].fill_between(x, 10**(-6.4-0.1), 10**(-6.4+0.1), color="C0",
alpha=0.3, zorder=0)
axes[1].axhline(10**-6.4, color="C0", lw=2, ls="--", zorder=2)
axes[1].set_ylabel("XUV Luminosity [L$_{\odot}$]", fontsize=25)
axes[1].set_xlabel("Time [yr]", fontsize=25)
axes[1].set_ylim(2.0e-7, 2.0e-4)
axes[1].set_yscale("log")
axes[1].set_xscale("log")
# Radius from Grootel+2018
axes[2].fill_between(x, 0.121-0.003, 0.121+0.003, color="C0", alpha=0.3, zorder=0)
axes[2].axhline(0.121, color="C0", lw=2, ls="--", zorder=2)
axes[2].set_ylabel("Radius [R$_{\odot}$]", fontsize=25)
axes[2].set_xlabel("Time [yr]", fontsize=25)
axes[2].set_xscale("log")
axes[2].set_ylim(0.08, 0.42)
fig.savefig(os.path.join("../../Plots", dataName + "Trappist1Evol.png"),
bbox_inches="tight", dpi=200)
### Plot runaway greenhouse HZ limit ###
fig, ax = plt.subplots(figsize=(7,6))
for ii in range(nsamples):
# Plot inner HZ limit
ax.plot(data["time"][ii], data["HZLimRunaway"][ii], alpha=0.1,
color="k", lw=2, zorder=1)
# Plot outer HZ limit
ax.plot(data["time"][ii], data["HZLimEarlyMars"][ii], alpha=0.1,
color="k", lw=2, zorder=1)
## Format, plot planet's semi-major axes from Gillon+2017 and Luger+2017
ba = .01111
baSig = 0.00034
ca = 0.01521
caSig = 0.00047
da = 0.02144
daSig = 0.00066
ea = 0.02817
eaSig = 0.00087
fa = 0.0371
faSig = 0.0011
ga = 0.0451
gaSig = 0.0014
ha = 0.06134
haSig = 0.002251
planets = [ba, ca, da, ea, fa, ga, ha]
planetsSig = [baSig, caSig, daSig, eaSig, faSig, gaSig, haSig]
planetsName = ["b", "c", "d", "e", "f", "g", "h"]
planetsColor = ["C0", "C1", "C2", "C3", "C4", "C5", "C6"]
x = np.linspace(0, 1.0e10, 100)
for ii in range(len(planets)):
ax.fill_between(x, planets[ii] - planetsSig[ii],
planets[ii] + planetsSig[ii], color=planetsColor[ii],
alpha=0.3, zorder=0)
ax.axhline(planets[ii], color=planetsColor[ii], lw=2, ls="--", zorder=2,
label=planetsName[ii])
ax.set_xscale("log")
ax.set_ylim(0, 0.12)
ax.set_xlabel("Time [yr]", fontsize=25)
ax.set_ylabel("Distance [AU]", fontsize=25)
ax.legend(loc="upper right", framealpha=0, fontsize=12)
fig.savefig(os.path.join("../../Plots", dataName + "Trappist1HZLimEvol.png"),
bbox_inches="tight", dpi=200)
# Done!
|
[
"dflemin3@uw.edu"
] |
dflemin3@uw.edu
|
f7e50bf1d16e0bf0011ed4628336620a82c81547
|
c0a93aa192b307a63d585799df64fc79088b7005
|
/robot_smach_states/src/robot_smach_states/util/designators/deprecation_warnings.py
|
683e1003318182d7ca5f0c9478b9629886abf320
|
[] |
no_license
|
sunarditay/tue_robocup
|
b75e2ea9ca040537aae6a8cf4c72bb5adc96a149
|
189bd987822f9bd1c477769892e154964f97d2f0
|
refs/heads/master
| 2020-03-25T13:35:59.331692
| 2018-06-16T16:58:45
| 2018-06-16T16:58:45
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,138
|
py
|
#! /usr/bin/env python
"""Deprecation warnings.
From:
- https://wiki.python.org/moin/PythonDecoratorLibrary#Generating_Deprecation_Warnings
- http://stackoverflow.com/questions/2536307/decorators-in-the-python-standard-lib-deprecated-specifically
"""
__author__ = 'Loy'
import warnings
import functools
import inspect
# def deprecated(func):
# '''This is a decorator which can be used to mark functions
# as deprecated. It will result in a warning being emitted
# when the function is used.'''
# def new_func(*args, **kwargs):
# warnings.warn("Call to deprecated function {}.".format(func.__name__),
# category=DeprecationWarning)
# return func(*args, **kwargs)
# new_func.__name__ = func.__name__
# new_func.__doc__ = func.__doc__
# new_func.__dict__.update(func.__dict__)
# return new_func
def deprecated(func):
'''This is a decorator which can be used to mark functions
as deprecated. It will result in a warning being emitted
when the function is used.'''
@functools.wraps(func)
def new_func(*args, **kwargs):
warnings.simplefilter('always', DeprecationWarning) #turn off filter
warnings.warn_explicit(
"Call to deprecated function {}.".format(func.__name__),
category=DeprecationWarning,
filename=func.func_code.co_filename,
lineno=func.func_code.co_firstlineno + 1
)
warnings.simplefilter('default', DeprecationWarning) #reset filter
return func(*args, **kwargs)
return new_func
def get_caller_info():
#Index 1 would get caller of get_caller_info, index 2 gets the caller of of the method that calls get_caller_info
frame, filename, line_number, function_name, lines, index = inspect.getouterframes(inspect.currentframe())[2]
return {"frame":frame,
"filename":filename, "line_number":line_number,
"function_name":function_name,
"lines":lines, "index":index}
if __name__ == "__main__":
## Usage examples ##
@deprecated
def my_func():
print "I'm a function"
my_func()
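    # A minimal sketch (added example, not in the original file): report where
    # a function was invoked from using get_caller_info.
    def where_called():
        info = get_caller_info()
        print "called from %s, line %s, in %s" % (info["filename"], info["line_number"], info["function_name"])
    where_called()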
|
[
"loy.vanbeek@gmail.com"
] |
loy.vanbeek@gmail.com
|
b39ab9b08081e002abc6808a6796aed2fc518304
|
b7fa9ad7ef6875d5d3dc56a554bf205f4e6c40b1
|
/cookbook/migrations/0003_step_date_added.py
|
e694ee18a157a690544e9269285ca24524953e8f
|
[] |
no_license
|
geofftang/mise
|
7578a9e2e14fd35ef9aa4ec64e7e29787740d4a9
|
2139b08486e0cf2184195a6c0908b693c6d6afea
|
refs/heads/master
| 2021-05-02T18:25:30.167192
| 2018-04-11T05:28:48
| 2018-04-11T05:38:21
| 120,657,083
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 564
|
py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.8 on 2018-03-07 05:38
from __future__ import unicode_literals
from django.db import migrations, models
import django.utils.timezone
class Migration(migrations.Migration):
dependencies = [
('cookbook', '0002_remove_step_date_added'),
]
operations = [
migrations.AddField(
model_name='step',
name='date_added',
field=models.DateTimeField(auto_now_add=True, default=django.utils.timezone.now),
preserve_default=False,
),
]
|
[
"geoffrey.j.tang@gmail.com"
] |
geoffrey.j.tang@gmail.com
|
9d3a6d558304fc8d107d7e1685804bfb7f0e0bb5
|
86a66eec7ea95bcb7eb1fef97ae129606c061e95
|
/Singlepic/wsgi.py
|
bd39473a6b35baeeabfb9e7057510101d8c48a99
|
[] |
no_license
|
Bogaevskiy/Singlepic
|
8db9ea28a7a46254ab14766315e5315475f5696c
|
91c8094693b7e8f04ea49fefefc63b8f5b86b4d7
|
refs/heads/master
| 2020-11-25T23:27:26.377896
| 2020-04-21T18:51:05
| 2020-04-21T18:51:05
| 223,022,465
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 411
|
py
|
"""
WSGI config for Singlepic project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/2.2/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'Singlepic.settings')
application = get_wsgi_application()
|
[
"vileflesh@2ch.hk"
] |
vileflesh@2ch.hk
|
c287d5683e9ee27525987d8330386c0a0d3497ff
|
a2c4e411f4872f1c501618ed553ee5c87c3270db
|
/hacker/migrations/0003_delete_submission.py
|
447336e821a3fcc51a6a5b0a36f787fcb29e42f4
|
[
"MIT"
] |
permissive
|
lamnt-git/bug_bounty
|
c22f438f0c05c62329eb77ba1ce68c55a54d0b56
|
fbb53d266a36c7df68e5f81fbafb0cf34891f764
|
refs/heads/master
| 2022-12-22T08:09:13.224931
| 2020-01-03T04:49:46
| 2020-01-03T04:49:46
| 231,513,592
| 0
| 0
|
NOASSERTION
| 2022-12-08T03:22:58
| 2020-01-03T04:44:20
|
Python
|
UTF-8
|
Python
| false
| false
| 290
|
py
|
# Generated by Django 2.2.6 on 2020-01-02 13:09
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('hacker', '0002_submission'),
]
operations = [
migrations.DeleteModel(
name='Submission',
),
]
|
[
"tunglambk123@gmail.com"
] |
tunglambk123@gmail.com
|
921138bb02bc4ee32c67ab050aea636d5599d976
|
8c4c244007ebbbcdfe75059c44dd3ecf94d3221e
|
/tiaotiao/subexperimentgr14.py
|
6b279f98bfa304f1d543f43c523f96bd768cee91
|
[] |
no_license
|
michelle3113/tt_no1
|
590df6dfb66c62155b1fe865ceacdfc3d9b78285
|
128096d8751edab884879261d67390e8b384100f
|
refs/heads/master
| 2020-12-23T02:51:02.706598
| 2020-06-25T14:02:06
| 2020-06-25T14:02:06
| 237,012,431
| 3
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 4,944
|
py
|
import pandas as pd
import numpy as np
import scipy
from sklearn.ensemble import RandomForestClassifier
from sklearn.utils import shuffle
from sklearn.metrics import mean_squared_error # MSE
from sklearn.metrics import confusion_matrix
from scipy import stats
from sklearn.metrics import r2_score # R^2
from scipy.stats import pearsonr
from sklearn.preprocessing import OneHotEncoder
shifu = pd.read_excel('input/儿科路径病人明细_s1.xlsx')
# remove duplicate column
unique_col = [col for col in list(shifu.columns) if '.' not in col]
shifu = shifu[unique_col]
shifu.to_excel('results/shifu_demo_unique.xlsx', index=False)
# remove columns that containing null more that 85% ratio
shifu = shifu.loc[:, shifu.notna().mean() > 0.85]
# or shifu = shifu.dropna(1, thresh=len(shifu.index)*0.85)
shifu.to_excel('results/shifu_demo_nonull.xlsx', index=False)
# delete '-'
shifu = shifu.loc[:, (shifu == '-').sum() == 0]
shifu.to_excel('results/shifu_demo_nogang.xlsx', index=False)
# standard admiss_date: 2014-03-25 09:34:28:613 -> 2014-03-25 09:34:28
shifu['admiss_date'] = shifu['admiss_date'].apply(lambda s: s[:s.rfind(':')])
# convert object to datetime class
shifu['admiss_date'] = pd.to_datetime(shifu['admiss_date'])
shifu['dis_date'] = pd.to_datetime(shifu['dis_date'])
# generate target DIH column
DIH_day = (shifu['dis_date'] - shifu['admiss_date']).apply(lambda d: f'day_{d.days}')
shifu['DIH_day'] = DIH_day
shifu.to_excel('results/shifu_demo_add_target.xlsx', index=False)
########################################################
# data standard (**very important**)
# for sex attribute: normalize the value into range[0, 1] not [1, 2]
shifu['sex'] = shifu['sex'] - 1
########################################################
# Integer-encode the two diagnosis code columns so they can be used as
# features. (pd.factorize is swapped in here: the original hard-coded
# 4-row OneHotEncoder arrays could not be assigned back onto the full table.)
shifu['dis_diag'], _ = pd.factorize(shifu['dis_diag'])
shifu['admiss_diag'], _ = pd.factorize(shifu['admiss_diag'])
shifu.to_excel('results/shifu_demo_one_hot.xlsx', index=False)
# shuffle dataset
shifu = shuffle(shifu)
# compute train num from train ratio
train_ratio = 0.70
train_num = int(len(shifu) * train_ratio)
print(f'Sample num: {len(shifu)}, Train num: {train_num}, Test num: {len(shifu) - train_num}')
# initialize target and generate all category name set
target, category_name = pd.factorize(shifu['DIH_day'])
# select train/test subset
# [ train_subset | test_subset ]
# [......................|..................]
train_subset = shifu.iloc[:train_num]
train_target = target[:train_num]
test_subset = shifu.iloc[train_num:]
test_target = target[train_num:]
payment = ['charge1', 'charge2', 'charge3', 'charge6', 'charge7', 'charge10']
fts_for_all_payment = shifu[payment]
personal_information = ['sex', 'blood_type', 'age', 'home_district']
fts_for_all_pi = shifu[personal_information]
health_state = ['admiss_diag', 'admiss_times', 'dis_diag_no', 'dis_diag_type', 'dis_diag_status', 'admiss_status']
fts_for_all_hs = shifu[health_state]
others = ['pay_flag', 'local_flag']
fts_for_all_others = shifu[others]
def tiaotiao(*a):
    # each argument is a list of feature (column) names; flatten them
    selected_fts = [ft for group in a for ft in group]
clf = RandomForestClassifier()
clf.fit(train_subset[selected_fts], train_target)
test_prediction = clf.predict(test_subset[selected_fts])
# metric for RMSE
rmse = np.sqrt(mean_squared_error(test_target, test_prediction))
# metric for rho
rho, p_value = stats.spearmanr(test_target, test_prediction)
# metric for MSE
    mse = mean_squared_error(test_target, test_prediction)
# metric for MAPE
def mean_absolute_percentage_error(y_true, y_pred):
y_true, y_pred = np.array(y_true), np.array(y_pred)
return np.mean(np.abs((y_true - y_pred) / y_true)) * 100
mape = mean_absolute_percentage_error(test_target, test_prediction)
# metric for Spec
TN, FP, FN, TP = confusion_matrix(test_target, test_prediction).ravel()
spec = TN / (TN + FP)
    # metric for Sens
sens = TP / (TP + FN)
# metric for Acc
    acc = (TP + TN) / (TP + FP + TN + FN)
# metric for R^2
r2 = r2_score(test_target, test_prediction)
# metric for abs(R)
# don't understand the formulation of |R|
r, _ = pearsonr(test_target, test_prediction)
# overall description: confusion matrix
print(pd.crosstab(category_name[test_target], category_name[test_prediction],
rownames=['Target'], colnames=['Prediction'], margins=True))
print(f'RMSE: {rmse}')
print(f'rho: {rho:.3f}')
    print(f'MSE: {mse:.3f}')
print(f'MAPE: {mape:.3f}')
print(f'Spec: {spec:.3f}')
print(f'Sens: {sens:.3f}')
print(f'Acc: {acc:.3f}')
print(f'R^2: {r2:.3f}')
print(f'abs(R):{r:.3f}')
return;
tiaotiao(payment, health_state, personal_information, others)
|
[
"evanfeng97@gmail.com"
] |
evanfeng97@gmail.com
|
adfe6915a3b64d12dc6776c8b1a88a1f57f06fe7
|
1274c7a7cb16aefb3fb0b052c2448eea5643f14f
|
/webempresa/webempresa/blog/urls.py
|
59b20335d8e611873f66f47a8394b30756e417e4
|
[] |
no_license
|
No-Haiku/mis-paginas-en-django
|
241316b2eb44fb7bfd5227ae50fe30759e1e9768
|
54e428b8a4e05d2731a7a07801f1fcf9690cce86
|
refs/heads/main
| 2023-05-30T09:37:21.300821
| 2021-06-17T06:32:09
| 2021-06-17T06:32:09
| 336,439,360
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 190
|
py
|
from django.urls import path
from . import views
urlpatterns = [
path('', views.blog, name="blog"),
path('category/<int:category_id>/', views.category, name="category"),
]
|
[
"66711328+No-Haiku@users.noreply.github.com"
] |
66711328+No-Haiku@users.noreply.github.com
|
7016107af932a3a637e855b91589feec91a4ea95
|
5d45e2bf2287e3878cc90415854a796ed5532142
|
/talk/_server.py
|
ab188a339616e6e87622dc5153ec9172d8be2f03
|
[] |
no_license
|
ap--/teaching-python-aidd-adv-oct-2021
|
cce2bfb62f295014ecc1c0e7217dc0e1b26e781d
|
8ef3a712805088c7d211ce8155e6838a2df6ef30
|
refs/heads/main
| 2023-08-22T04:25:41.383181
| 2021-10-21T12:08:55
| 2021-10-21T12:08:55
| 419,219,843
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 591
|
py
|
from pathlib import Path
from flask import Flask
from flask import request
app = Flask(
__name__,
static_url_path='',
static_folder=str(Path(__file__).parent.joinpath("../data"))
)
@app.route("/")
def hello_world():
return "Hello Hello World!"
@app.route("/greeting")
def greeting():
name = request.args.get("name", "????")
return f"Hello Hello {name}!"
@app.route("/data.json")
def data():
return {
"data": [1, 2, 3],
"status": "good",
}
if __name__ == "__main__":
# by default runs at http://127.0.0.1:5000/
app.run(debug=True)
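# Example requests against the routes above (assuming the default local
# address; curl shown only for illustration):
#   curl http://127.0.0.1:5000/                    -> Hello Hello World!
#   curl "http://127.0.0.1:5000/greeting?name=Ada" -> Hello Hello Ada!
#   curl http://127.0.0.1:5000/data.json           -> {"data": [1, 2, 3], "status": "good"}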
|
[
"andreas@poehlmann.io"
] |
andreas@poehlmann.io
|
9b09056d8459349da2fc2bcd60ee7f7c34f3d3dc
|
c68ec45e9b4302ae28d8f70797c335180ee1a196
|
/stages/03_kubernetes/project/app/routes.py
|
f2d47acb40665e72b2bcd50fba4b945877a1f734
|
[
"MIT",
"LicenseRef-scancode-other-permissive"
] |
permissive
|
dgr113/my-otus-tests
|
90069f856a7177ca36490a3d4716f1870e5eefd2
|
c573d85be422eb76986fa217ffc0fe3415b6df2b
|
refs/heads/main
| 2023-04-23T07:12:31.403027
| 2021-05-11T10:14:05
| 2021-05-11T10:14:05
| 309,331,188
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,043
|
py
|
# coding: utf-8
from typing import Sequence
from flask import jsonify, current_app, request
from flask_restful import Resource # type: ignore
from .models import Client # type: ignore
class Index(Resource):
def get(self):
return jsonify({
'result': "{} from {}!".format(current_app.config['APP_CONFIG']['GREETING'], current_app.config['APP_CONFIG']['HOSTNAME'])
})
class Config(Resource):
def get(self):
return jsonify( current_app.config['APP_CONFIG'] )
class StudentMany(Resource):
def get(self) -> Sequence[dict]:
return Client.find()
def post(self) -> str:
req_data = request.get_json()
return Client.insert(req_data)
class StudentOne(Resource):
def get(self, student_id: int) -> Sequence[dict]:
return Client.find(student_id)
def put(self, student_id: int):
req_data = request.get_json()
return Client.update(student_id, req_data)
def delete(self, student_id: int):
return Client.delete(student_id)
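# A minimal wiring sketch (the URL paths here are assumptions, not taken from
# this repo) showing how these Resource classes would typically be registered:
#
#   from flask import Flask
#   from flask_restful import Api
#
#   app = Flask(__name__)
#   api = Api(app)
#   api.add_resource(Index, '/')
#   api.add_resource(Config, '/config')
#   api.add_resource(StudentMany, '/students')
#   api.add_resource(StudentOne, '/students/<int:student_id>')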
|
[
"dmitry-gr87@yandex.ru"
] |
dmitry-gr87@yandex.ru
|
9b9e01b1834d7671cca3e226996c4e193900163e
|
8b4db3ef33fa27ef4ff885cde09666da88872045
|
/sfc/settings.py
|
5c75aa7d94a54ce50e8b39cf06e616135cc8fb9b
|
[] |
no_license
|
KoyanagiAyuha/sfc
|
b2f4051871febbac18fb56d6ecd03a31adfee11b
|
a7e1b0bce4d945bdb2db046db4d1e303a0596a92
|
refs/heads/master
| 2023-06-06T14:19:52.065280
| 2021-06-23T15:17:25
| 2021-06-23T15:17:25
| 379,646,171
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,448
|
py
|
"""
Django settings for sfc project.
Generated by 'django-admin startproject' using Django 3.1.
For more information on this file, see
https://docs.djangoproject.com/en/3.1/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/3.1/ref/settings/
"""
from pathlib import Path
import os
# Build paths inside the project like this: BASE_DIR / 'subdir'.
BASE_DIR = Path(__file__).resolve(strict=True).parent.parent
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/3.1/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '3cg8#_rc*09p+z+79jjiil)axw28^s)3yl-x^bh4-g#2ltc!)0'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'rest_framework',
'rest_framework.authtoken',
'core.apps.CoreConfig',
'api_user.apps.ApiUserConfig',
'api_dm.apps.ApiDmConfig',
'corsheaders',
]
MIDDLEWARE = [
'corsheaders.middleware.CorsMiddleware',
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
CORS_ORIGIN_WHITELIST = [
'http://localhost:3000',
]
ROOT_URLCONF = 'sfc.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'sfc.wsgi.application'
# Database
# https://docs.djangoproject.com/en/3.1/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': BASE_DIR / 'db.sqlite3',
}
}
# Password validation
# https://docs.djangoproject.com/en/3.1/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/3.1/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'Asia/Tokyo'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/3.1/howto/static-files/
AUTH_USER_MODEL = 'core.User'
STATIC_URL = '/static/'
MEDIA_ROOT = os.path.join(BASE_DIR, '_media')
MEDIA_URL = '/media/'
|
[
"ayuha.koyanagi@gmail.com"
] |
ayuha.koyanagi@gmail.com
|
0be4413bf097642311edff3d6acbdf9b72493722
|
ccc78f2398bfc245c385a78938a52e61edea98e9
|
/Module4 Midterm exam.py
|
1672f14ad11211fbd4f5098e73ecaa7acc641c60
|
[] |
no_license
|
PrimWILL/Introduction-to-Python-Absolute-Beginner
|
b29a41af4898758765699459e75fe66bd7b4a534
|
85316f1fe2e09fe3525fbee44b422f5dc1fc6a01
|
refs/heads/master
| 2020-07-29T17:25:24.546198
| 2019-09-25T06:56:49
| 2019-09-25T06:56:49
| 209,900,183
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 665
|
py
|
guess = 99
def STR_ANALYSIS(text):
    if text.isdigit():
        num = int(text)
        if num > guess:
            return print(num, "is a pretty big number")
        elif num < guess:
            return print(num, "is a smaller number than expected")
        else:
            return print("Correct! Congratulations.")
    elif text.isalpha():
        return print("\""+text+"\" is all alphabetical characters!")
    else:
        return print("\""+text+"\" is a surprise! It's neither all alpha nor all digit characters!")
while True:
string = input("enter word or integer: ")
if string != '':
STR_ANALYSIS(string)
break
else:
pass
|
[
"51026374+PrimWILL@users.noreply.github.com"
] |
51026374+PrimWILL@users.noreply.github.com
|
28b0dbc65927fd0812b8358bd48dcd2790f3c99c
|
ccb87e34b5d105e35794591ba3aada625c8a838f
|
/jim_python_practice_programs/6.60_simple_interest.py
|
77e45b9835a3414df609b8fd91c906d237dba3f5
|
[] |
no_license
|
jaford/thissrocks
|
c2519135af007bf1cc37c511487c98db1ddd5a5b
|
e7d8b91d23de615f2124493742079988650396b9
|
refs/heads/master
| 2023-08-17T17:15:49.819091
| 2023-07-21T21:59:23
| 2023-07-21T21:59:23
| 10,363,528
| 4
| 0
| null | 2023-07-21T21:59:24
| 2013-05-29T16:00:32
|
Python
|
UTF-8
|
Python
| false
| false
| 600
|
py
|
#Write a program to calculate simple interest
# Replace ___ with your code
def simple_interest():
# take float input for principal, rate, and time
P = float(input("Enter the principal amount: "))
R = float(input("Enter the interest rate: "))
T = float(input("Enter the time in years: "))
# calculate the simple interest
interest = P * R * T * .01
# calculate the final amount
total_sum = P + interest
# print interest and total_sum in separate lines
print(f'The amount paid in interest is: {interest}')
print(f'The total amount paid is: {total_sum}')
simple_interest()
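# Worked example: principal 1000, rate 5 (percent), time 2 years
#   interest  = 1000 * 5 * 2 * .01 = 100.0
#   total_sum = 1000 + 100.0       = 1100.0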
|
[
"jamesford3@gmail.com"
] |
jamesford3@gmail.com
|
53f3c190d40848ae5daf091e4bfbccdccc998ef3
|
866ba733b70fa722f1c6b89b5f3ead429856098d
|
/server/mysite/film_lookup/apps.py
|
6f0bd459dc4de2629dd0b6f353fb1dc6373181c5
|
[] |
no_license
|
gkink/BachelorProject
|
3f5460468d2b34a05e85696ed1a57ed01cfc4699
|
5e64f0eda63bdaba76c4d7f49f78876670c2dca1
|
refs/heads/master
| 2021-03-24T12:25:08.569948
| 2016-08-08T00:25:16
| 2016-08-08T00:25:16
| 54,229,726
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 95
|
py
|
from django.apps import AppConfig
class FilmLookupConfig(AppConfig):
name = 'film_lookup'
|
[
"gosip12@freeuni.edu.ge"
] |
gosip12@freeuni.edu.ge
|
3e9165b2692d54c88a6ac4046cc4146d400488f0
|
d3d5b933fe0672ba141b3cfde0ad942438b38304
|
/06.Advanced Loops/PyramidOfNumbers.py
|
086d08ac9925be33755ebc4f64df2f7908cf2fb7
|
[
"MIT"
] |
permissive
|
wesenu/Python-3
|
065aa49b7978c6f0cc1ebdd364d7b02059ab4dc6
|
55163496dac452a7110b7f76edc6894ee195f1fe
|
refs/heads/master
| 2023-03-15T11:52:54.877026
| 2018-08-20T11:56:34
| 2018-08-20T11:56:34
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 245
|
py
|
n = int(input())
num = 1
for i in range(1, n+1):
    for j in range(i):
        if j > 0:
            print(' ', end='')
print(num, end='')
num += 1
if num > n:
break
print()
if num > n:
break
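# Example: for input 10, the numbers 1..10 are printed in rows of growing width:
#   1
#   2 3
#   4 5 6
#   7 8 9 10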
|
[
"nikolay.vutov.nv@gmail.com"
] |
nikolay.vutov.nv@gmail.com
|
d348f9fda6a4f0d2d5430ca7702d28c57f1d71b4
|
9facf0143ee2e34251f145dfab1d3b8e14e66200
|
/check_cycles.py
|
cad64672679a9b3c0f76c7e2c91de39169b03a63
|
[] |
no_license
|
alihaiderrizvi/handy-algorithms
|
f3e35ac4e9a89a2df77f7a751e1861ea7f1fcdbd
|
2e78fc9984d52c33ff825ce02dfa2da68c66dd71
|
refs/heads/main
| 2023-02-26T10:50:40.001516
| 2021-02-02T12:28:21
| 2021-02-02T12:28:21
| 335,281,360
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 603
|
py
|
from collections import deque


def is_cyclic(numNodes, connections):
graph = {}
indegree = {}
for i in range(numNodes):
graph[i] = []
indegree[i] = 0
for i,j in connections:
graph[i].append(j)
indegree[j] += 1
    queue = deque()
for i in indegree:
if indegree[i] == 0:
queue.append(i)
count = 0
while queue:
        curr = queue.popleft()  # O(1) with deque instead of list.pop(0)
count += 1
for node in graph[curr]:
indegree[node] -= 1
if indegree[node] == 0:
queue.append(node)
    # Kahn's algorithm dequeues every node iff the graph is acyclic,
    # so a cycle exists exactly when some node is never dequeued.
    return count != numNodes
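
if __name__ == '__main__':
    # Quick self-check with small example graphs (added, not from the original file):
    print(is_cyclic(3, [(0, 1), (1, 2)]))          # False: 0 -> 1 -> 2 is acyclic
    print(is_cyclic(3, [(0, 1), (1, 2), (2, 0)]))  # True: 2 -> 0 closes a cycle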
|
[
"noreply@github.com"
] |
alihaiderrizvi.noreply@github.com
|
65592f9371d44d9f279f7e13104ef745cce86f21
|
ad4eeb3ddca55770acaa290f50444b39174eac89
|
/migrations/versions/38a42c2f5012_initial_setup.py
|
21ca376d18f31daf24467e1cdd83d1bd03322fbc
|
[] |
no_license
|
hcen001/crest-metadata-initiative
|
6177988e262eeef070a18253ffd7296d715726c7
|
d9b62c7ea4f2fcf44a2d25a4edbfbda50304dc4b
|
refs/heads/master
| 2022-12-09T16:08:23.118808
| 2018-07-20T21:28:44
| 2018-07-20T21:28:44
| 141,611,159
| 0
| 0
| null | 2022-09-16T17:48:53
| 2018-07-19T17:28:08
|
CSS
|
UTF-8
|
Python
| false
| false
| 1,405
|
py
|
"""initial setup
Revision ID: 38a42c2f5012
Revises:
Create Date: 2018-07-18 15:01:38.452443
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '38a42c2f5012'
down_revision = None
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table('user_account',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('date_created', sa.DateTime(), nullable=True),
sa.Column('date_updated', sa.DateTime(), nullable=True),
sa.Column('active', sa.Boolean(), server_default='t', nullable=True),
sa.Column('first_name', sa.String(length=128), server_default='First name', nullable=False),
sa.Column('last_name', sa.String(length=128), server_default='Last name', nullable=False),
sa.Column('email', sa.String(length=128), server_default='someone@xample.org', nullable=False),
sa.Column('username', sa.String(length=64), nullable=False),
sa.Column('authenticated', sa.Boolean(), server_default='f', nullable=False),
sa.Column('ticket', sa.String(length=64), nullable=True),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('email')
)
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_table('user_account')
# ### end Alembic commands ###
|
[
"hcen001@cs.fiu.edu"
] |
hcen001@cs.fiu.edu
|
4b529eb1a41fa41aebc04458383a380f44631bc0
|
9c87bcfe6b374f638b4731ff1c50872c608869c3
|
/BankApp/testScript.py
|
6cd4cd18192c84d9297c5230c9fa967f7bce5f95
|
[] |
no_license
|
rashbhat/BankApplication
|
10ca91006e47fcafe9bf1072c161a7250f363626
|
7fbb769f1966f71818012b2ffe6a08cb43932993
|
refs/heads/master
| 2021-01-10T05:00:15.148426
| 2015-11-18T12:34:27
| 2015-11-18T12:34:27
| 46,332,963
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,437
|
py
|
from BankApp.models import *
import time
def addBank(bankName='corporation',count=1):
start= time.time()
try:
BankDetails.objects.all().delete()
list = []
for i in range(0,count):
list.append(BankDetails(bank_name=bankName))
bank = BankDetails.objects.bulk_create(list)
ret=str(count)+ ' Banks added successfully'
except Exception as e:
ret = 'unable to add the banks', e
finally:
end = time.time()
print (end-start)
print ('hi')
return ret
def addUser(user='testUser',count=1,bank='hdfc'):
start= time.time()
try:
UserBankMap.objects.all().delete()
UserDetails.objects.all().delete()
bankObj = BankDetails.objects.create(bank_name=bank)
userBankMapList = []
for i in range(0,count):
userObj = UserDetails.objects.create(user_name =user)
userBankMapList.append(UserBankMap(bank_id=bankObj,user_id=userObj))
UserBankMap.objects.bulk_create(userBankMapList)
ret= str(count)+ ' Users added successfully'
except Exception as e:
ret= 'unable to add the users',e
finally:
end = time.time()
print (end-start)
print ('hi')
return ret
def addAccount(acc_type='RD',acc_bal=300,count=1):
start= time.time()
try:
AccountDetails.objects.all().delete()
UserBankMap.objects.all().delete()
UserDetails.objects.all().delete()
BankDetails.objects.all().delete()
bankObj = BankDetails.objects.create(bank_name ='hdfc')
userObj = UserDetails.objects.create(user_name ='user')
userBankMapObj = UserBankMap.objects.create(bank_id=bankObj,user_id=userObj)
accountList=[]
for i in range (0,count):
accountList.append(AccountDetails(userBankMap_id = userBankMapObj,account_type=acc_type,account_balance=acc_bal))
accountObj = AccountDetails.objects.bulk_create(accountList)
ret= str(count)+ ' Accounts created successfully'
except Exception as e:
ret= 'unable to create account',e
finally:
end = time.time()
print (end-start)
print ('hi')
return ret
def addTransaction(trans_type='credit',trans_amount=100,count=1):
start= time.time()
TransactionDetails.objects.all().delete()
AccountDetails.objects.all().delete()
UserBankMap.objects.all().delete()
UserDetails.objects.all().delete()
BankDetails.objects.all().delete()
bankObj = BankDetails.objects.create(bank_name ='hdfc')
userObj = UserDetails.objects.create(user_name ='user')
userBankMapObj = UserBankMap.objects.create(bank_id=bankObj,user_id=userObj)
accountObj= AccountDetails.objects.create(userBankMap_id = userBankMapObj,account_type='savings',account_balance='150')
try:
transList =[]
for i in range(0,count):
transList.append(TransactionDetails(trans_type=trans_type,trans_amount=trans_amount,account_id=accountObj))
transObj = TransactionDetails.objects.bulk_create(transList)
ret= str(count)+ ' Transactions added successfully'
except Exception as e:
ret= 'unable to add transactions',e
finally:
end = time.time()
print (end-start)
print ('hi')
return ret
|
[
"rashmi.bhat@tarams.com"
] |
rashmi.bhat@tarams.com
|
57c9aaaa261bf0b0d4e271cb10e4c8e9674e9dd7
|
354f06515afe31f97a7301ced9229f379830c5b0
|
/Document/doc.py
|
304746b0324b1299af21abeb229306de3d849bb9
|
[] |
no_license
|
mushthofa/HackerRankPython
|
a9bbe905c5baede197ad174ee986c3331f8fcadf
|
3a03928c05a9e2bc79fd4c4014bfa455b86841e5
|
refs/heads/master
| 2020-06-28T17:07:19.058230
| 2019-08-02T20:13:39
| 2019-08-02T20:13:39
| 200,292,171
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,150
|
py
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Sun Jul 2 01:14:48 2017
@author: mush
"""
from sklearn.feature_extraction.text import CountVectorizer
from sklearn.feature_extraction.text import TfidfTransformer
from sklearn.linear_model import LogisticRegression
from sklearn.pipeline import Pipeline
from sklearn.model_selection import cross_val_score
trfilename = 'trainingdata.txt'
with open(trfilename) as trfile:
n = int(trfile.readline().strip())
target = []
textdata = []
for i in range(n):
line = trfile.readline().strip().split(" ")
target.append(int(line[0]))
textdata.append(" ".join(line[1:]))
k = int(input().strip())
testdata = []
for i in range(k):
line =input().strip()
testdata.append(line)
clf = Pipeline([('vect', CountVectorizer()), ('tfidf', TfidfTransformer()), ('clf', LogisticRegression(C=100, penalty='l1', solver='liblinear'))])
clf.fit(textdata, target)
test_out = clf.predict(testdata)
for t in test_out:
print(t)
#scores = cross_val_score(clf, textdata, target, cv=5)
#print("Accuracy: %0.2f (+/- %0.2f)" % (scores.mean(), scores.std() * 2))
|
[
"noreply@github.com"
] |
mushthofa.noreply@github.com
|
5b87429b97fd2963755878b89b8492ca457369ba
|
ae7c8af0df1229bb948b736871f6453ebdc2968d
|
/english_rule.py
|
891fc31bcd646ff9796e4b523f41c8498c5afe3f
|
[] |
no_license
|
fishjawzrex/Algorithms-and-Data-Structures-in-Python
|
7dd9a8a09c9e09985724e67e0df62f92c11c0edf
|
03ef0298395ef9ae5ecdf157ae301db3f1849df2
|
refs/heads/master
| 2020-06-18T07:47:51.940712
| 2019-07-10T14:31:23
| 2019-07-10T14:31:23
| 196,220,102
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 628
|
py
|
class EnglishRuler:
def __init__(self, inches, ticks):
self.inches = inches
self.ticks = ticks
def draw_line(self,ticks,label):
line = '-'*ticks
if label:
line += label
print(line)
def draw_interval(self,center_line):
if center_line:
self.draw_interval(center_line-1)
self.draw_line(center_line-1,None)
self.draw_interval(center_line-1)
def draw_ruler(self):
self.draw_line(self.ticks, '0')
for i in range(1,self.inches+1):
self.draw_interval(self.ticks)
self.draw_line(self.ticks,str(i))
w = EnglishRuler(4,5)
w.draw_ruler()
|
[
"noreply@github.com"
] |
fishjawzrex.noreply@github.com
|
f5c38733f167bf2ed48272b6324bb99c940da5c5
|
482160f217892648c408b73e814427ca62afe14a
|
/chat application/server.py
|
4b57733367bbeb697ee29247f7bcbf9d343b19d4
|
[] |
no_license
|
JasonVanRaamsdonk/Socket-Programming---Python
|
752da9fd6ceeb5bdf6c3531da8b3bb3b768e26ce
|
87693817be4b93bd1a260c88995176c0697a6332
|
refs/heads/master
| 2020-08-27T11:34:33.254049
| 2019-11-02T00:51:12
| 2019-11-02T00:51:12
| 217,353,291
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,401
|
py
|
"""
author: Jason van Raamsdonk
handling multiple connections server-side
"""
import socket
import select # gives OS-level I/O multiplexing capabilities
HEADER_LENGTH = 10
IP = "127.0.0.1"
PORT = 1234
server_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
server_socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) # to ensure port is not in use
server_socket.bind((IP, PORT))
server_socket.listen()
sockets_list = [server_socket]
clients = {}
"""
clients dictionary:
The client socket will be the key and the user
data will be the value
"""
def receive_message(client_socket):
try:
message_header = client_socket.recv(HEADER_LENGTH)
if not len(message_header):
return False
message_length = int(message_header.decode('utf-8').strip())
return {"header": message_header, "data": client_socket.recv(message_length)}
except:
return False
while True:
read_sockets, _, exception_sockets = select.select(sockets_list, [], sockets_list)
for notified_socket in read_sockets:
if notified_socket == server_socket:
client_socket, client_address = server_socket.accept()
user = receive_message(client_socket)
if user is False: # someone disconnected
continue
sockets_list.append(client_socket)
clients[client_socket] = user
print(f"accepted new connection form {client_address[0]},:{client_address[1]} username:{user['data'].decode('utf-8')}")
else:
message = receive_message(notified_socket)
if message is False:
print(f"Closed connection from {clients[notified_socket]['data'].decode('utf-8')}")
sockets_list.remove(notified_socket)
del clients[notified_socket]
continue
user = clients[notified_socket]
print(f"Received message from {user['data'].decode('utf-8')}: {message['data'].decode('utf-8')}")
for client_socket in clients:
if client_socket != notified_socket: # we don't want to send the message back to the sender
client_socket.send(user['header'] + user['data'] + message['header'] + message['data'])
for notified_socket in exception_sockets:
sockets_list.remove(notified_socket)
del clients[notified_socket]
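# A minimal matching client sketch (an added illustration, not part of this
# file): every message must be prefixed with a fixed-width, left-justified
# length header of HEADER_LENGTH bytes, exactly as receive_message() expects.
#
#   import socket
#   client = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
#   client.connect((IP, PORT))
#   username = "alice".encode('utf-8')
#   client.send(f"{len(username):<{HEADER_LENGTH}}".encode('utf-8') + username)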
|
[
"c17764691@mydit.ie"
] |
c17764691@mydit.ie
|
d1f132ac90cc23d7a00f8efe98c24313cb2ee5c3
|
0b0e5d55c5d7ac66b58deaf9d7452d2485925881
|
/mvc/models.py
|
5fa9c6e0bbc4f3eb4734e332140699804a28f8f8
|
[] |
no_license
|
390910131/tcicada
|
7b776198de74ca26a376b771ddafd7b64355b7c8
|
9387597734ef4b8afc2a4e0c80d5c1dcb592bd28
|
refs/heads/master
| 2021-01-15T19:44:58.275700
| 2010-08-19T15:37:50
| 2010-08-19T15:37:50
| 42,674,760
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 4,409
|
py
|
# -*- coding:utf-8 -*-
import time
from django.db import models, connection
from django.contrib import admin
from django.utils import timesince, html
from cicada.utils import function, formatter
from cicada.settings import *
# Create your models here.
class Category(models.Model):
    name = models.CharField('category name', max_length=20)
def __unicode__(self):
return self.name
def save(self):
self.name = self.name[0:20]
return super(Category, self).save()
class Meta:
        verbose_name = 'category name'
        verbose_name_plural = 'category names'
class CategoryAdmin(admin.ModelAdmin):
list_display = ('id', 'name')
list_display_links = ('id', 'name')
list_per_page = ADMIN_PAGE_SIZE
# Area Model
class Area(models.Model):
    TYPE_CHOICES = ((0, 'country'), (1, 'province'), (2, 'city'), (3, 'district/county'))
    name = models.CharField('place name', max_length=100)
    code = models.CharField('code', max_length=255)
    type = models.IntegerField('type', choices = TYPE_CHOICES)
    parent = models.IntegerField('parent id (self-referencing)')
def __unicode__(self):
return self.name
class Meta:
        verbose_name = u'location'
        verbose_name_plural = u'locations'
class AreaAdmin(admin.ModelAdmin):
list_display = ('id','name','code')
list_display_links = ('id','name','code')
list_per_page = ADMIN_PAGE_SIZE
# User Model
class User(models.Model):
    username = models.CharField('username', max_length=20)
    password = models.CharField('password', max_length=100)
    realname = models.CharField('real name', max_length=20)
    email = models.EmailField('Email')
    area = models.ForeignKey(Area, verbose_name='area')
    face = models.ImageField('avatar', upload_to='face/%Y/%m/%d',default='',blank=True)
    url = models.CharField('personal homepage', max_length=200, default='',blank=True)
    about = models.TextField('about me', max_length=1000, default='', blank=True)
    addtime = models.DateTimeField('registration time', auto_now=True)
    friend = models.ManyToManyField('self', verbose_name='friends')
def __unicode__(self):
return self.realname
def addtime_format(self):
return self.addtime.strftime('%Y-%m-%d %H:%M:%S')
def save(self, modify_pwd=True):
if modify_pwd:
self.password = function.md5_encode(self.password)
self.about = formatter.substr(self.about, 20, True)
super(User, self).save()
class Meta:
        verbose_name = u'user'
        verbose_name_plural = u'users'
class UserAdmin(admin.ModelAdmin):
list_display = ('id', 'username', 'realname', 'email', 'addtime_format')
list_display_links = ('username', 'realname', 'email')
list_per_page = ADMIN_PAGE_SIZE
# Note Model
class Note(models.Model):
    message = models.TextField('message')
    addtime = models.DateTimeField('publish time', auto_now=True)
    category = models.ForeignKey(Category, verbose_name='source')
    user = models.ForeignKey(User, verbose_name='publisher')
def __unicode__(self):
return self.message
def message_short(self):
return formatter.substr(self.message, 30)
def addtime_format_admin(self):
return self.addtime.strftime('%Y-%m-%d %H:%M:%S')
def category_name(self):
return self.category.name
def user_name(self):
return self.user.realname
def save(self):
self.message = formatter.content_tiny_url(self.message)
self.message = html.escape(self.message)
self.message = formatter.substr(self.message, 140)
super(Note, self).save()
def get_absolute_url(self):
return APP_DOMAIN + 'message/%s/' % self.id
class Meta:
        verbose_name = u'message'
        verbose_name_plural = u'messages'
class NoteAdmin(admin.ModelAdmin):
list_display = ('id', 'user_name', 'message_short', 'addtime_format_admin', 'category_name')
list_display_links = ('id', 'message_short')
search_fields = ['message']
list_per_page = ADMIN_PAGE_SIZE
admin.site.register(Note,NoteAdmin)
admin.site.register(Category,CategoryAdmin)
admin.site.register(User,UserAdmin)
admin.site.register(Area,AreaAdmin)
|
[
"kimlyfly@aa58f504-fbc4-ba3c-79f2-aa09fbc8d42c"
] |
kimlyfly@aa58f504-fbc4-ba3c-79f2-aa09fbc8d42c
|
7dc929d5a5dbe01e80c475af5bbc70599883261f
|
068c02748396264ab920d93f4946132d01155f59
|
/venv/lib/python3.5/site-packages/captcha/widgets.py
|
fa530cb6b0f3306177da904a69c0bbb1c4377d24
|
[] |
no_license
|
sumeyyekilic/Django-WebApp
|
8b1752c5cda6078dff98d63527cfca2c3fb014b2
|
3d2da87d714caae55a7fd550539e9efac0212dd6
|
refs/heads/master
| 2020-06-04T18:10:52.953144
| 2019-10-31T05:59:25
| 2019-10-31T05:59:25
| 192,136,312
| 5
| 0
| null | 2019-10-31T06:00:37
| 2019-06-16T00:49:45
|
Python
|
UTF-8
|
Python
| false
| false
| 1,333
|
py
|
from django import forms
from django.conf import settings
from django.utils.safestring import mark_safe
from . import client
from .constants import TEST_PUBLIC_KEY
class ReCaptcha(forms.widgets.Widget):
if getattr(settings, "NOCAPTCHA", False):
recaptcha_response_name = 'g-recaptcha-response'
recaptcha_challenge_name = 'g-recaptcha-response'
else:
recaptcha_challenge_name = 'recaptcha_challenge_field'
recaptcha_response_name = 'recaptcha_response_field'
def __init__(self, public_key=None, use_ssl=None, attrs=None, *args,
**kwargs):
self.public_key = public_key or getattr(settings, 'RECAPTCHA_PUBLIC_KEY', TEST_PUBLIC_KEY)
if attrs is None:
attrs = {}
self.use_ssl = use_ssl if use_ssl is not None else getattr(
settings, 'RECAPTCHA_USE_SSL', True)
self.js_attrs = attrs
super(ReCaptcha, self).__init__(*args, **kwargs)
def render(self, name, value, attrs=None):
return mark_safe(u'%s' % client.displayhtml(
self.public_key,
self.js_attrs, use_ssl=self.use_ssl))
def value_from_datadict(self, data, files, name):
return [
data.get(self.recaptcha_challenge_name, None),
data.get(self.recaptcha_response_name, None)
]
|
[
"smyyekilic@gmail.com"
] |
smyyekilic@gmail.com
|
52e73bbaf0f98cbdfba7c45a1b45c6439b23e49d
|
8013562f078a60b8370a37ab421d4c0378e51594
|
/01_EstruturaSequencial/Ex17.py
|
346f6e144310d5cbcd4ff82f28921d73a3990c01
|
[] |
no_license
|
Luizcarlosqueiroz/PythonExercises
|
05226602afe52d8c0984bda80a09657260625075
|
695a3df9834b5085993b3675f14719af5980e6bf
|
refs/heads/master
| 2022-11-30T02:16:08.439853
| 2020-08-03T20:12:51
| 2020-08-03T20:12:51
| 276,566,313
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 737
|
py
|
area = float(input("Qual o tamanho da área em metros a ser pintada? "))
litros = area / 6
latas = int(litros / 18)
galoes = int(litros / 3.6)
if litros % 18 != 0:
latas += 1
if litros % 3.6 != 0:
galoes += 1
mixLatas = int(litros/18)
litrosRest = litros - (mixLatas*18)
mixGaloes = int(litrosRest / 3.6)
if litrosRest % 3.6 != 0:
mixGaloes += 1
print("Você precisará de {} latas de tinta com um custo de R$ {:.2f}.".format(latas, latas*80))
print("OU")
print("Você precisará de {} galões de tinta com um custo de R$ {:.2f}.".format(galoes, galoes*25))
print("OU")
print("Você precisará de {} latas e {} galões de tinta com um custo de R$ {:.2f}."
.format(mixLatas, mixGaloes, mixLatas*80 + mixGaloes*25))
|
[
"luizcarlosqueirozf@gmail.com"
] |
luizcarlosqueirozf@gmail.com
|
39e24e40bfd7c47b32d30ba9afc9a9ea08fcb122
|
087546cbb4dceecb3d1199ed07b95ee980b4af83
|
/scholarly_citation_finder/apps/parser/AuthorParser.py
|
596e015784d55cc57f0fe3507cc0b55c334ed05b
|
[
"MIT"
] |
permissive
|
citationfinder/scholarly_citation_finder
|
369122c65607b90a42e395d6f916e489a7e9d2a3
|
3e6c340cfebc934a013759e27d8c145171110156
|
refs/heads/master
| 2020-04-12T01:40:30.084296
| 2017-02-19T16:30:38
| 2017-02-19T16:30:38
| 52,394,942
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 4,863
|
py
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
import logging
from django.db.utils import DataError
from scholarly_citation_finder.tools.nameparser.AuthorNameParser import AuthorNameParser
from scholarly_citation_finder.apps.core.models import Author, AuthorNameBlock, AuthorNameVariation
from scholarly_citation_finder.apps.parser.Exceptions import ParserDataError
logger = logging.getLogger(__name__)
class AuthorParser:
'''
Parse an author.
'''
def __init__(self, database):
'''
Create object.
:param database: Database name
'''
self.database = database
def parse(self, name):
'''
Parse an author.
:param name: Author name as string
'''
name = AuthorNameParser(name, normalize=True)
if name.title and not name.first:
name.first = name.title
#name.title = ''
name_middle = name.middle if name.middle else None
name_suffix = name.suffix if name.suffix else None
name_nickname = name.nickname if name.nickname else None
if name.last and name.first:
try:
# Get block
block, _ = AuthorNameBlock.objects.using(self.database).get_or_create(name='%s,%s' % (name.last, name.first[0]))
# Get or create name variation
variation = AuthorNameVariation.objects.using(self.database).filter(block_id=block.id,
first=name.first,
middle=name_middle,
last=name.last,
suffix=name_suffix,
nickname=name_nickname)[:1]
if variation:
return variation[0].author_id
else:
variation_short = AuthorNameVariation.objects.using(self.database).filter(block_id=block.id,
first=name.first[0],
middle=name_middle[0] if name_middle else None,
last=name.last)[:1]
if variation_short:
author_id = variation_short[0].author_id
else:
#name.capitalize()
author = Author.objects.using(self.database).create(name=str(name).title())
author_id = author.id
if len(name.first) > 1: # Otherwise this version was already stored above
self.__store_shortname_variation(block.id, author_id, name.first, name_middle, name.last)
AuthorNameVariation.objects.using(self.database).create(block_id=block.id,
author_id=author_id,
first=name.first,
middle=name_middle,
last=name.last,
suffix=name_suffix,
nickname=name_nickname)
return author_id
except(DataError) as e:
raise ParserDataError('Author name is invalid: %s' % str(e))
else:
raise ParserDataError('Author name has no last or first name: %s' % name)
def __store_shortname_variation(self, block_id, author_id, first, middle, last):
'''
Store the short version of the name variation.
:param block_id: ID of the block
:param author_id: ID of the author
:param first: First name
:param middle: Middle name
:param last: Last name
'''
middle = middle[0] if middle else None
AuthorNameVariation.objects.using(self.database).get_or_create(block_id=block_id,
author_id=author_id,
first=first[0],
middle=middle,
last=last)
|
[
"code@xennis.org"
] |
code@xennis.org
|
0e0ef3f81fb8b228e7805d8eeab539f69ebab911
|
67381bbdfef860b00ec4709ebae86563e632db20
|
/Probability_Loan/code.py
|
71d4a0a2ca68a3fc966c88dcae15c708a964081e
|
[
"MIT"
] |
permissive
|
RounakPython/ga-learner-dsai-repo
|
7591099e4a4c9c799eec0aa8b699a70c61403023
|
8aeaab16affd865b31bd1f740d77f983e2ba5bbe
|
refs/heads/master
| 2022-12-27T13:56:23.051542
| 2020-10-11T06:51:53
| 2020-10-11T06:51:53
| 266,036,211
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,480
|
py
|
# --------------
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
# Load the dataframe
df = pd.read_csv(path)
#Code starts here
#Task 1
total = df.shape[0]
p_a = len(df[df['fico'] > 700]) / total
p_b = len(df[df['purpose'] == 'debt_consolidation']) / total
p_a_b = len(df[(df['fico'] > 700) & (df['purpose'] == 'debt_consolidation')]) / total
p_a_b = p_a_b / p_b
result = p_a_b == p_a
print(result)
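# If P(A|B) equals P(A), "fico > 700" is independent of the debt_consolidation
# purpose; `result` above is exactly that equality check.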
#Task 2
prob_lp = len(df[df['paid.back.loan'] == 'Yes']) /total
prob_cs = len(df[df['credit.policy'] == 'Yes']) /total
new_df = df[df['paid.back.loan'] == 'Yes']
prob_pd_cs = len(df[(df['paid.back.loan'] == 'Yes') & (df['credit.policy'] == 'Yes')]) / total
bayes = prob_pd_cs / prob_cs
print(bayes)
#Task 3
df['purpose'].value_counts(normalize=True).plot(kind='bar')
plt.title("Probability Distribution of Purpose")
plt.ylabel("Probability")
plt.xlabel("Number of Purpose")
plt.show()
df1 = df[df['paid.back.loan'] == 'No']
df1['purpose'].value_counts(normalize=True).plot(kind='bar')
plt.title("Probability Distribution of Purpose")
plt.ylabel("Probability")
plt.xlabel("Number of Purpose")
plt.show()
#Task 4
inst_median = df['installment'].median()
inst_mean = df['installment'].mean()
df['installment'].hist(density=True, bins=50)
plt.axvline(x=inst_median, color='r')
plt.axvline(x=inst_mean, color='g')
plt.show()
df['log.annual.inc'].hist(density=True, bins=50)
plt.show()
|
[
"RounakPython@users.noreply.github.com"
] |
RounakPython@users.noreply.github.com
|
b333a3b4d733faf7a887292ac3ed667ff495649d
|
197cf9217cba0fc3c0eba7014ef836b43ac98461
|
/base/forms.py
|
89577d4332ac1ef9b95351c548bb5ebb1d73af4e
|
[] |
no_license
|
barslmn/sci-pub
|
e9f14fc22070384fdde1c20451ed47ba45234797
|
18df0a8908ccc927f099f4581cafa225268c0558
|
refs/heads/master
| 2022-12-06T19:47:57.530307
| 2020-08-05T11:31:41
| 2020-08-05T11:31:41
| 285,229,138
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 440
|
py
|
from django import forms
from captcha.fields import CaptchaField
class AllauthSignupForm(forms.Form):
first_name = forms.CharField(max_length=30, label='First name')
last_name = forms.CharField(max_length=30, label='Last name')
captcha = CaptchaField()
def signup(self, request, user):
user.first_name = self.cleaned_data['first_name']
user.last_name = self.cleaned_data['last_name']
user.save()
|
[
"barslmn@gmail.com"
] |
barslmn@gmail.com
|
ae33b5f0422bd597c0b9eb6cd088f47deba98e6b
|
9ba5d85bc644cc586abc29b6c82047deb4caea1f
|
/leetcode/275.h-指数-ii.py
|
1b67540878acfdffabeb52f8ebd82622e0e2e425
|
[
"MIT"
] |
permissive
|
Data-Designer/Leetcode-Travel
|
f01dda19a1e37a2ba9da42e8ecda304c73645d99
|
147cf44904ce73cd4fd1cecf33f1ac8a336b0e6f
|
refs/heads/master
| 2023-07-30T22:54:53.101323
| 2021-09-14T04:34:08
| 2021-09-14T04:34:08
| 366,757,874
| 2
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 783
|
py
|
'''
Description: the idea is to keep the longest suffix in which every
citation count is at least the length of that suffix
version:
Author: Data Designer
Date: 2021-05-30 10:33:55
LastEditors: Data Designer
LastEditTime: 2021-05-30 10:43:46
'''
#
# @lc app=leetcode.cn id=275 lang=python3
#
# [275] H-Index II
#
# @lc code=start
from typing import List

class Solution:
def hIndex(self, citations: List[int]) -> int:
size = len(citations)
if size ==0 or citations[-1]==0:
return 0
left,right = 0,size-1
while left < right:
mid = left + (right-left)//2
            if citations[mid] >= size-mid: # every count outside the suffix must be < size-mid
right = mid
else:
left = mid +1
return size-left
# @lc code=end
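# Worked example: citations = [0,1,3,5,6] (size 5). The binary search settles
# on left = 2, since citations[2] = 3 >= 5-2, so hIndex returns 5-2 = 3: the
# last three papers each have at least 3 citations.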
|
[
"zc_dlmu@163.com"
] |
zc_dlmu@163.com
|
cc1e4f5c83ecc6b4a61bf86e1b05ab29487d7b65
|
1bb42bac177fb4e979faa441363c27cb636a43aa
|
/multi_epoch_dp_matrix_factorization/multiple_participations/factorize_multi_epoch_prefix_sum.py
|
6b76010e64530c60ff33fca9365ce0968ed01d04
|
[
"BSD-3-Clause",
"Apache-2.0"
] |
permissive
|
google-research/federated
|
a6040e80fa0fbf533e0d665c66a9bc549d208b3d
|
329e60fa56b87f691303638ceb9dfa1fc5083953
|
refs/heads/master
| 2023-08-28T13:10:10.885505
| 2023-08-22T23:06:08
| 2023-08-22T23:06:40
| 295,559,343
| 595
| 187
|
Apache-2.0
| 2022-05-12T08:42:53
| 2020-09-14T23:09:07
|
Python
|
UTF-8
|
Python
| false
| false
| 7,541
|
py
|
# Copyright 2023, Google LLC.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Binary for factorizing and writing multi-epoch prefix sum matrices."""
import collections
from collections.abc import Sequence
import os
from absl import app
from absl import flags
from absl import logging
from jax import numpy as jnp
import numpy as np
import optax
import pandas as pd
import tensorflow as tf
from multi_epoch_dp_matrix_factorization import matrix_constructors
from multi_epoch_dp_matrix_factorization import matrix_io
from multi_epoch_dp_matrix_factorization.multiple_participations import contrib_matrix_builders
from multi_epoch_dp_matrix_factorization.multiple_participations import lagrange_terms
from multi_epoch_dp_matrix_factorization.multiple_participations import lt_initializers
from multi_epoch_dp_matrix_factorization.multiple_participations import optimization
from utils import training_utils
# Unused import to declare XM flags.
IRRELEVANT_FLAGS = frozenset(iter(flags.FLAGS))
_INIT_MATRIX_DIR = flags.DEFINE_string(
'init_matrix_dir',
'',
'Directory from which to load optional initial matrix',
)
_NUM_EPOCHS = flags.DEFINE_integer(
'num_epochs', 1, 'Number of epochs for which to optimize'
)
_STEPS_PER_EPOCH = flags.DEFINE_integer(
'steps_per_epoch', 1, 'Number of steps in each epoch.'
)
_CONSTRAINT_PATTERN = flags.DEFINE_enum(
'constraint_pattern',
'all_positive',
['all_positive', 'none'],
'Additional constraints on the factorization.',
)
_NN_LR = flags.DEFINE_float(
'nn_lr', 0.01, 'Learning rate for nonnegativity optimizer.'
)
_NN_MOMENTUM = flags.DEFINE_float(
'nn_momentum', 0.95, 'Momentum value for nonnegativity optimizer.'
)
_PREFIX_SUM = 'prefix_sum'
_MOMENTUM_COOLDOWN = 'momentum_with_cooldown'
_MATRIX_TO_FACTOR = flags.DEFINE_enum(
'matrix_to_factor',
_PREFIX_SUM,
[_PREFIX_SUM, _MOMENTUM_COOLDOWN],
(
'Which matrix to factor, either the standard prefix sum matrix, '
'or the matrix with momentum and learning-rate cooldown. The '
'lr schedule and momentum parameter are currently hard-coded.'
),
)
# Operational parameters
_MAX_ITER = flags.DEFINE_integer(
'max_iterations', 10, 'Maximum number of steps to take.'
)
_STEPS_PER_EVAL = flags.DEFINE_integer(
    'steps_per_eval', 1, 'Number of steps to take between evaluations.'
)
_REL_DUALITY_GAP = flags.DEFINE_float(
'target_relative_duality_gap',
1e-3,
'Relative duality gap to use as stopping criterion.',
)
_ROOT_DIR = flags.DEFINE_string(
'root_output_dir', '', 'Directory to write matrices and loss values to.'
)
_RUN_NAME = flags.DEFINE_string(
'run_name',
'',
(
'Unique experiment name. Will be appended to root_output_dir to'
' uniquify results.'
),
)
HPARAM_FLAGS = [f for f in flags.FLAGS if f not in IRRELEVANT_FLAGS]
FLAGS = flags.FLAGS
def _get_lagrange_terms() -> lagrange_terms.LagrangeTerms:
"""Constructs initial LagrangeTerms based on flags."""
constraint_pattern = _CONSTRAINT_PATTERN.value
if constraint_pattern == 'all_positive':
if _INIT_MATRIX_DIR.value:
return lt_initializers.init_nonnegative_lagrange_terms_from_path(
_INIT_MATRIX_DIR.value, _NUM_EPOCHS.value, _STEPS_PER_EPOCH.value
)
else:
return lt_initializers.init_nonnegative_lagrange_terms(
_NUM_EPOCHS.value, _STEPS_PER_EPOCH.value
)
elif constraint_pattern == 'none':
n = _NUM_EPOCHS.value * _STEPS_PER_EPOCH.value
contrib_matrix = contrib_matrix_builders.epoch_participation_matrix(
n, _NUM_EPOCHS.value
)
return lagrange_terms.init_lagrange_terms(contrib_matrix)
else:
raise ValueError(f'Unknown --constraint_pattern {constraint_pattern}')
def get_lr_schedule(n):
# Hard-coded for now based on previous experiments
cooldown_period = n // 4
cooldown_target = 0.05
lr = np.ones(n)
lr[-cooldown_period:] = np.linspace(1.0, cooldown_target, num=cooldown_period)
return lr
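# Example (illustrative): for n = 8, cooldown_period = n // 4 = 2, so
# get_lr_schedule(8) -> [1, 1, 1, 1, 1, 1, 1.0, 0.05]: constant at 1.0, then a
# linear ramp down to cooldown_target over the last quarter of the rounds.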
def main(argv: Sequence[str]) -> None:
if len(argv) > 1:
raise app.UsageError('Too many command-line arguments.')
hparam_dict = collections.OrderedDict(
[(name, FLAGS[name].value) for name in HPARAM_FLAGS]
)
program_state_manager, (_, csv_manager, tb_manager) = (
training_utils.create_managers(
root_dir=_ROOT_DIR.value,
experiment_name=_RUN_NAME.value,
)
)
training_utils.write_hparams_to_csv(
hparam_dict=hparam_dict,
root_output_dir=_ROOT_DIR.value,
experiment_name=_RUN_NAME.value,
)
n = _NUM_EPOCHS.value * _STEPS_PER_EPOCH.value
flags.FLAGS.matrix_root_path = os.path.join(_ROOT_DIR.value, _RUN_NAME.value)
if _MATRIX_TO_FACTOR.value == _PREFIX_SUM:
s_matrix = jnp.tri(n, dtype=jnp.float64)
# We need to ensure we write with a path matrix_io.get_prefix_sum_w_h can
# load via aggregator_builder.py
output_dir = matrix_io.get_matrix_path(n, matrix_io.PREFIX_OPT)
learning_rates = None
else:
momentum = 0.95 # Hard-coded for now:
learning_rates = get_lr_schedule(n)
s_matrix = matrix_constructors.momentum_sgd_matrix(
n, momentum, learning_rates
)
# We need to ensure we write with a path that aggregator_builder.py
# can reconstruct via the lr_momentum_matrix codepath from which
# momentum can be inferred.
output_dir = matrix_io.get_momentum_path(n, momentum)
assert s_matrix.dtype == np.float64
lt = _get_lagrange_terms()
lt.assert_valid()
logging.info('Calling into Lagrange dual problem solver.')
results = optimization.solve_lagrange_dual_problem(
s_matrix=s_matrix,
lt=lt,
update_langrange_terms_fn=optimization.OptaxUpdate(
# Larger problems seem to need smaller learning rates in order to
# not produce non-PD u_total() matrices; see comments
# on OptaxUpdate
nonneg_optimizer=optax.sgd(_NN_LR.value, momentum=_NN_MOMENTUM.value),
lt=lt,
multiplicative_update=True,
),
max_iterations=_MAX_ITER.value,
iters_per_eval=_STEPS_PER_EVAL.value,
target_relative_duality_gap=_REL_DUALITY_GAP.value,
program_state_manager=program_state_manager,
metric_release_managers=(csv_manager, tb_manager),
)
logging.info('Writing final results to %s', output_dir)
tf.io.gfile.makedirs(output_dir)
loss_csv_filename = os.path.join(output_dir, 'losses.csv')
logging.info('Final loss: %s', results['losses'][-1])
logging.info('Final dual objective value: %s', results['dual_obj_vals'][-1])
df_arg = {}
for col_name in ['losses', 'dual_obj_vals']:
df_arg[col_name] = results[col_name]
df = pd.DataFrame(df_arg)
# TODO(b/241453645): Move formatting as a dataframe into the optimize
# function itself? Serialize not-per-round entries in results as well?
df.to_csv(loss_csv_filename)
matrix_io.verify_and_write(
results['W'], results['H'], s_matrix, output_dir, lr_sched=learning_rates
)
if __name__ == '__main__':
app.run(main)
|
[
"copybara-worker@google.com"
] |
copybara-worker@google.com
|
a4c06618a453e251976f90bba6bc5a9ec9cd78b1
|
c14745a84de92c11cd0e12c15b99dc214dc2da68
|
/img.py
|
2dc82f400d3c37e99ae77d7b89d64855d7a78a0b
|
[] |
no_license
|
abhishek111226/ViTag
|
850d15666aea7370364ce3f9ff113cd97ee711ce
|
f128e80351dc42c7d3851e4442ae48396958661b
|
refs/heads/master
| 2020-07-30T15:12:27.535038
| 2019-12-24T13:44:43
| 2019-12-24T13:44:43
| 73,627,719
| 0
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,167
|
py
|
import numpy as np
import cv2
from matplotlib import pyplot as plt
img1 = cv2.imread('./extract_taj/1.png',0) # queryImage
img2 = cv2.imread('./extract_taj/5.png',0) # trainImage
# Initiate SIFT detector
sift = cv2.xfeatures2d.SIFT_create()
# find the keypoints and descriptors with SIFT
kp1, des1 = sift.detectAndCompute(img1,None)
kp2, des2 = sift.detectAndCompute(img2,None)
# FLANN parameters
FLANN_INDEX_KDTREE = 0
index_params = dict(algorithm = FLANN_INDEX_KDTREE, trees = 5)
search_params = dict(checks=50) # or pass empty dictionary
flann = cv2.FlannBasedMatcher(index_params,search_params)
matches = flann.knnMatch(des1,des2,k=2)
# Need to draw only good matches, so create a mask
matchesMask = [[0,0] for i in range(len(matches))]
# ratio test as per Lowe's paper
for i,(m,n) in enumerate(matches):
if m.distance < 0.7*n.distance:
matchesMask[i]=[1,0]
draw_params = dict(matchColor = (0,255,0),
singlePointColor = (255,0,0),
matchesMask = matchesMask,
flags = 0)
img3 = cv2.drawMatchesKnn(img1,kp1,img2,kp2,matches,None,**draw_params)
plt.imshow(img3)
plt.show()
|
[
"abhishek0patwardhan@gmail.com"
] |
abhishek0patwardhan@gmail.com
|
57d6682bc27b07ec0953a716508390f6945482bf
|
50e43348552237093193b0ad388d2c9081e73ce8
|
/module.py
|
598c9b00bf4770d72b1c9805d85eead70fa5b157
|
[] |
no_license
|
antlikelion/python_practice
|
33d0cd46683ef5a41ff631392852e2fe9592c0c2
|
0e88decdbc3d7c20598dd201079c30e7045c3aa1
|
refs/heads/master
| 2020-05-04T16:26:22.908543
| 2019-04-03T11:52:50
| 2019-04-03T11:52:50
| 179,278,533
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 563
|
py
|
class cal:
def __init__(self, former, latter):
self.former = former
self.latter = latter
def add(self):
result = self.former+self.latter
return result
def sub(self):
result = self.former-self.latter
return result
def mul(self):
result = self.former*self.latter
return result
def div(self):
try:
result = self.former/self.latter
return result
except ZeroDivisionError:
return "0으로는 숫자를 나눌 수 없습니다."
|
[
"ignorant_woo@naver.com"
] |
ignorant_woo@naver.com
|
39bd383c866c0e1600fd64751cde16b02dff786e
|
f0cb61445a288e6566bf4b04655a52c5220b6018
|
/Part5.py
|
10df9d8dbdd3d32b6147f69b2735a9c5b2978a07
|
[] |
no_license
|
TrixniteTech/Youtube-Tutorial
|
48e76e995bb5cd710d95f6fd6c3c3038ec21bca5
|
c9d70ff1fb03e151a8b92a6e3afb506442f32af2
|
refs/heads/master
| 2023-02-07T07:31:30.975554
| 2020-12-20T09:52:52
| 2020-12-20T09:52:52
| 275,588,961
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,027
|
py
|
import discord
from discord.ext import commands
client = commands.Bot(command_prefix = '.')
@client.event
async def on_ready():
print('Ready')
@client.command()
async def ping(ctx):
await ctx.send(f"Pong!{round(client.latency*1000)}")
@client.command()
@commands.has_permissions(administrator=True)
async def ban(ctx, member:discord.Member, *, reason=None):
await member.ban(reason=reason)
await ctx.send(f"Banned{member.mention} sucessfully for {reason}")
@client.command()
@commands.has_permissions(administrator=True)
async def unban(ctx, *, member):
banned_users = await ctx.guild.bans()
member_name, member_discriminator = member.split('#')
for ban_entry in banned_users:
user = ban_entry.user
if(user.name, user.discriminator) == (member_name, member_discriminator):
await ctx.guild.unban(user)
await ctx.send(f"Unbanned{user.mention}"
return
client.run('TOKEN')
|
[
"noreply@github.com"
] |
TrixniteTech.noreply@github.com
|
6868c43b7cfd7d3d96ae0903e3158798a51ddd45
|
78e1d96cbdc5eed5891535081449a20c3e5e152f
|
/codeforces/Swap Adjacent Elements.py
|
19b744cf0b29e243faf4c3ab13e1404dd314a224
|
[] |
no_license
|
sabiramhaida/cp_training
|
e44a3be4c2703187bcc2131abeb6dc55740a2f5f
|
1b908dfd52f172d0026e25d6f58a686516d561dd
|
refs/heads/master
| 2021-06-25T01:41:38.786029
| 2020-12-30T23:18:01
| 2020-12-30T23:18:01
| 176,548,432
| 3
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 282
|
py
|
def solve(array,n,s):
temp=0
for i in range(n):
temp=max(temp,array[i])
if(s[i]=='0' and temp>i+1):
print("NO")
return
print("YES")
n=int(input())
array=list(int(i) for i in input().split())
s=input()+'0'
solve(array,n,s)
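# Added usage sketch (not in the original submission), bypassing stdin.
# A '1' at position i in s means a[i] and a[i+1] may be swapped.
solve([1, 3, 2], 3, "01" + '0')  # YES: the out-of-order pair sits on an allowed position
solve([2, 1], 2, "0" + '0')      # NO: 2 cannot move past position 1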
|
[
"noreply@github.com"
] |
sabiramhaida.noreply@github.com
|
ed4be1061f552d4c5b4f12cc0357a3d6c51a47b3
|
7d312b3056367391356270bbf3a7c3ff0411b76b
|
/Bai1.py
|
4e16693c2f6bb764e63095fbe6c3032b28e22c68
|
[] |
no_license
|
inugato/python
|
b7f274055873c69994fa8f82f8540979d225598c
|
0154f62e9aa7c70e22ce4be872ead85065871eed
|
refs/heads/master
| 2021-07-05T08:32:17.802409
| 2017-09-28T02:59:15
| 2017-09-28T02:59:15
| 104,686,852
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 90
|
py
|
sum = 0
i = 1
n = int(input('Enter value: '))
while i <= n :
sum = sum + i
i=i+1
print(sum)
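# Added sketch (not in the original file): the loop computes 1 + 2 + ... + n,
# which also has the closed form n * (n + 1) // 2.
print(n * (n + 1) // 2)  # same result without the loop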
|
[
"longlamloi97@gmail.com"
] |
longlamloi97@gmail.com
|
cfaf4399abcc938e8d055259cedc03d3aa529069
|
73340c329d519a8e4ba486527b2afd7fe1252dd5
|
/juststorit/worker_main.py
|
ec5c38b46f53b93a9c30ce26befe4391cbeffe4f
|
[] |
no_license
|
kaivalya97/juststorit
|
46d5de0c94a2ea0a27ebed26ce8681061bae8b0f
|
1472151f9e940be6909db81a427c20b7cbbfee32
|
refs/heads/master
| 2020-03-20T14:29:44.224479
| 2018-06-15T12:56:24
| 2018-06-15T12:56:24
| 137,486,944
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,317
|
py
|
from flask import Flask, request, jsonify
import requests.exceptions
from encrypt_split import encrypt_split
from decrypt_merge import decrypt_merge
from sn_up import sn_up
import crud
import os
import random
from worker_client import send_file,recv_file
import json
import sock_comm
# worker - send - user - 8003
# worker - recv - user - 8002
# worker - send - storjnode - 8001
# worker - recv - storjnode - 8000
PORT_send_user = 8003
PORT_recv_user = 8002
PORT_send_sn = 8001
PORT_recv_sn = 8000
def poll_queue():
queueIP = "10.20.24.90"
queue = "http://" + queueIP + ":8005/queuepop"
headers = {'Content-Type': 'application/json'}
return requests.get(queue, headers=headers)
def get_shards(filename, uid):
shards = crud.get_shard_sn_list(filename, uid)
noofshards = len(shards)
for i in range(0, noofshards):
sn_ip = shards[i]['storage_id'] ###Get storage node IP for each shard
        shard_name = shards[i]['shard_id'] ###Get shard name
        recv_file(sn_ip, shard_name, PORT_recv_sn)
    filename = filename['filename']
    print(filename)
decrypt_merge(filename,noofshards)
for i in range(0, noofshards):
os.remove(shards[i]['shard_id'])
while True:
    req = poll_queue()
    while req.text == '101':
        req = poll_queue()
    print(req.text)
req = req.json()
my_request = req['Operation']
ip = req['User_IP']
uid = req['UserID']
if my_request == "U":
name = req['File_Name']
file_path = req['File_Path']
recv_file(ip, file_path, PORT_recv_user)
response = crud.insertFiledetails(uid, name)
file_id = response
encrypt_split(name)
sn = sn_up()
noofsn = len(sn)
for shard in os.listdir("shards"):
            print(shard)
file_shard = 'shards/'+shard
sn_ip = sn[random.randint(0, noofsn-1)]
send_file(sn_ip, file_shard, PORT_send_sn)
crud.insertSharddetails('shard', file_id, shard, sn_ip)
os.remove('shards/'+shard)
os.remove(name)
elif my_request == "D":
file_list = crud.list_files(uid)
sock_comm.send(ip,json.dumps(file_list))
#send list to user. {fid:filename}
elif my_request == "DL":
name = req['File_Name']
get_shards(name, uid)
send_file(ip, name['filename'], PORT_send_user)
os.remove(name['filename'])
elif my_request == "l":
crud.list_files(uid)
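# Added sketch (not from the original repo): the busy-wait on the '101'
# "queue empty" sentinel above can hammer the endpoint; sleeping between polls
# is a minimal fix. poll_until_job is a hypothetical helper name, shown here
# after the (non-terminating) loop purely for illustration.
import time
def poll_until_job():
    req = poll_queue()
    while req.text == '101':
        time.sleep(0.5)  # back off instead of spinning
        req = poll_queue()
    return req.json()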
|
[
"noreply@github.com"
] |
kaivalya97.noreply@github.com
|
7dacf743b9b7356f778064776b91b73097e250b9
|
b580a663d12aa7ff58ab88522c6f30cdadfcb18e
|
/binary_search_recursive.py
|
addb8c92dd4074432590f91e351f4501744fd3d2
|
[] |
no_license
|
jaquinocode/my-algorithms-python
|
02a44b9b8681d53c108243ae3ff05a66c5c09636
|
9fa0a684fe7a7cc041519fbd183123208b6fefbf
|
refs/heads/main
| 2023-08-17T04:34:05.885888
| 2021-09-13T07:05:12
| 2021-09-13T07:05:12
| 405,860,941
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 632
|
py
|
def index_of(target, array):
start = 0
end = len(array) - 1
target_index = binary_search(target, array, start, end)
return target_index
def binary_search(target, array, start, end):
midIndex = start + abs(start - end)//2
if (start > end):
return -1
elif target == array[midIndex]:
return midIndex
elif target > array[midIndex]:
start = midIndex + 1
return binary_search(target, array, start, end)
elif target < array[midIndex]:
end = midIndex - 1
return binary_search(target, array, start, end)
print(index_of('zamboni', ['aardvark', 'apple', 'boy', 'train', 'zebra']))
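# Added usage sketch (not in the original file):
print(index_of('boy', ['aardvark', 'apple', 'boy', 'train', 'zebra']))  # 2
print(index_of('ant', ['aardvark', 'apple', 'boy', 'train', 'zebra']))  # -1 (absent)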
|
[
"28276675+jaquinocode@users.noreply.github.com"
] |
28276675+jaquinocode@users.noreply.github.com
|
5da14db6c3c90c59d24cc0b054e71bd057e6a4b1
|
25329df0c20dd3567f4bdcbcab2f0503a1aae8d3
|
/HW-001/app/config.py
|
ec0d5ad4f2800e0384c21062f192540ebc52816c
|
[] |
no_license
|
JFX-Xx/10W-ML
|
f3e3c2f2affab46024a892b03b8573a02c4208f1
|
1b835722f6ae69764c634db5552e8f5fdf68990b
|
refs/heads/master
| 2020-07-13T02:06:52.732471
| 2019-08-28T15:51:42
| 2019-08-28T15:51:42
| 204,961,184
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 204
|
py
|
# The toolbar is only enabled in debug mode:
#DEBUG = True
DEBUG = False
# Secret key for generating tokens.
# Set a 'SECRET_KEY' to enable the Flask session cookies.
SECRET_KEY = 'this_is_a_secret_key'
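# Added sketch (not in the original file): a common hardening step is to read
# the secret from the environment, keeping the value above only as a fallback.
import os
SECRET_KEY = os.environ.get('SECRET_KEY', SECRET_KEY)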
|
[
"48290849+JFX-Xx@users.noreply.github.com"
] |
48290849+JFX-Xx@users.noreply.github.com
|
a84af6faf9ff33e05a9abe3733d145e5fa3e25fa
|
55c250525bd7198ac905b1f2f86d16a44f73e03a
|
/Python/Games/AI Games/AIPong/Algorithm_1/gameAPI/agent.py
|
4a76f86cb231ed74b4db5a432f90ed80b5492ce7
|
[] |
no_license
|
NateWeiler/Resources
|
213d18ba86f7cc9d845741b8571b9e2c2c6be916
|
bd4a8a82a3e83a381c97d19e5df42cbababfc66c
|
refs/heads/master
| 2023-09-03T17:50:31.937137
| 2023-08-28T23:50:57
| 2023-08-28T23:50:57
| 267,368,545
| 2
| 1
| null | 2022-09-08T15:20:18
| 2020-05-27T16:18:17
| null |
UTF-8
|
Python
| false
| false
| 128
|
py
|
version https://git-lfs.github.com/spec/v1
oid sha256:27cce145fe8172d870a360236c5c6253ce5da598c202beb0960aafdf81240309
size 415
|
[
"nateweiler84@gmail.com"
] |
nateweiler84@gmail.com
|
d11b930e57bd88e67f6f8d6f670dc1cd6d3960a5
|
a777f50fb94d72f78c6e6ebd9bc45da5cbd704c1
|
/django_analytics/django_analytics/settings.py
|
2d3e31073c062273f99cf6b20dabc60ab8255bcb
|
[
"MIT"
] |
permissive
|
creativepsyco/django-analytics
|
69709fe864d65829421efc24575f24c094ee686f
|
b058814728489f65f9cfdd694bbf2aed8c9fd58a
|
refs/heads/master
| 2020-04-05T23:06:24.401559
| 2014-02-09T17:16:10
| 2014-02-09T17:16:10
| 16,665,231
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 5,322
|
py
|
# Django settings for django_analytics project.
DEBUG = True
TEMPLATE_DEBUG = DEBUG
ADMINS = (
# ('Your Name', 'your_email@example.com'),
)
MANAGERS = ADMINS
DATABASES = {
'default': {
'ENGINE': 'django.db.backends', # Add 'postgresql_psycopg2', 'mysql', 'sqlite3' or 'oracle'.
'NAME': '', # Or path to database file if using sqlite3.
# The following settings are not used with sqlite3:
'USER': '',
'PASSWORD': '',
'HOST': '', # Empty for localhost through domain sockets or '127.0.0.1' for localhost through TCP.
'PORT': '', # Set to empty string for default.
}
}
# Hosts/domain names that are valid for this site; required if DEBUG is False
# See https://docs.djangoproject.com/en/1.5/ref/settings/#allowed-hosts
ALLOWED_HOSTS = []
# Local time zone for this installation. Choices can be found here:
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
# although not all choices may be available on all operating systems.
# In a Windows environment this must be set to your system time zone.
TIME_ZONE = 'Asia/Singapore'
# Language code for this installation. All choices can be found here:
# http://www.i18nguy.com/unicode/language-identifiers.html
LANGUAGE_CODE = 'en-us'
SITE_ID = 1
# If you set this to False, Django will make some optimizations so as not
# to load the internationalization machinery.
USE_I18N = True
# If you set this to False, Django will not format dates, numbers and
# calendars according to the current locale.
USE_L10N = True
# If you set this to False, Django will not use timezone-aware datetimes.
USE_TZ = True
# Absolute filesystem path to the directory that will hold user-uploaded files.
# Example: "/var/www/example.com/media/"
MEDIA_ROOT = ''
# URL that handles the media served from MEDIA_ROOT. Make sure to use a
# trailing slash.
# Examples: "http://example.com/media/", "http://media.example.com/"
MEDIA_URL = ''
# Absolute path to the directory static files should be collected to.
# Don't put anything in this directory yourself; store your static files
# in apps' "static/" subdirectories and in STATICFILES_DIRS.
# Example: "/var/www/example.com/static/"
STATIC_ROOT = ''
# URL prefix for static files.
# Example: "http://example.com/static/", "http://static.example.com/"
STATIC_URL = '/static/'
# Additional locations of static files
STATICFILES_DIRS = (
# Put strings here, like "/home/html/static" or "C:/www/django/static".
# Always use forward slashes, even on Windows.
# Don't forget to use absolute paths, not relative paths.
)
# List of finder classes that know how to find static files in
# various locations.
STATICFILES_FINDERS = (
'django.contrib.staticfiles.finders.FileSystemFinder',
'django.contrib.staticfiles.finders.AppDirectoriesFinder',
# 'django.contrib.staticfiles.finders.DefaultStorageFinder',
)
# List of callables that know how to import templates from various sources.
TEMPLATE_LOADERS = (
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
# 'django.template.loaders.eggs.Loader',
)
MIDDLEWARE_CLASSES = (
'django.middleware.common.CommonMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
# Uncomment the next line for simple clickjacking protection:
# 'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
ROOT_URLCONF = 'django_analytics.urls'
# Python dotted path to the WSGI application used by Django's runserver.
WSGI_APPLICATION = 'django_analytics.wsgi.application'
TEMPLATE_DIRS = (
# Put strings here, like "/home/html/django_templates" or "C:/www/django/templates".
# Always use forward slashes, even on Windows.
# Don't forget to use absolute paths, not relative paths.
)
INSTALLED_APPS = (
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'django.contrib.messages',
'django.contrib.staticfiles',
'django.contrib.admin',
'report'
# Uncomment the next line to enable admin documentation:
# 'django.contrib.admindocs',
)
SESSION_SERIALIZER = 'django.contrib.sessions.serializers.JSONSerializer'
# A sample logging configuration. The only tangible logging
# performed by this configuration is to send an email to
# the site admins on every HTTP 500 error when DEBUG=False.
# See http://docs.djangoproject.com/en/dev/topics/logging for
# more details on how to customize your logging configuration.
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'filters': {
'require_debug_false': {
'()': 'django.utils.log.RequireDebugFalse'
}
},
'handlers': {
'mail_admins': {
'level': 'ERROR',
'filters': ['require_debug_false'],
'class': 'django.utils.log.AdminEmailHandler'
}
},
'loggers': {
'django.request': {
'handlers': ['mail_admins'],
'level': 'ERROR',
'propagate': True,
},
}
}
from local_settings import *
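# Added sketch (not from this repo): local_settings.py is expected to supply
# the deployment-specific values missing above, e.g. (hypothetical contents):
#
#     SECRET_KEY = 'replace-with-a-real-key'
#     DEBUG = False
#     ALLOWED_HOSTS = ['analytics.example.com']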
|
[
"mohit.kanwal@gmail.com"
] |
mohit.kanwal@gmail.com
|
3f2d3b7bfa7aa5de1b5808bed56689912eac88a7
|
b09529aa860795a29ac00f03319115f2d5054e98
|
/com/tlz/python-storage/python-mysql/PyDBPool.py
|
4104026c674afdd0d3ab012c1aa8cd8a6848081d
|
[] |
no_license
|
283938162/FirstPython
|
481425096368a4580b668f3e5b4d33e630e68b86
|
c7cdfb33375ca0122ed107c1d70a72397ea3558b
|
refs/heads/master
| 2021-09-12T15:54:50.947915
| 2018-04-18T10:05:47
| 2018-04-18T10:05:47
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,036
|
py
|
import pymysql
from DBUtils.PooledDB import PooledDB
'''
python object
http://www.runoob.com/python/python-object.html
'''
mysqlInfo = {
"host": '39.108.231.238',
"user": 'aliyun',
"passwd": 'liu@2014',
"dbname": 'DBTest',
"port": 3306,
"charset": 'utf8'
}
class PyDBPool:
__pool = None
    # Attributes set in the constructor are available throughout the instance
    def __init__(self) -> None:
        # Constructor: create the database connection and its cursor
        self.conn = PyDBPool.getMysqlConn(self)
        self.cursor = self.conn.cursor()
    # Connect through the database connection pool.
    # self refers to the class instance; it is required when defining methods,
    # even though it is not passed explicitly at call time.
    # Creating the pool on every instantiation is wasteful; a static method
    # that builds it once is enough.
# def getMysqlConn(self):
# if PyDBPool.__pool is None:
# __pool = PooledDB(creator = pymysql, mincached = 1, maxcached = 20, host = mysqlInfo['host'],
# user = mysqlInfo['user'], passwd = mysqlInfo['passwd'], db = mysqlInfo['dbname'],
# port = mysqlInfo['port'], charset = mysqlInfo['charset'])
# print("__pool :", __pool)
# print("数据库连接池创建成功!")
# return __pool.connection()
#
#
    @staticmethod  # Declared static via the decorator so the pool is created only once, similar to Java's static{} block
    def getMysqlConn(self):
        if PyDBPool.__pool is None:
            # Create the pool once and cache it on the class
            PyDBPool.__pool = PooledDB(creator = pymysql, mincached = 1, maxcached = 20, host = mysqlInfo['host'],
                                       user = mysqlInfo['user'], passwd = mysqlInfo['passwd'], db = mysqlInfo['dbname'],
                                       port = mysqlInfo['port'], charset = mysqlInfo['charset'])
            print("__pool :", PyDBPool.__pool)
            print("Database connection pool created successfully!")
        return PyDBPool.__pool.connection()
    # Release connection resources
def dispose(self):
self.cursor.close()
self.conn.close()
    # Insert/update/delete SQL
def update(self, sql):
print("sql = ", sql)
try:
num = self.cursor.execute(sql)
            if sql[0] == 'd':
                print("Data deleted successfully!")
            elif sql[0] == 'i':
                print("Data inserted successfully!")
            elif sql[0] == 'u':
                print("Data updated successfully!")
self.conn.commit()
return num
except Exception as e:
print(e)
    # Query
def select(self, sql):
print("sql = ", sql)
self.cursor.execute(sql)
result = self.cursor.fetchall()
return result
if __name__ == '__main__':
dbpool = PyDBPool()
sql = "select * from emp"
# result = dbpool.select(sql)
# print("result = ", result)
    # Insert data
# sql_insert = "insert into emp(empno,ename,job) VALUES ('%d','%s','%s')"%(101,'张三丰','掌门人');
# dbpool.update(sql_insert)
    # Update data (oddly, if more than one record with ename='张三丰' exists, this raises (1062, "Duplicate entry '111' for key 'PRIMARY'"))
sql_update = "update emp set empno = '%d' where ename = '%s'" % (1999, '张三丰')
dbpool.update(sql_update)
    # Delete data
# sql_delete = "delete from emp where empno = '%d'" % (100)
# dbpool.update(sql_delete)
result = dbpool.select(sql)
print("result = ", result)
    # Release resources
dbpool.dispose()
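# Added sketch (not in the original file): pymysql supports parameterized
# queries, which avoids assembling SQL with string formatting as above.
# select_by_name is a hypothetical helper name.
def select_by_name(pool, ename):
    # %s placeholders are filled and escaped by the driver
    pool.cursor.execute("select * from emp where ename = %s", (ename,))
    return pool.cursor.fetchall()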
|
[
"283938162@qq.com"
] |
283938162@qq.com
|
a024e5e80b9084277bff7e1aac07c1cfe8608543
|
f8ff84f02d6dfa66d003890c4f51ea575232ba93
|
/cinder/cinder/tests/unit/test_emc_vmax.py
|
322911710e286d00e473632ee464caf4e137f7e5
|
[
"Apache-2.0"
] |
permissive
|
zarson/stack
|
8d341463bdf0136447bf1ada5be943df8ba55a4b
|
827003bc566ed992f754618063a771694e51cfca
|
refs/heads/master
| 2021-06-03T00:49:19.075199
| 2016-05-12T07:45:35
| 2016-05-12T07:45:35
| 58,616,957
| 0
| 1
| null | 2020-07-24T01:59:08
| 2016-05-12T07:08:17
|
Python
|
UTF-8
|
Python
| false
| false
| 322,901
|
py
|
# Copyright (c) 2012 - 2015 EMC Corporation, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os
import shutil
import tempfile
import time
from xml.dom import minidom
import mock
from oslo_service import loopingcall
from oslo_utils import units
import six
from cinder import exception
from cinder.i18n import _
from cinder.objects import fields
from cinder import test
from cinder.volume import configuration as conf
from cinder.volume.drivers.emc import emc_vmax_common
from cinder.volume.drivers.emc import emc_vmax_fast
from cinder.volume.drivers.emc import emc_vmax_fc
from cinder.volume.drivers.emc import emc_vmax_iscsi
from cinder.volume.drivers.emc import emc_vmax_masking
from cinder.volume.drivers.emc import emc_vmax_provision
from cinder.volume.drivers.emc import emc_vmax_provision_v3
from cinder.volume.drivers.emc import emc_vmax_utils
from cinder.volume import volume_types
CINDER_EMC_CONFIG_DIR = '/etc/cinder/'
class EMC_StorageVolume(dict):
pass
class CIM_StorageExtent(dict):
pass
class SE_InitiatorMaskingGroup(dict):
pass
class SE_ConcreteJob(dict):
pass
class SE_StorageHardwareID(dict):
pass
class CIM_ReplicationServiceCapabilities(dict):
pass
class SYMM_SrpStoragePool(dict):
pass
class SYMM_LunMasking(dict):
pass
class CIM_DeviceMaskingGroup(dict):
pass
class EMC_LunMaskingSCSIProtocolController(dict):
pass
class CIM_TargetMaskingGroup(dict):
pass
class EMC_StorageHardwareID(dict):
pass
class CIM_IPProtocolEndpoint(dict):
pass
class SE_ReplicationSettingData(dict):
def __init__(self, *args, **kwargs):
self['DefaultInstance'] = self.createInstance()
def createInstance(self):
self.DesiredCopyMethodology = 0
class Fake_CIMProperty(object):
def fake_getCIMProperty(self):
cimproperty = Fake_CIMProperty()
cimproperty.value = True
return cimproperty
def fake_getBlockSizeCIMProperty(self):
cimproperty = Fake_CIMProperty()
cimproperty.value = '512'
return cimproperty
def fake_getConsumableBlocksCIMProperty(self):
cimproperty = Fake_CIMProperty()
cimproperty.value = '12345'
return cimproperty
def fake_getIsConcatenatedCIMProperty(self):
cimproperty = Fake_CIMProperty()
cimproperty.value = True
return cimproperty
def fake_getIsCompositeCIMProperty(self):
cimproperty = Fake_CIMProperty()
cimproperty.value = False
return cimproperty
def fake_getTotalManagedSpaceCIMProperty(self):
cimproperty = Fake_CIMProperty()
cimproperty.value = '20000000000'
return cimproperty
def fake_getRemainingManagedSpaceCIMProperty(self):
cimproperty = Fake_CIMProperty()
cimproperty.value = '10000000000'
return cimproperty
def fake_getElementNameCIMProperty(self, name):
cimproperty = Fake_CIMProperty()
cimproperty.value = name
return cimproperty
def fake_getSupportedReplicationTypes(self):
cimproperty = Fake_CIMProperty()
cimproperty.value = [2, 10]
return cimproperty
def fake_getipv4address(self):
cimproperty = Fake_CIMProperty()
cimproperty.key = 'IPv4Address'
cimproperty.value = '10.10.10.10'
return cimproperty
class Fake_CIM_TierPolicyServiceCapabilities(object):
def fake_getpolicyinstance(self):
classinstance = Fake_CIM_TierPolicyServiceCapabilities()
classcimproperty = Fake_CIMProperty()
cimproperty = classcimproperty.fake_getCIMProperty()
cimproperties = {u'SupportsTieringPolicies': cimproperty}
classinstance.properties = cimproperties
return classinstance
class FakeCIMInstanceName(dict):
def fake_getinstancename(self, classname, bindings):
instancename = FakeCIMInstanceName()
for key in bindings:
instancename[key] = bindings[key]
instancename.classname = classname
instancename.namespace = 'root/emc'
return instancename
class FakeDB(object):
def volume_update(self, context, volume_id, model_update):
pass
def volume_get(self, context, volume_id):
conn = FakeEcomConnection()
objectpath = {}
objectpath['CreationClassName'] = 'Symm_StorageVolume'
if volume_id == 'vol1':
device_id = '1'
objectpath['DeviceID'] = device_id
else:
objectpath['DeviceID'] = volume_id
return conn.GetInstance(objectpath)
def volume_get_all_by_group(self, context, group_id):
volumes = []
volumes.append(EMCVMAXCommonData.test_source_volume)
return volumes
def consistencygroup_get(self, context, cg_group_id):
return EMCVMAXCommonData.test_CG
def snapshot_get_all_for_cgsnapshot(self, context, cgsnapshot_id):
snapshots = []
snapshots.append(EMCVMAXCommonData.test_snapshot)
return snapshots
class EMCVMAXCommonData(object):
wwpn1 = "123456789012345"
wwpn2 = "123456789054321"
connector = {'ip': '10.0.0.2',
'initiator': 'iqn.1993-08.org.debian: 01: 222',
'wwpns': [wwpn1, wwpn2],
'wwnns': ["223456789012345", "223456789054321"],
'host': 'fakehost'}
target_wwns = [wwn[::-1] for wwn in connector['wwpns']]
fabric_name_prefix = "fakeFabric"
end_point_map = {connector['wwpns'][0]: [target_wwns[0]],
connector['wwpns'][1]: [target_wwns[1]]}
device_map = {}
for wwn in connector['wwpns']:
fabric_name = ''.join([fabric_name_prefix,
wwn[-2:]])
target_wwn = wwn[::-1]
fabric_map = {'initiator_port_wwn_list': [wwn],
'target_port_wwn_list': [target_wwn]
}
device_map[fabric_name] = fabric_map
default_storage_group = (
u'//10.10.10.10/root/emc: SE_DeviceMaskingGroup.InstanceID='
'"SYMMETRIX+000198700440+OS_default_GOLD1_SG"')
storage_system = 'SYMMETRIX+000195900551'
storage_system_v3 = 'SYMMETRIX-+-000197200056'
port_group = 'OS-portgroup-PG'
lunmaskctrl_id = (
'SYMMETRIX+000195900551+OS-fakehost-gold-I-MV')
lunmaskctrl_name = (
'OS-fakehost-gold-I-MV')
initiatorgroup_id = (
'SYMMETRIX+000195900551+OS-fakehost-IG')
initiatorgroup_name = 'OS-fakehost-IG'
initiatorgroup_creationclass = 'SE_InitiatorMaskingGroup'
iscsi_initiator = 'iqn.1993-08.org.debian'
storageextent_creationclass = 'CIM_StorageExtent'
initiator1 = 'iqn.1993-08.org.debian: 01: 1a2b3c4d5f6g'
stconf_service_creationclass = 'Symm_StorageConfigurationService'
ctrlconf_service_creationclass = 'Symm_ControllerConfigurationService'
elementcomp_service_creationclass = 'Symm_ElementCompositionService'
storreloc_service_creationclass = 'Symm_StorageRelocationService'
replication_service_creationclass = 'EMC_ReplicationService'
vol_creationclass = 'Symm_StorageVolume'
pool_creationclass = 'Symm_VirtualProvisioningPool'
lunmask_creationclass = 'Symm_LunMaskingSCSIProtocolController'
lunmask_creationclass2 = 'Symm_LunMaskingView'
hostedservice_creationclass = 'CIM_HostedService'
policycapability_creationclass = 'CIM_TierPolicyServiceCapabilities'
policyrule_creationclass = 'Symm_TierPolicyRule'
assoctierpolicy_creationclass = 'CIM_StorageTier'
storagepool_creationclass = 'Symm_VirtualProvisioningPool'
srpstoragepool_creationclass = 'Symm_SRPStoragePool'
storagegroup_creationclass = 'CIM_DeviceMaskingGroup'
hardwareid_creationclass = 'EMC_StorageHardwareID'
replicationgroup_creationclass = 'CIM_ReplicationGroup'
storagepoolid = 'SYMMETRIX+000195900551+U+gold'
storagegroupname = 'OS-fakehost-gold-I-SG'
defaultstoragegroupname = 'OS_default_GOLD1_SG'
storagevolume_creationclass = 'EMC_StorageVolume'
policyrule = 'gold'
poolname = 'gold'
totalmanagedspace_bits = '1000000000000'
subscribedcapacity_bits = '500000000000'
totalmanagedspace_gbs = 931
subscribedcapacity_gbs = 466
fake_host = 'HostX@Backend#gold+1234567891011'
fake_host_v3 = 'HostX@Backend#Bronze+SRP_1+1234567891011'
fake_host_2_v3 = 'HostY@Backend#SRP_1+1234567891011'
unit_creationclass = 'CIM_ProtocolControllerForUnit'
storage_type = 'gold'
keybindings = {'CreationClassName': u'Symm_StorageVolume',
'SystemName': u'SYMMETRIX+000195900551',
'DeviceID': u'1',
'SystemCreationClassName': u'Symm_StorageSystem'}
keybindings2 = {'CreationClassName': u'Symm_StorageVolume',
'SystemName': u'SYMMETRIX+000195900551',
'DeviceID': u'99999',
'SystemCreationClassName': u'Symm_StorageSystem'}
provider_location = {'classname': 'Symm_StorageVolume',
'keybindings': keybindings}
provider_location2 = {'classname': 'Symm_StorageVolume',
'keybindings': keybindings2}
provider_location_multi_pool = {'classname': 'Symm_StorageVolume',
'keybindings': keybindings,
'version': '2.2.0'}
block_size = 512
majorVersion = 1
minorVersion = 2
revNumber = 3
metaHead_volume = {'DeviceID': 10,
'ConsumableBlocks': 1000}
meta_volume1 = {'DeviceID': 11,
'ConsumableBlocks': 200}
meta_volume2 = {'DeviceID': 12,
'ConsumableBlocks': 300}
properties = {'ConsumableBlocks': '12345',
'BlockSize': '512'}
test_volume = {'name': 'vol1',
'size': 1,
'volume_name': 'vol1',
'id': '1',
'device_id': '1',
'provider_auth': None,
'project_id': 'project',
'display_name': 'vol1',
'display_description': 'test volume',
'volume_type_id': 'abc',
'provider_location': six.text_type(provider_location),
'status': 'available',
'host': fake_host,
'NumberOfBlocks': 100,
'BlockSize': block_size
}
test_volume_v2 = {'name': 'vol1',
'size': 1,
'volume_name': 'vol1',
'id': 'vol1',
'device_id': '1',
'provider_auth': None,
'project_id': 'project',
'display_name': 'vol1',
'display_description': 'test volume',
'volume_type_id': 'abc',
'provider_location': six.text_type(provider_location),
'status': 'available',
'host': fake_host,
'NumberOfBlocks': 100,
'BlockSize': block_size
}
test_volume_v3 = {'name': 'vol1',
'size': 1,
'volume_name': 'vol1',
'id': 'vol1',
'device_id': '1',
'provider_auth': None,
'project_id': 'project',
'display_name': 'vol1',
'display_description': 'test volume',
'volume_type_id': 'abc',
'provider_location': six.text_type(provider_location),
'status': 'available',
'host': fake_host_v3,
'NumberOfBlocks': 100,
'BlockSize': block_size
}
test_volume_CG = {'name': 'volInCG',
'consistencygroup_id': 'abc',
'size': 1,
'volume_name': 'volInCG',
'id': 'volInCG',
'device_id': '1',
'provider_auth': None,
'project_id': 'project',
'display_name': 'volInCG',
'display_description':
'test volume in Consistency group',
'volume_type_id': 'abc',
'provider_location': six.text_type(provider_location),
'status': 'available',
'host': fake_host
}
test_volume_CG_v3 = {'name': 'volInCG',
'consistencygroup_id': 'abc',
'size': 1,
'volume_name': 'volInCG',
'id': 'volInCG',
'device_id': '1',
'provider_auth': None,
'project_id': 'project',
'display_name': 'volInCG',
'display_description':
'test volume in Consistency group',
'volume_type_id': 'abc',
'provider_location':
six.text_type(provider_location),
'status': 'available',
'host': fake_host_v3}
test_failed_volume = {'name': 'failed_vol',
'size': 1,
'volume_name': 'failed_vol',
'id': '4',
'device_id': '1',
'provider_auth': None,
'project_id': 'project',
'display_name': 'failed_vol',
'display_description': 'test failed volume',
'volume_type_id': 'abc',
'host': fake_host}
failed_delete_vol = {'name': 'failed_delete_vol',
'size': '-1',
'volume_name': 'failed_delete_vol',
'id': '99999',
'device_id': '99999',
'provider_auth': None,
'project_id': 'project',
'display_name': 'failed delete vol',
'display_description': 'failed delete volume',
'volume_type_id': 'abc',
'provider_location':
six.text_type(provider_location2),
'host': fake_host}
test_source_volume = {'size': 1,
'volume_type_id': 'sourceid',
'display_name': 'sourceVolume',
'name': 'sourceVolume',
'device_id': '1',
'volume_name': 'vmax-154326',
'provider_auth': None,
'project_id': 'project',
'id': '2',
'host': fake_host,
'provider_location':
six.text_type(provider_location),
'display_description': 'snapshot source volume'}
test_source_volume_v3 = {'size': 1,
'volume_type_id': 'sourceid',
'display_name': 'sourceVolume',
'name': 'sourceVolume',
'device_id': '1',
'volume_name': 'vmax-154326',
'provider_auth': None,
'project_id': 'project',
'id': '2',
'host': fake_host_v3,
'provider_location':
six.text_type(provider_location),
'display_description': 'snapshot source volume'}
test_CG = {'name': 'myCG1',
'id': '12345abcde',
'volume_type_id': 'abc',
'status': fields.ConsistencyGroupStatus.AVAILABLE
}
test_snapshot = {'name': 'myCG1',
'id': '12345abcde',
'status': 'available',
'host': fake_host
}
test_CG_snapshot = {'name': 'testSnap',
'id': '12345abcde',
'consistencygroup_id': '123456789',
'status': 'available',
'snapshots': [],
'consistencygroup': test_CG
}
location_info = {'location_info': '000195900551#silver#None',
'storage_protocol': 'ISCSI'}
location_info_v3 = {'location_info': '1234567891011#SRP_1#Bronze#DSS',
'storage_protocol': 'FC'}
test_host = {'capabilities': location_info,
'host': 'fake_host'}
test_host_v3 = {'capabilities': location_info_v3,
'host': fake_host_2_v3}
initiatorNames = ["123456789012345", "123456789054321"]
storagegroups = [{'CreationClassName': storagegroup_creationclass,
'ElementName': storagegroupname},
{'CreationClassName': storagegroup_creationclass,
'ElementName': 'OS-SRP_1-Bronze-DSS-SG'}]
test_ctxt = {}
new_type = {}
diff = {}
extra_specs = {'storagetype:pool': u'SRP_1',
'volume_backend_name': 'V3_BE',
'storagetype:workload': u'DSS',
'storagetype:slo': u'Bronze',
'storagetype:array': u'1234567891011',
'isV3': True,
'portgroupname': u'OS-portgroup-PG'}
remainingSLOCapacity = '123456789'
SYNCHRONIZED = 4
UNSYNCHRONIZED = 3
class FakeLookupService(object):
def get_device_mapping_from_network(self, initiator_wwns, target_wwns):
return EMCVMAXCommonData.device_map
class FakeEcomConnection(object):
def __init__(self, *args, **kwargs):
self.data = EMCVMAXCommonData()
def InvokeMethod(self, MethodName, Service, ElementName=None, InPool=None,
ElementType=None, Size=None,
SyncType=None, SourceElement=None, TargetElement=None,
Operation=None, Synchronization=None,
TheElements=None, TheElement=None,
LUNames=None, InitiatorPortIDs=None, DeviceAccesses=None,
ProtocolControllers=None,
MaskingGroup=None, Members=None,
HardwareId=None, ElementSource=None, EMCInPools=None,
CompositeType=None, EMCNumberOfMembers=None,
EMCBindElements=None,
InElements=None, TargetPool=None, RequestedState=None,
ReplicationGroup=None, ReplicationType=None,
ReplicationSettingData=None, GroupName=None, Force=None,
RemoveElements=None, RelationshipName=None,
SourceGroup=None, TargetGroup=None, Goal=None,
Type=None, EMCSRP=None, EMCSLO=None, EMCWorkload=None,
EMCCollections=None, InitiatorMaskingGroup=None,
DeviceMaskingGroup=None, TargetMaskingGroup=None,
ProtocolController=None, StorageID=None, IDType=None,
WaitForCopyState=None, Collections=None):
rc = 0
myjob = SE_ConcreteJob()
myjob.classname = 'SE_ConcreteJob'
myjob['InstanceID'] = '9999'
myjob['status'] = 'success'
myjob['type'] = ElementName
if Size == -1073741824 and (
MethodName == 'CreateOrModifyCompositeElement'):
rc = 0
myjob = SE_ConcreteJob()
myjob.classname = 'SE_ConcreteJob'
myjob['InstanceID'] = '99999'
myjob['status'] = 'success'
myjob['type'] = 'failed_delete_vol'
if ElementName == 'failed_vol' and (
MethodName == 'CreateOrModifyElementFromStoragePool'):
rc = 10
myjob['status'] = 'failure'
elif TheElements and TheElements[0]['DeviceID'] == '99999' and (
MethodName == 'ReturnElementsToStoragePool'):
rc = 10
myjob['status'] = 'failure'
elif HardwareId:
rc = 0
targetendpoints = {}
endpoints = []
endpoint = {}
endpoint['Name'] = (EMCVMAXCommonData.end_point_map[
EMCVMAXCommonData.connector['wwpns'][0]])
endpoints.append(endpoint)
endpoint2 = {}
endpoint2['Name'] = (EMCVMAXCommonData.end_point_map[
EMCVMAXCommonData.connector['wwpns'][1]])
endpoints.append(endpoint2)
targetendpoints['TargetEndpoints'] = endpoints
return rc, targetendpoints
elif ReplicationType and (
MethodName == 'GetDefaultReplicationSettingData'):
rc = 0
rsd = SE_ReplicationSettingData()
rsd['DefaultInstance'] = SE_ReplicationSettingData()
return rc, rsd
if MethodName == 'CreateStorageHardwareID':
ret = {}
rc = 0
ret['HardwareID'] = self.data.iscsi_initiator
return rc, ret
if MethodName == 'GetSupportedSizeRange':
ret = {}
rc = 0
ret['EMCInformationSource'] = 3
ret['EMCRemainingSLOCapacity'] = self.data.remainingSLOCapacity
return rc, ret
elif MethodName == 'GetCompositeElements':
ret = {}
rc = 0
ret['OutElements'] = [self.data.metaHead_volume,
self.data.meta_volume1,
self.data.meta_volume2]
return rc, ret
job = {'Job': myjob}
return rc, job
def EnumerateInstanceNames(self, name):
result = None
if name == 'EMC_StorageConfigurationService':
result = self._enum_stconfsvcs()
elif name == 'EMC_ControllerConfigurationService':
result = self._enum_ctrlconfsvcs()
elif name == 'Symm_ElementCompositionService':
result = self._enum_elemcompsvcs()
elif name == 'Symm_StorageRelocationService':
result = self._enum_storrelocsvcs()
elif name == 'EMC_ReplicationService':
result = self._enum_replicsvcs()
elif name == 'EMC_VirtualProvisioningPool':
result = self._enum_pools()
elif name == 'EMC_StorageVolume':
result = self._enum_storagevolumes()
elif name == 'Symm_StorageVolume':
result = self._enum_storagevolumes()
elif name == 'CIM_StorageVolume':
result = self._enum_storagevolumes()
elif name == 'CIM_ProtocolControllerForUnit':
result = self._enum_unitnames()
elif name == 'EMC_LunMaskingSCSIProtocolController':
result = self._enum_lunmaskctrls()
elif name == 'EMC_StorageProcessorSystem':
result = self._enum_processors()
elif name == 'EMC_StorageHardwareIDManagementService':
result = self._enum_hdwidmgmts()
elif name == 'SE_StorageHardwareID':
result = self._enum_storhdwids()
elif name == 'EMC_StorageSystem':
result = self._enum_storagesystems()
elif name == 'Symm_TierPolicyRule':
result = self._enum_policyrules()
elif name == 'CIM_ReplicationServiceCapabilities':
result = self._enum_repservcpbls()
elif name == 'SE_StorageSynchronized_SV_SV':
result = self._enum_storageSyncSvSv()
elif name == 'Symm_SRPStoragePool':
result = self._enum_srpstoragepool()
else:
result = self._default_enum()
return result
def EnumerateInstances(self, name):
result = None
if name == 'EMC_VirtualProvisioningPool':
result = self._enum_pool_details()
elif name == 'SE_StorageHardwareID':
result = self._enum_storhdwids()
elif name == 'SE_ManagementServerSoftwareIdentity':
result = self._enum_sw_identity()
else:
result = self._default_enum()
return result
def GetInstance(self, objectpath, LocalOnly=False):
try:
name = objectpath['CreationClassName']
except KeyError:
name = objectpath.classname
result = None
if name == 'Symm_StorageVolume':
result = self._getinstance_storagevolume(objectpath)
elif name == 'CIM_ProtocolControllerForUnit':
result = self._getinstance_unit(objectpath)
elif name == 'SE_ConcreteJob':
result = self._getinstance_job(objectpath)
elif name == 'SE_StorageSynchronized_SV_SV':
result = self._getinstance_syncsvsv(objectpath)
elif name == 'Symm_TierPolicyServiceCapabilities':
result = self._getinstance_policycapabilities(objectpath)
elif name == 'CIM_TierPolicyServiceCapabilities':
result = self._getinstance_policycapabilities(objectpath)
elif name == 'SE_InitiatorMaskingGroup':
result = self._getinstance_initiatormaskinggroup(objectpath)
elif name == 'CIM_InitiatorMaskingGroup':
result = self._getinstance_initiatormaskinggroup(objectpath)
elif name == 'SE_StorageHardwareID':
result = self._getinstance_storagehardwareid(objectpath)
elif name == 'CIM_ReplicationGroup':
result = self._getinstance_replicationgroup(objectpath)
elif name == 'Symm_SRPStoragePool':
result = self._getinstance_srpstoragepool(objectpath)
elif name == 'CIM_TargetMaskingGroup':
result = self._getinstance_targetmaskinggroup(objectpath)
elif name == 'CIM_DeviceMaskingGroup':
result = self._getinstance_devicemaskinggroup(objectpath)
elif name == 'EMC_StorageHardwareID':
result = self._getinstance_storagehardwareid(objectpath)
elif name == 'Symm_VirtualProvisioningPool':
result = self._getinstance_pool(objectpath)
elif name == 'Symm_ReplicationServiceCapabilities':
result = self._getinstance_replicationServCapabilities(objectpath)
else:
result = self._default_getinstance(objectpath)
return result
def ModifyInstance(self, objectpath, PropertyList=None):
pass
def DeleteInstance(self, objectpath):
pass
def Associators(self, objectpath, ResultClass='EMC_StorageHardwareID'):
result = None
if '_StorageHardwareID' in ResultClass:
result = self._assoc_hdwid()
elif ResultClass == 'EMC_iSCSIProtocolEndpoint':
result = self._assoc_endpoint()
elif ResultClass == 'EMC_StorageVolume':
result = self._assoc_storagevolume(objectpath)
elif ResultClass == 'Symm_LunMaskingView':
result = self._assoc_maskingview()
elif ResultClass == 'CIM_DeviceMaskingGroup':
result = self._assoc_storagegroup()
elif ResultClass == 'CIM_StorageExtent':
result = self._assoc_storageextent()
elif ResultClass == 'EMC_LunMaskingSCSIProtocolController':
result = self._assoc_lunmaskctrls()
elif ResultClass == 'CIM_TargetMaskingGroup':
result = self._assoc_portgroup()
else:
result = self._default_assoc(objectpath)
return result
def AssociatorNames(self, objectpath,
ResultClass='default', AssocClass='default'):
result = None
if objectpath == 'point_to_storage_instance_names':
result = ['FirstStorageTierInstanceNames']
if ResultClass != 'default':
result = self.ResultClassHelper(ResultClass, objectpath)
if result is None and AssocClass != 'default':
result = self.AssocClassHelper(AssocClass, objectpath)
if result is None:
result = self._default_assocnames(objectpath)
return result
def AssocClassHelper(self, AssocClass, objectpath):
if AssocClass == 'CIM_HostedService':
result = self._assocnames_hostedservice()
elif AssocClass == 'CIM_AssociatedTierPolicy':
result = self._assocnames_assoctierpolicy()
elif AssocClass == 'CIM_OrderedMemberOfCollection':
result = self._enum_storagevolumes()
elif AssocClass == 'CIM_BindsTo':
result = self._assocnames_bindsto()
elif AssocClass == 'CIM_MemberOfCollection':
result = self._assocnames_memberofcollection()
else:
result = None
return result
def ResultClassHelper(self, ResultClass, objectpath):
if ResultClass == 'EMC_LunMaskingSCSIProtocolController':
result = self._assocnames_lunmaskctrl()
elif ResultClass == 'CIM_TierPolicyServiceCapabilities':
result = self._assocnames_policyCapabilities()
elif ResultClass == 'Symm_TierPolicyRule':
result = self._assocnames_policyrule()
elif ResultClass == 'CIM_StoragePool':
result = self._assocnames_storagepool()
elif ResultClass == 'EMC_VirtualProvisioningPool':
result = self._assocnames_storagepool()
elif ResultClass == 'CIM_DeviceMaskingGroup':
result = self._assocnames_storagegroup()
elif ResultClass == 'EMC_StorageVolume':
result = self._enum_storagevolumes()
elif ResultClass == 'Symm_StorageVolume':
result = self._enum_storagevolumes()
elif ResultClass == 'SE_InitiatorMaskingGroup':
result = self._enum_initiatorMaskingGroup()
elif ResultClass == 'CIM_InitiatorMaskingGroup':
result = self._enum_initiatorMaskingGroup()
elif ResultClass == 'CIM_StorageExtent':
result = self._enum_storage_extent()
elif ResultClass == 'SE_StorageHardwareID':
result = self._enum_storhdwids()
elif ResultClass == 'CIM_ReplicationServiceCapabilities':
result = self._enum_repservcpbls()
elif ResultClass == 'CIM_ReplicationGroup':
result = self._enum_repgroups()
elif ResultClass == 'Symm_FCSCSIProtocolEndpoint':
result = self._enum_fcscsiendpoint()
elif ResultClass == 'EMC_FCSCSIProtocolEndpoint':
result = self._enum_fcscsiendpoint()
elif ResultClass == 'Symm_SRPStoragePool':
result = self._enum_srpstoragepool()
elif ResultClass == 'Symm_StoragePoolCapabilities':
result = self._enum_storagepoolcapabilities()
elif ResultClass == 'CIM_storageSetting':
result = self._enum_storagesettings()
elif ResultClass == 'CIM_TargetMaskingGroup':
result = self._assocnames_portgroup()
elif ResultClass == 'CIM_InitiatorMaskingGroup':
result = self._enum_initMaskingGroup()
elif ResultClass == 'Symm_LunMaskingView':
result = self._enum_maskingView()
elif ResultClass == 'EMC_Meta':
result = self._enum_metavolume()
elif ResultClass == 'EMC_FrontEndSCSIProtocolController':
result = self._enum_maskingView()
elif ResultClass == 'CIM_TierPolicyRule':
result = self._assocnames_tierpolicy(objectpath)
else:
result = None
return result
def ReferenceNames(self, objectpath,
ResultClass='CIM_ProtocolControllerForUnit'):
result = None
if ResultClass == 'CIM_ProtocolControllerForUnit':
result = self._ref_unitnames2()
else:
result = self._default_ref(objectpath)
return result
def _ref_unitnames(self):
unitnames = []
unitname = {}
dependent = {}
dependent['CreationClassName'] = self.data.vol_creationclass
dependent['DeviceID'] = self.data.test_volume['id']
dependent['ElementName'] = self.data.test_volume['name']
dependent['SystemName'] = self.data.storage_system
antecedent = {}
antecedent['CreationClassName'] = self.data.lunmask_creationclass
antecedent['DeviceID'] = self.data.lunmaskctrl_id
antecedent['SystemName'] = self.data.storage_system
unitname['Dependent'] = dependent
unitname['Antecedent'] = antecedent
unitname['CreationClassName'] = self.data.unit_creationclass
unitnames.append(unitname)
return unitnames
def mv_entry(self, mvname):
unitname = {}
dependent = {}
dependent['CreationClassName'] = self.data.vol_creationclass
dependent['DeviceID'] = self.data.test_volume['id']
dependent['ElementName'] = self.data.test_volume['name']
dependent['SystemName'] = self.data.storage_system
antecedent = SYMM_LunMasking()
antecedent['CreationClassName'] = self.data.lunmask_creationclass2
antecedent['SystemName'] = self.data.storage_system
antecedent['ElementName'] = mvname
classcimproperty = Fake_CIMProperty()
elementName = (
classcimproperty.fake_getElementNameCIMProperty(mvname))
properties = {u'ElementName': elementName}
antecedent.properties = properties
unitname['Dependent'] = dependent
unitname['Antecedent'] = antecedent
unitname['CreationClassName'] = self.data.unit_creationclass
return unitname
def _ref_unitnames2(self):
unitnames = []
unitname = self.mv_entry('OS-myhost-MV')
unitnames.append(unitname)
# Second masking
unitname2 = self.mv_entry('OS-fakehost-MV')
unitnames.append(unitname2)
# third masking
amended = 'OS-rslong493156848e71b072a17c1c4625e45f75-MV'
unitname3 = self.mv_entry(amended)
unitnames.append(unitname3)
return unitnames
def _default_ref(self, objectpath):
return objectpath
def _assoc_hdwid(self):
assocs = []
assoc = EMC_StorageHardwareID()
assoc['StorageID'] = self.data.connector['initiator']
assoc['SystemName'] = self.data.storage_system
assoc['CreationClassName'] = 'EMC_StorageHardwareID'
assoc.path = assoc
assocs.append(assoc)
for wwpn in self.data.connector['wwpns']:
assoc2 = EMC_StorageHardwareID()
assoc2['StorageID'] = wwpn
assoc2['SystemName'] = self.data.storage_system
assoc2['CreationClassName'] = 'EMC_StorageHardwareID'
assoc2.path = assoc2
assocs.append(assoc2)
assocs.append(assoc)
return assocs
def _assoc_endpoint(self):
assocs = []
assoc = {}
assoc['Name'] = 'iqn.1992-04.com.emc: 50000973f006dd80'
assoc['SystemName'] = self.data.storage_system
assocs.append(assoc)
return assocs
def _assoc_storagegroup(self):
assocs = []
assoc1 = CIM_DeviceMaskingGroup()
assoc1['ElementName'] = self.data.storagegroupname
assoc1['SystemName'] = self.data.storage_system
assoc1['CreationClassName'] = 'CIM_DeviceMaskingGroup'
assoc1.path = assoc1
assocs.append(assoc1)
assoc2 = CIM_DeviceMaskingGroup()
assoc2['ElementName'] = self.data.defaultstoragegroupname
assoc2['SystemName'] = self.data.storage_system
assoc2['CreationClassName'] = 'CIM_DeviceMaskingGroup'
assoc2.path = assoc2
assocs.append(assoc2)
return assocs
def _assoc_portgroup(self):
assocs = []
assoc = CIM_TargetMaskingGroup()
assoc['ElementName'] = self.data.port_group
assoc['SystemName'] = self.data.storage_system
assoc['CreationClassName'] = 'CIM_TargetMaskingGroup'
assoc.path = assoc
assocs.append(assoc)
return assocs
def _assoc_lunmaskctrls(self):
ctrls = []
ctrl = EMC_LunMaskingSCSIProtocolController()
ctrl['CreationClassName'] = self.data.lunmask_creationclass
ctrl['DeviceID'] = self.data.lunmaskctrl_id
ctrl['SystemName'] = self.data.storage_system
ctrl['ElementName'] = self.data.lunmaskctrl_name
ctrl.path = ctrl
ctrls.append(ctrl)
return ctrls
def _assoc_maskingview(self):
assocs = []
assoc = SYMM_LunMasking()
assoc['Name'] = 'myMaskingView'
assoc['SystemName'] = self.data.storage_system
assoc['CreationClassName'] = 'Symm_LunMaskingView'
assoc['DeviceID'] = '1234'
assoc['SystemCreationClassName'] = '1234'
assoc['ElementName'] = 'OS-fakehost-gold-I-MV'
assoc.classname = assoc['CreationClassName']
assoc.path = assoc
assocs.append(assoc)
return assocs
# Added test for EMC_StorageVolume associators
def _assoc_storagevolume(self, objectpath):
assocs = []
if 'type' not in objectpath:
vol = self.data.test_volume
elif objectpath['type'] == 'failed_delete_vol':
vol = self.data.failed_delete_vol
elif objectpath['type'] == 'vol1':
vol = self.data.test_volume
elif objectpath['type'] == 'volInCG':
vol = self.data.test_volume_CG
elif objectpath['type'] == 'appendVolume':
vol = self.data.test_volume
elif objectpath['type'] == 'failed_vol':
vol = self.data.test_failed_volume
else:
vol = self.data.test_volume
vol['DeviceID'] = vol['device_id']
assoc = self._getinstance_storagevolume(vol)
assocs.append(assoc)
return assocs
def _assoc_storageextent(self):
assocs = []
assoc = CIM_StorageExtent()
assoc['Name'] = 'myStorageExtent'
assoc['SystemName'] = self.data.storage_system
assoc['CreationClassName'] = 'CIM_StorageExtent'
assoc.classname = assoc['CreationClassName']
assoc.path = assoc
classcimproperty = Fake_CIMProperty()
isConcatenatedcimproperty = (
classcimproperty.fake_getIsCompositeCIMProperty())
properties = {u'IsConcatenated': isConcatenatedcimproperty}
assoc.properties = properties
assocs.append(assoc)
return assocs
def _default_assoc(self, objectpath):
return objectpath
def _assocnames_lunmaskctrl(self):
return self._enum_lunmaskctrls()
def _assocnames_hostedservice(self):
return self._enum_hostedservice()
def _assocnames_policyCapabilities(self):
return self._enum_policycapabilities()
def _assocnames_policyrule(self):
return self._enum_policyrules()
def _assocnames_assoctierpolicy(self):
return self._enum_assoctierpolicy()
def _assocnames_storagepool(self):
return self._enum_storagepool()
def _assocnames_storagegroup(self):
return self._enum_storagegroup()
def _assocnames_storagevolume(self):
return self._enum_storagevolume()
def _assocnames_portgroup(self):
return self._enum_portgroup()
def _assocnames_memberofcollection(self):
return self._enum_hostedservice()
def _assocnames_bindsto(self):
return self._enum_ipprotocolendpoint()
def _default_assocnames(self, objectpath):
return objectpath
def _getinstance_storagevolume(self, objectpath):
foundinstance = None
instance = EMC_StorageVolume()
vols = self._enum_storagevolumes()
for vol in vols:
if vol['DeviceID'] == objectpath['DeviceID']:
instance = vol
break
if not instance:
foundinstance = None
else:
foundinstance = instance
return foundinstance
def _getinstance_lunmask(self):
lunmask = {}
lunmask['CreationClassName'] = self.data.lunmask_creationclass
lunmask['DeviceID'] = self.data.lunmaskctrl_id
lunmask['SystemName'] = self.data.storage_system
return lunmask
def _getinstance_initiatormaskinggroup(self, objectpath):
initiatorgroup = SE_InitiatorMaskingGroup()
initiatorgroup['CreationClassName'] = (
self.data.initiatorgroup_creationclass)
initiatorgroup['DeviceID'] = self.data.initiatorgroup_id
initiatorgroup['SystemName'] = self.data.storage_system
initiatorgroup['ElementName'] = self.data.initiatorgroup_name
initiatorgroup.path = initiatorgroup
return initiatorgroup
def _getinstance_storagehardwareid(self, objectpath):
hardwareid = SE_StorageHardwareID()
hardwareid['CreationClassName'] = self.data.hardwareid_creationclass
hardwareid['SystemName'] = self.data.storage_system
hardwareid['StorageID'] = self.data.connector['wwpns'][0]
hardwareid.path = hardwareid
return hardwareid
def _getinstance_pool(self, objectpath):
pool = {}
pool['CreationClassName'] = 'Symm_VirtualProvisioningPool'
pool['ElementName'] = self.data.poolname
pool['SystemName'] = self.data.storage_system
pool['TotalManagedSpace'] = self.data.totalmanagedspace_bits
pool['EMCSubscribedCapacity'] = self.data.subscribedcapacity_bits
return pool
def _getinstance_replicationgroup(self, objectpath):
replicationgroup = {}
replicationgroup['CreationClassName'] = (
self.data.replicationgroup_creationclass)
replicationgroup['ElementName'] = '1234bcde'
return replicationgroup
def _getinstance_srpstoragepool(self, objectpath):
srpstoragepool = SYMM_SrpStoragePool()
srpstoragepool['CreationClassName'] = (
self.data.srpstoragepool_creationclass)
srpstoragepool['ElementName'] = 'SRP_1'
classcimproperty = Fake_CIMProperty()
totalManagedSpace = (
classcimproperty.fake_getTotalManagedSpaceCIMProperty())
remainingManagedSpace = (
classcimproperty.fake_getRemainingManagedSpaceCIMProperty())
properties = {u'TotalManagedSpace': totalManagedSpace,
u'RemainingManagedSpace': remainingManagedSpace}
srpstoragepool.properties = properties
return srpstoragepool
def _getinstance_targetmaskinggroup(self, objectpath):
targetmaskinggroup = CIM_TargetMaskingGroup()
targetmaskinggroup['CreationClassName'] = 'CIM_TargetMaskingGroup'
targetmaskinggroup['ElementName'] = self.data.port_group
targetmaskinggroup.path = targetmaskinggroup
return targetmaskinggroup
def _getinstance_devicemaskinggroup(self, objectpath):
targetmaskinggroup = {}
if 'CreationClassName' in objectpath:
targetmaskinggroup['CreationClassName'] = (
objectpath['CreationClassName'])
else:
targetmaskinggroup['CreationClassName'] = (
'CIM_DeviceMaskingGroup')
if 'ElementName' in objectpath:
targetmaskinggroup['ElementName'] = objectpath['ElementName']
else:
targetmaskinggroup['ElementName'] = (
self.data.storagegroupname)
return targetmaskinggroup
def _getinstance_unit(self, objectpath):
unit = {}
dependent = {}
dependent['CreationClassName'] = self.data.vol_creationclass
dependent['DeviceID'] = self.data.test_volume['id']
dependent['ElementName'] = self.data.test_volume['name']
dependent['SystemName'] = self.data.storage_system
antecedent = {}
antecedent['CreationClassName'] = self.data.lunmask_creationclass
antecedent['DeviceID'] = self.data.lunmaskctrl_id
antecedent['SystemName'] = self.data.storage_system
unit['Dependent'] = dependent
unit['Antecedent'] = antecedent
unit['CreationClassName'] = self.data.unit_creationclass
unit['DeviceNumber'] = '1'
return unit
def _getinstance_job(self, jobpath):
jobinstance = {}
jobinstance['InstanceID'] = '9999'
if jobpath['status'] == 'failure':
jobinstance['JobState'] = 10
jobinstance['ErrorCode'] = 99
jobinstance['ErrorDescription'] = 'Failure'
else:
jobinstance['JobState'] = 7
jobinstance['ErrorCode'] = 0
jobinstance['ErrorDescription'] = None
jobinstance['OperationalStatus'] = (2, 17)
return jobinstance
def _getinstance_policycapabilities(self, policycapabilitypath):
instance = Fake_CIM_TierPolicyServiceCapabilities()
fakeinstance = instance.fake_getpolicyinstance()
return fakeinstance
def _getinstance_syncsvsv(self, objectpath):
svInstance = {}
svInstance['SyncedElement'] = 'SyncedElement'
svInstance['SystemElement'] = 'SystemElement'
svInstance['PercentSynced'] = 100
if 'PercentSynced' in objectpath and objectpath['PercentSynced'] < 100:
svInstance['PercentSynced'] = 50
svInstance['CopyState'] = self.data.SYNCHRONIZED
if 'CopyState' in objectpath and (
objectpath['CopyState'] != self.data.SYNCHRONIZED):
svInstance['CopyState'] = self.data.UNSYNCHRONIZED
return svInstance
def _getinstance_replicationServCapabilities(self, objectpath):
repServCpblInstance = SYMM_SrpStoragePool()
classcimproperty = Fake_CIMProperty()
repTypesCimproperty = (
classcimproperty.fake_getSupportedReplicationTypes())
properties = {u'SupportedReplicationTypes': repTypesCimproperty}
repServCpblInstance.properties = properties
return repServCpblInstance
def _getinstance_ipprotocolendpoint(self, objectpath):
return self._enum_ipprotocolendpoint()[0]
def _getinstance_lunmaskingview(self, objectpath):
return self._enum_maskingView()[0]
def _default_getinstance(self, objectpath):
return objectpath
def _enum_stconfsvcs(self):
conf_services = []
conf_service1 = {}
conf_service1['SystemName'] = self.data.storage_system
conf_service1['CreationClassName'] = (
self.data.stconf_service_creationclass)
conf_services.append(conf_service1)
conf_service2 = {}
conf_service2['SystemName'] = self.data.storage_system_v3
conf_service2['CreationClassName'] = (
self.data.stconf_service_creationclass)
conf_services.append(conf_service2)
return conf_services
def _enum_ctrlconfsvcs(self):
conf_services = []
conf_service = {}
conf_service['SystemName'] = self.data.storage_system
conf_service['CreationClassName'] = (
self.data.ctrlconf_service_creationclass)
conf_services.append(conf_service)
conf_service1 = {}
conf_service1['SystemName'] = self.data.storage_system_v3
conf_service1['CreationClassName'] = (
self.data.ctrlconf_service_creationclass)
conf_services.append(conf_service1)
return conf_services
def _enum_elemcompsvcs(self):
comp_services = []
comp_service = {}
comp_service['SystemName'] = self.data.storage_system
comp_service['CreationClassName'] = (
self.data.elementcomp_service_creationclass)
comp_services.append(comp_service)
return comp_services
def _enum_storrelocsvcs(self):
reloc_services = []
reloc_service = {}
reloc_service['SystemName'] = self.data.storage_system
reloc_service['CreationClassName'] = (
self.data.storreloc_service_creationclass)
reloc_services.append(reloc_service)
return reloc_services
def _enum_replicsvcs(self):
replic_services = []
replic_service = {}
replic_service['SystemName'] = self.data.storage_system
replic_service['CreationClassName'] = (
self.data.replication_service_creationclass)
replic_services.append(replic_service)
replic_service2 = {}
replic_service2['SystemName'] = self.data.storage_system_v3
replic_service2['CreationClassName'] = (
self.data.replication_service_creationclass)
replic_services.append(replic_service2)
return replic_services
def _enum_pools(self):
pools = []
pool = {}
pool['InstanceID'] = (
self.data.storage_system + '+U+' + self.data.storage_type)
pool['CreationClassName'] = 'Symm_VirtualProvisioningPool'
pool['ElementName'] = 'gold'
pools.append(pool)
return pools
def _enum_pool_details(self):
pools = []
pool = {}
pool['InstanceID'] = (
self.data.storage_system + '+U+' + self.data.storage_type)
pool['CreationClassName'] = 'Symm_VirtualProvisioningPool'
pool['TotalManagedSpace'] = 12345678
pool['RemainingManagedSpace'] = 123456
pools.append(pool)
return pools
def _enum_storagevolumes(self):
vols = []
vol = EMC_StorageVolume()
vol['Name'] = self.data.test_volume['name']
vol['CreationClassName'] = 'Symm_StorageVolume'
vol['ElementName'] = self.data.test_volume['id']
vol['DeviceID'] = self.data.test_volume['device_id']
vol['Id'] = self.data.test_volume['id']
vol['SystemName'] = self.data.storage_system
vol['NumberOfBlocks'] = self.data.test_volume['NumberOfBlocks']
vol['BlockSize'] = self.data.test_volume['BlockSize']
# Added vol to vol.path
vol['SystemCreationClassName'] = 'Symm_StorageSystem'
vol.path = vol
vol.path.classname = vol['CreationClassName']
classcimproperty = Fake_CIMProperty()
blocksizecimproperty = classcimproperty.fake_getBlockSizeCIMProperty()
consumableBlockscimproperty = (
classcimproperty.fake_getConsumableBlocksCIMProperty())
isCompositecimproperty = (
classcimproperty.fake_getIsCompositeCIMProperty())
properties = {u'ConsumableBlocks': blocksizecimproperty,
u'BlockSize': consumableBlockscimproperty,
u'IsComposite': isCompositecimproperty}
vol.properties = properties
name = {}
name['classname'] = 'Symm_StorageVolume'
keys = {}
keys['CreationClassName'] = 'Symm_StorageVolume'
keys['SystemName'] = self.data.storage_system
keys['DeviceID'] = vol['DeviceID']
keys['SystemCreationClassName'] = 'Symm_StorageSystem'
name['keybindings'] = keys
vol['provider_location'] = str(name)
vols.append(vol)
failed_delete_vol = EMC_StorageVolume()
failed_delete_vol['name'] = 'failed_delete_vol'
failed_delete_vol['CreationClassName'] = 'Symm_StorageVolume'
failed_delete_vol['ElementName'] = 'failed_delete_vol'
failed_delete_vol['DeviceID'] = '99999'
failed_delete_vol['SystemName'] = self.data.storage_system
# Added vol to vol.path
failed_delete_vol['SystemCreationClassName'] = 'Symm_StorageSystem'
failed_delete_vol.path = failed_delete_vol
failed_delete_vol.path.classname = (
failed_delete_vol['CreationClassName'])
vols.append(failed_delete_vol)
failed_vol = EMC_StorageVolume()
failed_vol['name'] = 'failed__vol'
failed_vol['CreationClassName'] = 'Symm_StorageVolume'
failed_vol['ElementName'] = 'failed_vol'
failed_vol['DeviceID'] = '4'
failed_vol['SystemName'] = self.data.storage_system
# Added vol to vol.path
failed_vol['SystemCreationClassName'] = 'Symm_StorageSystem'
failed_vol.path = failed_vol
failed_vol.path.classname = failed_vol['CreationClassName']
name_failed = {}
name_failed['classname'] = 'Symm_StorageVolume'
keys_failed = {}
keys_failed['CreationClassName'] = 'Symm_StorageVolume'
keys_failed['SystemName'] = self.data.storage_system
keys_failed['DeviceID'] = failed_vol['DeviceID']
keys_failed['SystemCreationClassName'] = 'Symm_StorageSystem'
name_failed['keybindings'] = keys_failed
failed_vol['provider_location'] = str(name_failed)
vols.append(failed_vol)
volumeHead = EMC_StorageVolume()
volumeHead.classname = 'Symm_StorageVolume'
blockSize = self.data.block_size
volumeHead['ConsumableBlocks'] = (
self.data.metaHead_volume['ConsumableBlocks'])
volumeHead['BlockSize'] = blockSize
volumeHead['DeviceID'] = self.data.metaHead_volume['DeviceID']
vols.append(volumeHead)
metaMember1 = EMC_StorageVolume()
metaMember1.classname = 'Symm_StorageVolume'
metaMember1['ConsumableBlocks'] = (
self.data.meta_volume1['ConsumableBlocks'])
metaMember1['BlockSize'] = blockSize
metaMember1['DeviceID'] = self.data.meta_volume1['DeviceID']
vols.append(metaMember1)
metaMember2 = EMC_StorageVolume()
metaMember2.classname = 'Symm_StorageVolume'
metaMember2['ConsumableBlocks'] = (
self.data.meta_volume2['ConsumableBlocks'])
metaMember2['BlockSize'] = blockSize
metaMember2['DeviceID'] = self.data.meta_volume2['DeviceID']
vols.append(metaMember2)
return vols
def _enum_initiatorMaskingGroup(self):
        initiatorgroups = []
        initiatorgroup = {}
        initiatorgroup['CreationClassName'] = (
            self.data.initiatorgroup_creationclass)
        initiatorgroup['DeviceID'] = self.data.initiatorgroup_id
        initiatorgroup['SystemName'] = self.data.storage_system
        initiatorgroup['ElementName'] = self.data.initiatorgroup_name
        initiatorgroups.append(initiatorgroup)
        return initiatorgroups
def _enum_storage_extent(self):
storageExtents = []
storageExtent = CIM_StorageExtent()
storageExtent['CreationClassName'] = (
self.data.storageextent_creationclass)
classcimproperty = Fake_CIMProperty()
isConcatenatedcimproperty = (
classcimproperty.fake_getIsConcatenatedCIMProperty())
properties = {u'IsConcatenated': isConcatenatedcimproperty}
storageExtent.properties = properties
storageExtents.append(storageExtent)
return storageExtents
def _enum_lunmaskctrls(self):
ctrls = []
ctrl = {}
ctrl['CreationClassName'] = self.data.lunmask_creationclass
ctrl['DeviceID'] = self.data.lunmaskctrl_id
ctrl['SystemName'] = self.data.storage_system
ctrl['ElementName'] = self.data.lunmaskctrl_name
ctrls.append(ctrl)
return ctrls
def _enum_hostedservice(self):
hostedservices = []
hostedservice = {}
hostedservice['CreationClassName'] = (
self.data.hostedservice_creationclass)
hostedservice['SystemName'] = self.data.storage_system
hostedservice['Name'] = self.data.storage_system
hostedservices.append(hostedservice)
return hostedservices
def _enum_policycapabilities(self):
policycapabilities = []
policycapability = {}
policycapability['CreationClassName'] = (
self.data.policycapability_creationclass)
policycapability['SystemName'] = self.data.storage_system
propertiesList = []
        cim_property = {'is_array': True}
        properties = {u'SupportedTierFeatures': cim_property}
propertiesList.append(properties)
policycapability['Properties'] = propertiesList
policycapabilities.append(policycapability)
return policycapabilities
def _enum_policyrules(self):
policyrules = []
policyrule = {}
policyrule['CreationClassName'] = self.data.policyrule_creationclass
policyrule['SystemName'] = self.data.storage_system
policyrule['PolicyRuleName'] = self.data.policyrule
policyrules.append(policyrule)
return policyrules
def _enum_assoctierpolicy(self):
assoctierpolicies = []
assoctierpolicy = {}
assoctierpolicy['CreationClassName'] = (
self.data.assoctierpolicy_creationclass)
assoctierpolicies.append(assoctierpolicy)
return assoctierpolicies
def _enum_storagepool(self):
storagepools = []
storagepool = {}
storagepool['CreationClassName'] = self.data.storagepool_creationclass
storagepool['InstanceID'] = self.data.storagepoolid
storagepool['ElementName'] = 'gold'
storagepools.append(storagepool)
return storagepools
def _enum_srpstoragepool(self):
storagepools = []
storagepool = {}
storagepool['CreationClassName'] = (
self.data.srpstoragepool_creationclass)
storagepool['InstanceID'] = 'SYMMETRIX-+-000197200056-+-SRP_1'
storagepool['ElementName'] = 'SRP_1'
storagepools.append(storagepool)
return storagepools
def _enum_storagepoolcapabilities(self):
storagepoolcaps = []
storagepoolcap = {}
storagepoolcap['CreationClassName'] = 'Symm_StoragePoolCapabilities'
storagepoolcap['InstanceID'] = 'SYMMETRIX-+-000197200056-+-SRP_1'
storagepoolcaps.append(storagepoolcap)
return storagepoolcaps
def _enum_storagesettings(self):
storagesettings = []
storagesetting = {}
storagesetting['CreationClassName'] = 'CIM_StoragePoolSetting'
storagesetting['InstanceID'] = ('SYMMETRIX-+-000197200056-+-SBronze:'
'DSS-+-F-+-0-+-SR-+-SRP_1')
storagesettings.append(storagesetting)
return storagesettings
def _enum_targetMaskingGroup(self):
targetMaskingGroups = []
targetMaskingGroup = {}
targetMaskingGroup['CreationClassName'] = 'CIM_TargetMaskingGroup'
targetMaskingGroup['ElementName'] = self.data.port_group
targetMaskingGroups.append(targetMaskingGroup)
return targetMaskingGroups
def _enum_initMaskingGroup(self):
initMaskingGroups = []
initMaskingGroup = {}
initMaskingGroup['CreationClassName'] = 'CIM_InitiatorMaskingGroup'
initMaskingGroup['ElementName'] = 'myInitGroup'
initMaskingGroups.append(initMaskingGroup)
return initMaskingGroups
def _enum_storagegroup(self):
storagegroups = []
storagegroup1 = {}
storagegroup1['CreationClassName'] = (
self.data.storagegroup_creationclass)
storagegroup1['ElementName'] = self.data.storagegroupname
storagegroups.append(storagegroup1)
storagegroup2 = {}
storagegroup2['CreationClassName'] = (
self.data.storagegroup_creationclass)
storagegroup2['ElementName'] = self.data.defaultstoragegroupname
storagegroup2['SystemName'] = self.data.storage_system
storagegroups.append(storagegroup2)
storagegroup3 = {}
storagegroup3['CreationClassName'] = (
self.data.storagegroup_creationclass)
storagegroup3['ElementName'] = 'OS-fakehost-SRP_1-Bronze-DSS-SG'
storagegroups.append(storagegroup3)
storagegroup4 = {}
storagegroup4['CreationClassName'] = (
self.data.storagegroup_creationclass)
storagegroup4['ElementName'] = 'OS-SRP_1-Bronze-DSS-SG'
storagegroups.append(storagegroup4)
return storagegroups
def _enum_storagevolume(self):
storagevolumes = []
storagevolume = {}
storagevolume['CreationClassName'] = (
self.data.storagevolume_creationclass)
storagevolumes.append(storagevolume)
return storagevolumes
def _enum_hdwidmgmts(self):
services = []
srv = {}
srv['SystemName'] = self.data.storage_system
services.append(srv)
return services
def _enum_storhdwids(self):
storhdwids = []
hdwid = SE_StorageHardwareID()
hdwid['CreationClassName'] = self.data.hardwareid_creationclass
hdwid['StorageID'] = self.data.connector['wwpns'][0]
hdwid['InstanceID'] = "W-+-" + self.data.connector['wwpns'][0]
hdwid.path = hdwid
storhdwids.append(hdwid)
return storhdwids
def _enum_storagesystems(self):
storagesystems = []
storagesystem = {}
storagesystem['SystemName'] = self.data.storage_system
storagesystem['Name'] = self.data.storage_system
storagesystems.append(storagesystem)
return storagesystems
def _enum_repservcpbls(self):
repservcpbls = []
servcpbl = CIM_ReplicationServiceCapabilities()
servcpbl['CreationClassName'] = 'Symm_ReplicationServiceCapabilities'
servcpbl['InstanceID'] = self.data.storage_system
repservcpbls.append(servcpbl)
return repservcpbls
def _enum_repgroups(self):
repgroups = []
repgroup = {}
repgroup['CreationClassName'] = (
self.data.replicationgroup_creationclass)
repgroups.append(repgroup)
return repgroups
def _enum_fcscsiendpoint(self):
wwns = []
wwn = {}
wwn['Name'] = "5000090000000000"
wwns.append(wwn)
return wwns
def _enum_maskingView(self):
maskingViews = []
maskingView = SYMM_LunMasking()
maskingView['CreationClassName'] = 'Symm_LunMaskingView'
maskingView['ElementName'] = self.data.lunmaskctrl_name
cimproperty = Fake_CIMProperty()
cimproperty.value = self.data.lunmaskctrl_name
properties = {u'ElementName': cimproperty}
maskingView.properties = properties
maskingViews.append(maskingView)
return maskingViews
def _enum_portgroup(self):
portgroups = []
portgroup = {}
portgroup['CreationClassName'] = (
'CIM_TargetMaskingGroup')
portgroup['ElementName'] = self.data.port_group
portgroups.append(portgroup)
return portgroups
def _enum_metavolume(self):
return []
def _enum_storageSyncSvSv(self):
conn = FakeEcomConnection()
sourceVolume = {}
sourceVolume['CreationClassName'] = 'Symm_StorageVolume'
sourceVolume['DeviceID'] = self.data.test_volume['device_id']
sourceInstanceName = conn.GetInstance(sourceVolume)
svInstances = []
svInstance = {}
svInstance['SyncedElement'] = 'SyncedElement'
svInstance['SystemElement'] = sourceInstanceName
svInstance['CreationClassName'] = 'SE_StorageSynchronized_SV_SV'
svInstance['PercentSynced'] = 100
svInstance['CopyState'] = self.data.UNSYNCHRONIZED
svInstances.append(svInstance)
return svInstances
def _enum_sw_identity(self):
swIdentities = []
swIdentity = {}
swIdentity['MajorVersion'] = self.data.majorVersion
swIdentity['MinorVersion'] = self.data.minorVersion
swIdentity['RevisionNumber'] = self.data.revNumber
swIdentities.append(swIdentity)
return swIdentities
def _enum_ipprotocolendpoint(self):
ipprotocolendpoints = []
ipprotocolendpoint = CIM_IPProtocolEndpoint()
ipprotocolendpoint['CreationClassName'] = 'CIM_IPProtocolEndpoint'
ipprotocolendpoint['SystemName'] = self.data.storage_system
classcimproperty = Fake_CIMProperty()
ipv4addresscimproperty = (
classcimproperty.fake_getipv4address())
properties = {u'IPv4Address': ipv4addresscimproperty}
ipprotocolendpoint.properties = properties
ipprotocolendpoint.path = ipprotocolendpoint
ipprotocolendpoints.append(ipprotocolendpoint)
return ipprotocolendpoints
def _default_enum(self):
names = []
name = {}
name['Name'] = 'default'
names.append(name)
return names
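# Test cases for the VMAX iSCSI driver with FAST disabled. setUp() writes a
# minimal XML config file to a temp dir, stubs out the ECOM connection and
# iSCSI discovery, and wires the driver to the FakeEcomConnection above.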
class EMCVMAXISCSIDriverNoFastTestCase(test.TestCase):
def setUp(self):
self.data = EMCVMAXCommonData()
self.tempdir = tempfile.mkdtemp()
super(EMCVMAXISCSIDriverNoFastTestCase, self).setUp()
self.config_file_path = None
self.create_fake_config_file_no_fast()
self.addCleanup(self._cleanup)
configuration = conf.Configuration(None)
configuration.append_config_values = mock.Mock(return_value=0)
configuration.config_group = 'ISCSINoFAST'
configuration.cinder_emc_config_file = self.config_file_path
        self.stubs.Set(configuration, 'safe_get',
                       self.fake_safe_get({'driver_use_ssl': True,
                                           'volume_backend_name':
                                           'ISCSINoFAST'}))
self.stubs.Set(emc_vmax_iscsi.EMCVMAXISCSIDriver,
'smis_do_iscsi_discovery',
self.fake_do_iscsi_discovery)
self.stubs.Set(emc_vmax_common.EMCVMAXCommon, '_get_ecom_connection',
self.fake_ecom_connection)
instancename = FakeCIMInstanceName()
self.stubs.Set(emc_vmax_utils.EMCVMAXUtils, 'get_instance_name',
instancename.fake_getinstancename)
self.stubs.Set(time, 'sleep',
self.fake_sleep)
self.stubs.Set(emc_vmax_utils.EMCVMAXUtils, 'isArrayV3',
self.fake_is_v3)
driver = emc_vmax_iscsi.EMCVMAXISCSIDriver(configuration=configuration)
driver.db = FakeDB()
self.driver = driver
self.driver.utils = emc_vmax_utils.EMCVMAXUtils(object)
def fake_safe_get(self, values):
def _safe_get(key):
return values.get(key)
return _safe_get
def create_fake_config_file_no_fast(self):
doc = minidom.Document()
emc = doc.createElement("EMC")
doc.appendChild(emc)
doc = self.add_array_info(doc, emc)
filename = 'cinder_emc_config_ISCSINoFAST.xml'
self.config_file_path = self.tempdir + '/' + filename
        with open(self.config_file_path, 'w') as f:
            doc.writexml(f)
def create_fake_config_file_no_fast_with_interval_retries(self):
doc = minidom.Document()
emc = doc.createElement("EMC")
doc.appendChild(emc)
doc = self.add_array_info(doc, emc)
doc = self.add_interval_and_retries(doc, emc)
filename = 'cinder_emc_config_ISCSINoFAST_int_ret.xml'
config_file_path = self.tempdir + '/' + filename
        with open(config_file_path, 'w') as f:
            doc.writexml(f)
return config_file_path
def create_fake_config_file_no_fast_with_interval(self):
doc = minidom.Document()
emc = doc.createElement("EMC")
doc.appendChild(emc)
doc = self.add_array_info(doc, emc)
doc = self.add_interval_only(doc, emc)
filename = 'cinder_emc_config_ISCSINoFAST_int.xml'
config_file_path = self.tempdir + '/' + filename
        with open(config_file_path, 'w') as f:
            doc.writexml(f)
return config_file_path
def create_fake_config_file_no_fast_with_retries(self):
doc = minidom.Document()
emc = doc.createElement("EMC")
doc.appendChild(emc)
doc = self.add_array_info(doc, emc)
doc = self.add_retries_only(doc, emc)
filename = 'cinder_emc_config_ISCSINoFAST_ret.xml'
config_file_path = self.tempdir + '/' + filename
        with open(config_file_path, 'w') as f:
            doc.writexml(f)
return config_file_path
def add_array_info(self, doc, emc):
array = doc.createElement("Array")
arraytext = doc.createTextNode("1234567891011")
emc.appendChild(array)
array.appendChild(arraytext)
ecomserverip = doc.createElement("EcomServerIp")
ecomserveriptext = doc.createTextNode("1.1.1.1")
emc.appendChild(ecomserverip)
ecomserverip.appendChild(ecomserveriptext)
ecomserverport = doc.createElement("EcomServerPort")
ecomserverporttext = doc.createTextNode("10")
emc.appendChild(ecomserverport)
ecomserverport.appendChild(ecomserverporttext)
ecomusername = doc.createElement("EcomUserName")
ecomusernametext = doc.createTextNode("user")
emc.appendChild(ecomusername)
ecomusername.appendChild(ecomusernametext)
ecompassword = doc.createElement("EcomPassword")
ecompasswordtext = doc.createTextNode("pass")
emc.appendChild(ecompassword)
ecompassword.appendChild(ecompasswordtext)
portgroup = doc.createElement("PortGroup")
portgrouptext = doc.createTextNode(self.data.port_group)
portgroup.appendChild(portgrouptext)
portgroups = doc.createElement("PortGroups")
portgroups.appendChild(portgroup)
emc.appendChild(portgroups)
pool = doc.createElement("Pool")
pooltext = doc.createTextNode("gold")
emc.appendChild(pool)
pool.appendChild(pooltext)
array = doc.createElement("Array")
arraytext = doc.createTextNode("1234567891011")
emc.appendChild(array)
array.appendChild(arraytext)
timeout = doc.createElement("Timeout")
timeouttext = doc.createTextNode("0")
emc.appendChild(timeout)
timeout.appendChild(timeouttext)
return doc
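    # add_array_info() yields a document roughly like:
    #   <EMC>
    #     <Array>1234567891011</Array>
    #     <EcomServerIp>1.1.1.1</EcomServerIp>
    #     <EcomServerPort>10</EcomServerPort>
    #     <EcomUserName>user</EcomUserName>
    #     <EcomPassword>pass</EcomPassword>
    #     <PortGroups><PortGroup>...</PortGroup></PortGroups>
    #     <Pool>gold</Pool>
    #     <Array>1234567891011</Array>
    #     <Timeout>0</Timeout>
    #   </EMC>
    # (Note the Array element is appended twice above.)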
def add_interval_and_retries(self, doc, emc):
interval = doc.createElement("Interval")
intervaltext = doc.createTextNode("5")
emc.appendChild(interval)
interval.appendChild(intervaltext)
retries = doc.createElement("Retries")
retriestext = doc.createTextNode("40")
emc.appendChild(retries)
retries.appendChild(retriestext)
return doc
def add_interval_only(self, doc, emc):
interval = doc.createElement("Interval")
intervaltext = doc.createTextNode("20")
emc.appendChild(interval)
interval.appendChild(intervaltext)
return doc
def add_retries_only(self, doc, emc):
retries = doc.createElement("Retries")
retriestext = doc.createTextNode("70")
emc.appendChild(retries)
retries.appendChild(retriestext)
return doc
# fix for https://bugs.launchpad.net/cinder/+bug/1364232
def create_fake_config_file_1364232(self):
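        # The XML below deliberately embeds a tab ('user\t') and stray
        # whitespace inside element text so the whitespace-stripping fix for
        # bug 1364232 is exercised; the assertions in
        # test_get_volume_stats_1364232 expect the cleaned-up values.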
filename = 'cinder_emc_config_1364232.xml'
config_file_1364232 = self.tempdir + '/' + filename
text_file = open(config_file_1364232, "w")
text_file.write("<?xml version='1.0' encoding='UTF-8'?>\n<EMC>\n"
"<EcomServerIp>10.10.10.10</EcomServerIp>\n"
"<EcomServerPort>5988</EcomServerPort>\n"
"<EcomUserName>user\t</EcomUserName>\n"
"<EcomPassword>password</EcomPassword>\n"
"<PortGroups><PortGroup>OS-PORTGROUP1-PG"
"</PortGroup><PortGroup>OS-PORTGROUP2-PG"
" </PortGroup>\n"
"<PortGroup>OS-PORTGROUP3-PG</PortGroup>"
"<PortGroup>OS-PORTGROUP4-PG</PortGroup>"
"</PortGroups>\n<Array>000198700439"
" \n</Array>\n<Pool>FC_SLVR1\n"
"</Pool>\n<FastPolicy>SILVER1</FastPolicy>\n"
"</EMC>")
text_file.close()
return config_file_1364232
def fake_ecom_connection(self):
conn = FakeEcomConnection()
return conn
def fake_do_iscsi_discovery(self, volume):
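        # Mimics 'iscsiadm -m discovery' output lines of the form
        # '<ip>:<port>,<tpgt> <iqn>' (the stray spaces match the fixture
        # data used elsewhere in these tests).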
output = []
item = '10.10.0.50: 3260,1 iqn.1992-04.com.emc: 50000973f006dd80'
output.append(item)
return output
def fake_sleep(self, seconds):
return
def fake_is_v3(self, conn, serialNumber):
return False
def populate_masking_dict_setup(self):
extraSpecs = {'storagetype:pool': u'gold_pool',
'volume_backend_name': 'GOLD_POOL_BE',
'storagetype:array': u'1234567891011',
'isV3': False,
'portgroupname': u'OS-portgroup-PG',
'storagetype:fastpolicy': u'GOLD'}
vol = {'SystemName': self.data.storage_system}
self.driver.common._find_lun = mock.Mock(
return_value=vol)
self.driver.common.utils.find_controller_configuration_service = (
mock.Mock(return_value=None))
return extraSpecs
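    # Naming convention exercised by the tests below (protocol 'I' = iSCSI):
    #   with FAST:    OS-<shortHostName>-<fastPolicy>-FP-I-SG / -MV
    #   without FAST: OS-<shortHostName>-<poolName>-I-SG / -MV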
def test_populate_masking_dict_fast(self):
extraSpecs = self.populate_masking_dict_setup()
        # If FAST is enabled, the FAST policy name uniquely determines the SG
        # and MV names on the host, along with the protocol (iSCSI -> 'I').
maskingViewDict = self.driver.common._populate_masking_dict(
self.data.test_volume, self.data.connector, extraSpecs)
self.assertEqual(
'OS-fakehost-GOLD-FP-I-SG', maskingViewDict['sgGroupName'])
self.assertEqual(
'OS-fakehost-GOLD-FP-I-MV', maskingViewDict['maskingViewName'])
def test_populate_masking_dict_fast_more_than_14chars(self):
        # A FAST policy name longer than 14 chars is truncated in the SG and
        # MV names.
extraSpecs = self.populate_masking_dict_setup()
extraSpecs['storagetype:fastpolicy'] = 'GOLD_MORE_THAN_FOURTEEN_CHARS'
maskingViewDict = self.driver.common._populate_masking_dict(
self.data.test_volume, self.data.connector, extraSpecs)
self.assertEqual(
'OS-fakehost-GOLD_MO__CHARS-FP-I-SG',
maskingViewDict['sgGroupName'])
self.assertEqual(
'OS-fakehost-GOLD_MO__CHARS-FP-I-MV',
maskingViewDict['maskingViewName'])
def test_populate_masking_dict_no_fast(self):
        # If FAST isn't enabled, the pool name uniquely determines the SG and
        # MV names on the host, along with the protocol (iSCSI -> 'I').
extraSpecs = self.populate_masking_dict_setup()
extraSpecs['storagetype:fastpolicy'] = None
maskingViewDict = self.driver.common._populate_masking_dict(
self.data.test_volume, self.data.connector, extraSpecs)
self.assertEqual(
'OS-fakehost-gold_pool-I-SG', maskingViewDict['sgGroupName'])
self.assertEqual(
'OS-fakehost-gold_pool-I-MV', maskingViewDict['maskingViewName'])
def test_populate_masking_dict_fast_both_exceeding(self):
        # If the FAST policy name exceeds 14 chars and the short host name
        # exceeds 38 chars, both are truncated so the names stay within the
        # 64-character limit.
extraSpecs = self.populate_masking_dict_setup()
        connector = {'host': 'SHORT_HOST_MORE_THAN THIRTY_EIGHT_CHARACTERS'}
extraSpecs['storagetype:fastpolicy'] = (
'GOLD_MORE_THAN_FOURTEEN_CHARACTERS')
maskingViewDict = self.driver.common._populate_masking_dict(
self.data.test_volume, connector, extraSpecs)
self.assertLessEqual(len(maskingViewDict['sgGroupName']), 64)
self.assertLessEqual(len(maskingViewDict['maskingViewName']), 64)
def test_populate_masking_dict_no_fast_both_exceeding(self):
        # If the pool name exceeds 16 chars and the short host name exceeds
        # 38 chars, both are truncated so the names stay within the
        # 64-character limit.
extraSpecs = self.populate_masking_dict_setup()
        connector = {'host': 'SHORT_HOST_MORE_THAN THIRTY_EIGHT_CHARACTERS'}
extraSpecs['storagetype:pool'] = (
'GOLD_POOL_MORE_THAN_SIXTEEN_CHARACTERS')
extraSpecs['storagetype:fastpolicy'] = None
maskingViewDict = self.driver.common._populate_masking_dict(
self.data.test_volume, connector, extraSpecs)
self.assertLessEqual(len(maskingViewDict['sgGroupName']), 64)
self.assertLessEqual(len(maskingViewDict['maskingViewName']), 64)
def test_filter_list(self):
portgroupnames = ['pg3', 'pg1', 'pg4', 'pg2']
portgroupnames = (
self.driver.common.utils._filter_list(portgroupnames))
self.assertEqual(4, len(portgroupnames))
self.assertEqual(['pg1', 'pg2', 'pg3', 'pg4'], sorted(portgroupnames))
portgroupnames = ['pg1']
portgroupnames = (
self.driver.common.utils._filter_list(portgroupnames))
self.assertEqual(1, len(portgroupnames))
self.assertEqual(['pg1'], portgroupnames)
portgroupnames = ['only_pg', '', '', '', '', '']
portgroupnames = (
self.driver.common.utils._filter_list(portgroupnames))
self.assertEqual(1, len(portgroupnames))
self.assertEqual(['only_pg'], portgroupnames)
def test_get_random_pg_from_list(self):
portGroupNames = ['pg1', 'pg2', 'pg3', 'pg4']
portGroupName = (
self.driver.common.utils._get_random_pg_from_list(portGroupNames))
        self.assertIn('pg', portGroupName)
portGroupNames = ['pg1']
portGroupName = (
self.driver.common.utils._get_random_pg_from_list(portGroupNames))
self.assertEqual('pg1', portGroupName)
def test_get_random_portgroup(self):
# 4 portgroups
data = ("<?xml version='1.0' encoding='UTF-8'?>\n<EMC>\n"
"<PortGroups>"
"<PortGroup>OS-PG1</PortGroup>\n"
"<PortGroup>OS-PG2</PortGroup>\n"
"<PortGroup>OS-PG3</PortGroup>\n"
"<PortGroup>OS-PG4</PortGroup>\n"
"</PortGroups>"
"</EMC>")
dom = minidom.parseString(data)
portgroup = self.driver.common.utils._get_random_portgroup(dom)
        self.assertIn('OS-PG', portgroup)
# Duplicate portgroups
data = ("<?xml version='1.0' encoding='UTF-8'?>\n<EMC>\n"
"<PortGroups>"
"<PortGroup>OS-PG1</PortGroup>\n"
"<PortGroup>OS-PG1</PortGroup>\n"
"<PortGroup>OS-PG1</PortGroup>\n"
"<PortGroup>OS-PG2</PortGroup>\n"
"</PortGroups>"
"</EMC>")
dom = minidom.parseString(data)
portgroup = self.driver.common.utils._get_random_portgroup(dom)
        self.assertIn('OS-PG', portgroup)
def test_get_random_portgroup_exception(self):
# Missing PortGroup values
data = ("<?xml version='1.0' encoding='UTF-8'?>\n<EMC>\n"
"<PortGroups>"
"<PortGroup></PortGroup>\n"
"<PortGroup></PortGroup>\n"
"</PortGroups>"
"</EMC>")
dom = minidom.parseString(data)
self.assertRaises(exception.VolumeBackendAPIException,
self.driver.common.utils._get_random_portgroup, dom)
# Missing portgroups
data = ("<?xml version='1.0' encoding='UTF-8'?>\n<EMC>\n"
"<PortGroups>"
"</PortGroups>"
"</EMC>")
dom = minidom.parseString(data)
self.assertRaises(exception.VolumeBackendAPIException,
self.driver.common.utils._get_random_portgroup, dom)
def test_is_sync_complete(self):
conn = self.fake_ecom_connection()
syncname = SE_ConcreteJob()
syncname.classname = 'SE_StorageSynchronized_SV_SV'
syncname['CopyState'] = self.data.UNSYNCHRONIZED
issynched = self.driver.common.utils._is_sync_complete(conn, syncname)
self.assertFalse(issynched)
def test_get_correct_port_group(self):
self.driver.common.conn = self.fake_ecom_connection()
maskingViewInstanceName = {'CreationClassName': 'Symm_LunMaskingView',
'ElementName': 'OS-fakehost-gold-I-MV',
'SystemName': 'SYMMETRIX+000195900551'}
deviceinfodict = {'controller': maskingViewInstanceName}
portgroupname = self.driver.common._get_correct_port_group(
deviceinfodict, self.data.storage_system)
self.assertEqual('OS-portgroup-PG', portgroupname)
def test_generate_unique_trunc_pool(self):
pool_under_16_chars = 'pool_under_16'
pool1 = self.driver.utils.generate_unique_trunc_pool(
pool_under_16_chars)
self.assertEqual(pool_under_16_chars, pool1)
pool_over_16_chars = (
'pool_over_16_pool_over_16')
        # Over 16 chars the name is truncated to the first 8 chars + '_' +
        # the last 7: 'pool_over_16_pool_over_16' -> 'pool_ove_over_16'.
pool2 = self.driver.utils.generate_unique_trunc_pool(
pool_over_16_chars)
self.assertEqual('pool_ove_over_16', pool2)
def test_generate_unique_trunc_host(self):
host_under_38_chars = 'host_under_38_chars'
host1 = self.driver.utils.generate_unique_trunc_host(
host_under_38_chars)
self.assertEqual(host_under_38_chars, host1)
host_over_38_chars = (
'host_over_38_chars_host_over_38_chars_host_over_38_chars')
# Check that the same md5 value is retrieved from multiple calls
host2 = self.driver.utils.generate_unique_trunc_host(
host_over_38_chars)
host3 = self.driver.utils.generate_unique_trunc_host(
host_over_38_chars)
self.assertEqual(host2, host3)
def test_find_ip_protocol_endpoints(self):
conn = self.fake_ecom_connection()
foundIpAddresses = self.driver.common._find_ip_protocol_endpoints(
conn, self.data.storage_system, self.data.port_group)
self.assertEqual('10.10.10.10', foundIpAddresses[0])
def test_find_device_number(self):
host = 'fakehost'
data = (
self.driver.common.find_device_number(self.data.test_volume_v2,
host))
self.assertEqual('OS-fakehost-MV', data['maskingview'])
host = 'bogushost'
data = (
self.driver.common.find_device_number(self.data.test_volume_v2,
host))
self.assertFalse(data)
def test_find_device_number_long_host(self):
# Long host name
host = 'myhost.mydomain.com'
data = (
self.driver.common.find_device_number(self.data.test_volume_v2,
host))
self.assertEqual('OS-myhost-MV', data['maskingview'])
def test_find_device_number_short_name_over_38_chars(self):
# short name over 38 chars
host = 'myShortnameIsOverThirtyEightCharactersLong'
host = self.driver.common.utils.generate_unique_trunc_host(host)
amended = 'OS-' + host + '-MV'
v2_host_over_38 = self.data.test_volume_v2.copy()
# Pool aware scheduler enabled
v2_host_over_38['host'] = host
data = (
self.driver.common.find_device_number(v2_host_over_38,
host))
self.assertEqual(amended, data['maskingview'])
def test_unbind_and_get_volume_from_storage_pool(self):
conn = self.fake_ecom_connection()
common = self.driver.common
common.utils.is_volume_bound_to_pool = mock.Mock(
return_value='False')
storageConfigService = (
common.utils.find_storage_configuration_service(
conn, self.data.storage_system))
volumeInstanceName = (
conn.EnumerateInstanceNames("EMC_StorageVolume")[0])
volumeName = "unbind-vol"
extraSpecs = {'volume_backend_name': 'GOLD_BE',
'isV3': False}
volumeInstance = (
common._unbind_and_get_volume_from_storage_pool(
conn, storageConfigService,
volumeInstanceName, volumeName, extraSpecs))
self.assertEqual(self.data.storage_system,
volumeInstance['SystemName'])
self.assertEqual('1', volumeInstance['ElementName'])
def test_create_hardware_ids(self):
conn = self.fake_ecom_connection()
connector = {
'ip': '10.0.0.2',
'initiator': self.data.iscsi_initiator,
'host': 'fakehost'}
initiatorNames = (
self.driver.common.masking._find_initiator_names(conn, connector))
storageHardwareIDInstanceNames = (
self.driver.common.masking._create_hardware_ids(
conn, initiatorNames, self.data.storage_system))
self.assertEqual(self.data.iscsi_initiator,
storageHardwareIDInstanceNames[0])
def test_get_pool_instance_and_system_name(self):
conn = self.fake_ecom_connection()
# V2 - old '+' separator
storagesystem = {}
storagesystem['SystemName'] = self.data.storage_system
storagesystem['Name'] = self.data.storage_system
pools = conn.EnumerateInstanceNames("EMC_VirtualProvisioningPool")
poolname = 'gold'
poolinstancename, systemname = (
self.driver.common.utils._get_pool_instance_and_system_name(
conn, pools, storagesystem, poolname))
self.assertEqual(self.data.storage_system, systemname)
self.assertEqual(self.data.storagepoolid,
poolinstancename['InstanceID'])
# V3 - note: V2 can also have the '-+-' separator
storagesystem = {}
storagesystem['SystemName'] = self.data.storage_system_v3
storagesystem['Name'] = self.data.storage_system_v3
pools = conn.EnumerateInstanceNames('Symm_SRPStoragePool')
poolname = 'SRP_1'
poolinstancename, systemname = (
self.driver.common.utils._get_pool_instance_and_system_name(
conn, pools, storagesystem, poolname))
self.assertEqual(self.data.storage_system_v3, systemname)
self.assertEqual('SYMMETRIX-+-000197200056-+-SRP_1',
poolinstancename['InstanceID'])
# Invalid poolname
poolname = 'bogus'
poolinstancename, systemname = (
self.driver.common.utils._get_pool_instance_and_system_name(
conn, pools, storagesystem, poolname))
self.assertIsNone(poolinstancename)
self.assertEqual(self.data.storage_system_v3, systemname)
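    # _get_hardware_type maps an initiator string to a hardware ID type:
    # 5 for an iSCSI IQN, 2 for an FC WWPN, and 0 for anything unrecognised.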
def test_get_hardware_type(self):
iqn_initiator = 'iqn.1992-04.com.emc: 50000973f006dd80'
hardwaretypeid = (
self.driver.utils._get_hardware_type(iqn_initiator))
self.assertEqual(5, hardwaretypeid)
wwpn_initiator = '123456789012345'
hardwaretypeid = (
self.driver.utils._get_hardware_type(wwpn_initiator))
self.assertEqual(2, hardwaretypeid)
bogus_initiator = 'bogus'
hardwaretypeid = (
self.driver.utils._get_hardware_type(bogus_initiator))
self.assertEqual(0, hardwaretypeid)
def test_check_if_rollback_action_for_masking_required(self):
conn = self.fake_ecom_connection()
controllerConfigService = (
self.driver.utils.find_controller_configuration_service(
conn, self.data.storage_system))
extraSpecs = {'volume_backend_name': 'GOLD_BE',
'isV3': False,
'storagetype:fastpolicy': 'GOLD1'}
vol = EMC_StorageVolume()
vol['name'] = self.data.test_volume['name']
vol['CreationClassName'] = 'Symm_StorageVolume'
vol['ElementName'] = self.data.test_volume['id']
vol['DeviceID'] = self.data.test_volume['device_id']
vol['Id'] = self.data.test_volume['id']
vol['SystemName'] = self.data.storage_system
vol['NumberOfBlocks'] = self.data.test_volume['NumberOfBlocks']
vol['BlockSize'] = self.data.test_volume['BlockSize']
        # Point .path back at the instance to mimic a pywbem CIMInstance.
vol['SystemCreationClassName'] = 'Symm_StorageSystem'
vol.path = vol
vol.path.classname = vol['CreationClassName']
rollbackDict = {}
rollbackDict['isV3'] = False
rollbackDict['defaultStorageGroupInstanceName'] = (
self.data.default_storage_group)
rollbackDict['sgName'] = self.data.storagegroupname
rollbackDict['volumeName'] = 'vol1'
rollbackDict['fastPolicyName'] = 'GOLD1'
rollbackDict['volumeInstance'] = vol
rollbackDict['controllerConfigService'] = controllerConfigService
rollbackDict['extraSpecs'] = extraSpecs
# Path 1 - The volume is in another storage group that isn't the
# default storage group
expectedmessage = (_("V2 rollback - Volume in another storage "
"group besides default storage group."))
message = (
self.driver.common.masking.
_check_if_rollback_action_for_masking_required(
conn, rollbackDict))
self.assertEqual(expectedmessage, message)
# Path 2 - The volume is not in any storage group
        rollbackDict['sgName'] = 'sg_not_exist'
expectedmessage = (_("V2 rollback, volume is not in any storage "
"group."))
message = (
self.driver.common.masking.
_check_if_rollback_action_for_masking_required(
conn, rollbackDict))
self.assertEqual(expectedmessage, message)
def test_migrate_cleanup(self):
conn = self.fake_ecom_connection()
extraSpecs = {'volume_backend_name': 'GOLD_BE',
'isV3': False,
'storagetype:fastpolicy': 'GOLD1'}
vol = EMC_StorageVolume()
vol['name'] = self.data.test_volume['name']
vol['CreationClassName'] = 'Symm_StorageVolume'
vol['ElementName'] = self.data.test_volume['id']
vol['DeviceID'] = self.data.test_volume['device_id']
vol['Id'] = self.data.test_volume['id']
vol['SystemName'] = self.data.storage_system
vol['NumberOfBlocks'] = self.data.test_volume['NumberOfBlocks']
vol['BlockSize'] = self.data.test_volume['BlockSize']
        # Point .path back at the instance to mimic a pywbem CIMInstance.
vol['SystemCreationClassName'] = 'Symm_StorageSystem'
vol.path = vol
vol.path.classname = vol['CreationClassName']
        # The volume already belongs to the default storage group
return_to_default = self.driver.common._migrate_cleanup(
conn, vol, self.data.storage_system, 'GOLD1',
vol['name'], extraSpecs)
self.assertFalse(return_to_default)
        # The volume does not belong to the default storage group
return_to_default = self.driver.common._migrate_cleanup(
conn, vol, self.data.storage_system, 'BRONZE1',
vol['name'], extraSpecs)
self.assertTrue(return_to_default)
def test_wait_for_job_complete(self):
myjob = SE_ConcreteJob()
myjob.classname = 'SE_ConcreteJob'
myjob['InstanceID'] = '9999'
myjob['status'] = 'success'
myjob['type'] = 'type'
myjob['CreationClassName'] = 'SE_ConcreteJob'
myjob['Job'] = myjob
conn = self.fake_ecom_connection()
self.driver.utils._is_job_finished = mock.Mock(
return_value=True)
rc, errordesc = self.driver.utils.wait_for_job_complete(conn, myjob)
self.assertEqual(0, rc)
self.assertIsNone(errordesc)
self.driver.utils._is_job_finished.assert_called_once_with(
conn, myjob)
self.assertTrue(self.driver.utils._is_job_finished.return_value)
self.driver.utils._is_job_finished.reset_mock()
# Save the original state and restore it after this test
loopingcall_orig = loopingcall.FixedIntervalLoopingCall
loopingcall.FixedIntervalLoopingCall = mock.Mock()
rc, errordesc = self.driver.utils.wait_for_job_complete(conn, myjob)
self.assertEqual(0, rc)
self.assertIsNone(errordesc)
loopingcall.FixedIntervalLoopingCall.assert_called_once_with(
mock.ANY)
loopingcall.FixedIntervalLoopingCall.reset_mock()
loopingcall.FixedIntervalLoopingCall = loopingcall_orig
def test_wait_for_job_complete_bad_job_state(self):
myjob = SE_ConcreteJob()
myjob.classname = 'SE_ConcreteJob'
myjob['InstanceID'] = '9999'
myjob['status'] = 'success'
myjob['type'] = 'type'
myjob['CreationClassName'] = 'SE_ConcreteJob'
myjob['Job'] = myjob
conn = self.fake_ecom_connection()
self.driver.utils._is_job_finished = mock.Mock(
return_value=True)
self.driver.utils._verify_job_state = mock.Mock(
return_value=(-1, 'Job finished with an error'))
rc, errordesc = self.driver.utils.wait_for_job_complete(conn, myjob)
self.assertEqual(-1, rc)
self.assertEqual('Job finished with an error', errordesc)
def test_wait_for_sync(self):
mysync = 'fakesync'
conn = self.fake_ecom_connection()
self.driver.utils._is_sync_complete = mock.Mock(
return_value=True)
rc = self.driver.utils.wait_for_sync(conn, mysync)
self.assertIsNotNone(rc)
self.driver.utils._is_sync_complete.assert_called_once_with(
conn, mysync)
self.assertTrue(self.driver.utils._is_sync_complete.return_value)
self.driver.utils._is_sync_complete.reset_mock()
# Save the original state and restore it after this test
loopingcall_orig = loopingcall.FixedIntervalLoopingCall
loopingcall.FixedIntervalLoopingCall = mock.Mock()
rc = self.driver.utils.wait_for_sync(conn, mysync)
self.assertIsNotNone(rc)
loopingcall.FixedIntervalLoopingCall.assert_called_once_with(
mock.ANY)
loopingcall.FixedIntervalLoopingCall.reset_mock()
loopingcall.FixedIntervalLoopingCall = loopingcall_orig
def test_wait_for_sync_extra_specs(self):
mysync = 'fakesync'
conn = self.fake_ecom_connection()
file_name = (
self.create_fake_config_file_no_fast_with_interval_retries())
extraSpecs = {'volume_backend_name': 'ISCSINoFAST'}
pool = 'gold+1234567891011'
        arrayInfo = self.driver.utils.parse_file_to_get_array_map(
            file_name)
poolRec = self.driver.utils.extract_record(arrayInfo, pool)
extraSpecs = self.driver.common._set_v2_extra_specs(extraSpecs,
poolRec)
self.driver.utils._is_sync_complete = mock.Mock(
return_value=True)
rc = self.driver.utils.wait_for_sync(conn, mysync, extraSpecs)
self.assertIsNotNone(rc)
self.driver.utils._is_sync_complete.assert_called_once_with(
conn, mysync)
self.assertTrue(self.driver.utils._is_sync_complete.return_value)
self.assertEqual(40,
self.driver.utils._get_max_job_retries(extraSpecs))
self.assertEqual(5,
self.driver.utils._get_interval_in_secs(extraSpecs))
self.driver.utils._is_sync_complete.reset_mock()
# Save the original state and restore it after this test
loopingcall_orig = loopingcall.FixedIntervalLoopingCall
loopingcall.FixedIntervalLoopingCall = mock.Mock()
rc = self.driver.utils.wait_for_sync(conn, mysync)
self.assertIsNotNone(rc)
loopingcall.FixedIntervalLoopingCall.assert_called_once_with(
mock.ANY)
loopingcall.FixedIntervalLoopingCall.reset_mock()
loopingcall.FixedIntervalLoopingCall = loopingcall_orig
        if os.path.exists(file_name):
            os.remove(file_name)
# Bug 1395830: _find_lun throws exception when lun is not found.
def test_find_lun(self):
keybindings = {'CreationClassName': u'Symm_StorageVolume',
'SystemName': u'SYMMETRIX+000195900551',
'DeviceID': u'1',
'SystemCreationClassName': u'Symm_StorageSystem'}
provider_location = {'classname': 'Symm_StorageVolume',
'keybindings': keybindings}
volume = EMC_StorageVolume()
volume['name'] = 'vol1'
volume['provider_location'] = six.text_type(provider_location)
self.driver.common.conn = self.driver.common._get_ecom_connection()
findlun = self.driver.common._find_lun(volume)
getinstance = self.driver.common.conn._getinstance_storagevolume(
keybindings)
# Found lun.
self.assertEqual(getinstance, findlun)
keybindings2 = {'CreationClassName': u'Symm_StorageVolume',
'SystemName': u'SYMMETRIX+000195900551',
'DeviceID': u'9',
'SystemCreationClassName': u'Symm_StorageSystem'}
provider_location2 = {'classname': 'Symm_StorageVolume',
'keybindings': keybindings2}
volume2 = EMC_StorageVolume()
volume2['name'] = 'myVol'
volume2['provider_location'] = six.text_type(provider_location2)
verify_orig = self.driver.common.conn.GetInstance
self.driver.common.conn.GetInstance = mock.Mock(
return_value=None)
findlun2 = self.driver.common._find_lun(volume2)
# Not found.
self.assertIsNone(findlun2)
self.driver.utils.get_instance_name(
provider_location2['classname'],
keybindings2)
self.driver.common.conn.GetInstance.assert_called_once_with(
keybindings2)
self.driver.common.conn.GetInstance.reset_mock()
self.driver.common.conn.GetInstance = verify_orig
keybindings3 = {'CreationClassName': u'Symm_StorageVolume',
'SystemName': u'SYMMETRIX+000195900551',
'DeviceID': u'9999',
'SystemCreationClassName': u'Symm_StorageSystem'}
provider_location3 = {'classname': 'Symm_StorageVolume',
'keybindings': keybindings3}
instancename3 = self.driver.utils.get_instance_name(
provider_location3['classname'],
keybindings3)
# Error other than not found.
        arg = (9999, "test_error")
self.assertRaises(exception.VolumeBackendAPIException,
self.driver.common.utils.process_exception_args,
arg, instancename3)
# Bug 1403160 - make sure the masking view is cleanly deleted
def test_last_volume_delete_masking_view(self):
extraSpecs = {'volume_backend_name': 'ISCSINoFAST'}
conn = self.fake_ecom_connection()
controllerConfigService = (
self.driver.utils.find_controller_configuration_service(
conn, self.data.storage_system))
maskingViewInstanceName = (
self.driver.common.masking._find_masking_view(
conn, self.data.lunmaskctrl_name, self.data.storage_system))
maskingViewName = conn.GetInstance(
maskingViewInstanceName)['ElementName']
# Deleting Masking View failed
self.assertRaises(
exception.VolumeBackendAPIException,
self.driver.common.masking._last_volume_delete_masking_view,
conn, controllerConfigService, maskingViewInstanceName,
maskingViewName, extraSpecs)
# Deleting Masking view successful
self.driver.common.masking.utils.get_existing_instance = mock.Mock(
return_value=None)
self.driver.common.masking._last_volume_delete_masking_view(
conn, controllerConfigService, maskingViewInstanceName,
maskingViewName, extraSpecs)
# Bug 1403160 - make sure the storage group is cleanly deleted
def test_remove_last_vol_and_delete_sg(self):
conn = self.fake_ecom_connection()
controllerConfigService = (
self.driver.utils.find_controller_configuration_service(
conn, self.data.storage_system))
storageGroupName = self.data.storagegroupname
storageGroupInstanceName = (
self.driver.utils.find_storage_masking_group(
conn, controllerConfigService, storageGroupName))
volumeInstanceName = (
conn.EnumerateInstanceNames("EMC_StorageVolume")[0])
volumeName = "1403160-Vol"
extraSpecs = {'volume_backend_name': 'GOLD_BE',
'isV3': False}
# Deleting Storage Group failed
self.assertRaises(
exception.VolumeBackendAPIException,
self.driver.common.masking._remove_last_vol_and_delete_sg,
conn, controllerConfigService, storageGroupInstanceName,
storageGroupName, volumeInstanceName, volumeName, extraSpecs)
# Deleting Storage group successful
self.driver.common.masking.utils.get_existing_instance = mock.Mock(
return_value=None)
self.driver.common.masking._remove_last_vol_and_delete_sg(
conn, controllerConfigService, storageGroupInstanceName,
storageGroupName, volumeInstanceName, volumeName, extraSpecs)
    # Bug 1504192 - if the last volume is being unmapped and the masking view
    # goes away, clean up the initiators and associated initiator group.
def test_delete_initiators_from_initiator_group(self):
conn = self.fake_ecom_connection()
controllerConfigService = (
self.driver.utils.find_controller_configuration_service(
conn, self.data.storage_system))
initiatorGroupName = self.data.initiatorgroup_name
initiatorGroupInstanceName = (
self.driver.common.masking._get_initiator_group_from_masking_view(
conn, self.data.lunmaskctrl_name, self.data.storage_system))
conn.InvokeMethod = mock.Mock(return_value=1)
# Deletion of initiators failed.
self.driver.common.masking._delete_initiators_from_initiator_group(
conn, controllerConfigService, initiatorGroupInstanceName,
initiatorGroupName)
conn.InvokeMethod = mock.Mock(return_value=0)
# Deletion of initiators successful.
self.driver.common.masking._delete_initiators_from_initiator_group(
conn, controllerConfigService, initiatorGroupInstanceName,
initiatorGroupName)
    # Bug 1504192 - if the last volume is being unmapped and the masking view
    # goes away, clean up the initiators and associated initiator group.
def test_last_volume_delete_initiator_group_exception(self):
extraSpecs = {'volume_backend_name': 'ISCSINoFAST'}
conn = self.fake_ecom_connection()
controllerConfigService = (
self.driver.utils.find_controller_configuration_service(
conn, self.data.storage_system))
initiatorGroupInstanceName = (
self.driver.common.masking._get_initiator_group_from_masking_view(
conn, self.data.lunmaskctrl_name, self.data.storage_system))
job = {
'Job': {'InstanceID': '9999', 'status': 'success', 'type': None}}
conn.InvokeMethod = mock.Mock(return_value=(4096, job))
self.driver.common.masking.get_masking_views_by_initiator_group = (
mock.Mock(return_value=[]))
self.driver.common.masking._delete_initiators_from_initiator_group = (
mock.Mock(return_value=True))
self.driver.common.masking.utils.wait_for_job_complete = (
mock.Mock(return_value=(2, 'failure')))
        # An exception occurs while deleting the initiator group.
self.assertRaises(
exception.VolumeBackendAPIException,
self.driver.common.masking._last_volume_delete_initiator_group,
conn, controllerConfigService, initiatorGroupInstanceName,
extraSpecs)
    # Bug 1504192 - if the last volume is being unmapped and the masking view
    # goes away, clean up the initiators and associated initiator group.
def test_last_volume_delete_initiator_group(self):
extraSpecs = {'volume_backend_name': 'ISCSINoFAST'}
conn = self.fake_ecom_connection()
controllerConfigService = (
self.driver.utils.find_controller_configuration_service(
conn, self.data.storage_system))
initiatorGroupName = self.data.initiatorgroup_name
initiatorGroupInstanceName = (
self.driver.common.masking._get_initiator_group_from_masking_view(
conn, self.data.lunmaskctrl_name, self.data.storage_system))
self.assertEqual(initiatorGroupName,
conn.GetInstance(
initiatorGroupInstanceName)['ElementName'])
# masking view is associated with the initiator group and initiator
# group will not be deleted.
self.driver.common.masking._last_volume_delete_initiator_group(
conn, controllerConfigService, initiatorGroupInstanceName,
extraSpecs)
self.driver.common.masking.get_masking_views_by_initiator_group = (
mock.Mock(return_value=[]))
self.driver.common.masking._delete_initiators_from_initiator_group = (
mock.Mock(return_value=True))
# No Masking view and initiators associated with the Initiator group
# and initiator group will be deleted.
self.driver.common.masking._last_volume_delete_initiator_group(
conn, controllerConfigService, initiatorGroupInstanceName,
extraSpecs)
job = {
'Job': {'InstanceID': '9999', 'status': 'success', 'type': None}}
conn.InvokeMethod = mock.Mock(return_value=(4096, job))
self.driver.common.masking.utils.wait_for_job_complete = (
mock.Mock(return_value=(0, 'success')))
# Deletion of initiator group is successful after waiting for job
# to complete.
self.driver.common.masking._last_volume_delete_initiator_group(
conn, controllerConfigService, initiatorGroupInstanceName,
extraSpecs)
# Tests removal of last volume in a storage group V2
def test_remove_and_reset_members(self):
extraSpecs = {'volume_backend_name': 'GOLD_BE',
'isV3': False}
conn = self.fake_ecom_connection()
controllerConfigService = (
self.driver.utils.find_controller_configuration_service(
conn, self.data.storage_system))
volumeInstanceName = (
conn.EnumerateInstanceNames("EMC_StorageVolume")[0])
volumeInstance = conn.GetInstance(volumeInstanceName)
volumeName = "Last-Vol"
self.driver.common.masking.get_devices_from_storage_group = mock.Mock(
return_value=['one_value'])
self.driver.common.masking.utils.get_existing_instance = mock.Mock(
return_value=None)
self.driver.common.masking.remove_and_reset_members(
conn, controllerConfigService, volumeInstance,
volumeName, extraSpecs)
# Bug 1393555 - masking view has been deleted by another process.
def test_find_maskingview(self):
conn = self.fake_ecom_connection()
foundMaskingViewInstanceName = (
self.driver.common.masking._find_masking_view(
conn, self.data.lunmaskctrl_name, self.data.storage_system))
# The masking view has been found.
self.assertEqual(
self.data.lunmaskctrl_name,
conn.GetInstance(foundMaskingViewInstanceName)['ElementName'])
self.driver.common.masking.utils.get_existing_instance = mock.Mock(
return_value=None)
foundMaskingViewInstanceName2 = (
self.driver.common.masking._find_masking_view(
conn, self.data.lunmaskctrl_name, self.data.storage_system))
# The masking view has not been found.
self.assertIsNone(foundMaskingViewInstanceName2)
# Bug 1393555 - port group has been deleted by another process.
def test_find_portgroup(self):
conn = self.fake_ecom_connection()
controllerConfigService = (
self.driver.utils.find_controller_configuration_service(
conn, self.data.storage_system))
foundPortGroupInstanceName = (
self.driver.common.masking.find_port_group(
conn, controllerConfigService, self.data.port_group))
# The port group has been found.
self.assertEqual(
self.data.port_group,
conn.GetInstance(foundPortGroupInstanceName)['ElementName'])
self.driver.common.masking.utils.get_existing_instance = mock.Mock(
return_value=None)
foundPortGroupInstanceName2 = (
self.driver.common.masking.find_port_group(
conn, controllerConfigService, self.data.port_group))
# The port group has not been found as it has been deleted
# externally or by another thread.
self.assertIsNone(foundPortGroupInstanceName2)
# Bug 1393555 - storage group has been deleted by another process.
def test_get_storage_group_from_masking_view(self):
conn = self.fake_ecom_connection()
foundStorageGroupInstanceName = (
self.driver.common.masking._get_storage_group_from_masking_view(
conn, self.data.lunmaskctrl_name, self.data.storage_system))
# The storage group has been found.
self.assertEqual(
self.data.storagegroupname,
conn.GetInstance(foundStorageGroupInstanceName)['ElementName'])
self.driver.common.masking.utils.get_existing_instance = mock.Mock(
return_value=None)
foundStorageGroupInstanceName2 = (
self.driver.common.masking._get_storage_group_from_masking_view(
conn, self.data.lunmaskctrl_name, self.data.storage_system))
# The storage group has not been found as it has been deleted
# externally or by another thread.
self.assertIsNone(foundStorageGroupInstanceName2)
# Bug 1393555 - initiator group has been deleted by another process.
def test_get_initiator_group_from_masking_view(self):
conn = self.fake_ecom_connection()
foundInitiatorGroupInstanceName = (
self.driver.common.masking._get_initiator_group_from_masking_view(
conn, self.data.lunmaskctrl_name, self.data.storage_system))
# The initiator group has been found.
self.assertEqual(
self.data.initiatorgroup_name,
conn.GetInstance(foundInitiatorGroupInstanceName)['ElementName'])
self.driver.common.masking.utils.get_existing_instance = mock.Mock(
return_value=None)
        foundInitiatorGroupInstanceName2 = (
            self.driver.common.masking._get_initiator_group_from_masking_view(
                conn, self.data.lunmaskctrl_name, self.data.storage_system))
# The initiator group has not been found as it has been deleted
# externally or by another thread.
self.assertIsNone(foundInitiatorGroupInstanceName2)
# Bug 1393555 - port group has been deleted by another process.
def test_get_port_group_from_masking_view(self):
conn = self.fake_ecom_connection()
foundPortGroupInstanceName = (
self.driver.common.masking._get_port_group_from_masking_view(
conn, self.data.lunmaskctrl_name, self.data.storage_system))
# The port group has been found.
self.assertEqual(
self.data.port_group,
conn.GetInstance(foundPortGroupInstanceName)['ElementName'])
self.driver.common.masking.utils.get_existing_instance = mock.Mock(
return_value=None)
foundPortGroupInstanceName2 = (
self.driver.common.masking._get_port_group_from_masking_view(
conn, self.data.lunmaskctrl_name, self.data.storage_system))
# The port group has not been found as it has been deleted
# externally or by another thread.
self.assertIsNone(foundPortGroupInstanceName2)
# Bug 1393555 - initiator group has been deleted by another process.
def test_find_initiator_group(self):
conn = self.fake_ecom_connection()
controllerConfigService = (
self.driver.utils.find_controller_configuration_service(
conn, self.data.storage_system))
foundInitiatorGroupInstanceName = (
self.driver.common.masking._find_initiator_masking_group(
conn, controllerConfigService, self.data.initiatorNames))
# The initiator group has been found.
self.assertEqual(
self.data.initiatorgroup_name,
conn.GetInstance(foundInitiatorGroupInstanceName)['ElementName'])
self.driver.common.masking.utils.get_existing_instance = mock.Mock(
return_value=None)
foundInitiatorGroupInstanceName2 = (
self.driver.common.masking._find_initiator_masking_group(
conn, controllerConfigService, self.data.initiatorNames))
# The initiator group has not been found as it has been deleted
# externally or by another thread.
self.assertIsNone(foundInitiatorGroupInstanceName2)
# Bug 1393555 - hardware id has been deleted by another process.
def test_get_storage_hardware_id_instance_names(self):
conn = self.fake_ecom_connection()
foundHardwareIdInstanceNames = (
self.driver.common.masking._get_storage_hardware_id_instance_names(
conn, self.data.initiatorNames, self.data.storage_system))
# The hardware id list has been found.
self.assertEqual(
'123456789012345',
conn.GetInstance(
foundHardwareIdInstanceNames[0])['StorageID'])
self.driver.common.masking.utils.get_existing_instance = mock.Mock(
return_value=None)
foundHardwareIdInstanceNames2 = (
self.driver.common.masking._get_storage_hardware_id_instance_names(
conn, self.data.initiatorNames, self.data.storage_system))
# The hardware id list has not been found as it has been removed
# externally.
        self.assertEqual(0, len(foundHardwareIdInstanceNames2))
# Bug 1393555 - controller has been deleted by another process.
def test_find_lunmasking_scsi_protocol_controller(self):
self.driver.common.conn = self.fake_ecom_connection()
foundControllerInstanceName = (
self.driver.common._find_lunmasking_scsi_protocol_controller(
self.data.storage_system, self.data.connector))
# The controller has been found.
self.assertEqual(
'OS-fakehost-gold-I-MV',
self.driver.common.conn.GetInstance(
foundControllerInstanceName)['ElementName'])
self.driver.common.utils.get_existing_instance = mock.Mock(
return_value=None)
foundControllerInstanceName2 = (
self.driver.common._find_lunmasking_scsi_protocol_controller(
self.data.storage_system, self.data.connector))
# The controller has not been found as it has been removed
# externally.
self.assertIsNone(foundControllerInstanceName2)
# Bug 1393555 - storage group has been deleted by another process.
def test_get_policy_default_storage_group(self):
conn = self.fake_ecom_connection()
controllerConfigService = (
self.driver.utils.find_controller_configuration_service(
conn, self.data.storage_system))
foundStorageMaskingGroupInstanceName = (
self.driver.common.fast.get_policy_default_storage_group(
conn, controllerConfigService, 'OS_default'))
# The storage group has been found.
self.assertEqual(
'OS_default_GOLD1_SG',
conn.GetInstance(
foundStorageMaskingGroupInstanceName)['ElementName'])
self.driver.common.fast.utils.get_existing_instance = mock.Mock(
return_value=None)
foundStorageMaskingGroupInstanceName2 = (
self.driver.common.fast.get_policy_default_storage_group(
conn, controllerConfigService, 'OS_default'))
# The storage group has not been found as it has been removed
# externally.
self.assertIsNone(foundStorageMaskingGroupInstanceName2)
# Bug 1393555 - policy has been deleted by another process.
def test_get_capacities_associated_to_policy(self):
conn = self.fake_ecom_connection()
total_capacity_gb, free_capacity_gb = (
self.driver.common.fast.get_capacities_associated_to_policy(
conn, self.data.storage_system, self.data.policyrule))
# The capacities associated to the policy have been found.
self.assertEqual(self.data.totalmanagedspace_gbs, total_capacity_gb)
self.assertEqual(self.data.subscribedcapacity_gbs, free_capacity_gb)
self.driver.common.fast.utils.get_existing_instance = mock.Mock(
return_value=None)
total_capacity_gb_2, free_capacity_gb_2 = (
self.driver.common.fast.get_capacities_associated_to_policy(
conn, self.data.storage_system, self.data.policyrule))
# The capacities have not been found as the policy has been
# removed externally.
self.assertEqual(0, total_capacity_gb_2)
self.assertEqual(0, free_capacity_gb_2)
# Bug 1393555 - storage group has been deleted by another process.
def test_find_storage_masking_group(self):
conn = self.fake_ecom_connection()
controllerConfigService = (
self.driver.utils.find_controller_configuration_service(
conn, self.data.storage_system))
foundStorageMaskingGroupInstanceName = (
self.driver.common.utils.find_storage_masking_group(
conn, controllerConfigService, self.data.storagegroupname))
# The storage group has been found.
self.assertEqual(
self.data.storagegroupname,
conn.GetInstance(
foundStorageMaskingGroupInstanceName)['ElementName'])
self.driver.common.utils.get_existing_instance = mock.Mock(
return_value=None)
foundStorageMaskingGroupInstanceName2 = (
self.driver.common.utils.find_storage_masking_group(
conn, controllerConfigService, self.data.storagegroupname))
# The storage group has not been found as it has been removed
# externally.
self.assertIsNone(foundStorageMaskingGroupInstanceName2)
# Bug 1393555 - pool has been deleted by another process.
def test_get_pool_by_name(self):
conn = self.fake_ecom_connection()
foundPoolInstanceName = self.driver.common.utils.get_pool_by_name(
conn, self.data.poolname, self.data.storage_system)
# The pool has been found.
self.assertEqual(
self.data.poolname,
conn.GetInstance(foundPoolInstanceName)['ElementName'])
self.driver.common.utils.get_existing_instance = mock.Mock(
return_value=None)
foundPoolInstanceName2 = self.driver.common.utils.get_pool_by_name(
conn, self.data.poolname, self.data.storage_system)
# The pool has not been found as it has been removed externally.
self.assertIsNone(foundPoolInstanceName2)
def test_get_volume_stats_1364232(self):
file_name = self.create_fake_config_file_1364232()
arrayInfo = self.driver.utils.parse_file_to_get_array_map(file_name)
self.assertEqual(
'000198700439', arrayInfo[0]['SerialNumber'])
self.assertEqual(
'FC_SLVR1', arrayInfo[0]['PoolName'])
self.assertEqual(
'SILVER1', arrayInfo[0]['FastPolicy'])
        self.assertIn('OS-PORTGROUP', arrayInfo[0]['PortGroup'])
        if os.path.exists(file_name):
            os.remove(file_name)
    def test_intervals_and_retries_override(self):
file_name = (
self.create_fake_config_file_no_fast_with_interval_retries())
extraSpecs = {'volume_backend_name': 'ISCSINoFAST'}
pool = 'gold+1234567891011'
        arrayInfo = self.driver.utils.parse_file_to_get_array_map(
            file_name)
poolRec = self.driver.utils.extract_record(arrayInfo, pool)
extraSpecs = self.driver.common._set_v2_extra_specs(extraSpecs,
poolRec)
self.assertEqual(40,
self.driver.utils._get_max_job_retries(extraSpecs))
self.assertEqual(5,
self.driver.utils._get_interval_in_secs(extraSpecs))
        if os.path.exists(file_name):
            os.remove(file_name)
def test_intervals_and_retries_default(self):
extraSpecs = {'volume_backend_name': 'ISCSINoFAST'}
pool = 'gold+1234567891011'
arrayInfo = self.driver.utils.parse_file_to_get_array_map(
self.config_file_path)
poolRec = self.driver.utils.extract_record(arrayInfo, pool)
extraSpecs = self.driver.common._set_v2_extra_specs(extraSpecs,
poolRec)
self.assertEqual(60,
self.driver.utils._get_max_job_retries(extraSpecs))
self.assertEqual(10,
self.driver.utils._get_interval_in_secs(extraSpecs))
def test_interval_only(self):
extraSpecs = {'volume_backend_name': 'ISCSINoFAST'}
file_name = self.create_fake_config_file_no_fast_with_interval()
pool = 'gold+1234567891011'
        arrayInfo = self.driver.utils.parse_file_to_get_array_map(
            file_name)
poolRec = self.driver.utils.extract_record(arrayInfo, pool)
extraSpecs = self.driver.common._set_v2_extra_specs(extraSpecs,
poolRec)
self.assertEqual(60,
self.driver.utils._get_max_job_retries(extraSpecs))
self.assertEqual(20,
self.driver.utils._get_interval_in_secs(extraSpecs))
        if os.path.exists(file_name):
            os.remove(file_name)
def test_retries_only(self):
extraSpecs = {'volume_backend_name': 'ISCSINoFAST'}
file_name = self.create_fake_config_file_no_fast_with_retries()
pool = 'gold+1234567891011'
        arrayInfo = self.driver.utils.parse_file_to_get_array_map(
            file_name)
poolRec = self.driver.utils.extract_record(arrayInfo, pool)
extraSpecs = self.driver.common._set_v2_extra_specs(extraSpecs,
poolRec)
self.assertEqual(70,
self.driver.utils._get_max_job_retries(extraSpecs))
self.assertEqual(10,
self.driver.utils._get_interval_in_secs(extraSpecs))
        if os.path.exists(file_name):
            os.remove(file_name)
@mock.patch.object(
emc_vmax_utils.EMCVMAXUtils,
'isArrayV3',
return_value=False)
@mock.patch.object(
emc_vmax_utils.EMCVMAXUtils,
'get_pool_capacities',
return_value=(1234, 1200))
@mock.patch.object(
emc_vmax_fast.EMCVMAXFast,
'is_tiering_policy_enabled',
return_value=False)
@mock.patch.object(
emc_vmax_utils.EMCVMAXUtils,
'find_storageSystem',
return_value=None)
def test_get_volume_stats_no_fast(self,
mock_storage_system,
mock_is_fast_enabled,
mock_capacity,
mock_is_v3):
self.driver.get_volume_stats(True)
@mock.patch.object(
emc_vmax_common.EMCVMAXCommon,
'_get_pool_and_storage_system',
return_value=(None, EMCVMAXCommonData.storage_system))
@mock.patch.object(
volume_types,
'get_volume_type_extra_specs',
return_value={'volume_backend_name': 'ISCSINoFAST'})
def test_create_volume_no_fast_success(
self, _mock_volume_type, mock_storage_system):
self.driver.create_volume(self.data.test_volume_v2)
@mock.patch.object(
emc_vmax_common.EMCVMAXCommon,
'_get_pool_and_storage_system',
return_value=(None, EMCVMAXCommonData.storage_system))
@mock.patch.object(
volume_types,
'get_volume_type_extra_specs',
return_value={'storagetype: stripedmetacount': '4',
'volume_backend_name': 'ISCSINoFAST'})
def test_create_volume_no_fast_striped_success(
self, _mock_volume_type, mock_storage_system):
self.driver.create_volume(self.data.test_volume_v2)
@mock.patch.object(
emc_vmax_common.EMCVMAXCommon,
'_get_pool_and_storage_system',
return_value=(None, EMCVMAXCommonData.storage_system))
@mock.patch.object(
volume_types,
'get_volume_type_extra_specs',
return_value={'volume_backend_name': 'ISCSINoFAST'})
def test_create_volume_in_CG_no_fast_success(
self, _mock_volume_type, mock_storage_system):
self.driver.create_volume(self.data.test_volume_CG)
@mock.patch.object(
emc_vmax_common.EMCVMAXCommon,
'_get_pool_and_storage_system',
return_value=(None, EMCVMAXCommonData.storage_system))
@mock.patch.object(
volume_types,
'get_volume_type_extra_specs',
return_value={'volume_backend_name': 'ISCSINoFAST'})
def test_delete_volume_no_fast_success(
self, _mock_volume_type, mock_storage_system):
self.driver.delete_volume(self.data.test_volume)
def test_create_volume_no_fast_failed(self):
self.assertRaises(exception.VolumeBackendAPIException,
self.driver.create_volume,
self.data.test_failed_volume)
@mock.patch.object(
volume_types,
'get_volume_type_extra_specs',
return_value={'volume_backend_name': 'ISCSINoFAST'})
def test_delete_volume_no_fast_notfound(self, _mock_volume_type):
notfound_delete_vol = {}
notfound_delete_vol['name'] = 'notfound_delete_vol'
notfound_delete_vol['id'] = '10'
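        # 'Symmm_StorageVolume' is deliberately misspelled so the fake
        # ECOM connection cannot find the volume.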
notfound_delete_vol['CreationClassName'] = 'Symmm_StorageVolume'
notfound_delete_vol['SystemName'] = self.data.storage_system
notfound_delete_vol['DeviceID'] = notfound_delete_vol['id']
notfound_delete_vol['SystemCreationClassName'] = 'Symm_StorageSystem'
notfound_delete_vol['volume_type_id'] = 'abc'
notfound_delete_vol['provider_location'] = None
notfound_delete_vol['host'] = self.data.fake_host
name = {}
name['classname'] = 'Symm_StorageVolume'
keys = {}
keys['CreationClassName'] = notfound_delete_vol['CreationClassName']
keys['SystemName'] = notfound_delete_vol['SystemName']
keys['DeviceID'] = notfound_delete_vol['DeviceID']
keys['SystemCreationClassName'] = (
notfound_delete_vol['SystemCreationClassName'])
name['keybindings'] = keys
self.driver.delete_volume(notfound_delete_vol)
@mock.patch.object(
emc_vmax_utils.EMCVMAXUtils,
'wait_for_job_complete',
return_value=(-1, 'error'))
@mock.patch.object(
emc_vmax_common.EMCVMAXCommon,
'_get_pool_and_storage_system',
return_value=(None, EMCVMAXCommonData.storage_system))
@mock.patch.object(
volume_types,
'get_volume_type_extra_specs',
return_value={'volume_backend_name': 'ISCSINoFAST'})
def test_delete_volume_failed(
self, _mock_volume_type, mock_storage_system, mock_wait):
self.driver.create_volume(self.data.failed_delete_vol)
self.assertRaises(exception.VolumeBackendAPIException,
self.driver.delete_volume,
self.data.failed_delete_vol)
@mock.patch.object(
emc_vmax_common.EMCVMAXCommon,
'_is_same_host',
return_value=True)
@mock.patch.object(
emc_vmax_common.EMCVMAXCommon,
'find_device_number',
return_value={'hostlunid': 1,
'storagesystem': EMCVMAXCommonData.storage_system})
@mock.patch.object(
emc_vmax_masking.EMCVMAXMasking,
'_wrap_get_storage_group_from_volume',
return_value=None)
@mock.patch.object(
volume_types,
'get_volume_type_extra_specs',
return_value={'volume_backend_name': 'ISCSINoFAST'})
def test_already_mapped_no_fast_success(
self, _mock_volume_type, mock_wrap_group, mock_wrap_device,
mock_is_same_host):
self.driver.common._get_correct_port_group = mock.Mock(
return_value=self.data.port_group)
self.driver.initialize_connection(self.data.test_volume,
self.data.connector)
@mock.patch.object(
emc_vmax_masking.EMCVMAXMasking,
'_check_adding_volume_to_storage_group',
return_value=None)
@mock.patch.object(
emc_vmax_utils.EMCVMAXUtils,
'find_storage_masking_group',
return_value='value')
@mock.patch.object(
emc_vmax_masking.EMCVMAXMasking,
'_wrap_get_storage_group_from_volume',
return_value=None)
@mock.patch.object(
volume_types,
'get_volume_type_extra_specs',
return_value={'volume_backend_name': 'ISCSINoFAST'})
def test_map_new_masking_view_no_fast_success(
self, _mock_volume_type, mock_wrap_group,
mock_storage_group, mock_add_volume):
self.driver.common._wrap_find_device_number = mock.Mock(
return_value={})
self.driver.initialize_connection(self.data.test_volume,
self.data.connector)
@mock.patch.object(
emc_vmax_masking.EMCVMAXMasking,
'_check_adding_volume_to_storage_group',
return_value=None)
@mock.patch.object(
emc_vmax_common.EMCVMAXCommon,
'_is_same_host',
return_value=False)
@mock.patch.object(
emc_vmax_utils.EMCVMAXUtils,
'find_storage_masking_group',
return_value='value')
@mock.patch.object(
emc_vmax_common.EMCVMAXCommon,
'find_device_number',
return_value={'hostlunid': 1,
'storagesystem': EMCVMAXCommonData.storage_system})
@mock.patch.object(
emc_vmax_masking.EMCVMAXMasking,
'_wrap_get_storage_group_from_volume',
return_value=None)
@mock.patch.object(
volume_types,
'get_volume_type_extra_specs',
return_value={'volume_backend_name': 'ISCSINoFAST'})
def test_map_live_migration_no_fast_success(self,
_mock_volume_type,
mock_wrap_group,
mock_wrap_device,
mock_storage_group,
mock_same_host,
mock_check):
self.driver.initialize_connection(self.data.test_volume,
self.data.connector)
@mock.patch.object(
emc_vmax_masking.EMCVMAXMasking,
'_get_initiator_group_from_masking_view',
return_value='value')
@mock.patch.object(
emc_vmax_masking.EMCVMAXMasking,
'_find_initiator_masking_group',
return_value='value')
@mock.patch.object(
emc_vmax_masking.EMCVMAXMasking,
'_find_masking_view',
return_value='value')
@mock.patch.object(
emc_vmax_masking.EMCVMAXMasking,
'_wrap_get_storage_group_from_volume',
return_value=None)
@mock.patch.object(
volume_types,
'get_volume_type_extra_specs',
return_value={'volume_backend_name': 'ISCSINoFAST'})
def test_map_existing_masking_view_no_fast_success(
self, _mock_volume_type, mock_wrap_group, mock_storage_group,
mock_initiator_group, mock_ig_from_mv):
self.driver.initialize_connection(self.data.test_volume,
self.data.connector)
@mock.patch.object(
emc_vmax_common.EMCVMAXCommon,
'find_device_number',
return_value={'storagesystem': EMCVMAXCommonData.storage_system})
@mock.patch.object(
emc_vmax_masking.EMCVMAXMasking,
'_wrap_get_storage_group_from_volume',
return_value=None)
def test_map_no_fast_failed(self, mock_wrap_group, mock_wrap_device):
self.assertRaises(exception.VolumeBackendAPIException,
self.driver.initialize_connection,
self.data.test_volume,
self.data.connector)
@mock.patch.object(
emc_vmax_masking.EMCVMAXMasking,
'get_initiator_group_from_masking_view',
return_value='myInitGroup')
@mock.patch.object(
emc_vmax_masking.EMCVMAXMasking,
'_find_initiator_masking_group',
return_value='myInitGroup')
@mock.patch.object(
emc_vmax_utils.EMCVMAXUtils,
'find_storage_masking_group',
return_value=EMCVMAXCommonData.storagegroupname)
@mock.patch.object(
volume_types,
'get_volume_type_extra_specs',
return_value={'volume_backend_name': 'ISCSINoFAST'})
def test_detach_no_fast_success(
self, mock_volume_type, mock_storage_group,
mock_ig, mock_igc):
self.driver.terminate_connection(
self.data.test_volume, self.data.connector)
@mock.patch.object(
emc_vmax_utils.EMCVMAXUtils,
'get_volume_size',
return_value='2147483648')
@mock.patch.object(
volume_types,
'get_volume_type_extra_specs',
return_value={'volume_backend_name': 'ISCSINoFAST'})
def test_extend_volume_no_fast_success(
self, _mock_volume_type, mock_volume_size):
newSize = '2'
self.driver.extend_volume(self.data.test_volume, newSize)
@mock.patch.object(
emc_vmax_utils.EMCVMAXUtils,
'check_if_volume_is_extendable',
return_value='False')
@mock.patch.object(
volume_types,
'get_volume_type_extra_specs',
return_value={'storagetype: stripedmetacount': '4',
'volume_backend_name': 'ISCSINoFAST'})
def test_extend_volume_striped_no_fast_failed(
self, _mock_volume_type, _mock_is_extendable):
newSize = '2'
self.assertRaises(exception.VolumeBackendAPIException,
self.driver.extend_volume,
self.data.test_volume,
newSize)
@mock.patch.object(
emc_vmax_common.EMCVMAXCommon,
'_get_pool_and_storage_system',
return_value=(None, EMCVMAXCommonData.storage_system))
@mock.patch.object(
emc_vmax_utils.EMCVMAXUtils,
'get_meta_members_capacity_in_byte',
return_value=[1234567, 7654321])
@mock.patch.object(
emc_vmax_utils.EMCVMAXUtils,
'get_volume_meta_head',
return_value=[EMCVMAXCommonData.test_volume])
@mock.patch.object(
FakeDB,
'volume_get',
return_value=EMCVMAXCommonData.test_source_volume)
@mock.patch.object(
volume_types,
'get_volume_type_extra_specs',
return_value={'volume_backend_name': 'ISCSINoFAST'})
def test_create_snapshot_different_sizes_meta_no_fast_success(
self, mock_volume_type, mock_volume,
mock_meta, mock_size, mock_pool):
self.data.test_volume['volume_name'] = "vmax-1234567"
common = self.driver.common
volumeDict = {'classname': u'Symm_StorageVolume',
'keybindings': EMCVMAXCommonData.keybindings}
common.provision.create_volume_from_pool = (
mock.Mock(return_value=(volumeDict, 0)))
common.provision.get_volume_dict_from_job = (
mock.Mock(return_value=volumeDict))
self.driver.create_snapshot(self.data.test_volume)
def test_create_snapshot_no_fast_failed(self):
self.data.test_volume['volume_name'] = "vmax-1234567"
self.assertRaises(exception.VolumeBackendAPIException,
self.driver.create_snapshot,
self.data.test_volume)
@mock.patch.object(
emc_vmax_utils.EMCVMAXUtils,
'get_meta_members_capacity_in_byte',
return_value=[1234567])
@mock.patch.object(
emc_vmax_utils.EMCVMAXUtils,
'get_volume_meta_head',
return_value=[EMCVMAXCommonData.test_volume])
@mock.patch.object(
emc_vmax_utils.EMCVMAXUtils,
'find_sync_sv_by_target',
return_value=(None, None))
@mock.patch.object(
volume_types,
'get_volume_type_extra_specs',
return_value={'volume_backend_name': 'ISCSINoFAST'})
def test_create_volume_from_same_size_meta_snapshot(
self, mock_volume_type, mock_sync_sv, mock_meta, mock_size):
self.data.test_volume['volume_name'] = "vmax-1234567"
self.driver.create_volume_from_snapshot(
self.data.test_volume, self.data.test_volume)
def test_create_volume_from_snapshot_no_fast_failed(self):
self.data.test_volume['volume_name'] = "vmax-1234567"
self.assertRaises(exception.VolumeBackendAPIException,
self.driver.create_volume_from_snapshot,
self.data.test_volume,
self.data.test_volume)
@mock.patch.object(
emc_vmax_utils.EMCVMAXUtils,
'get_volume_meta_head',
return_value=None)
@mock.patch.object(
emc_vmax_common.EMCVMAXCommon,
'_find_storage_sync_sv_sv',
return_value=(None, None))
@mock.patch.object(
FakeDB,
'volume_get',
return_value=EMCVMAXCommonData.test_source_volume)
@mock.patch.object(
volume_types,
'get_volume_type_extra_specs',
return_value={'volume_backend_name': 'ISCSINoFAST'})
def test_create_clone_simple_volume_no_fast_success(
self, mock_volume_type, mock_volume, mock_sync_sv,
mock_simple_volume):
self.data.test_volume['volume_name'] = "vmax-1234567"
self.driver.create_cloned_volume(self.data.test_volume,
EMCVMAXCommonData.test_source_volume)
# Bug https://bugs.launchpad.net/cinder/+bug/1440154
@mock.patch.object(
emc_vmax_utils.EMCVMAXUtils,
'get_volume_meta_head',
return_value=[EMCVMAXCommonData.test_volume])
@mock.patch.object(
emc_vmax_common.EMCVMAXCommon,
'_get_pool_and_storage_system',
return_value=(None, EMCVMAXCommonData.storage_system))
@mock.patch.object(
emc_vmax_utils.EMCVMAXUtils,
'get_meta_members_capacity_in_byte',
return_value=[1234567, 7654321])
@mock.patch.object(
FakeDB,
'volume_get',
return_value=EMCVMAXCommonData.test_source_volume)
@mock.patch.object(
volume_types,
'get_volume_type_extra_specs',
return_value={'volume_backend_name': 'ISCSINoFAST'})
@mock.patch.object(
emc_vmax_provision.EMCVMAXProvision,
'create_element_replica')
@mock.patch.object(
emc_vmax_utils.EMCVMAXUtils,
'find_sync_sv_by_target',
return_value=(None, None))
def test_create_clone_assert_clean_up_target_volume(
self, mock_sync, mock_create_replica, mock_volume_type,
mock_volume, mock_capacities, mock_pool, mock_meta_volume):
self.data.test_volume['volume_name'] = "vmax-1234567"
e = exception.VolumeBackendAPIException('CreateElementReplica Ex')
common = self.driver.common
common._delete_from_pool = mock.Mock(return_value=0)
conn = self.fake_ecom_connection()
storageConfigService = (
common.utils.find_storage_configuration_service(
conn, self.data.storage_system))
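        # Make create_element_replica fail so the driver is forced to
        # clean up the target volume it provisioned for the clone.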
mock_create_replica.side_effect = e
self.assertRaises(exception.VolumeBackendAPIException,
self.driver.create_cloned_volume,
self.data.test_volume,
EMCVMAXCommonData.test_source_volume)
extraSpecs = common._initial_setup(self.data.test_volume)
fastPolicy = extraSpecs['storagetype:fastpolicy']
targetInstance = (
conn.EnumerateInstanceNames("EMC_StorageVolume")[0])
common._delete_from_pool.assert_called_with(storageConfigService,
targetInstance,
targetInstance['Name'],
targetInstance['DeviceID'],
fastPolicy,
extraSpecs)
@mock.patch.object(
volume_types,
'get_volume_type_extra_specs',
return_value={'volume_backend_name': 'ISCSINoFAST'})
def test_migrate_volume_no_fast_success(self, _mock_volume_type):
self.driver.migrate_volume(self.data.test_ctxt, self.data.test_volume,
self.data.test_host)
@mock.patch.object(
emc_vmax_utils.EMCVMAXUtils,
'parse_pool_instance_id',
return_value=('silver', 'SYMMETRIX+000195900551'))
@mock.patch.object(
volume_types,
'get_volume_type_extra_specs',
return_value={'volume_backend_name': 'ISCSINoFAST'})
def test_retype_volume_no_fast_success(
self, _mock_volume_type, mock_values):
self.driver.retype(
self.data.test_ctxt, self.data.test_volume, self.data.new_type,
self.data.diff, self.data.test_host)
@mock.patch.object(
emc_vmax_common.EMCVMAXCommon,
'_get_pool_and_storage_system',
return_value=(None, EMCVMAXCommonData.storage_system))
@mock.patch.object(
volume_types,
'get_volume_type_extra_specs',
return_value={'volume_backend_name': 'ISCSINoFAST'})
def test_create_CG_no_fast_success(
self, _mock_volume_type, _mock_storage_system):
self.driver.create_consistencygroup(
self.data.test_ctxt, self.data.test_CG)
@mock.patch.object(
emc_vmax_common.EMCVMAXCommon,
'_get_members_of_replication_group',
return_value=None)
@mock.patch.object(
FakeDB,
'volume_get_all_by_group',
return_value=None)
@mock.patch.object(
emc_vmax_common.EMCVMAXCommon,
'_get_pool_and_storage_system',
return_value=(None, EMCVMAXCommonData.storage_system))
@mock.patch.object(
volume_types,
'get_volume_type_extra_specs',
return_value={'volume_backend_name': 'ISCSINoFAST'})
def test_delete_CG_no_volumes_no_fast_success(
self, _mock_volume_type, _mock_storage_system,
_mock_db_volumes, _mock_members):
self.driver.delete_consistencygroup(
self.data.test_ctxt, self.data.test_CG, [])
@mock.patch.object(
emc_vmax_common.EMCVMAXCommon,
'_get_pool_and_storage_system',
return_value=(None, EMCVMAXCommonData.storage_system))
@mock.patch.object(
volume_types,
'get_volume_type_extra_specs',
return_value={'volume_backend_name': 'ISCSINoFAST'})
def test_delete_CG_with_volumes_no_fast_success(
self, _mock_volume_type, _mock_storage_system):
self.driver.delete_consistencygroup(
self.data.test_ctxt, self.data.test_CG, [])
@mock.patch.object(
emc_vmax_utils.EMCVMAXUtils,
'find_group_sync_rg_by_target',
return_value="")
@mock.patch.object(
emc_vmax_common.EMCVMAXCommon,
'_get_members_of_replication_group',
return_value=())
@mock.patch.object(
emc_vmax_common.EMCVMAXCommon,
'_find_consistency_group',
return_value=(None, EMCVMAXCommonData.test_CG))
@mock.patch.object(
emc_vmax_common.EMCVMAXCommon,
'_get_pool_and_storage_system',
return_value=(None, EMCVMAXCommonData.storage_system))
@mock.patch.object(
volume_types,
'get_volume_type_extra_specs',
return_value={'volume_backend_name': 'ISCSINoFAST'})
def test_create_snapshot_for_CG_no_fast_success(
self, _mock_volume_type, _mock_storage, _mock_cg, _mock_members,
_mock_rg):
self.driver.create_cgsnapshot(
self.data.test_ctxt, self.data.test_CG_snapshot, [])
@mock.patch.object(
emc_vmax_common.EMCVMAXCommon,
'_get_pool_and_storage_system',
return_value=(None, EMCVMAXCommonData.storage_system))
@mock.patch.object(
volume_types,
'get_volume_type_extra_specs',
return_value={'volume_backend_name': 'ISCSINoFAST'})
def test_delete_snapshot_for_CG_no_fast_success(
self, _mock_volume_type, _mock_storage):
self.driver.delete_cgsnapshot(
self.data.test_ctxt, self.data.test_CG_snapshot, [])
@mock.patch.object(
emc_vmax_common.EMCVMAXCommon,
'_get_pool_and_storage_system',
return_value=(None, EMCVMAXCommonData.storage_system))
@mock.patch.object(
volume_types,
'get_volume_type_extra_specs',
return_value={'volume_backend_name': 'ISCSINoFAST'})
def test_update_CG_add_volume_no_fast_success(
self, _mock_volume_type, _mock_storage_system):
add_volumes = []
add_volumes.append(self.data.test_source_volume)
remove_volumes = None
self.driver.update_consistencygroup(
self.data.test_ctxt, self.data.test_CG,
add_volumes, remove_volumes)
# Multiple volumes
add_volumes.append(self.data.test_source_volume)
self.driver.update_consistencygroup(
self.data.test_ctxt, self.data.test_CG,
add_volumes, remove_volumes)
# Can't find CG
self.driver.common._find_consistency_group = mock.Mock(
return_value=None)
self.assertRaises(exception.ConsistencyGroupNotFound,
self.driver.update_consistencygroup,
self.data.test_ctxt, self.data.test_CG,
add_volumes, remove_volumes)
@mock.patch.object(
emc_vmax_common.EMCVMAXCommon,
'_get_pool_and_storage_system',
return_value=(None, EMCVMAXCommonData.storage_system))
@mock.patch.object(
volume_types,
'get_volume_type_extra_specs',
return_value={'volume_backend_name': 'ISCSINoFAST'})
def test_update_CG_remove_volume_no_fast_success(
self, _mock_volume_type, _mock_storage_system):
remove_volumes = []
remove_volumes.append(self.data.test_source_volume)
add_volumes = None
self.driver.update_consistencygroup(
self.data.test_ctxt, self.data.test_CG,
add_volumes, remove_volumes)
# Multiple volumes
remove_volumes.append(self.data.test_source_volume)
self.driver.update_consistencygroup(
self.data.test_ctxt, self.data.test_CG,
add_volumes, remove_volumes)
# Bug https://bugs.launchpad.net/cinder/+bug/1442376
@mock.patch.object(
emc_vmax_common.EMCVMAXCommon,
'_get_pool_and_storage_system',
return_value=(None, EMCVMAXCommonData.storage_system))
@mock.patch.object(
emc_vmax_utils.EMCVMAXUtils,
'get_meta_members_capacity_in_byte',
return_value=[1234567, 7654321])
@mock.patch.object(
emc_vmax_utils.EMCVMAXUtils,
'get_volume_meta_head',
return_value=[EMCVMAXCommonData.test_volume])
@mock.patch.object(
FakeDB,
'volume_get',
return_value=EMCVMAXCommonData.test_source_volume)
@mock.patch.object(
volume_types,
'get_volume_type_extra_specs',
return_value={'volume_backend_name': 'ISCSINoFAST'})
def test_create_clone_with_different_meta_sizes(
self, mock_volume_type, mock_volume,
mock_meta, mock_size, mock_pool):
self.data.test_volume['volume_name'] = "vmax-1234567"
common = self.driver.common
volumeDict = {'classname': u'Symm_StorageVolume',
'keybindings': EMCVMAXCommonData.keybindings}
volume = {'size': 0}
common.provision.create_volume_from_pool = (
mock.Mock(return_value=(volumeDict, volume['size'])))
common.provision.get_volume_dict_from_job = (
mock.Mock(return_value=volumeDict))
common._create_composite_volume = (
mock.Mock(return_value=(0,
volumeDict,
EMCVMAXCommonData.storage_system)))
self.driver.create_cloned_volume(self.data.test_volume,
EMCVMAXCommonData.test_source_volume)
extraSpecs = self.driver.common._initial_setup(self.data.test_volume)
common._create_composite_volume.assert_called_with(
volume, "TargetBaseVol", 1234567, extraSpecs, 1)
def test_find_volume_by_device_id_on_array(self):
conn = self.fake_ecom_connection()
utils = self.driver.common.utils
volumeInstanceName = utils.find_volume_by_device_id_on_array(
conn, self.data.storage_system, self.data.test_volume['device_id'])
expectVolume = {}
expectVolume['CreationClassName'] = 'Symm_StorageVolume'
expectVolume['DeviceID'] = self.data.test_volume['device_id']
expect = conn.GetInstance(expectVolume)
self.assertEqual(expect, volumeInstanceName)
def test_get_volume_element_name(self):
volumeId = 'ea95aa39-080b-4f11-9856-a03acf9112ad'
utils = self.driver.common.utils
volumeElementName = utils.get_volume_element_name(volumeId)
expectVolumeElementName = (
emc_vmax_utils.VOLUME_ELEMENT_NAME_PREFIX + volumeId)
self.assertEqual(expectVolumeElementName, volumeElementName)
def test_get_associated_replication_from_source_volume(self):
conn = self.fake_ecom_connection()
utils = self.driver.common.utils
repInstanceName = (
utils.get_associated_replication_from_source_volume(
conn, self.data.storage_system,
self.data.test_volume['device_id']))
expectInstanceName = (
conn.EnumerateInstanceNames('SE_StorageSynchronized_SV_SV')[0])
self.assertEqual(expectInstanceName, repInstanceName)
def test_get_array_and_device_id_success(self):
deviceId = '0123'
arrayId = u'array1234'
external_ref = {u'source-name': deviceId}
        volume = {'volume_metadata': [{'key': 'array', 'value': arrayId}]}
utils = self.driver.common.utils
(arrId, devId) = utils.get_array_and_device_id(volume, external_ref)
self.assertEqual(arrayId, arrId)
self.assertEqual(deviceId, devId)
def test_get_array_and_device_id_failed(self):
deviceId = '0123'
arrayId = u'array1234'
external_ref = {u'no-source-name': deviceId}
        volume = {'volume_metadata': [{'key': 'array', 'value': arrayId}]}
utils = self.driver.common.utils
self.assertRaises(exception.VolumeBackendAPIException,
utils.get_array_and_device_id,
volume,
external_ref)
def test_rename_volume(self):
conn = self.fake_ecom_connection()
utils = self.driver.common.utils
newName = 'new_name'
volume = {}
volume['CreationClassName'] = 'Symm_StorageVolume'
volume['DeviceID'] = '1'
volume['ElementName'] = 'original_name'
pywbem = mock.Mock()
pywbem.cim_obj = mock.Mock()
pywbem.cim_obj.CIMInstance = mock.Mock()
emc_vmax_utils.pywbem = pywbem
volumeInstance = conn.GetInstance(volume)
originalName = volumeInstance['ElementName']
volumeInstance = utils.rename_volume(conn, volumeInstance, newName)
self.assertEqual(newName, volumeInstance['ElementName'])
volumeInstance = utils.rename_volume(
conn, volumeInstance, originalName)
self.assertEqual(originalName, volumeInstance['ElementName'])
def test_get_smi_version(self):
conn = self.fake_ecom_connection()
utils = self.driver.common.utils
version = utils.get_smi_version(conn)
expected = int(str(self.data.majorVersion)
+ str(self.data.minorVersion)
+ str(self.data.revNumber))
        self.assertEqual(expected, version)
def test_get_pool_name(self):
conn = self.fake_ecom_connection()
utils = self.driver.common.utils
poolInstanceName = {}
poolInstanceName['InstanceID'] = "SATA_GOLD1"
poolInstanceName['CreationClassName'] = 'Symm_VirtualProvisioningPool'
poolName = utils.get_pool_name(conn, poolInstanceName)
        self.assertEqual(self.data.poolname, poolName)
def test_get_meta_members_capacity_in_byte(self):
conn = self.fake_ecom_connection()
utils = self.driver.common.utils
memberVolumeInstanceNames = []
volumeHead = EMC_StorageVolume()
volumeHead.classname = 'Symm_StorageVolume'
blockSize = self.data.block_size
volumeHead['ConsumableBlocks'] = (
self.data.metaHead_volume['ConsumableBlocks'])
volumeHead['BlockSize'] = blockSize
volumeHead['DeviceID'] = self.data.metaHead_volume['DeviceID']
memberVolumeInstanceNames.append(volumeHead)
metaMember1 = EMC_StorageVolume()
metaMember1.classname = 'Symm_StorageVolume'
metaMember1['ConsumableBlocks'] = (
self.data.meta_volume1['ConsumableBlocks'])
metaMember1['BlockSize'] = blockSize
metaMember1['DeviceID'] = self.data.meta_volume1['DeviceID']
memberVolumeInstanceNames.append(metaMember1)
metaMember2 = EMC_StorageVolume()
metaMember2.classname = 'Symm_StorageVolume'
metaMember2['ConsumableBlocks'] = (
self.data.meta_volume2['ConsumableBlocks'])
metaMember2['BlockSize'] = blockSize
metaMember2['DeviceID'] = self.data.meta_volume2['DeviceID']
memberVolumeInstanceNames.append(metaMember2)
capacities = utils.get_meta_members_capacity_in_byte(
conn, memberVolumeInstanceNames)
headSize = (
volumeHead['ConsumableBlocks'] -
metaMember1['ConsumableBlocks'] -
metaMember2['ConsumableBlocks'])
expected = [headSize * blockSize,
metaMember1['ConsumableBlocks'] * blockSize,
metaMember2['ConsumableBlocks'] * blockSize]
        self.assertEqual(expected, capacities)
def test_get_composite_elements(self):
conn = self.fake_ecom_connection()
utils = self.driver.common.utils
volumeInstanceName = (
conn.EnumerateInstanceNames("EMC_StorageVolume")[0])
volumeInstance = conn.GetInstance(volumeInstanceName)
memberVolumeInstanceNames = utils.get_composite_elements(
conn, volumeInstance)
expected = [self.data.metaHead_volume,
self.data.meta_volume1,
self.data.meta_volume2]
        self.assertEqual(expected, memberVolumeInstanceNames)
def test_get_volume_model_updates(self):
utils = self.driver.common.utils
status = 'status-string'
volumes = utils.get_volume_model_updates(
None, self.driver.db.volume_get_all_by_group("", 5),
self.data.test_CG['id'],
status)
self.assertEqual(status, volumes[0]['status'])
@mock.patch.object(
emc_vmax_utils.EMCVMAXUtils,
'find_group_sync_rg_by_target',
return_value="")
@mock.patch.object(
emc_vmax_common.EMCVMAXCommon,
'_find_consistency_group',
return_value=(None, EMCVMAXCommonData.test_CG))
@mock.patch.object(
emc_vmax_common.EMCVMAXCommon,
'_get_pool_and_storage_system',
return_value=(None, EMCVMAXCommonData.storage_system))
@mock.patch.object(
volume_types,
'get_volume_type_extra_specs',
return_value={'volume_backend_name': 'ISCSINoFAST'})
def test_create_consistencygroup_from_src(
self, _mock_volume_type, _mock_storage, _mock_cg, _mock_rg):
volumes = []
volumes.append(self.data.test_source_volume)
snapshots = []
self.data.test_snapshot['volume_size'] = "10"
snapshots.append(self.data.test_snapshot)
model_update, volumes_model_update = (
self.driver.create_consistencygroup_from_src(
self.data.test_ctxt, self.data.test_CG, volumes,
self.data.test_CG_snapshot, snapshots))
self.assertEqual({'status': fields.ConsistencyGroupStatus.AVAILABLE},
model_update)
self.assertEqual([{'status': 'available', 'id': '2'}],
volumes_model_update)
@mock.patch.object(
emc_vmax_common.EMCVMAXCommon,
'_update_pool_stats',
return_value={1, 2, 3})
def test_ssl_support(self, pool_stats):
self.driver.common.update_volume_stats()
self.assertTrue(self.driver.common.ecomUseSSL)
def _cleanup(self):
        if self.config_file_path and os.path.exists(self.config_file_path):
            os.remove(self.config_file_path)
shutil.rmtree(self.tempdir)
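# Tests for the VMAX iSCSI driver with a FAST policy configured
# (backend 'ISCSIFAST').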
class EMCVMAXISCSIDriverFastTestCase(test.TestCase):
def setUp(self):
self.data = EMCVMAXCommonData()
self.tempdir = tempfile.mkdtemp()
super(EMCVMAXISCSIDriverFastTestCase, self).setUp()
self.config_file_path = None
self.create_fake_config_file_fast()
self.addCleanup(self._cleanup)
configuration = mock.Mock()
configuration.cinder_emc_config_file = self.config_file_path
configuration.safe_get.return_value = 'ISCSIFAST'
configuration.config_group = 'ISCSIFAST'
self.stubs.Set(emc_vmax_iscsi.EMCVMAXISCSIDriver,
'smis_do_iscsi_discovery',
self.fake_do_iscsi_discovery)
self.stubs.Set(emc_vmax_common.EMCVMAXCommon, '_get_ecom_connection',
self.fake_ecom_connection)
instancename = FakeCIMInstanceName()
self.stubs.Set(emc_vmax_utils.EMCVMAXUtils, 'get_instance_name',
instancename.fake_getinstancename)
self.stubs.Set(time, 'sleep',
self.fake_sleep)
self.stubs.Set(emc_vmax_utils.EMCVMAXUtils, 'isArrayV3',
self.fake_is_v3)
driver = emc_vmax_iscsi.EMCVMAXISCSIDriver(configuration=configuration)
driver.db = FakeDB()
self.driver = driver
def create_fake_config_file_fast(self):
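        # Build a minimal cinder_emc_config XML file containing the
        # array serial number, FAST policy, ECOM server details and
        # credentials, timeout, pool and port group.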
doc = minidom.Document()
emc = doc.createElement("EMC")
doc.appendChild(emc)
array = doc.createElement("Array")
arraytext = doc.createTextNode("1234567891011")
emc.appendChild(array)
array.appendChild(arraytext)
fastPolicy = doc.createElement("FastPolicy")
fastPolicyText = doc.createTextNode("GOLD1")
emc.appendChild(fastPolicy)
fastPolicy.appendChild(fastPolicyText)
ecomserverip = doc.createElement("EcomServerIp")
ecomserveriptext = doc.createTextNode("1.1.1.1")
emc.appendChild(ecomserverip)
ecomserverip.appendChild(ecomserveriptext)
ecomserverport = doc.createElement("EcomServerPort")
ecomserverporttext = doc.createTextNode("10")
emc.appendChild(ecomserverport)
ecomserverport.appendChild(ecomserverporttext)
ecomusername = doc.createElement("EcomUserName")
ecomusernametext = doc.createTextNode("user")
emc.appendChild(ecomusername)
ecomusername.appendChild(ecomusernametext)
ecompassword = doc.createElement("EcomPassword")
ecompasswordtext = doc.createTextNode("pass")
emc.appendChild(ecompassword)
ecompassword.appendChild(ecompasswordtext)
timeout = doc.createElement("Timeout")
timeouttext = doc.createTextNode("0")
emc.appendChild(timeout)
timeout.appendChild(timeouttext)
portgroup = doc.createElement("PortGroup")
portgrouptext = doc.createTextNode(self.data.port_group)
portgroup.appendChild(portgrouptext)
pool = doc.createElement("Pool")
pooltext = doc.createTextNode("gold")
emc.appendChild(pool)
pool.appendChild(pooltext)
portgroups = doc.createElement("PortGroups")
portgroups.appendChild(portgroup)
emc.appendChild(portgroups)
filename = 'cinder_emc_config_ISCSIFAST.xml'
self.config_file_path = self.tempdir + '/' + filename
        with open(self.config_file_path, 'w') as f:
            doc.writexml(f)
def fake_ecom_connection(self):
conn = FakeEcomConnection()
return conn
def fake_do_iscsi_discovery(self, volume):
output = []
item = '10.10.0.50: 3260,1 iqn.1992-04.com.emc: 50000973f006dd80'
output.append(item)
return output
def fake_sleep(self, seconds):
return
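    # Pin isArrayV3 to False so these tests exercise the V2 code path.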
def fake_is_v3(self, conn, serialNumber):
return False
@mock.patch.object(
emc_vmax_fast.EMCVMAXFast,
'get_capacities_associated_to_policy',
return_value=(1234, 1200))
@mock.patch.object(
emc_vmax_fast.EMCVMAXFast,
'get_tier_policy_by_name',
return_value=None)
@mock.patch.object(
emc_vmax_fast.EMCVMAXFast,
'is_tiering_policy_enabled',
return_value=True)
@mock.patch.object(
emc_vmax_utils.EMCVMAXUtils,
'find_storageSystem',
return_value=None)
def test_get_volume_stats_fast(self,
mock_storage_system,
mock_is_fast_enabled,
mock_get_policy,
mock_capacity):
self.driver.get_volume_stats(True)
@mock.patch.object(
emc_vmax_fast.EMCVMAXFast,
'get_pool_associated_to_policy',
return_value=1)
@mock.patch.object(
emc_vmax_common.EMCVMAXCommon,
'_get_pool_and_storage_system',
return_value=(None, EMCVMAXCommonData.storage_system))
@mock.patch.object(
volume_types,
'get_volume_type_extra_specs',
return_value={'volume_backend_name': 'ISCSIFAST'})
def test_create_volume_fast_success(
self, _mock_volume_type, mock_storage_system, mock_pool_policy):
self.driver.create_volume(self.data.test_volume_v2)
@mock.patch.object(
emc_vmax_fast.EMCVMAXFast,
'get_pool_associated_to_policy',
return_value=1)
@mock.patch.object(
emc_vmax_common.EMCVMAXCommon,
'_get_pool_and_storage_system',
return_value=(None, EMCVMAXCommonData.storage_system))
@mock.patch.object(
volume_types,
'get_volume_type_extra_specs',
return_value={'storagetype: stripedmetacount': '4',
'volume_backend_name': 'ISCSIFAST'})
def test_create_volume_fast_striped_success(
self, _mock_volume_type, mock_storage_system, mock_pool_policy):
self.driver.create_volume(self.data.test_volume_v2)
@mock.patch.object(
emc_vmax_fast.EMCVMAXFast,
'get_pool_associated_to_policy',
return_value=1)
@mock.patch.object(
emc_vmax_common.EMCVMAXCommon,
'_get_pool_and_storage_system',
return_value=(None, EMCVMAXCommonData.storage_system))
@mock.patch.object(
volume_types,
'get_volume_type_extra_specs',
return_value={'volume_backend_name': 'ISCSIFAST'})
def test_create_volume_in_CG_fast_success(
self, _mock_volume_type, mock_storage_system, mock_pool_policy):
self.driver.create_volume(self.data.test_volume_CG)
@mock.patch.object(
emc_vmax_masking.EMCVMAXMasking,
'_wrap_get_storage_group_from_volume',
return_value=None)
@mock.patch.object(
volume_types,
'get_volume_type_extra_specs',
return_value={'volume_backend_name': 'ISCSIFAST'})
def test_delete_volume_fast_success(
self, _mock_volume_type, mock_storage_group):
self.driver.delete_volume(self.data.test_volume)
def test_create_volume_fast_failed(self):
self.assertRaises(exception.VolumeBackendAPIException,
self.driver.create_volume,
self.data.test_failed_volume)
@mock.patch.object(
emc_vmax_masking.EMCVMAXMasking,
'_wrap_get_storage_group_from_volume',
return_value=None)
@mock.patch.object(
volume_types,
'get_volume_type_extra_specs',
return_value={'volume_backend_name': 'ISCSIFAST'})
def test_delete_volume_fast_notfound(
self, _mock_volume_type, mock_wrapper):
notfound_delete_vol = {}
notfound_delete_vol['name'] = 'notfound_delete_vol'
notfound_delete_vol['id'] = '10'
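        # 'Symmm_StorageVolume' is deliberately misspelled so the fake
        # ECOM connection cannot find the volume.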
notfound_delete_vol['CreationClassName'] = 'Symmm_StorageVolume'
notfound_delete_vol['SystemName'] = self.data.storage_system
notfound_delete_vol['DeviceID'] = notfound_delete_vol['id']
notfound_delete_vol['SystemCreationClassName'] = 'Symm_StorageSystem'
notfound_delete_vol['host'] = self.data.fake_host
name = {}
name['classname'] = 'Symm_StorageVolume'
keys = {}
keys['CreationClassName'] = notfound_delete_vol['CreationClassName']
keys['SystemName'] = notfound_delete_vol['SystemName']
keys['DeviceID'] = notfound_delete_vol['DeviceID']
keys['SystemCreationClassName'] = (
notfound_delete_vol['SystemCreationClassName'])
name['keybindings'] = keys
notfound_delete_vol['volume_type_id'] = 'abc'
notfound_delete_vol['provider_location'] = None
self.driver.delete_volume(notfound_delete_vol)
@mock.patch.object(
emc_vmax_utils.EMCVMAXUtils,
'wait_for_job_complete',
return_value=(-1, 'error'))
@mock.patch.object(
emc_vmax_fast.EMCVMAXFast,
'get_pool_associated_to_policy',
return_value=1)
@mock.patch.object(
emc_vmax_common.EMCVMAXCommon,
'_get_pool_and_storage_system',
return_value=(None, EMCVMAXCommonData.storage_system))
@mock.patch.object(
emc_vmax_masking.EMCVMAXMasking,
'_wrap_get_storage_group_from_volume',
return_value=None)
@mock.patch.object(
volume_types,
'get_volume_type_extra_specs',
return_value={'volume_backend_name': 'ISCSIFAST'})
def test_delete_volume_fast_failed(
self, _mock_volume_type, _mock_storage_group,
mock_storage_system, mock_policy_pool, mock_wait):
self.driver.create_volume(self.data.failed_delete_vol)
self.assertRaises(exception.VolumeBackendAPIException,
self.driver.delete_volume,
self.data.failed_delete_vol)
@mock.patch.object(
emc_vmax_common.EMCVMAXCommon,
'_is_same_host',
return_value=True)
@mock.patch.object(
emc_vmax_common.EMCVMAXCommon,
'find_device_number',
return_value={'hostlunid': 1,
'storagesystem': EMCVMAXCommonData.storage_system})
@mock.patch.object(
emc_vmax_masking.EMCVMAXMasking,
'_wrap_get_storage_group_from_volume',
return_value=None)
@mock.patch.object(
volume_types,
'get_volume_type_extra_specs',
return_value={'volume_backend_name': 'ISCSIFAST'})
def test_already_mapped_fast_success(
self, _mock_volume_type, mock_wrap_group, mock_wrap_device,
mock_is_same_host):
self.driver.common._get_correct_port_group = mock.Mock(
return_value=self.data.port_group)
self.driver.initialize_connection(self.data.test_volume,
self.data.connector)
@mock.patch.object(
emc_vmax_common.EMCVMAXCommon,
'find_device_number',
return_value={'storagesystem': EMCVMAXCommonData.storage_system})
@mock.patch.object(
emc_vmax_masking.EMCVMAXMasking,
'_wrap_get_storage_group_from_volume',
return_value=None)
def test_map_fast_failed(self, mock_wrap_group, mock_wrap_device):
self.assertRaises(exception.VolumeBackendAPIException,
self.driver.initialize_connection,
self.data.test_volume,
self.data.connector)
@mock.patch.object(
emc_vmax_masking.EMCVMAXMasking,
'get_initiator_group_from_masking_view',
return_value='myInitGroup')
@mock.patch.object(
emc_vmax_masking.EMCVMAXMasking,
'_find_initiator_masking_group',
return_value='myInitGroup')
@mock.patch.object(
emc_vmax_utils.EMCVMAXUtils,
'find_storage_masking_group',
return_value=EMCVMAXCommonData.storagegroupname)
@mock.patch.object(
volume_types,
'get_volume_type_extra_specs',
return_value={'volume_backend_name': 'ISCSIFAST'})
def test_detach_fast_success(
self, mock_volume_type, mock_storage_group,
mock_ig, mock_igc):
self.driver.terminate_connection(
self.data.test_volume, self.data.connector)
@mock.patch.object(
emc_vmax_utils.EMCVMAXUtils,
'get_volume_size',
return_value='2147483648')
@mock.patch.object(
volume_types,
'get_volume_type_extra_specs',
return_value={'volume_backend_name': 'ISCSIFAST'})
def test_extend_volume_fast_success(
self, _mock_volume_type, mock_volume_size):
newSize = '2'
self.driver.extend_volume(self.data.test_volume, newSize)
@mock.patch.object(
emc_vmax_utils.EMCVMAXUtils,
'check_if_volume_is_extendable',
return_value='False')
@mock.patch.object(
volume_types,
'get_volume_type_extra_specs',
return_value={'volume_backend_name': 'ISCSIFAST'})
def test_extend_volume_striped_fast_failed(
self, _mock_volume_type, _mock_is_extendable):
newSize = '2'
self.assertRaises(exception.VolumeBackendAPIException,
self.driver.extend_volume,
self.data.test_volume,
newSize)
@mock.patch.object(
emc_vmax_fast.EMCVMAXFast,
'get_pool_associated_to_policy',
return_value=1)
@mock.patch.object(
emc_vmax_common.EMCVMAXCommon,
'_get_pool_and_storage_system',
return_value=(None, EMCVMAXCommonData.storage_system))
@mock.patch.object(
emc_vmax_utils.EMCVMAXUtils,
'get_meta_members_capacity_in_byte',
return_value=[1234567, 7654321])
@mock.patch.object(
emc_vmax_utils.EMCVMAXUtils,
'get_volume_meta_head',
return_value=[EMCVMAXCommonData.test_volume])
@mock.patch.object(
FakeDB,
'volume_get',
return_value=EMCVMAXCommonData.test_source_volume)
@mock.patch.object(
volume_types,
'get_volume_type_extra_specs',
return_value={'volume_backend_name': 'ISCSIFAST'})
def test_create_snapshot_different_sizes_meta_fast_success(
self, mock_volume_type, mock_volume,
mock_meta, mock_size, mock_pool, mock_policy):
self.data.test_volume['volume_name'] = "vmax-1234567"
common = self.driver.common
volumeDict = {'classname': u'Symm_StorageVolume',
'keybindings': EMCVMAXCommonData.keybindings}
common.provision.create_volume_from_pool = (
mock.Mock(return_value=(volumeDict, 0)))
common.provision.get_volume_dict_from_job = (
mock.Mock(return_value=volumeDict))
common.fast.is_volume_in_default_SG = (
mock.Mock(return_value=True))
self.driver.create_snapshot(self.data.test_volume)
def test_create_snapshot_fast_failed(self):
self.data.test_volume['volume_name'] = "vmax-1234567"
self.assertRaises(exception.VolumeBackendAPIException,
self.driver.create_snapshot,
self.data.test_volume)
@mock.patch.object(
emc_vmax_utils.EMCVMAXUtils,
'get_meta_members_capacity_in_byte',
return_value=[1234567])
@mock.patch.object(
emc_vmax_utils.EMCVMAXUtils,
'get_volume_meta_head',
return_value=[EMCVMAXCommonData.test_volume])
@mock.patch.object(
emc_vmax_utils.EMCVMAXUtils,
'find_sync_sv_by_target',
return_value=(None, None))
@mock.patch.object(
volume_types,
'get_volume_type_extra_specs',
return_value={'volume_backend_name': 'ISCSIFAST'})
def test_create_volume_from_same_size_meta_snapshot(
self, mock_volume_type, mock_sync_sv, mock_meta, mock_size):
self.data.test_volume['volume_name'] = "vmax-1234567"
common = self.driver.common
common.fast.is_volume_in_default_SG = mock.Mock(return_value=True)
self.driver.create_volume_from_snapshot(
self.data.test_volume, self.data.test_volume)
@mock.patch.object(
emc_vmax_utils.EMCVMAXUtils,
'find_sync_sv_by_target',
return_value=(None, None))
@mock.patch.object(
emc_vmax_utils.EMCVMAXUtils,
'find_replication_service',
return_value=None)
@mock.patch.object(
volume_types,
'get_volume_type_extra_specs',
return_value={'volume_backend_name': 'ISCSIFAST',
'FASTPOLICY': 'FC_GOLD1'})
def test_create_volume_from_snapshot_fast_failed(
self, mock_volume_type,
mock_rep_service, mock_sync_sv):
self.data.test_volume['volume_name'] = "vmax-1234567"
self.assertRaises(exception.VolumeBackendAPIException,
self.driver.create_volume_from_snapshot,
self.data.test_volume,
EMCVMAXCommonData.test_source_volume)
@mock.patch.object(
emc_vmax_common.EMCVMAXCommon,
'_get_pool_and_storage_system',
return_value=(None, EMCVMAXCommonData.storage_system))
@mock.patch.object(
emc_vmax_utils.EMCVMAXUtils,
'get_meta_members_capacity_in_byte',
return_value=[1234567, 7654321])
@mock.patch.object(
emc_vmax_utils.EMCVMAXUtils,
'get_volume_meta_head',
return_value=[EMCVMAXCommonData.test_volume])
@mock.patch.object(
emc_vmax_fast.EMCVMAXFast,
'get_pool_associated_to_policy',
return_value=1)
@mock.patch.object(
FakeDB,
'volume_get',
return_value=EMCVMAXCommonData.test_source_volume)
@mock.patch.object(
volume_types,
'get_volume_type_extra_specs',
return_value={'volume_backend_name': 'ISCSIFAST'})
def test_create_clone_fast_failed(
self, mock_volume_type, mock_vol,
mock_policy, mock_meta, mock_size, mock_pool):
self.data.test_volume['volume_name'] = "vmax-1234567"
self.driver.common._modify_and_get_composite_volume_instance = (
mock.Mock(return_value=(1, None)))
self.assertRaises(exception.VolumeBackendAPIException,
self.driver.create_cloned_volume,
self.data.test_volume,
EMCVMAXCommonData.test_source_volume)
@mock.patch.object(
volume_types,
'get_volume_type_extra_specs',
return_value={'volume_backend_name': 'ISCSIFAST'})
def test_migrate_volume_fast_success(self, _mock_volume_type):
self.driver.migrate_volume(self.data.test_ctxt, self.data.test_volume,
self.data.test_host)
@mock.patch.object(
emc_vmax_masking.EMCVMAXMasking,
'_wrap_get_storage_group_from_volume',
return_value=None)
@mock.patch.object(
emc_vmax_utils.EMCVMAXUtils,
'parse_pool_instance_id',
return_value=('silver', 'SYMMETRIX+000195900551'))
@mock.patch.object(
volume_types,
'get_volume_type_extra_specs',
return_value={'volume_backend_name': 'ISCSIFAST'})
def test_retype_volume_fast_success(
self, _mock_volume_type, mock_values, mock_wrap):
self.driver.retype(
self.data.test_ctxt, self.data.test_volume, self.data.new_type,
self.data.diff, self.data.test_host)
@mock.patch.object(
emc_vmax_common.EMCVMAXCommon,
'_get_pool_and_storage_system',
return_value=(None, EMCVMAXCommonData.storage_system))
@mock.patch.object(
volume_types,
'get_volume_type_extra_specs',
return_value={'volume_backend_name': 'ISCSIFAST'})
def test_create_CG_fast_success(
self, _mock_volume_type, _mock_storage_system):
self.driver.create_consistencygroup(
self.data.test_ctxt, self.data.test_CG)
@mock.patch.object(
emc_vmax_common.EMCVMAXCommon,
'_get_members_of_replication_group',
return_value=None)
@mock.patch.object(
FakeDB,
'volume_get_all_by_group',
return_value=None)
@mock.patch.object(
emc_vmax_common.EMCVMAXCommon,
'_get_pool_and_storage_system',
return_value=(None, EMCVMAXCommonData.storage_system))
@mock.patch.object(
volume_types,
'get_volume_type_extra_specs',
return_value={'volume_backend_name': 'ISCSIFAST'})
def test_delete_CG_no_volumes_fast_success(
self, _mock_volume_type, _mock_storage_system,
_mock_db_volumes, _mock_members):
self.driver.delete_consistencygroup(
self.data.test_ctxt, self.data.test_CG, [])
@mock.patch.object(
emc_vmax_common.EMCVMAXCommon,
'_get_pool_and_storage_system',
return_value=(None, EMCVMAXCommonData.storage_system))
@mock.patch.object(
volume_types,
'get_volume_type_extra_specs',
return_value={'volume_backend_name': 'ISCSIFAST'})
def test_delete_CG_with_volumes_fast_success(
self, _mock_volume_type, _mock_storage_system):
self.driver.delete_consistencygroup(
self.data.test_ctxt, self.data.test_CG, [])
@mock.patch.object(
emc_vmax_utils.EMCVMAXUtils,
'find_group_sync_rg_by_target',
return_value="")
@mock.patch.object(
emc_vmax_common.EMCVMAXCommon,
'_get_members_of_replication_group',
return_value=())
@mock.patch.object(
emc_vmax_common.EMCVMAXCommon,
'_find_consistency_group',
return_value=(None, EMCVMAXCommonData.test_CG))
@mock.patch.object(
emc_vmax_common.EMCVMAXCommon,
'_get_pool_and_storage_system',
return_value=(None, EMCVMAXCommonData.storage_system))
@mock.patch.object(
volume_types,
'get_volume_type_extra_specs',
return_value={'volume_backend_name': 'ISCSIFAST'})
    def test_create_snapshot_for_CG_fast_success(
self, _mock_volume_type, _mock_storage, _mock_cg, _mock_members,
_mock_rg):
self.driver.create_cgsnapshot(
self.data.test_ctxt, self.data.test_CG_snapshot, [])
@mock.patch.object(
emc_vmax_common.EMCVMAXCommon,
'_get_pool_and_storage_system',
return_value=(None, EMCVMAXCommonData.storage_system))
@mock.patch.object(
volume_types,
'get_volume_type_extra_specs',
return_value={'volume_backend_name': 'ISCSIFAST'})
    def test_delete_snapshot_for_CG_fast_success(
self, _mock_volume_type, _mock_storage):
self.driver.delete_cgsnapshot(
self.data.test_ctxt, self.data.test_CG_snapshot, [])
@mock.patch.object(
emc_vmax_common.EMCVMAXCommon,
'_get_pool_and_storage_system',
return_value=(None, EMCVMAXCommonData.storage_system))
@mock.patch.object(
volume_types,
'get_volume_type_extra_specs',
return_value={'volume_backend_name': 'ISCSIFAST'})
def test_update_CG_add_volume_fast_success(
self, _mock_volume_type, _mock_storage_system):
add_volumes = []
add_volumes.append(self.data.test_source_volume)
remove_volumes = None
self.driver.update_consistencygroup(
self.data.test_ctxt, self.data.test_CG,
add_volumes, remove_volumes)
# Multiple volumes
add_volumes.append(self.data.test_source_volume)
self.driver.update_consistencygroup(
self.data.test_ctxt, self.data.test_CG,
add_volumes, remove_volumes)
@mock.patch.object(
emc_vmax_common.EMCVMAXCommon,
'_get_pool_and_storage_system',
return_value=(None, EMCVMAXCommonData.storage_system))
@mock.patch.object(
volume_types,
'get_volume_type_extra_specs',
return_value={'volume_backend_name': 'ISCSIFAST'})
def test_update_CG_remove_volume_fast_success(
self, _mock_volume_type, _mock_storage_system):
remove_volumes = []
remove_volumes.append(self.data.test_source_volume)
add_volumes = None
self.driver.update_consistencygroup(
self.data.test_ctxt, self.data.test_CG,
add_volumes, remove_volumes)
# Multiple volumes
remove_volumes.append(self.data.test_source_volume)
self.driver.update_consistencygroup(
self.data.test_ctxt, self.data.test_CG,
add_volumes, remove_volumes)
def _cleanup(self):
        if os.path.exists(self.config_file_path):
            os.remove(self.config_file_path)
shutil.rmtree(self.tempdir)
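# Tests for the VMAX FC driver without FAST (backend 'FCNoFAST').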
class EMCVMAXFCDriverNoFastTestCase(test.TestCase):
def setUp(self):
self.data = EMCVMAXCommonData()
self.tempdir = tempfile.mkdtemp()
super(EMCVMAXFCDriverNoFastTestCase, self).setUp()
self.config_file_path = None
self.create_fake_config_file_no_fast()
self.addCleanup(self._cleanup)
configuration = mock.Mock()
configuration.cinder_emc_config_file = self.config_file_path
configuration.safe_get.return_value = 'FCNoFAST'
configuration.config_group = 'FCNoFAST'
self.stubs.Set(emc_vmax_common.EMCVMAXCommon, '_get_ecom_connection',
self.fake_ecom_connection)
instancename = FakeCIMInstanceName()
self.stubs.Set(emc_vmax_utils.EMCVMAXUtils, 'get_instance_name',
instancename.fake_getinstancename)
self.stubs.Set(time, 'sleep',
self.fake_sleep)
self.stubs.Set(emc_vmax_utils.EMCVMAXUtils, 'isArrayV3',
self.fake_is_v3)
driver = emc_vmax_fc.EMCVMAXFCDriver(configuration=configuration)
driver.db = FakeDB()
driver.common.conn = FakeEcomConnection()
driver.zonemanager_lookup_service = FakeLookupService()
self.driver = driver
self.driver.utils = emc_vmax_utils.EMCVMAXUtils(object)
def create_fake_config_file_no_fast(self):
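        # Same layout as the FAST config above, but with no FastPolicy
        # element.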
doc = minidom.Document()
emc = doc.createElement("EMC")
doc.appendChild(emc)
array = doc.createElement("Array")
arraytext = doc.createTextNode("1234567891011")
emc.appendChild(array)
array.appendChild(arraytext)
ecomserverip = doc.createElement("EcomServerIp")
ecomserveriptext = doc.createTextNode("1.1.1.1")
emc.appendChild(ecomserverip)
ecomserverip.appendChild(ecomserveriptext)
ecomserverport = doc.createElement("EcomServerPort")
ecomserverporttext = doc.createTextNode("10")
emc.appendChild(ecomserverport)
ecomserverport.appendChild(ecomserverporttext)
ecomusername = doc.createElement("EcomUserName")
ecomusernametext = doc.createTextNode("user")
emc.appendChild(ecomusername)
ecomusername.appendChild(ecomusernametext)
ecompassword = doc.createElement("EcomPassword")
ecompasswordtext = doc.createTextNode("pass")
emc.appendChild(ecompassword)
ecompassword.appendChild(ecompasswordtext)
portgroup = doc.createElement("PortGroup")
portgrouptext = doc.createTextNode(self.data.port_group)
portgroup.appendChild(portgrouptext)
portgroups = doc.createElement("PortGroups")
portgroups.appendChild(portgroup)
emc.appendChild(portgroups)
pool = doc.createElement("Pool")
pooltext = doc.createTextNode("gold")
emc.appendChild(pool)
pool.appendChild(pooltext)
timeout = doc.createElement("Timeout")
timeouttext = doc.createTextNode("0")
emc.appendChild(timeout)
timeout.appendChild(timeouttext)
filename = 'cinder_emc_config_FCNoFAST.xml'
self.config_file_path = self.tempdir + '/' + filename
        with open(self.config_file_path, 'w') as f:
            doc.writexml(f)
def fake_ecom_connection(self):
conn = FakeEcomConnection()
return conn
def fake_sleep(self, seconds):
return
def fake_is_v3(self, conn, serialNumber):
return False
@mock.patch.object(
emc_vmax_utils.EMCVMAXUtils,
'get_pool_capacities',
return_value=(1234, 1200))
@mock.patch.object(
emc_vmax_fast.EMCVMAXFast,
'is_tiering_policy_enabled',
return_value=False)
@mock.patch.object(
emc_vmax_utils.EMCVMAXUtils,
'find_storageSystem',
return_value=None)
def test_get_volume_stats_no_fast(self,
mock_storage_system,
mock_is_fast_enabled,
mock_capacity):
self.driver.get_volume_stats(True)
@mock.patch.object(
emc_vmax_common.EMCVMAXCommon,
'_get_pool_and_storage_system',
return_value=(None, EMCVMAXCommonData.storage_system))
@mock.patch.object(
volume_types,
'get_volume_type_extra_specs',
return_value={'volume_backend_name': 'FCNoFAST'})
def test_create_volume_no_fast_success(
self, _mock_volume_type, mock_storage_system):
self.driver.create_volume(self.data.test_volume_v2)
@mock.patch.object(
emc_vmax_common.EMCVMAXCommon,
'_get_pool_and_storage_system',
return_value=(None, EMCVMAXCommonData.storage_system))
@mock.patch.object(
volume_types,
'get_volume_type_extra_specs',
return_value={'storagetype: stripedmetacount': '4',
'volume_backend_name': 'FCNoFAST'})
def test_create_volume_no_fast_striped_success(
self, _mock_volume_type, mock_storage_system):
self.driver.create_volume(self.data.test_volume_v2)
@mock.patch.object(
emc_vmax_common.EMCVMAXCommon,
'_get_pool_and_storage_system',
return_value=(None, EMCVMAXCommonData.storage_system))
@mock.patch.object(
volume_types,
'get_volume_type_extra_specs',
return_value={'volume_backend_name': 'FCNoFAST'})
def test_create_volume_in_CG_no_fast_success(
self, _mock_volume_type, mock_storage_system):
self.driver.create_volume(self.data.test_volume_CG)
@mock.patch.object(
emc_vmax_common.EMCVMAXCommon,
'_get_pool_and_storage_system',
return_value=(None, EMCVMAXCommonData.storage_system))
@mock.patch.object(
volume_types,
'get_volume_type_extra_specs',
return_value={'volume_backend_name': 'FCNoFAST'})
def test_delete_volume_no_fast_success(
self, _mock_volume_type, mock_storage_system):
self.driver.delete_volume(self.data.test_volume)
def test_create_volume_no_fast_failed(self):
self.assertRaises(exception.VolumeBackendAPIException,
self.driver.create_volume,
self.data.test_failed_volume)
@mock.patch.object(
volume_types,
'get_volume_type_extra_specs',
return_value={'volume_backend_name': 'FCNoFAST'})
def test_delete_volume_no_fast_notfound(self, _mock_volume_type):
notfound_delete_vol = {}
notfound_delete_vol['name'] = 'notfound_delete_vol'
notfound_delete_vol['id'] = '10'
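        # 'Symmm_StorageVolume' is deliberately misspelled so the fake
        # ECOM connection cannot find the volume.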
notfound_delete_vol['CreationClassName'] = 'Symmm_StorageVolume'
notfound_delete_vol['SystemName'] = self.data.storage_system
notfound_delete_vol['DeviceID'] = notfound_delete_vol['id']
notfound_delete_vol['SystemCreationClassName'] = 'Symm_StorageSystem'
notfound_delete_vol['host'] = self.data.fake_host
name = {}
name['classname'] = 'Symm_StorageVolume'
keys = {}
keys['CreationClassName'] = notfound_delete_vol['CreationClassName']
keys['SystemName'] = notfound_delete_vol['SystemName']
keys['DeviceID'] = notfound_delete_vol['DeviceID']
keys['SystemCreationClassName'] = (
notfound_delete_vol['SystemCreationClassName'])
name['keybindings'] = keys
notfound_delete_vol['volume_type_id'] = 'abc'
notfound_delete_vol['provider_location'] = None
self.driver.delete_volume(notfound_delete_vol)
@mock.patch.object(
emc_vmax_utils.EMCVMAXUtils,
'wait_for_job_complete',
return_value=(-1, 'error'))
@mock.patch.object(
emc_vmax_common.EMCVMAXCommon,
'_get_pool_and_storage_system',
return_value=(None, EMCVMAXCommonData.storage_system))
@mock.patch.object(
volume_types,
'get_volume_type_extra_specs',
return_value={'volume_backend_name': 'FCNoFAST'})
def test_delete_volume_failed(
self, _mock_volume_type, mock_storage_system, mock_wait):
self.driver.create_volume(self.data.failed_delete_vol)
self.assertRaises(exception.VolumeBackendAPIException,
self.driver.delete_volume,
self.data.failed_delete_vol)
@mock.patch.object(
emc_vmax_common.EMCVMAXCommon,
'_is_same_host',
return_value=True)
@mock.patch.object(
emc_vmax_masking.EMCVMAXMasking,
'get_masking_view_from_storage_group',
return_value=EMCVMAXCommonData.lunmaskctrl_name)
@mock.patch.object(
volume_types,
'get_volume_type_extra_specs',
return_value={'volume_backend_name': 'FCNoFAST',
'FASTPOLICY': 'FC_GOLD1'})
def test_map_lookup_service_no_fast_success(
self, _mock_volume_type, mock_maskingview, mock_is_same_host):
self.data.test_volume['volume_name'] = "vmax-1234567"
common = self.driver.common
common.get_target_wwns_from_masking_view = mock.Mock(
return_value=EMCVMAXCommonData.target_wwns)
common._get_correct_port_group = mock.Mock(
return_value=self.data.port_group)
lookup_service = self.driver.zonemanager_lookup_service
lookup_service.get_device_mapping_from_network = mock.Mock(
return_value=EMCVMAXCommonData.device_map)
data = self.driver.initialize_connection(self.data.test_volume,
self.data.connector)
common.get_target_wwns_from_masking_view.assert_called_once_with(
EMCVMAXCommonData.storage_system, self.data.test_volume,
EMCVMAXCommonData.connector)
lookup_service.get_device_mapping_from_network.assert_called_once_with(
EMCVMAXCommonData.connector['wwpns'],
EMCVMAXCommonData.target_wwns)
# Test the lookup service code path.
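        # The fake lookup service maps each initiator WWN to its
        # reversed string, so each initiator_target_map entry can be
        # verified directly against the reversed initiator.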
for init, target in data['data']['initiator_target_map'].items():
self.assertEqual(init, target[0][::-1])
@mock.patch.object(
emc_vmax_common.EMCVMAXCommon,
'find_device_number',
return_value={'Name': "0001"})
@mock.patch.object(
volume_types,
'get_volume_type_extra_specs',
return_value={'volume_backend_name': 'FCNoFAST',
'FASTPOLICY': 'FC_GOLD1'})
def test_map_no_fast_failed(self, _mock_volume_type, mock_wrap_device):
self.assertRaises(exception.VolumeBackendAPIException,
self.driver.initialize_connection,
self.data.test_volume,
self.data.connector)
@mock.patch.object(
emc_vmax_masking.EMCVMAXMasking,
'get_initiator_group_from_masking_view',
return_value='myInitGroup')
@mock.patch.object(
emc_vmax_masking.EMCVMAXMasking,
'_find_initiator_masking_group',
return_value='myInitGroup')
@mock.patch.object(
emc_vmax_masking.EMCVMAXMasking,
'get_masking_view_by_volume',
return_value=EMCVMAXCommonData.lunmaskctrl_name)
@mock.patch.object(
volume_types,
'get_volume_type_extra_specs',
return_value={'volume_backend_name': 'FCNoFAST'})
def test_detach_no_fast_last_volume_success(
self, mock_volume_type, mock_mv, mock_ig, mock_igc):
self.driver.terminate_connection(self.data.test_source_volume,
self.data.connector)
@mock.patch.object(
emc_vmax_utils.EMCVMAXUtils,
'get_volume_size',
return_value='2147483648')
@mock.patch.object(
volume_types,
'get_volume_type_extra_specs',
return_value={'volume_backend_name': 'FCNoFAST'})
def test_extend_volume_no_fast_success(self, _mock_volume_type,
_mock_volume_size):
newSize = '2'
self.driver.extend_volume(self.data.test_volume, newSize)
@mock.patch.object(
emc_vmax_utils.EMCVMAXUtils,
'check_if_volume_is_extendable',
return_value='False')
@mock.patch.object(
volume_types,
'get_volume_type_extra_specs',
return_value={'volume_backend_name': 'FCNoFAST'})
def test_extend_volume_striped_no_fast_failed(
self, _mock_volume_type, _mock_is_extendable):
newSize = '2'
self.assertRaises(exception.VolumeBackendAPIException,
self.driver.extend_volume,
self.data.test_volume,
newSize)
@mock.patch.object(
volume_types,
'get_volume_type_extra_specs',
return_value={'volume_backend_name': 'FCNoFAST'})
def test_migrate_volume_no_fast_success(self, _mock_volume_type):
self.driver.migrate_volume(self.data.test_ctxt, self.data.test_volume,
self.data.test_host)
@mock.patch.object(
emc_vmax_utils.EMCVMAXUtils,
'parse_pool_instance_id',
return_value=('silver', 'SYMMETRIX+000195900551'))
@mock.patch.object(
volume_types,
'get_volume_type_extra_specs',
return_value={'volume_backend_name': 'FCNoFAST'})
def test_retype_volume_no_fast_success(
self, _mock_volume_type, mock_values):
self.driver.retype(
self.data.test_ctxt, self.data.test_volume, self.data.new_type,
self.data.diff, self.data.test_host)
@mock.patch.object(
emc_vmax_common.EMCVMAXCommon,
'_get_pool_and_storage_system',
return_value=(None, EMCVMAXCommonData.storage_system))
@mock.patch.object(
volume_types,
'get_volume_type_extra_specs',
return_value={'volume_backend_name': 'FCNoFAST'})
def test_create_CG_no_fast_success(
self, _mock_volume_type, _mock_storage_system):
self.driver.create_consistencygroup(
self.data.test_ctxt, self.data.test_CG)
@mock.patch.object(
emc_vmax_common.EMCVMAXCommon,
'_get_members_of_replication_group',
return_value=None)
@mock.patch.object(
FakeDB,
'volume_get_all_by_group',
return_value=None)
@mock.patch.object(
emc_vmax_common.EMCVMAXCommon,
'_get_pool_and_storage_system',
return_value=(None, EMCVMAXCommonData.storage_system))
@mock.patch.object(
volume_types,
'get_volume_type_extra_specs',
return_value={'volume_backend_name': 'FCNoFAST'})
def test_delete_CG_no_volumes_no_fast_success(
self, _mock_volume_type, _mock_storage_system,
_mock_db_volumes, _mock_members):
self.driver.delete_consistencygroup(
self.data.test_ctxt, self.data.test_CG, [])
@mock.patch.object(
emc_vmax_common.EMCVMAXCommon,
'_get_pool_and_storage_system',
return_value=(None, EMCVMAXCommonData.storage_system))
@mock.patch.object(
volume_types,
'get_volume_type_extra_specs',
return_value={'volume_backend_name': 'FCNoFAST'})
def test_delete_CG_with_volumes_no_fast_success(
self, _mock_volume_type, _mock_storage_system):
self.driver.delete_consistencygroup(
self.data.test_ctxt, self.data.test_CG, [])
@mock.patch.object(
emc_vmax_utils.EMCVMAXUtils,
'find_group_sync_rg_by_target',
return_value="")
@mock.patch.object(
emc_vmax_common.EMCVMAXCommon,
'_get_members_of_replication_group',
return_value=())
@mock.patch.object(
emc_vmax_common.EMCVMAXCommon,
'_find_consistency_group',
return_value=(None, EMCVMAXCommonData.test_CG))
@mock.patch.object(
emc_vmax_common.EMCVMAXCommon,
'_get_pool_and_storage_system',
return_value=(None, EMCVMAXCommonData.storage_system))
@mock.patch.object(
volume_types,
'get_volume_type_extra_specs',
return_value={'volume_backend_name': 'FCNoFAST'})
def test_create_snapshot_for_CG_no_fast_success(
self, _mock_volume_type, _mock_storage, _mock_cg, _mock_members,
_mock_rg):
self.driver.create_cgsnapshot(
self.data.test_ctxt, self.data.test_CG_snapshot, [])
@mock.patch.object(
emc_vmax_common.EMCVMAXCommon,
'_get_pool_and_storage_system',
return_value=(None, EMCVMAXCommonData.storage_system))
@mock.patch.object(
volume_types,
'get_volume_type_extra_specs',
return_value={'volume_backend_name': 'FCNoFAST'})
def test_delete_snapshot_for_CG_no_fast_success(
self, _mock_volume_type, _mock_storage):
self.driver.delete_cgsnapshot(
self.data.test_ctxt, self.data.test_CG_snapshot, [])
def test_manage_existing_get_size(self):
volume = {}
metadata = {'key': 'array',
'value': '12345'}
volume['volume_metadata'] = [metadata]
external_ref = {'source-name': '0123'}
utils = self.driver.common.utils
gbSize = 2
utils.get_volume_size = mock.Mock(
return_value=gbSize * units.Gi)
volumeInstanceName = {'CreationClassName': "Symm_StorageVolume",
'DeviceID': "0123",
'SystemName': "12345"}
utils.find_volume_by_device_id_on_array = mock.Mock(
return_value=volumeInstanceName)
size = self.driver.manage_existing_get_size(volume, external_ref)
self.assertEqual(gbSize, size)
def test_manage_existing_no_fast_success(self):
volume = {}
metadata = {'key': 'array',
'value': '12345'}
poolInstanceName = {}
storageSystem = {}
poolInstanceName['InstanceID'] = "SATA_GOLD1"
storageSystem['InstanceID'] = "SYMMETRIX+00019870000"
volume['volume_metadata'] = [metadata]
volume['name'] = "test-volume"
external_ref = {'source-name': '0123'}
utils = self.driver.common.utils
gbSize = 2
utils.get_volume_size = mock.Mock(
return_value=gbSize * units.Gi)
utils.get_associated_replication_from_source_volume = mock.Mock(
return_value=None)
utils.get_assoc_pool_from_volume = mock.Mock(
return_value=(poolInstanceName))
vol = EMC_StorageVolume()
vol['CreationClassName'] = 'Symm_StorageVolume'
vol['ElementName'] = 'OS-' + volume['name']
vol['DeviceID'] = external_ref['source-name']
vol['SystemName'] = storageSystem['InstanceID']
vol['SystemCreationClassName'] = 'Symm_StorageSystem'
vol.path = vol
utils.rename_volume = mock.Mock(
return_value=vol)
common = self.driver.common
common._initial_setup = mock.Mock(
return_value={'volume_backend_name': 'FCNoFAST',
'storagetype:fastpolicy': None})
common._get_pool_and_storage_system = mock.Mock(
return_value=(poolInstanceName, storageSystem))
volumeInstanceName = {'CreationClassName': "Symm_StorageVolume",
'DeviceID': "0123",
'SystemName': "12345"}
utils.find_volume_by_device_id_on_array = mock.Mock(
return_value=volumeInstanceName)
masking = self.driver.common.masking
masking.get_masking_view_from_storage_group = mock.Mock(
return_value=None)
self.driver.manage_existing(volume, external_ref)
utils.rename_volume.assert_called_once_with(
common.conn, volumeInstanceName, volume['name'])
def test_unmanage_no_fast_success(self):
keybindings = {'CreationClassName': u'Symm_StorageVolume',
'SystemName': u'SYMMETRIX+000195900000',
'DeviceID': u'1',
'SystemCreationClassName': u'Symm_StorageSystem'}
provider_location = {'classname': 'Symm_StorageVolume',
'keybindings': keybindings}
volume = {'name': 'vol1',
'size': 1,
'id': '1',
'device_id': '1',
'provider_auth': None,
'project_id': 'project',
'display_name': 'vol1',
'display_description': 'test volume',
'volume_type_id': 'abc',
'provider_location': six.text_type(provider_location),
'status': 'available',
'host': self.data.fake_host,
'NumberOfBlocks': 100,
'BlockSize': self.data.block_size
}
common = self.driver.common
common._initial_setup = mock.Mock(
return_value={'volume_backend_name': 'FCNoFAST',
'storagetype:fastpolicy': None})
utils = self.driver.common.utils
utils.rename_volume = mock.Mock(return_value=None)
self.driver.unmanage(volume)
utils.rename_volume.assert_called_once_with(
common.conn, common._find_lun(volume), '1')
def test_unmanage_no_fast_failed(self):
keybindings = {'CreationClassName': u'Symm_StorageVolume',
'SystemName': u'SYMMETRIX+000195900000',
'DeviceID': u'999',
'SystemCreationClassName': u'Symm_StorageSystem'}
provider_location = {'classname': 'Symm_StorageVolume',
'keybindings': keybindings}
volume = {'name': 'NO_SUCH_VOLUME',
'size': 1,
'id': '999',
'device_id': '999',
'provider_auth': None,
'project_id': 'project',
'display_name': 'No such volume',
'display_description': 'volume not on the array',
'volume_type_id': 'abc',
'provider_location': six.text_type(provider_location),
'status': 'available',
'host': self.data.fake_host,
'NumberOfBlocks': 100,
'BlockSize': self.data.block_size
}
common = self.driver.common
common._initial_setup = mock.Mock(
return_value={'volume_backend_name': 'FCNoFAST',
'fastpolicy': None})
self.assertRaises(exception.VolumeBackendAPIException,
self.driver.unmanage,
volume)
    def _cleanup(self):
        if os.path.exists(self.config_file_path):
            os.remove(self.config_file_path)
        shutil.rmtree(self.tempdir)
class EMCVMAXFCDriverFastTestCase(test.TestCase):
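    """Tests for the VMAX FC driver with a FAST tiering policy configured."""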
def setUp(self):
self.data = EMCVMAXCommonData()
self.tempdir = tempfile.mkdtemp()
super(EMCVMAXFCDriverFastTestCase, self).setUp()
self.config_file_path = None
self.create_fake_config_file_fast()
self.addCleanup(self._cleanup)
configuration = mock.Mock()
configuration.cinder_emc_config_file = self.config_file_path
configuration.safe_get.return_value = 'FCFAST'
configuration.config_group = 'FCFAST'
self.stubs.Set(emc_vmax_common.EMCVMAXCommon, '_get_ecom_connection',
self.fake_ecom_connection)
instancename = FakeCIMInstanceName()
self.stubs.Set(emc_vmax_utils.EMCVMAXUtils, 'get_instance_name',
instancename.fake_getinstancename)
self.stubs.Set(time, 'sleep',
self.fake_sleep)
self.stubs.Set(emc_vmax_utils.EMCVMAXUtils, 'isArrayV3',
self.fake_is_v3)
driver = emc_vmax_fc.EMCVMAXFCDriver(configuration=configuration)
driver.db = FakeDB()
driver.common.conn = FakeEcomConnection()
driver.zonemanager_lookup_service = None
self.driver = driver
self.driver.utils = emc_vmax_utils.EMCVMAXUtils(object)
self.driver.masking = emc_vmax_masking.EMCVMAXMasking('FC')
def create_fake_config_file_fast(self):
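        """Write a throwaway FCFAST config file into self.tempdir.

        The generated XML is (roughly, ignoring whitespace):
            <EMC>
              <FastPolicy>GOLD1</FastPolicy>
              <EcomServerIp>1.1.1.1</EcomServerIp>
              <EcomServerPort>10</EcomServerPort>
              <EcomUserName>user</EcomUserName>
              <EcomPassword>pass</EcomPassword>
              <Pool>gold</Pool>
              <Array>1234567891011</Array>
              <PortGroups><PortGroup>...</PortGroup></PortGroups>
              <Timeout>0</Timeout>
            </EMC>
        """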
doc = minidom.Document()
emc = doc.createElement("EMC")
doc.appendChild(emc)
fastPolicy = doc.createElement("FastPolicy")
fastPolicyText = doc.createTextNode("GOLD1")
emc.appendChild(fastPolicy)
fastPolicy.appendChild(fastPolicyText)
ecomserverip = doc.createElement("EcomServerIp")
ecomserveriptext = doc.createTextNode("1.1.1.1")
emc.appendChild(ecomserverip)
ecomserverip.appendChild(ecomserveriptext)
ecomserverport = doc.createElement("EcomServerPort")
ecomserverporttext = doc.createTextNode("10")
emc.appendChild(ecomserverport)
ecomserverport.appendChild(ecomserverporttext)
ecomusername = doc.createElement("EcomUserName")
ecomusernametext = doc.createTextNode("user")
emc.appendChild(ecomusername)
ecomusername.appendChild(ecomusernametext)
ecompassword = doc.createElement("EcomPassword")
ecompasswordtext = doc.createTextNode("pass")
emc.appendChild(ecompassword)
ecompassword.appendChild(ecompasswordtext)
portgroup = doc.createElement("PortGroup")
portgrouptext = doc.createTextNode(self.data.port_group)
portgroup.appendChild(portgrouptext)
pool = doc.createElement("Pool")
pooltext = doc.createTextNode("gold")
emc.appendChild(pool)
pool.appendChild(pooltext)
array = doc.createElement("Array")
arraytext = doc.createTextNode("1234567891011")
emc.appendChild(array)
array.appendChild(arraytext)
portgroups = doc.createElement("PortGroups")
portgroups.appendChild(portgroup)
emc.appendChild(portgroups)
timeout = doc.createElement("Timeout")
timeouttext = doc.createTextNode("0")
emc.appendChild(timeout)
timeout.appendChild(timeouttext)
        filename = 'cinder_emc_config_FCFAST.xml'
        self.config_file_path = os.path.join(self.tempdir, filename)
        with open(self.config_file_path, 'w') as f:
            doc.writexml(f)
def fake_ecom_connection(self):
conn = FakeEcomConnection()
return conn
def fake_sleep(self, seconds):
return
def fake_is_v3(self, conn, serialNumber):
return False
@mock.patch.object(
emc_vmax_fast.EMCVMAXFast,
'get_capacities_associated_to_policy',
return_value=(1234, 1200))
@mock.patch.object(
emc_vmax_fast.EMCVMAXFast,
'get_tier_policy_by_name',
return_value=None)
@mock.patch.object(
emc_vmax_fast.EMCVMAXFast,
'is_tiering_policy_enabled',
return_value=True)
@mock.patch.object(
emc_vmax_utils.EMCVMAXUtils,
'find_storageSystem',
return_value=None)
def test_get_volume_stats_fast(self,
mock_storage_system,
mock_is_fast_enabled,
mock_get_policy,
mock_capacity):
self.driver.get_volume_stats(True)
@mock.patch.object(
emc_vmax_fast.EMCVMAXFast,
'get_pool_associated_to_policy',
return_value=1)
@mock.patch.object(
emc_vmax_common.EMCVMAXCommon,
'_get_pool_and_storage_system',
return_value=(None, EMCVMAXCommonData.storage_system))
@mock.patch.object(
volume_types,
'get_volume_type_extra_specs',
return_value={'volume_backend_name': 'FCFAST'})
def test_create_volume_fast_success(
self, _mock_volume_type, mock_storage_system, mock_pool_policy):
self.driver.create_volume(self.data.test_volume_v2)
@mock.patch.object(
emc_vmax_fast.EMCVMAXFast,
'get_pool_associated_to_policy',
return_value=1)
@mock.patch.object(
emc_vmax_common.EMCVMAXCommon,
'_get_pool_and_storage_system',
return_value=(None, EMCVMAXCommonData.storage_system))
@mock.patch.object(
volume_types,
'get_volume_type_extra_specs',
return_value={'storagetype: stripedmetacount': '4',
'volume_backend_name': 'FCFAST'})
def test_create_volume_fast_striped_success(
self, _mock_volume_type, mock_storage_system, mock_pool_policy):
self.driver.create_volume(self.data.test_volume_v2)
@mock.patch.object(
emc_vmax_fast.EMCVMAXFast,
'get_pool_associated_to_policy',
return_value=1)
@mock.patch.object(
emc_vmax_common.EMCVMAXCommon,
'_get_pool_and_storage_system',
return_value=(None, EMCVMAXCommonData.storage_system))
@mock.patch.object(
volume_types,
'get_volume_type_extra_specs',
return_value={'volume_backend_name': 'FCFAST'})
def test_create_volume_in_CG_fast_success(
self, _mock_volume_type, mock_storage_system, mock_pool_policy):
self.driver.create_volume(self.data.test_volume_CG)
@mock.patch.object(
emc_vmax_masking.EMCVMAXMasking,
'_wrap_get_storage_group_from_volume',
return_value=None)
@mock.patch.object(
volume_types,
'get_volume_type_extra_specs',
return_value={'volume_backend_name': 'FCFAST'})
def test_delete_volume_fast_success(self, _mock_volume_type,
mock_storage_group):
self.driver.delete_volume(self.data.test_volume)
def test_create_volume_fast_failed(self):
self.assertRaises(exception.VolumeBackendAPIException,
self.driver.create_volume,
self.data.test_failed_volume)
@mock.patch.object(
volume_types,
'get_volume_type_extra_specs',
return_value={'volume_backend_name': 'FCFAST'})
def test_delete_volume_fast_notfound(self, _mock_volume_type):
""""Test delete volume with volume not found."""
notfound_delete_vol = {}
notfound_delete_vol['name'] = 'notfound_delete_vol'
notfound_delete_vol['id'] = '10'
notfound_delete_vol['CreationClassName'] = 'Symmm_StorageVolume'
notfound_delete_vol['SystemName'] = self.data.storage_system
notfound_delete_vol['DeviceID'] = notfound_delete_vol['id']
notfound_delete_vol['SystemCreationClassName'] = 'Symm_StorageSystem'
notfound_delete_vol['host'] = self.data.fake_host
notfound_delete_vol['volume_type_id'] = 'abc'
notfound_delete_vol['provider_location'] = None
self.driver.delete_volume(notfound_delete_vol)
@mock.patch.object(
emc_vmax_utils.EMCVMAXUtils,
'wait_for_job_complete',
return_value=(-1, 'error'))
@mock.patch.object(
emc_vmax_fast.EMCVMAXFast,
'get_pool_associated_to_policy',
return_value=1)
@mock.patch.object(
emc_vmax_common.EMCVMAXCommon,
'_get_pool_and_storage_system',
return_value=(None, EMCVMAXCommonData.storage_system))
@mock.patch.object(
emc_vmax_masking.EMCVMAXMasking,
'_wrap_get_storage_group_from_volume',
return_value=None)
@mock.patch.object(
volume_types,
'get_volume_type_extra_specs',
return_value={'volume_backend_name': 'FCFAST'})
def test_delete_volume_fast_failed(
self, _mock_volume_type, mock_wrapper,
mock_storage_system, mock_pool_policy, mock_wait):
self.driver.create_volume(self.data.failed_delete_vol)
self.assertRaises(exception.VolumeBackendAPIException,
self.driver.delete_volume,
self.data.failed_delete_vol)
@mock.patch.object(
emc_vmax_common.EMCVMAXCommon,
'_is_same_host',
return_value=True)
@mock.patch.object(
emc_vmax_masking.EMCVMAXMasking,
'get_masking_view_from_storage_group',
return_value=EMCVMAXCommonData.lunmaskctrl_name)
@mock.patch.object(
volume_types,
'get_volume_type_extra_specs',
return_value={'volume_backend_name': 'FCFAST',
'FASTPOLICY': 'FC_GOLD1'})
def test_map_fast_success(self, _mock_volume_type, mock_maskingview,
mock_is_same_host):
common = self.driver.common
common.get_target_wwns = mock.Mock(
return_value=EMCVMAXCommonData.target_wwns)
self.driver.common._get_correct_port_group = mock.Mock(
return_value=self.data.port_group)
data = self.driver.initialize_connection(
self.data.test_volume, self.data.connector)
# Test the no lookup service, pre-zoned case.
common.get_target_wwns.assert_called_once_with(
EMCVMAXCommonData.storage_system, EMCVMAXCommonData.connector)
for init, target in data['data']['initiator_target_map'].items():
self.assertIn(init[::-1], target)
@mock.patch.object(
emc_vmax_common.EMCVMAXCommon,
'find_device_number',
return_value={'Name': "0001"})
@mock.patch.object(
volume_types,
'get_volume_type_extra_specs',
return_value={'volume_backend_name': 'FCFAST',
'FASTPOLICY': 'FC_GOLD1'})
def test_map_fast_failed(self, _mock_volume_type, mock_wrap_device):
self.assertRaises(exception.VolumeBackendAPIException,
self.driver.initialize_connection,
self.data.test_volume,
self.data.connector)
@mock.patch.object(
emc_vmax_common.EMCVMAXCommon,
'get_masking_views_by_port_group',
return_value=[])
@mock.patch.object(
emc_vmax_masking.EMCVMAXMasking,
'get_initiator_group_from_masking_view',
return_value='myInitGroup')
@mock.patch.object(
emc_vmax_masking.EMCVMAXMasking,
'_find_initiator_masking_group',
return_value='myInitGroup')
@mock.patch.object(
emc_vmax_masking.EMCVMAXMasking,
'get_masking_view_by_volume',
return_value=EMCVMAXCommonData.lunmaskctrl_name)
@mock.patch.object(
volume_types,
'get_volume_type_extra_specs',
return_value={'volume_backend_name': 'FCFAST',
'FASTPOLICY': 'FC_GOLD1'})
def test_detach_fast_success(self, mock_volume_type, mock_maskingview,
mock_ig, mock_igc, mock_mv):
common = self.driver.common
common.get_target_wwns = mock.Mock(
return_value=EMCVMAXCommonData.target_wwns)
data = self.driver.terminate_connection(self.data.test_volume,
self.data.connector)
common.get_target_wwns.assert_called_once_with(
EMCVMAXCommonData.storage_system, EMCVMAXCommonData.connector)
numTargetWwns = len(EMCVMAXCommonData.target_wwns)
self.assertEqual(numTargetWwns, len(data['data']))
@mock.patch.object(
emc_vmax_utils.EMCVMAXUtils,
'get_volume_size',
return_value='2147483648')
@mock.patch.object(
volume_types,
'get_volume_type_extra_specs',
return_value={'volume_backend_name': 'FCFAST'})
def test_extend_volume_fast_success(self, _mock_volume_type,
_mock_volume_size):
newSize = '2'
self.driver.extend_volume(self.data.test_volume, newSize)
@mock.patch.object(
emc_vmax_utils.EMCVMAXUtils,
'check_if_volume_is_extendable',
return_value='False')
@mock.patch.object(
volume_types,
'get_volume_type_extra_specs',
return_value={'volume_backend_name': 'FCFAST'})
def test_extend_volume_striped_fast_failed(self,
_mock_volume_type,
_mock_is_extendable):
newSize = '2'
self.assertRaises(exception.VolumeBackendAPIException,
self.driver.extend_volume,
self.data.test_volume,
newSize)
@mock.patch.object(
emc_vmax_fast.EMCVMAXFast,
'get_pool_associated_to_policy',
return_value=1)
@mock.patch.object(
emc_vmax_common.EMCVMAXCommon,
'_get_pool_and_storage_system',
return_value=(None, EMCVMAXCommonData.storage_system))
@mock.patch.object(
emc_vmax_utils.EMCVMAXUtils,
'get_meta_members_capacity_in_byte',
return_value=[1234567, 7654321])
@mock.patch.object(
emc_vmax_utils.EMCVMAXUtils,
'get_volume_meta_head',
return_value=[EMCVMAXCommonData.test_volume])
@mock.patch.object(
FakeDB,
'volume_get',
return_value=EMCVMAXCommonData.test_source_volume)
@mock.patch.object(
volume_types,
'get_volume_type_extra_specs',
return_value={'volume_backend_name': 'FCFAST'})
def test_create_snapshot_different_sizes_meta_fast_success(
self, mock_volume_type, mock_volume,
mock_meta, mock_size, mock_pool, mock_policy):
self.data.test_volume['volume_name'] = "vmax-1234567"
common = self.driver.common
volumeDict = {'classname': u'Symm_StorageVolume',
'keybindings': EMCVMAXCommonData.keybindings}
common.provision.create_volume_from_pool = (
mock.Mock(return_value=(volumeDict, 0)))
common.provision.get_volume_dict_from_job = (
mock.Mock(return_value=volumeDict))
common.fast.is_volume_in_default_SG = (
mock.Mock(return_value=True))
self.driver.create_snapshot(self.data.test_volume)
@mock.patch.object(
emc_vmax_common.EMCVMAXCommon,
'_validate_pool',
        return_value='Bogus_Pool')
def test_create_snapshot_fast_failed(self, mock_pool):
self.data.test_volume['volume_name'] = "vmax-1234567"
self.assertRaises(exception.VolumeBackendAPIException,
self.driver.create_snapshot,
self.data.test_volume)
@mock.patch.object(
emc_vmax_utils.EMCVMAXUtils,
'get_meta_members_capacity_in_byte',
return_value=[1234567])
@mock.patch.object(
emc_vmax_utils.EMCVMAXUtils,
'get_volume_meta_head',
return_value=[EMCVMAXCommonData.test_volume])
@mock.patch.object(
emc_vmax_utils.EMCVMAXUtils,
'find_sync_sv_by_target',
return_value=(None, None))
@mock.patch.object(
volume_types,
'get_volume_type_extra_specs',
return_value={'volume_backend_name': 'FCFAST'})
def test_create_volume_from_same_size_meta_snapshot(
self, mock_volume_type, mock_sync_sv, mock_meta, mock_size):
self.data.test_volume['volume_name'] = "vmax-1234567"
common = self.driver.common
common.fast.is_volume_in_default_SG = mock.Mock(return_value=True)
self.driver.create_volume_from_snapshot(
self.data.test_volume, self.data.test_volume)
@mock.patch.object(
emc_vmax_utils.EMCVMAXUtils,
'find_sync_sv_by_target',
return_value=(None, None))
@mock.patch.object(
emc_vmax_utils.EMCVMAXUtils,
'find_replication_service',
return_value=None)
@mock.patch.object(
volume_types,
'get_volume_type_extra_specs',
return_value={'volume_backend_name': 'FCFAST',
'FASTPOLICY': 'FC_GOLD1'})
def test_create_volume_from_snapshot_fast_failed(
self, mock_volume_type, mock_rep_service, mock_sync_sv):
self.data.test_volume['volume_name'] = "vmax-1234567"
self.assertRaises(exception.VolumeBackendAPIException,
self.driver.create_volume_from_snapshot,
self.data.test_volume,
EMCVMAXCommonData.test_source_volume)
def test_create_clone_simple_volume_fast_success(self):
extraSpecs = {'storagetype:fastpolicy': 'FC_GOLD1',
'volume_backend_name': 'FCFAST',
'isV3': False}
self.driver.common._initial_setup = (
mock.Mock(return_value=extraSpecs))
self.driver.common.extraSpecs = extraSpecs
self.driver.utils.is_clone_licensed = (
mock.Mock(return_value=True))
FakeDB.volume_get = (
mock.Mock(return_value=EMCVMAXCommonData.test_source_volume))
self.data.test_volume['volume_name'] = "vmax-1234567"
self.driver.common.fast.is_volume_in_default_SG = (
mock.Mock(return_value=True))
self.driver.utils.isArrayV3 = mock.Mock(return_value=False)
self.driver.common._find_storage_sync_sv_sv = (
mock.Mock(return_value=(None, None)))
self.driver.create_cloned_volume(self.data.test_volume,
EMCVMAXCommonData.test_source_volume)
@mock.patch.object(
emc_vmax_common.EMCVMAXCommon,
'_get_pool_and_storage_system',
return_value=(None, EMCVMAXCommonData.storage_system))
@mock.patch.object(
emc_vmax_utils.EMCVMAXUtils,
'get_meta_members_capacity_in_byte',
return_value=[1234567, 7654321])
@mock.patch.object(
emc_vmax_utils.EMCVMAXUtils,
'get_volume_meta_head',
return_value=[EMCVMAXCommonData.test_volume])
@mock.patch.object(
emc_vmax_fast.EMCVMAXFast,
'get_pool_associated_to_policy',
return_value=1)
@mock.patch.object(
FakeDB,
'volume_get',
return_value=EMCVMAXCommonData.test_source_volume)
@mock.patch.object(
volume_types,
'get_volume_type_extra_specs',
return_value={'volume_backend_name': 'FCFAST'})
def test_create_clone_fast_failed(
self, mock_volume_type, mock_vol, mock_policy,
mock_meta, mock_size, mock_pool):
self.data.test_volume['volume_name'] = "vmax-1234567"
self.driver.common._modify_and_get_composite_volume_instance = (
mock.Mock(return_value=(1, None)))
self.assertRaises(exception.VolumeBackendAPIException,
self.driver.create_cloned_volume,
self.data.test_volume,
EMCVMAXCommonData.test_source_volume)
@mock.patch.object(
volume_types,
'get_volume_type_extra_specs',
return_value={'volume_backend_name': 'FCFAST'})
def test_migrate_volume_fast_success(self, _mock_volume_type):
self.driver.migrate_volume(self.data.test_ctxt, self.data.test_volume,
self.data.test_host)
@mock.patch.object(
emc_vmax_masking.EMCVMAXMasking,
'_wrap_get_storage_group_from_volume',
return_value=None)
@mock.patch.object(
emc_vmax_utils.EMCVMAXUtils,
'parse_pool_instance_id',
return_value=('silver', 'SYMMETRIX+000195900551'))
@mock.patch.object(
volume_types,
'get_volume_type_extra_specs',
return_value={'volume_backend_name': 'FCFAST'})
def test_retype_volume_fast_success(
self, _mock_volume_type, mock_values, mock_wrap):
self.driver.retype(
self.data.test_ctxt, self.data.test_volume, self.data.new_type,
self.data.diff, self.data.test_host)
@mock.patch.object(
emc_vmax_common.EMCVMAXCommon,
'_get_pool_and_storage_system',
return_value=(None, EMCVMAXCommonData.storage_system))
@mock.patch.object(
volume_types,
'get_volume_type_extra_specs',
return_value={'volume_backend_name': 'FCFAST'})
def test_create_CG_fast_success(
self, _mock_volume_type, _mock_storage_system):
self.driver.create_consistencygroup(
self.data.test_ctxt, self.data.test_CG)
@mock.patch.object(
emc_vmax_common.EMCVMAXCommon,
'_get_members_of_replication_group',
return_value=None)
@mock.patch.object(
FakeDB,
'volume_get_all_by_group',
return_value=None)
@mock.patch.object(
emc_vmax_common.EMCVMAXCommon,
'_get_pool_and_storage_system',
return_value=(None, EMCVMAXCommonData.storage_system))
@mock.patch.object(
volume_types,
'get_volume_type_extra_specs',
return_value={'volume_backend_name': 'FCFAST'})
def test_delete_CG_no_volumes_fast_success(
self, _mock_volume_type, _mock_storage_system,
_mock_db_volumes, _mock_members):
self.driver.delete_consistencygroup(
self.data.test_ctxt, self.data.test_CG, [])
@mock.patch.object(
emc_vmax_common.EMCVMAXCommon,
'_get_pool_and_storage_system',
return_value=(None, EMCVMAXCommonData.storage_system))
@mock.patch.object(
volume_types,
'get_volume_type_extra_specs',
return_value={'volume_backend_name': 'FCFAST'})
def test_delete_CG_with_volumes_fast_success(
self, _mock_volume_type, _mock_storage_system):
self.driver.delete_consistencygroup(
self.data.test_ctxt, self.data.test_CG, [])
@mock.patch.object(
emc_vmax_utils.EMCVMAXUtils,
'find_group_sync_rg_by_target',
return_value="")
@mock.patch.object(
emc_vmax_common.EMCVMAXCommon,
'_get_members_of_replication_group',
return_value=())
@mock.patch.object(
emc_vmax_common.EMCVMAXCommon,
'_find_consistency_group',
return_value=(None, EMCVMAXCommonData.test_CG))
@mock.patch.object(
emc_vmax_common.EMCVMAXCommon,
'_get_pool_and_storage_system',
return_value=(None, EMCVMAXCommonData.storage_system))
@mock.patch.object(
volume_types,
'get_volume_type_extra_specs',
return_value={'volume_backend_name': 'FCFAST'})
    def test_create_snapshot_for_CG_fast_success(
self, _mock_volume_type, _mock_storage, _mock_cg, _mock_members,
_mock_rg):
self.driver.create_cgsnapshot(
self.data.test_ctxt, self.data.test_CG_snapshot, [])
@mock.patch.object(
emc_vmax_common.EMCVMAXCommon,
'_get_pool_and_storage_system',
return_value=(None, EMCVMAXCommonData.storage_system))
@mock.patch.object(
volume_types,
'get_volume_type_extra_specs',
return_value={'volume_backend_name': 'FCFAST'})
    def test_delete_snapshot_for_CG_fast_success(
self, _mock_volume_type, _mock_storage):
self.driver.delete_cgsnapshot(
self.data.test_ctxt, self.data.test_CG_snapshot, [])
    # Bug 1385450: cloning must fail cleanly when the clone feature is not
    # licensed on the array.
def test_create_clone_without_license(self):
mockRepServCap = {}
mockRepServCap['InstanceID'] = 'SYMMETRIX+1385450'
self.driver.utils.find_replication_service_capabilities = (
mock.Mock(return_value=mockRepServCap))
self.driver.utils.is_clone_licensed = (
mock.Mock(return_value=False))
self.assertRaises(exception.VolumeBackendAPIException,
self.driver.create_cloned_volume,
self.data.test_volume,
EMCVMAXCommonData.test_source_volume)
def test_manage_existing_fast_failed(self):
volume = {}
metadata = {'key': 'array',
'value': '12345'}
poolInstanceName = {}
storageSystem = {}
poolInstanceName['InstanceID'] = "SATA_GOLD1"
storageSystem['InstanceID'] = "SYMMETRIX+00019870000"
volume['volume_metadata'] = [metadata]
volume['name'] = "test-volume"
external_ref = {'source-name': '0123'}
common = self.driver.common
common._initial_setup = mock.Mock(
return_value={'volume_backend_name': 'FCFAST',
'storagetype:fastpolicy': 'GOLD'})
self.assertRaises(exception.VolumeBackendAPIException,
self.driver.manage_existing,
volume,
external_ref)
    def _cleanup(self):
        if os.path.exists(self.config_file_path):
            os.remove(self.config_file_path)
        shutil.rmtree(self.tempdir)
class EMCV3DriverTestCase(test.TestCase):
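    """Tests for the VMAX FC driver against a V3 (SLO/workload) array."""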
def setUp(self):
self.data = EMCVMAXCommonData()
self.data.storage_system = 'SYMMETRIX-+-000197200056'
self.tempdir = tempfile.mkdtemp()
super(EMCV3DriverTestCase, self).setUp()
self.config_file_path = None
self.create_fake_config_file_v3()
self.addCleanup(self._cleanup)
self.set_configuration()
def set_configuration(self):
configuration = mock.Mock()
configuration.cinder_emc_config_file = self.config_file_path
configuration.safe_get.return_value = 'V3'
configuration.config_group = 'V3'
self.stubs.Set(emc_vmax_common.EMCVMAXCommon, '_get_ecom_connection',
self.fake_ecom_connection)
instancename = FakeCIMInstanceName()
self.stubs.Set(emc_vmax_utils.EMCVMAXUtils, 'get_instance_name',
instancename.fake_getinstancename)
self.stubs.Set(time, 'sleep',
self.fake_sleep)
self.stubs.Set(emc_vmax_utils.EMCVMAXUtils, 'isArrayV3',
self.fake_is_v3)
driver = emc_vmax_fc.EMCVMAXFCDriver(configuration=configuration)
driver.db = FakeDB()
self.driver = driver
def create_fake_config_file_v3(self):
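        """Write a throwaway V3 config file into self.tempdir.

        Same flat layout as the V2 files, but with <SLO>Bronze</SLO> and
        <Workload>DSS</Workload> elements and an SRP_1 <Pool>.
        """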
doc = minidom.Document()
emc = doc.createElement("EMC")
doc.appendChild(emc)
ecomserverip = doc.createElement("EcomServerIp")
ecomserveriptext = doc.createTextNode("1.1.1.1")
emc.appendChild(ecomserverip)
ecomserverip.appendChild(ecomserveriptext)
ecomserverport = doc.createElement("EcomServerPort")
ecomserverporttext = doc.createTextNode("10")
emc.appendChild(ecomserverport)
ecomserverport.appendChild(ecomserverporttext)
ecomusername = doc.createElement("EcomUserName")
ecomusernametext = doc.createTextNode("user")
emc.appendChild(ecomusername)
ecomusername.appendChild(ecomusernametext)
ecompassword = doc.createElement("EcomPassword")
ecompasswordtext = doc.createTextNode("pass")
emc.appendChild(ecompassword)
ecompassword.appendChild(ecompasswordtext)
portgroup = doc.createElement("PortGroup")
portgrouptext = doc.createTextNode(self.data.port_group)
portgroup.appendChild(portgrouptext)
pool = doc.createElement("Pool")
pooltext = doc.createTextNode("SRP_1")
emc.appendChild(pool)
pool.appendChild(pooltext)
array = doc.createElement("Array")
arraytext = doc.createTextNode("1234567891011")
emc.appendChild(array)
array.appendChild(arraytext)
slo = doc.createElement("SLO")
slotext = doc.createTextNode("Bronze")
emc.appendChild(slo)
slo.appendChild(slotext)
workload = doc.createElement("Workload")
workloadtext = doc.createTextNode("DSS")
emc.appendChild(workload)
workload.appendChild(workloadtext)
portgroups = doc.createElement("PortGroups")
portgroups.appendChild(portgroup)
emc.appendChild(portgroups)
timeout = doc.createElement("Timeout")
timeouttext = doc.createTextNode("0")
emc.appendChild(timeout)
timeout.appendChild(timeouttext)
        filename = 'cinder_emc_config_V3.xml'
        self.config_file_path = os.path.join(self.tempdir, filename)
        with open(self.config_file_path, 'w') as f:
            doc.writexml(f)
def fake_ecom_connection(self):
self.conn = FakeEcomConnection()
return self.conn
def fake_sleep(self, seconds):
return
def fake_is_v3(self, conn, serialNumber):
return True
def default_extraspec(self):
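        """Extra specs mirroring the values written to the V3 config file."""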
return {'storagetype:pool': 'SRP_1',
'volume_backend_name': 'V3_BE',
'storagetype:workload': 'DSS',
'storagetype:slo': 'Bronze',
'storagetype:array': '1234567891011',
'isV3': True,
'portgroupname': 'OS-portgroup-PG'}
def default_vol(self):
vol = EMC_StorageVolume()
vol['name'] = self.data.test_volume['name']
vol['CreationClassName'] = 'Symm_StorageVolume'
vol['ElementName'] = self.data.test_volume['id']
vol['DeviceID'] = self.data.test_volume['device_id']
vol['Id'] = self.data.test_volume['id']
vol['SystemName'] = self.data.storage_system
vol['NumberOfBlocks'] = self.data.test_volume['NumberOfBlocks']
vol['BlockSize'] = self.data.test_volume['BlockSize']
        # Point the instance's path attribute back at itself so the fakes
        # can use the volume object as its own CIM object path.
vol['SystemCreationClassName'] = 'Symm_StorageSystem'
vol.path = vol
vol.path.classname = vol['CreationClassName']
return vol
def default_storage_group(self):
storagegroup = {}
storagegroup['CreationClassName'] = (
self.data.storagegroup_creationclass)
storagegroup['ElementName'] = 'no_masking_view'
return storagegroup
def test_last_vol_in_SG_with_MV(self):
conn = self.fake_ecom_connection()
controllerConfigService = (
self.driver.common.utils.find_controller_configuration_service(
conn, self.data.storage_system))
extraSpecs = self.default_extraspec()
storageGroupName = self.data.storagegroupname
storageGroupInstanceName = (
self.driver.common.utils.find_storage_masking_group(
conn, controllerConfigService, storageGroupName))
vol = self.default_vol()
self.driver.common.masking._delete_mv_ig_and_sg = mock.Mock()
self.assertTrue(self.driver.common.masking._last_vol_in_SG(
conn, controllerConfigService, storageGroupInstanceName,
storageGroupName, vol, vol['name'], extraSpecs))
def test_last_vol_in_SG_no_MV(self):
conn = self.fake_ecom_connection()
controllerConfigService = (
self.driver.common.utils.find_controller_configuration_service(
conn, self.data.storage_system))
extraSpecs = self.default_extraspec()
self.driver.common.masking.get_masking_view_from_storage_group = (
mock.Mock(return_value=None))
self.driver.common.masking.utils.get_existing_instance = (
mock.Mock(return_value=None))
storagegroup = self.default_storage_group()
vol = self.default_vol()
self.assertTrue(self.driver.common.masking._last_vol_in_SG(
conn, controllerConfigService, storagegroup,
storagegroup['ElementName'], vol, vol['name'], extraSpecs))
def test_last_vol_in_SG_no_MV_fail(self):
self.driver.common.masking.utils.get_existing_instance = (
mock.Mock(return_value='value'))
conn = self.fake_ecom_connection()
controllerConfigService = (
self.driver.common.utils.find_controller_configuration_service(
conn, self.data.storage_system))
extraSpecs = self.default_extraspec()
vol = self.default_vol()
storagegroup = self.default_storage_group()
storagegroup['ElementName'] = 'no_masking_view'
self.assertRaises(exception.VolumeBackendAPIException,
self.driver.common.masking._last_vol_in_SG,
conn, controllerConfigService,
storagegroup, storagegroup['ElementName'], vol,
vol['name'], extraSpecs)
@mock.patch.object(
emc_vmax_utils.EMCVMAXUtils,
'find_storageSystem',
return_value={'Name': EMCVMAXCommonData.storage_system_v3})
def test_get_volume_stats_v3(
self, mock_storage_system):
self.driver.get_volume_stats(True)
@mock.patch.object(
emc_vmax_common.EMCVMAXCommon,
'_get_pool_and_storage_system',
return_value=(None, EMCVMAXCommonData.storage_system))
@mock.patch.object(
volume_types,
'get_volume_type_extra_specs',
return_value={'volume_backend_name': 'V3_BE'})
def test_create_volume_v3_success(
self, _mock_volume_type, mock_storage_system):
self.data.test_volume_v3['host'] = self.data.fake_host_v3
self.driver.common._initial_setup = mock.Mock(
return_value=self.default_extraspec())
self.driver.create_volume(self.data.test_volume_v3)
@mock.patch.object(
emc_vmax_common.EMCVMAXCommon,
'_get_pool_and_storage_system',
return_value=(None, EMCVMAXCommonData.storage_system))
@mock.patch.object(
volume_types,
'get_volume_type_extra_specs',
return_value={'volume_backend_name': 'V3_BE'})
def test_create_volume_v3_no_slo_success(
self, _mock_volume_type, mock_storage_system):
v3_vol = self.data.test_volume_v3
v3_vol['host'] = 'HostX@Backend#NONE+SRP_1+1234567891011'
instid = 'SYMMETRIX-+-000197200056-+-NONE:DSS-+-F-+-0-+-SR-+-SRP_1'
storagepoolsetting = (
{'InstanceID': instid,
'CreationClassName': 'CIM_StoragePoolSetting'})
self.driver.common.provisionv3.get_storage_pool_setting = mock.Mock(
return_value=storagepoolsetting)
extraSpecs = {'storagetype:pool': 'SRP_1',
'volume_backend_name': 'V3_BE',
'storagetype:workload': 'DSS',
'storagetype:slo': 'NONE',
'storagetype:array': '1234567891011',
'isV3': True,
'portgroupname': 'OS-portgroup-PG'}
self.driver.common._initial_setup = mock.Mock(
return_value=extraSpecs)
self.driver.create_volume(v3_vol)
@mock.patch.object(
emc_vmax_common.EMCVMAXCommon,
'_get_pool_and_storage_system',
return_value=(None, EMCVMAXCommonData.storage_system))
@mock.patch.object(
volume_types,
'get_volume_type_extra_specs',
return_value={'volume_backend_name': 'V3_BE'})
def test_create_volume_v3_invalid_slo_failed(
self, _mock_volume_type, mock_storage_system):
extraSpecs = {'storagetype:pool': 'SRP_1',
'volume_backend_name': 'V3_BE',
'storagetype:workload': 'DSS',
'storagetype:slo': 'Bogus',
'storagetype:array': '1234567891011',
'isV3': True,
'portgroupname': 'OS-portgroup-PG'}
self.driver.common._initial_setup = mock.Mock(
return_value=extraSpecs)
self.assertRaises(exception.VolumeBackendAPIException,
self.driver.create_volume,
self.data.test_volume)
@mock.patch.object(
emc_vmax_common.EMCVMAXCommon,
'_get_pool_and_storage_system',
return_value=(None, EMCVMAXCommonData.storage_system))
@mock.patch.object(
volume_types,
'get_volume_type_extra_specs',
return_value={'volume_backend_name': 'V3_BE'})
def test_create_volume_in_CG_v3_success(
self, _mock_volume_type, mock_storage_system):
self.driver.common._initial_setup = mock.Mock(
return_value=self.default_extraspec())
self.driver.create_volume(self.data.test_volume_CG_v3)
@mock.patch.object(
volume_types,
'get_volume_type_extra_specs',
return_value={'volume_backend_name': 'V3_BE'})
def test_delete_volume_v3_success(self, _mock_volume_type):
self.driver.common._initial_setup = mock.Mock(
return_value=self.default_extraspec())
self.driver.delete_volume(self.data.test_volume_v3)
@mock.patch.object(
emc_vmax_common.EMCVMAXCommon,
'_get_pool_and_storage_system',
return_value=(None, EMCVMAXCommonData.storage_system))
@mock.patch.object(
volume_types,
'get_volume_type_extra_specs',
return_value={'volume_backend_name': 'V3_BE'})
@mock.patch.object(
FakeDB,
'volume_get',
return_value=EMCVMAXCommonData.test_source_volume_v3)
def test_create_snapshot_v3_success(
            self, mock_volume_db, mock_type, mock_pool):
self.data.test_volume_v3['volume_name'] = "vmax-1234567"
self.driver.common._initial_setup = mock.Mock(
return_value=self.default_extraspec())
self.driver.create_snapshot(self.data.test_volume_v3)
@mock.patch.object(
FakeDB,
'volume_get',
return_value=EMCVMAXCommonData.test_source_volume_v3)
@mock.patch.object(
volume_types,
'get_volume_type_extra_specs',
return_value={'volume_backend_name': 'V3_BE'})
def test_delete_snapshot_v3_success(self, mock_volume_type, mock_db):
self.data.test_volume_v3['volume_name'] = "vmax-1234567"
self.driver.common._initial_setup = mock.Mock(
return_value=self.default_extraspec())
self.driver.delete_snapshot(self.data.test_volume_v3)
@mock.patch.object(
emc_vmax_common.EMCVMAXCommon,
'_get_pool_and_storage_system',
return_value=(None, EMCVMAXCommonData.storage_system))
@mock.patch.object(
volume_types,
'get_volume_type_extra_specs',
return_value={'volume_backend_name': 'V3_BE'})
@mock.patch.object(
FakeDB,
'volume_get',
return_value=EMCVMAXCommonData.test_source_volume)
def test_create_cloned_volume_v3_success(
            self, mock_volume_db, mock_type, mock_pool):
self.data.test_volume_v3['volume_name'] = "vmax-1234567"
cloneVol = {}
cloneVol['name'] = 'vol1'
cloneVol['id'] = '10'
cloneVol['CreationClassName'] = 'Symmm_StorageVolume'
cloneVol['SystemName'] = self.data.storage_system
cloneVol['DeviceID'] = cloneVol['id']
cloneVol['SystemCreationClassName'] = 'Symm_StorageSystem'
cloneVol['volume_type_id'] = 'abc'
cloneVol['provider_location'] = None
cloneVol['NumberOfBlocks'] = 100
cloneVol['BlockSize'] = self.data.block_size
cloneVol['host'] = self.data.fake_host_v3
self.driver.common._initial_setup = mock.Mock(
return_value=self.default_extraspec())
self.driver.create_cloned_volume(cloneVol, self.data.test_volume_v3)
@mock.patch.object(
emc_vmax_common.EMCVMAXCommon,
'_get_pool_and_storage_system',
return_value=(None, EMCVMAXCommonData.storage_system))
@mock.patch.object(
volume_types,
'get_volume_type_extra_specs',
return_value={'volume_backend_name': 'V3_BE'})
def test_create_CG_v3_success(
self, _mock_volume_type, _mock_storage_system):
self.driver.create_consistencygroup(
self.data.test_ctxt, self.data.test_volume_CG_v3)
@mock.patch.object(
emc_vmax_common.EMCVMAXCommon,
'_get_members_of_replication_group',
return_value=None)
@mock.patch.object(
FakeDB,
'volume_get_all_by_group',
return_value=None)
@mock.patch.object(
emc_vmax_common.EMCVMAXCommon,
'_get_pool_and_storage_system',
return_value=(None, EMCVMAXCommonData.storage_system))
@mock.patch.object(
volume_types,
'get_volume_type_extra_specs',
return_value={'volume_backend_name': 'V3_BE'})
def test_delete_CG_no_volumes_v3_success(
self, _mock_volume_type, _mock_storage_system,
_mock_db_volumes, _mock_members):
self.driver.delete_consistencygroup(
self.data.test_ctxt, self.data.test_CG, [])
@mock.patch.object(
emc_vmax_common.EMCVMAXCommon,
'_get_pool_and_storage_system',
return_value=(None, EMCVMAXCommonData.storage_system))
@mock.patch.object(
volume_types,
'get_volume_type_extra_specs',
return_value={'volume_backend_name': 'V3_BE'})
def test_delete_CG_with_volumes_v3_success(
self, _mock_volume_type, _mock_storage_system):
self.driver.delete_consistencygroup(
self.data.test_ctxt, self.data.test_CG, [])
@mock.patch.object(
volume_types,
'get_volume_type_extra_specs',
return_value={'volume_backend_name': 'V3_BE'})
def test_migrate_volume_v3_success(self, _mock_volume_type):
self.driver.common._initial_setup = mock.Mock(
return_value=self.default_extraspec())
self.driver.migrate_volume(self.data.test_ctxt, self.data.test_volume,
self.data.test_host)
@mock.patch.object(
emc_vmax_provision_v3.EMCVMAXProvisionV3,
'_find_new_storage_group',
return_value='Any')
@mock.patch.object(
emc_vmax_utils.EMCVMAXUtils,
'wrap_get_storage_group_from_volume',
return_value=None)
@mock.patch.object(
emc_vmax_utils.EMCVMAXUtils,
'_get_fast_settings_from_storage_group',
return_value='Gold+DSS_REP')
@mock.patch.object(
volume_types,
'get_volume_type_extra_specs',
return_value={'volume_backend_name': 'V3_BE'})
def test_retype_volume_v3_success(
self, _mock_volume_type, mock_fast_settings,
mock_storage_group, mock_found_SG):
self.driver.common._initial_setup = mock.Mock(
return_value=self.default_extraspec())
self.assertTrue(self.driver.retype(
self.data.test_ctxt, self.data.test_volume_v3, self.data.new_type,
self.data.diff, self.data.test_host_v3))
@mock.patch.object(
emc_vmax_utils.EMCVMAXUtils,
'_get_fast_settings_from_storage_group',
return_value='Bronze+DSS')
@mock.patch.object(
volume_types,
'get_volume_type_extra_specs',
return_value={'volume_backend_name': 'V3_BE'})
def test_retype_volume_same_host_failure(
self, _mock_volume_type, mock_fast_settings):
self.driver.common._initial_setup = mock.Mock(
return_value=self.default_extraspec())
self.assertFalse(self.driver.retype(
self.data.test_ctxt, self.data.test_volume_v3, self.data.new_type,
self.data.diff, self.data.test_host_v3))
@mock.patch.object(
emc_vmax_utils.EMCVMAXUtils,
'find_group_sync_rg_by_target',
return_value=1)
@mock.patch.object(
emc_vmax_common.EMCVMAXCommon,
'_get_members_of_replication_group',
return_value=())
@mock.patch.object(
emc_vmax_common.EMCVMAXCommon,
'_find_consistency_group',
return_value=(None, EMCVMAXCommonData.test_CG))
@mock.patch.object(
emc_vmax_common.EMCVMAXCommon,
'_get_pool_and_storage_system',
return_value=(None, EMCVMAXCommonData.storage_system))
@mock.patch.object(
volume_types,
'get_volume_type_extra_specs',
return_value={'volume_backend_name': 'V3_BE'})
def test_create_cgsnapshot_v3_success(
self, _mock_volume_type, _mock_storage, _mock_cg, _mock_members,
mock_rg):
provisionv3 = self.driver.common.provisionv3
provisionv3.create_group_replica = mock.Mock(return_value=(0, None))
self.driver.create_cgsnapshot(
self.data.test_ctxt, self.data.test_CG_snapshot, [])
repServ = self.conn.EnumerateInstanceNames("EMC_ReplicationService")[0]
provisionv3.create_group_replica.assert_called_once_with(
self.conn, repServ,
(None, EMCVMAXCommonData.test_CG),
(None, EMCVMAXCommonData.test_CG), '12de',
EMCVMAXCommonData.extra_specs)
@mock.patch.object(
emc_vmax_common.EMCVMAXCommon,
'_get_pool_and_storage_system',
return_value=(None, EMCVMAXCommonData.storage_system))
@mock.patch.object(
volume_types,
'get_volume_type_extra_specs',
return_value={'volume_backend_name': 'V3_BE'})
def test_delete_cgsnapshot_v3_success(
self, _mock_volume_type, _mock_storage):
self.driver.delete_cgsnapshot(
self.data.test_ctxt, self.data.test_CG_snapshot, [])
@mock.patch.object(
emc_vmax_common.EMCVMAXCommon,
'_get_pool_and_storage_system',
return_value=(None, EMCVMAXCommonData.storage_system_v3))
@mock.patch.object(
volume_types,
'get_volume_type_extra_specs',
return_value={'volume_backend_name': 'V3_BE'})
def test_update_CG_add_volume_v3_success(
self, _mock_volume_type, _mock_storage_system):
add_volumes = []
add_volumes.append(self.data.test_source_volume)
remove_volumes = None
self.driver.update_consistencygroup(
self.data.test_ctxt, self.data.test_CG,
add_volumes, remove_volumes)
# Multiple volumes
add_volumes.append(self.data.test_source_volume)
self.driver.update_consistencygroup(
self.data.test_ctxt, self.data.test_CG,
add_volumes, remove_volumes)
# Can't find CG
self.driver.common._find_consistency_group = mock.Mock(
return_value=None)
self.assertRaises(exception.ConsistencyGroupNotFound,
self.driver.update_consistencygroup,
self.data.test_ctxt, self.data.test_CG,
add_volumes, remove_volumes)
@mock.patch.object(
emc_vmax_common.EMCVMAXCommon,
'_get_pool_and_storage_system',
return_value=(None, EMCVMAXCommonData.storage_system_v3))
@mock.patch.object(
volume_types,
'get_volume_type_extra_specs',
return_value={'volume_backend_name': 'V3_BE'})
def test_update_CG_remove_volume_v3_success(
self, _mock_volume_type, _mock_storage_system):
remove_volumes = []
remove_volumes.append(self.data.test_source_volume)
add_volumes = None
self.driver.update_consistencygroup(
self.data.test_ctxt, self.data.test_CG,
add_volumes, remove_volumes)
# Multiple volumes
remove_volumes.append(self.data.test_source_volume)
self.driver.update_consistencygroup(
self.data.test_ctxt, self.data.test_CG,
add_volumes, remove_volumes)
@mock.patch.object(
emc_vmax_common.EMCVMAXCommon,
'_is_same_host',
return_value=True)
@mock.patch.object(
emc_vmax_masking.EMCVMAXMasking,
'get_masking_view_from_storage_group',
return_value=EMCVMAXCommonData.lunmaskctrl_name)
@mock.patch.object(
volume_types,
'get_volume_type_extra_specs',
return_value={'volume_backend_name': 'V3_BE'})
def test_map_v3_success(
self, _mock_volume_type, mock_maskingview, mock_is_same_host):
common = self.driver.common
common.get_target_wwns = mock.Mock(
return_value=EMCVMAXCommonData.target_wwns)
self.driver.common._initial_setup = mock.Mock(
return_value=self.default_extraspec())
self.driver.common._get_correct_port_group = mock.Mock(
return_value=self.data.port_group)
data = self.driver.initialize_connection(
self.data.test_volume_v3, self.data.connector)
# Test the no lookup service, pre-zoned case.
common.get_target_wwns.assert_called_once_with(
EMCVMAXCommonData.storage_system, EMCVMAXCommonData.connector)
for init, target in data['data']['initiator_target_map'].items():
self.assertIn(init[::-1], target)
@mock.patch.object(
emc_vmax_common.EMCVMAXCommon,
'find_device_number',
return_value={'Name': "0001"})
@mock.patch.object(
volume_types,
'get_volume_type_extra_specs',
return_value={'volume_backend_name': 'V3_BE'})
def test_map_v3_failed(self, _mock_volume_type, mock_wrap_device):
self.driver.common._initial_setup = mock.Mock(
return_value=self.default_extraspec())
self.assertRaises(exception.VolumeBackendAPIException,
self.driver.initialize_connection,
self.data.test_volume,
self.data.connector)
@mock.patch.object(
emc_vmax_common.EMCVMAXCommon,
'get_masking_views_by_port_group',
return_value=[])
@mock.patch.object(
emc_vmax_masking.EMCVMAXMasking,
'get_initiator_group_from_masking_view',
return_value='myInitGroup')
@mock.patch.object(
emc_vmax_masking.EMCVMAXMasking,
'_find_initiator_masking_group',
return_value='myInitGroup')
@mock.patch.object(
emc_vmax_masking.EMCVMAXMasking,
'get_masking_view_from_storage_group',
return_value=EMCVMAXCommonData.lunmaskctrl_name)
@mock.patch.object(
volume_types,
'get_volume_type_extra_specs',
return_value={'volume_backend_name': 'V3_BE'})
def test_detach_v3_success(self, mock_volume_type, mock_maskingview,
mock_ig, mock_igc, mock_mv):
common = self.driver.common
common.get_target_wwns = mock.Mock(
return_value=EMCVMAXCommonData.target_wwns)
common.masking.utils.find_storage_masking_group = mock.Mock(
return_value=self.data.storagegroups[0])
self.driver.common._initial_setup = mock.Mock(
return_value=self.default_extraspec())
data = self.driver.terminate_connection(self.data.test_volume_v3,
self.data.connector)
common.get_target_wwns.assert_called_once_with(
EMCVMAXCommonData.storage_system, EMCVMAXCommonData.connector)
numTargetWwns = len(EMCVMAXCommonData.target_wwns)
self.assertEqual(numTargetWwns, len(data['data']))
# Bug https://bugs.launchpad.net/cinder/+bug/1440154
@mock.patch.object(
emc_vmax_common.EMCVMAXCommon,
'_get_pool_and_storage_system',
return_value=(None, EMCVMAXCommonData.storage_system))
@mock.patch.object(
volume_types,
'get_volume_type_extra_specs',
return_value={'volume_backend_name': 'V3_BE'})
@mock.patch.object(
FakeDB,
'volume_get',
return_value=EMCVMAXCommonData.test_source_volume_v3)
@mock.patch.object(
emc_vmax_provision_v3.EMCVMAXProvisionV3,
'create_element_replica')
@mock.patch.object(
emc_vmax_utils.EMCVMAXUtils,
'find_sync_sv_by_target',
return_value=(None, None))
def test_create_clone_v3_assert_clean_up_target_volume(
self, mock_sync, mock_create_replica, mock_volume_db,
            mock_type, mock_pool):
self.data.test_volume['volume_name'] = "vmax-1234567"
e = exception.VolumeBackendAPIException('CreateElementReplica Ex')
common = self.driver.common
volumeDict = {'classname': u'Symm_StorageVolume',
'keybindings': EMCVMAXCommonData.keybindings}
common._create_v3_volume = (
mock.Mock(return_value=(0, volumeDict, self.data.storage_system)))
conn = self.fake_ecom_connection()
        storageConfigService = {}
storageConfigService['SystemName'] = EMCVMAXCommonData.storage_system
storageConfigService['CreationClassName'] = (
self.data.stconf_service_creationclass)
common._delete_from_pool_v3 = mock.Mock(return_value=0)
mock_create_replica.side_effect = e
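        # Make create_element_replica raise so the driver is forced down the
        # cleanup path that deletes the freshly created target volume.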
self.assertRaises(exception.VolumeBackendAPIException,
self.driver.create_cloned_volume,
self.data.test_volume_v3,
EMCVMAXCommonData.test_source_volume_v3)
extraSpecs = common._initial_setup(self.data.test_volume_v3)
targetInstance = (
conn.EnumerateInstanceNames("EMC_StorageVolume")[0])
deviceID = targetInstance['DeviceID']
common._delete_from_pool_v3.assert_called_with(storageConfigService,
targetInstance,
targetInstance['Name'],
deviceID,
extraSpecs)
def test_get_remaining_slo_capacity_wlp(self):
conn = self.fake_ecom_connection()
array_info = {'Workload': u'DSS', 'SLO': u'Bronze'}
storagesystem = self.data.storage_system_v3
srpPoolInstanceName = {}
srpPoolInstanceName['InstanceID'] = (
self.data.storage_system_v3 + '+U+' + 'SRP_1')
srpPoolInstanceName['CreationClassName'] = (
'Symm_VirtualProvisioningPool')
srpPoolInstanceName['ElementName'] = 'SRP_1'
remainingCapacityGb = (
self.driver.common.provisionv3._get_remaining_slo_capacity_wlp(
conn, srpPoolInstanceName, array_info, storagesystem))
remainingSLOCapacityGb = self.driver.common.utils.convert_bits_to_gbs(
self.data.remainingSLOCapacity)
self.assertEqual(remainingSLOCapacityGb, remainingCapacityGb)
@mock.patch.object(
emc_vmax_utils.EMCVMAXUtils,
'get_volume_size',
return_value='2147483648')
def test_extend_volume(self, mock_volume_size):
newSize = '2'
self.driver.common._initial_setup = mock.Mock(
return_value=self.default_extraspec())
self.driver.extend_volume(self.data.test_volume_v3, newSize)
def test_extend_volume_smaller_size_exception(self):
test_local_volume = {'name': 'vol1',
'size': 4,
'volume_name': 'vol1',
'id': 'vol1',
'device_id': '1',
'provider_auth': None,
'project_id': 'project',
'display_name': 'vol1',
'display_description': 'test volume',
'volume_type_id': 'abc',
'provider_location': six.text_type(
self.data.provider_location),
'status': 'available',
'host': self.data.fake_host_v3,
'NumberOfBlocks': 100,
'BlockSize': self.data.block_size
}
newSize = '2'
self.driver.common._initial_setup = mock.Mock(
return_value=self.default_extraspec())
self.assertRaises(
exception.VolumeBackendAPIException,
self.driver.extend_volume,
test_local_volume, newSize)
def test_extend_volume_exception(self):
common = self.driver.common
newsize = '2'
common._initial_setup = mock.Mock(return_value=None)
common._find_lun = mock.Mock(return_value=None)
self.assertRaises(
exception.VolumeBackendAPIException,
common.extend_volume,
self.data.test_volume, newsize)
def test_extend_volume_size_tally_exception(self):
common = self.driver.common
newsize = '2'
self.driver.common._initial_setup = mock.Mock(
return_value=self.data.extra_specs)
vol = {'SystemName': self.data.storage_system}
common._find_lun = mock.Mock(return_value=vol)
common._extend_v3_volume = mock.Mock(return_value=(0, vol))
common.utils.find_volume_instance = mock.Mock(
return_value='2147483648')
common.utils.get_volume_size = mock.Mock(return_value='2147483646')
self.assertRaises(
exception.VolumeBackendAPIException,
common.extend_volume,
self.data.test_volume, newsize)
    def _cleanup(self):
        if os.path.exists(self.config_file_path):
            os.remove(self.config_file_path)
        shutil.rmtree(self.tempdir)
class EMCV2MultiPoolDriverTestCase(test.TestCase):
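    """Tests for the VMAX iSCSI driver with a multi-pool (V2) config."""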
def setUp(self):
self.data = EMCVMAXCommonData()
self.vol_v2 = self.data.test_volume_v2
self.vol_v2['provider_location'] = (
six.text_type(self.data.provider_location_multi_pool))
self.tempdir = tempfile.mkdtemp()
super(EMCV2MultiPoolDriverTestCase, self).setUp()
self.config_file_path = None
self.create_fake_config_file_multi_pool()
self.addCleanup(self._cleanup)
configuration = mock.Mock()
configuration.safe_get.return_value = 'MULTI_POOL'
configuration.cinder_emc_config_file = self.config_file_path
configuration.config_group = 'MULTI_POOL'
self.stubs.Set(emc_vmax_iscsi.EMCVMAXISCSIDriver,
'smis_do_iscsi_discovery',
self.fake_do_iscsi_discovery)
self.stubs.Set(emc_vmax_common.EMCVMAXCommon, '_get_ecom_connection',
self.fake_ecom_connection)
instancename = FakeCIMInstanceName()
self.stubs.Set(emc_vmax_utils.EMCVMAXUtils, 'get_instance_name',
instancename.fake_getinstancename)
self.stubs.Set(time, 'sleep',
self.fake_sleep)
self.stubs.Set(emc_vmax_utils.EMCVMAXUtils, 'isArrayV3',
self.fake_is_v3)
driver = emc_vmax_iscsi.EMCVMAXISCSIDriver(configuration=configuration)
driver.db = FakeDB()
self.driver = driver
self.driver.utils = emc_vmax_utils.EMCVMAXUtils(object)
def create_fake_config_file_multi_pool(self):
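        """Write a throwaway multi-pool config file into self.tempdir.

        Unlike the flat V2/V3 files, this uses the nested layout:
            <EMC><EcomServers><EcomServer>...
              <Arrays><Array><SerialNumber>1234567891011</SerialNumber>
                <PortGroups>...</PortGroups>
                <Pools>
                  <Pool><PoolName>gold</PoolName></Pool>
                  <Pool><PoolName>SATA_BRONZE1</PoolName>
                        <FastPolicy>BRONZE1</FastPolicy></Pool>
                </Pools>
              </Array></Arrays>
            </EcomServer></EcomServers></EMC>
        """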
doc = minidom.Document()
emc = doc.createElement("EMC")
doc.appendChild(emc)
eComServers = doc.createElement("EcomServers")
emc.appendChild(eComServers)
eComServer = doc.createElement("EcomServer")
eComServers.appendChild(eComServer)
ecomserverip = doc.createElement("EcomServerIp")
eComServer.appendChild(ecomserverip)
ecomserveriptext = doc.createTextNode("1.1.1.1")
ecomserverip.appendChild(ecomserveriptext)
ecomserverport = doc.createElement("EcomServerPort")
eComServer.appendChild(ecomserverport)
ecomserverporttext = doc.createTextNode("10")
ecomserverport.appendChild(ecomserverporttext)
ecomusername = doc.createElement("EcomUserName")
eComServer.appendChild(ecomusername)
ecomusernametext = doc.createTextNode("user")
ecomusername.appendChild(ecomusernametext)
ecompassword = doc.createElement("EcomPassword")
eComServer.appendChild(ecompassword)
ecompasswordtext = doc.createTextNode("pass")
ecompassword.appendChild(ecompasswordtext)
arrays = doc.createElement("Arrays")
eComServer.appendChild(arrays)
array = doc.createElement("Array")
arrays.appendChild(array)
serialNo = doc.createElement("SerialNumber")
array.appendChild(serialNo)
serialNoText = doc.createTextNode("1234567891011")
serialNo.appendChild(serialNoText)
portgroups = doc.createElement("PortGroups")
array.appendChild(portgroups)
portgroup = doc.createElement("PortGroup")
portgroups.appendChild(portgroup)
portgrouptext = doc.createTextNode(self.data.port_group)
portgroup.appendChild(portgrouptext)
pools = doc.createElement("Pools")
array.appendChild(pools)
pool = doc.createElement("Pool")
pools.appendChild(pool)
poolName = doc.createElement("PoolName")
pool.appendChild(poolName)
poolNameText = doc.createTextNode("gold")
poolName.appendChild(poolNameText)
pool2 = doc.createElement("Pool")
pools.appendChild(pool2)
pool2Name = doc.createElement("PoolName")
pool2.appendChild(pool2Name)
pool2NameText = doc.createTextNode("SATA_BRONZE1")
pool2Name.appendChild(pool2NameText)
pool2FastPolicy = doc.createElement("FastPolicy")
pool2.appendChild(pool2FastPolicy)
pool2FastPolicyText = doc.createTextNode("BRONZE1")
pool2FastPolicy.appendChild(pool2FastPolicyText)
filename = 'cinder_emc_config_V2_MULTI_POOL.xml'
self.config_file_path = self.tempdir + '/' + filename
        with open(self.config_file_path, 'w') as f:
            doc.writexml(f)
def fake_ecom_connection(self):
self.conn = FakeEcomConnection()
return self.conn
def fake_do_iscsi_discovery(self, volume):
output = []
item = '10.10.0.50: 3260,1 iqn.1992-04.com.emc: 50000973f006dd80'
output.append(item)
return output
def fake_sleep(self, seconds):
return
def fake_is_v3(self, conn, serialNumber):
return False
def default_extraspec(self):
return {'storagetype:pool': u'gold',
'volume_backend_name': 'MULTI_POOL_BE',
'storagetype:fastpolicy': None,
'storagetype:compositetype': u'concatenated',
'storagetype:membercount': 1,
'storagetype:array': u'1234567891011',
'isV3': False,
'portgroupname': u'OS-portgroup-PG'}
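    # Note on the extra specs above (a hedged reading, not asserted by the tests):
    # 'storagetype:pool' picks the V2 thin pool, 'storagetype:fastpolicy' names an
    # optional FAST tiering policy, and 'storagetype:compositetype' together with
    # 'storagetype:membercount' presumably describe how composite (meta) volumes
    # are built -- 'concatenated' with a membercount of 1 being a plain,
    # single-member volume.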
def test_validate_pool(self):
v2_valid_pool = self.data.test_volume_v2.copy()
# Pool aware scheduler enabled
v2_valid_pool['host'] = self.data.fake_host
pool = self.driver.common._validate_pool(v2_valid_pool)
self.assertEqual('gold+1234567891011', pool)
# Cannot get the pool from the host
v2_valid_pool['host'] = 'HostX@Backend'
self.assertRaises(exception.VolumeBackendAPIException,
self.driver.common._validate_pool,
v2_valid_pool)
# Legacy test. Provider Location does not have the version
v2_valid_pool['host'] = self.data.fake_host
v2_valid_pool['provider_location'] = self.data.provider_location
pool = self.driver.common._validate_pool(v2_valid_pool)
self.assertIsNone(pool)
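    # With the pool-aware scheduler, the volume's 'host' string presumably embeds
    # the pool after a '#', e.g. 'HostX@Backend#gold+1234567891011' (hypothetical
    # value); _validate_pool extracts that 'pool+serial' suffix, which is why the
    # bare 'HostX@Backend' host above raises VolumeBackendAPIException.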
def test_array_info_multi_pool(self):
arrayInfo = self.driver.utils.parse_file_to_get_array_map(
self.config_file_path)
        self.assertEqual(2, len(arrayInfo))
        for arrayInfoRec in arrayInfo:
            self.assertEqual(
                '1234567891011', arrayInfoRec['SerialNumber'])
            self.assertIn(self.data.port_group, arrayInfoRec['PortGroup'])
            self.assertTrue(
                self.data.poolname in arrayInfoRec['PoolName'] or
                'SATA_BRONZE1' in arrayInfoRec['PoolName'])
@mock.patch.object(
emc_vmax_common.EMCVMAXCommon,
'_get_pool_and_storage_system',
return_value=(None, EMCVMAXCommonData.storage_system))
@mock.patch.object(
volume_types,
'get_volume_type_extra_specs',
return_value={'volume_backend_name': 'MULTI_POOL_BE'})
def test_create_volume_multi_pool_success(
self, _mock_volume_type, mock_storage_system):
self.vol_v2['provider_location'] = None
self.driver.common._initial_setup = mock.Mock(
return_value=self.default_extraspec())
self.driver.create_volume(self.vol_v2)
@mock.patch.object(
emc_vmax_common.EMCVMAXCommon,
'_get_pool_and_storage_system',
return_value=(None, EMCVMAXCommonData.storage_system))
@mock.patch.object(
volume_types,
'get_volume_type_extra_specs',
return_value={'volume_backend_name': 'MULTI_POOL_BE'})
def test_delete_volume_multi_pool_success(
self, _mock_volume_type, mock_storage_system):
self.driver.common._initial_setup = mock.Mock(
return_value=self.default_extraspec())
self.driver.delete_volume(self.vol_v2)
@mock.patch.object(
emc_vmax_common.EMCVMAXCommon,
'_get_pool_and_storage_system',
return_value=(None, EMCVMAXCommonData.storage_system))
@mock.patch.object(
volume_types,
'get_volume_type_extra_specs',
return_value={'volume_backend_name': 'MULTI_POOL_BE'})
def test_create_volume_in_CG_multi_pool_success(
self, _mock_volume_type, mock_storage_system):
self.data.test_volume_CG['provider_location'] = None
self.driver.common._initial_setup = mock.Mock(
return_value=self.default_extraspec())
self.driver.create_volume(self.data.test_volume_CG)
@mock.patch.object(
volume_types,
'get_volume_type_extra_specs',
return_value={'volume_backend_name': 'MULTI_POOL_BE'})
def test_retype_volume_multi_pool_success(
self, _mock_volume_type):
self.driver.common._initial_setup = mock.Mock(
return_value=self.default_extraspec())
self.driver.retype(
self.data.test_ctxt, self.vol_v2, self.data.new_type,
self.data.diff, self.data.test_host)
@mock.patch.object(
emc_vmax_common.EMCVMAXCommon,
'_get_pool_and_storage_system',
return_value=(None, EMCVMAXCommonData.storage_system))
@mock.patch.object(
volume_types,
'get_volume_type_extra_specs',
return_value={'volume_backend_name': 'MULTI_POOL_BE'})
# There is only one unique array in the conf file
def test_create_CG_multi_pool_success(
self, _mock_volume_type, _mock_storage_system):
self.driver.create_consistencygroup(
self.data.test_ctxt, self.data.test_CG)
@mock.patch.object(
emc_vmax_common.EMCVMAXCommon,
'_get_members_of_replication_group',
return_value=None)
@mock.patch.object(
FakeDB,
'volume_get_all_by_group',
return_value=None)
@mock.patch.object(
emc_vmax_common.EMCVMAXCommon,
'_get_pool_and_storage_system',
return_value=(None, EMCVMAXCommonData.storage_system))
@mock.patch.object(
volume_types,
'get_volume_type_extra_specs',
return_value={'volume_backend_name': 'MULTI_POOL_BE'})
def test_delete_CG_no_volumes_multi_pool_success(
self, _mock_volume_type, _mock_storage_system,
_mock_db_volumes, _mock_members):
self.driver.delete_consistencygroup(
self.data.test_ctxt, self.data.test_CG, [])
@mock.patch.object(
emc_vmax_common.EMCVMAXCommon,
'_get_pool_and_storage_system',
return_value=(None, EMCVMAXCommonData.storage_system))
@mock.patch.object(
volume_types,
'get_volume_type_extra_specs',
return_value={'volume_backend_name': 'MULTI_POOL_BE'})
def test_delete_CG_with_volumes_multi_pool_success(
self, _mock_volume_type, _mock_storage_system):
self.driver.delete_consistencygroup(
self.data.test_ctxt, self.data.test_CG, [])
def _cleanup(self):
bExists = os.path.exists(self.config_file_path)
if bExists:
os.remove(self.config_file_path)
shutil.rmtree(self.tempdir)
class EMCV3MultiSloDriverTestCase(test.TestCase):
def setUp(self):
self.data = EMCVMAXCommonData()
self.vol_v3 = self.data.test_volume_v3
self.vol_v3['provider_location'] = (
six.text_type(self.data.provider_location_multi_pool))
self.tempdir = tempfile.mkdtemp()
super(EMCV3MultiSloDriverTestCase, self).setUp()
self.config_file_path = None
self.create_fake_config_file_multi_slo_v3()
self.addCleanup(self._cleanup)
self.set_configuration()
def set_configuration(self):
configuration = mock.Mock()
configuration.safe_get.return_value = 'MULTI_SLO_V3'
configuration.cinder_emc_config_file = self.config_file_path
configuration.config_group = 'MULTI_SLO_V3'
self.stubs.Set(emc_vmax_common.EMCVMAXCommon, '_get_ecom_connection',
self.fake_ecom_connection)
instancename = FakeCIMInstanceName()
self.stubs.Set(emc_vmax_utils.EMCVMAXUtils, 'get_instance_name',
instancename.fake_getinstancename)
self.stubs.Set(time, 'sleep',
self.fake_sleep)
self.stubs.Set(emc_vmax_utils.EMCVMAXUtils, 'isArrayV3',
self.fake_is_v3)
driver = emc_vmax_fc.EMCVMAXFCDriver(configuration=configuration)
driver.db = FakeDB()
self.driver = driver
self.driver.utils = emc_vmax_utils.EMCVMAXUtils(object)
def create_fake_config_file_multi_slo_v3(self):
doc = minidom.Document()
emc = doc.createElement("EMC")
doc.appendChild(emc)
eComServers = doc.createElement("EcomServers")
emc.appendChild(eComServers)
eComServer = doc.createElement("EcomServer")
eComServers.appendChild(eComServer)
ecomserverip = doc.createElement("EcomServerIp")
eComServer.appendChild(ecomserverip)
ecomserveriptext = doc.createTextNode("1.1.1.1")
ecomserverip.appendChild(ecomserveriptext)
ecomserverport = doc.createElement("EcomServerPort")
eComServer.appendChild(ecomserverport)
ecomserverporttext = doc.createTextNode("10")
ecomserverport.appendChild(ecomserverporttext)
ecomusername = doc.createElement("EcomUserName")
eComServer.appendChild(ecomusername)
ecomusernametext = doc.createTextNode("user")
ecomusername.appendChild(ecomusernametext)
ecompassword = doc.createElement("EcomPassword")
eComServer.appendChild(ecompassword)
ecompasswordtext = doc.createTextNode("pass")
ecompassword.appendChild(ecompasswordtext)
arrays = doc.createElement("Arrays")
eComServer.appendChild(arrays)
array = doc.createElement("Array")
arrays.appendChild(array)
serialNo = doc.createElement("SerialNumber")
array.appendChild(serialNo)
serialNoText = doc.createTextNode("1234567891011")
serialNo.appendChild(serialNoText)
portgroups = doc.createElement("PortGroups")
array.appendChild(portgroups)
portgroup = doc.createElement("PortGroup")
portgroups.appendChild(portgroup)
portgrouptext = doc.createTextNode(self.data.port_group)
portgroup.appendChild(portgrouptext)
vpools = doc.createElement("Pools")
array.appendChild(vpools)
vpool = doc.createElement("Pool")
vpools.appendChild(vpool)
poolName = doc.createElement("PoolName")
vpool.appendChild(poolName)
poolNameText = doc.createTextNode("SRP_1")
poolName.appendChild(poolNameText)
poolslo = doc.createElement("SLO")
vpool.appendChild(poolslo)
poolsloText = doc.createTextNode("Bronze")
poolslo.appendChild(poolsloText)
poolworkload = doc.createElement("Workload")
vpool.appendChild(poolworkload)
poolworkloadText = doc.createTextNode("DSS")
poolworkload.appendChild(poolworkloadText)
vpool2 = doc.createElement("Pool")
vpools.appendChild(vpool2)
pool2Name = doc.createElement("PoolName")
vpool2.appendChild(pool2Name)
pool2NameText = doc.createTextNode("SRP_1")
pool2Name.appendChild(pool2NameText)
pool2slo = doc.createElement("SLO")
vpool2.appendChild(pool2slo)
pool2sloText = doc.createTextNode("Silver")
pool2slo.appendChild(pool2sloText)
pool2workload = doc.createElement("Workload")
        vpool2.appendChild(pool2workload)
pool2workloadText = doc.createTextNode("OLTP")
pool2workload.appendChild(pool2workloadText)
filename = 'cinder_emc_config_MULTI_SLO_V3.xml'
self.config_file_path = self.tempdir + '/' + filename
        with open(self.config_file_path, 'w') as f:
            doc.writexml(f)
def fake_ecom_connection(self):
self.conn = FakeEcomConnection()
return self.conn
def fake_sleep(self, seconds):
return
def fake_is_v3(self, conn, serialNumber):
return True
def default_extraspec(self):
return {'storagetype:pool': u'SRP_1',
'volume_backend_name': 'MULTI_SLO_BE',
'storagetype:workload': u'DSS',
'storagetype:slo': u'Bronze',
'storagetype:array': u'1234567891011',
'isV3': True,
'portgroupname': u'OS-portgroup-PG'}
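    # On V3 (VMAX3) arrays the V2 FAST policy is replaced by a Service Level
    # Objective plus workload, so the extra specs above carry 'storagetype:slo'
    # ('Bronze') and 'storagetype:workload' ('DSS') against the SRP_1 storage
    # resource pool instead of 'storagetype:fastpolicy'.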
def test_validate_pool(self):
v3_valid_pool = self.data.test_volume_v3.copy()
# Pool aware scheduler enabled
v3_valid_pool['host'] = self.data.fake_host_v3
pool = self.driver.common._validate_pool(v3_valid_pool)
self.assertEqual('Bronze+SRP_1+1234567891011', pool)
# Cannot get the pool from the host
v3_valid_pool['host'] = 'HostX@Backend'
self.assertRaises(exception.VolumeBackendAPIException,
self.driver.common._validate_pool,
v3_valid_pool)
# Legacy test. Provider Location does not have the version
v3_valid_pool['host'] = self.data.fake_host_v3
v3_valid_pool['provider_location'] = self.data.provider_location
pool = self.driver.common._validate_pool(v3_valid_pool)
self.assertIsNone(pool)
def test_array_info_multi_slo(self):
arrayInfo = self.driver.utils.parse_file_to_get_array_map(
self.config_file_path)
        self.assertEqual(2, len(arrayInfo))
        for arrayInfoRec in arrayInfo:
            self.assertEqual(
                '1234567891011', arrayInfoRec['SerialNumber'])
            self.assertIn(self.data.port_group, arrayInfoRec['PortGroup'])
            self.assertIn('SRP_1', arrayInfoRec['PoolName'])
            self.assertTrue(
                'Bronze' in arrayInfoRec['SLO'] or
                'Silver' in arrayInfoRec['SLO'])
@mock.patch.object(
emc_vmax_common.EMCVMAXCommon,
'_get_pool_and_storage_system',
return_value=(None, EMCVMAXCommonData.storage_system))
@mock.patch.object(
volume_types,
'get_volume_type_extra_specs',
return_value={'volume_backend_name': 'MULTI_SLO_BE'})
def test_create_volume_multi_slo_success(
self, _mock_volume_type, mock_storage_system):
self.vol_v3['host'] = self.data.fake_host_v3
self.vol_v3['provider_location'] = None
self.driver.common._initial_setup = mock.Mock(
return_value=self.default_extraspec())
self.driver.create_volume(self.vol_v3)
@mock.patch.object(
emc_vmax_common.EMCVMAXCommon,
'_get_pool_and_storage_system',
return_value=(None, EMCVMAXCommonData.storage_system))
@mock.patch.object(
volume_types,
'get_volume_type_extra_specs',
return_value={'volume_backend_name': 'MULTI_SLO_BE'})
def test_delete_volume_multi_slo_success(
self, _mock_volume_type, mock_storage_system):
self.driver.common._initial_setup = mock.Mock(
return_value=self.default_extraspec())
self.driver.delete_volume(self.vol_v3)
@mock.patch.object(
emc_vmax_common.EMCVMAXCommon,
'_get_pool_and_storage_system',
return_value=(None, EMCVMAXCommonData.storage_system))
@mock.patch.object(
volume_types,
'get_volume_type_extra_specs',
return_value={'volume_backend_name': 'MULTI_SLO_BE'})
def test_create_volume_in_CG_multi_slo_success(
self, _mock_volume_type, mock_storage_system):
self.data.test_volume_CG_v3['provider_location'] = None
self.driver.common._initial_setup = mock.Mock(
return_value=self.default_extraspec())
self.driver.create_volume(self.data.test_volume_CG_v3)
@mock.patch.object(
emc_vmax_provision_v3.EMCVMAXProvisionV3,
'_find_new_storage_group',
return_value='Any')
@mock.patch.object(
emc_vmax_utils.EMCVMAXUtils,
'wrap_get_storage_group_from_volume',
return_value=None)
@mock.patch.object(
emc_vmax_utils.EMCVMAXUtils,
'_get_fast_settings_from_storage_group',
return_value='Gold+DSS_REP')
@mock.patch.object(
volume_types,
'get_volume_type_extra_specs',
return_value={'volume_backend_name': 'MULTI_SLO_BE'})
def test_retype_volume_multi_slo_success(
self, _mock_volume_type, mock_fast_settings,
mock_storage_group, mock_found_SG):
self.driver.common._initial_setup = mock.Mock(
return_value=self.default_extraspec())
self.assertTrue(self.driver.retype(
self.data.test_ctxt, self.data.test_volume_v3, self.data.new_type,
self.data.diff, self.data.test_host_v3))
@mock.patch.object(
emc_vmax_common.EMCVMAXCommon,
'_get_pool_and_storage_system',
return_value=(None, EMCVMAXCommonData.storage_system))
@mock.patch.object(
volume_types,
'get_volume_type_extra_specs',
return_value={'volume_backend_name': 'MULTI_SLO_BE'})
# There is only one unique array in the conf file
def test_create_CG_multi_slo_success(
self, _mock_volume_type, _mock_storage_system):
self.driver.common._initial_setup = mock.Mock(
return_value=self.default_extraspec())
self.driver.create_consistencygroup(
self.data.test_ctxt, self.data.test_CG)
@mock.patch.object(
emc_vmax_common.EMCVMAXCommon,
'_get_members_of_replication_group',
return_value=None)
@mock.patch.object(
FakeDB,
'volume_get_all_by_group',
return_value=None)
@mock.patch.object(
emc_vmax_common.EMCVMAXCommon,
'_get_pool_and_storage_system',
return_value=(None, EMCVMAXCommonData.storage_system))
@mock.patch.object(
volume_types,
'get_volume_type_extra_specs',
return_value={'volume_backend_name': 'MULTI_SLO_BE'})
def test_delete_CG_no_volumes_multi_slo_success(
self, _mock_volume_type, _mock_storage_system,
_mock_db_volumes, _mock_members):
self.driver.delete_consistencygroup(
self.data.test_ctxt, self.data.test_CG, [])
@mock.patch.object(
emc_vmax_common.EMCVMAXCommon,
'_get_pool_and_storage_system',
return_value=(None, EMCVMAXCommonData.storage_system))
@mock.patch.object(
volume_types,
'get_volume_type_extra_specs',
return_value={'volume_backend_name': 'MULTI_SLO_BE'})
def test_delete_CG_with_volumes_multi_slo_success(
self, _mock_volume_type, _mock_storage_system):
self.driver.delete_consistencygroup(
self.data.test_ctxt, self.data.test_CG, [])
def _cleanup(self):
bExists = os.path.exists(self.config_file_path)
if bExists:
os.remove(self.config_file_path)
shutil.rmtree(self.tempdir)
class EMCV2MultiPoolDriverMultipleEcomsTestCase(test.TestCase):
def setUp(self):
self.data = EMCVMAXCommonData()
self.vol_v2 = self.data.test_volume_v2
self.vol_v2['provider_location'] = (
six.text_type(self.data.provider_location_multi_pool))
self.tempdir = tempfile.mkdtemp()
super(EMCV2MultiPoolDriverMultipleEcomsTestCase, self).setUp()
self.config_file_path = None
self.create_fake_config_file_multi_ecom()
self.addCleanup(self._cleanup)
configuration = mock.Mock()
configuration.cinder_emc_config_file = self.config_file_path
configuration.safe_get.return_value = 'MULTI_ECOM'
configuration.config_group = 'MULTI_ECOM'
self.stubs.Set(emc_vmax_common.EMCVMAXCommon, '_get_ecom_connection',
self.fake_ecom_connection)
instancename = FakeCIMInstanceName()
self.stubs.Set(emc_vmax_utils.EMCVMAXUtils, 'get_instance_name',
instancename.fake_getinstancename)
self.stubs.Set(time, 'sleep',
self.fake_sleep)
self.stubs.Set(emc_vmax_utils.EMCVMAXUtils, 'isArrayV3',
self.fake_is_v3)
driver = emc_vmax_fc.EMCVMAXFCDriver(configuration=configuration)
driver.db = FakeDB()
driver.common.conn = FakeEcomConnection()
driver.zonemanager_lookup_service = FakeLookupService()
self.driver = driver
self.driver.utils = emc_vmax_utils.EMCVMAXUtils(object)
def create_fake_config_file_multi_ecom(self):
doc = minidom.Document()
emc = doc.createElement("EMC")
doc.appendChild(emc)
eComServers = doc.createElement("EcomServers")
emc.appendChild(eComServers)
eComServer = doc.createElement("EcomServer")
eComServers.appendChild(eComServer)
ecomserverip = doc.createElement("EcomServerIp")
eComServer.appendChild(ecomserverip)
ecomserveriptext = doc.createTextNode("1.1.1.1")
ecomserverip.appendChild(ecomserveriptext)
ecomserverport = doc.createElement("EcomServerPort")
eComServer.appendChild(ecomserverport)
ecomserverporttext = doc.createTextNode("10")
ecomserverport.appendChild(ecomserverporttext)
ecomusername = doc.createElement("EcomUserName")
eComServer.appendChild(ecomusername)
ecomusernametext = doc.createTextNode("user")
ecomusername.appendChild(ecomusernametext)
ecompassword = doc.createElement("EcomPassword")
eComServer.appendChild(ecompassword)
ecompasswordtext = doc.createTextNode("pass")
ecompassword.appendChild(ecompasswordtext)
arrays = doc.createElement("Arrays")
eComServer.appendChild(arrays)
array = doc.createElement("Array")
arrays.appendChild(array)
serialNo = doc.createElement("SerialNumber")
array.appendChild(serialNo)
serialNoText = doc.createTextNode("1110987654321")
serialNo.appendChild(serialNoText)
portgroups = doc.createElement("PortGroups")
array.appendChild(portgroups)
portgroup = doc.createElement("PortGroup")
portgroups.appendChild(portgroup)
portgrouptext = doc.createTextNode(self.data.port_group)
portgroup.appendChild(portgrouptext)
pools = doc.createElement("Pools")
array.appendChild(pools)
pool = doc.createElement("Pool")
pools.appendChild(pool)
poolName = doc.createElement("PoolName")
pool.appendChild(poolName)
poolNameText = doc.createTextNode("gold")
poolName.appendChild(poolNameText)
pool2 = doc.createElement("Pool")
pools.appendChild(pool2)
pool2Name = doc.createElement("PoolName")
pool2.appendChild(pool2Name)
pool2NameText = doc.createTextNode("SATA_BRONZE1")
pool2Name.appendChild(pool2NameText)
pool2FastPolicy = doc.createElement("FastPolicy")
pool2.appendChild(pool2FastPolicy)
pool2FastPolicyText = doc.createTextNode("BRONZE1")
pool2FastPolicy.appendChild(pool2FastPolicyText)
eComServer = doc.createElement("EcomServer")
eComServers.appendChild(eComServer)
ecomserverip = doc.createElement("EcomServerIp")
eComServer.appendChild(ecomserverip)
ecomserveriptext = doc.createTextNode("1.1.1.1")
ecomserverip.appendChild(ecomserveriptext)
ecomserverport = doc.createElement("EcomServerPort")
eComServer.appendChild(ecomserverport)
ecomserverporttext = doc.createTextNode("10")
ecomserverport.appendChild(ecomserverporttext)
ecomusername = doc.createElement("EcomUserName")
eComServer.appendChild(ecomusername)
ecomusernametext = doc.createTextNode("user")
ecomusername.appendChild(ecomusernametext)
ecompassword = doc.createElement("EcomPassword")
eComServer.appendChild(ecompassword)
ecompasswordtext = doc.createTextNode("pass")
ecompassword.appendChild(ecompasswordtext)
arrays = doc.createElement("Arrays")
eComServer.appendChild(arrays)
array = doc.createElement("Array")
arrays.appendChild(array)
serialNo = doc.createElement("SerialNumber")
array.appendChild(serialNo)
serialNoText = doc.createTextNode("1234567891011")
serialNo.appendChild(serialNoText)
portgroups = doc.createElement("PortGroups")
array.appendChild(portgroups)
portgroup = doc.createElement("PortGroup")
portgroups.appendChild(portgroup)
portgrouptext = doc.createTextNode(self.data.port_group)
portgroup.appendChild(portgrouptext)
pools = doc.createElement("Pools")
array.appendChild(pools)
pool = doc.createElement("Pool")
pools.appendChild(pool)
poolName = doc.createElement("PoolName")
pool.appendChild(poolName)
poolNameText = doc.createTextNode("gold")
poolName.appendChild(poolNameText)
pool2 = doc.createElement("Pool")
pools.appendChild(pool2)
pool2Name = doc.createElement("PoolName")
pool2.appendChild(pool2Name)
pool2NameText = doc.createTextNode("SATA_BRONZE1")
pool2Name.appendChild(pool2NameText)
pool2FastPolicy = doc.createElement("FastPolicy")
pool2.appendChild(pool2FastPolicy)
pool2FastPolicyText = doc.createTextNode("BRONZE1")
pool2FastPolicy.appendChild(pool2FastPolicyText)
filename = 'cinder_emc_config_V2_MULTI_ECOM.xml'
self.config_file_path = self.tempdir + '/' + filename
        with open(self.config_file_path, 'w') as f:
            doc.writexml(f)
def fake_ecom_connection(self):
self.conn = FakeEcomConnection()
return self.conn
def fake_sleep(self, seconds):
return
def fake_is_v3(self, conn, serialNumber):
return False
def test_array_info_multi_ecom_no_fast(self):
pool = 'gold+1234567891011'
arrayInfo = self.driver.utils.parse_file_to_get_array_map(
self.config_file_path)
        self.assertEqual(4, len(arrayInfo))
poolRec = self.driver.utils.extract_record(arrayInfo, pool)
self.assertEqual('1234567891011', poolRec['SerialNumber'])
self.assertEqual(self.data.port_group, poolRec['PortGroup'])
self.assertEqual(self.data.poolname, poolRec['PoolName'])
self.assertEqual('user', poolRec['EcomUserName'])
self.assertEqual('pass', poolRec['EcomPassword'])
self.assertIsNone(poolRec['FastPolicy'])
self.assertFalse(poolRec['EcomUseSSL'])
def test_array_info_multi_ecom_fast(self):
pool = 'SATA_BRONZE1+1234567891011'
arrayInfo = self.driver.utils.parse_file_to_get_array_map(
self.config_file_path)
        self.assertEqual(4, len(arrayInfo))
poolRec = self.driver.utils.extract_record(arrayInfo, pool)
self.assertEqual('1234567891011', poolRec['SerialNumber'])
self.assertEqual(self.data.port_group, poolRec['PortGroup'])
self.assertEqual('SATA_BRONZE1', poolRec['PoolName'])
self.assertEqual('user', poolRec['EcomUserName'])
self.assertEqual('pass', poolRec['EcomPassword'])
self.assertEqual('BRONZE1', poolRec['FastPolicy'])
self.assertFalse(poolRec['EcomUseSSL'])
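    # The 4 asserted in the two tests above follows from the config file built in
    # create_fake_config_file_multi_ecom: two <EcomServer> entries, each with one
    # array holding two pools, presumably yield one record per
    # (server, array, pool) combination, i.e. 2 x 1 x 2 = 4.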
@mock.patch.object(
emc_vmax_common.EMCVMAXCommon,
'_get_pool_and_storage_system',
return_value=(None, EMCVMAXCommonData.storage_system))
@mock.patch.object(
volume_types,
'get_volume_type_extra_specs',
return_value={'volume_backend_name': 'MULTI_ECOM_BE'})
def test_create_volume_multi_ecom_success(
self, _mock_volume_type, mock_storage_system):
self.vol_v2['provider_location'] = None
self.driver.create_volume(self.vol_v2)
@mock.patch.object(
emc_vmax_common.EMCVMAXCommon,
'_get_pool_and_storage_system',
return_value=(None, EMCVMAXCommonData.storage_system))
@mock.patch.object(
volume_types,
'get_volume_type_extra_specs',
return_value={'volume_backend_name': 'MULTI_ECOM_BE'})
    # Fails because there is more than one unique array in the conf file
def test_create_CG_multi_array_failure(
self, _mock_volume_type, _mock_storage_system):
self.assertRaises(exception.VolumeBackendAPIException,
self.driver.create_consistencygroup,
self.data.test_ctxt,
self.data.test_CG)
@mock.patch.object(
emc_vmax_common.EMCVMAXCommon,
'_get_members_of_replication_group',
return_value=None)
@mock.patch.object(
FakeDB,
'volume_get_all_by_group',
return_value=None)
@mock.patch.object(
emc_vmax_common.EMCVMAXCommon,
'_get_pool_and_storage_system',
return_value=(None, EMCVMAXCommonData.storage_system))
@mock.patch.object(
volume_types,
'get_volume_type_extra_specs',
return_value={'volume_backend_name': 'MULTI_ECOM_BE'})
    # There is more than one unique array in the conf file
def test_delete_CG_no_volumes_multi_array_failure(
self, _mock_volume_type, _mock_storage_system,
_mock_db_volumes, _mock_members):
self.assertRaises(exception.VolumeBackendAPIException,
self.driver.delete_consistencygroup,
self.data.test_ctxt,
self.data.test_CG,
[])
@mock.patch.object(
emc_vmax_common.EMCVMAXCommon,
'_get_pool_and_storage_system',
return_value=(None, EMCVMAXCommonData.storage_system))
@mock.patch.object(
volume_types,
'get_volume_type_extra_specs',
return_value={'volume_backend_name': 'MULTI_ECOM_BE'})
def test_create_volume_in_CG_multi_ecom_success(
self, _mock_volume_type, mock_storage_system):
self.data.test_volume_CG['provider_location'] = None
self.driver.create_volume(self.data.test_volume_CG)
def _cleanup(self):
bExists = os.path.exists(self.config_file_path)
if bExists:
os.remove(self.config_file_path)
shutil.rmtree(self.tempdir)
class EMCVMAXProvisionV3Test(test.TestCase):
def setUp(self):
self.data = EMCVMAXCommonData()
super(EMCVMAXProvisionV3Test, self).setUp()
configuration = mock.Mock()
configuration.safe_get.return_value = 'ProvisionV3Tests'
configuration.config_group = 'ProvisionV3Tests'
emc_vmax_common.EMCVMAXCommon._gather_info = mock.Mock()
driver = emc_vmax_iscsi.EMCVMAXISCSIDriver(configuration=configuration)
driver.db = FakeDB()
self.driver = driver
def test_get_storage_pool_setting(self):
provisionv3 = self.driver.common.provisionv3
conn = FakeEcomConnection()
slo = 'Bronze'
workload = 'DSS'
poolInstanceName = {}
poolInstanceName['InstanceID'] = "SATA_GOLD1"
poolInstanceName['CreationClassName'] = (
self.data.storagepool_creationclass)
storagePoolCapability = provisionv3.get_storage_pool_capability(
conn, poolInstanceName)
storagepoolsetting = provisionv3.get_storage_pool_setting(
conn, storagePoolCapability, slo, workload)
        self.assertIn('Bronze:DSS', storagepoolsetting['InstanceID'])
def test_get_storage_pool_setting_exception(self):
provisionv3 = self.driver.common.provisionv3
conn = FakeEcomConnection()
slo = 'Bronze'
workload = 'NONE'
poolInstanceName = {}
poolInstanceName['InstanceID'] = "SATA_GOLD1"
poolInstanceName['CreationClassName'] = (
self.data.storagepool_creationclass)
storagePoolCapability = provisionv3.get_storage_pool_capability(
conn, poolInstanceName)
self.assertRaises(exception.VolumeBackendAPIException,
provisionv3.get_storage_pool_setting,
conn, storagePoolCapability, slo, workload)
def test_extend_volume_in_SG(self):
provisionv3 = self.driver.common.provisionv3
conn = FakeEcomConnection()
storageConfigService = {
'CreationClassName': 'Symm_ElementCompositionService',
'SystemName': 'SYMMETRIX+000195900551'}
theVolumeInstanceName = (
conn.EnumerateInstanceNames("EMC_StorageVolume")[0])
inVolumeInstanceName = (
conn.EnumerateInstanceNames("EMC_StorageVolume")[0])
volumeSize = 3
extraSpecs = {'volume_backend_name': 'GOLD_BE',
'isV3': True}
job = {
'Job': {'InstanceID': '9999', 'status': 'success', 'type': None}}
conn.InvokeMethod = mock.Mock(return_value=(4096, job))
provisionv3.utils.wait_for_job_complete = mock.Mock(return_value=(
0, 'Success'))
volumeDict = {'classname': u'Symm_StorageVolume',
'keybindings': EMCVMAXCommonData.keybindings}
provisionv3.get_volume_dict_from_job = (
mock.Mock(return_value=volumeDict))
result = provisionv3.extend_volume_in_SG(conn, storageConfigService,
theVolumeInstanceName,
inVolumeInstanceName,
volumeSize, extraSpecs)
self.assertEqual(
({'classname': u'Symm_StorageVolume',
'keybindings': {
'CreationClassName': u'Symm_StorageVolume',
'DeviceID': u'1',
'SystemCreationClassName': u'Symm_StorageSystem',
'SystemName': u'SYMMETRIX+000195900551'}}, 0), result)
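    # The 4096 mocked as InvokeMethod's return code above is presumably the SMI-S
    # "job started" status: the call completed asynchronously, so the driver then
    # polls via wait_for_job_complete, whose (0, 'Success') / (2, 'Failure')
    # return values drive the success and exception paths of these two tests.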
def test_extend_volume_in_SG_with_Exception(self):
provisionv3 = self.driver.common.provisionv3
conn = FakeEcomConnection()
storageConfigService = {
'CreationClassName': 'Symm_ElementCompositionService',
'SystemName': 'SYMMETRIX+000195900551'}
theVolumeInstanceName = (
conn.EnumerateInstanceNames("EMC_StorageVolume")[0])
inVolumeInstanceName = (
conn.EnumerateInstanceNames("EMC_StorageVolume")[0])
volumeSize = 3
extraSpecs = {'volume_backend_name': 'GOLD_BE',
'isV3': True}
job = {
'Job': {'InstanceID': '9999', 'status': 'success', 'type': None}}
conn.InvokeMethod = mock.Mock(return_value=(4096, job))
provisionv3.utils.wait_for_job_complete = mock.Mock(return_value=(
2, 'Failure'))
self.assertRaises(
exception.VolumeBackendAPIException,
provisionv3.extend_volume_in_SG, conn, storageConfigService,
theVolumeInstanceName, inVolumeInstanceName, volumeSize,
extraSpecs)
class EMCVMAXMaskingTest(test.TestCase):
def setUp(self):
self.data = EMCVMAXCommonData()
super(EMCVMAXMaskingTest, self).setUp()
configuration = mock.Mock()
configuration.safe_get.return_value = 'MaskingTests'
configuration.config_group = 'MaskingTests'
emc_vmax_common.EMCVMAXCommon._get_ecom_connection = mock.Mock(
return_value=self.fake_ecom_connection())
emc_vmax_common.EMCVMAXCommon._gather_info = mock.Mock(
return_value=self.fake_gather_info())
instancename = FakeCIMInstanceName()
emc_vmax_utils.EMCVMAXUtils.get_instance_name = (
instancename.fake_getinstancename)
driver = emc_vmax_iscsi.EMCVMAXISCSIDriver(configuration=configuration)
driver.db = FakeDB()
self.driver = driver
self.driver.utils = emc_vmax_utils.EMCVMAXUtils(object)
def fake_ecom_connection(self):
conn = FakeEcomConnection()
return conn
def fake_gather_info(self):
return
def test_get_v3_default_storage_group_instance_name(self):
masking = self.driver.common.masking
conn = self.fake_ecom_connection()
extraSpecs = self.data.extra_specs
masking._get_and_remove_from_storage_group_v3 = mock.Mock()
controllerConfigService = (
self.driver.utils.find_controller_configuration_service(
conn, self.data.storage_system))
maskingviewdict = self.driver.common._populate_masking_dict(
self.data.test_volume, self.data.connector, extraSpecs)
result = (
masking._get_v3_default_storagegroup_instancename(
conn, maskingviewdict['volumeInstance'],
maskingviewdict,
controllerConfigService, maskingviewdict['volumeName']))
self.assertEqual('OS-SRP_1-Bronze-DSS-SG', result['ElementName'])
def test_get_v3_default_storage_group_instance_name_warning(self):
masking = self.driver.common.masking
conn = self.fake_ecom_connection()
extraSpecs = self.data.extra_specs
masking.utils.get_storage_groups_from_volume = mock.Mock(
return_value=[])
controllerConfigService = (
self.driver.utils.find_controller_configuration_service(
conn, self.data.storage_system))
maskingviewdict = self.driver.common._populate_masking_dict(
self.data.test_volume, self.data.connector, extraSpecs)
result = (
masking._get_v3_default_storagegroup_instancename(
conn, maskingviewdict['volumeInstance'],
maskingviewdict,
controllerConfigService, maskingviewdict['volumeName']))
self.assertIsNone(result)
def test_return_volume_to_default_storage_group_v3(self):
masking = self.driver.common.masking
conn = self.fake_ecom_connection()
volumeInstanceName = (
conn.EnumerateInstanceNames("EMC_StorageVolume")[0])
volumeInstance = conn.GetInstance(volumeInstanceName)
volumeName = "V3-Vol"
extraSpecs = {'volume_backend_name': 'V3_BE',
'isV3': True,
'storagetype:pool': 'SRP_1',
'storagetype:workload': 'DSS',
'storagetype:slo': 'Bronze'}
controllerConfigService = (
self.driver.utils.find_controller_configuration_service(
conn, self.data.storage_system))
masking.provisionv3.create_storage_group_v3 = mock.Mock(
return_value={'Value'})
masking._is_volume_in_storage_group = mock.Mock(
return_value=True)
masking.return_volume_to_default_storage_group_v3 = mock.Mock()
masking._return_back_to_default_sg(
conn, controllerConfigService, volumeInstance, volumeName,
extraSpecs)
masking.return_volume_to_default_storage_group_v3.assert_called_with(
conn, controllerConfigService,
volumeInstance, volumeName, extraSpecs)
def test_return_volume_to_default_storage_group_v3_exception(self):
masking = self.driver.common.masking
conn = self.fake_ecom_connection()
volumeInstanceName = (
conn.EnumerateInstanceNames("EMC_StorageVolume")[0])
volumeInstance = conn.GetInstance(volumeInstanceName)
volumeName = "V3-Vol"
extraSpecs = {'volume_backend_name': 'V3_BE',
'isV3': True,
'storagetype:pool': 'SRP_1',
'storagetype:workload': 'DSS',
'storagetype:slo': 'Bronze'}
controllerConfigService = (
self.driver.utils.find_controller_configuration_service(
conn, self.data.storage_system))
self.assertRaises(
exception.VolumeBackendAPIException,
masking.return_volume_to_default_storage_group_v3,
conn, controllerConfigService,
volumeInstance, volumeName, extraSpecs)
def test_add_volume_to_sg_and_verify(self):
masking = self.driver.common.masking
conn = self.fake_ecom_connection()
volumeInstanceName = (
conn.EnumerateInstanceNames("EMC_StorageVolume")[0])
volumeInstance = conn.GetInstance(volumeInstanceName)
volumeName = "V3-Vol"
storageGroupInstanceName = self.data.storagegroups[0]
sgGroupName = self.data.storagegroupname
extraSpecs = {'volume_backend_name': 'V3_BE',
'isV3': True,
'storagetype:pool': 'SRP_1',
'storagetype:workload': 'DSS',
'storagetype:slo': 'Bronze'}
controllerConfigService = (
self.driver.utils.find_controller_configuration_service(
conn, self.data.storage_system))
msg = masking._add_volume_to_sg_and_verify(
conn, controllerConfigService, storageGroupInstanceName,
volumeInstance, volumeName, sgGroupName, extraSpecs)
self.assertIsNone(msg)
def test_remove_volume_from_sg(self):
masking = self.driver.common.masking
conn = self.fake_ecom_connection()
volumeInstanceName = (
conn.EnumerateInstanceNames("EMC_StorageVolume")[0])
volumeInstance = conn.GetInstance(volumeInstanceName)
storageGroupInstanceName = self.data.storagegroups[1]
extraSpecs = {'volume_backend_name': 'V3_BE',
'isV3': True,
'storagetype:pool': 'SRP_1',
'storagetype:workload': 'DSS',
'storagetype:slo': 'Bronze'}
controllerConfigService = (
self.driver.utils.find_controller_configuration_service(
conn, self.data.storage_system))
masking._remove_volume_from_sg = mock.Mock()
masking._cleanup_deletion_v3(
conn, controllerConfigService, volumeInstance, extraSpecs)
masking._remove_volume_from_sg.assert_called_with(
conn, controllerConfigService, storageGroupInstanceName,
volumeInstance, extraSpecs)
class EMCVMAXFCTest(test.TestCase):
def setUp(self):
self.data = EMCVMAXCommonData()
super(EMCVMAXFCTest, self).setUp()
configuration = mock.Mock()
configuration.safe_get.return_value = 'FCTests'
configuration.config_group = 'FCTests'
emc_vmax_common.EMCVMAXCommon._gather_info = mock.Mock()
driver = emc_vmax_fc.EMCVMAXFCDriver(configuration=configuration)
driver.db = FakeDB()
self.driver = driver
def test_terminate_connection(self):
common = self.driver.common
common.conn = FakeEcomConnection()
common._unmap_lun = mock.Mock()
common.get_masking_view_by_volume = mock.Mock(
return_value='testMV')
common.get_masking_views_by_port_group = mock.Mock(
return_value=[])
common.get_target_wwns = mock.Mock(
return_value=EMCVMAXCommonData.target_wwns)
data = self.driver.terminate_connection(self.data.test_volume_v3,
self.data.connector)
common.get_target_wwns.assert_called_once_with(
EMCVMAXCommonData.storage_system, EMCVMAXCommonData.connector)
numTargetWwns = len(EMCVMAXCommonData.target_wwns)
self.assertEqual(numTargetWwns, len(data['data']))
def test_get_common_masking_views_two_exist(self):
common = self.driver.common
common.conn = FakeEcomConnection()
maskingviews = [{'CreationClassName': 'Symm_LunMaskingView',
'ElementName': 'MV1'},
{'CreationClassName': 'Symm_LunMaskingView',
'ElementName': 'MV2'}]
portGroupInstanceName = (
self.driver.common.masking._get_port_group_from_masking_view(
common.conn, self.data.lunmaskctrl_name,
self.data.storage_system))
initiatorGroupInstanceName = (
self.driver.common.masking._get_initiator_group_from_masking_view(
common.conn, self.data.lunmaskctrl_name,
self.data.storage_system))
common.get_masking_views_by_port_group = mock.Mock(
return_value=maskingviews)
common.get_masking_views_by_initiator_group = mock.Mock(
return_value=maskingviews)
mvInstances = self.driver._get_common_masking_views(
portGroupInstanceName, initiatorGroupInstanceName)
        self.assertEqual(2, len(mvInstances))
def test_get_common_masking_views_one_overlap(self):
common = self.driver.common
common.conn = FakeEcomConnection()
maskingviewsPG = [{'CreationClassName': 'Symm_LunMaskingView',
'ElementName': 'MV1'},
{'CreationClassName': 'Symm_LunMaskingView',
'ElementName': 'MV2'}]
maskingviewsIG = [{'CreationClassName': 'Symm_LunMaskingView',
'ElementName': 'MV1'}]
portGroupInstanceName = (
self.driver.common.masking._get_port_group_from_masking_view(
common.conn, self.data.lunmaskctrl_name,
self.data.storage_system))
initiatorGroupInstanceName = (
self.driver.common.masking._get_initiator_group_from_masking_view(
common.conn, self.data.lunmaskctrl_name,
self.data.storage_system))
common.get_masking_views_by_port_group = mock.Mock(
return_value=maskingviewsPG)
common.get_masking_views_by_initiator_group = mock.Mock(
return_value=maskingviewsIG)
mvInstances = self.driver._get_common_masking_views(
portGroupInstanceName, initiatorGroupInstanceName)
        self.assertEqual(1, len(mvInstances))
def test_get_common_masking_views_no_overlap(self):
common = self.driver.common
common.conn = FakeEcomConnection()
maskingviewsPG = [{'CreationClassName': 'Symm_LunMaskingView',
'ElementName': 'MV2'}]
maskingviewsIG = [{'CreationClassName': 'Symm_LunMaskingView',
'ElementName': 'MV1'}]
portGroupInstanceName = (
self.driver.common.masking._get_port_group_from_masking_view(
common.conn, self.data.lunmaskctrl_name,
self.data.storage_system))
initiatorGroupInstanceName = (
self.driver.common.masking._get_initiator_group_from_masking_view(
common.conn, self.data.lunmaskctrl_name,
self.data.storage_system))
common.get_masking_views_by_port_group = mock.Mock(
return_value=maskingviewsPG)
common.get_masking_views_by_initiator_group = mock.Mock(
return_value=maskingviewsIG)
mvInstances = self.driver._get_common_masking_views(
portGroupInstanceName, initiatorGroupInstanceName)
        self.assertEqual(0, len(mvInstances))
class EMCVMAXUtilsTest(test.TestCase):
def setUp(self):
self.data = EMCVMAXCommonData()
super(EMCVMAXUtilsTest, self).setUp()
configuration = mock.Mock()
configuration.safe_get.return_value = 'UtilsTests'
configuration.config_group = 'UtilsTests'
emc_vmax_common.EMCVMAXCommon._gather_info = mock.Mock()
driver = emc_vmax_iscsi.EMCVMAXISCSIDriver(configuration=configuration)
driver.db = FakeDB()
self.driver = driver
self.driver.utils = emc_vmax_utils.EMCVMAXUtils(object)
def test_get_target_endpoints(self):
conn = FakeEcomConnection()
hardwareid = 123456789012345
result = self.driver.utils.get_target_endpoints(conn, hardwareid)
self.assertEqual(
([{'Name': '5000090000000000'}]), result)
def test_get_protocol_controller(self):
conn = FakeEcomConnection()
hardwareid = 123456789012345
result = self.driver.utils.get_protocol_controller(conn, hardwareid)
self.assertEqual(
({'CreationClassName': 'Symm_LunMaskingView',
'ElementName': 'OS-fakehost-gold-I-MV'}), result)
def test_get_protocol_controller_exception(self):
conn = FakeEcomConnection()
conn.AssociatorNames = mock.Mock(return_value=[])
hardwareid = 123456789012345
self.assertRaises(
exception.VolumeBackendAPIException,
self.driver.utils.get_protocol_controller,
conn, hardwareid)
def test_set_target_element_supplier_in_rsd(self):
conn = FakeEcomConnection()
extraSpecs = self.data.extra_specs
repServiceInstanceName = (
self.driver.utils.find_replication_service(
conn, self.data.storage_system))
rsdInstance = self.driver.utils.set_target_element_supplier_in_rsd(
conn, repServiceInstanceName,
emc_vmax_common.SNAPVX_REPLICATION_TYPE,
emc_vmax_common.CREATE_NEW_TARGET, extraSpecs)
self.assertIsNotNone(rsdInstance)
def test_set_copy_methodology_in_rsd(self):
conn = FakeEcomConnection()
extraSpecs = self.data.extra_specs
repServiceInstanceName = (
self.driver.utils.find_replication_service(
conn, self.data.storage_system))
rsdInstance = self.driver.utils.set_copy_methodology_in_rsd(
conn, repServiceInstanceName,
emc_vmax_provision.SYNC_CLONE_LOCAL,
emc_vmax_provision.COPY_ON_WRITE, extraSpecs)
self.assertIsNotNone(rsdInstance)
class EMCVMAXCommonTest(test.TestCase):
def setUp(self):
self.data = EMCVMAXCommonData()
super(EMCVMAXCommonTest, self).setUp()
configuration = mock.Mock()
configuration.safe_get.return_value = 'CommonTests'
configuration.config_group = 'CommonTests'
emc_vmax_common.EMCVMAXCommon._gather_info = mock.Mock()
driver = emc_vmax_iscsi.EMCVMAXISCSIDriver(configuration=configuration)
driver.db = FakeDB()
self.driver = driver
self.driver.utils = emc_vmax_utils.EMCVMAXUtils(object)
@mock.patch.object(
emc_vmax_common.EMCVMAXCommon,
'_get_pool_and_storage_system',
return_value=(None, EMCVMAXCommonData.storage_system))
def test_create_duplicate_volume(self, mock_pool):
common = self.driver.common
common.conn = FakeEcomConnection()
volumeInstanceName = (
common.conn.EnumerateInstanceNames("EMC_StorageVolume")[0])
sourceInstance = common.conn.GetInstance(volumeInstanceName)
cloneName = "SS-V3-Vol"
extraSpecs = {'volume_backend_name': 'V3_BE',
'isV3': True,
'storagetype:pool': 'SRP_1',
'storagetype:workload': 'DSS',
'storagetype:slo': 'Bronze'}
targetInstance = common.conn.GetInstance(volumeInstanceName)
common.utils.find_volume_instance = mock.Mock(
return_value=targetInstance)
duplicateVolumeInstance = self.driver.common._create_duplicate_volume(
sourceInstance, cloneName, extraSpecs)
self.assertIsNotNone(duplicateVolumeInstance)
def test_cleanup_target(self):
common = self.driver.common
common.conn = FakeEcomConnection()
volumeInstanceName = (
common.conn.EnumerateInstanceNames("EMC_StorageVolume")[0])
extraSpecs = {'volume_backend_name': 'V3_BE',
'isV3': True,
'storagetype:pool': 'SRP_1',
'storagetype:workload': 'DSS',
'storagetype:slo': 'Bronze'}
targetInstance = common.conn.GetInstance(volumeInstanceName)
repServiceInstanceName = (
self.driver.utils.find_replication_service(
common.conn, self.data.storage_system))
common.utils.find_sync_sv_by_target = mock.Mock(
return_value=(None, None))
self.driver.common._cleanup_target(
repServiceInstanceName, targetInstance, extraSpecs)
----
authors: ["zhangsheng1730@hotmail.com"]
author_id: zhangsheng1730@hotmail.com
----
blob_id: a281901b631c2d1cd8e0a7f71112b0a17d2268be
directory_id: 7143ab72314f2fd01c77cbe30413e7c9c1861d14
path: /grapple/middleware.py
content_id: f9e6b07035681d5721a5b9d50fdfa6c4d7858ff3
detected_licenses: ["MIT", "BSD-3-Clause"]
license_type: permissive
repo_name: yashttm/wagtail-grapple
snapshot_id: 00e57912993f8914ee3b200e030e494ee1048be4
revision_id: abaf758d4d3b6b023a36b5291d36cc6523ebabf2
branch_name: refs/heads/main
visit_date: 2023-08-04T02:03:35.607325
revision_date: 2021-10-05T05:56:53
committer_date: 2021-10-05T05:56:53
github_id: 413,691,061
star_events_count: 0
fork_events_count: 0
gha_license_id: BSD-3-Clause
gha_event_created_at: 2021-10-05T05:52:12
gha_created_at: 2021-10-05T05:52:12
gha_language: null
src_encoding: UTF-8
language: Python
is_vendor: false
is_generated: false
length_bytes: 1,871
extension: py
content:
import inspect
from graphql.execution.middleware import MIDDLEWARE_RESOLVER_FUNCTION
from graphene import ResolveInfo
from .registry import registry
ROOT_TYPES = ["Query", "Mutation", "Subscription"]
def get_middleware_resolvers(middlewares):
for middleware in middlewares:
if inspect.isfunction(middleware):
            yield middleware
            # a plain function has no `resolve` attribute; skip the class handling below
            continue
if not hasattr(middleware, MIDDLEWARE_RESOLVER_FUNCTION):
raise Exception(
"Middleware must be either a class or a function. Got: {}.\nYou can read more about middleware here: https://docs.graphene-python.org/en/latest/execution/middleware/".format(
type(middleware)
)
)
yield getattr(middleware(), MIDDLEWARE_RESOLVER_FUNCTION)
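# A minimal sketch (commented out; `log_middleware` and `TimingMiddleware` are
# hypothetical names) of the two middleware shapes the generator above accepts:
#
#     def log_middleware(next, root, info, **args):     # function: yielded as-is
#         return next(root, info, **args)
#
#     class TimingMiddleware(object):                   # class: instantiated, then its
#         def resolve(self, next, root, info, **args):  # bound `resolve` is yielded
#             return next(root, info, **args)
#
#     resolvers = list(get_middleware_resolvers([log_middleware, TimingMiddleware]))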
class IsAuthenticatedMiddleware(object):
def resolve(self, next, root, info, **args):
if not info.context.user.is_authenticated:
return None
return next(root, info, **args)
class IsAnonymousMiddleware(object):
def resolve(self, next, root, info, **args):
if not info.context.user.is_anonymous:
return None
return next(root, info, **args)
class GrappleMiddleware(object):
def __init__(self):
self.field_middlewares = {}
for field_name in registry.field_middlewares:
self.field_middlewares[field_name] = list(
get_middleware_resolvers(registry.field_middlewares[field_name])
)
def resolve(self, next, root, info: ResolveInfo, **args):
field_name = info.field_name
parent_name = info.parent_type.name
if field_name in self.field_middlewares and parent_name in ROOT_TYPES:
for middleware in self.field_middlewares[field_name]:
return middleware(next, root, info, **args)
return next(root, info, **args)
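# GrappleMiddleware only consults registry.field_middlewares for root-level fields
# (Query/Mutation/Subscription), and the loop in resolve() returns on the first
# middleware in the list. Registration presumably looks like this sketch (the
# field name "secret_page" is hypothetical):
#
#     from grapple.registry import registry
#     registry.field_middlewares["secret_page"] = [IsAuthenticatedMiddleware]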
----
authors: ["noreply@github.com"]
author_id: yashttm.noreply@github.com
----
blob_id: c98ac2e400d366ef5b1728e28d6130604a754592
directory_id: 80381da9ebaeb9232669dc0540fa59fe8a1f794a
path: /setor/userAgents.py
content_id: c69503b1f9fa30630034b58c4fd6b9c607d4c475
detected_licenses: ["MIT"]
license_type: permissive
repo_name: IMMILDEW/setor
snapshot_id: 9f1a850ba6e7e7228b0d13baf0517898dba111f1
revision_id: 3d22cdcdd077da2b70ce9ce9ed963125d178f7ed
branch_name: refs/heads/master
visit_date: 2020-03-28T13:44:39.632886
revision_date: 2018-05-15T02:38:28
committer_date: 2018-05-15T02:38:28
github_id: null
star_events_count: 0
fork_events_count: 0
gha_license_id: null
gha_event_created_at: null
gha_created_at: null
gha_language: null
src_encoding: UTF-8
language: Python
is_vendor: false
is_generated: false
length_bytes: 8,830
extension: py
content:
#!/usr/bin/env python
# -*- coding: utf-8 -*-
'''
This userAgents put from: https://github.com/claudioviviani/wordbrutepress/blob/master/wordbrutepress.py#L85
Thanks so much for it.
'''
userAgents = [
'Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.9.0.7) Gecko/2009021910 Firefox/3.0.7',
'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/36.0.1985.125 Safari/537.36',
    'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_4) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/36.0.1985.125 Safari/537.36',
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_4) AppleWebKit/537.77.4 (KHTML, like Gecko) Version/7.0.5 Safari/537.77.4',
'Mozilla/5.0 (Windows NT 6.3; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/36.0.1985.125 Safari/537.36',
'Mozilla/5.0 (Windows NT 6.1; WOW64; rv:31.0) Gecko/20100101 Firefox/31.0',
'Mozilla/5.0 (Windows NT 6.1; WOW64; rv:30.0) Gecko/20100101 Firefox/30.0',
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10.9; rv:31.0) Gecko/20100101 Firefox/31.0',
'Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/36.0.1985.125 Safari/537.36',
'Mozilla/5.0 (iPhone; CPU iPhone OS 7_1_2 like Mac OS X) AppleWebKit/537.51.2 (KHTML, like Gecko) Version/7.0 Mobile/11D257 Safari/9537.53',
'Mozilla/5.0 (Windows NT 6.1; WOW64; Trident/7.0; rv:11.0) like Gecko',
'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/35.0.1916.153 Safari/537.36',
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10.9; rv:30.0) Gecko/20100101 Firefox/30.0',
'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/36.0.1985.125 Safari/537.36',
'Mozilla/5.0 (Windows NT 6.2; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/36.0.1985.125 Safari/537.36',
'Mozilla/5.0 (Windows NT 6.3; WOW64; rv:31.0) Gecko/20100101 Firefox/31.0',
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_3) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/36.0.1985.125 Safari/537.36',
'Mozilla/5.0 (iPad; CPU OS 7_1_2 like Mac OS X) AppleWebKit/537.51.2 (KHTML, like Gecko) Version/7.0 Mobile/11D257 Safari/9537.53',
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_4) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/36.0.1985.143 Safari/537.36',
'Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:31.0) Gecko/20100101 Firefox/31.0',
'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/36.0.1985.143 Safari/537.36',
'Mozilla/5.0 (Windows NT 6.1; rv:31.0) Gecko/20100101 Firefox/31.0',
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_8_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/36.0.1985.125 Safari/537.36',
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_4) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/35.0.1916.153 Safari/537.36',
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_2) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/36.0.1985.125 Safari/537.36',
'Mozilla/5.0 (iPhone; CPU iPhone OS 7_1_1 like Mac OS X) AppleWebKit/537.51.2 (KHTML, like Gecko) Version/7.0 Mobile/11D201 Safari/9537.53',
'Mozilla/5.0 (Windows NT 5.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/36.0.1985.125 Safari/537.36',
'Mozilla/5.0 (Windows NT 5.1; rv:31.0) Gecko/20100101 Firefox/31.0',
'Mozilla/5.0 (Windows NT 6.3; WOW64; rv:30.0) Gecko/20100101 Firefox/30.0',
'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/35.0.1916.153 Safari/537.36',
'Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:30.0) Gecko/20100101 Firefox/30.0',
'Mozilla/5.0 (Windows NT 6.3; WOW64; Trident/7.0; rv:11.0) like Gecko',
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_3) AppleWebKit/537.76.4 (KHTML, like Gecko) Version/7.0.4 Safari/537.76.4',
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_4) AppleWebKit/537.78.2 (KHTML, like Gecko) Version/7.0.6 Safari/537.78.2',
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10) AppleWebKit/538.46 (KHTML, like Gecko) Version/8.0 Safari/538.46',
'Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 6.1; WOW64; Trident/5.0)',
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/36.0.1985.125 Safari/537.36',
'Mozilla/5.0 (Windows NT 6.1; rv:30.0) Gecko/20100101 Firefox/30.0',
'Mozilla/5.0 (Windows NT 6.3; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/35.0.1916.153 Safari/537.36',
'Mozilla/5.0 (compatible; MSIE 10.0; Windows NT 6.1; WOW64; Trident/6.0)',
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_6_8) AppleWebKit/534.59.10 (KHTML, like Gecko) Version/5.1.9 Safari/534.59.10',
'Mozilla/5.0 (Windows NT 6.1; Trident/7.0; rv:11.0) like Gecko',
'Mozilla/5.0 (Windows NT 6.3; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/36.0.1985.143 Safari/537.36',
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_7_5) AppleWebKit/537.77.4 (KHTML, like Gecko) Version/6.1.5 Safari/537.77.4',
'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Ubuntu Chromium/34.0.1847.116 Chrome/34.0.1847.116 Safari/537.36',
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_8_5) AppleWebKit/537.77.4 (KHTML, like Gecko) Version/6.1.5 Safari/537.77.4',
'Mozilla/5.0 (X11; Linux x86_64; rv:31.0) Gecko/20100101 Firefox/31.0',
'Mozilla/5.0 (iPad; CPU OS 7_1_1 like Mac OS X) AppleWebKit/537.51.2 (KHTML, like Gecko) Version/7.0 Mobile/11D201 Safari/9537.53',
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_7_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/36.0.1985.125 Safari/537.36',
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_6_8) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/36.0.1985.125 Safari/537.36',
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_2) AppleWebKit/537.75.14 (KHTML, like Gecko) Version/7.0.3 Safari/537.75.14',
'Mozilla/5.0 (X11; Ubuntu; Linux i686; rv:31.0) Gecko/20100101 Firefox/31.0',
'Mozilla/5.0 (iPhone; CPU iPhone OS 7_1 like Mac OS X) AppleWebKit/537.51.2 (KHTML, like Gecko) Version/7.0 Mobile/11D167 Safari/9537.53',
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_2) AppleWebKit/537.74.9 (KHTML, like Gecko) Version/7.0.2 Safari/537.74.9',
'Mozilla/5.0 (X11; Linux x86_64; rv:30.0) Gecko/20100101 Firefox/30.0',
'Mozilla/5.0 (iPhone; CPU iPhone OS 7_0_4 like Mac OS X) AppleWebKit/537.51.1 (KHTML, like Gecko) Version/7.0 Mobile/11B554a Safari/9537.53',
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10.8; rv:31.0) Gecko/20100101 Firefox/31.0',
'Mozilla/5.0 (Windows NT 6.1; WOW64; rv:24.0) Gecko/20100101 Firefox/24.0',
'Mozilla/5.0 (Windows NT 6.0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/36.0.1985.125 Safari/537.36',
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10.7; rv:31.0) Gecko/20100101 Firefox/31.0',
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_3) AppleWebKit/537.75.14 (KHTML, like Gecko) Version/7.0.3 Safari/537.75.14',
'Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 6.1; Trident/5.0)',
'Mozilla/5.0 (Windows NT 5.1; rv:30.0) Gecko/20100101 Firefox/30.0',
'Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/35.0.1916.153 Safari/537.36',
'Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/36.0.1985.143 Safari/537.36',
'Mozilla/5.0 (Windows NT 6.1; WOW64; rv:29.0) Gecko/20100101 Firefox/29.0',
'Mozilla/5.0 (Windows NT 6.2; WOW64; rv:31.0) Gecko/20100101 Firefox/31.0',
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/36.0.1985.125 Safari/537.36',
'Mozilla/5.0 (iPhone; CPU iPhone OS 7_1_2 like Mac OS X) AppleWebKit/537.51.1 (KHTML, like Gecko) GSA/4.1.0.31802 Mobile/11D257 Safari/9537.53',
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_3) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/35.0.1916.153 Safari/537.36',
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10.6; rv:31.0) Gecko/20100101 Firefox/31.0',
'Mozilla/5.0 (Windows NT 6.1; rv:24.0) Gecko/20100101 Firefox/24.0',
'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/35.0.1916.114 Safari/537.36',
'Mozilla/5.0 (Windows NT 6.2; WOW64; rv:30.0) Gecko/20100101 Firefox/30.0',
'Mozilla/5.0 (X11; Linux i686) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/36.0.1985.125 Safari/537.36',
'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/36.0.1985.143 Safari/537.36',
'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Ubuntu Chromium/36.0.1985.125 Chrome/36.0.1985.125 Safari/537.36',
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10.8; rv:30.0) Gecko/20100101 Firefox/30.0',
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10) AppleWebKit/600.1.3 (KHTML, like Gecko) Version/8.0 Safari/600.1.3',
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_2) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/35.0.1916.153 Safari/537.36'
]
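# A typical use of this list (a sketch; the `requests` call is illustrative and
# not part of this module):
#
#     import random
#     import requests
#     headers = {'User-Agent': random.choice(userAgents)}
#     requests.get('https://example.com', headers=headers)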
----
authors: ["summon.agus@gmail.com"]
author_id: summon.agus@gmail.com
----
blob_id: 0c768b30b11e1b2fc2f054dd70ff21fe6e306f6b
directory_id: ba99ab021ba53de77611d30b0f7c18e6fedb4b09
path: /tango_with_django_project/settings.py
content_id: f88052ecdb29983050753096b5969c940248991a
detected_licenses: []
license_type: no_license
repo_name: blairprophett/Django-Tutorial
snapshot_id: 8961bc86efc8115b953bf4e82f77ff2fab3c7293
revision_id: 7ef7f366804d7a31d4f10d5bbb57bc0722355446
branch_name: refs/heads/master
visit_date: 2020-05-30T18:00:46.009618
revision_date: 2015-01-21T21:25:09
committer_date: 2015-01-21T21:25:09
github_id: 29,602,982
star_events_count: 0
fork_events_count: 0
gha_license_id: null
gha_event_created_at: null
gha_created_at: null
gha_language: null
src_encoding: UTF-8
language: Python
is_vendor: false
is_generated: false
length_bytes: 2,540
extension: py
content:
"""
Django settings for tango_with_django_project project.
For more information on this file, see
https://docs.djangoproject.com/en/1.6/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.6/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
BASE_DIR = os.path.dirname(os.path.dirname(__file__))
SETTINGS_DIR = os.path.dirname(__file__)
PROJECT_PATH = os.path.join(SETTINGS_DIR, os.pardir)
PROJECT_PATH = os.path.abspath(PROJECT_PATH)
TEMPLATE_PATH = os.path.join(PROJECT_PATH, 'templates')
TEMPLATE_DIRS = [
TEMPLATE_PATH,
]
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.6/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '@6otpnfbd$%x66hpy((&+tnj=1k4s=6c22n(ivvu8ox7y(12*%'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
TEMPLATE_DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'rango'
)
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
ROOT_URLCONF = 'tango_with_django_project.urls'
WSGI_APPLICATION = 'tango_with_django_project.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.6/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Internationalization
# https://docs.djangoproject.com/en/1.6/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.6/howto/static-files/
STATIC_PATH = os.path.join(PROJECT_PATH, 'static')
STATIC_URL = '/static/'
STATICFILES_DIRS = (
STATIC_PATH,
)
MEDIA_URL = '/media/'
MEDIA_ROOT = os.path.join(PROJECT_PATH, 'media')
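# MEDIA_URL/MEDIA_ROOT above only take effect during local development if urls.py
# also serves them; for Django 1.6 that is typically (a sketch, not part of this
# file):
#
#     from django.conf import settings
#     from django.conf.urls.static import static
#     urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)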
----
authors: ["blaircuny@gmail.com"]
author_id: blaircuny@gmail.com
----
blob_id: 920124716f92dc68d37b475413f64312238cf5c5
directory_id: df76fb81d5173476e269327b7cb0a58875c45b5c
path: /COSC262/labs/a1_4.py
content_id: 76ca15b9338dbecf25c80af18050740fda0ad873
detected_licenses: []
license_type: no_license
repo_name: pakeke-constructor/UNIVERSITY
snapshot_id: c33899dea9ebe10dde1a8a11d5f07dd83a4ad02a
revision_id: 88b3829e3ca5a45924b422f2c50b04bd0cdbfb68
branch_name: refs/heads/master
visit_date: 2023-08-26T03:51:18.023533
revision_date: 2021-11-09T00:17:39
committer_date: 2021-11-09T00:17:39
github_id: 341,054,382
star_events_count: 0
fork_events_count: 0
gha_license_id: null
gha_event_created_at: null
gha_created_at: null
gha_language: null
src_encoding: UTF-8
language: Python
is_vendor: false
is_generated: false
length_bytes: 2,305
extension: py
content:
from math import inf
EN = enumerate  # shorthand used below for enumerate
RL = lambda x: range(len(x))  # shorthand: iterate over the indices of a sequence
def get_info(info):
directed = False
weighted = False
if info[0] == 'D':
directed = True
num_verts = int(info[1])
if len(info) > 2:
if info[2] == "W":
weighted = True
return (directed, num_verts, weighted)
def adjacency_list(st):
info = st.splitlines()
directed, num_verts, weighted = get_info(info[0].split())
adj_list = []
for i in range(num_verts):
adj_list.append([])
for edge in info[1:]:
edge_info = edge.split()
s_v = int(edge_info[0])
e_v = int(edge_info[1])
if weighted:
adj_list[s_v].append((e_v, int(edge_info[2])))
else:
adj_list[s_v].append((e_v, None))
if not directed:
if weighted:
adj_list[e_v].append((s_v, int(edge_info[2])))
else:
adj_list[e_v].append((s_v, None))
return adj_list
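# Illustrative example (not part of the original file): for the string
#   "U 3 W\n0 1 1\n2 1 2\n2 0 4"
# adjacency_list returns
#   [[(1, 1), (2, 4)], [(0, 1), (2, 2)], [(1, 2), (0, 4)]]
# i.e. one list of (neighbour, weight) pairs per vertex, mirrored in both
# directions because the graph is undirected ('U').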
def which_segments(city_map):
adj = adjacency_list(city_map)
par = prims(adj)
edges_req = []
for v in RL(par):
if par[v] is not None:
new = ((tuple(sorted([v, par[v]]))))
edges_req.append(new)
return edges_req
def prims(adj, start=0):
n = len(adj)
tree = [False for _ in range(n)]
distance = [inf for _ in range(n)]
par = [None for _ in range(n)]
distance[start] = 0
while False in tree:
cur = next_vert(tree, distance)
tree[cur] = True
for v, w in adj[cur]:
if not tree[v] and w < distance[v]:
distance[v] = w
par[v] = cur
return par
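# Sketch of what prims() produces, using the 3-vertex map tested below:
# with adj = [[(1, 1), (2, 4)], [(0, 1), (2, 2)], [(1, 2), (0, 4)]] and
# start = 0 the parent array is [None, 0, 1], i.e. the MST edges (0, 1)
# and (1, 2).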
def next_vert(tree, distance):
data = []
for v, b in EN(tree):
if not b:
data.append((v, distance[v]))
cur = data[0][1]
minn = data[0][0]
for dat in data:
if dat[1] < cur:
cur = dat[1]
minn = dat[0]
return minn
city_map = """\
U 3 W
0 1 1
2 1 2
2 0 4
"""
print(sorted(which_segments(city_map)))
# expected output: [(0, 1), (1, 2)]
city_map = """\
U 1 W
"""
print(sorted(which_segments(city_map)))
city_map = """\
U 6 W
2 0 1
1 2 3
0 1 2
2 3 4
5 2 7
3 4 5
4 5 6
"""
print(sorted(which_segments(city_map)))
|
[
"OliverwdGarrett@gmail.com"
] |
OliverwdGarrett@gmail.com
|
690353cf33b668f110fa1d0e006bf265b3dbc5f1
|
2d4493191ad031287a482bca8273c2ad7a7a0db8
|
/retrieve_urls.py
|
0659dc05c03081accc5a0a26750ddf28c3aedc7c
|
[] |
no_license
|
Phleisch/MP3-z
|
a16a9e124d2ed609c714642e1a62e3d71bb81916
|
fa6ea3d2f74da94956394517bd4b63c2d2200dc4
|
refs/heads/master
| 2021-09-11T15:22:08.474392
| 2018-04-09T06:49:04
| 2018-04-09T06:49:04
| 125,971,309
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,725
|
py
|
import re
import urllib.parse
import urllib.request
from bs4 import BeautifulSoup
def get_urls(song_list):
urls = open('SongURLs.txt', 'w')
songs = song_list.readlines()
    regex = r"[^A-Za-z0-9'. ]"
search_const = 'https://www.youtube.com/results?search_query='
for song in songs:
if '#' in song:
continue
song = song.strip('\n')
unchanged = song
song = re.sub(regex, '', song)
song = song + ' audio'
song = urllib.parse.quote(song)
query = search_const + song
#print(query)
response = urllib.request.urlopen(query)
html = response.read()
soup = BeautifulSoup(html, 'html5lib')
videos_info = soup.findAll(attrs={'class':'yt-uix-tile-link'})
first_video_info = videos_info[0]
parts = first_video_info['href'].split('=')
parts[1] = urllib.parse.quote(parts[1])
first_url = 'https://www.youtube.com' + parts[0] + "=" + parts[1] + '#' + unchanged
urls.write('%s\n'%first_url)
song_list.write('%s\n'%('#' + unchanged))
urls.close()
song_list.close()
def get_url(song):
    regex = r"[^A-Za-z0-9'. ]"
search_const = 'https://www.youtube.com/results?search_query='
if '#' in song:
return
song = song.strip('\n')
unchanged = song
song = re.sub(regex, '', song)
song = song + ' audio'
song = urllib.parse.quote(song)
query = search_const + song
#print(query)
response = urllib.request.urlopen(query)
html = response.read()
soup = BeautifulSoup(html, 'html5lib')
videos_info = soup.findAll(attrs={'class':'yt-uix-tile-link'})
first_video_info = videos_info[0]
parts = first_video_info['href'].split('=')
parts[1] = urllib.parse.quote(parts[1])
first_url = 'https://www.youtube.com' + parts[0] + "=" + parts[1] + '#' + unchanged
return first_url
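# Minimal usage sketch; 'SongList.txt' is a hypothetical input file with one
# song title per line (get_urls closes both file handles itself):
#   song_list = open('SongList.txt', 'r+')
#   get_urls(song_list)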
|
[
"KaiFleischmans@gmail.com"
] |
KaiFleischmans@gmail.com
|
b67ee0f8b149e415a8c4d1da320dd9452b6f2526
|
95613a70265e6871c25a4be18075c180427d0980
|
/contrib/devtools/check-doc.py
|
ef323db12a725f400a17f49a29e303a8145e70f2
|
[
"MIT"
] |
permissive
|
CryptoLover705/NocNoc
|
394b7426a25a0b048fb5e2ae6c02b5f75099189e
|
fafa860f29c63d7357721a231fef7ad314355263
|
refs/heads/master
| 2023-02-11T04:11:49.555586
| 2021-01-01T00:21:57
| 2021-01-01T00:21:57
| 318,871,415
| 1
| 0
|
MIT
| 2020-12-05T19:25:13
| 2020-12-05T19:25:12
| null |
UTF-8
|
Python
| false
| false
| 1,912
|
py
|
#!/usr/bin/env python
# Copyright (c) 2015-2016 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
'''
This checks if all command line args are documented.
Return value is 0 to indicate no error.
Author: @MarcoFalke
'''
from subprocess import check_output
import re
FOLDER_GREP = 'src'
FOLDER_TEST = 'src/test/'
CMD_ROOT_DIR = '`git rev-parse --show-toplevel`/%s' % FOLDER_GREP
CMD_GREP_ARGS = r"egrep -r -I '(map(Multi)?Args(\.count\(|\[)|Get(Bool)?Arg\()\"\-[^\"]+?\"' %s | grep -v '%s'" % (CMD_ROOT_DIR, FOLDER_TEST)
CMD_GREP_DOCS = r"egrep -r -I 'HelpMessageOpt\(\"\-[^\"=]+?(=|\")' %s" % (CMD_ROOT_DIR)
REGEX_ARG = re.compile(r'(?:map(?:Multi)?Args(?:\.count\(|\[)|Get(?:Bool)?Arg\()\"(\-[^\"]+?)\"')
REGEX_DOC = re.compile(r'HelpMessageOpt\(\"(\-[^\"=]+?)(?:=|\")')
# list unsupported, deprecated and duplicate args as they need no documentation
SET_DOC_OPTIONAL = set(['-rpcssl', '-benchmark', '-h', '-help', '-socks', '-tor', '-debugnet', '-whitelistalwaysrelay', '-prematurewitness', '-walletprematurewitness', '-promiscuousmempoolflags', '-blockminsize', '-sendfreetransactions', '-checklevel', '-liquidityprovider', '-anonymizenocnocamount'])
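# For illustration: a source line such as GetBoolArg("-debugnet") matches
# REGEX_ARG and yields '-debugnet', while a corresponding
# HelpMessageOpt("-debugnet", ...) line would match REGEX_DOC.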
def main():
used = check_output(CMD_GREP_ARGS, shell=True)
docd = check_output(CMD_GREP_DOCS, shell=True)
args_used = set(re.findall(REGEX_ARG,used))
args_docd = set(re.findall(REGEX_DOC,docd)).union(SET_DOC_OPTIONAL)
args_need_doc = args_used.difference(args_docd)
args_unknown = args_docd.difference(args_used)
print "Args used : %s" % len(args_used)
print "Args documented : %s" % len(args_docd)
print "Args undocumented: %s" % len(args_need_doc)
print args_need_doc
print "Args unknown : %s" % len(args_unknown)
print args_unknown
exit(len(args_need_doc))
if __name__ == "__main__":
main()
|
[
"edward.thomas3@yahoo.com"
] |
edward.thomas3@yahoo.com
|
5a6006602948ef1a186d46113e83073574f1fad1
|
9133e8d160f3a647ea938414f61da955d538a078
|
/test_dir/settings.py
|
d64a8fcee51d049df2dee28a4c6704f5fa238b8d
|
[] |
no_license
|
IvanChernoborodov/Test-task
|
3832e9e5c7722de1473e2d8246e0762015e97d2b
|
a7b8824e068f796bdb6f52822c78526f67d7e56f
|
refs/heads/master
| 2021-06-25T02:59:27.243113
| 2019-11-17T21:40:23
| 2019-11-17T21:40:23
| 174,311,312
| 0
| 0
| null | 2021-06-10T21:14:44
| 2019-03-07T09:16:22
|
Python
|
UTF-8
|
Python
| false
| false
| 4,204
|
py
|
"""
Django settings for test_dir project.
Generated by 'django-admin startproject' using Django 2.1.7.
For more information on this file, see
https://docs.djangoproject.com/en/2.1/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/2.1/ref/settings/
"""
import os
from celery.schedules import crontab
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/2.1/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'w9#zw2bz%doi6&^7jjd+)3mh0yj@g-r8p4magnxhl(ee2%=q=w'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = ['*']
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'API',
'rest_framework',
'django_filters',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'test_dir.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'test_dir.wsgi.application'
# Database
# https://docs.djangoproject.com/en/2.1/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql',
'NAME': 'postgres',
'USER': 'postgres',
'HOST': 'db',
'PORT': 5432,
}
}
# Password validation
# https://docs.djangoproject.com/en/2.1/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/2.1/topics/i18n/
TIME_ZONE = 'Europe/Moscow'
LANGUAGE_CODE = 'ru-RU'
USE_TZ = True
USE_I18N = True
USE_L10N = False
DATE_FORMAT = 'd E Y'
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/2.1/howto/static-files/
STATIC_URL = '/static/'
STATIC_ROOT = os.path.join(BASE_DIR, 'static')
REST_FRAMEWORK = {
# Use Django's standard `django.contrib.auth` permissions,
# or allow read-only access for unauthenticated users.
'DEFAULT_PERMISSION_CLASSES': [
'rest_framework.permissions.DjangoModelPermissionsOrAnonReadOnly'
],
'EXCEPTION_HANDLER': 'rest_framework.views.exception_handler',
'DEFAULT_FILTER_BACKENDS': ('django_filters.rest_framework.DjangoFilterBackend',),
'DEFAULT_PAGINATION_CLASS': 'rest_framework.pagination.LimitOffsetPagination',
'PAGE_SIZE': 5,
}
CELERY_BROKER_URL = 'redis://redis:6379/0'
CELERY_RESULT_BACKEND = 'redis://redis:6379/0'
CELERY_ACCEPT_CONTENT = ['application/json']
CELERY_TASK_SERIALIZER = 'json'
CELERY_RESULT_SERIALIZER = 'json'
CELERY_BEAT_SCHEDULE = {
'main_parse': {
'task': 'API.tasks.main_parse',
'schedule': crontab(hour="*/8"),
# 'schedule': 20.0,
}
}
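# Note (assumption about intent): celery's crontab(hour="*/8") keeps the
# default minute='*', so this task fires every minute during hours 0, 8 and
# 16; crontab(minute=0, hour="*/8") would run it once per 8-hour block.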
|
[
"skich1984@gmail.com"
] |
skich1984@gmail.com
|
c7eaad1617af662738ed1cdf822982fe1af2713c
|
48423db71e54dbde439c97b94637ac4cdd829377
|
/ex006.py
|
b1f19d7d0e93bac270e976ee381494bef89ec1c8
|
[] |
no_license
|
jeeras/allexerciciespython3
|
5e12134a6a22683aa810a05e1b18bdb28791f495
|
c258ebe3d169bcc3ef0171bbf66217fb4a34052d
|
refs/heads/master
| 2020-03-19T18:52:12.345776
| 2018-06-10T17:16:59
| 2018-06-10T17:16:59
| 136,829,047
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 262
|
py
|
print("Digite uma medida em metros: ")
medida = float(input(''))
medidakm = medida / 1000
medidacm = medida * 100
medidamm = medida * 1000
print("Sua medida foi {}, em km é {}, em cm é {}, em mm é {}".format(medida, medidakm, medidacm, medidamm))
|
[
"noreply@github.com"
] |
jeeras.noreply@github.com
|
969829bbc08a2fdfed7f05ee11a5e0a8c2206fc5
|
171eed79c580d32a8e4e27e092bbbc0fa67c4681
|
/PyBot/drivers/if_sensors.py
|
835df9b3ac6848fba5a40e292ad1b5e3edb86b57
|
[
"MIT"
] |
permissive
|
Nitzsch/PyBot
|
2396b5b1a3592ae8c967f938863010ac29670858
|
94c913fac949d4fd866d9b24f37f6610c58ecafa
|
refs/heads/master
| 2020-06-04T14:16:45.296711
| 2019-07-08T17:09:11
| 2019-07-08T17:09:11
| 192,058,815
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,861
|
py
|
"""
This file manages the TCRT5000 Sensors.
Theyre used for the cliff left and right and for the wheel encodings.
the cliff sensors are connected to the mcp23017 and the wheel encoders directly to the pi
no real need of expl. methodes are quite simple.
"""
import mcp23017 as mcp
import RPi.GPIO as GPIO
import time
import pin_belegung as pins
GPIO.setmode(GPIO.BOARD)
GPIO.setwarnings(False)
pin = pins.get_cliff_sens()
cliff_sens1 = pin[0]
cliff_sens2 = pin[1]
pin = pins.get_wheel_encoder()
wheel_encoder_left = pin[0]
wheel_encoder_right = pin[1]
mcp.start(0x20)
mcp.setup(cliff_sens1, mcp.IN)
mcp.setup(cliff_sens2, mcp.IN)
GPIO.setup(wheel_encoder_left, GPIO.IN)
GPIO.setup(wheel_encoder_right, GPIO.IN)
def cliff_left():
if(mcp.input(cliff_sens1)):
return True
else:
return False
def cliff_right():
if(mcp.input(cliff_sens2)):
return True
else:
return False
# The wheel counter counts the edges on the pins (high -> low, low -> high).
# Therefore we just listen to the pin with an event; for each edge we call a function that does nothing else than
# increase our global counter.
# We can reset the counter with an extra method. This is necessary if we do computations with the count: if we just
# changed the value on the robot, it would be overwritten again on the next update.
i = 0
j = 0
# counter workers
def counter_left(channel):
global i
i +=1
def counter_right(channel):
global j
j +=1
#event detect.
def wheel_count_left():
GPIO.add_event_detect(wheel_encoder_left, GPIO.BOTH, callback=counter_left)
def wheel_count_right():
GPIO.add_event_detect(wheel_encoder_right, GPIO.BOTH, callback=counter_right)
def wheel_count():
global i,j
GPIO.add_event_detect(wheel_encoder_right, GPIO.BOTH, callback=counter_right)
return [i,j]
def reset_count():
global i
global j
i = 0
j = 0
return True
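# Usage sketch (assumes this module runs on the Pi with the wiring above):
#   import if_sensors, time
#   if_sensors.wheel_count_left()    # register the edge-counting callback
#   time.sleep(1.0)                  # let the wheel turn for a while
#   print(if_sensors.wheel_count())  # read [left, right] edge counts
#   if_sensors.reset_count()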
|
[
"noreply@github.com"
] |
Nitzsch.noreply@github.com
|
190bb7322b59d67fd63b71248baf7a3923c19c63
|
e568b2089b7ae7f5d8148994833562b8f5112975
|
/learn-python-the-hard-way/ex34.py
|
c33098f2879dbf4baeb5b48c13cef4fa14fae531
|
[] |
no_license
|
marshallnw18/Python-SysAdmin-Scripts
|
d0047ddafcee9cc18b54b210c9003923bd74ba4c
|
3cbf3979385685cd893ad2cffcc4c811788d6c91
|
refs/heads/master
| 2023-06-19T12:42:44.658130
| 2021-07-13T14:46:55
| 2021-07-13T14:46:55
| 253,178,760
| 0
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,932
|
py
|
#!/usr/bin/env python3.6
from sys import exit
def gold_room():
print("This room is full of gold. How much do you take?")
choice = input("> ")
if "0" in choice or "1" in choice:
how_much = int(choice)
else:
dead("Man, learn to type a number.")
    if how_much < 50:
        print("Nice, you're not greedy. You win!")
        exit(0)
    else:
        dead("You greedy bastard!")
def bear_room():
print("There is a bear here.")
print("The bear has a bunch of honey.")
print("The fat bear is in front of another door.")
print("How are you going to move the bear?")
bear_moved = False
while True:
choice = input("> ")
if choice == "take honey":
dead("The bear looks at you and slaps your face.")
elif choice == "taunt bear" and not bear_moved:
print("The bear has moved from the door.")
print("You can go through it now.")
bear_moved = True
elif choice == "taunt bear" and bear_moved:
dead("The bear gets pissed and chews off your leg")
elif choice == "open door" and bear_moved:
gold_room()
else:
print("I got no idea what that means.")
def cthulu_room():
print("Here you see the great evil Cthulu.")
print("He, it, whatever stares at you and you go insane.")
print("Do you flee for your life or eat your head?")
choice = input("> ")
if "flee" in choice:
start()
elif "head" in choice:
dead("Well that was tasty!")
else:
cthulu_room()
def dead(why):
print(why, "Good job!")
exit(0)
def start():
print("You are in a dark room.")
print("There is a door to your right and left.")
print("Which one do you take?")
choice = input("> ")
if choice == "left":
bear_room()
elif choice == "right":
cthulu_room()
else:
dead("You stumble around the room until you starve.")
start()
|
[
"marshallnw18@gmail.com"
] |
marshallnw18@gmail.com
|
d2fd4e42dd200d6a5bee4313df22deee3eaa91b7
|
f77fbdcd4e154af10896df57d78de1781ae66383
|
/tests/detect_threadsafety_violations_test.py
|
98588cdb21e374a4fadd4ce5b9bbecb99922c9d8
|
[
"MIT"
] |
permissive
|
hhru/pycerberus-deb
|
de6c1892f27897ee40d6c3e9ac24650372943ea0
|
6a372a549523efb55c50a7deb1db6c39ac979003
|
refs/heads/master
| 2020-05-03T05:38:41.952516
| 2011-06-02T12:55:13
| 2011-06-02T12:55:13
| 1,836,988
| 1
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,028
|
py
|
# -*- coding: UTF-8 -*-
#
# The MIT License
#
# Copyright (c) 2010 Felix Schwarz <felix.schwarz@oss.schwarz.eu>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from pycerberus.api import Validator
from pycerberus.errors import ThreadSafetyError
from pycerberus.test_util import ValidationTest
class DetectThreadSafetyViolationInValidatorTest(ValidationTest):
class NonThreadSafeValidator(Validator):
def validate(self, value, context):
self.fnord = 42
validator_class = NonThreadSafeValidator
def test_can_detect_threadsafety_violations(self):
self.assert_raises(ThreadSafetyError, self.process, 42)
def test_can_disable_threadsafety_detection(self):
class ValidatorWrittenByExpert(self.validator_class):
def __init__(self, *args, **kwargs):
self._is_internal_state_frozen = False
self.super()
self.init_validator(ValidatorWrittenByExpert())
self.assert_equals(42, self.process(42))
|
[
"knevcher@gmail.com"
] |
knevcher@gmail.com
|
30536ef72e6a6da850b8eba2d4da0b5120788252
|
1ba0c55bf6a0001a21db20bab7b82e78ec249a94
|
/blog/models.py
|
7ba306d5a4d25d1c40c2d27542ed4b315c6a0510
|
[] |
no_license
|
armaan786-web/django
|
b188bdd081160fd0e5a1b44b9a1eb6164884c77f
|
325b9c197071c3b379d0b72cfc8b9a9821a02a81
|
refs/heads/main
| 2023-03-13T00:24:58.306730
| 2021-03-03T08:25:44
| 2021-03-03T08:25:44
| 333,813,639
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 674
|
py
|
from django.db import models
# Create your models here.
class Blogpost(models.Model):
post_id = models.AutoField(primary_key=True)
title = models.CharField(max_length=5000)
head0 = models.CharField(max_length=500,default="")
c_head0 = models.CharField(max_length=5000,default="")
head1 = models.CharField(max_length=500,default="")
c_head1 = models.CharField(max_length=5000,default="")
head2 = models.CharField(max_length=500,default="")
c_head2 = models.CharField(max_length=5000,default="")
pub_date = models.DateField()
thumbnail = models.ImageField(upload_to='blog/img')
def __str__(self):
return self.title
|
[
"armaanalam65@gmail.com"
] |
armaanalam65@gmail.com
|
f91fbe6b61d5c2e529a94d7101c6699f676abe8c
|
dcb38a3260be425af100ca881f4e6d491df42d82
|
/eemont/image.py
|
1f0442594f02a7efd6eb8faba6f9e5595e69caf7
|
[
"MIT"
] |
permissive
|
sharadgupta27/eemont
|
14121ab4839403b0e1d7fa2ba76db7794d3d33ab
|
99f434e77dbbf7c015d8808ea544ee854b3af8c9
|
refs/heads/master
| 2023-06-05T11:53:50.691720
| 2021-06-27T11:49:24
| 2021-06-27T11:49:24
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 34,683
|
py
|
import ee
import warnings
import requests
from .common import _index
from .common import _maskClouds
from .common import _get_scale_params
from .common import _get_offset_params
from .common import _scale_STAC
from .common import _preprocess
from .common import _getSTAC
from .common import _getDOI
from .common import _getCitation
from .extending import extend
@extend(ee.image.Image)
def __getitem__(self, key):
'''Gets the band of the image according to the specified key.
Parameters
----------
self : ee.Image
Image to get the bands from.
key : numeric | string | list[numeric] | list[string] | slice
Key used to get the specified band. If numeric, it gets the band at that index. If string, it gets the band with that name
or that matches with regex. If list, it gets multiple bands. If slice, it calls the slice() method (the step parameter is ignored).
Returns
-------
ee.Image
Image with the selected bands.
'''
if isinstance(key,slice):
if key.start == None:
start = 0
else:
start = key.start
if key.stop == None:
stop = self.bandNames().size()
else:
stop = key.stop
selected = self.slice(start,stop)
else:
selected = self.select(key)
return selected
@extend(ee.image.Image)
def __add__(self, other):
"""Computes the addition between two images.
Parameters
----------
self : ee.Image
Left operand.
other : ee.Image | numeric | list[numeric]
Right operand. If numeric, an ee.Image is created from its value. If list, an ee.Image with n bands (n = len(list)) is created from its values.
Returns
-------
ee.Image
Addition of two images.
"""
return self.add(other)
@extend(ee.image.Image)
def __radd__(self, other):
"""Computes the addition between two images.
Parameters
----------
self : ee.Image
Right operand.
other : ee.Image | numeric | list[numeric]
Left operand. If numeric, an ee.Image is created from its value. If list, an ee.Image with n bands (n = len(list)) is created from its values.
Returns
-------
ee.Image
Addition of two images.
"""
return self.add(other)
@extend(ee.image.Image)
def __sub__(self, other):
"""Computes the subtraction between two images.
Parameters
----------
self : ee.Image
Left operand.
other : ee.Image | numeric | list[numeric]
Right operand. If numeric, an ee.Image is created from its value. If list, an ee.Image with n bands (n = len(list)) is created from its values.
Returns
-------
ee.Image
Subtraction of two images.
"""
return self.subtract(other)
@extend(ee.image.Image)
def __rsub__(self, other):
"""Computes the subtraction between two images.
Parameters
----------
self : ee.Image
Right operand.
other : ee.Image | numeric | list[numeric]
Left operand. If numeric, an ee.Image is created from its value. If list, an ee.Image with n bands (n = len(list)) is created from its values.
Returns
-------
ee.Image
Subtraction of two images.
"""
return ee.Image(other).subtract(self)
@extend(ee.image.Image)
def __mul__(self, other):
"""Computes the multiplication between two images.
Parameters
----------
self : ee.Image
Left operand.
other : ee.Image | numeric | list[numeric]
Right operand. If numeric, an ee.Image is created from its value. If list, an ee.Image with n bands (n = len(list)) is created from its values.
Returns
-------
ee.Image
Multiplication of two images.
"""
return self.multiply(other)
@extend(ee.image.Image)
def __rmul__(self, other):
"""Computes the multiplication between two images.
Parameters
----------
self : ee.Image
Right operand.
other : ee.Image | numeric | list[numeric]
Left operand. If numeric, an ee.Image is created from its value. If list, an ee.Image with n bands (n = len(list)) is created from its values.
Returns
-------
ee.Image
Multiplication of two images.
"""
return self.multiply(other)
@extend(ee.image.Image)
def __truediv__(self, other):
"""Computes the division between two images.
Parameters
----------
self : ee.Image
Left operand.
other : ee.Image | numeric | list[numeric]
Right operand. If numeric, an ee.Image is created from its value. If list, an ee.Image with n bands (n = len(list)) is created from its values.
Returns
-------
ee.Image
Division of two images.
"""
return self.divide(other)
@extend(ee.image.Image)
def __rtruediv__(self, other):
"""Computes the division between two images.
Parameters
----------
self : ee.Image
Right operand.
other : ee.Image | numeric | list[numeric]
Left operand. If numeric, an ee.Image is created from its value. If list, an ee.Image with n bands (n = len(list)) is created from its values.
Returns
-------
ee.Image
Division of two images.
"""
return ee.Image(other).divide(self)
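# Illustrative note: these overloads make band math read like plain Python.
# Assuming a scaled Sentinel-2 image S2, an NDVI could be written manually as
#   ndvi = (S2['B8'] - S2['B4']) / (S2['B8'] + S2['B4'])
# which resolves to ee.Image subtract/add/divide calls under the hood.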
@extend(ee.image.Image)
def __floordiv__(self, other):
"""Computes the floor division of two images.
Parameters
----------
self : ee.Image
Left operand.
other : ee.Image | numeric | list[numeric]
Right operand. If numeric, an ee.Image is created from its value. If list, an ee.Image with n bands (n = len(list)) is created from its values.
Returns
-------
ee.Image
Floor division of two images.
"""
return self.divide(other).floor()
@extend(ee.image.Image)
def __rfloordiv__(self, other):
"""Computes the floor division of two images.
Parameters
----------
self : ee.Image
Right operand.
other : ee.Image | numeric | list[numeric]
Left operand. If numeric, an ee.Image is created from its value. If list, an ee.Image with n bands (n = len(list)) is created from its values.
Returns
-------
ee.Image
Floor division of two images.
"""
return ee.Image(other).divide(self).floor()
@extend(ee.image.Image)
def __mod__(self, other):
"""Computes the modulo of two images.
Parameters
----------
self : ee.Image
Left operand.
other : ee.Image | numeric | list[numeric]
Right operand. If numeric, an ee.Image is created from its value. If list, an ee.Image with n bands (n = len(list)) is created from its values.
Returns
-------
ee.Image
Modulo of two images.
"""
return self.mod(other)
@extend(ee.image.Image)
def __rmod__(self, other):
"""Computes the modulo of two images.
Parameters
----------
self : ee.Image
Right operand.
other : ee.Image | numeric | list[numeric]
Left operand. If numeric, an ee.Image is created from its value. If list, an ee.Image with n bands (n = len(list)) is created from its values.
Returns
-------
ee.Image
Modulo of two images.
"""
return ee.Image(other).mod(self)
@extend(ee.image.Image)
def __pow__(self, other):
"""Computes the base (left operand) to the power (right operand).
Parameters
----------
self : ee.Image
Left operand.
other : ee.Image | numeric | list[numeric]
Right operand. If numeric, an ee.Image is created from its value. If list, an ee.Image with n bands (n = len(list)) is created from its values.
Returns
-------
ee.Image
Base to the power of two images.
"""
return self.pow(other)
@extend(ee.image.Image)
def __rpow__(self, other):
"""Computes the base (left operand) to the power (right operand).
Parameters
----------
self : ee.Image
Right operand.
other : ee.Image | numeric | list[numeric]
Left operand. If numeric, an ee.Image is created from its value. If list, an ee.Image with n bands (n = len(list)) is created from its values.
Returns
-------
ee.Image
Base to the power of two images.
"""
return ee.Image(other).pow(self)
@extend(ee.image.Image)
def __lshift__(self, other):
"""Computes the left shift operation between two images.
Parameters
----------
self : ee.Image
Left operand.
other : ee.Image | numeric | list[numeric]
Right operand. If numeric, an ee.Image is created from its value. If list, an ee.Image with n bands (n = len(list)) is created from its values.
Returns
-------
ee.Image
Left shift operation.
"""
return self.leftShift(other)
@extend(ee.image.Image)
def __rlshift__(self, other):
"""Computes the left shift operation between two images.
Parameters
----------
self : ee.Image
Right operand.
other : ee.Image | numeric | list[numeric]
Left operand. If numeric, an ee.Image is created from its value. If list, an ee.Image with n bands (n = len(list)) is created from its values.
Returns
-------
ee.Image
Left shift operation.
"""
return ee.Image(other).leftShift(self)
@extend(ee.image.Image)
def __rshift__(self, other):
"""Computes the right shift operation between two images.
Parameters
----------
self : ee.Image
Left operand.
other : ee.Image | numeric | list[numeric]
Right operand. If numeric, an ee.Image is created from its value. If list, an ee.Image with n bands (n = len(list)) is created from its values.
Returns
-------
ee.Image
Right shift operation.
"""
return self.rightShift(other)
@extend(ee.image.Image)
def __rrshift__(self, other):
"""Computes the right shift operation between two images.
Parameters
----------
self : ee.Image
Right operand.
other : ee.Image | numeric | list[numeric]
Left operand. If numeric, an ee.Image is created from its value. If list, an ee.Image with n bands (n = len(list)) is created from its values.
Returns
-------
ee.Image
Right shift operation.
"""
return ee.Image(other).rightShift(self)
@extend(ee.image.Image)
def __and__(self, other):
"""Computes the binary operator AND between two images.
Parameters
----------
self : ee.Image
Left operand.
other : ee.Image | numeric | list[numeric]
Right operand. If numeric, an ee.Image is created from its value. If list, an ee.Image with n bands (n = len(list)) is created from its values.
Returns
-------
ee.Image
Binary operator AND.
"""
return self.And(other)
@extend(ee.image.Image)
def __rand__(self, other):
"""Computes the binary operator AND between two images.
Parameters
----------
self : ee.Image
Right operand.
other : ee.Image | numeric | list[numeric]
Left operand. If numeric, an ee.Image is created from its value. If list, an ee.Image with n bands (n = len(list)) is created from its values.
Returns
-------
ee.Image
Binary operator AND.
"""
return ee.Image(other).And(self)
@extend(ee.image.Image)
def __or__(self, other):
"""Computes the binary operator OR between two images.
Parameters
----------
self : ee.Image
Left operand.
other : ee.Image | numeric | list[numeric]
Right operand. If numeric, an ee.Image is created from its value. If list, an ee.Image with n bands (n = len(list)) is created from its values.
Returns
-------
ee.Image
Binary operator OR.
"""
return self.Or(other)
@extend(ee.image.Image)
def __ror__(self, other):
"""Computes the binary operator OR between two images.
Parameters
----------
self : ee.Image
Right operand.
other : ee.Image | numeric | list[numeric]
Left operand. If numeric, an ee.Image is created from its value. If list, an ee.Image with n bands (n = len(list)) is created from its values.
Returns
-------
ee.Image
Binary operator OR.
"""
return ee.Image(other).Or(self)
@extend(ee.image.Image)
def __lt__(self, other):
"""Computes the rich comparison LOWER THAN between two images.
Parameters
----------
self : ee.Image
Left operand.
other : ee.Image | numeric | list[numeric]
Right operand. If numeric, an ee.Image is created from its value. If list, an ee.Image with n bands (n = len(list)) is created from its values.
Returns
-------
ee.Image
Rich comparison LOWER THAN.
"""
return self.lt(other)
@extend(ee.image.Image)
def __le__(self, other):
"""Computes the rich comparison LOWER THAN OR EQUAL between two images.
Parameters
----------
self : ee.Image
Left operand.
other : ee.Image | numeric | list[numeric]
Right operand. If numeric, an ee.Image is created from its value. If list, an ee.Image with n bands (n = len(list)) is created from its values.
Returns
-------
ee.Image
Rich comparison LOWER THAN OR EQUAL.
"""
return self.lte(other)
@extend(ee.image.Image)
def __eq__(self, other):
"""Computes the rich comparison EQUAL between two images.
Parameters
----------
self : ee.Image
Left operand.
other : ee.Image | numeric | list[numeric]
Right operand. If numeric, an ee.Image is created from its value. If list, an ee.Image with n bands (n = len(list)) is created from its values.
Returns
-------
ee.Image
Rich comparison EQUAL.
"""
return self.eq(other)
@extend(ee.image.Image)
def __ne__(self, other):
"""Computes the rich comparison NOT EQUAL THAN between two images.
Parameters
----------
self : ee.Image
Left operand.
other : ee.Image | numeric | list[numeric]
Right operand. If numeric, an ee.Image is created from its value. If list, an ee.Image with n bands (n = len(list)) is created from its values.
Returns
-------
ee.Image
Rich comparison NOT EQUAL.
"""
return self.neq(other)
@extend(ee.image.Image)
def __gt__(self, other):
"""Computes the rich comparison GREATER THAN between two images.
Parameters
----------
self : ee.Image
Left operand.
other : ee.Image | numeric | list[numeric]
Right operand. If numeric, an ee.Image is created from its value. If list, an ee.Image with n bands (n = len(list)) is created from its values.
Returns
-------
ee.Image
Rich comparison GREATER THAN.
"""
return self.gt(other)
@extend(ee.image.Image)
def __ge__(self, other):
"""Computes the rich comparison GREATER THAN OR EQUAL between two images.
Parameters
----------
self : ee.Image
Left operand.
other : ee.Image | numeric | list[numeric]
Right operand. If numeric, an ee.Image is created from its value. If list, an ee.Image with n bands (n = len(list)) is created from its values.
Returns
-------
ee.Image
Rich comparison GREATER THAN OR EQUAL.
"""
return self.gte(other)
@extend(ee.image.Image)
def __neg__(self):
"""Computes the unary operator NEGATIVE on an image.
Parameters
----------
self : ee.Image
Operand.
Returns
-------
ee.Image
Unary operator NEGATIVE.
"""
return self.multiply(-1)
@extend(ee.image.Image)
def __invert__(self):
"""Computes the unary operator NOT on an image.
Parameters
----------
self : ee.Image
Operand.
Returns
-------
ee.Image
Unary operator NOT.
"""
return self.Not()
@extend(ee.image.Image)
def index(
self,
index="NDVI",
G=2.5,
C1=6.0,
C2=7.5,
L=1.0,
cexp=1.16,
nexp=2.0,
alpha=0.1,
slope=1.0,
intercept=0.0,
kernel="RBF",
sigma="0.5 * (a + b)",
p=2.0,
c=1.0,
online=False,
):
"""Computes one or more spectral indices (indices are added as bands) for an image.
Warning
-------------
**Pending Deprecation:** The :code:`index()` method will no longer be available for future versions. Please use :code:`spectralIndices()` instead.
Tip
----------
Check more info about the supported platforms and spectral indices in the :ref:`User Guide<Spectral Indices Computation>`.
Parameters
----------
self : ee.Image [this]
Image to compute indices on. Must be scaled to [0,1].
index : string | list[string], default = 'NDVI'
Index or list of indices to compute.\n
Available options:
- 'vegetation' : Compute all vegetation indices.
- 'burn' : Compute all burn indices.
- 'water' : Compute all water indices.
- 'snow' : Compute all snow indices.
- 'drought' : Compute all drought indices.
- 'urban' : Compute all urban (built-up) indices.
- 'kernel' : Compute all kernel indices.
- 'all' : Compute all indices listed below.
Awesome Spectral Indices for GEE:
Check the complete list of indices `here <https://awesome-ee-spectral-indices.readthedocs.io/en/latest/list.html>`_.
G : float, default = 2.5
Gain factor. Used just for index = 'EVI'.
C1 : float, default = 6.0
Coefficient 1 for the aerosol resistance term. Used just for index = 'EVI'.
C2 : float, default = 7.5
Coefficient 2 for the aerosol resistance term. Used just for index = 'EVI'.
L : float, default = 1.0
Canopy background adjustment. Used just for index = ['EVI','SAVI'].
cexp : float, default = 1.16
Exponent used for OCVI.
nexp : float, default = 2.0
Exponent used for GDVI.
alpha : float, default = 0.1
Weighting coefficient used for WDRVI.
slope : float, default = 1.0
Soil line slope.
intercept : float, default = 0.0
Soil line intercept.
kernel : str, default = 'RBF'
Kernel used for kernel indices.\n
Available options:
- 'linear' : Linear Kernel.
- 'RBF' : Radial Basis Function (RBF) Kernel.
- 'poly' : Polynomial Kernel.
sigma : str | float, default = '0.5 * (a + b)'
Length-scale parameter. Used for kernel = 'RBF'. If str, this must be an expression including 'a' and 'b'. If numeric, this must be positive.
p : float, default = 2.0
Kernel degree. Used for kernel = 'poly'.
c : float, default = 1.0
Free parameter that trades off the influence of higher-order versus lower-order terms in the polynomial kernel.
Used for kernel = 'poly'. This must be greater than or equal to 0.
online : boolean, default = False
Whether to retrieve the most recent list of indices directly from the GitHub repository and not from the local copy.
.. versionadded:: 0.2.0
Returns
-------
ee.Image
Image with the computed spectral index, or indices, as new bands.
See Also
--------
scale : Scales bands on an image.
Examples
--------
>>> import ee, eemont
>>> ee.Authenticate()
>>> ee.Initialize()
>>> S2 = ee.ImageCollection('COPERNICUS/S2_SR').scale().first()
- Computing one spectral index:
>>> S2.index('NDVI')
- Computing indices with different parameters:
>>> S2.index('SAVI',L = 0.5)
- Computing multiple indices:
>>> S2.index(['NDVI','EVI','GNDVI'])
- Computing a specific group of indices:
>>> S2.index('vegetation')
- Computing kernel indices:
>>> S2.index(['kNDVI'],kernel = 'poly',p = 5)
- Computing all indices:
>>> S2.index('all')
"""
warnings.warn(
"index() will be deprecated in future versions, please use spectralIndices() instead",
PendingDeprecationWarning,
)
return _index(
self,
index,
G,
C1,
C2,
L,
cexp,
nexp,
alpha,
slope,
intercept,
kernel,
sigma,
p,
c,
online,
)
@extend(ee.image.Image)
def spectralIndices(
self,
index="NDVI",
G=2.5,
C1=6.0,
C2=7.5,
L=1.0,
cexp=1.16,
nexp=2.0,
alpha=0.1,
slope=1.0,
intercept=0.0,
kernel="RBF",
sigma="0.5 * (a + b)",
p=2.0,
c=1.0,
online=False,
):
"""Computes one or more spectral indices (indices are added as bands) for an image from the Awesome List of Spectral Indices.
Tip
----------
Check more info about the supported platforms and spectral indices in the :ref:`User Guide<Spectral Indices Computation>`.
Parameters
----------
self : ee.Image [this]
Image to compute indices on. Must be scaled to [0,1].
index : string | list[string], default = 'NDVI'
Index or list of indices to compute.\n
Available options:
- 'vegetation' : Compute all vegetation indices.
- 'burn' : Compute all burn indices.
- 'water' : Compute all water indices.
- 'snow' : Compute all snow indices.
- 'drought' : Compute all drought indices.
- 'urban' : Compute all urban (built-up) indices.
- 'kernel' : Compute all kernel indices.
- 'all' : Compute all indices listed below.
Awesome Spectral Indices for GEE:
Check the complete list of indices `here <https://awesome-ee-spectral-indices.readthedocs.io/en/latest/list.html>`_.
G : float, default = 2.5
Gain factor. Used just for index = 'EVI'.
C1 : float, default = 6.0
Coefficient 1 for the aerosol resistance term. Used just for index = 'EVI'.
C2 : float, default = 7.5
Coefficient 2 for the aerosol resistance term. Used just for index = 'EVI'.
L : float, default = 1.0
Canopy background adjustment. Used just for index = ['EVI','SAVI'].
cexp : float, default = 1.16
Exponent used for OCVI.
nexp : float, default = 2.0
Exponent used for GDVI.
alpha : float, default = 0.1
Weighting coefficient used for WDRVI.
slope : float, default = 1.0
Soil line slope.
intercept : float, default = 0.0
Soil line intercept.
kernel : str, default = 'RBF'
Kernel used for kernel indices.\n
Available options:
- 'linear' : Linear Kernel.
- 'RBF' : Radial Basis Function (RBF) Kernel.
- 'poly' : Polynomial Kernel.
sigma : str | float, default = '0.5 * (a + b)'
Length-scale parameter. Used for kernel = 'RBF'. If str, this must be an expression including 'a' and 'b'. If numeric, this must be positive.
p : float, default = 2.0
Kernel degree. Used for kernel = 'poly'.
c : float, default = 1.0
Free parameter that trades off the influence of higher-order versus lower-order terms in the polynomial kernel.
Used for kernel = 'poly'. This must be greater than or equal to 0.
online : boolean, default = False
Whether to retrieve the most recent list of indices directly from the GitHub repository and not from the local copy.
Returns
-------
ee.Image
Image with the computed spectral index, or indices, as new bands.
See Also
--------
scaleAndOffset : Scales bands on an image.
Examples
--------
>>> import ee, eemont
>>> ee.Authenticate()
>>> ee.Initialize()
>>> S2 = ee.ImageCollection('COPERNICUS/S2_SR').scaleAndOffset().first()
- Computing one spectral index:
>>> S2.spectralIndices('NDVI')
- Computing indices with different parameters:
>>> S2.spectralIndices('SAVI',L = 0.5)
- Computing multiple indices:
>>> S2.spectralIndices(['NDVI','EVI','GNDVI'])
- Computing a specific group of indices:
>>> S2.spectralIndices('vegetation')
- Computing kernel indices:
>>> S2.spectralIndices(['kNDVI'],kernel = 'poly',p = 5)
- Computing all indices:
>>> S2.spectralIndices('all')
"""
return _index(
self,
index,
G,
C1,
C2,
L,
cexp,
nexp,
alpha,
slope,
intercept,
kernel,
sigma,
p,
c,
online,
)
@extend(ee.image.Image)
def maskClouds(
self,
method="cloud_prob",
prob=60,
maskCirrus=True,
maskShadows=True,
scaledImage=False,
dark=0.15,
cloudDist=1000,
buffer=250,
cdi=None,
):
"""Masks clouds and shadows in an image (valid just for Surface Reflectance products).
Tip
----------
Check more info about the supported platforms and clouds masking in the :ref:`User Guide<Masking Clouds and Shadows>`.
Parameters
----------
self : ee.Image [this]
Image to mask.
method : string, default = 'cloud_prob'
Method used to mask clouds.\n
Available options:
- 'cloud_prob' : Use cloud probability.
- 'qa' : Use Quality Assessment band.
This parameter is ignored for Landsat products.
prob : numeric [0, 100], default = 60
Cloud probability threshold. Valid just for method = 'cloud_prob'. This parameter is ignored for Landsat products.
maskCirrus : boolean, default = True
Whether to mask cirrus clouds. Valid just for method = 'qa'. This parameter is ignored for Landsat products.
maskShadows : boolean, default = True
Whether to mask cloud shadows. For more info see 'Braaten, J. 2020. Sentinel-2 Cloud Masking with s2cloudless. Google Earth Engine, Community Tutorials'.
scaledImage : boolean, default = False
Whether the pixel values are scaled to the range [0,1] (reflectance values). This parameter is ignored for Landsat products.
dark : float [0,1], default = 0.15
NIR threshold. NIR values below this threshold are potential cloud shadows. This parameter is ignored for Landsat products.
cloudDist : int, default = 1000
Maximum distance in meters (m) to look for cloud shadows from cloud edges. This parameter is ignored for Landsat products.
buffer : int, default = 250
Distance in meters (m) to dilate cloud and cloud shadows objects. This parameter is ignored for Landsat products.
cdi : float [-1,1], default = None
Cloud Displacement Index threshold. Values below this threshold are considered potential clouds.
A cdi = None means that the index is not used. For more info see 'Frantz, D., Haß, E., Uhl, A., Stoffels, J., Hill, J. 2018. Improvement of the Fmask algorithm for Sentinel-2 images:
Separating clouds from bright surfaces based on parallax effects. Remote Sensing of Environment 215: 471-481'.
This parameter is ignored for Landsat products.
Returns
-------
ee.Image
Cloud-shadow masked image.
Notes
-----
This method may mask water as well as clouds for the Sentinel-3 Radiance product.
Examples
--------
>>> import ee, eemont
>>> ee.Authenticate()
>>> ee.Initialize()
>>> S2 = ee.ImageCollection('COPERNICUS/S2_SR').first().maskClouds(prob = 75,buffer = 300,cdi = -0.5)
"""
return _maskClouds(
self,
method,
prob,
maskCirrus,
maskShadows,
scaledImage,
dark,
cloudDist,
buffer,
cdi,
)
@extend(ee.image.Image)
def scale(self):
"""Scales bands on an image.
Warning
-------------
**Pending Deprecation:** The :code:`scale()` method will no longer be available for future versions. Please use :code:`scaleAndOffset()` instead.
Tip
----------
Check more info about the supported platforms and image scaling in the :ref:`User Guide<Image Scaling>`.
Parameters
----------
self : ee.Image [this]
Image to scale.
Returns
-------
ee.Image
Scaled image.
Examples
--------
>>> import ee, eemont
>>> ee.Authenticate()
>>> ee.Initialize()
>>> S2 = ee.ImageCollection('COPERNICUS/S2_SR').first().scale()
"""
warnings.warn(
"scale() will be deprecated in future versions, please use scaleAndOffset() instead",
PendingDeprecationWarning,
)
return _scale_STAC(self)
@extend(ee.image.Image)
def getScaleParams(self):
"""Gets the scale parameters for each band of the image.
Parameters
----------
self : ee.Image (this)
Image to get the scale parameters from.
Returns
-------
dict
Dictionary with the scale parameters for each band.
See Also
--------
getOffsetParams : Gets the offset parameters for each band of the image.
scaleAndOffset : Scales bands on an image according to their scale and offset parameters.
Examples
--------
>>> import ee, eemont
>>> ee.Authenticate()
>>> ee.Initialize()
>>> ee.ImageCollection('MODIS/006/MOD11A2').first().getScaleParams()
{'Clear_sky_days': 1.0,
'Clear_sky_nights': 1.0,
'Day_view_angl': 1.0,
'Day_view_time': 0.1,
'Emis_31': 0.002,
'Emis_32': 0.002,
'LST_Day_1km': 0.02,
'LST_Night_1km': 0.02,
'Night_view_angl': 1.0,
'Night_view_time': 0.1,
'QC_Day': 1.0,
'QC_Night': 1.0}
"""
return _get_scale_params(self)
@extend(ee.image.Image)
def getOffsetParams(self):
"""Gets the offset parameters for each band of the image.
Parameters
----------
self : ee.Image (this)
Image to get the offset parameters from.
Returns
-------
dict
Dictionary with the offset parameters for each band.
See Also
--------
getScaleParams : Gets the scale parameters for each band of the image.
scaleAndOffset : Scales bands on an image according to their scale and offset parameters.
Examples
--------
>>> import ee, eemont
>>> ee.Authenticate()
>>> ee.Initialize()
>>> ee.ImageCollection('MODIS/006/MOD11A2').first().getOffsetParams()
{'Clear_sky_days': 0.0,
'Clear_sky_nights': 0.0,
'Day_view_angl': -65.0,
'Day_view_time': 0.0,
'Emis_31': 0.49,
'Emis_32': 0.49,
'LST_Day_1km': 0.0,
'LST_Night_1km': 0.0,
'Night_view_angl': -65.0,
'Night_view_time': 0.0,
'QC_Day': 0.0,
'QC_Night': 0.0}
"""
return _get_offset_params(self)
@extend(ee.image.Image)
def scaleAndOffset(self):
"""Scales bands on an image according to their scale and offset parameters.
Tip
----------
Check more info about the supported platforms and image scaling in the :ref:`User Guide<Image Scaling>`.
Parameters
----------
self : ee.Image [this]
Image to scale.
Returns
-------
ee.Image
Scaled image.
See Also
--------
getScaleParams : Gets the scale parameters for each band of the image.
getOffsetParams : Gets the offset parameters for each band of the image.
Examples
--------
>>> import ee, eemont
>>> ee.Authenticate()
>>> ee.Initialize()
>>> S2 = ee.ImageCollection('COPERNICUS/S2_SR').first().scaleAndOffset()
"""
return _scale_STAC(self)
@extend(ee.image.Image)
def preprocess(self, **kwargs):
"""Pre-processes the image: masks clouds and shadows, and scales and offsets the image.
Tip
----------
Check more info here about the supported platforms, :ref:`Image Scaling<Image Scaling>` and :ref:`Masking Clouds and Shadows<Masking Clouds and Shadows>`.
Parameters
----------
self : ee.Image [this]
Image to pre-process.
**kwargs :
Keywords arguments for maskClouds().
Returns
-------
ee.Image
Pre-processed image.
See Also
--------
getScaleParams : Gets the scale parameters for each band of the image.
getOffsetParams : Gets the offset parameters for each band of the image.
scaleAndOffset : Scales bands on an image according to their scale and offset parameters.
maskClouds : Masks clouds and shadows in an image.
Examples
--------
>>> import ee, eemont
>>> ee.Authenticate()
>>> ee.Initialize()
>>> S2 = ee.ImageCollection('COPERNICUS/S2_SR').first().preprocess()
"""
return _preprocess(self, **kwargs)
@extend(ee.image.Image)
def getSTAC(self):
"""Gets the STAC of the image.
Parameters
----------
self : ee.Image [this]
Image to get the STAC from.
Returns
-------
dict
STAC of the image.
Examples
--------
>>> import ee, eemont
>>> ee.Authenticate()
>>> ee.Initialize()
>>> ee.ImageCollection('COPERNICUS/S2_SR').first().getSTAC()
{'stac_version': '1.0.0-rc.2',
'type': 'Collection',
'stac_extensions': ['https://stac-extensions.github.io/eo/v1.0.0/schema.json'],
'id': 'COPERNICUS/S2_SR',
'title': 'Sentinel-2 MSI: MultiSpectral Instrument, Level-2A',
'gee:type': 'image_collection',
...}
"""
return _getSTAC(self)
@extend(ee.image.Image)
def getDOI(self):
"""Gets the DOI of the image, if available.
Parameters
----------
self : ee.Image [this]
Image to get the DOI from.
Returns
-------
str
DOI of the ee.Image dataset.
See Also
--------
getCitation : Gets the citation of the image, if available.
Examples
--------
>>> import ee, eemont
>>> ee.Authenticate()
>>> ee.Initialize()
>>> ee.ImageCollection('NASA/GPM_L3/IMERG_V06').first().getDOI()
'10.5067/GPM/IMERG/3B-HH/06'
"""
return _getDOI(self)
@extend(ee.image.Image)
def getCitation(self):
"""Gets the citation of the image, if available.
Parameters
----------
self : ee.Image [this]
Image to get the citation from.
Returns
-------
str
Citation of the ee.Image dataset.
See Also
--------
getDOI : Gets the DOI of the image, if available.
Examples
--------
>>> import ee, eemont
>>> ee.Authenticate()
>>> ee.Initialize()
>>> ee.ImageCollection('NASA/GPM_L3/IMERG_V06').first().getCitation()
'Huffman, G.J., E.F. Stocker, D.T. Bolvin, E.J. Nelkin, Jackson Tan (2019),
GPM IMERG Final Precipitation L3 Half Hourly 0.1 degree x 0.1 degree V06, Greenbelt,
MD, Goddard Earth Sciences Data and Information Services Center (GES DISC), Accessed: [Data Access Date],
[doi:10.5067/GPM/IMERG/3B-HH/06](https://doi.org/10.5067/GPM/IMERG/3B-HH/06)'
"""
return _getCitation(self)
|
[
"dml.mont@gmail.com"
] |
dml.mont@gmail.com
|
ee3e49467a80d531a970152d9b6457cec9bf4862
|
1e381f2b974bc82cd2f0bd0cc5029cbda1baedb2
|
/Hackerearth/acet03/Puzzleria.py
|
50fd07de83d23ca12aca6fc092394f4390d5ded2
|
[] |
no_license
|
rajat189/Competetive_programming
|
7655678935d40cada5a3d39ed400ee430f0311db
|
709065b3527eceb3923c13091608c174ae3a5d64
|
refs/heads/master
| 2021-01-19T05:53:35.790236
| 2016-04-12T19:12:34
| 2016-04-12T19:12:34
| 38,609,439
| 1
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 309
|
py
|
# link :- https://www.hackerearth.com/acet03/algorithm/puzzleria/
# problem :- Puzzleria
def fac(n):
if n==1 or n==0:
return 1
else :
return n*fac(n-1)
t=input()
while t>0:
t-=1
x,y=map(int,raw_input().split())
ans=(fac(7)/(fac(x)*fac(7-x)))*(fac(5)/(fac(y)*fac(5-y)))*fac(x+y)
print ans
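# For illustration: x = 2, y = 1 gives C(7,2) * C(5,1) * 3! = 21 * 5 * 6 = 630.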
|
[
"coolrajatsharma18@gmail.com"
] |
coolrajatsharma18@gmail.com
|
60cd264325bef4e607f1a0f01c23d4d270ac743a
|
8239d707d211eed8e91597b73a6e6c25694e8ab7
|
/test_l/replaceFileContent.py
|
fd08bfe4921ed182a584aac5eb30ba869239bae6
|
[] |
no_license
|
Tw1stFate/LearnPython
|
083c705dc1576ffe42b924f9f69d8de31a61ac00
|
fb455e34bb4299b588d06997e5f6ecdaf6affe75
|
refs/heads/master
| 2022-10-11T00:36:33.467859
| 2020-06-14T15:01:20
| 2020-06-14T15:01:20
| 101,948,514
| 0
| 0
| null | 2019-07-01T02:43:45
| 2017-08-31T02:08:31
|
Python
|
UTF-8
|
Python
| false
| false
| 1,645
|
py
|
import os
def replaceFileContent(path):
for file in os.listdir(path):
if os.path.isdir(path + '/' + file):
replaceFileContent(path + '/' + file)
# print('==========')
else:
# print(file)
if file == '.DS_Store':
continue
newContent = ''
# with open(path + '/' + file, "r+", encoding='utf-8') as f:
#
# content = f.read()
#
# if 'ViewModel' in content:
# newContent = content.replace("ViewModel", "ViewModel_NEW")
#
# else:
# newContent = content
#
# if 'ViewController' in newContent:
# newContent = newContent.replace("ViewController", "ViewController_NEW")
#
# with open(path+'/'+file, 'w', encoding='utf-8') as f:
# f.write(newContent)
# if 'ViewModel' in file:
# newName = file.replace("ViewModel", "ViewModel_NEW")
# os.rename(os.path.join(path,file), os.path.join(path,newName))
#
#
# if 'ViewController' in file:
# newName = file.replace("ViewController", "ViewController_NEW")
# os.rename(os.path.join(path,file), os.path.join(path,newName))
if ".h" in file:
vc = file.replace("ViewModel", "ViewController")
print("@\"%s\" : @\"%s\"," %(file[0:-2], vc[0:-2]))
if __name__ == '__main__':
    replaceFileContent('/Users/lynn/Desktop/公文审批(新)')  # folder name: "official document approval (new)"
|
[
"devlynn@126.com"
] |
devlynn@126.com
|
c59194c040c4a0fae4bf07f663deec3a01375ff6
|
f2462bff402f67648dfe850b159819e0bc5de3dc
|
/api/urls.py
|
3a7262c517673af5fc66d9c4dee9208915605dd8
|
[] |
no_license
|
zerossB/Django-Logging
|
2b45b64f4601dcff67639c45353776700c72fdfe
|
a6072a0dae979641fa092d785ba93313bfba0df1
|
refs/heads/master
| 2022-12-05T16:32:12.923359
| 2020-08-31T00:15:25
| 2020-08-31T00:15:25
| 291,574,554
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 133
|
py
|
from django.urls import path
from . import views
app_name = "api"
urlpatterns = [path("", views.HomeView.as_view(), name="home")]
|
[
"zerossb@gmail.com"
] |
zerossb@gmail.com
|
eb598d14dbeac43c675d45e387b3bc2e50637c24
|
678f794b2ce27eb396bcae8c683cac66a76f91fd
|
/00 - Alldirections/torch_train_res18-4.py
|
7f3ca37088174381f6fba7ecd2d0050e22492728
|
[] |
no_license
|
jumpycat/00-Similarity
|
cc670ff4db2381b930797033613bfed3ebda0b16
|
18fd59c5cc95377da870c6f6565c2261849ea1a0
|
refs/heads/master
| 2023-08-13T21:17:51.439011
| 2021-09-13T08:13:23
| 2021-09-13T08:13:23
| 399,512,090
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 12,123
|
py
|
import torch
import torch.optim as optim
import torchvision.transforms as transforms
from torch.utils.data import Dataset, DataLoader
import numpy as np
import os
import cv2
from PIL import Image
from numpy.lib.stride_tricks import as_strided
import torch.nn as nn
from torchsummary import summary
AVG_SIZE = 2
EPOCH = 100
BATCH_SIZE = 64
LR = 0.01
SIZE = 256
LENGTH = 10000
device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
print(device)
def conv3x3(in_planes, out_planes, stride=1, groups=1, dilation=1):
return nn.Conv2d(in_planes, out_planes, kernel_size=3, stride=stride,
padding=dilation, groups=groups, bias=False, dilation=dilation)
def conv1x1(in_planes, out_planes, stride=1):
return nn.Conv2d(in_planes, out_planes, kernel_size=1, stride=stride, bias=False)
class BasicBlock(nn.Module):
expansion = 1
def __init__(self, inplanes, planes, stride=1, downsample=None):
super(BasicBlock, self).__init__()
norm_layer = nn.BatchNorm2d
self.conv1 = conv3x3(inplanes, planes, stride)
self.bn1 = norm_layer(planes)
self.relu = nn.ReLU(inplace=True)
self.conv2 = conv3x3(planes, planes)
self.bn2 = norm_layer(planes)
self.downsample = downsample
self.stride = stride
def forward(self, x):
identity = x
out = self.conv1(x)
out = self.bn1(out)
out = self.relu(out)
out = self.conv2(out)
out = self.bn2(out)
if self.downsample is not None:
identity = self.downsample(x)
out += identity
out = self.relu(out)
return out
class ResNet(nn.Module):
def __init__(self, block, layers):
super(ResNet, self).__init__()
norm_layer = nn.BatchNorm2d
self._norm_layer = norm_layer
self.inplanes = 64
self.conv1 = nn.Conv2d(3, self.inplanes, kernel_size=7, stride=2, padding=3, bias=False)
self.bn1 = norm_layer(self.inplanes)
self.relu = nn.ReLU(inplace=True)
self.maxpool = nn.MaxPool2d(kernel_size=1, stride=1, padding=0)
self.layer1 = self._make_layer(block, 64, layers[0])
self.layer2 = self._make_layer(block, 128, layers[1], stride=2)
self.layer3 = self._make_layer(block, 256, layers[2], stride=2)
self.layer4 = self._make_layer(block, 512, layers[3], stride=2)
self.avgpool = nn.AdaptiveAvgPool2d((1, 1))
self.fc = nn.Linear(512 * block.expansion, 1)
self.avgpl = nn.AvgPool2d((AVG_SIZE, AVG_SIZE), stride=(AVG_SIZE, AVG_SIZE))
self.calsim_up = nn.Conv2d(256, 1, kernel_size=(2,1), stride=1, bias=True)
self.calsim_down = nn.Conv2d(256, 1, kernel_size=(2,1), stride=1, bias=True)
self.calsim_left = nn.Conv2d(256, 1, kernel_size=(1,2), stride=1, bias=True)
self.calsim_right = nn.Conv2d(256, 1, kernel_size=(1,2), stride=1, bias=True)
def _make_layer(self, block, planes, blocks, stride=1):
norm_layer = self._norm_layer
downsample = None
if stride != 1 or self.inplanes != planes * block.expansion:
downsample = nn.Sequential(
conv1x1(self.inplanes, planes * block.expansion, stride),
norm_layer(planes * block.expansion)
)
layers = []
layers.append(block(self.inplanes, planes, stride, downsample))
self.inplanes = planes * block.expansion
for _ in range(1, blocks):
layers.append(block(self.inplanes, planes))
return nn.Sequential(*layers)
def forward(self, x):
x = self.conv1(x)
x = self.bn1(x)
x = self.relu(x)
x = self.maxpool(x)
x = self.layer1(x)
x = self.layer2(x)
x = self.layer3(x)
x = self.avgpl(x)
up = self.calsim_up(x)
down = self.calsim_down(x)
left = self.calsim_left(x)
right = self.calsim_right(x)
return up,down,left,right
def resnet18():
return ResNet(BasicBlock, [2, 2, 2, 2])
preprocess = transforms.Compose([
transforms.Resize((256,256)),
transforms.ToTensor()
])
def default_loader(path):
img_pil = Image.open(path)
img_tensor = preprocess(img_pil)
return img_tensor
def pool2d(A, kernel_size, stride):
output_shape = ((A.shape[0] - kernel_size) // stride + 1,
(A.shape[1] - kernel_size) // stride + 1)
kernel_size = (kernel_size, kernel_size)
A_w = as_strided(A, shape=output_shape + kernel_size,
strides=(stride * A.strides[0],
stride * A.strides[1]) + A.strides)
A_w = A_w.reshape(-1, *kernel_size)
return A_w.mean(axis=(1, 2)).reshape(output_shape)
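# Example (illustrative): average-pool a 4x4 array down to 2x2 with a 2x2
# kernel and stride 2 via the strided view above:
#   >>> pool2d(np.arange(16, dtype=float).reshape(4, 4), kernel_size=2, stride=2)
#   array([[ 2.5,  4.5],
#          [10.5, 12.5]])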
def Calsimup(x):
m, n = x.shape[0], x.shape[1]
ret = np.ones((m-1, n))
for i in range(m-1):
for j in range(n):
if abs(x[i+1, j] - x[i, j]) > 0:
ret[i,j] = 0
return ret
def Calsimleft(x):
m, n = x.shape[0], x.shape[1]
ret = np.ones((m, n-1))
for i in range(m):
for j in range(n-1):
if abs(x[i, j+1] - x[i, j]) > 0:
ret[i,j] = 0
return ret
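# Example (illustrative): Calsimleft flags horizontally adjacent cells that
# differ, so a mask with one vertical edge yields zeros along that edge:
#   >>> Calsimleft(np.array([[0., 1.], [0., 1.]]))
#   array([[0.],
#          [0.]])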
class DealDataset(Dataset):
def __init__(self, loader=default_loader):
self.len = LENGTH
self.loader = loader
# Deepfakes Face2Face FaceSwap NeuralTextures
fake_root = r'D:\DATA\FF++_Images\Face2Face\c23\train/'
train_fake_video_paths = os.listdir(fake_root)
self.train_fake_imgs = []
for i in train_fake_video_paths:
video_path = fake_root + i
img = os.listdir(video_path)
self.train_fake_imgs.append([video_path + '/' + j for j in img])
real_root = r'D:\DATA\FF++_Images\Real\c23\train/'
train_real_video_paths = os.listdir(real_root)
self.train_real_imgs = []
for i in train_real_video_paths:
video_path = real_root + i
img = os.listdir(video_path)
self.train_real_imgs.append([video_path + '/' + j for j in img])
self.NUM_fake = len(self.train_fake_imgs)
self.NUM_real = len(self.train_real_imgs)
def __getitem__(self, index):
if np.random.randint(0, 2):
video_index = np.random.randint(0, self.NUM_fake)
img_index = np.random.randint(0, len(self.train_fake_imgs[video_index]))
img_path = self.train_fake_imgs[video_index][img_index]
img = self.loader(img_path)
mask_path = img_path.replace('c23','mask')
fake_mask = cv2.imread(mask_path, 0)
fake_mask = np.array(cv2.resize(fake_mask, (SIZE, SIZE)) > 1, dtype=np.float64)
fake_mask1 = pool2d(fake_mask, 16, 16)
fake_mask_up = Calsimup(fake_mask1)
fake_mask_left = Calsimleft(fake_mask1)
fake_mask_up = torch.from_numpy(np.expand_dims(fake_mask_up, 0))
fake_mask_left = torch.from_numpy(np.expand_dims(fake_mask_left, 0))
mask_up = fake_mask_up.to(torch.float32)
mask_left = fake_mask_left.to(torch.float32)
else:
video_index = np.random.randint(0, self.NUM_real)
img_index = np.random.randint(0, len(self.train_real_imgs[video_index]))
img_path = self.train_real_imgs[video_index][img_index]
img = self.loader(img_path)
mask_up = torch.ones((1, 15, 16), dtype=torch.float32)
mask_left = torch.ones((1, 16, 15), dtype=torch.float32)
return img, (mask_up,mask_up,mask_left,mask_left)
def __len__(self):
return self.len
def findthrehold(pred,label):
best_acc = 0
best_th = 0
for th in [0.8 + mom/1000 for mom in range(200)]:
threhold_acc = np.array(np.array(pred)>th,dtype=int)
acc = np.sum(threhold_acc == np.array(label))/2000
if acc > best_acc:
best_acc = acc
best_th = th
return best_acc,best_th
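# Note: the accuracy denominator above assumes exactly 2000 validation
# samples, matching the 80 batches of 25 drawn in val() below.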
def getValdata(size):
imgs = []
labels = []
for i in range(size):
if np.random.randint(0, 2):
video_index = np.random.randint(0, NUM_fake)
img_index = np.random.randint(0, len(test_fake_imgs[video_index]))
img_path = test_fake_imgs[video_index][img_index]
img = default_loader(img_path)
imgs.append(img)
labels.append(0)
else:
video_index = np.random.randint(0, NUM_real)
img_index = np.random.randint(0, len(test_real_imgs[video_index]))
img_path = test_real_imgs[video_index][img_index]
img = default_loader(img_path)
imgs.append(img)
labels.append(1)
return torch.stack(imgs, dim=0), labels
def val(model):
model.eval()
ret_hist = []
ret_labels = []
for i in range(80):
inputs, label = getValdata(25)
inputs = inputs.to(device)
output1, output2, output3, output4 = model(inputs)
up = torch.sigmoid(output1).detach().cpu().numpy()[:,:,:-1,1:15]
down = torch.sigmoid(output2).detach().cpu().numpy()[:,:,1:,1:15]
left = torch.sigmoid(output3).detach().cpu().numpy()[:,:,1:15,:-1]
right = torch.sigmoid(output4).detach().cpu().numpy()[:,:,1:15,1:]
sim_map = np.mean(np.concatenate((up, down, left, right), axis=1),axis=(1,2,3))
batch_sim_map_avg = list(sim_map)
ret_hist += batch_sim_map_avg
ret_labels += label
best_acc,best_th = findthrehold(ret_hist, ret_labels)
return best_acc,best_th
real_root = r'D:\DATA\FF++_Images\Real\c23\val'
test_real_video_paths = os.listdir(real_root)
test_real_imgs = []
for i in test_real_video_paths:
video_path = real_root + '/' + i
img = os.listdir(video_path)
test_real_imgs.append([video_path + '/' + j for j in img])
# Deepfakes Face2Face FaceSwap NeuralTextures
fake_root = r'D:\DATA\FF++_Images\Face2Face\c23\val/'
test_fake_video_paths = os.listdir(fake_root)
test_fake_imgs = []
for i in test_fake_video_paths:
video_path = fake_root + '/' + i
img = os.listdir(video_path)
test_fake_imgs.append([video_path + '/' + j for j in img])
NUM_fake = len(test_fake_imgs)
NUM_real = len(test_real_imgs)
net = resnet18().to(device)
pretext_model = torch.load(r'C:\Users\jumpycat\.cache\torch\checkpoints/resnet18-5c106cde.pth')
model2_dict = net.state_dict()
state_dict = {k: v for k, v in pretext_model.items() if k in model2_dict.keys()}
state_dict.pop('fc.weight')
state_dict.pop('fc.bias')
model2_dict.update(state_dict)
net.load_state_dict(model2_dict)
net.to(device)
dealDataset = DealDataset()
train_loader = DataLoader(dataset=dealDataset, batch_size=BATCH_SIZE, shuffle=True)
criterion = nn.BCEWithLogitsLoss()
optimizer = optim.SGD(net.parameters(), lr=LR, momentum=0.9)
if __name__ == '__main__':
for epoch in range(EPOCH):
print('\nEpoch: %d' % (epoch + 1))
net.train()
for i, data in enumerate(train_loader, 0):
inputs, labels = data
inputs, label1,label2, label3,label4 = inputs.to(device), labels[0].to(device), labels[1].to(device),\
labels[2].to(device), labels[3].to(device)
optimizer.zero_grad()
output1, output2,output3, output4 = net(inputs)
loss1 = criterion(output1, label1)
loss2 = criterion(output2, label2)
loss3 = criterion(output3, label3)
loss4 = criterion(output4, label4)
loss = loss1 + loss2 + loss3 + loss4
loss.backward()
optimizer.step()
data = '[epoch:%03d, iter:%03d] Loss: %.03f' % (epoch + 1, i, loss.item())
print(data)
with open('logs-c23-f2f-4.txt', 'a', encoding='utf-8') as f:
f.write(data)
f.write('\n')
best_acc,best_th = val(net)
tag = 'c23-f2f-4-epoch-%03d-loss-%.03f-ValAcc-%.03f-Threshold-%.03f' % (epoch + 1,loss.item(),best_acc,best_th)
print(tag)
torch.save(net, r'trained_models\v3\c23/'+tag + '.pkl')
|
[
"826244895@qq.com"
] |
826244895@qq.com
|
29285e3e7e7a5f76f61f531bef3eeff8d1d35a6d
|
7704de5d02678908acdcbca4cf92e87e2251c64c
|
/UltimateNewFlask/ForumAppProject/migrations/versions/f3a6c6ba97cf_.py
|
78f3a3e53f72c6181f452e42dd2e52a2918d6f41
|
[
"BSD-3-Clause"
] |
permissive
|
frontendprof/FlaskProjectsRepo
|
9e8e8ed27771aef3b6f5008722f6ed1daea376eb
|
18d8b737a03f132bf97483ae256be06fbd389eff
|
refs/heads/master
| 2020-07-02T15:22:27.859618
| 2020-01-15T08:57:05
| 2020-01-15T08:57:05
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,700
|
py
|
"""empty message
Revision ID: f3a6c6ba97cf
Revises:
Create Date: 2019-10-04 23:09:42.639636
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = 'f3a6c6ba97cf'
down_revision = None
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table('role',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('name', sa.String(length=80), nullable=True),
sa.Column('description', sa.String(length=250), nullable=True),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('name')
)
op.create_table('user',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('email', sa.String(length=255), nullable=True),
sa.Column('password', sa.String(length=255), nullable=True),
sa.Column('name', sa.String(length=255), nullable=True),
sa.Column('username', sa.String(length=255), nullable=True),
sa.Column('active', sa.Boolean(), nullable=True),
sa.Column('confirmed_at', sa.DateTime(), nullable=True),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('email'),
sa.UniqueConstraint('username')
)
op.create_table('roles_users',
sa.Column('user_id', sa.Integer(), nullable=True),
sa.Column('role_id', sa.Integer(), nullable=True),
sa.ForeignKeyConstraint(['role_id'], ['role.id'], ),
sa.ForeignKeyConstraint(['user_id'], ['user.id'], )
)
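# Note: roles_users is the association table implementing the many-to-many
# relationship between user and role.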
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_table('roles_users')
op.drop_table('user')
op.drop_table('role')
# ### end Alembic commands ###
|
[
"abdumaliksharipov@gmail.com"
] |
abdumaliksharipov@gmail.com
|
3d0b8e1c2ce4cc6cb72a22a25fa5cf651b946b91
|
ded10c2f2f5f91c44ec950237a59225e8486abd8
|
/.history/2/matrix_squaring_20200420191916.py
|
56b8551f9db2c3e29060ef2717bae7c420791554
|
[] |
no_license
|
jearistiz/Statistical-Physics-Projects
|
276a86407b32ded4e06b32efb2fadbd8eff8daed
|
d9c5b16a50856e148dc8604d92b6de3ea21fc552
|
refs/heads/master
| 2022-11-05T03:41:23.623050
| 2020-06-28T06:36:05
| 2020-06-28T06:36:05
| 254,909,897
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 21,114
|
py
|
# -*- coding: utf-8 -*-
from __future__ import division
import os
import numpy as np
import matplotlib.pyplot as plt
from time import time
import pandas as pd
# Author: Juan Esteban Aristizabal-Zuluaga
# date: 20200414
def rho_free(x,xp,beta):
"""Uso: devuelve elemento de matriz dsnsidad para el caso de una partícula libre en un toro infinito."""
return (2.*np.pi*beta)**(-0.5) * np.exp(-(x-xp)**2 / (2 * beta) )
def harmonic_potential(x):
"""Devuelve valor del potencial armónico para una posición x dada"""
return 0.5*x**2
def anharmonic_potential(x):
"""Devuelve valor de potencial anarmónico para una posición x dada"""
# return np.abs(x)*(1+np.cos(x)) #el resultado de este potencial es interesante
return 0.5*x**2 - x**3 + x**4
def QHO_canonical_ensemble(x,beta):
"""
Usage: computes the theoretical quantum probability of finding the harmonic
oscillator (immersed in a thermal bath at inverse temperature beta) at position x.
Receives:
x: float -> position
beta: float -> inverse temperature in reduced units, beta = 1/T.
Returns:
theoretical quantum probability at position x for inverse temperature beta.
"""
return (np.tanh(beta/2.)/np.pi)**0.5 * np.exp(- x**2 * np.tanh(beta/2.))
def Z_QHO(beta):
"""Uso: devuelve valor de función de partición para el QHO unidimensional"""
return 0.5/np.sinh(beta/2)
def E_QHO_avg_theo(beta):
"""Uso: devuelve valor de energía interna para el QHO unidimensional"""
return 0.5/np.tanh(0.5*beta)
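# Note: the internal energy follows from <E> = -d(ln Z)/d(beta); with
# Z = 1/(2*sinh(beta/2)) this gives <E> = (1/2)/tanh(beta/2), as above.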
def rho_trotter(x_max = 5., nx = 101, beta=1, potential=harmonic_potential):
"""
Usage: returns the density matrix in the Trotter approximation for high
temperatures under the influence of the potential "potential".
Receives:
x_max: float -> the values of x lie in the interval (-x_max,x_max).
nx: int -> number of (equally spaced) x values considered.
beta: float -> inverse temperature in reduced units.
potential: func -> interaction potential. Must be a function of x.
Returns:
rho: numpy array, shape=(nx,nx) -> density matrix in the Trotter approximation
for high temperatures and the given potential.
grid_x: numpy array, shape=(nx,) -> x values at which rho is evaluated.
dx: float -> spacing between contiguous values of grid_x
"""
# Position discretization step given the input x_max and nx
dx = 2. * x_max / (nx - 1)
# List of x values given the discretization and x_max
grid_x = np.array([i*dx for i in range(-int((nx-1)/2), int(nx/2 + 1))])
# Build the density matrix given by the Trotter approximation
rho = np.array([ [ rho_free(x , xp, beta) * np.exp(-0.5*beta*(potential(x)+potential(xp))) for x in grid_x] for xp in grid_x])
return rho, grid_x, dx
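# Example (illustrative): build a high-temperature density matrix and check
# that its trace approximates the partition function at that beta:
#   rho, grid_x, dx = rho_trotter(x_max=5., nx=101, beta=0.125)
#   Z_approx = np.trace(rho) * dx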
def density_matrix_squaring(rho, grid_x, N_iter = 1, beta_ini = 1, print_steps=True):
"""
Usage: returns the density matrix after applying the matrix squaring algorithm N_iter times.
The first iteration uses the density matrix given by the input rho (at
inverse temperature beta_ini); subsequent iterations use the density matrix
generated by the immediately preceding iteration. The system associated with
the density matrix obtained (after applying the algorithm) is at inverse
temperature beta_fin = beta_ini * 2**(N_iter).
Receives:
rho: numpy array, shape=(nx,nx) -> density matrix discretized at the values given
by x_grid.
grid_x: numpy array, shape=(nx,) -> x values at which rho is evaluated.
N_iter: int -> number of iterations of the algorithm.
beta_ini: float -> inverse temperature associated with the
density matrix rho given as input.
print_steps: bool -> decides whether to show the beta values at each
iteration.
Returns:
rho: numpy array, shape=(nx,nx) -> density matrix of the state rho at inverse
temperature equal to beta_fin.
trace_rho: float -> trace of the density matrix at inverse temperature
equal to beta_fin. By the definition we take
for rho, this is equivalent to the partition
function at that temperature.
beta_fin: float -> inverse temperature of the system associated with rho.
"""
# Position discretization step
dx = grid_x[1] - grid_x[0]
# Compute beta_fin from the input values of beta_ini and N_iter
beta_fin = beta_ini * 2 ** N_iter
# Print relevant information
if print_steps:
print('\nbeta_ini = %.3f'%beta_ini,
'\n----------------------------------------------------------------')
# Iterate the matrix squaring algorithm
for i in range(N_iter):
rho = dx * np.dot(rho,rho)
# Print relevant information
if print_steps:
print(u'Iteration %d) 2^%d * beta_ini --> 2^%d * beta_ini'%(i, i, i+1))
if print_steps:
print('----------------------------------------------------------------\n' +
u'beta_fin = %.3f'%beta_fin)
# Compute the trace of rho
trace_rho = np.trace(rho)*dx
return rho, trace_rho, beta_fin
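# Example (illustrative): five squarings take rho from beta_ini = 0.125 to
# beta_fin = 0.125 * 2**5 = 4.0:
#   rho, Z, beta_fin = density_matrix_squaring(rho, grid_x, N_iter=5, beta_ini=0.125)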
def save_csv(data, data_headers=None, file_name='file.csv', relevant_info=None, print_data=True):
"""
Usage: data must contain lists that will be the columns of a CSV file saved
under the name file_name. relevant_info adds comments in the first lines of the file.
Receives:
data: array of arrays, shape=(nx,ny) -> each list is a column of the file.
data_headers: numpy array, shape=(nx,) -> column names
file_name: str -> name of the file in which the data will be saved.
relevant_info: list of str -> information added as comments in the
first lines. Each element of this list
is added as a new line.
print_data: bool -> decides whether to print the saved data to screen.
Returns:
data_pdDF: pd.DataFrame -> the data in "pandas data frame" format.
Also saves a file with the data and the relevant information in its first lines.
"""
# Store probability data in a dictionary: grid_x for positions and x_weights for
# probability density values.
if file_name=='file.csv':
script_dir = os.path.dirname(os.path.abspath(__file__))  # full path of this script
file_name = script_dir + '/' + file_name
if data_headers is None or len(data_headers)!=len(data):
data_headers = range(len(data))
print( 'Note: data_headers is missing or does not provide enough headers for save_csv().\n'+
'The headers used in the file will be the numbers 0, 1, 2,...')
data_dict = {}
for i,column in enumerate(data):
data_dict[data_headers[i]] = column
# Convert the data to pandas DataFrame format.
data_pdDF = pd.DataFrame(data=data_dict)
# Create the .csv file and prepend the relevant comments given as input
if relevant_info is not None:
with open(file_name,mode='w') as file_csv:
for info in list(relevant_info):
file_csv.write('# '+info+'\n')
file_csv.close()
# Use pandas to write the data to the file in csv format.
with open(file_name,mode='a') as file_csv:
data_pdDF.to_csv(file_csv)
file_csv.close()
else:
with open(file_name,mode='w') as file_csv:
data_pdDF.to_csv(file_csv)
file_csv.close()
# Print the data to screen.
if print_data==True:
print(data_pdDF)
return data_pdDF
def run_pi_x_sq_trotter(x_max=5., nx=201, N_iter=7, beta_fin=4, potential=harmonic_potential,
potential_string = 'harmonic_potential', print_steps=True,
save_data=True, file_name=None, relevant_info=None,
plot=True, save_plot=True, show_plot=True):
"""
Usage: runs the matrix squaring algorithm iteratively (N_iter times). The first
iteration uses a density matrix in the Trotter approximation at inverse
temperature beta_ini = beta_fin * 2**(-N_iter) for the potential given by potential;
subsequent iterations use the density matrix generated by the immediately
preceding iteration. This function also saves pi(x;beta) vs. x data to a
text file and plots pi(x;beta), comparing it with the theory for the quantum
harmonic oscillator.
Receives:
x_max: float -> the values of x lie in the interval (-x_max,x_max).
nx: int -> number of x values considered.
N_iter: int -> number of iterations of the matrix squaring algorithm.
beta_fin: float -> inverse temperature we want to reach after applying
the matrix squaring algorithm iteratively.
potential: func -> interaction potential used in the Trotter approximation. Must
be a function of x.
potential_string: str -> name of the potential (used to name the files that
are generated).
print_steps: bool -> decides whether to print the steps of the matrix squaring algorithm.
save_data: bool -> decides whether to save the data to a .csv file.
plot: bool -> decides whether to plot.
save_plot: bool -> decides whether to save the figure.
show_plot: bool -> decides whether to show the figure on screen.
Returns:
rho: numpy array, shape=(nx,nx) -> density matrix of the state rho at inverse
temperature equal to beta_fin.
trace_rho: float -> trace of the density matrix at inverse temperature
equal to beta_fin. By the definition we take
for "rho", this is equivalent to the partition
function at that temperature.
grid_x: numpy array, shape=(nx,) -> x values at which rho is evaluated.
"""
# Compute beta_ini from the input values of beta_fin and N_iter
beta_ini = beta_fin * 2**(-N_iter)
# Compute rho in the Trotter approximation
rho, grid_x, dx = rho_trotter(x_max, nx, beta_ini, potential)
# Approximate rho by matrix squaring iterated N_iter times.
rho, trace_rho, beta_fin_2 = density_matrix_squaring( rho, grid_x, N_iter,
beta_ini, print_steps )
print( '----------------------------------------------------------------' +
'--------------------------------------------------------\n'
u'Matrix squaring: beta_ini = %.3f --> beta_fin = %.3f'%(beta_ini, beta_fin_2) +
u' N_iter = %d Z(beta_fin) = Tr(rho(beta_fin)) = %.3E'%(N_iter,trace_rho))
# Normalize rho to 1 and compute probability densities at the values in grid_x.
rho_normalized = np.copy(rho)/trace_rho
x_weights = np.diag(rho_normalized)
# Save data to a .csv file.
script_dir = os.path.dirname(os.path.abspath(__file__))  # full path of this script
if save_data==True:
# Name of the .csv file in which we save the values of pi(x;beta_fin).
if file_name is None:
csv_file_name = script_dir+u'/pi_x-ms-%s-x_max_%.3f-nx_%d-N_iter_%d-beta_fin_%.3f.csv'\
%(potential_string,x_max,nx,N_iter,beta_fin)
else:
csv_file_name = script_dir + u'/pi_x-ms-' + file_name +'.csv'
# Relevant information to add as a comment to the csv file.
if relevant_info is None:
relevant_info = [ 'pi(x;beta_fin) computed using matrix squaring algorithm and' + \
' Trotter approximation. Parameters:',
u'%s x_max = %.3f nx = %d '%(potential_string,x_max,nx) + \
u'N_iter = %d beta_ini = %.3f '%(N_iter,beta_ini,) + \
u'beta_fin = %.3f'%beta_fin ]
# Save the values of pi(x;beta_fin) to a csv file.
pi_x_data = [grid_x.copy(),x_weights.copy()]
pi_x_data_headers = ['position_x','prob_density']
pi_x_data = save_csv(pi_x_data,pi_x_data_headers,csv_file_name,relevant_info,print_data=0)
# Plot and compare with theory
if plot == True:
plt.figure(figsize=(8,5))
plt.plot(grid_x, x_weights, label = 'Matrix squaring +\nTrotter formula.\n$N=%d$ iterations\n$dx=%.3E$'%(N_iter,dx))
plt.plot(grid_x, QHO_canonical_ensemble(grid_x,beta_fin), label=u'QHO theoretical value')
plt.xlabel(u'x')
plt.ylabel(u'$\pi^{(Q)}(x;\\beta)$')
plt.legend(loc='best',title=u'$\\beta=%.2f$'%beta_fin)
plt.tight_layout()
if save_plot==True:
if file_name is None:
plot_file_name = script_dir+u'/pi_x-ms-plot-%s-x_max_%.3f-nx_%d-N_iter_%d-beta_fin_%.3f.eps'%(potential_string,x_max,nx,N_iter,beta_fin)
else:
plot_file_name = script_dir+u'/pi_x-ms-plot-'+file_name+'.eps'
plt.savefig(plot_file_name)
if show_plot==True:
plt.show()
plt.close()
return rho, trace_rho, grid_x
def Z_several_values( temp_min=1./10, temp_max=1/2., N_temp=10, save_Z_csv=True,
Z_file_name = 'Z.csv', relevant_info_Z = None, print_Z_data = True,
x_max=7., nx=201, N_iter=7, potential = harmonic_potential,
potential_string = 'harmonic_potential', print_steps=False,
save_pi_x_data=False, pi_x_file_name=None, relevant_info_pi_x=None,
plot=False, save_plot=False, show_plot=False ):
"""
"""
beta_max = 1./temp_min
beta_min = 1./temp_max
N_temp = int(N_temp)
beta_array = np.linspace(beta_max,beta_min,N_temp)
Z = []
for beta_fin in beta_array:
rho, trace_rho, grid_x = \
run_pi_x_sq_trotter( x_max, nx, N_iter, beta_fin, potential, potential_string,
print_steps, save_pi_x_data, pi_x_file_name,
relevant_info_pi_x, plot, save_plot, show_plot)
Z.append(trace_rho)
Z_data = [beta_array.copy(),1./beta_array.copy(),Z.copy()]
if Z_file_name is None:
script_dir = os.path.dirname(os.path.abspath(__file__))  # full path of this script
Z_file_name = script_dir + '/' + 'Z.csv'
if save_Z_csv == True:
Z_data_headers = ['beta','temperature','Z']
Z_data = save_csv( Z_data, Z_data_headers, Z_file_name, relevant_info_Z,
print_data = False )
if print_Z_data == True:
print(Z_data)
return Z_data
def average_energy( read_Z_data=True, generate_Z_data=False, Z_file_name = 'Z.csv',
plot_energy=True, save_plot_E=True, show_plot_E=True,
E_plot_file_name=None,
temp_min=1./10, temp_max=1/2., N_temp=10, save_Z_csv=True,
relevant_info_Z = None, print_Z_data = True,
x_max=7., nx=201, N_iter=7, potential = harmonic_potential,
potential_string = 'harmonic_potential', print_steps=False,
save_pi_x_data=False, pi_x_file_name=None, relevant_info_pi_x=None,
plot=False, save_plot=False, show_plot=False ):
"""
"""
if read_Z_data:
Z_file_read = pd.read_csv(Z_file_name, index_col=0, comment='#')
elif generate_Z_data:
t_0 = time()
Z_data = Z_several_values( temp_min, temp_max, N_temp, save_Z_csv, Z_file_name,
relevant_info_Z, print_Z_data, x_max, nx, N_iter, potential,
potential_string,print_steps, save_pi_x_data, pi_x_file_name,
relevant_info_pi_x, plot, save_plot, show_plot )
t_1 = time()
print( '--------------------------------------------------------------------------\n' +
'%d values of Z(beta) generated --> %.3f sec.'%(N_temp,t_1-t_0))
Z_file_read = Z_data
else:
print( 'Choose whether the partition function data, Z, are generated or read.\n' +
'These options are mutually exclusive. If both are selected, the ' +
'algorithm chooses to read the data.')
# Read the Z(beta) data columns
beta_read = Z_file_read['beta']
temp_read = Z_file_read['temperature']
Z_read = Z_file_read['Z']
E_avg = np.gradient(-np.log(Z_read),beta_read)
if plot_energy:
plt.figure(figsize=(8,5))
plt.plot(temp_read,E_avg,label=u'$\langle E \\rangle$ via path integral\nnaive sampling')
plt.plot(temp_read,E_QHO_avg_theo(beta_read),label=u'$\langle E \\rangle$ theory')
plt.legend(loc='best')
plt.xlabel(u'$T$')
plt.ylabel(u'$\langle E \\rangle$')
if save_plot_E:
if E_plot_file_name is None:
E_plot_file_name = 'E.png'
plt.savefig(E_plot_file_name)
if show_plot_E:
plt.show()
plt.close()
return E_avg, beta_read
plt.rcParams.update({'font.size':15})  # Increase font size in the generated figures
script_dir = os.path.dirname(os.path.abspath(__file__))  # full path of this script
# Run the matrix squaring algorithm
run_ms_algorithm = False
# Physical parameters of the algorithm
x_max = 5.
nx = 201
N_iter = 7
beta_fin = 4
potential, potential_string = harmonic_potential, 'harmonic_potential'
# Technical parameters
print_steps = True
save_data = True
file_name = None
relevant_info = None
plot = True
save_plot = True
show_plot = True
if run_ms_algorithm:
rho, trace_rho, grid_x = \
run_pi_x_sq_trotter( x_max, nx, N_iter, beta_fin, potential, potential_string,
print_steps, save_data, file_name, relevant_info, plot,
save_plot, show_plot)
# Algorithm for computing the internal energy
# Part 1: algorithm for computing the partition function for several values of beta
calculate_several_Z = True
temp_min = 1./10
temp_max = 1./2
N_temp = 10
save_Z_csv = True
Z_file_name = script_dir+'/'+'partition-function-test-2.csv'
relevant_info_Z = None
print_Z_data = False
x_max=7.
nx=201
N_iter=7
potential, potential_string = harmonic_potential, 'harmonic_potential'
print_steps=False
save_pi_x_data=False
pi_x_file_name=None
relevant_info_pi_x=None
plot=False
save_plot = False
show_plot=False
if calculate_several_Z:
t_0 = time()
Z_data = Z_several_values( temp_min, temp_max, N_temp, save_Z_csv, Z_file_name,
relevant_info_Z, print_Z_data, x_max, nx, N_iter, potential,
potential_string, print_steps, save_pi_x_data, pi_x_file_name,
relevant_info_pi_x, plot, save_plot, show_plot )
t_1 = time()
print( '--------------------------------------------------------\n'+
'%d values of Z(beta) --> %.3f sec.'%(N_temp,t_1-t_0))
# Read the Z(beta) data back from the csv file
Z_file_name = script_dir+'/'+'partition-function-test-2.csv'
Z_file_read = pd.read_csv(Z_file_name, index_col=0, comment='#')
beta_read = Z_file_read['beta']
temp_read = Z_file_read['temperature']
Z_read = Z_file_read['Z']
E_avg = np.gradient(-np.log(Z_read),beta_read)
plt.figure()
plt.plot(temp_read,E_avg,label=u'$\langle E \\rangle$ Path Integral')
plt.plot(temp_read,E_QHO_avg_theo(beta_read),label=u'$\langle E \\rangle$ theory')
plt.plot(temp_read,Z_read,'v-',label=u'$Z(T)$ Matrix Squaring')
plt.plot(temp_read,Z_QHO(beta_read),'^-',label=u'$Z(T)$ theory')
plt.legend(loc='best')
plt.xlabel(u'$T$')
plt.ylabel(u'$\langle E \\rangle$ or $Z(T)$')
plt.show()
plt.close()
|
[
"jeaz.git@gmail.com"
] |
jeaz.git@gmail.com
|
ac64f51aea7fe349c9f505f13c4f72df6051c56d
|
7c5c104b6093b6c3f2c8839591797c6812efc290
|
/parallel_xnoise.py
|
d41fac0c2b46deb1252dd04b1e746646b8ad0fa0
|
[] |
no_license
|
parkus/galex
|
a29262c281834bcdb973f5f27c2a77460ebc029a
|
06065a7cc5a600c4b94fa7ce64cc841b34f2015f
|
refs/heads/master
| 2016-09-11T02:13:28.732370
| 2014-07-28T22:12:03
| 2014-07-28T22:12:03
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,465
|
py
|
# -*- coding: utf-8 -*-
"""
Created on Fri May 16 12:58:49 2014
@author: Parke
"""
import pointers, utils
from multiprocessing import Pool
from optparse import OptionParser
from astropy.table import Table
import variability as vb
import pdfutils as pu
import pdb
parser = OptionParser()
parser.add_option("-c", "--cat", "--catalog", action="store", type="string",
dest="cat_file")
parser.add_option('-b', '--band', action='store', type='string', dest='band')
parser.add_option('-p', '--processes', action='store', type='int', dest='Np')
(options, args) = parser.parse_args()
cat = Table.read(options.cat_file, format='ascii')
def compute_xnoise(kid):
curvefile = '{}{}.{}.csv'.format(pointers.curve_folder,
kid, options.band)
curve, isBad = utils.read_curve(curvefile,cull_bad_data=True)
limitflag, x_noise, err0, err1 = '', None, None, None
if not isBad and len(curve) > 3:
try:
#get prob dist func of sigma/mu
pdf, x_noise = vb.excess_noise_PDF(curve['cps'], curve['cps_err'])
width_appx = 1.0/pdf(x_noise)
limit683 = pu.upper_limit(pdf, 0.683, normalized=True, x0=0.0,
xpeak=x_noise, x1guess=x_noise + 0.5*width_appx)
if pdf(limit683) > pdf(0.0): #if there is a well-defined 68.3% interval
xlo, xhi = pu.confidence_interval(pdf, x_noise, normalized=True)
err0 = x_noise - xlo
err1 = xhi - x_noise
else:
limitflag = '<'
x_noise = pu.upper_limit(pdf, normalized=True, x0=0.0, xpeak=x_noise,
x1guess=x_noise + 2*width_appx)
except:
pass
row = [kid,limitflag,x_noise,err0,err1]
return row
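# Note: compute_xnoise is defined at module level so multiprocessing.Pool.map
# can pickle it and dispatch it to the worker processes below.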
if __name__ == '__main__':
kids = cat['Kepler ID']
pool = Pool(processes=options.Np)
rows = pool.map(compute_xnoise, kids)
# compute_xnoise(kids[153])
# rows = []
# for i, kid in enumerate(kids):
# print i
# rows.append(compute_xnoise(kid))
# rows = map(compute_xnoise, kids)
# pdb.set_trace()
xcat = Table(names=['Kepler ID','flag','x_noise','-err','+err'],
masked=True, dtype=[float,str,float,float,float])
for row in rows: xcat.add_row(row, [r is None for r in row])
xcat.sort(['Kepler ID'])
utils.table_to_csv(xcat, pointers.xnoisecat_file)
|
[
"parke.loyd@gmail.com"
] |
parke.loyd@gmail.com
|
030d3c79799f9e6685828f4e6ee3aed8b8bd432e
|
eab36f5adb15ba24acb51ace389959fa9592346f
|
/twisted-intro/twisted-client-4/get-poetry.py
|
87c612e3554aac9052021a3b37cee6d2425ee627
|
[
"MIT"
] |
permissive
|
yance-dev/twdemo
|
ebda30c32dae3e722e740b71fe5623a6f47ff70f
|
e331f5a1c13df1d9e5bc2bba8a9a50dfd9b6b2ba
|
refs/heads/master
| 2022-02-07T10:29:28.804627
| 2019-06-14T16:57:39
| 2019-06-14T16:57:39
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,979
|
py
|
# This is the Twisted Get Poetry Now! client, version 4.0
import optparse, sys
from twisted.internet import defer
from twisted.internet.protocol import Protocol, ClientFactory
def parse_args():
usage = """usage: %prog [options] [hostname]:port ...
This is the Get Poetry Now! client, Twisted version 4.0
Run it like this:
python get-poetry.py port1 port2 port3 ...
If you are in the base directory of the twisted-intro package,
you could run it like this:
python twisted-client-4/get-poetry.py 10001 10002 10003
to grab poetry from servers on ports 10001, 10002, and 10003.
Of course, there need to be servers listening on those ports
for that to work.
"""
parser = optparse.OptionParser(usage)
_, addresses = parser.parse_args()
if not addresses:
print(parser.format_help())
parser.exit()
def parse_address(addr):
if ':' not in addr:
host = '127.0.0.1'
port = addr
else:
host, port = addr.split(':', 1)
if not port.isdigit():
parser.error('Ports must be integers.')
return host, int(port)
return list(map(parse_address, addresses))
class PoetryProtocol(Protocol):
poem = ''
def dataReceived(self, data):
# data arrives as bytes under Python 3; decode before accumulating
self.poem += data.decode('utf-8')
def connectionLost(self, reason):
self.poemReceived(self.poem)
def poemReceived(self, poem):
self.factory.poem_finished(poem)
class PoetryClientFactory(ClientFactory):
protocol = PoetryProtocol
def __init__(self, deferred):
self.deferred = deferred
def poem_finished(self, poem):
if self.deferred is not None:
d, self.deferred = self.deferred, None
d.callback(poem)
def clientConnectionFailed(self, connector, reason):
if self.deferred is not None:
d, self.deferred = self.deferred, None
d.errback(reason)
def get_poetry(host, port):
"""
Download a poem from the given host and port. This function
returns a Deferred which will be fired with the complete text of
the poem or a Failure if the poem could not be downloaded.
"""
d = defer.Deferred()
from twisted.internet import reactor
factory = PoetryClientFactory(d)
reactor.connectTCP(host, port, factory)
return d
def poetry_main():
addresses = parse_args()
from twisted.internet import reactor
poems = []
errors = []
def got_poem(poem):
poems.append(poem)
def poem_failed(err):
print('Poem failed:', err, file=sys.stderr)
errors.append(err)
def poem_done(_):
if len(poems) + len(errors) == len(addresses):
reactor.stop()
for address in addresses:
host, port = address
d = get_poetry(host, port)
d.addCallbacks(got_poem, poem_failed)
d.addBoth(poem_done)
reactor.run()
for poem in poems:
print(poem)
if __name__ == '__main__':
poetry_main()
|
[
"1415940604@qq.com"
] |
1415940604@qq.com
|
3c3b43a4cb14bf35539294a2f18b1ddab3a02e8d
|
653a3d9d66f3d359083cb588fc7c9ece8bb48417
|
/src/graph_transpiler/webdnn/backend/webgpu/kernels/max_pooling_2d.py
|
49ab3d4cef396af1c366129263b980c319d888a9
|
[
"Zlib",
"MIT"
] |
permissive
|
leonskim/webdnn
|
fec510254b15f3dec00f5bed8f498737b372e470
|
f97c798c9a659fe953f9dc8c8537b8917e4be7a2
|
refs/heads/master
| 2020-04-15T18:42:43.632244
| 2019-01-10T10:07:18
| 2019-01-10T10:07:18
| 164,921,764
| 0
| 0
|
NOASSERTION
| 2019-01-09T19:07:35
| 2019-01-09T19:07:30
|
Python
|
UTF-8
|
Python
| false
| false
| 3,947
|
py
|
from typing import List
from webdnn.backend.code_generator.allocator import MemoryLayout
from webdnn.backend.code_generator.injectors.buffer_injector import BufferInjector
from webdnn.backend.code_generator.injectors.kernel_name_injector import KernelNameInjector
from webdnn.backend.webgpu.generator import WebGPUDescriptorGenerator
from webdnn.backend.webgpu.kernel import Kernel, GPUSize
from webdnn.backend.webgpu.preset_placeholders import MAX_THREADS_PER_THREADGROUP
from webdnn.graph.axis import Axis
from webdnn.graph.operators.max_pooling_2d import MaxPooling2D
from webdnn.graph.order import OrderNHWC
template = """
kernel void %%FUNC_NAME%%(device float * %%STATIC_BUFFER%%[[buffer(0)]],
device float * %%DYNAMIC_BUFFER%%[[buffer(1)]],
const device int * %%META_BUFFER%% [[buffer(2)]],
uint index[[thread_position_in_grid]],
uint num_threads[[threads_per_grid]])
{
const device float *X = %%LOAD_BUFFER(max_pooling_2d_X)%%;
device float *Y = %%LOAD_BUFFER(max_pooling_2d_Y)%%;
const int N = %%LOAD_BUFFER(max_pooling_2d_N)%%;
const int H1 = %%LOAD_BUFFER(max_pooling_2d_H1)%%;
const int W1 = %%LOAD_BUFFER(max_pooling_2d_W1)%%;
const int C = %%LOAD_BUFFER(max_pooling_2d_C)%%;
const int H2 = %%LOAD_BUFFER(max_pooling_2d_H2)%%;
const int W2 = %%LOAD_BUFFER(max_pooling_2d_W2)%%;
const int KH = %%LOAD_BUFFER(max_pooling_2d_KH)%%;
const int KW = %%LOAD_BUFFER(max_pooling_2d_KW)%%;
const int SH = %%LOAD_BUFFER(max_pooling_2d_SH)%%;
const int SW = %%LOAD_BUFFER(max_pooling_2d_SW)%%;
const int PH = %%LOAD_BUFFER(max_pooling_2d_PH)%%;
const int PW = %%LOAD_BUFFER(max_pooling_2d_PW)%%;
for (int gid = index; gid < N * H2 * W2 * C; gid += num_threads) {
const int c = gid % C;
const int w2 = gid / C % W2;
const int h2 = gid / C / W2 % H2;
const int n = gid / C / W2 / H2;
float v = -1e7;
for (int kh = 0; kh < KH; kh++) {
const int h1 = h2 * SH - PH + kh;
if (h1 < 0 || h1 >= H1) continue;
for (int kw = 0; kw < KW; kw++) {
const int w1 = w2 * SW - PW + kw;
if (w1 < 0 || w1 >= W1) continue;
v = v > X[((n * H1 + h1) * W1 + w1) * C + c] ? v : X[((n * H1 + h1) * W1 + w1) * C + c];
}
}
Y[gid] = v;
}
}
"""
@WebGPUDescriptorGenerator.register_handler(MaxPooling2D)
def max_pooling_2d(op: MaxPooling2D, memory_layout: MemoryLayout) -> List[Kernel]:
x = op.inputs["x"]
y = op.outputs["y"]
assert x.order == OrderNHWC
assert y.order == OrderNHWC
buffer_injector = BufferInjector()
buffer_injector.register({
"max_pooling_2d_X": memory_layout[x],
"max_pooling_2d_Y": memory_layout[y],
"max_pooling_2d_N": x.shape_dict[Axis.N],
"max_pooling_2d_H1": x.shape_dict[Axis.H],
"max_pooling_2d_W1": x.shape_dict[Axis.W],
"max_pooling_2d_C": x.shape_dict[Axis.C],
"max_pooling_2d_H2": y.shape_dict[Axis.H],
"max_pooling_2d_W2": y.shape_dict[Axis.W],
"max_pooling_2d_KH": op.parameters["ksize"][0],
"max_pooling_2d_KW": op.parameters["ksize"][1],
"max_pooling_2d_SH": op.parameters["stride"][0],
"max_pooling_2d_SW": op.parameters["stride"][1],
"max_pooling_2d_PH": op.parameters["padding"][0],
"max_pooling_2d_PW": op.parameters["padding"][1],
})
name_injector = KernelNameInjector(op)
source = template
source = buffer_injector.inject(source)
source = name_injector.inject(source)
kernel = Kernel(
{name_injector.name: source},
name_injector.name,
GPUSize(8, 1, 1),
GPUSize(MAX_THREADS_PER_THREADGROUP, 1, 1),
buffer_injector.buffer,
buffer_injector.unresolved_value_list
)
return [kernel]
|
[
"y.kikura@gmail.com"
] |
y.kikura@gmail.com
|
6a0f7809c86fd780ee902070abef4e4f821bb4fa
|
02360d68d2af6d4f40779dbb8223003b89d6209b
|
/algorithm_course/divide_conquer_algorithms/closest_points_problem.py
|
01000d9fe0768e95495cf7fd2620d3ded0379926
|
[] |
no_license
|
codefoxut/pycourses
|
88b5b8490ea7a0d1c852ee008a62d37fb5112450
|
4163fdb96f3fcc962b95c5b56bece939e1f5d75e
|
refs/heads/main
| 2023-03-27T22:28:14.215125
| 2021-04-05T18:42:00
| 2021-04-05T18:42:00
| 354,257,302
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,014
|
py
|
import math
class Point:
def __init__(self, x, y):
self.x = x
self.y = y
def distance(p, q):
"""euclidean distance."""
return math.sqrt((p.x - q.x) ** 2 + (p.y - q.y) ** 2)
def brute_force_approach(sub_array):
# distance between all the points of sub array.
min_distance = float('inf')
for i in range(len(sub_array) - 1):
for j in range(i + 1, len(sub_array)):
actual_distance = distance(sub_array[i], sub_array[j])
if actual_distance < min_distance:
min_distance = actual_distance
return min_distance
def closest_pairs_problem(list_sorted_x, list_sorted_y, num_of_items):
if num_of_items <= 3:
return brute_force_approach(list_sorted_x)
middle_index = num_of_items // 2
middle_item = list_sorted_x[middle_index]
# divide phase
delta_left = closest_pairs_problem(list_sorted_x[:middle_index], list_sorted_y, middle_index)
delta_right = closest_pairs_problem(list_sorted_x[middle_index:], list_sorted_y,
num_of_items - middle_index)
# conquer phase
delta = min(delta_left, delta_right)
strip_points = [k for k in list_sorted_y if abs(k.x - middle_item.x) < delta]
strip_delta = get_strip_delta(strip_points, delta)
return min(strip_delta, delta)
def get_strip_delta(strip_points, delta):
min_distance = delta
n = len(strip_points)
for i in range(n):
j = i + 1
while j < n and abs(strip_points[i].y - strip_points[j].y) < min_distance:
min_distance = distance(strip_points[i], strip_points[j])
j += 1
return min_distance
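# Note: because strip_points is sorted by y, each point only needs to be
# compared against a bounded number of successors (classically at most 7)
# before the y-gap exceeds min_distance, keeping the merge step linear.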
def run(point_list):
x_list = sorted(point_list, key=lambda k: k.x)
y_list = sorted(point_list, key=lambda k: k.y)
return closest_pairs_problem(x_list, y_list, len(point_list))
if __name__ == '__main__':
p_list = [Point(1, 1), Point(4, 2), Point(10, 10), Point(0, 0), Point(5, 3), Point(0, 1),]
print(run(p_list))
|
[
"ujjwal.tak@go-mmt.com"
] |
ujjwal.tak@go-mmt.com
|
af2820638c3a3bbddaa78f505cf4d912cbd2d28f
|
ef6bffb9a672520950f10d6078ec7a6847ec6cce
|
/tests/test_scraper.py
|
4d25b506237fa192e14d7ffe2014f838f46d876a
|
[] |
no_license
|
tybrs/yelp-corpus-generator
|
b0858315fd445d6114ff0c10d30f6662b489f573
|
7239ab2c4c911d3c0331b33c14783b9d4d8a6f0d
|
refs/heads/master
| 2021-10-13T15:36:04.389684
| 2021-10-06T23:35:24
| 2021-10-06T23:35:24
| 154,608,069
| 1
| 0
| null | 2021-10-06T23:36:57
| 2018-10-25T04:02:37
|
Python
|
UTF-8
|
Python
| false
| false
| 955
|
py
|
import requests
from scrapy.http import Request, TextResponse
from yaml import load, SafeLoader
def test_xpaths():
"""Unit test to make sure all xpaths in src/yelp_scrapy/xpath.yml
retrieve a non-empty output for predefined test urls.
"""
config = load(open('scrapy/src/yelp_scrapy/xpath.yml', 'r'),
Loader=SafeLoader)
for page in config:
page_dict = config[page]
url = 'http://localhost:8050/render.html?url='
url += page_dict['test_url']
page_xpaths = {k: v for k, v in page_dict.items()
if k != 'test_url'}
req = Request(url=url)
resp = requests.get(url)
response = TextResponse(url=url, request=req,
body=resp.text, encoding='utf-8')
for key, xpath in page_xpaths.items():
extraction = response.xpath(xpath).extract()
assert extraction, f"XPath for {key} failed"
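# Note: the http://localhost:8050/render.html prefix targets a Splash
# JavaScript-rendering service, which is assumed to be running locally
# for these requests to succeed.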
|
[
"gavagai@archy.localdomain"
] |
gavagai@archy.localdomain
|
f23157568aaa28ae0889b1754bf588b412951638
|
87695989bbafe0ec6892cb4d1bb1965c84d6b35f
|
/vatt/utils/train/restore.py
|
837dd5fc3c500bc269bf00adf7462038e39318aa
|
[
"CC-BY-4.0",
"Apache-2.0"
] |
permissive
|
guangyusong/google-research
|
ef6e85e7de75bd1289575374accc7fe19af896c7
|
cac4a7f3b82ab629e25fa8afe33ce80cc6933e54
|
refs/heads/master
| 2022-01-01T11:22:31.825772
| 2021-12-30T20:08:40
| 2021-12-30T20:19:03
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 7,711
|
py
|
# coding=utf-8
# Copyright 2021 The Google Research Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Lint as: python3
"""Partial restore utils for pretraining or finetuning."""
import os
import pickle
import re
from absl import logging
import tensorflow as tf
import tensorflow_addons.image as tfa_image
_TEXT_EMBEDDINGS_DIR = "./misc/"
_KRS_TO_CKPT = [
[r"^(.*)video_module/vit_(.*?)/(.*):0",
(r"model/layer_with_weights-0/_base_layer/vid_backbone/_base/\3/.ATTRIBUTES/VARIABLE_VALUE")],
[r"^(.*)audio_module/spt_(.*?)/(.*):0",
(r"model/layer_with_weights-0/_base_layer/aud_backbone/_base/\3/.ATTRIBUTES/VARIABLE_VALUE")],
[r"^(.*)/waveform_to_patch/(.*)",
(r"\1/wave_to_patch/\2")],
[r"^(.*)audio_module/wat_(.*?)/(.*):0",
(r"model/layer_with_weights-0/_base_layer/aud_backbone/_base/\3/.ATTRIBUTES/VARIABLE_VALUE")],
[r"^(.*)/spatio_temporal_embeddings/(.*)",
r"\1/pos_embedding_lookup/\2"],
[r"^(.*)/spectro_temporal_embeddings/(.*)",
r"\1/pos_embedding_lookup/\2"],
[r"^(.*)/temporal_embeddings/(.*)",
r"\1/pos_embedding_lookup/\2"],
[r"^(.*)/pos_embedding_lookup/layer_norm/(.*)",
r"\1/pos_embedding_lookup/layernorm/\2"],
[r"^(.*)/transformer/(.*)",
r"\1/tx/\2"],
[r"^(.*)/dense_relu_dense/(.*)",
r"\1/mlp/\2"],
[r"^(.*)/dense_gelu_dense/(.*)",
r"\1/mlp/\2"],
[r"^(.*)/dense_geglu_dense/(.*)",
r"\1/mlp/\2"],
[r"^(.*)/multi_head_attention/(.*)",
r"\1/mha/\2"],
[r"^(.*)/layer_([0-99]+)/(.*)",
r"\1/layers/\2/\3"],
]
_KRS_TO_UT_CKPT = [
[r"^(.*)/vit_(.*?)/agg_embedding:0",
(r"\1/vit_\2/vid_agg_embedding:0")],
[r"^(.*)/wat_(.*?)/agg_embedding:0",
(r"\1/wat_\2/aud_agg_embedding:0")],
[r"^(.*)/spt_(.*?)/agg_embedding:0",
(r"\1/spt_\2/aud_agg_embedding:0")],
[r"^(.*)/vit_(.*?)/pre_tx_projection/(.*?)",
(r"\1/vit_\2/pre_proj/video/\3")],
[r"^(.*)/vit_(.*?)/post_tx_projection/(.*?)",
(r"\1/vit_\2/post_proj/video/\3")],
[r"^(.*)/wat_(.*?)/pre_tx_projection/(.*?)",
(r"\1/wat_\2/pre_proj/audio/\3")],
[r"^(.*)/wat_(.*?)/post_tx_projection/(.*?)",
(r"\1/wat_\2/post_proj/audio/\3")],
[r"^(.*)/spt_(.*?)/pre_tx_projection/(.*?)",
(r"\1/spt_\2/pre_proj/audio/\3")],
[r"^(.*)/spt_(.*?)/post_tx_projection/(.*?)",
(r"\1/spt_\2/post_proj/audio/\3")],
[r"^(.*)video_module/vit_(.*?)/(.*):0",
(r"model/layer_with_weights-0/_base_layer/unified_backbone/unified_transformer/\3/.ATTRIBUTES/VARIABLE_VALUE")],
[r"^(.*)audio_module/spt_(.*?)/(.*):0",
(r"model/layer_with_weights-0/_base_layer/unified_backbone/unified_transformer/\3/.ATTRIBUTES/VARIABLE_VALUE")],
[r"^(.*)audio_module/wat_(.*?)/(.*):0",
(r"model/layer_with_weights-0/_base_layer/unified_backbone/unified_transformer/\3/.ATTRIBUTES/VARIABLE_VALUE")],
[r"^(.*)/voxel_to_patch/(.*)",
(r"\1/raw_to_embeddings/video/\2")],
[r"^(.*)/waveform_to_patch/(.*)",
(r"\1/raw_to_embeddings/audio/\2")],
[r"^(.*)/spectrum_to_patch/(.*)",
(r"\1/raw_to_embeddings/audio/\2")],
[r"^(.*)/spatio_temporal_embeddings/(.*)",
r"\1/pos_embedding_lookup/video/\2"],
[r"^(.*)/temporal_embeddings/(.*)",
r"\1/pos_embedding_lookup/audio/\2"],
[r"^(.*)/spectro_temporal_embeddings/(.*)",
r"\1/pos_embedding_lookup/audio/\2"],
[r"^(.*)/pos_embedding_lookup/(.*)/layer_norm/(.*)",
r"\1/pos_embedding_lookup/\2/layernorm/\3"],
[r"^(.*)/transformer/(.*)",
r"\1/tx/\2"],
[r"^(.*)/dense_relu_dense/(.*)",
r"\1/mlp/\2"],
[r"^(.*)/dense_gelu_dense/(.*)",
r"\1/mlp/\2"],
[r"^(.*)/dense_geglu_dense/(.*)",
r"\1/mlp/\2"],
[r"^(.*)/multi_head_attention/(.*)",
r"\1/mha/\2"],
[r"^(.*)/layer_([0-99]+)/(.*)",
r"\1/layers/\2/\3"],
]
_KRS_TO_TSM_CKPT = [
[r"^(.*)video_module/tsm/(.*):0",
r"model/layer_with_weights-0/_base_layer/vid_backbone/_base/\2/.ATTRIBUTES/VARIABLE_VALUE"],
[r"^(.*)/post_batch_norm/(.*)",
r"\1/post_bn/\2"],
[r"^(.*)/pre_batch_norm/(.*)",
r"\1/pre_bn/\2"],
[r"^(.*)/res_batch_norm_([0-99]+)/(.*)",
r"\1/res_bn_\2/\3"],
[r"^(.*)/shortcut_conv/(.*)",
r"\1/projection/\2"],
[r"^(.*)/tsm_block_([0-99]+)/(.*)",
r"\1/tsm_blocks/\2/\3"],
[r"^(.*)/unit_([0-99]+)/(.*)",
r"\1/tsm_units/\2/\3"],
]
def interpolate_pos(source_weights, target_shape):
"""Interpolate missing points in the new pos embeddings."""
source_buckets = source_weights.shape[0]
lookup_keys = tf.range(source_buckets)
available_buckets = lookup_keys / source_buckets
available_buckets = tf.cast(available_buckets, tf.float32)[None, :, None]
# define all possible target buckets
# shape = [1, target_buckets, 1]
target_buckets = target_shape[0]
query_buckets = tf.range(target_buckets) / target_buckets
query_buckets = tf.cast(query_buckets, tf.float32)[None, :, None]
# fetch current available embeddings
# shape = [1, source_buckets, embd_dim]
available_embeddings = source_weights[None, Ellipsis]
expanded_embeddings = tf.squeeze(tfa_image.interpolate_spline(
train_points=available_buckets,
train_values=available_embeddings,
query_points=query_buckets,
order=3), axis=0)
logging.info("Positional embeddings interpolated from %s to %s",
source_weights.shape, target_shape)
return expanded_embeddings
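# Example (illustrative): stretch positional embeddings from 16 to 32 buckets
# with the cubic spline above:
#   new_pos = interpolate_pos(old_pos, target_shape=(32, old_pos.shape[1]))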
def convert_keras_name_to_ckpt(krs_name):
for source, dest in _KRS_TO_CKPT:
krs_name = re.sub(source, dest, krs_name)
return krs_name
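# Example (illustrative, hypothetical weight name): the regex table maps
#   'm/video_module/vit_b/transformer/layer_0/multi_head_attention/query/kernel:0'
# to
#   'model/layer_with_weights-0/_base_layer/vid_backbone/_base/tx/layers/0/mha/query/kernel/.ATTRIBUTES/VARIABLE_VALUE'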
def convert_keras_name_to_ut_ckpt(krs_name):
for source, dest in _KRS_TO_UT_CKPT:
krs_name = re.sub(source, dest, krs_name)
return krs_name
def convert_keras_name_to_tsm_ckpt(krs_name):
for source, dest in _KRS_TO_TSM_CKPT:
krs_name = re.sub(source, dest, krs_name)
return krs_name
def assign_weight_from_ckpt(layer, ckpt_path):
"""Convert Keras model name to saved checkpoint name and restore."""
ckpt_reader = tf.train.load_checkpoint(ckpt_path)
ckpt_names = [v[0] for v in tf.train.list_variables(ckpt_path)]
is_unified = any(["unified_backbone" in name for name in ckpt_names])
is_tsm = any(["tsm_blocks" in name for name in ckpt_names])
skipped = []
for krs_w in layer.weights:
krs_name = krs_w.name
if is_unified:
ckpt_name = convert_keras_name_to_ut_ckpt(krs_name)
elif is_tsm:
ckpt_name = convert_keras_name_to_tsm_ckpt(krs_name)
else:
ckpt_name = convert_keras_name_to_ckpt(krs_name)
if ckpt_name in ckpt_names:
ckpt_weight = ckpt_reader.get_tensor(ckpt_name)
if ckpt_weight.shape == krs_w.shape:
krs_w.assign(ckpt_weight)
elif "pos_embedding_lookup" in ckpt_name:
krs_w.assign(interpolate_pos(ckpt_weight, krs_w.shape))
else:
skipped.append(krs_name)
else:
skipped.append(krs_name)
return skipped
def assign_word_embeddings(embedding_layer, embedding_name):
path = os.path.join(_TEXT_EMBEDDINGS_DIR, embedding_name + ".pkl")
with open(path, "rb") as f:
embedding_weights = pickle.load(f)["word_embeddings"]
embedding_layer.set_weights([embedding_weights])
|
[
"copybara-worker@google.com"
] |
copybara-worker@google.com
|
d17c6af9dbeb02a7eb6d51bd2bb76681a825baab
|
6e57bdc0a6cd18f9f546559875256c4570256c45
|
/development/vndk/tools/header-checker/tests/module.py
|
82f6baa3b4b0b3f484f210df181dbf2744908297
|
[
"Apache-2.0"
] |
permissive
|
dongdong331/test
|
969d6e945f7f21a5819cd1d5f536d12c552e825c
|
2ba7bcea4f9d9715cbb1c4e69271f7b185a0786e
|
refs/heads/master
| 2023-03-07T06:56:55.210503
| 2020-12-07T04:15:33
| 2020-12-07T04:15:33
| 134,398,935
| 2
| 1
| null | 2022-11-21T07:53:41
| 2018-05-22T10:26:42
| null |
UTF-8
|
Python
| false
| false
| 16,057
|
py
|
#!/usr/bin/env python3
import os
import sys
import tempfile
import_path = os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))
import_path = os.path.abspath(os.path.join(import_path, 'utils'))
sys.path.insert(1, import_path)
from utils import run_header_abi_dumper_on_file
from utils import run_header_abi_linker
from utils import TARGET_ARCHS
from utils import SOURCE_ABI_DUMP_EXT
SCRIPT_DIR = os.path.abspath(os.path.dirname(__file__))
INPUT_DIR = os.path.join(SCRIPT_DIR, 'input')
EXPECTED_DIR = os.path.join(SCRIPT_DIR, 'expected')
REF_DUMP_DIR = os.path.join(SCRIPT_DIR, 'reference_dumps')
ARCH_TARGET_CFLAGS = {'arm': ['-target', 'arm-linux-androideabi'],
'arm64': ['-target', 'aarch64-linux-android'],
'x86' : ['-target', 'i386-linux-androideabi'],
'x86_64' : ['-target', 'x86_64-linux-android'],
'mips' : ['-target', 'mips-linux-androideabi'],
'mips64' : ['-target', 'mips64-linux-android'],}
def relative_to_abs_path(relative_path):
return os.path.join(SCRIPT_DIR, relative_path)
def relative_to_abs_path_list(relative_path_list):
abs_paths = []
for relative_path in relative_path_list:
abs_paths.append(relative_to_abs_path(relative_path))
return abs_paths
class Module(object):
def __init__(self, name, arch, srcs, version_script, cflags,
export_include_dirs, api):
self.name = name
self.arch = arch
self.srcs = relative_to_abs_path_list(srcs)
self.version_script = relative_to_abs_path(version_script)
self.cflags = cflags
self.arch_cflags = ['']
if self.arch != '':
self.arch_cflags = ARCH_TARGET_CFLAGS.get(self.arch)
self.export_include_dirs = relative_to_abs_path_list(export_include_dirs)
self.api = api
def get_name(self):
return self.name
def get_arch(self):
return self.arch
def get_srcs(self):
return self.srcs
def get_export_include_dirs(self):
return self.export_include_dirs
def get_cflags(self):
return self.cflags
def get_version_script(self):
return self.version_script
def get_api(self):
return self.api
def make_lsdump(self, default_cflags):
""" For each source file, produce a .sdump file, and link them to form
an lsump file"""
dumps_to_link = []
with tempfile.TemporaryDirectory() as tmp:
output_lsdump = os.path.join(tmp, self.name) + SOURCE_ABI_DUMP_EXT
for src in self.srcs:
output_path = os.path.join(tmp, os.path.basename(src)) + '.sdump'
dumps_to_link.append(output_path)
run_header_abi_dumper_on_file(
src, output_path, self.export_include_dirs,
self.cflags + self.arch_cflags + default_cflags)
return run_header_abi_linker(output_lsdump, dumps_to_link,
self.version_script, self.api,
self.arch)
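# Example (illustrative): dump a single test module for one architecture:
#   module = Module.get_test_module_by_name('libc_and_cpp')
#   module = Module.mutate_module_for_arch(module, 'arm64')
#   lsdump_path = module.make_lsdump(default_cflags=[])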
@staticmethod
def mutate_module_for_arch(module, target_arch):
name = module.get_name()
srcs = module.get_srcs()
version_script = module.get_version_script()
cflags = module.get_cflags()
export_include_dirs = module.get_export_include_dirs()
api = module.get_api()
return Module(name, target_arch, srcs, version_script, cflags,
export_include_dirs, api)
@staticmethod
def mutate_module_for_all_arches(module):
modules = []
for target_arch in TARGET_ARCHS:
modules.append(Module.mutate_module_for_arch(module, target_arch))
return modules
@staticmethod
def get_test_modules():
modules = []
for module in TEST_MODULES.values():
if module.get_arch() == '':
modules += Module.mutate_module_for_all_arches(module)
return modules
@staticmethod
def get_test_module_by_name(name):
return TEST_MODULES[name]
TEST_MODULES = [
Module(
name = 'libc_and_cpp',
srcs = ['integration/c_and_cpp/source1.cpp',
'integration/c_and_cpp/source2.c',
],
version_script = 'integration/c_and_cpp/map.txt',
export_include_dirs = ['integration/c_and_cpp/include'],
cflags = [],
arch = '',
api = 'current',
),
Module(
name = 'libc_and_cpp_with_unused_struct',
srcs = ['integration/c_and_cpp/source1.cpp',
'integration/c_and_cpp/source2.c',
],
version_script = 'integration/c_and_cpp/map.txt',
export_include_dirs = ['integration/c_and_cpp/include'],
cflags = ['-DINCLUDE_UNUSED_STRUCTS=1'],
arch = '',
api = 'current',
),
Module(
name = 'libc_and_cpp_with_unused_cstruct',
srcs = ['integration/c_and_cpp/source1.cpp',
'integration/c_and_cpp/source2.c',
],
version_script = 'integration/c_and_cpp/map.txt',
export_include_dirs = ['integration/c_and_cpp/include'],
cflags = ['-DINCLUDE_UNUSED_STRUCTS=1', '-DMAKE_UNUSED_STRUCT_C=1'],
arch = '',
api = 'current',
),
Module(
name = 'libgolden_cpp',
srcs = ['integration/cpp/gold/golden_1.cpp',
'integration/cpp/gold/high_volume_speaker.cpp',
'integration/cpp/gold/low_volume_speaker.cpp',
],
version_script = 'integration/cpp/gold/map.txt',
export_include_dirs = ['integration/cpp/gold/include'],
cflags = [],
arch = '',
api = 'current',
),
Module(
name = 'libgolden_cpp_odr',
srcs = ['integration/cpp/gold/golden_1.cpp',
'integration/cpp/gold/high_volume_speaker.cpp',
'integration/cpp/gold/low_volume_speaker.cpp',
],
version_script = 'integration/cpp/gold/map.txt',
export_include_dirs = ['integration/cpp/gold/include'],
cflags = ['-DTEST_ODR'],
arch = '',
api = 'current',
),
Module(
name = 'libgolden_cpp_add_function',
srcs = ['integration/cpp/gold/golden_1.cpp',
'integration/cpp/gold/high_volume_speaker.cpp',
'integration/cpp/gold/low_volume_speaker.cpp',
],
version_script = 'integration/cpp/gold/map_add_function.txt',
export_include_dirs = ['integration/cpp/gold/include'],
cflags = ['-DGOLDEN_ADD_FUNCTION=1'],
arch = '',
api = 'current',
),
Module(
name = 'libgolden_cpp_add_function_and_unexported_elf',
srcs = ['integration/cpp/gold/golden_1.cpp',
'integration/cpp/gold/high_volume_speaker.cpp',
'integration/cpp/gold/low_volume_speaker.cpp',
],
version_script = \
'integration/cpp/gold/map_add_function_elf_symbol.txt',
export_include_dirs = ['integration/cpp/gold/include'],
cflags = ['-DGOLDEN_ADD_FUNCTION=1', '-DADD_UNEXPORTED_ELF_SYMBOL'],
arch = '',
api = 'current',
),
Module(
name = 'libgolden_cpp_change_function_access',
srcs = ['integration/cpp/gold/golden_1.cpp',
'integration/cpp/gold/high_volume_speaker.cpp',
'integration/cpp/gold/low_volume_speaker.cpp',
],
version_script = 'integration/cpp/gold/map.txt',
export_include_dirs = ['integration/cpp/gold/include'],
cflags = ['-DGOLDEN_CHANGE_FUNCTION_ACCESS=1'],
arch = '',
api = 'current',
),
Module(
name = 'libgolden_cpp_add_global_variable',
srcs = ['integration/cpp/gold/golden_1.cpp',
'integration/cpp/gold/high_volume_speaker.cpp',
'integration/cpp/gold/low_volume_speaker.cpp',
],
version_script = 'integration/cpp/gold/map_added_globvar.txt',
export_include_dirs = ['integration/cpp/gold/include'],
cflags = ['-DGOLDEN_ADD_GLOBVAR=1'],
arch = '',
api = 'current',
),
Module(
name = 'libgolden_cpp_add_global_variable_private',
srcs = ['integration/cpp/gold/golden_1.cpp',
'integration/cpp/gold/high_volume_speaker.cpp',
'integration/cpp/gold/low_volume_speaker.cpp',
],
version_script = 'integration/cpp/gold/map_added_globvar.txt',
export_include_dirs = ['integration/cpp/gold/include'],
cflags = ['-DGOLDEN_ADD_GLOBVAR=1', '-DGOLDEN_ADD_GLOBVAR_PRIVATE'],
arch = '',
api = 'current',
),
Module(
name = 'libgolden_cpp_return_type_diff',
srcs = ['integration/cpp/gold/golden_1.cpp',
'integration/cpp/gold/high_volume_speaker.cpp',
'integration/cpp/gold/low_volume_speaker.cpp',
],
version_script = 'integration/cpp/gold/map.txt',
export_include_dirs = ['integration/cpp/gold/include'],
cflags = ['-DGOLDEN_RETURN_TYPE_DIFF=1'],
arch = '',
api = 'current',
),
Module(
name = 'libgolden_cpp_parameter_type_diff',
srcs = ['integration/cpp/gold/golden_1.cpp',
'integration/cpp/gold/high_volume_speaker.cpp',
'integration/cpp/gold/low_volume_speaker.cpp',
],
version_script = 'integration/cpp/gold/map_parameter_type_diff.txt',
export_include_dirs = ['integration/cpp/gold/include'],
cflags = ['-DGOLDEN_PARAMETER_TYPE_DIFF=1'],
arch = '',
api = 'current',
),
Module(
name = 'libgolden_cpp_vtable_diff',
srcs = ['integration/cpp/gold/golden_1.cpp',
'integration/cpp/gold/high_volume_speaker.cpp',
'integration/cpp/gold/low_volume_speaker.cpp',
],
version_script = 'integration/cpp/gold/map.txt',
export_include_dirs = ['integration/cpp/gold/include'],
cflags = ['-DGOLDEN_VTABLE_DIFF=1'],
arch = '',
api = 'current',
),
Module(
name = 'libgolden_cpp_member_diff',
srcs = ['integration/cpp/gold/golden_1.cpp',
'integration/cpp/gold/high_volume_speaker.cpp',
'integration/cpp/gold/low_volume_speaker.cpp',
],
version_script = 'integration/cpp/gold/map.txt',
export_include_dirs = ['integration/cpp/gold/include'],
cflags = ['-DGOLDEN_MEMBER_DIFF=1'],
arch = '',
api = 'current',
),
Module(
name = 'libgolden_cpp_member_fake_diff',
srcs = ['integration/cpp/gold/golden_1.cpp',
'integration/cpp/gold/high_volume_speaker.cpp',
'integration/cpp/gold/low_volume_speaker.cpp',
],
version_script = 'integration/cpp/gold/map.txt',
export_include_dirs = ['integration/cpp/gold/include'],
cflags = ['-DGOLDEN_MEMBER_FAKE_DIFF=1'],
arch = '',
api = 'current',
),
Module(
name = 'libgolden_cpp_member_cv_diff',
srcs = ['integration/cpp/gold/golden_1.cpp',
'integration/cpp/gold/high_volume_speaker.cpp',
'integration/cpp/gold/low_volume_speaker.cpp',
],
version_script = 'integration/cpp/gold/map.txt',
export_include_dirs = ['integration/cpp/gold/include'],
cflags = ['-DGOLDEN_MEMBER_CV_DIFF=1'],
arch = '',
api = 'current',
),
Module(
name = 'libgolden_cpp_change_member_access',
srcs = ['integration/cpp/gold/golden_1.cpp',
'integration/cpp/gold/high_volume_speaker.cpp',
'integration/cpp/gold/low_volume_speaker.cpp',
],
version_script = 'integration/cpp/gold/map.txt',
export_include_dirs = ['integration/cpp/gold/include'],
cflags = ['-DGOLDEN_CHANGE_MEMBER_ACCESS=1'],
arch = '',
api = 'current',
),
Module(
name = 'libgolden_cpp_member_integral_type_diff',
srcs = ['integration/cpp/gold/golden_1.cpp',
'integration/cpp/gold/high_volume_speaker.cpp',
'integration/cpp/gold/low_volume_speaker.cpp',
],
version_script = 'integration/cpp/gold/map.txt',
export_include_dirs = ['integration/cpp/gold/include'],
cflags = ['-DGOLDEN_MEMBER_INTEGRAL_TYPE_DIFF=1'],
arch = '',
api = 'current',
),
Module(
name = 'libgolden_cpp_enum_diff',
srcs = ['integration/cpp/gold/golden_1.cpp',
'integration/cpp/gold/high_volume_speaker.cpp',
'integration/cpp/gold/low_volume_speaker.cpp',
],
version_script = 'integration/cpp/gold/map.txt',
export_include_dirs = ['integration/cpp/gold/include'],
cflags = ['-DGOLDEN_ENUM_DIFF=1'],
arch = '',
api = 'current',
),
Module(
name = 'libgolden_cpp_enum_extended',
srcs = ['integration/cpp/gold/golden_1.cpp',
'integration/cpp/gold/high_volume_speaker.cpp',
'integration/cpp/gold/low_volume_speaker.cpp',
],
version_script = 'integration/cpp/gold/map.txt',
export_include_dirs = ['integration/cpp/gold/include'],
cflags = ['-DGOLDEN_ENUM_EXTENSION=1'],
arch = '',
api = 'current',
),
Module(
name = 'libgolden_cpp_unreferenced_elf_symbol_removed',
srcs = ['integration/cpp/gold/golden_1.cpp',
'integration/cpp/gold/high_volume_speaker.cpp',
'integration/cpp/gold/low_volume_speaker.cpp',
],
version_script = 'integration/cpp/gold/map_elf_symbol_removed.txt',
export_include_dirs = ['integration/cpp/gold/include'],
cflags = [],
arch = '',
api = 'current',
),
Module(
name = 'libreproducability',
srcs = ['integration/c_and_cpp/reproducability.c',
],
version_script = 'integration/c_and_cpp/repro_map.txt',
export_include_dirs = ['integration/c_and_cpp/include'],
cflags = [],
arch = '',
api = 'current',
),
Module(
name = 'libgolden_cpp_member_name_changed',
srcs = ['integration/cpp/gold/golden_1.cpp',
'integration/cpp/gold/high_volume_speaker.cpp',
'integration/cpp/gold/low_volume_speaker.cpp',
],
version_script = 'integration/cpp/gold/map.txt',
export_include_dirs = ['integration/cpp/gold/include'],
cflags = ['-DGOLDEN_CHANGE_MEMBER_NAME_SAME_OFFSET=1'],
arch = '',
api = 'current',
),
Module(
name = 'libgolden_cpp_function_pointer',
srcs = ['integration/cpp/gold/golden_1.cpp',
'integration/cpp/gold/high_volume_speaker.cpp',
'integration/cpp/gold/low_volume_speaker.cpp',
],
version_script = 'integration/cpp/gold/map.txt',
export_include_dirs = ['integration/cpp/gold/include'],
cflags = ['-DGOLDEN_FUNCTION_POINTER=1'],
arch = '',
api = 'current',
),
Module(
name = 'libgolden_cpp_function_pointer_parameter_added',
srcs = ['integration/cpp/gold/golden_1.cpp',
'integration/cpp/gold/high_volume_speaker.cpp',
'integration/cpp/gold/low_volume_speaker.cpp',
],
version_script = 'integration/cpp/gold/map.txt',
export_include_dirs = ['integration/cpp/gold/include'],
cflags = ['-DGOLDEN_FUNCTION_POINTER_ADD_PARAM=1',
'-DGOLDEN_FUNCTION_POINTER=1'],
arch = '',
api = 'current',
),
]
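# Re-index the module list by name so callers can look modules up in O(1).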
TEST_MODULES = { m.name: m for m in TEST_MODULES }
|
[
"dongdong331@163.com"
] |
dongdong331@163.com
|
2c590e0a193a11e38be8147c00b75b88ea0a4d98
|
93f93f96e8ef92e7bf5185ca7d8f12ecd9ee23f2
|
/0x0A-python-inheritance/8-rectangle.py
|
a40d3e0b1ff834e52c4c366f4a28acc2cb4858f2
|
[] |
no_license
|
Alafresh/holbertonschool-higher_level_programming
|
1a54091d94eb2370badf35117c03b27e3baa1e35
|
111cf99415b0a0557c6f21f5acbe4c7b8ba5f325
|
refs/heads/master
| 2020-07-22T21:07:27.921742
| 2020-02-13T15:34:30
| 2020-02-13T15:34:30
| 207,327,647
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 570
|
py
|
#!/usr/bin/python3
class BaseGeometry():
def area(self):
raise Exception('area() is not implemented')
def integer_validator(self, name, value):
if type(value) != int:
raise TypeError(name + ' must be an integer')
elif value <= 0:
raise ValueError(name + ' must be greater than 0')
class Rectangle(BaseGeometry):
def __init__(self, width, height):
self.integer_validator("width", width)
self.integer_validator("height", height)
self.__width = width
self.__height = height
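# Editor's sketch (not part of the original exercise): how the inherited
# validator behaves.
#   r = Rectangle(3, 5)   # ok; stored name-mangled as _Rectangle__width/__height
#   Rectangle(3, 0)       # ValueError: height must be greater than 0
#   Rectangle("3", 5)     # TypeError: width must be an integer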
|
[
"Alafresh"
] |
Alafresh
|
455b5a68e06d42c27ff61f75e671d72f76de854f
|
435b287d58ed65bfa65bf042134e1bb1bfaed0dd
|
/day18/maze2.py
|
26df6506f86db48faee14acfb71ac7778833e7c7
|
[] |
no_license
|
marxin/AoC2019
|
2ef5b79b37351e86be3e44c9d400332f6dae3ae0
|
2a76ec78908be2f3c3c145fef6e52ade11c48a7b
|
refs/heads/master
| 2020-12-01T20:53:38.068563
| 2019-12-25T08:38:05
| 2019-12-25T08:38:05
| 230,766,575
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 12,054
|
py
|
#!/usr/bin/env python3
data = '''
#########
#b.A.@.a#
#########
'''
data = '''
########################
#f.D.E.e.C.b.A.@.a.B.c.#
######################.#
#d.....................#
########################
'''
data = '''
########################
#...............b.C.D.f#
#.######################
#.....@.a.B.c.d.A.e.F.g#
########################
'''
data = '''
#################
#i.G..c...e..H.p#
########.########
#j.A..b...f..D.o#
########@########
#k.E..a...g..B.n#
########.########
#l.F..d...h..C.m#
#################
'''
data3 = '''
########################
#@..............ac.GI.b#
###d#e#f################
###A#B#C################
###g#h#i################
########################
'''
data = '''
#################################################################################
#m....#...#...#.....#...#...............#.#...#.....#...........................#
#.###.#.#.#.#.###.#.#.#.#####.#.#########.#.#.#.#.###.###.#.#############.#####.#
#...#...#...#...#.#b#.#.#...#.#........x#.#.#.#.#.....#...#.#.......#.....#...#.#
#.#.###########.#.#.#.#.#.#.###########.#.#.#.#.#######.#####.#####.#######.#.###
#.#.......#...#.U.#...#...#...........#.#.#.#...#.....#..........p#.........#...#
#.#######.#.#.#######################.#.#.#.#####.###.###########.#############.#
#...#...#.#.#.#...#.S...#...........#...#.#.#...#.#.#.....#....k#.....#.......#.#
###.#.#.#.#.#.#.#.#.###.#.###.#####L###.#.#.#.#.#.#.###N###.###.#######.#####.#.#
#...#.#...#.#.#.#...#.#.#...#.#.......#.#.#...#...#.....#...#.#.#.......#...#.#.#
#.###.#####.#.#.#####.#.###.#.#########.#.#########.#####.###.#.#.#######.#.#.#.#
#.#.#.#...#.#.#.#.T...#.....#.........#.#...#.....#...#.#...#.....#.......#.#...#
#.#.#.#.#.#.#.#.#.###.#############.#.#.###.#.#.#####.#.###.#######.#####.#.#####
#.#.#q#.#.H.#.#.#.#...#.....F.#...#.#...#...#.#.....#.#...#...#.#...#.#...#...#.#
#.#.#.###.###.#.###.#.#####.#.#.#.###.###.###.###.#.#.###.###.#.#.###.#.#####.#.#
#...#...#.#...#.#...#.....#.#.#.#...#...#.#...#.#.#.#.#...#...#.......#.#.#...#.#
#######.#.#.###.#.#######.###.#.###.###.#.#.###.#.###.#.###.###########.#.#.###.#
#...J...#.#y....#.W.#...#.....#.#...#.#.#.#.....#.....#.#...#...........#.#.....#
#.#######.#########.#.#.#.#####.#.###.#.#.#####.#######.#.###.#.###.#####.#.#####
#.#...#...#.......#.#.#...#.....#.#.....#.......#.......#.#...#...#.#...#...#...#
#.#.#.#.#######.#V#.###.###.#####.#.#####.#########.#.#.#.#####.#.###.#Z#####.#.#
#.#v#.#.#.....#.#.#.#.#...#.#...#.#.....#.........#.#.#.#...#.#.#.....#.......#.#
#.###R#C#.###.#.#.#.#.#####.###.#.#####.#########.###.#.###.#.#################.#
#.#...#.#.#.#...#w#.#.....#...#...#...#.#.......#.....#...#...#.............#...#
#.#.###.#.#.#####.#.#####.###.#.###.#.#.#.#####.#########.#####.###########.#.#.#
#.#.#...#.#.........#...#.....#.#...#.#.#.A.#...........#...#...#..j......#.G.#.#
#.#.#.###.#.#######.###.#######.#.###.#####.#.#######.#####.#.###.#######.#####.#
#.#..r....#.#.....#.......#...#...#.#...#...#.#...#...#.....#.#...#.....#.......#
#.#######.###D###.#######.#.#.#####.###.#.###.#.#.#.###.#####.#.###.###.#########
#..z....#.#...#.#.....#t....#.#.......#.#...#.#i#.#...#.#.....#.#.....#.....#...#
#.#####.###.###.#####.#######.#######.#.#.#.###.#.#####.###O###.#####.#####.###.#
#.#...#...#.#.#.....#...#...#.......#.#.#.#.#...#.....#.#...#...#...#...#.......#
#.###.###.#.#.#.###.###.###.#.#####.#.#.###.#.#######.#.#.###.###.#.###.#######.#
#.....#.#.#.#.....#...#...#...#...#..f#.#...#.#.....#...#...#.....#...#...#...#.#
#####.#.#.#.#.#######.###.#####.#.#####.#.###.###.#.#######.#########.#.#.#.#.#.#
#...#.#.#...#...#.....#...#.....#.....#.#...#...#.#.#.......#...#.....#.#.#.#.#.#
#.#.#.#.#######.#.###.#.#.#.#########.#.#.#.###.#.#.#.#######.#.#.#######.#.#.#.#
#.#.#...#.....#.#.#...#.#.#.....#.#...#.#.#...#.#.#.#.#.....#.#...#.....#...#.#.#
#.#.###.###.#.###.#.###.#######.#.#.###.#.###.#.#.#.#.#.#####.#######.#.#####.#.#
#.#.........#.....#...#...........#....@#@..#.....#...#...............#.......#.#
#################################################################################
#.....#.........#.............#.#......@#@..........#.......#.......#.....#.....#
#.#.#.#.#######.#.###########.#.#.#.#####.###########.###.#.#.#####.#.###.#.###.#
#.#.#.#.#.....#.#.#.......#...#...#.#...#.......#...#.#.#.#.#.#...#.....#.#.#.#.#
#.#.#.#.#.###.#.#.###.#.###.###.###.#.#.#.#####.#.#.#.#.#.#.###.#.#######.#.#.#.#
#.#.#...#...#.#.#...#.#.#...#...#.#...#.#.#...#...#.....#.#.....#.......#.....#.#
###.#####.#.###.#.#.###.#.###.###.#####.#.#.#.###########.#############.#######.#
#...#.#...#.....#.#...#...#...#.......#.#...#.......#.....#.............#...#...#
#.###.#.#########.###.#.###.#########.#.###.#####.###.#########.#######.#.#.#.###
#.#.......#.....#...#.#.#.......#...#.#.#a..#...#.#...#.......#.#.....#d..#.#...#
#.#######.###.#.#.###.#.#######.#.#.#.#.#####.#.###.###.#####.#.#.###Y#####.#.#.#
#...#...#.....#.#.#...#.....I.#...#...#.#...#.#.....#...#.#...#.#.#...#...#.#.#.#
###.#.#.#######.###.#########.#########.#.#.#.#####.#.###.#.###.#.#.#####.#.###.#
#.#...#...#.........#.......#...........#.#.#.#.....#.#...#.#...#h#.#.....#.....#
#.#######.###########.#################.#.#.#.#######.#.#.#.#####.#.#.#########.#
#...#...#.....#...#.......#.......#.....#.#.#.....#...#.#.#.#.....#.#.#.......#.#
#.#.###.#####.#.#.#.#####.#.#.#####.#####.#.#####.#.###.#.#.#.#####.#.#.###.###.#
#.#.....#.#...#.#...#...#g#.#...#...#...#.#.#.....#.#...#.#.#.#...#.#.#...#.....#
#.#####.#.#.#########.#.#.#.###.#.###.#.#.#.#.###.#.#####.#.#.#.#.#.#.###.#######
#.#.....#.#...........#.#.#...#.#...#.#.#.#.#.#...#.....#.#.#.#.#.#.#...#...#...#
#.#.#####.#############.#.###.#.###.#.#.#.#.#.###.#####.#.#.#.#.###.#.#####.#.#.#
#.#...#.....#.#.....#...#...#.#o....#.#.#.#.#...#.#.....#.#.#c#...K.#.#.....#.#.#
#.###E#.###.#.#.###.#####.#.#.#########.#.#.###.#.#.#####.#.###.#####.#.#####.###
#.#...#...#.#.#...#...#...#...#.#.....#.#.#.....#.#.......#...#.....#.#.#...#...#
#.###.###.#.#.###.###.#.#######.#.#.#.#.#.#######.#######.#.#######.#.#.#.###.#.#
#...#.....#...#.#.#.#...#.....#...#.#...#.#.......#.....#.#...#.....#.#.#.....#.#
###.#.#######.#.#.#.#####.#.###.###.###.#.#.###########.#.###.#.#####.#.#######.#
#...#.#...#.....#.#..s#...#.....#...#.#.#.#.#.....#.....#.#.#...#.....#.....#...#
#.#.###.#.#####.#.###.#.#########.###.#.#.#.###.#.#.###.#.#.#####.#.#######.#.###
#.#.#...#.....#.#.#...#...#...#...#...#.#.#.....#...#...#...#...#.#.....#...#...#
#.###.#######.###.#.#.###.#.#.#.###.#.#.#.###########.#####.#.###.###M###.#####.#
#.#...#.....#.....#.#...#...#.#.#...#.#.#.#...........#.....#.....#.#...#.......#
#.#.#######.#######.#######.###.#.#.###.#.###########.#.#####.#####.###.#######.#
#.#.......#.#.....#...#.....#...#.#.#...#.......#...#.#.....#...#.....#.......#.#
#.#######.#.#.#.#.###.#.#####B###.###.###.#####.#.#.#######.###.#####.###.#####.#
#...#...#.#...#.#...#..l#...#...#...#.#.#.#...#.#.#.#.....#...#.#.....#...#...#.#
#.#.#.#.#.#.###.###.#.###.#.###.###.#P#.#.#.#Q#.#.#.#.###.###.#.#.###.#.###.#.#.#
#.#.#.#.#.#.#...#.#.#.#...#.....#...#...#...#.#.#.#.#...#...#.#...#...#...#.#...#
#.#.#.#.#.###.###.#.###.#########.#####.#####.#.#.#.###.###.#.#####.#####.#.#####
#.#...#.......#........u#.......X.......#.....#..n#.......#...#....e....#.......#
#################################################################################
'''
dataa = '''
#######
#a.#Cd#
##@#@##
#######
##@#@##
#cB#.b#
#######
'''
dataa = '''
###############
#d.ABC.#.....a#
######@#@######
###############
######@#@######
#b.....#.....c#
###############
'''
dataa = '''
#############
#g#f.D#..h#l#
#F###e#E###.#
#dCba@#@BcIJ#
#############
#nK.L@#@G...#
#M###N#H###.#
#o#m..#i#jk.#
#############
'''
maze = data.strip().split('\n')
width = len(maze[0])
height = len(maze)
me = None
moves = [(1, 0), (0, 1), (-1, 0), (0, -1)]
def get_pixel(x, y):
return maze[y][x]
def set_pixel(x, y, value):
maze[y] = maze[y][:x] + value + maze[y][x + 1:]
def get_positions():
for y in range(height):
for x in range(width):
value = get_pixel(x,y)
if value != '#' and value != '.':
yield (value, (x, y))
def print_maze(steps, depth):
#print('Steps: %d, depth: %d' % (steps, depth))
for line in maze:
print(line)
print()
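# Editor's note: flood_fill below does a BFS from the single start position
# `me`, returning (key, position, steps) for every key reachable with the
# currently held `keys` (a door counts as passable once its lowercase key is
# held). It is not called by the four-robot solve further down, which works
# off the precomputed distance tables instead.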
def flood_fill(keys):
flood = {me: 0}
queue = [me]
reachable_keys = []
while len(queue):
pos = queue[0]
queue = queue[1:]
steps = flood[pos]
for m in moves:
newpos = (pos[0] + m[0], pos[1] + m[1])
if newpos in flood:
continue
pixel = get_pixel(newpos[0], newpos[1])
if pixel == '#':
pass
elif 'A' <= pixel and pixel <= 'Z':
if pixel.lower() in keys:
reachable_keys.append((pixel, newpos, steps + 1))
flood[newpos] = steps + 1
else:
if 'a' <= pixel and pixel <= 'z':
reachable_keys.append((pixel, newpos, steps + 1))
elif pixel == '.' or pixel == '@':
pass
else:
assert False
queue.append(newpos)
flood[newpos] = steps + 1
return reachable_keys
def get_distances(start):
flood = {start: (0, set())}
queue = [start]
distances = {}
while len(queue):
pos = queue[0]
queue = queue[1:]
fl = flood[pos]
for m in moves:
newpos = (pos[0] + m[0], pos[1] + m[1])
if newpos in flood:
continue
pixel = get_pixel(newpos[0], newpos[1])
if pixel == '#':
pass
elif 'A' <= pixel and pixel <= 'Z':
queue.append(newpos)
copy = fl[1].copy()
copy.add(pixel.lower())
flood[newpos] = (fl[0] + 1, copy)
else:
if 'a' <= pixel and pixel <= 'z':
distances[pixel] = (fl[0] + 1, fl[1].copy())
elif pixel == '.' or pixel == '@':
pass
else:
assert False
queue.append(newpos)
flood[newpos] = (fl[0] + 1, fl[1].copy())
return distances
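# Precompute, for every key and for each of the four '@' starts, the BFS
# distance to every other key together with the set of doors (stored
# lowercased) crossed on the way; walk() later compares that door set
# against the keys collected so far.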
distance_table = {}
at_distance_table = []
for k, v in get_positions():
distances = get_distances(v)
if k == '@':
at_distance_table.append(distances)
else:
distance_table[k] = distances
print(distance_table)
print(at_distance_table)
all_keys = len(set(x for x in data if 'a' <= x <= 'z'))
print_maze(0, 0)
calls = 0
cache_hits = 0
minimum = 2**50
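# Editor's note: the search state is (frozenset of collected keys, current
# letter of each of the four robots). walk() runs a depth-first search over
# these states, memoized in `cache`; moving to a key is allowed only when
# every door on its BFS path (the x[1][1] set) is already unlocked.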
def walk(letters, keys, steps, depth, cache):
global distance_table, at_distance_table, calls, cache_hits, minimum, all_keys
if len(keys) == all_keys + 1:
return 0
calls += 1
key = (frozenset(keys), tuple(letters))
if calls % (100 * 1000) == 0:
print('Calls: %d, hits: %d (%.2f%%), cache size: %d, steps: %d' % (calls, cache_hits, 100.0 * cache_hits / calls, len(cache), steps))
# print_maze(steps, depth)
if key in cache:
cache_hits += 1
return cache[key]
saved_letters = letters.copy()
best = None
for i in range(4):
letter = letters[i]
ff = None
if letter == '@':
ff = [x for x in at_distance_table[i].items() if x[0] not in keys and len(x[1][1] - keys) == 0]
else:
ff = [x for x in distance_table[letter].items() if x[0] not in keys and len(x[1][1] - keys) == 0]
for f in ff:
keys.add(f[0])
letters[i] = f[0]
subtime = walk(letters, keys, steps + f[1][0], depth + 1, cache)
            if best is None or subtime + f[1][0] < best:
best = subtime + f[1][0]
letters = saved_letters.copy()
keys.remove(f[0])
cache[key] = best
return best
time = walk(['@', '@', '@', '@'], set(['@']), 0, 0, {})
print('Minimum: %d, calls: %d' % (time, calls))
|
[
"mliska@suse.cz"
] |
mliska@suse.cz
|
87c1f871ded7f21d2508012994d13c3340fe3122
|
35b6cc658ba30c9c7b906dea40dafe48a01dd093
|
/Energy/Sinks/Exhibits/OceanVoyager/calc_hydraulic.py
|
75de46a7f2251503ba7458f443cc1e2faaab2e35
|
[] |
no_license
|
rothnic/GeorgiaAquarium
|
4f56d374516560914f53dab520331c08bcb802e0
|
5b650decfafbe8b8b5e3298a3a2da9f2db5e1daa
|
refs/heads/master
| 2020-04-14T22:40:25.246815
| 2014-07-31T20:22:57
| 2014-07-31T20:22:57
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 6,724
|
py
|
__author__ = 'Nick'
from math import pi
from numba import jit
from Common.FfnetSurrogate.FfnetSurrogate import FfnetSurrogate
import os
def init_protein_skimmer_surrogate():
# Create configuration for surrogate
inputCols = [
"ratedSpeed",
"flc",
"ratedEff",
"ratedHead",
"ratedFlow",
"csa",
"runSpeed"
]
outputCols = [
"pumpHp",
"totalFlowOut",
"pskFlow2",
"pskFlow1",
"pIn",
"pOut1"
]
trainFile = 'hydroProteinTraining.csv'
netFile = 'trainedProteinSurrogate.net'
# Get full paths to file co-located with this one
path = os.path.dirname(os.path.realpath(__file__))
trainFile = os.path.join(path, trainFile)
netFile = os.path.join(path, netFile)
# Load and return stored surrogate object
return FfnetSurrogate(trainFile, inputCols, outputCols, netFile)
def init_sand_filter_surrogate():
# Create configuration for surrogate
inputCols = [
'pumpFlow',
'pumpRatedHead',
'pumpRatedRpm',
'pumpRunRpm',
'pumpEff',
'flowLossCoeff',
'heatExchFlowLossCoef',
'heatExchValveOpen',
'denitFlowLossCoef',
'ozoneFlowLossCoef',
'ozoneValveOpen',
'denitValveOpen',
'deaerationFlowLossCoef'
]
outputCols = [
'pumpEffOut',
'pumpPower',
'pumpFlow',
'heatExchFlow1',
'heatExchFlow2',
'sandPowerOut',
'sandPowerIn',
'sandFlow',
'heatExchPowerIn',
'heatExchPowerOut',
'juncPowerIn',
'ozone1_6Flow',
'ozone1_6PowerIn',
'denitrifictionPowerIn',
'denitrificationFlow',
'denitificationPowerOut',
'ozone7_12Flow',
'deareationPowerIn',
'deareationFlow',
'powerIn',
'bypassFlow',
'deareationPowerOut'
]
trainFile = 'hydroSandTraining.csv'
netFile = 'trainedSandSurrogate.net'
# Get full paths to file co-located with this one
path = os.path.dirname(os.path.realpath(__file__))
trainFile = os.path.join(path, trainFile)
netFile = os.path.join(path, netFile)
# Load and return stored surrogate object
return FfnetSurrogate(trainFile, inputCols, outputCols, netFile)
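# Illustrative call (editor's sketch; the numbers are made-up placeholders in
# the order of `inputCols` above, not values from the real model):
#   surrogate = init_protein_skimmer_surrogate()
#   inputs = [1780.0, 0.1, 0.85, 60.0, 3000.0, 1.5, 1750.0]
#   totalPower, headOut, totalFlow, pumpPower = calc_protein_power(surrogate, inputs, 3)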
def calc_protein_power(surrogate, inputs, numPumps):
outputs = surrogate.sim(inputs)
totalPower, headOut, totalFlow, proteinPumpPower = calc_protein_power_fast(outputs, numPumps)
return totalPower, headOut, totalFlow, proteinPumpPower
#@jit
def calc_protein_power_fast(outputs, numPumps):
# total kWh per year
proteinPumpPower = outputs[0]
totalPower = (proteinPumpPower * numPumps) * 24 * 365
    # head out: convert the pump pressure delta to feet of water head
    pumpHeadDiff = outputs[5] - outputs[4]
    pumpHeadDiff = pumpHeadDiff * 14.5037738  # apparently bar -> psi (1 bar = 14.5038 psi)
    headOut = pumpHeadDiff / 0.433  # psi -> feet of head (0.433 psi per foot of water)
# total flow out
totalFlow = outputs[1]
return totalPower, headOut, totalFlow, proteinPumpPower
def calc_sand_power(surrogate, inputs, numPumps):
outputs = surrogate.sim(inputs)
sandPumpPower = outputs[1]
totalPower = (sandPumpPower * numPumps) * 24 * 365
sandFlow = -outputs[6]
heatExchFlow1 = outputs[3]
heatExchFlow2 = -outputs[4]
denitFlow = -outputs[12]
bypassFlow = -outputs[18]
ozFlow = -outputs[9] * 6
totalFlow = heatExchFlow2 + denitFlow + bypassFlow + sandFlow
return totalPower, sandFlow, heatExchFlow1, heatExchFlow2, denitFlow, bypassFlow, totalFlow, sandPumpPower, ozFlow
#@jit
def calc_protein_cost(numPumpRetrofits, pumpModificationUnitCost, lossMultiplier, currentProteinCircuitLoss,
proteinRatedEff, currentProteinRatedEff, proteinRatedSpeed, currentProteinRatedSpeed,
ratedFlow, currentProteinRatedFlow, ratedHead, currentProteinRatedHead):
# Cost depends on whether we have changed the configuration of the system
if (currentProteinRatedSpeed == proteinRatedSpeed and currentProteinRatedFlow == ratedFlow and
currentProteinRatedEff == proteinRatedEff and currentProteinRatedHead == ratedHead and
currentProteinCircuitLoss == lossMultiplier):
# Cost is nothing if we don't change the system
return 0.0
else:
effDiff = max(proteinRatedEff*100 - currentProteinRatedEff, 0)
headDiff = max(ratedHead - currentProteinRatedHead, 0)
lossDiff = abs(min((lossMultiplier - currentProteinCircuitLoss)/currentProteinCircuitLoss, 0))
effCostFactor = 4500 * effDiff
headCostFactor = 400 * headDiff
lossCostFactor = 200000 * lossDiff
return (numPumpRetrofits * pumpModificationUnitCost) + effCostFactor + headCostFactor + lossCostFactor
#@jit
def calc_sand_cost(numPumpRetrofits, pumpModificationUnitCost, pumpRatedRpm, currentSandRatedSpeed,
pumpFlow, currentSandRatedFlow, pumpEff, currentSandRatedEff,
pumpRatedHead, currentSandRatedHead, flowLossCoef, currentSandCircuitLoss,
heatExchFlowLossCoef, currentSandHxCircuitLoss, ozoneFlowLossCoef, currentSandOzCircuitLoss,
denitFLowLossCoef, currentSandDnCircuitLoss, deaerationFlowLossCoef, currentSandDatCircuitLoss):
# Cost depends on whether we have changed the configuration of the system
if (pumpRatedRpm == currentSandRatedSpeed and pumpFlow == currentSandRatedFlow and
pumpEff == currentSandRatedEff and pumpRatedHead == currentSandRatedHead):
return 0.0
else:
effDiff = max(pumpEff*100 - currentSandRatedEff, 0)
headDiff = max(pumpRatedHead - currentSandRatedHead, 0)
lossDiff = abs(min((flowLossCoef - currentSandCircuitLoss) / currentSandCircuitLoss, 0))
hxLossDiff = abs(min((heatExchFlowLossCoef - currentSandHxCircuitLoss) / currentSandHxCircuitLoss, 0))
ozLossDiff = abs(min((ozoneFlowLossCoef - currentSandOzCircuitLoss) / currentSandOzCircuitLoss, 0))
        dnLossDiff = abs(min((denitFLowLossCoef - currentSandDnCircuitLoss) / currentSandDnCircuitLoss, 0))
datLossDiff = abs(min((deaerationFlowLossCoef - currentSandDatCircuitLoss) / currentSandDatCircuitLoss, 0))
effCostFactor = 4500 * effDiff
headCostFactor = 400 * headDiff
lossCostFactor = 200000 * lossDiff
heatExchCostFactor = 200000 * hxLossDiff
ozLossFactor = 200000 * ozLossDiff
dnLossFactor = 200000 * dnLossDiff
        datLossFactor = 200000 * datLossDiff
        return (numPumpRetrofits * pumpModificationUnitCost) + effCostFactor + headCostFactor + lossCostFactor \
            + heatExchCostFactor + ozLossFactor + dnLossFactor + datLossFactor
|
[
"nlr06886@gmail.com"
] |
nlr06886@gmail.com
|
c87ea0bf9da1164e6130ba4e4549aa781cf6c0a6
|
6efdde402815db9865a150cdaaded0518ff6756f
|
/GameLauncher.py
|
3fc3242561a9c53a53d1f0224e84a138a2b88162
|
[] |
no_license
|
BoxifyMC/BalloonPopper
|
6ae3719825a965c1811570dd68ef6beb4804bba7
|
adb8de0c49afea1e2dbbc2518f4216450234d8ce
|
refs/heads/master
| 2022-11-26T22:36:14.511055
| 2020-08-01T11:47:12
| 2020-08-01T11:47:12
| 284,248,570
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 14
|
py
|
import client
|
[
"noreply@github.com"
] |
BoxifyMC.noreply@github.com
|
adb37daac3e41737f7a4b3252d8f1793df7914f5
|
750bb7c7e00281c63678bf63fa7d2ceb28f9092d
|
/setup.py
|
ea00e7b52b27e4df4ce4f248788275cf951d06f9
|
[
"MIT"
] |
permissive
|
the-bantoo/coffee_moments
|
90003e7d83461cda4d58f795a3368e38be40c710
|
725d4b795fedb4a3994290bb0e09cf7f48fbbd68
|
refs/heads/main
| 2023-08-05T07:16:37.203621
| 2021-08-24T08:57:54
| 2021-08-24T08:57:54
| 384,875,724
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 518
|
py
|
from setuptools import setup, find_packages
with open('requirements.txt') as f:
install_requires = f.read().strip().split('\n')
# get version from __version__ variable in coffee_moments/__init__.py
from coffee_moments import __version__ as version
setup(
name='coffee_moments',
version=version,
description='Customisations for COMO',
author='Bantoo and Saudi BTI',
author_email='devs@thebantoo.com',
packages=find_packages(),
zip_safe=False,
include_package_data=True,
install_requires=install_requires
)
|
[
"frappe@ubuntu"
] |
frappe@ubuntu
|
14ce2ae12e9992160937bc9c2370454accbd757a
|
5f1737a12f5143c7b90b7b9708ae8122006e20ef
|
/test_basic.py
|
27479b9e849f8e1d86834e137aaf26491464b07d
|
[] |
no_license
|
KarumanchiMahesh/Account-Management-API-DevOps-
|
569d620548efc7926d7b78fe216c5f2893f2d156
|
d552b3674246dde82ff77aee4213900d69d9d585
|
refs/heads/master
| 2022-12-29T13:34:32.207673
| 2020-10-13T13:09:55
| 2020-10-13T13:09:55
| 303,705,751
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,447
|
py
|
import unittest
import helper, services
class TestSum(unittest.TestCase):
def test_header(self):
headers = 'application/json'
result = helper.is_headers_supported(headers)
self.assertEqual(result, True)
def test_nagtive_header(self):
negative_headers = 'application/xml'
self.assertEqual(helper.is_headers_supported(negative_headers), False)
def test_no_account_id(self):
data = {'account_id':'a40bcc03-6f39-418c-ad0b-97e14f522ec1', 'amount':None}
request = helper.validate_request_body(data)['status']
self.assertEqual(request, 400)
def test_no_amount(self):
data = {'amount': 1}
request = helper.validate_request_body(data)['status']
self.assertEqual(request, 400)
def test_wrong_account_id_type(self):
data = {'account_id':12313, 'amount': 12}
request = helper.validate_request_body(data)['status']
self.assertEqual(request, 400)
def test_wrong_amount_type(self):
data = {'account_id':'a40bcc03-6f39-418c-ad0b-97e14f522ec1', 'amount': '123'}
request = helper.validate_request_body(data)['status']
self.assertEqual(request, 400)
def test_wrong_account_id_format(self):
data = {'account_id':'aca2', 'amount': 12}
request = helper.validate_request_body(data)['status']
self.assertEqual(request, 400)
def test_add_initial_balance(self):
response = services.update_amount_status('a40bcc03-6f39-418c-ad0b-97e14f522ec9', 10)
self.assertEqual(response['status'], 200)
def test_add_balance(self):
response = services.update_amount_status('a40bcc03-6f39-418c-ad0b-97e14f522ec9', 10)
self.assertEqual(response['status'], 200)
def test_deduct_balance(self):
response = services.update_amount_status('a40bcc03-6f39-418c-ad0b-97e14f522ec9', -10)
self.assertEqual(response['status'], 200)
def test_get_balance_existing_account(self):
response = services.get_account_balance('a40bcc03-6f39-418c-ad0b-97e14f522ec9')
self.assertEqual(response['status'], 200)
self.assertEqual(response['balance'], 10)
def test_get_balance_non_existing_account(self):
response = services.get_account_balance('a40bcc03-6f39-418c-ad0b-97e14f522ec5')
self.assertEqual(response['status'], 404)
if __name__ == '__main__':
unittest.main()
|
[
"maheshkarumanchi23@gmail.com"
] |
maheshkarumanchi23@gmail.com
|
3c1fe29ea31af97b0f515cbc89433aa3a6d5f930
|
66bc879a01c972291532e7fe468b9cd1fae2ffe3
|
/contact_book/asgi.py
|
1da888f13e0a3e7ae884a6ccc98bc2e78678fded
|
[] |
no_license
|
Santhasangar/contactBook
|
02e03bc06b792ca9aba9bfc1e2ba830f51b9ff03
|
3486437e8d5ea9b618b6743f9e3a90e576672164
|
refs/heads/main
| 2023-03-01T15:29:38.994838
| 2021-01-31T07:47:07
| 2021-01-31T07:47:07
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 417
|
py
|
"""
ASGI config for contact_book project.
It exposes the ASGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/3.0/howto/deployment/asgi/
"""
import os
from django.core.asgi import get_asgi_application
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'contact_book.settings')
application = get_asgi_application()
|
[
"noreply@github.com"
] |
Santhasangar.noreply@github.com
|
05bca5cdded25acf4419fa23cf1c9df996f234ed
|
5ede9cb1b4a13286c7844680f3ca3e7befb0f06d
|
/www/node/dragonfly/patch-tools_gyp_pylib_gyp_common.py
|
5255c7be2a70d48fc664d1f592e62d7567bee44a
|
[] |
no_license
|
waynemareci/DPorts
|
3fdeb479f3aaee1188704799c143acaa0191de38
|
8f47230488a8a169641b023a357dbc9d50858d30
|
refs/heads/master
| 2021-01-17T08:52:43.282910
| 2014-12-16T05:42:03
| 2014-12-16T05:42:03
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 380
|
py
|
--- tools/gyp/pylib/gyp/common.py.orig 2013-02-25 21:07:25.000000000 +0000
+++ tools/gyp/pylib/gyp/common.py
@@ -369,6 +369,8 @@ def GetFlavor(params):
return flavors[sys.platform]
if sys.platform.startswith('sunos'):
return 'solaris'
+ if sys.platform.startswith('dragonfly'):
+ return 'freebsd'
if sys.platform.startswith('freebsd'):
return 'freebsd'
|
[
"nobody@home.ok"
] |
nobody@home.ok
|
30bc9b07ca0d937ca4ccdd7c2383acae700c1871
|
2fd087fbc5faf43940153693823969df6c8ec665
|
/pyc_decrypted/latest/encodings/ptcp154.py
|
9e2efa4550ab04d8b4c7ecfcc077ba9fc21a947f
|
[] |
no_license
|
mickeystone/DropBoxLibrarySRC
|
ed132bbffda7f47df172056845e5f8f6c07fb5de
|
2e4a151caa88b48653f31a22cb207fff851b75f8
|
refs/heads/master
| 2021-05-27T05:02:30.255399
| 2013-08-27T13:16:55
| 2013-08-27T13:16:55
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,569
|
py
|
#Embedded file name: encodings/ptcp154.py
import codecs
class Codec(codecs.Codec):
def encode(self, input, errors = 'strict'):
return codecs.charmap_encode(input, errors, encoding_map)
def decode(self, input, errors = 'strict'):
return codecs.charmap_decode(input, errors, decoding_map)
class IncrementalEncoder(codecs.IncrementalEncoder):
def encode(self, input, final = False):
return codecs.charmap_encode(input, self.errors, encoding_map)[0]
class IncrementalDecoder(codecs.IncrementalDecoder):
def decode(self, input, final = False):
return codecs.charmap_decode(input, self.errors, decoding_map)[0]
class StreamWriter(Codec, codecs.StreamWriter):
pass
class StreamReader(Codec, codecs.StreamReader):
pass
def getregentry():
return codecs.CodecInfo(name='ptcp154', encode=Codec().encode, decode=Codec().decode, incrementalencoder=IncrementalEncoder, incrementaldecoder=IncrementalDecoder, streamreader=StreamReader, streamwriter=StreamWriter)
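# Editor's note (illustrative): modules in the encodings package are located by
# the codec machinery through the getregentry() hook above; registration can
# also be done by hand, e.g.:
#   import codecs
#   from encodings import ptcp154
#   codecs.register(lambda name: ptcp154.getregentry() if name == 'ptcp154' else None)
#   u'\u0416'.encode('ptcp154')  # -> '\xc6', per the 198: 1046 entry below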
decoding_map = codecs.make_identity_dict(range(256))
decoding_map.update({128: 1174,
129: 1170,
130: 1262,
131: 1171,
132: 8222,
133: 8230,
134: 1206,
135: 1198,
136: 1202,
137: 1199,
138: 1184,
139: 1250,
140: 1186,
141: 1178,
142: 1210,
143: 1208,
144: 1175,
145: 8216,
146: 8217,
147: 8220,
148: 8221,
149: 8226,
150: 8211,
151: 8212,
152: 1203,
153: 1207,
154: 1185,
155: 1251,
156: 1187,
157: 1179,
158: 1211,
159: 1209,
161: 1038,
162: 1118,
163: 1032,
164: 1256,
165: 1176,
166: 1200,
168: 1025,
170: 1240,
173: 1263,
175: 1180,
177: 1201,
178: 1030,
179: 1110,
180: 1177,
181: 1257,
184: 1105,
185: 8470,
186: 1241,
188: 1112,
189: 1194,
190: 1195,
191: 1181,
192: 1040,
193: 1041,
194: 1042,
195: 1043,
196: 1044,
197: 1045,
198: 1046,
199: 1047,
200: 1048,
201: 1049,
202: 1050,
203: 1051,
204: 1052,
205: 1053,
206: 1054,
207: 1055,
208: 1056,
209: 1057,
210: 1058,
211: 1059,
212: 1060,
213: 1061,
214: 1062,
215: 1063,
216: 1064,
217: 1065,
218: 1066,
219: 1067,
220: 1068,
221: 1069,
222: 1070,
223: 1071,
224: 1072,
225: 1073,
226: 1074,
227: 1075,
228: 1076,
229: 1077,
230: 1078,
231: 1079,
232: 1080,
233: 1081,
234: 1082,
235: 1083,
236: 1084,
237: 1085,
238: 1086,
239: 1087,
240: 1088,
241: 1089,
242: 1090,
243: 1091,
244: 1092,
245: 1093,
246: 1094,
247: 1095,
248: 1096,
249: 1097,
250: 1098,
251: 1099,
252: 1100,
253: 1101,
254: 1102,
255: 1103})
encoding_map = codecs.make_encoding_map(decoding_map)
|
[
"bizonix@me.com"
] |
bizonix@me.com
|
be68137aa8777b508a725fe7e108c85268c0621f
|
061c1c0907987f14b44dfe781babebdf2d0c91ad
|
/emd-src/plot_figureS3.py
|
9408dfca4e41bc93ac98558e3cb2fc7f63eadebe
|
[] |
no_license
|
StromTroopers/asymmetric-motion
|
0b058ef21aaf3a55cd030923b43dddbb3e888b94
|
97f511d3fc6640522e339e5edba245e955485d96
|
refs/heads/master
| 2021-12-03T21:02:08.229255
| 2014-11-13T10:31:54
| 2014-11-13T10:31:54
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,827
|
py
|
import numpy as np
import h5py
from emd.receptivefield import ReceptiveField
# Receptive Field parameters
# ==========================
EMD_SIGMA_RF = {'sigma_theta': np.radians(33),
'sigma_phi_a': np.radians(45),
'sigma_phi_b': np.radians(102)}
def plot_figure_S3A(ax0, ax1, cmap):
"""plots the two recpetive fields from the emd model
"""
receptive_field_left = ReceptiveField(phi_c=np.radians(15), **EMD_SIGMA_RF)
receptive_field_right = ReceptiveField(phi_c=np.radians(-15), **EMD_SIGMA_RF)
# generate grid data
dphi = np.radians(2)
phi = np.arange(-np.pi, np.pi + dphi, dphi)
theta = np.linspace(-25 * dphi, 25 * dphi, 51)
PHI, THETA = np.meshgrid(phi, theta)
extent = map(np.degrees, [phi[0], phi[-1], theta[0], theta[-1]])
ax0.imshow(receptive_field_left(PHI, THETA), extent=extent, interpolation='nearest', cmap=cmap)
ax0.set_xlim([-180,180])
ax0.set_xticklabels([])
ax0.set_xticks([-180, -90, 0, 90, 180])
ax0.set_ylim([-50,50])
ax0.set_yticks([-50, 0, 50])
ax0.set_ylabel('Elevation (deg)')
im = ax1.imshow(receptive_field_right(PHI, THETA), extent=extent, interpolation='nearest', cmap=cmap)
ax1.set_xlim([-180,180])
ax1.set_xticks([-180, -90, 0, 90, 180])
ax1.set_xlabel('Azimuth(deg)')
ax1.set_ylim([-50,50])
ax1.set_yticks([-50, 0, 50])
ax1.set_ylabel('Elevation (deg)')
return im
def plot_figure_S3B(ax):
"""plots the motion response vs temporal frequency
"""
try:
hdf = h5py.File('data_figureS3B.hdf5', 'r')
except IOError:
print "requires 'data_figureS3B.hdf5'. please run calculate_figure_data.py first."
exit(1)
    # assign data
tf = hdf['temporal_frequency'].value
hsl = hdf['average_hs_cell_response'].value
hsl /= hsl.max() # normalize
hsr = hsl[::-1] # the right cell is mirror symmetric to the left
ax.plot(tf, hsl, lw=2, color='r', marker='o')
ax.plot(tf, hsr, lw=2, color='b', marker='o')
ax.set_xlim(min(tf), max(tf))
ax.set_ylim(-0.5, 1.)
ax.set_xticks([-50, -25, 0, 25, 50])
ax.set_yticks([-0.5, 0, 0.5, 1])
ax.set_ylabel('Normalized Response (a.u.)')
ax.set_xlabel('Temporal Frequency (Hz)')
return
if __name__ == '__main__':
import matplotlib.pyplot as plt
fig = plt.figure()
ax0 = plt.subplot2grid((2, 1), (0, 0))
ax1 = plt.subplot2grid((2, 1), (1, 0))
fig.subplots_adjust(bottom=0.4)
cbar_ax = fig.add_axes([0.9, 0.4, 0.03, 0.5])
im = plot_figure_S3A(ax0, ax1, plt.get_cmap('gray'))
cbar = plt.colorbar(im, cax=cbar_ax, ticks=[0, 1])
cbar.ax.set_yticklabels([0, 1])
cbar.ax.set_ylabel('Response (a.u.)')
figure = plt.figure()
ax = figure.add_subplot(1, 1, 1)
plot_figure_S3B(ax)
plt.show()
|
[
"mail@andreaspoehlmann.de"
] |
mail@andreaspoehlmann.de
|
f1a08c6d3beec77681adb00a7cb1846e1855db7f
|
ff36072cee91426f338abc8109936f136bb9845c
|
/user/migrations/0001_initial.py
|
e91188a33e8fa637275f6d84ed1feeb3783d441c
|
[] |
no_license
|
jsparmani/city_governance_api
|
bc2454d26f6b0c22498080776fba7cd3450c078b
|
96f9ea4024a5931fbda3f3074b2e29f65bb68f9b
|
refs/heads/master
| 2022-02-27T19:28:01.044257
| 2019-10-20T02:14:32
| 2019-10-20T02:14:32
| 215,721,679
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 864
|
py
|
# Generated by Django 2.1.7 on 2019-10-18 18:20
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
('department', '0006_file_department'),
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='DepartmentUser',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('department', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to='department.Department')),
('user', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
],
),
]
|
[
"jsparmani@gmail.com"
] |
jsparmani@gmail.com
|
3362eef2c11297763d25919eb761c0fc5eb0f66b
|
4f062f49d8c82419c760c28c6d7b00f8ce3f4836
|
/ipyslack/__init__.py
|
8f06e9172e3347c5712ede18c4618d68c376a18d
|
[
"MIT"
] |
permissive
|
konstantint/ipyslack
|
bb47e75d001931b7689700e9d9efd449b96506f1
|
ccfc2b26a025c6d82b390954b31d62481980dc3d
|
refs/heads/master
| 2021-01-24T06:44:20.026797
| 2017-06-07T17:08:49
| 2017-06-07T17:08:49
| 93,318,737
| 5
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 4,700
|
py
|
"""
IPython magic for sending slack notifications.
Copyright 2017, Konstantin Tretyakov
Based on: https://github.com/kalaidin/ipytelegram/blob/master/ipytelegram.py
License: MIT
"""
import sys
import os
import slacker
import argparse
import string
from io import StringIO, BytesIO
from IPython.core.magic import Magics, magics_class, line_magic, cell_magic
class ArgumentParser(argparse.ArgumentParser):
def error(self, message):
raise ValueError(message)
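# Formatting helpers: unknown {names} fall back to the IPython user namespace,
# and names missing there too are left verbatim instead of raising KeyError.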
class SafeFormatDict(dict):
def __init__(self, main_dict, secondary_dict):
super(SafeFormatDict, self).__init__(main_dict)
self.secondary_dict = secondary_dict
def __missing__(self, key):
return self.secondary_dict.get(key, '{' + key + '}')
class SafeFormatList(object):
def __getitem__(self, idx):
return '{%d}' % idx
class Capture(object):
def __init__(self, name):
self.name = name
self.data = StringIO() if sys.version_info.major == 3 else BytesIO()
def __enter__(self):
self.original = getattr(sys, self.name)
setattr(sys, self.name, self)
return self
def __exit__(self, exc_type, exc_value, traceback):
setattr(sys, self.name, self.original)
def write(self, data):
self.data.write(data)
self.original.write(data)
def flush(self):
self.original.flush()
@magics_class
class SlackMagics(Magics):
def __init__(self, shell):
super(SlackMagics, self).__init__(shell)
self.slacker = None
p = ArgumentParser()
p.add_argument('-c', '--channel', default=None)
p.add_argument('-u', '--as_user', default=None)
p.add_argument('-t', '--token', default=None)
p.add_argument('file', nargs='?', default=None)
self.parser = p
self._default_config()
def _update_args(self, new_args):
self.args.channel = new_args.channel or self.args.channel
self.args.as_user = new_args.as_user or self.args.as_user
self.args.token = new_args.token or self.args.token
if self.args.token:
self.slacker = slacker.Slacker(self.args.token)
def _read_config_file(self, filename, strict=False):
if strict and not os.path.exists(filename):
raise ValueError("File %s does not exist!" % filename)
line = '' if not os.path.exists(filename) else open(filename).readline().strip()
self._update_args(self.parser.parse_args(line.split()))
def _default_config(self):
self.args = self.parser.parse_args([])
self._read_config_file(os.path.expanduser('~/.ipyslack.cfg'))
self._read_config_file('.ipyslack.cfg')
@line_magic
def slack_setup(self, line):
args = self.parser.parse_args(line.strip().split())
if args.file is not None:
self._read_config_file(args.file, True)
self._update_args(args)
@cell_magic
def slack_notify(self, line, cell):
if not self.slacker or not self.args.channel:
self._default_config()
if not self.slacker or not self.args.channel:
raise ValueError("Call %slack_setup -t <token> -c <#channel_or_@user> first or provide this information in .ipyslack.cfg.")
with Capture('stdout') as stdout, Capture('stderr') as stderr:
result = self.shell.run_cell(cell)
out = stdout.data.getvalue()
err = stderr.data.getvalue()
exc = repr(result.error_in_exec) if result.error_in_exec else ''
self.slacker.chat.post_message(self.args.channel, self._format_message(line, {'out': out, 'exc': exc, 'err': err}), as_user=self.args.as_user)
@line_magic
def slack_send(self, line):
if not self.slacker or not self.args.channel:
self._default_config()
if not self.slacker or not self.args.channel:
raise ValueError("Call %slack_setup -t <token> -c <#channel_or_@user> first or provide this information in .ipyslack.cfg.")
self.slacker.chat.post_message(self.args.channel, self._format_message(line), as_user=self.args.as_user)
def _format_message(self, msg, override_ns = dict()):
if msg == '': msg = ' ' # Slack does not like empty messages
msg = msg.replace('\\n', '\n')
try:
return string.Formatter().vformat(msg, SafeFormatList(), SafeFormatDict(override_ns, self.shell.user_ns))
except:
return msg # May fail if one uses weird formatting stuff, e.g. {nonexistent_var.something}
def load_ipython_extension(ipython):
magics = SlackMagics(ipython)
ipython.register_magics(magics)
def unload_ipython_extension(ipython):
pass
|
[
"kt@ut.ee"
] |
kt@ut.ee
|
5657f8b67362c0a1f6e3a5e0cd3de807cf2f0ca9
|
60f85abda06894729281f6cd86d33a06119c89e8
|
/test_python_docx/test_python_docx.py
|
28f48d0b63933698e11344cea2ab5babc88391ca
|
[] |
no_license
|
lijingchn/html2doc
|
ac69acd9597cc91e97e5e8e5caf15c2f6c60843a
|
358fb86a8c64dda21926cd6df313f0aad7e8cca4
|
refs/heads/master
| 2021-01-23T16:50:59.270025
| 2016-11-20T08:47:51
| 2016-11-20T08:47:51
| 68,780,834
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 4,973
|
py
|
#!/usr/bin/env python
# encoding: utf-8
import json
import pandas as pd
from docx import Document
from docx.shared import Inches
from docx.shared import Pt
# Process the report metadata
class Handle_metadata():
def __init__(self,datafile):
self.datafile = datafile
self.metadata = self.get_metadata()
def get_metadata(self):
metadata = pd.read_json(self.datafile)
return metadata
    # Get the statistics grouped by asset type
def get_assetTypeStatisData(self):
metadata = self.metadata
content = metadata[metadata.index.isin(["content"])]['res'].values[0]
for i in content:
print i
assetTypeStatis = content["asset_analysis"]["asset_statis"]["type"]
assetTypeStatisResult = []
for item in assetTypeStatis:
temp = {}
temp["type_name"] = item["type_name"]
temp["asset_counts"] = item["asset_counts"]
assetTypeStatisResult.append(temp)
return assetTypeStatisResult
    # Assemble the cleandata dict
def get_cleandata(self):
cleandata = {}
cleandata["assetTypeStatisData"] = self.get_assetTypeStatisData()
return cleandata
#def get_meta_data(datafile):
# meta_data = pd.read_json(datafile)
# print meta_data
# content = meta_data[meta_data.index.isin(["content"])]['res'].values[0]
# print type(content)
# for i in content:
# print i
# asset_type_statis = content["asset_analysis"]["asset_statis"]["type"]
# print type(asset_type_statis)
# print len(asset_type_statis)
# asset_type_statis_result = []
# for item in asset_type_statis:
# temp = {}
# temp["type_name"] = asset_type_statis[0]["type_name"]
# temp["asset_counts"] = asset_type_statis[0]["asset_counts"]
# asset_type_statis_result.append(temp)
# return asset_type_statis_result
class Generate_report_docx():
def __init__(self, cleandata):
self.cleandata = cleandata
def generate(self):
cleandata = self.cleandata
document = Document()
document.add_heading(u"工控脆弱性评估报告", 0)
p = document.add_paragraph(u"生成工控脆弱性评估报告测试...")
# p.style = 'ListBullet'
        # Overview
document.add_heading(u"一. 概览", level=2)
document.add_picture("img/overview.png", width=Inches(5.5))
        # Asset statistics
document.add_heading(u"二. 资产统计", level=2)
document.add_heading(u"1.资产按产品类型统计", level=3)
#document.add_paragraph(u"资产按产品类型统计", style="ListNumber")
        # Insert the statistics table
table = document.add_table(rows=1, cols=2)
table.style = "Table Grid"
# table.style = 'LightShading-Accent1'
hdr_cells = table.rows[0].cells
hdr_cells[0].text = u"产品类型"
hdr_cells[1].text = u"数量"
for item in cleandata["assetTypeStatisData"]:
row_cells = table.add_row().cells
row_cells[0].text = item["type_name"]
row_cells[1].text = str(item["asset_counts"])
document.add_heading(u"2. 资产类型分布图", level=3)
document.add_picture("img/assetTypeStatis.png", width=Inches(6))
        # Asset vulnerability statistics
document.add_heading(u"三. 资产脆弱性统计", level=2)
document.add_heading(u"1.脆弱性按厂商统计", level=3)
#document.add_paragraph(u"资产按产品类型统计", style="ListNumber")
        # Insert the statistics table
table = document.add_table(rows=1, cols=2)
# table.style = "Table Grid"
table.style = "Medium List 1 Accent 3"
# table.style = 'LightShading-Accent1'
hdr_cells = table.rows[0].cells
hdr_cells[0].text = u"产品类型"
hdr_cells[1].text = u"数量"
for item in cleandata["assetTypeStatisData"]:
row_cells = table.add_row().cells
row_cells[0].text = item["type_name"]
row_cells[1].text = str(item["asset_counts"])
document.add_heading(u"2. 脆弱性厂商分布图", level=3)
document.add_picture("img/cuiruoxingChangshang.png", width=Inches(6))
document.add_heading(u"3. 资产类型分布图", level=3)
document.add_picture("img/hahaha.png", width=Inches(4.5))
document.add_heading(u"4. 资产类型分布图", level=3)
document.add_picture("img/haha.png", width=Inches(6))
document.add_heading(u"4. 资产类型分布图", level=3)
document.add_picture("img/piepie.png", width=Inches(5))
document.add_heading(u"5. 资产类型分布图", level=3)
document.add_picture("img/heihei.png", width=Inches(5))
document.save("gongkongbaogao.docx")
if __name__ == "__main__":
data_file = "data/report.json"
dataObj = Handle_metadata(data_file)
cleandata = dataObj.get_cleandata()
report = Generate_report_docx(cleandata)
report.generate()
|
[
"lijingjing.chn@gmail.com"
] |
lijingjing.chn@gmail.com
|
f0c4d0d5527dba4cc2a7c61ff1be203503bf82bc
|
a9ce9e90eceb4c527ba368fb32e7be3eb691f90d
|
/Aplicação/livraria/forms/cliente.py
|
16464dd7fb1fec6ba7afe63fb14331919339e8c8
|
[] |
no_license
|
reno/projeto-eng-software
|
9aeaf2f4e012fb08d0bc4923d496e757d443ddb5
|
a5e1f3c1009e5e056add76a60e44c29e2954b6d5
|
refs/heads/master
| 2022-01-10T09:22:40.365657
| 2019-06-29T17:11:51
| 2019-06-29T17:11:51
| 183,648,698
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,090
|
py
|
"""
livraria/forms/cliente.py
Defines the forms used in the Clients menu.
"""
from flask_wtf import FlaskForm
from wtforms import StringField, DateField, SelectField, FormField, SubmitField
from wtforms.validators import DataRequired as Data, Email, Regexp
from livraria.models import *
MSG_DOC = 'Informe apenas números do RG ou CPF.'
MSG_TEL = 'Informe apenas números, incluindo o DDD'
MSG_CEP = 'Informe apenas números'
class FormConsultaCliente(FlaskForm):
opcoes = [(atr, atr.capitalize()) for atr in dir(Cliente)
if not atr.startswith(('_', 'id', 'm', 'q', 'data'))]
campo = SelectField('Campo de busca', choices=opcoes)
termo = StringField('Palavra-chave', validators=[Data()])
submit = SubmitField('Consultar')
class FormEndereco(FlaskForm):
logradouro = StringField('Logradouro', validators=[Data()])
numero = StringField('Número', validators=[Data()])
complemento = StringField('Complemento')
bairro = StringField('Bairro', validators=[Data()])
cidade = StringField('Cidade', validators=[Data()])
estado = StringField('Estado', validators=[Data()])
cep = StringField('CEP', validators=[Data(), Regexp('^[0-9]{8}$',
message=MSG_CEP)])
class FormCadastroCliente(FlaskForm):
nome = StringField('Nome', validators=[Data()])
documento = StringField('Documento', validators=[Data(),
Regexp('^[0-9]{5,13}$', message=MSG_DOC)])
data_nascimento = DateField('Data de nascimento',
validators=[Data()], format='%d/%m/%Y')
endereco = FormField(FormEndereco)
telefone = StringField('Telefone', validators=[Data(),
Regexp('^[0-9]{10,11}$', message=MSG_TEL)])
email = StringField('E-mail', validators=[Data(), Email()])
submit = SubmitField('Cadastrar')
class FormConsultaDocumento(FlaskForm):
documento = StringField('Documento', validators=[Data(),
Regexp('^[0-9]{5,13}$', message=MSG_DOC)])
submit = SubmitField('Localizar')
|
[
"renan.modenese@gmail.com"
] |
renan.modenese@gmail.com
|
0f51a10d7923e3cc41c733b6ac55a879f4acbedb
|
b15d2787a1eeb56dfa700480364337216d2b1eb9
|
/accelbyte_py_sdk/api/ugc/operations/admin_content/admin_download_content_preview.py
|
9416b063ae2bbfaa0cbe46ad23d3786f5fa0d964
|
[
"MIT"
] |
permissive
|
AccelByte/accelbyte-python-sdk
|
dedf3b8a592beef5fcf86b4245678ee3277f953d
|
539c617c7e6938892fa49f95585b2a45c97a59e0
|
refs/heads/main
| 2023-08-24T14:38:04.370340
| 2023-08-22T01:08:03
| 2023-08-22T01:08:03
| 410,735,805
| 2
| 1
|
MIT
| 2022-08-02T03:54:11
| 2021-09-27T04:00:10
|
Python
|
UTF-8
|
Python
| false
| false
| 7,206
|
py
|
# Copyright (c) 2021 AccelByte Inc. All Rights Reserved.
# This is licensed software from AccelByte Inc, for limitations
# and restrictions contact your company contract manager.
#
# Code generated. DO NOT EDIT!
# template file: ags_py_codegen
# pylint: disable=duplicate-code
# pylint: disable=line-too-long
# pylint: disable=missing-function-docstring
# pylint: disable=missing-module-docstring
# pylint: disable=too-many-arguments
# pylint: disable=too-many-branches
# pylint: disable=too-many-instance-attributes
# pylint: disable=too-many-lines
# pylint: disable=too-many-locals
# pylint: disable=too-many-public-methods
# pylint: disable=too-many-return-statements
# pylint: disable=too-many-statements
# pylint: disable=unused-import
# AccelByte Gaming Services Ugc Service (2.11.3)
from __future__ import annotations
from typing import Any, Dict, List, Optional, Tuple, Union
from .....core import Operation
from .....core import HeaderStr
from .....core import HttpResponse
from ...models import ModelsGetContentPreviewResponse
from ...models import ResponseError
class AdminDownloadContentPreview(Operation):
"""Get content preview (AdminDownloadContentPreview)
Required permission ADMIN:NAMESPACE:{namespace}:USER:*:CONTENT [READ]
NOTE: Preview is Legacy Code, please use Screenshot for better solution to display preview of a content
Required Permission(s):
- ADMIN:NAMESPACE:{namespace}:USER:*:CONTENT [READ]
Properties:
url: /ugc/v1/admin/namespaces/{namespace}/contents/{contentId}/preview
method: GET
tags: ["Admin Content"]
consumes: ["application/json"]
produces: ["application/json"]
securities: [BEARER_AUTH]
content_id: (contentId) REQUIRED str in path
namespace: (namespace) REQUIRED str in path
Responses:
200: OK - ModelsGetContentPreviewResponse (OK)
401: Unauthorized - ResponseError (Unauthorized)
404: Not Found - ResponseError (Not Found)
500: Internal Server Error - ResponseError (Internal Server Error)
"""
# region fields
_url: str = "/ugc/v1/admin/namespaces/{namespace}/contents/{contentId}/preview"
_method: str = "GET"
_consumes: List[str] = ["application/json"]
_produces: List[str] = ["application/json"]
_securities: List[List[str]] = [["BEARER_AUTH"]]
_location_query: str = None
content_id: str # REQUIRED in [path]
namespace: str # REQUIRED in [path]
# endregion fields
# region properties
@property
def url(self) -> str:
return self._url
@property
def method(self) -> str:
return self._method
@property
def consumes(self) -> List[str]:
return self._consumes
@property
def produces(self) -> List[str]:
return self._produces
@property
def securities(self) -> List[List[str]]:
return self._securities
@property
def location_query(self) -> str:
return self._location_query
# endregion properties
# region get methods
# endregion get methods
# region get_x_params methods
def get_all_params(self) -> dict:
return {
"path": self.get_path_params(),
}
def get_path_params(self) -> dict:
result = {}
if hasattr(self, "content_id"):
result["contentId"] = self.content_id
if hasattr(self, "namespace"):
result["namespace"] = self.namespace
return result
# endregion get_x_params methods
# region is/has methods
# endregion is/has methods
# region with_x methods
def with_content_id(self, value: str) -> AdminDownloadContentPreview:
self.content_id = value
return self
def with_namespace(self, value: str) -> AdminDownloadContentPreview:
self.namespace = value
return self
# endregion with_x methods
# region to methods
def to_dict(self, include_empty: bool = False) -> dict:
result: dict = {}
if hasattr(self, "content_id") and self.content_id:
result["contentId"] = str(self.content_id)
elif include_empty:
result["contentId"] = ""
if hasattr(self, "namespace") and self.namespace:
result["namespace"] = str(self.namespace)
elif include_empty:
result["namespace"] = ""
return result
# endregion to methods
# region response methods
# noinspection PyMethodMayBeStatic
def parse_response(
self, code: int, content_type: str, content: Any
) -> Tuple[
Union[None, ModelsGetContentPreviewResponse],
Union[None, HttpResponse, ResponseError],
]:
"""Parse the given response.
200: OK - ModelsGetContentPreviewResponse (OK)
401: Unauthorized - ResponseError (Unauthorized)
404: Not Found - ResponseError (Not Found)
500: Internal Server Error - ResponseError (Internal Server Error)
---: HttpResponse (Undocumented Response)
---: HttpResponse (Unexpected Content-Type Error)
---: HttpResponse (Unhandled Error)
"""
pre_processed_response, error = self.pre_process_response(
code=code, content_type=content_type, content=content
)
if error is not None:
return None, None if error.is_no_content() else error
code, content_type, content = pre_processed_response
if code == 200:
return ModelsGetContentPreviewResponse.create_from_dict(content), None
if code == 401:
return None, ResponseError.create_from_dict(content)
if code == 404:
return None, ResponseError.create_from_dict(content)
if code == 500:
return None, ResponseError.create_from_dict(content)
return self.handle_undocumented_response(
code=code, content_type=content_type, content=content
)
# endregion response methods
# region static methods
@classmethod
def create(
cls, content_id: str, namespace: str, **kwargs
) -> AdminDownloadContentPreview:
instance = cls()
instance.content_id = content_id
instance.namespace = namespace
return instance
@classmethod
def create_from_dict(
cls, dict_: dict, include_empty: bool = False
) -> AdminDownloadContentPreview:
instance = cls()
if "contentId" in dict_ and dict_["contentId"] is not None:
instance.content_id = str(dict_["contentId"])
elif include_empty:
instance.content_id = ""
if "namespace" in dict_ and dict_["namespace"] is not None:
instance.namespace = str(dict_["namespace"])
elif include_empty:
instance.namespace = ""
return instance
@staticmethod
def get_field_info() -> Dict[str, str]:
return {
"contentId": "content_id",
"namespace": "namespace",
}
@staticmethod
def get_required_map() -> Dict[str, bool]:
return {
"contentId": True,
"namespace": True,
}
# endregion static methods
|
[
"elmernocon@gmail.com"
] |
elmernocon@gmail.com
|
09e0a676086371654a56d2662bdd6a243542a56c
|
a1cb1dbad03077c727b84901953d3ecccfa0f623
|
/Trump x Bible/tweet_dumper.py
|
cf9d8b2c888451a1453e32d881eec46af1c02041
|
[] |
no_license
|
bgreenawald/Markov-Text-Generation
|
8982bfd4e0866882f0114ec1cde11ef8fc976c45
|
fedf5e84fbc3a8cd7bd0c7179a8913f6bd1d6e5c
|
refs/heads/master
| 2021-01-25T13:46:48.144129
| 2019-06-07T01:21:41
| 2019-06-07T01:21:41
| 123,608,715
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,975
|
py
|
#!/usr/bin/env python
# encoding: utf-8
import tweepy #https://github.com/tweepy/tweepy
import csv
#Twitter API credentials
consumer_key = "pmTt2HRgUvx5uFWJVCNFZ2gJw"
consumer_secret = "hhLQP7eUf0GkzHVjbj0Rsh6lavtTWpvXSSUTeTuYXO0uJgx6qx"
access_key = "219705420-F12q5r55is7BsJVvsoJPn3BpYiWpaD7sHtuAbF1T"
access_secret = "fyfJ1XMZiOQ3gOBHibG42PPDLbhqFd0kF1q7SctKMHofb"
def get_all_tweets(screen_name):
    #Twitter only allows access to a user's most recent 3240 tweets with this method
#authorize twitter, initialize tweepy
auth = tweepy.OAuthHandler(consumer_key, consumer_secret)
auth.set_access_token(access_key, access_secret)
api = tweepy.API(auth)
#initialize a list to hold all the tweepy Tweets
alltweets = []
#make initial request for most recent tweets (200 is the maximum allowed count)
new_tweets = api.user_timeline(screen_name = screen_name,count=200)
#save most recent tweets
alltweets.extend(new_tweets)
#save the id of the oldest tweet less one
oldest = alltweets[-1].id - 1
#keep grabbing tweets until there are no tweets left to grab
while len(new_tweets) > 0:
print("getting tweets before %s" % (oldest))
        #all subsequent requests use the max_id param to prevent duplicates
new_tweets = api.user_timeline(screen_name = screen_name,count=200,max_id=oldest)
#save most recent tweets
alltweets.extend(new_tweets)
#update the id of the oldest tweet less one
oldest = alltweets[-1].id - 1
print("...%s tweets downloaded so far" % (len(alltweets)))
#transform the tweepy tweets into a 2D array that will populate the csv
outtweets = [[tweet.id_str, tweet.created_at, tweet.text.encode("utf-8")] for tweet in alltweets]
#write the csv
with open('%s_tweets.csv' % screen_name, 'w') as f:
writer = csv.writer(f)
writer.writerow(["id","created_at","text"])
writer.writerows(outtweets)
pass
if __name__ == '__main__':
#pass in the username of the account you want to download
get_all_tweets("realDonaldTrump")
|
[
"bgreenawald@gmail.com"
] |
bgreenawald@gmail.com
|
61f83d9968d8194dc8460f48004575157a02f201
|
e56d792be935f2065aeb6dabaf742fcfe8a622ef
|
/Util/hsv_codes.py
|
757b81bf658bf05210a6e0073f8f5bc525e73a5b
|
[
"MIT"
] |
permissive
|
huzz/LaserMote
|
464c4f862706fcaad4bb4004f5e2b6224ca80d31
|
acbf69a7e37746ae3a19138eb0c7fa30895511fa
|
refs/heads/master
| 2021-05-29T16:35:20.192286
| 2015-07-31T02:19:48
| 2015-07-31T02:19:48
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,128
|
py
|
import cv2
import numpy as np
cap = cv2.VideoCapture(0)
def nothing(x):
pass
# Creating a window for later use
cv2.namedWindow('result')
# Starting with 100's to prevent error while masking
h, s, v = 100, 100, 100
# Creating track bar
cv2.createTrackbar('h', 'result', 0, 179, nothing)
cv2.createTrackbar('s', 'result', 0, 255, nothing)
cv2.createTrackbar('v', 'result', 0, 255, nothing)
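# Note: OpenCV stores hue as 0-179 (degrees halved), which is why the 'h'
# track bar above is capped at 179 while 's' and 'v' run the full 0-255.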
while True:
_, frame = cap.read()
# mirror the frame
frame = cv2.flip(frame, 1)
# converting to HSV
hsv = cv2.cvtColor(frame, cv2.COLOR_BGR2HSV)
    # get info from track bar and apply to result
h = cv2.getTrackbarPos('h', 'result')
s = cv2.getTrackbarPos('s', 'result')
v = cv2.getTrackbarPos('v', 'result')
# Normal masking algorithm
lower_blue = np.array([h, s, v])
upper_blue = np.array([180, 255, 255])
mask = cv2.inRange(hsv, lower_blue, upper_blue)
result = cv2.bitwise_and(frame, frame, mask=mask)
cv2.imshow('result', result)
cv2.imshow('frame', frame)
k = cv2.waitKey(5) & 0xFF
if k == 27:
break
cap.release()
cv2.destroyAllWindows()
|
[
"ammsa7@gmail.com"
] |
ammsa7@gmail.com
|
567639e372965e982c4e51dfa859140ce4a49b73
|
31e41995dea5e4a41bc9b942da7e5266cd686757
|
/learning/training/pythonsolns/py3/solns/timer.py
|
edd7d97a844d3616244d9d2fd6d29205b4fbc624
|
[] |
no_license
|
tamle022276/python
|
3b75758b8794801d202565c05d32976c146beffd
|
4fec225d1e5e2bf0adac5048f7f9f3313ac76e23
|
refs/heads/master
| 2020-04-01T21:03:01.458768
| 2017-03-13T20:47:35
| 2017-03-13T20:47:35
| 64,878,939
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 470
|
py
|
#!/usr/bin/env python3
# timer.py - Clock Exceptions
from ClockException import *
from Clock import Clock
try:
c1 = Clock(12, 33)
print("%d:%d" %(c1.getHour(), c1.getMinute()))
c2 = Clock (12, 73) # raises exception
except ClockException as ex:
print("%s: %s" %(ex.__class__.__name__, ex.response()))
#################################################
#
# $ timer.py
# 12:33
# MinuteException: minute 73 not between 0 and 59
#
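# --- Editor's note (not part of the original script): the Clock and
# ClockException modules imported above are not included in this dump. A
# minimal, hypothetical sketch of what this script appears to assume:
#
#     class ClockException(Exception):
#         def response(self):
#             return str(self)
#
#     class MinuteException(ClockException):
#         pass
#
#     class Clock:
#         def __init__(self, hour, minute):
#             if not 0 <= minute <= 59:
#                 raise MinuteException("minute %d not between 0 and 59" % minute)
#             self._hour, self._minute = hour, minute
#         def getHour(self):
#             return self._hour
#         def getMinute(self):
#             return self._minute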
|
[
"tam.le@teradata.com"
] |
tam.le@teradata.com
|
106229842fc806a718a90f7bd279400a6c6a8117
|
16f7d4d8a1826b0a60fc4ced662dc4ec029c60af
|
/Code/Libs/Python/PlotData.py
|
3aa2dfd6fcf24e638926618e0b17351abf850967
|
[] |
no_license
|
Sharonrab/Autoboat
|
8ca5d935ce35b720298a957795e3352c00408cc9
|
7735ea0848235833835670a309486003864544a7
|
refs/heads/master
| 2021-01-16T19:53:50.445742
| 2016-03-01T20:28:32
| 2016-03-01T20:28:32
| 52,815,718
| 0
| 0
| null | 2016-02-29T19:02:56
| 2016-02-29T19:02:56
| null |
UTF-8
|
Python
| false
| false
| 2,395
|
py
|
# This file plots the data for the given field(s) from .CSV files of recorded runs. With multiple fields specified, they are all plotted together.
# Additionally, if an autonomous run number is given as the last argument, only that autonomous section is plotted.
import numpy as np
import matplotlib.pyplot as plt
import sys
from helpers import GetDataForFields, GetTimespanForRun
if __name__ == '__main__':
if len(sys.argv) < 3:
print("ERROR: Arguments required - CSV file, field_name1[,field_name2] (MESSAGE_TYPE.FIELD_NAME), [autonomous run number]")
sys.exit(1)
# Read in the program arguments from the command line
csv_file = sys.argv[1]
field_names = sys.argv[2].split(',')
if len(sys.argv) > 3:
auto_run = int(sys.argv[3])
else:
auto_run = None
# Get all the data from the CSV file
data = GetDataForFields(csv_file)
# Check that the user provided a valid field name
for f in field_names:
if f not in data:
print("ERROR: Invalid field name '{}'. '{}' only contains the following fields: {}".format(f, csv_file, ','.join(data.keys())))
sys.exit(1)
# Now plot the data, showing the whole dataset if no autonomous run number was specified
if auto_run:
# Determine the range of values to plot
start_time, end_time = GetTimespanForRun(data, auto_run)
plt.figure()
for f in field_names:
valid_indices = np.logical_and(data[f][0] >= start_time, data[f][0] <= end_time)
plt.plot(data[f][0,valid_indices] - data[f][0,0], data[f][1,valid_indices])
plt.title('{} for auto run {}'.format(','.join(field_names), auto_run))
plt.xlabel('Time (s)')
if len(field_names) > 1:
plt.legend(field_names)
plt.show()
else:
plt.figure()
for f in field_names:
plt.plot(data[f][0,:] - data[f][0,0], data[f][1,:])
        plt.title("'{}' in '{}'".format(', '.join(field_names), csv_file))
plt.xlabel('Time (s)')
if len(field_names) > 1:
plt.legend(field_names)
plt.show()
# Preprocess the data
# Subtract off the timestamp and convert to seconds
# plt.scatter(data['LOCAL_POSITION_NED.y'][1,:], data['LOCAL_POSITION_NED.x'][1,:])
# plt.axis('equal')
# plt.show()
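# --- Editor's note (not part of the original script): example invocation,
# with a hypothetical log file name; the field names follow the
# MESSAGE_TYPE.FIELD_NAME convention from the usage string:
#
#     python PlotData.py flight_log.csv LOCAL_POSITION_NED.x,LOCAL_POSITION_NED.y 2
#
# which plots both fields over time for autonomous run number 2.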
|
[
"susurrus@gmail.com"
] |
susurrus@gmail.com
|
e45a55e79d0de07c47f727688a518742cdd893e3
|
6e8fa7dfcd59fd545b9fff2d0935474582100192
|
/scripts/change_snap_to_csv.py
|
d03ca559ac0f7ad0b12a817baa050336239a9355
|
[] |
no_license
|
avudzor/optimizing-subgraph-queries-combining-binary-and-worst-case-optimal-joins
|
2e85530b7a8ac5796f33f93df21d903ee534cef3
|
be9a64befd5212a1068ed2669e8f07933fabed55
|
refs/heads/master
| 2023-03-20T10:15:49.066800
| 2020-09-10T16:00:14
| 2020-09-11T00:53:13
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,165
|
py
|
#!/usr/bin/env python3
"""
Given an absolute path to a directory containing '.txt' files from the Stanford
Large Network Dataset Collection, the script outputs an edges '.csv' file for
each input file. The CSV is later loaded into Graphflow and saved to a
directory in binary format.
"""
import sys
import argparse
import random
from os import listdir
from os.path import isfile, join
def main():
args = parse_args()
highestVertexId = produce_edges_file(args.input_file,
args.output_edges_file, args.separator, args.label)
    if args.output_vertices_file:
        produce_vertices_file(args.input_file, args.output_vertices_file,
                              args.separator, args.type, highestVertexId)
def parse_args():
    parser = argparse.ArgumentParser(
        description='convert a raw SNAP edge list into Graphflow CSV files')
parser.add_argument('input_file',
help='the raw input file using absolute path')
parser.add_argument('output_edges_file',
help='the csv edges output file using absolute path.')
parser.add_argument('-o', '--output_vertices_file',
help='the csv vertices output file using absolute path.')
parser.add_argument('-s', '--separator',
help='separator between vertices in each line.', default='\t')
parser.add_argument('-t', '--type',
help='number of vertex types.', type=int, default=1)
parser.add_argument('-l', '--label',
help='number of edge labels.', type=int, default=1)
return parser.parse_args()
def produce_edges_file(input_file, output_file, separator, num_of_labels):
edges_file = open(output_file, 'w+')
highestVertexId = -1
# format file written as: FROM,TO,LABEL.
random.seed(0) # use '0' to always get the same sequence of labels
with open(input_file) as f:
for line in f:
            if line[0] == '#':  # skip comment lines; process the rest
continue
try:
edge = line.split(separator)
if len(edge) == 1:
edge = line.split(' ') # edge=['<from>','<to>\n']
fromVertex = edge[0]
toVertex = edge[1]
toVertex = toVertex[:len(toVertex)-1] # removes '\n'
if int(fromVertex) > highestVertexId:
highestVertexId = int(fromVertex)
if int(toVertex) > highestVertexId:
highestVertexId = int(toVertex)
except Exception: # does not follow the usual csv pattern
continue
if fromVertex == toVertex: # remove self-loops
continue
edge_label = random.randint(0, num_of_labels - 1)
edges_file.write(fromVertex + ',' + toVertex + ',' + \
str(edge_label) + '\n')
    edges_file.close()
    return highestVertexId
def produce_vertices_file(input_file, output_file, separator, num_of_types,
highestVertexId):
vertices_file = open(output_file, 'w+')
# format file written as: VERTEX_ID,TYPE.
random.seed(0) # use '0' to always get the same sequence of types
for vertexId in range(0, highestVertexId + 1):
vertex_type = random.randint(0, num_of_types - 1)
vertices_file.write(str(vertexId) + ',' + str(vertex_type) + '\n')
vertices_file.close()
if __name__ == '__main__':
main()
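# --- Editor's note (not part of the original script): example invocation
# with hypothetical paths, converting a tab-separated SNAP edge list and
# also emitting a vertices file with 2 vertex types and 3 edge labels:
#
#     python change_snap_to_csv.py /data/web-Google.txt /data/edges.csv \
#         -o /data/vertices.csv -t 2 -l 3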
|
[
"m.amine.mhedhbi@gmail.com"
] |
m.amine.mhedhbi@gmail.com
|
1c4c677cf4b4b6ae777e9e48d5dd9ed1d8338f8d
|
6268a19db5d7806b3a91d6350ec2777b3e13cee6
|
/old_stuff/code/aaron-cv/src/loss/hpo.py
|
ff3d306435b65ee6e4cc699da739304aaae99086
|
[] |
no_license
|
aaronlws95/phd_2019
|
3ae48b4936f039f369be3a40404292182768cf3f
|
22ab0f5029b7d67d32421d06caaf3e8097a57772
|
refs/heads/master
| 2023-03-22T14:38:18.275184
| 2021-03-21T11:39:29
| 2021-03-21T11:39:29
| 186,387,381
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 15,131
|
py
|
import torch
import time
import math
import torch.nn as nn
import numpy as np
from src.utils import FPHA
class HPOLoss(torch.nn.Module):
""" HPO loss calculation """
def __init__(self, cfg):
super().__init__()
self.W = None
self.H = None
self.D = None
self.hand_scale = float(cfg["hand_scale"])
self.no_hand_scale = float(cfg["no_hand_scale"])
self.hand_root = int(cfg["hand_root"])
self.sharp = int(cfg["sharpness"])
self.d_th = int(cfg["d_th"])
self.sil_thresh = float(cfg["sil_thresh"])
self.time = cfg['time']
self.debug = cfg['debug']
def offset_to_uvd(self, x):
"""Split prediction into predicted uvd with grid offset and
predicted conf
Args:
x : Predicted output (b, (63+1)*D, H, W)
Out:
pred_uvd_no_offset : Predicted uvd without offset
(b, 21, 3, H, W, D)
pred_uvd : Predicted uvd with grid offset
(b, 21, 3, H, W, D)
pred_conf : Predicted confidence values (b, H, W, D)
"""
FT = torch.FloatTensor
self.bs = x.shape[0]
self.W = x.shape[2]
self.H = x.shape[3]
self.D = 5
x = x.view(self.bs, 64, self.D, self.H, self.W)
x = x.permute(0, 1, 3, 4, 2)
# if self.debug:
# x = torch.zeros(x.shape).cuda()
# x[0, :, 12, 12, 4] = 1
pred_uvd_no_offset = x[:, :63, :, :, :].view(self.bs, 21, 3,
self.H, self.W, self.D)
pred_conf = x[:, 63, :, :, :]
pred_conf = torch.sigmoid(pred_conf)
yv, xv, zv = torch.meshgrid([torch.arange(self.H),
torch.arange(self.W),
torch.arange(self.D)])
grid_x = xv.repeat((21, 1, 1, 1)).type(FT).cuda()
grid_y = yv.repeat((21, 1, 1, 1)).type(FT).cuda()
grid_z = zv.repeat((21, 1, 1, 1)).type(FT).cuda()
pred_uvd_no_offset[:, self.hand_root, :, :, :, :] = \
torch.sigmoid(pred_uvd_no_offset[:, self.hand_root, :, :, :, :])
pred_uvd = pred_uvd_no_offset.clone().detach()
pred_uvd[:, :, 0, :, :, :] = \
(pred_uvd[:, :, 0, :, :, :] + grid_x)/self.W
pred_uvd[:, :, 1, :, :, :] = \
(pred_uvd[:, :, 1, :, :, :] + grid_y)/self.H
pred_uvd[:, :, 2, :, :, :] = \
(pred_uvd[:, :, 2, :, :, :] + grid_z)/self.D
# if self.debug:
# print('Checking grid')
# c = torch.ones(pred_uvd_no_offset.shape)
# c[:, :, 0, :, :, :] = (c[:, :, 0, :, :, :] + grid_x)
# c[:, :, 1, :, :, :] = (c[:, :, 1, :, :, :] + grid_y)
# c[:, :, 2, :, :, :] = (c[:, :, 2, :, :, :] + grid_z)
# print('First depth layer')
# print(c[0, 0, 0, :, : , 0])
# print(c[0, 0, 1, :, : , 0])
# print(c[0, 0, 2, :, : , 0])
# print('Second depth layer, second batch, second joint')
# print('Should be the same[')
# print(c[1, 1, 0, :, : , 1])
# print(c[1, 1, 1, :, : , 1])
# print(c[1, 1, 2, :, : , 1])
return pred_uvd_no_offset, pred_uvd, pred_conf
def calc_conf_grid(self, pred_uvd, uvd_gt,
im_width=FPHA.ORI_WIDTH,
im_height=FPHA.ORI_HEIGHT,
ref_depth=FPHA.REF_DEPTH):
"""
Calculate true confidence values in a grid for confidence mask
Args:
pred_uvd : Predicted uvd values with grid offset scaled
to (1, 1) (21, 3, H, W, D)
uvd_gt : Ground truth uvd repeated in grid (21, 3, H, W, D)
Out:
mean_conf : Mean confidence in a grid (H, W, D)
"""
dist = pred_uvd - uvd_gt
dist[:, 0, :, :, :] = dist[:, 0, :, :, :]*im_width
dist[:, 1, :, :, :] = dist[:, 1, :, :, :]*im_height
dist[:, 2, :, :, :] = dist[:, 2, :, :, :]*ref_depth
eps = 1e-5
dist = torch.sqrt(torch.sum((dist)**2, dim = 1) + eps)
mask = (dist < self.d_th).type(torch.FloatTensor)
conf = torch.exp(self.sharp*(1 - dist/self.d_th)) - 1
conf0 = torch.exp(self.sharp*(1 - torch.zeros(conf.shape))) - 1
conf = conf/conf0.cuda()
conf = mask.cuda()*conf
mean_conf = torch.mean(conf, dim=0)
return mean_conf
def calc_conf(self, pred_uvd, uvd_gt,
im_width=FPHA.ORI_WIDTH,
im_height=FPHA.ORI_HEIGHT,
ref_depth=FPHA.REF_DEPTH):
"""
Calculate true specific target confidence values
Args:
pred_uvd : Predicted uvd at target location (21, 3)
uvd_gt : Ground truth uvd (21, 3)
Out:
mean_conf : Mean confidence (1)
"""
eps = 1e-5
dist = pred_uvd - uvd_gt
dist[:, 0] = dist[:, 0]*im_width
dist[:, 1] = dist[:, 1]*im_height
dist[:, 2] = dist[:, 2]*ref_depth
dist = torch.sqrt(torch.sum((dist)**2, dim = 1) + eps)
mask = (dist < self.d_th).type(torch.FloatTensor)
conf = torch.exp(self.sharp*(1 - dist/self.d_th)) - 1
conf0 = torch.exp(self.sharp*(1 - torch.zeros(conf.shape))) - 1
conf = conf/conf0
conf = mask*conf
mean_conf = torch.mean(conf, dim=0)
return mean_conf
def get_conf_mask(self, pred_uvd, uvd_gt):
""" Get mask to weigh confidence values
Args:
pred_uvd : Predicted uvd values with grid offset scaled
to (1, 1) (21, 3, H, W, D)
uvd_gt : Ground truth uvd (21, 3)
Out:
conf_mask : All set to no_hand_scale except those with confidence
more than sil_thresh which are set to 0. Later
will set target location to hand_scale (b, H, W, D)
"""
conf_mask = torch.ones(self.bs,
self.H,
self.W,
self.D)*self.no_hand_scale
for batch in range(self.bs):
cur_pred_uvd = pred_uvd[batch]
cur_uvd_gt = uvd_gt[batch].repeat(self.H,
self.W,
self.D,
1, 1)
cur_uvd_gt = cur_uvd_gt.permute(3, 4, 0, 1, 2)
cur_conf = self.calc_conf_grid(cur_pred_uvd, cur_uvd_gt)
conf_mask[batch][cur_conf > self.sil_thresh] = 0
# if self.debug:
# print('Should be the same')
# print(cur_uvd_gt[:, :, 0, 0, 0])
# print(cur_uvd_gt[:, :, 12, 12, 4])
# if self.debug:
# if batch == 0:
# print(cur_pred_uvd)
# print(cur_uvd_gt)
# print(cur_conf)
# print(conf_mask[batch])
return conf_mask
def get_target(self, uvd_gt, pred_uvd):
"""
Get target boxes and masks
Args:
pred_uvd : Predicted uvd values with grid offset scaled
to (1, 1) (21, 3, H, W, D)
uvd_gt : Ground truth uvd (21, 3)
Out:
Target location refers to the ground truth x, y, z
conf_mask : From get_conf_mask. Set target locations to
object_scale (b, na, H, W)
target_conf : All 0 except at target location it is the conf
between the target and predicted bbox
(b, H, W, D)
target_uvd : Target uvd keypoints. Set all to 0 except at
target location where it is scaled to compare
with pred_uvd_no_offset (b, 21, 3, H, W, D)
coord_mask : All 0 except at target locations it is 1
(b, H, W, D)
"""
t0 = time.time() # start
FT = torch.FloatTensor
target_uvd = torch.zeros(self.bs, 21, 3,
self.H, self.W, self.D).type(FT)
coord_mask = torch.zeros(self.bs, self.H, self.W, self.D).type(FT)
target_conf = torch.zeros(self.bs, self.H, self.W, self.D).type(FT)
conf_mask = self.get_conf_mask(pred_uvd, uvd_gt)
t1 = time.time() # get_conf_mask
# if self.debug:
# print('Checking get_conf_mask')
# check_pred_uvd = torch.zeros(pred_uvd.shape)
# check_uvd_gt = torch.zeros(uvd_gt.shape)
# check_pred_uvd[0, :, :, 12, 12, 4] = 100
# check_uvd_gt[0, :, :] = 100
# check_conf_mask = self.get_conf_mask(check_pred_uvd,
# check_uvd_gt)
# print(check_conf_mask[0, :, :, :])
pred_uvd = pred_uvd.cpu()
uvd_gt = uvd_gt.cpu()
for batch in range(self.bs):
cur_uvd_gt = uvd_gt[batch]
# get cell where hand root is present
gi0 = int(cur_uvd_gt[self.hand_root, 0]*self.W)
gj0 = int(cur_uvd_gt[self.hand_root, 1]*self.H)
gk0 = int(cur_uvd_gt[self.hand_root, 2]*self.D)
if gi0 >= self.W:
gi0 = self.W - 1
if gj0 >= self.H:
gj0 = self.H - 1
if gk0 >= self.D:
gk0 = self.D - 1
target_uvd[batch, :, 0, gj0, gi0, gk0] = \
cur_uvd_gt[:, 0]*self.W - gi0
target_uvd[batch, :, 1, gj0, gi0, gk0] = \
cur_uvd_gt[:, 1]*self.H - gj0
target_uvd[batch, :, 2, gj0, gi0, gk0] = \
cur_uvd_gt[:, 2]*self.D - gk0
coord_mask[batch, gj0, gi0, gk0] = 1
cur_pred_uvd = pred_uvd[batch, :, :, gj0, gi0, gk0]
target_conf[batch, gj0, gi0, gk0] = \
self.calc_conf(cur_pred_uvd, cur_uvd_gt)
conf_mask[batch, gj0, gi0, gk0] = self.hand_scale
t2 = time.time() # get target_uvd
# if self.debug:
# check_uvd_gt = torch.ones(uvd_gt[0].shape)
# gi0 = int(check_uvd_gt[self.hand_root, 0])
# gj0 = int(check_uvd_gt[self.hand_root, 1])
# gk0 = int(check_uvd_gt[self.hand_root, 2])
# target_uvd[0, :, 0, gj0, gi0, gk0] = \
# check_uvd_gt[:, 0]*self.W - gi0
# target_uvd[0, :, 1, gj0, gi0, gk0] = \
# check_uvd_gt[:, 1]*self.H - gj0
# target_uvd[0, :, 2, gj0, gi0, gk0] = \
# check_uvd_gt[:, 2]*self.D - gk0
# coord_mask[0, gj0, gi0, gk0] = 1
# check_pred_uvd = torch.ones(uvd_gt[0].shape)
# conf = self.calc_conf(check_pred_uvd, check_uvd_gt)
# target_conf[0, gj0, gi0, gk0] = conf
# conf_mask[0, gj0, gi0, gk0] = self.hand_scale
# print('conf_mask')
# print(conf_mask[0])
# print('target_conf')
# print(target_conf[0])
# print('target_uvd')
# print(target_uvd[0, 0, 0])
# print('coord_mask')
# print(coord_mask[0])
target_uvd = target_uvd.cuda()
target_conf = target_conf.cuda()
coord_mask = coord_mask.cuda()
conf_mask = conf_mask.cuda()
conf_mask = conf_mask.sqrt()
t3 = time.time() # CPU to GPU
if self.time:
print('------get_target-----')
print(' get_conf_mask : %f' % (t1 - t0))
print(' get target_uvd : %f' % (t2 - t1))
print(' CPU to GPU : %f' % (t3 - t2))
print(' total : %f' % (t3 - t0))
return target_uvd, target_conf, coord_mask, conf_mask
def forward(self, pred, uvd_gt):
"""
Loss calculation and processing
Args:
pred : (b, (63+1)*D, H, W)
uvd_gt : (b, 21, 3)
Out:
loss : Total loss and its components
"""
t0 = time.time() # start
pred_uvd_no_offset, pred_uvd, pred_conf = self.offset_to_uvd(pred)
t1 = time.time() # offset_to_uvd
# if self.debug:
# print("get_target shouldn't require_grad:")
# print(uvd_gt.requires_grad, pred_uvd.requires_grad)
# if self.debug:
# uvd_gt = torch.ones(uvd_gt.shape).cuda()*100
# uvd_gt[0, :, :] = 1
targets = self.get_target(uvd_gt, pred_uvd)
t2 = time.time() # get_target
target_uvd, target_conf, coord_mask, conf_mask = targets
mseloss = nn.MSELoss(reduction="sum")
coord_mask = coord_mask.repeat(21, 1, 1, 1, 1)
coord_mask = coord_mask.permute(1, 0, 2, 3, 4)
# if self.debug:
# print('pred_uvd_no_offset')
# print(pred_uvd_no_offset[0, 0, 0])
# print('pred_uvd')
# print(pred_uvd[0, 0, 0])
# print('pred_conf')
# print(pred_conf[0])
# print('target_conf')
# print(target_conf[0])
# print('conf_mask')
# print(conf_mask[0])
# print('coord_mask')
# print(coord_mask[0, 0])
# print('target_uvd')
# print(target_uvd[0, 0, 0])
loss_u = mseloss(coord_mask*pred_uvd_no_offset[:, :, 0, :, :, :],
coord_mask*target_uvd[:, :, 0, :, :, :])/2.0
loss_v = mseloss(coord_mask*pred_uvd_no_offset[:, :, 1, :, :, :],
coord_mask*target_uvd[:, :, 1, :, :, :])/2.0
loss_d = mseloss(coord_mask*pred_uvd_no_offset[:, :, 2, :, :, :],
coord_mask*target_uvd[:, :, 2, :, :, :])/2.0
loss_conf = mseloss(conf_mask*pred_conf, conf_mask*target_conf)/2.0
total_loss = loss_u + loss_v + loss_d + loss_conf
t3 = time.time() # loss
if self.time:
print('-----HPOLoss-----')
print(' offset_to_uvd : %f' % (t1 - t0))
print(' get target : %f' % (t2 - t1))
print(' calc loss : %f' % (t3 - t2))
print(' total : %f' % (t3 - t0))
return total_loss, loss_u, loss_v, loss_d, loss_conf
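# --- Editor's note (not part of the original module): a minimal sketch of
# constructing and calling this loss. The cfg values are hypothetical, but
# the keys match those read in HPOLoss.__init__:
#
#     cfg = {"hand_scale": 5.0, "no_hand_scale": 0.1, "hand_root": 3,
#            "sharpness": 2, "d_th": 75, "sil_thresh": 0.6,
#            "time": False, "debug": False}
#     criterion = HPOLoss(cfg)
#     # pred: (b, 64*5, H, W) network output on GPU; uvd_gt: (b, 21, 3)
#     total, loss_u, loss_v, loss_d, loss_conf = criterion(pred, uvd_gt)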
|
[
"aaronlws95@gmail.com"
] |
aaronlws95@gmail.com
|
313e02753867389f88313c3544ac0e53d58f6a9d
|
99c4d4a6592fded0e8e59652484ab226ac0bd38c
|
/code/batch-2/vse-naloge-brez-testov/DN6-M-108.py
|
3a22a42ad2bd80cd9b45619ec8eaeb982037e6c5
|
[] |
no_license
|
benquick123/code-profiling
|
23e9aa5aecb91753e2f1fecdc3f6d62049a990d5
|
0d496d649247776d121683d10019ec2a7cba574c
|
refs/heads/master
| 2021-10-08T02:53:50.107036
| 2018-12-06T22:56:38
| 2018-12-06T22:56:38
| 126,011,752
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,222
|
py
|
def besedilo(tvit):
a = tvit.split(" ")
stavek = " ".join(a[1:])
return stavek
def zadnji_tvit(tviti):
slovar = {}
for tvit in tviti:
avtor = tvit.split(":")[0]
slovar[avtor] = besedilo(tvit)
return slovar
def prvi_tvit(tviti):
slovar = {}
for tvit in tviti:
avtor = tvit.split(":")[0]
if avtor not in slovar:
slovar[avtor] = besedilo(tvit)
return slovar
def prestej_tvite(tviti):
slovar = {}
for tvit in tviti:
avtor = tvit.split(":")[0]
if avtor not in slovar:
slovar[avtor] = 1
else:
slovar[avtor] += 1
return slovar
def isal(beseda):
    while not beseda[-1].isalnum():
        beseda = beseda[:-1]
    while not beseda[0].isalnum():
        beseda = beseda[1:]
return beseda
from collections import defaultdict
def omembe(tviti):
slovar = defaultdict(list)
for tvit in tviti:
avtor = tvit.split(":")[0]
seznam = slovar[avtor]
for beseda in tvit.split(" "):
if beseda[0] == "@":
seznam.append(isal(beseda))
slovar[avtor] = seznam
return slovar
def neomembe(ime, omembe):
seznam = []
    for x in omembe:  # iterate over all the keys
        if x not in omembe[ime] and x != ime:  # if this key is not among ime's mentions and is not ime itself
            seznam.append(x)  # add it to the list
return seznam
def se_poznata(ime1, ime2, omembe):
se_poznata = False
for kljuc in omembe:
if kljuc == ime1 and ime2 in omembe[ime1] or kljuc == ime2 and ime1 in omembe[ime2]:
se_poznata = True
break
return se_poznata
def hashtagi(tviti):
slovar = defaultdict(list)
for tvit in tviti:
avtor = tvit.split(":")[0]
besede = tvit.split(" ")
for beseda in besede:
if beseda[0] == "#":
seznam = slovar[isal(beseda)]
seznam.append(avtor)
slovar[isal(beseda)] = sorted(seznam)
return slovar
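# --- Editor's note (not part of the original exercise): the functions above
# expect tweets as "author: text" strings. A hypothetical example:
#
#     tviti = ["ana: @bor hello! #monday", "bor: good morning #monday"]
#     prestej_tvite(tviti)   # -> {'ana': 1, 'bor': 1}
#     hashtagi(tviti)        # -> {'monday': ['ana', 'bor']}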
|
[
"benjamin.fele@gmail.com"
] |
benjamin.fele@gmail.com
|